diff --git a/.github/workflows/Tests.yml b/.github/workflows/Tests.yml
index 590e512..867e0b6 100644
--- a/.github/workflows/Tests.yml
+++ b/.github/workflows/Tests.yml
@@ -47,8 +47,7 @@ jobs:
cp -a $GITHUB_WORKSPACE /home/gap/.gap/pkg/
- name: Prepare environment
run: |
- cp ./GradientDescentForCAP/dev/ci_gaprc /home/gap/.gap/gaprc
- git clone --depth 1 -vv https://github.com/gap-packages/AutoDoc.git
+ cp ./GradientBasedLearningForCAP/dev/ci_gaprc /home/gap/.gap/gaprc
git clone --depth 1 -vv https://github.com/homalg-project/homalg_project.git
git clone --depth 1 -vv https://github.com/homalg-project/CAP_project.git
git clone --depth 1 -vv https://github.com/homalg-project/CategoricalTowers.git
@@ -59,18 +58,20 @@ jobs:
if [ -d "CAP_project/CompilerForCAP" ]; then make -C "CAP_project/CompilerForCAP" doc; fi
if [ -d "CAP_project/MonoidalCategories" ]; then make -C "CAP_project/MonoidalCategories" doc; fi
if [ -d "CAP_project/CartesianCategories" ]; then make -C "CAP_project/CartesianCategories" doc; fi
+ if [ -d "CAP_project/AdditiveClosuresForCAP" ]; then make -C "CAP_project/AdditiveClosuresForCAP" doc; fi
if [ -d "CAP_project/FreydCategoriesForCAP" ]; then make -C "CAP_project/FreydCategoriesForCAP" doc; fi
if [ -d "HigherHomologicalAlgebra/ToolsForHigherHomologicalAlgebra" ]; then make -C "HigherHomologicalAlgebra/ToolsForHigherHomologicalAlgebra" doc; fi
if [ -d "homalg_project/homalg" ]; then make -C "homalg_project/homalg" doc; fi
if [ -d "homalg_project/Modules" ]; then make -C "homalg_project/Modules" doc; fi
if [ -d "CategoricalTowers/ToolsForCategoricalTowers" ]; then make -C "CategoricalTowers/ToolsForCategoricalTowers" doc; fi
if [ -d "CategoricalTowers/Toposes" ]; then make -C "CategoricalTowers/Toposes" doc; fi
- - name: Test GradientDescentForCAP
+ if [ -d "GradientBasedLearningForCAP" ]; then make -C "GradientBasedLearningForCAP" doc; fi
+ - name: Test GradientBasedLearningForCAP
run: |
- make -C GradientDescentForCAP --trace -j $(nproc) --output-sync ci-test
+ make -C GradientBasedLearningForCAP --trace -j $(nproc) --output-sync ci-test
- name: Release package or simulate release
run: |
- cd GradientDescentForCAP
+ cd GradientBasedLearningForCAP
python3 dev/process_coverage.py
git config --global user.name "Bot"
git config --global user.email "empty"
@@ -88,5 +89,5 @@ jobs:
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: |
- cd GradientDescentForCAP
+ cd GradientBasedLearningForCAP
./dev/upload_codecov.sh
diff --git a/.gitignore b/.gitignore
index ed793a1..528d633 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,4 +31,4 @@
/doc/*.xml
/doc_tmp/
/stats
-/tst/machinelearningforcap*.tst
+/tst/gradientbasedlearningforcap*.tst
diff --git a/PackageInfo.g b/PackageInfo.g
index 1ce60b9..b97a933 100644
--- a/PackageInfo.g
+++ b/PackageInfo.g
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# This file contains package meta data. For additional information on
# the meaning and correct usage of these fields, please consult the
@@ -8,9 +8,9 @@
#
SetPackageInfo( rec(
-PackageName := "GradientDescentForCAP",
-Subtitle := "Exploring categorical machine learning in CAP",
-Version := "2025.11-01",
+PackageName := "GradientBasedLearningForCAP",
+Subtitle := "Gradient Based Learning via Category Theory",
+Version := "2026.01-01",
Date := (function ( ) if IsBound( GAPInfo.SystemEnvironment.GAP_PKG_RELEASE_DATE ) then return GAPInfo.SystemEnvironment.GAP_PKG_RELEASE_DATE; else return Concatenation( ~.Version{[ 1 .. 4 ]}, "-", ~.Version{[ 6, 7 ]}, "-01" ); fi; end)( ),
License := "GPL-2.0-or-later",
@@ -34,13 +34,13 @@ Persons := [
# BEGIN URLS
SourceRepository := rec(
Type := "git",
- URL := "https://github.com/homalg-project/GradientDescentForCAP",
+ URL := "https://github.com/homalg-project/GradientBasedLearningForCAP",
),
IssueTrackerURL := Concatenation( ~.SourceRepository.URL, "/issues" ),
-PackageWWWHome := "https://homalg-project.github.io/pkg/GradientDescentForCAP",
-PackageInfoURL := "https://homalg-project.github.io/GradientDescentForCAP/PackageInfo.g",
-README_URL := "https://homalg-project.github.io/GradientDescentForCAP/README.md",
-ArchiveURL := Concatenation( "https://github.com/homalg-project/GradientDescentForCAP/releases/download/v", ~.Version, "/GradientDescentForCAP-", ~.Version ),
+PackageWWWHome := "https://homalg-project.github.io/pkg/GradientBasedLearningForCAP",
+PackageInfoURL := "https://homalg-project.github.io/GradientBasedLearningForCAP/PackageInfo.g",
+README_URL := "https://homalg-project.github.io/GradientBasedLearningForCAP/README.md",
+ArchiveURL := Concatenation( "https://github.com/homalg-project/GradientBasedLearningForCAP/releases/download/v", ~.Version, "/GradientBasedLearningForCAP-", ~.Version ),
# END URLS
ArchiveFormats := ".tar.gz .zip",
@@ -58,16 +58,16 @@ Status := "dev",
AbstractHTML := "",
PackageDoc := rec(
- BookName := "GradientDescentForCAP",
+ BookName := "GradientBasedLearningForCAP",
ArchiveURLSubset := ["doc"],
HTMLStart := "doc/chap0.html",
PDFFile := "doc/manual.pdf",
SixFile := "doc/manual.six",
- LongTitle := "Exploring categorical machine learning in CAP",
+ LongTitle := "Gradient Based Learning via Category Theory",
),
Dependencies := rec(
- GAP := ">= 4.15.1",
+ GAP := ">= 4.13.0",
NeededOtherPackages := [
[ "GAPDoc", ">= 1.5" ],
[ "CAP", ">= 2024.09-12" ],
diff --git a/README.md b/README.md
index e55de5c..17dece7 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
-# GradientDescentForCAP [![View code][code-img]][code-url]
+# GradientBasedLearningForCAP [![View code][code-img]][code-url]
-### Exploring categorical machine learning in CAP
+### Gradient Based Learning via Category Theory
| Documentation | Latest Release | Build Status | Code Coverage |
| ------------- | -------------- | ------------ | ------------- |
@@ -22,25 +22,25 @@ To obtain current versions of all dependencies, `git clone` (or `git pull` to up
| 3. | [**CategoricalTowers**](https://github.com/homalg-project/CategoricalTowers#readme) | https://github.com/homalg-project/CategoricalTowers.git |
[html-img]: https://img.shields.io/badge/🔗%20HTML-stable-blue.svg
-[html-url]: https://homalg-project.github.io/GradientDescentForCAP/doc/chap0_mj.html
+[html-url]: https://homalg-project.github.io/GradientBasedLearningForCAP/doc/chap0_mj.html
[pdf-img]: https://img.shields.io/badge/🔗%20PDF-stable-blue.svg
-[pdf-url]: https://homalg-project.github.io/GradientDescentForCAP/download_pdf.html
+[pdf-url]: https://homalg-project.github.io/GradientBasedLearningForCAP/download_pdf.html
-[version-img]: https://img.shields.io/endpoint?url=https://homalg-project.github.io/GradientDescentForCAP/badge_version.json&label=🔗%20version&color=yellow
-[version-url]: https://homalg-project.github.io/GradientDescentForCAP/view_release.html
+[version-img]: https://img.shields.io/endpoint?url=https://homalg-project.github.io/GradientBasedLearningForCAP/badge_version.json&label=🔗%20version&color=yellow
+[version-url]: https://homalg-project.github.io/GradientBasedLearningForCAP/view_release.html
-[date-img]: https://img.shields.io/endpoint?url=https://homalg-project.github.io/GradientDescentForCAP/badge_date.json&label=🔗%20released%20on&color=yellow
-[date-url]: https://homalg-project.github.io/GradientDescentForCAP/view_release.html
+[date-img]: https://img.shields.io/endpoint?url=https://homalg-project.github.io/GradientBasedLearningForCAP/badge_date.json&label=🔗%20released%20on&color=yellow
+[date-url]: https://homalg-project.github.io/GradientBasedLearningForCAP/view_release.html
-[tests-img]: https://github.com/homalg-project/GradientDescentForCAP/actions/workflows/Tests.yml/badge.svg?branch=master
-[tests-url]: https://github.com/homalg-project/GradientDescentForCAP/actions/workflows/Tests.yml?query=branch%3Amaster
+[tests-img]: https://github.com/homalg-project/GradientBasedLearningForCAP/actions/workflows/Tests.yml/badge.svg?branch=master
+[tests-url]: https://github.com/homalg-project/GradientBasedLearningForCAP/actions/workflows/Tests.yml?query=branch%3Amaster
-[codecov-img]: https://codecov.io/gh/homalg-project/GradientDescentForCAP/branch/master/graph/badge.svg
-[codecov-url]: https://app.codecov.io/gh/homalg-project/GradientDescentForCAP
+[codecov-img]: https://codecov.io/gh/homalg-project/GradientBasedLearningForCAP/branch/master/graph/badge.svg
+[codecov-url]: https://app.codecov.io/gh/homalg-project/GradientBasedLearningForCAP
[code-img]: https://img.shields.io/badge/-View%20code-blue?logo=github
-[code-url]: https://github.com/homalg-project/GradientDescentForCAP#top
+[code-url]: https://github.com/homalg-project/GradientBasedLearningForCAP#top
### Running the Package with Docker
@@ -93,7 +93,7 @@ In this example, we consider a training dataset consisting of the three points
We aim to compute a line that fits $\mathcal{D}$.
```julia
-gap> LoadPackage( "GradientDescentForCAP" );
+gap> LoadPackage( "GradientBasedLearningForCAP" );
true
gap> Para := CategoryOfParametrisedMorphisms( SkeletalSmoothMaps );
@@ -288,7 +288,7 @@ To facilitate this classification, we use a one-hot encoding scheme for the labe
That is, the training set (the set of labeled exmaples) is a finite subset of $\mathbb{R}^2 \times \mathbb{R}^3 \simeq \mathbb{R}^5$.
```julia
-gap> LoadPackage( "GradientDescentForCAP" );
+gap> LoadPackage( "GradientBasedLearningForCAP" );
true
gap> Para := CategoryOfParametrisedMorphisms( SkeletalSmoothMaps );
diff --git a/dev/.release b/dev/.release
index 518dacf..148f382 100644
--- a/dev/.release
+++ b/dev/.release
@@ -14,7 +14,7 @@ sed "s;Date := .*;Date := \"$(date -I)\",;" PackageInfo.g > PackageInfo.g.bak
mv PackageInfo.g.bak PackageInfo.g
# replace links to packages which are possibly referenced in the documentation, keep this in sync with `Tests.yml.j2`
-for package in CAP_project/CAP CAP_project/CompilerForCAP CAP_project/MonoidalCategories CAP_project/CartesianCategories CAP_project/FreydCategoriesForCAP HigherHomologicalAlgebra/ToolsForHigherHomologicalAlgebra homalg_project/homalg homalg_project/Modules CategoricalTowers/ToolsForCategoricalTowers CategoricalTowers/Toposes; do
+for package in CAP_project/CAP CAP_project/CompilerForCAP CAP_project/MonoidalCategories CAP_project/CartesianCategories CAP_project/AdditiveClosuresForCAP CAP_project/FreydCategoriesForCAP HigherHomologicalAlgebra/ToolsForHigherHomologicalAlgebra homalg_project/homalg homalg_project/Modules CategoricalTowers/ToolsForCategoricalTowers CategoricalTowers/Toposes GradientBasedLearningForCAP; do
# adjust links to other manuals
# Note that we cannot use sed's `-i` option for in-place editing, as
diff --git a/dev/upload_codecov.sh b/dev/upload_codecov.sh
index c3f8a70..ff3812c 100755
--- a/dev/upload_codecov.sh
+++ b/dev/upload_codecov.sh
@@ -21,9 +21,16 @@ curl -O https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpgv codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
+# read the token
+if [ -z "$CODECOV_TOKEN" ]; then
+ echo -e "\033[0;33mCODECOV_TOKEN is not set. Proceeding without token.\033[0m"
+else
+ echo -e "\033[0;32mUsing CODECOV_TOKEN from environment variable.\033[0m"
+fi
+
# execute
chmod +x codecov
while ! ./codecov -Z -v -s ../ -t $CODECOV_TOKEN; do
- echo "Codecov upload failed, retrying in 20s"
- sleep 20
+ echo "Codecov upload failed, retrying in 60s"
+ sleep 60
done
diff --git a/doc/Doc.autodoc b/doc/Doc.autodoc
index 1650c0d..f3ea502 100644
--- a/doc/Doc.autodoc
+++ b/doc/Doc.autodoc
@@ -1 +1,101 @@
-@Chapter Examples and Tests
+@Chapter Introduction
+
+This package provides tools for exploring categorical machine learning using the CAP (Categories, Algorithms, Programming) system.
+It implements automatic differentiation using the lens pattern and provides constructs for building and training neural networks.
+
+@Section Overview
+
+The package implements the following main concepts:
+
+* **Examples**: Examples for creating and training neural networks.
+
+* **Expressions**: A symbolic expression system for representing mathematical formulas.
+
+* **Skeletal Category of Smooth Maps**: A category where objects are Euclidean spaces $\mathbb{R}^n$ and morphisms are smooth maps with their Jacobian matrices.
+
+* **Category of Parametrised Morphisms**: A category that represents morphisms with learnable parameters, used to model neural network layers.
+
+* **Neural Networks**: High-level operations for constructing and training neural networks.
+
+* **Category of Lenses**: A category that models bidirectional data flow, essential for backpropagation in neural networks.
+
+* **Fitting Parameters**: Explains how to learn the parameters in order to minimize a parametrised morphism.
+
+* **CAP Operation**: The new categorical operations needed in this package.
+
+* **Tools**: A few GAP operations and helper functions.
+
+
+@Chapter Examples for neural networks
+
+@Section Binary-class neural network with binary cross-entropy loss function
+@Section Multi-class neural network with cross-entropy loss function
+@Section Neural network with quadratic loss function
+
+@Chapter Expressions
+
+@Section Constructors
+@Section Attributes
+@Section Operations
+@Section Global Functions
+@Section Examples
+@Section GAP Categories
+
+@Chapter Skeletal Category of Smooth Maps
+
+@Section Constructors
+@Section Attributes
+@Section Operations
+@Section Available Smooth Maps
+@Section Supported CAP Operations
+@Section Examples
+@Section GAP Categories
+
+@Chapter Category of Parametrised Morphisms
+
+@Section Definition
+@Section Constructors
+@Section Attributes
+@Section Operations
+@Section Available Parametrised Morphisms
+@Section Supported CAP Operations
+@Section Examples
+@Section GAP Categories
+
+@Chapter Neural Networks
+
+@Section Definition
+@Section Operations
+@Section Examples
+
+@Chapter Category of Lenses
+
+@Section Definition
+@Section Constructors
+@Section Attributes
+@Section Operations
+@Section Available Lenses
+@Section Optimizers
+@Section Supported CAP Operations
+@Section Examples
+@Section GAP Categories
+
+
+@Chapter Fitting Parameters
+
+@Section Introduction
+@Section Notes on Batching
+@Section Operations
+@Section Examples
+
+@Chapter CAP Operations for GradientBasedLearningForCAP
+
+@Section Basic Operations
+@Section Add-Methods
+
+@Chapter Tools
+
+@Section Mathematical Operations
+@Section List Operations
+@Section Helper Functions
+@Section Python Integration
\ No newline at end of file
diff --git a/doc/clean b/doc/clean
index 54b6da2..ffe1f90 100755
--- a/doc/clean
+++ b/doc/clean
@@ -1,2 +1,2 @@
-#!/bin/bash
+#!/bin/sh
rm -f *.{aux,bbl,blg,brf,css,html,idx,ilg,ind,js,lab,log,out,pdf,pnr,six,tex,toc,txt,xml,xml.bib}
diff --git a/doc/sigmoid.png b/doc/sigmoid.png
new file mode 100644
index 0000000..4f82699
Binary files /dev/null and b/doc/sigmoid.png differ
diff --git a/examples/CategoryOfLenses.g b/examples/CategoryOfLenses.g
index 62d48f3..08eca5c 100644
--- a/examples/CategoryOfLenses.g
+++ b/examples/CategoryOfLenses.g
@@ -1,11 +1,11 @@
-#! @Chapter Examples and Tests
+#! @Chapter Category of Lenses
-#! @Section Category of Lenses
+#! @Section Examples
-LoadPackage( "GradientDescentForCAP" );
+LoadPackage( "GradientBasedLearningForCAP" );
#! @Example
-Smooth := CategoryOfSkeletalSmoothMaps( );
+Smooth := SkeletalCategoryOfSmoothMaps( );
#! SkeletalSmoothMaps
Lenses := CategoryOfLenses( Smooth );
#! CategoryOfLenses( SkeletalSmoothMaps )
@@ -29,11 +29,11 @@ f := MorphismConstructor( Lenses, A, [ get, put ], B );
#! (ℝ^1, ℝ^2) -> (ℝ^3, ℝ^4) defined by:
#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^1 -> ℝ^3
#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^5 -> ℝ^2
MorphismDatum( f );
#! [ ℝ^1 -> ℝ^3, ℝ^5 -> ℝ^2 ]
@@ -62,11 +62,11 @@ id_A := IdentityMorphism( Lenses, A );
#! (ℝ^1, ℝ^2) -> (ℝ^1, ℝ^2) defined by:
#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^1 -> ℝ^1
#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^3 -> ℝ^2
Display( id_A );
#! (ℝ^1, ℝ^2) -> (ℝ^1, ℝ^2) defined by:
@@ -93,71 +93,71 @@ f1 := RandomMorphism( A, B, 5 );
#! (ℝ^1, ℝ^2) -> (ℝ^3, ℝ^4) defined by:
#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^1 -> ℝ^3
#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^5 -> ℝ^2
f2 := RandomMorphism( A, B, 5 );
#! (ℝ^1, ℝ^2) -> (ℝ^3, ℝ^4) defined by:
#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^1 -> ℝ^3
#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^5 -> ℝ^2
f3 := RandomMorphism( A, B, 5 );
#! (ℝ^1, ℝ^2) -> (ℝ^3, ℝ^4) defined by:
#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^1 -> ℝ^3
#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^5 -> ℝ^2
f1_f2 := TensorProductOnMorphisms( Lenses, f1, f2 );
#! (ℝ^2, ℝ^4) -> (ℝ^6, ℝ^8) defined by:
#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^2 -> ℝ^6
#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^10 -> ℝ^4
f2_f3 := TensorProductOnMorphisms( Lenses, f2, f3 );
#! (ℝ^2, ℝ^4) -> (ℝ^6, ℝ^8) defined by:
#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^2 -> ℝ^6
#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^10 -> ℝ^4
t1 := TensorProductOnMorphisms( Lenses, f1_f2, f3 );
#! (ℝ^3, ℝ^6) -> (ℝ^9, ℝ^12) defined by:
#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^3 -> ℝ^9
#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^15 -> ℝ^6
t2 := TensorProductOnMorphisms( Lenses, f1, f2_f3 );
#! (ℝ^3, ℝ^6) -> (ℝ^9, ℝ^12) defined by:
#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^3 -> ℝ^9
#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^15 -> ℝ^6
IsCongruentForMorphisms( t1, t2 );
#! true
@@ -205,126 +205,183 @@ Display( PreCompose( Braiding( A, B ), BraidingInverse( A, B ) ) );
#! ‣ x8
#! ‣ x9
#! ‣ x10
-R := EmbeddingIntoCategoryOfLenses( Smooth, Lenses );
+#! @EndExample
+
+#! @Section Operations
+
+#! @Example
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+Lenses := CategoryOfLenses( Smooth );
+#! CategoryOfLenses( SkeletalSmoothMaps )
+R := ReverseDifferentialLensFunctor( Smooth, Lenses );
#! Embedding functor into category of lenses
SourceOfFunctor( R );
#! SkeletalSmoothMaps
RangeOfFunctor( R );
#! CategoryOfLenses( SkeletalSmoothMaps )
-f := Smooth.Softmax( 2 );
+f := DirectProductFunctorial( [ Smooth.Power(3), Smooth.Power(2) ] );
#! ℝ^2 -> ℝ^2
Display( f );
#! ℝ^2 -> ℝ^2
-#!
-#! ‣ Exp( x1 ) / (Exp( x1 ) + Exp( x2 ))
-#! ‣ Exp( x2 ) / (Exp( x1 ) + Exp( x2 ))
-Rf := ApplyFunctor( R, f );
+#!
+#! ‣ x1 ^ 3
+#! ‣ x2 ^ 2
+f := PreCompose( f, Smooth.Sum(2) );
+#! ℝ^2 -> ℝ^1
+Display( f );
+#! ℝ^2 -> ℝ^1
+#!
+#! ‣ x1 ^ 3 + x2 ^ 2
+Rf := ApplyFunctor( R, f );;
+Display( Rf );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^1) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! ‣ x1 ^ 3 + x2 ^ 2
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^3 -> ℝ^2
+#!
+#! ‣ x3 * (1 * (3 * x1 ^ 2) + 0)
+#! ‣ x3 * (0 + 1 * (2 * x2 ^ 1))
+#! @EndExample
+
+#! @Section Optimizers
+
+#! @Example
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+Lenses := CategoryOfLenses( Smooth );
+#! CategoryOfLenses( SkeletalSmoothMaps )
+optimizer := Lenses.GradientDescentOptimizer( :learning_rate := 0.01 )( 2 );
#! (ℝ^2, ℝ^2) -> (ℝ^2, ℝ^2) defined by:
-#!
+#!
#! Get Morphism:
-#! ----------
+#! ------------
#! ℝ^2 -> ℝ^2
-#!
+#!
#! Put Morphism:
-#! ----------
+#! ------------
#! ℝ^4 -> ℝ^2
-Display( Rf );
+dummy_input := [ "theta_1", "theta_2", "g1", "g2" ];;
+dummy_input := CreateContextualVariables( dummy_input );
+#! [ theta_1, theta_2, g1, g2 ]
+Display( optimizer : dummy_input := dummy_input );
#! (ℝ^2, ℝ^2) -> (ℝ^2, ℝ^2) defined by:
-#!
+#!
#! Get Morphism:
#! ------------
#! ℝ^2 -> ℝ^2
-#!
-#! ‣ Exp( x1 ) / (Exp( x1 ) + Exp( x2 ))
-#! ‣ Exp( x2 ) / (Exp( x1 ) + Exp( x2 ))
-#!
+#!
+#! ‣ theta_1
+#! ‣ theta_2
#! Put Morphism:
#! ------------
#! ℝ^4 -> ℝ^2
-#!
-#! ‣ x3 *
-#! ((Exp( x1 ) + Exp( x2 ) - Exp( x1 )) * (Exp( x1 ) / (Exp( x1 ) + Exp( x2 )) ^ 2))
-#! + x4 * ((- Exp( x1 )) * (Exp( x2 ) / (Exp( x1 ) + Exp( x2 )) ^ 2))
-#! ‣ x3 * ((- Exp( x2 )) * (Exp( x1 ) / (Exp( x1 ) + Exp( x2 )) ^ 2)) +
-#! x4 *
-#! ((Exp( x1 ) + Exp( x2 ) - Exp( x2 )) * (Exp( x2 ) / (Exp( x1 ) + Exp( x2 )) ^ 2))
-Display( Lenses.GradientDescentOptimizer( :learning_rate := 0.01 )( 2 ) );
-#! (ℝ^2, ℝ^2) -> (ℝ^2, ℝ^2) defined by:
-#!
+#!
+#! ‣ theta_1 + 0.01 * g1
+#! ‣ theta_2 + 0.01 * g2
+optimizer := Lenses.GradientDescentWithMomentumOptimizer(
+ :learning_rate := 0.01, momentum := 0.9 )( 2 );
+#! (ℝ^4, ℝ^4) -> (ℝ^2, ℝ^2) defined by:
+#!
#! Get Morphism:
#! ------------
-#! ℝ^2 -> ℝ^2
-#!
-#! ‣ x1
-#! ‣ x2
-#!
+#! ℝ^4 -> ℝ^2
+#!
#! Put Morphism:
#! ------------
+#! ℝ^6 -> ℝ^4
+dummy_input := [ "s1", "s2", "theta_1", "theta_2", "g1", "g2" ];;
+dummy_input := CreateContextualVariables( dummy_input );
+#! [ s1, s2, theta_1, theta_2, g1, g2 ]
+Display( optimizer : dummy_input := dummy_input );
+#! (ℝ^4, ℝ^4) -> (ℝ^2, ℝ^2) defined by:
+#!
+#! Get Morphism:
+#! ------------
#! ℝ^4 -> ℝ^2
-#!
-#! ‣ x1 + 0.01 * x3
-#! ‣ x2 + 0.01 * x4
-Display( Lenses.GradientDescentWithMomentumOptimizer(
- :learning_rate := 0.01, momentum := 0.9 )( 2 ) );
+#!
+#! ‣ theta_1
+#! ‣ theta_2
+#! Put Morphism:
+#! ------------
+#! ℝ^6 -> ℝ^4
+#!
+#! ‣ 0.9 * s1 + 0.01 * g1
+#! ‣ 0.9 * s2 + 0.01 * g2
+#! ‣ theta_1 + (0.9 * s1 + 0.01 * g1)
+#! ‣ theta_2 + (0.9 * s2 + 0.01 * g2)
+optimizer := Lenses.AdagradOptimizer( :learning_rate := 0.01 )( 2 );
#! (ℝ^4, ℝ^4) -> (ℝ^2, ℝ^2) defined by:
-#!
+#!
#! Get Morphism:
#! ------------
#! ℝ^4 -> ℝ^2
-#!
-#! ‣ x3
-#! ‣ x4
-#!
+#!
#! Put Morphism:
#! ------------
#! ℝ^6 -> ℝ^4
-#!
-#! ‣ 0.9 * x1 + 0.01 * x5
-#! ‣ 0.9 * x2 + 0.01 * x6
-#! ‣ x3 + (0.9 * x1 + 0.01 * x5)
-#! ‣ x4 + (0.9 * x2 + 0.01 * x6)
-Display( Lenses.AdagradOptimizer( :learning_rate := 0.01 )( 2 ) );
+Display( optimizer : dummy_input := dummy_input );
#! (ℝ^4, ℝ^4) -> (ℝ^2, ℝ^2) defined by:
-#!
+#!
#! Get Morphism:
#! ------------
#! ℝ^4 -> ℝ^2
-#!
-#! ‣ x3
-#! ‣ x4
-#!
+#!
+#! ‣ theta_1
+#! ‣ theta_2
#! Put Morphism:
#! ------------
#! ℝ^6 -> ℝ^4
-#!
-#! ‣ x1 + x5 ^ 2
-#! ‣ x2 + x6 ^ 2
-#! ‣ x3 + 0.01 * x5 / (1.e-07 + Sqrt( x1 + x5 ^ 2 ))
-#! ‣ x4 + 0.01 * x6 / (1.e-07 + Sqrt( x2 + x6 ^ 2 ))
-Display( Lenses.AdamOptimizer(
- :learning_rate := 0.01, beta_1 := 0.9, beta_2 := 0.999 )( 2 ) );
+#!
+#! ‣ s1 + g1 ^ 2
+#! ‣ s2 + g2 ^ 2
+#! ‣ theta_1 + 0.01 * g1 / (1.e-07 + Sqrt( s1 + g1 ^ 2 ))
+#! ‣ theta_2 + 0.01 * g2 / (1.e-07 + Sqrt( s2 + g2 ^ 2 ))
+optimizer := Lenses.AdamOptimizer(
+ :learning_rate := 0.01, beta_1 := 0.9, beta_2 := 0.999 )( 2 );
#! (ℝ^7, ℝ^7) -> (ℝ^2, ℝ^2) defined by:
#!
#! Get Morphism:
#! ------------
#! ℝ^7 -> ℝ^2
-#!
-#! ‣ x6
-#! ‣ x7
-#!
#! Put Morphism:
#! ------------
#! ℝ^9 -> ℝ^7
-#!
-#! ‣ x1 + 1
-#! ‣ 0.9 * x2 + 0.1 * x8
-#! ‣ 0.9 * x3 + 0.1 * x9
-#! ‣ 0.999 * x4 + 0.001 * x8 ^ 2
-#! ‣ 0.999 * x5 + 0.001 * x9 ^ 2
-#! ‣ x6 + 0.01 / (1 - 0.999 ^ x1)
-#! * ((0.9 * x2 + 0.1 * x8) /
-#! (1.e-07 + Sqrt( (0.999 * x4 + 0.001 * x8 ^ 2) / (1 - 0.999 ^ x1) )))
-#! ‣ x7 + 0.01 / (1 - 0.999 ^ x1)
-#! * ((0.9 * x3 + 0.1 * x9) /
-#! (1.e-07 + Sqrt( (0.999 * x5 + 0.001 * x9 ^ 2) / (1 - 0.999 ^ x1) )))
+dummy_input :=
+ [ "t", "m1", "m2", "v1", "v2", "theta_1", "theta_2", "g1", "g2" ];;
+dummy_input := CreateContextualVariables( dummy_input );
+#! [ t, m1, m2, v1, v2, theta_1, theta_2, g1, g2 ]
+Display( optimizer : dummy_input := dummy_input );
+#! (ℝ^7, ℝ^7) -> (ℝ^2, ℝ^2) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^7 -> ℝ^2
+#!
+#! ‣ theta_1
+#! ‣ theta_2
+#! Put Morphism:
+#! ------------
+#! ℝ^9 -> ℝ^7
+#!
+#! ‣ t + 1
+#! ‣ 0.9 * m1 + 0.1 * g1
+#! ‣ 0.9 * m2 + 0.1 * g2
+#! ‣ 0.999 * v1 + 0.001 * g1 ^ 2
+#! ‣ 0.999 * v2 + 0.001 * g2 ^ 2
+#! ‣ theta_1 + 0.01 /
+#! (1 - 0.999 ^ t) *
+#! ((0.9 * m1 + 0.1 * g1) /
+#! (1.e-07 + Sqrt( (0.999 * v1 + 0.001 * g1 ^ 2) / (1 - 0.999 ^ t) )))
+#! ‣ theta_2 + 0.01 /
+#! (1 - 0.999 ^ t) *
+#! ((0.9 * m2 + 0.1 * g2) /
+#! (1.e-07 + Sqrt( (0.999 * v2 + 0.001 * g2 ^ 2) / (1 - 0.999 ^ t) )))
#! @EndExample
diff --git a/examples/CategoryOfParametrisedMorphisms.g b/examples/CategoryOfParametrisedMorphisms.g
index 18cfb43..01bd350 100644
--- a/examples/CategoryOfParametrisedMorphisms.g
+++ b/examples/CategoryOfParametrisedMorphisms.g
@@ -1,14 +1,18 @@
-#! @Chapter Examples and Tests
+#! @Chapter Category of Parametrised Morphisms
-#! @Section Category of Parametrised Morphisms
+#! @Section Examples
-LoadPackage( "GradientDescentForCAP" );
+LoadPackage( "GradientBasedLearningForCAP" );
#! @Example
-Smooth := CategoryOfSkeletalSmoothMaps( );
+Smooth := SkeletalCategoryOfSmoothMaps( );
#! SkeletalSmoothMaps
Para := CategoryOfParametrisedMorphisms( Smooth );
#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+Display( Para );
+#! A CAP category with name CategoryOfParametrisedMorphisms( SkeletalSmoothMaps ):
+#!
+#! 12 primitive operations were used to derive 21 operations for this category
R1 := Smooth.( 1 );
#! ℝ^1
R2 := Smooth.( 2 );
@@ -19,109 +23,114 @@ R1 / Para;
#! ℝ^1
Para.( 1 );
#! ℝ^1
-IsEqualForObjects( Para.( 1 ), R1 / Para );
+Para.( 1 ) = R1 / Para;
#! true
-f := Smooth.Softmax( 3 );
-#! ℝ^3 -> ℝ^3
-f := MorphismConstructor( Para, R1 / Para, [ R2, f ], R3 / Para );
-#! ℝ^1 -> ℝ^3 defined by:
-#!
-#! Underlying Object:
-#! -----------------
-#! ℝ^2
-#!
-#! Underlying Morphism:
-#! -------------------
-#! ℝ^3 -> ℝ^3
+f := Para.Sin;;
Display( f );
-#! ℝ^1 -> ℝ^3 defined by:
+#! ℝ^1 -> ℝ^1 defined by:
#!
#! Underlying Object:
#! -----------------
-#! ℝ^2
+#! ℝ^0
#!
#! Underlying Morphism:
#! -------------------
-#! ℝ^3 -> ℝ^3
-#!
-#! ‣ Exp( x1 ) / (Exp( x1 ) + Exp( x2 ) + Exp( x3 ))
-#! ‣ Exp( x2 ) / (Exp( x1 ) + Exp( x2 ) + Exp( x3 ))
-#! ‣ Exp( x3 ) / (Exp( x1 ) + Exp( x2 ) + Exp( x3 ))
-IsWellDefined( f );
-#! true
-r := DirectProductFunctorial( Smooth, [ Smooth.Sqrt, Smooth.Cos ] );
-#! ℝ^2 -> ℝ^2
-Display( r );
-#! ℝ^2 -> ℝ^2
-#!
-#! ‣ Sqrt( x1 )
-#! ‣ Cos( x2 )
-g := ReparametriseMorphism( f, r );
-#! ℝ^1 -> ℝ^3 defined by:
-#!
+#! ℝ^1 -> ℝ^1
+#!
+#! ‣ Sin( x1 )
+p1 := ProjectionInFactorOfDirectProduct( Smooth, [ R3, R3 ], 1 );;
+Display( p1 );
+#! ℝ^6 -> ℝ^3
+#!
+#! ‣ x1
+#! ‣ x2
+#! ‣ x3
+p2 := ProjectionInFactorOfDirectProduct( Smooth, [ R3, R3 ], 2 );;
+Display( p2 );
+#! ℝ^6 -> ℝ^3
+#!
+#! ‣ x4
+#! ‣ x5
+#! ‣ x6
+m := MultiplicationForMorphisms( p1, p2 );
+#! ℝ^6 -> ℝ^3
+Display( m );
+#! ℝ^6 -> ℝ^3
+#!
+#! ‣ x1 * x4
+#! ‣ x2 * x5
+#! ‣ x3 * x6
+h := MorphismConstructor( Para, Para.(3), [ R3, m ], Para.(3) );;
+Display( h );
+#! ℝ^3 -> ℝ^3 defined by:
+#!
#! Underlying Object:
#! -----------------
-#! ℝ^2
-#!
+#! ℝ^3
+#!
#! Underlying Morphism:
#! -------------------
-#! ℝ^3 -> ℝ^3
-Display( g );
-#! ℝ^1 -> ℝ^3 defined by:
-#!
+#! ℝ^6 -> ℝ^3
+#!
+#! ‣ x1 * x4
+#! ‣ x2 * x5
+#! ‣ x3 * x6
+dummy_input := CreateContextualVariables(
+ [ "w1", "w2", "w3", "z1", "z2", "z3" ] );
+#! [ w1, w2, w3, z1, z2, z3 ]
+Display( h : dummy_input := dummy_input );
+#! ℝ^3 -> ℝ^3 defined by:
+#!
#! Underlying Object:
#! -----------------
-#! ℝ^2
-#!
+#! ℝ^3
+#!
#! Underlying Morphism:
#! -------------------
+#! ℝ^6 -> ℝ^3
+#!
+#! ‣ w1 * z1
+#! ‣ w2 * z2
+#! ‣ w3 * z3
+r := DirectProductFunctorial( Smooth,
+ [ Smooth.Sqrt, Smooth.Log, Smooth.Exp ] );;
+Display( r );
#! ℝ^3 -> ℝ^3
#!
-#! ‣ Exp( Sqrt( x1 ) ) / (Exp( Sqrt( x1 ) ) + Exp( Cos( x2 ) ) + Exp( x3 ))
-#! ‣ Exp( Cos( x2 ) ) / (Exp( Sqrt( x1 ) ) + Exp( Cos( x2 ) ) + Exp( x3 ))
-#! ‣ Exp( x3 ) / (Exp( Sqrt( x1 ) ) + Exp( Cos( x2 ) ) + Exp( x3 ))
-l := Para.AffineTransformation( 3, 2 );
-#! ℝ^3 -> ℝ^2 defined by:
-#!
-#! Underlying Object:
-#! -----------------
-#! ℝ^8
-#!
-#! Underlying Morphism:
-#! -------------------
-#! ℝ^11 -> ℝ^2
-h := PreCompose( g, l );
-#! ℝ^1 -> ℝ^2 defined by:
-#!
-#! Underlying Object:
-#! -----------------
-#! ℝ^10
-#!
-#! Underlying Morphism:
-#! -------------------
-#! ℝ^11 -> ℝ^2
-Display( h );
-#! ℝ^1 -> ℝ^2 defined by:
-#!
+#! ‣ Sqrt( x1 )
+#! ‣ Log( x2 )
+#! ‣ Exp( x3 )
+g := ReparametriseMorphism( h, r );;
+Display( g : dummy_input := dummy_input );
+#! ℝ^3 -> ℝ^3 defined by:
+#!
#! Underlying Object:
#! -----------------
-#! ℝ^10
-#!
+#! ℝ^3
+#!
#! Underlying Morphism:
#! -------------------
-#! ℝ^11 -> ℝ^2
-#!
-#! ‣ x1 * (Exp( Sqrt( x9 ) ) / (Exp( Sqrt( x9 ) ) + Exp( Cos( x10 ) ) + Exp( x11 )))
-#! + x2 * (Exp( Cos( x10 ) ) / (Exp( Sqrt( x9 ) ) + Exp( Cos( x10 ) ) + Exp( x11 )))
-#! + x3 * (Exp( x11 ) / (Exp( Sqrt( x9 ) ) + Exp( Cos( x10 ) ) + Exp( x11 ))) + x4
-#! ‣ x5 * (Exp( Sqrt( x9 ) ) / (Exp( Sqrt( x9 ) ) + Exp( Cos( x10 ) ) + Exp( x11 )))
-#! + x6 * (Exp( Cos( x10 ) ) / (Exp( Sqrt( x9 ) ) + Exp( Cos( x10 ) ) + Exp( x11 )))
-#! + x7 * (Exp( x11 ) / (Exp( Sqrt( x9 ) ) + Exp( Cos( x10 ) ) + Exp( x11 ))) + x8
-constants := [ 0.91, 0.24, 0.88, 0.59, 0.67, 0.05, 0.85, 0.31, 0.76, 0.04 ];;
-r := Smooth.Constant( constants );
-#! ℝ^0 -> ℝ^10
-t := ReparametriseMorphism( h, r );
-#! ℝ^1 -> ℝ^2 defined by:
+#! ℝ^6 -> ℝ^3
+#!
+#! ‣ Sqrt( w1 ) * z1
+#! ‣ Log( w2 ) * z2
+#! ‣ Exp( w3 ) * z3
+#! @EndExample
+
+#! Let us illustrate the natural embedding functor from the category of smooth maps into
+#! the category of parametrised morphisms.
+
+#! @Example
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+Para := CategoryOfParametrisedMorphisms( Smooth );
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+iota := NaturalEmbedding( Smooth, Para );
+#! Natural embedding into category of parametrised morphisms
+ApplyFunctor( iota, Smooth.( 1 ) );
+#! ℝ^1
+psi := ApplyFunctor( iota, Smooth.Sum( 2 ) );
+#! ℝ^2 -> ℝ^1 defined by:
#!
#! Underlying Object:
#! -----------------
@@ -129,9 +138,9 @@ t := ReparametriseMorphism( h, r );
#!
#! Underlying Morphism:
#! -------------------
-#! ℝ^1 -> ℝ^2
-Display( t );
-#! ℝ^1 -> ℝ^2 defined by:
+#! ℝ^2 -> ℝ^1
+Display( psi );
+#! ℝ^2 -> ℝ^1 defined by:
#!
#! Underlying Object:
#! -----------------
@@ -139,41 +148,51 @@ Display( t );
#!
#! Underlying Morphism:
#! -------------------
-#! ℝ^1 -> ℝ^2
-#!
-#! ‣ 0.91 * (2.39116 / (5.10727 + Exp( x1 ))) + 0.24 * (2.71611 / (5.10727 + Exp( x1 )))
-#! + 0.88 * (Exp( x1 ) / (5.10727 + Exp( x1 ))) + 0.59
-#! ‣ 0.67 * (2.39116 / (5.10727 + Exp( x1 ))) + 0.05 * (2.71611 / (5.10727 + Exp( x1 )))
-#! + 0.85 * (Exp( x1 ) / (5.10727 + Exp( x1 ))) + 0.31
-s := SimplifyMorphism( t, infinity );
-#! ℝ^1 -> ℝ^2 defined by:
+#! ℝ^2 -> ℝ^1
#!
+#! ‣ x1 + x2
+#! @EndExample
+
+#! @Section Available Parametrised Morphisms
+
+#! @Example
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+Para := CategoryOfParametrisedMorphisms( Smooth );
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+Display( Para.Relu( 2 ) );
+#! ℝ^2 -> ℝ^2 defined by:
+#!
#! Underlying Object:
#! -----------------
#! ℝ^0
-#!
+#!
#! Underlying Morphism:
#! -------------------
-#! ℝ^1 -> ℝ^2
-Display( s );
-#! ℝ^1 -> ℝ^2 defined by:
-#!
+#! ℝ^2 -> ℝ^2
+#!
+#! ‣ Relu( x1 )
+#! ‣ Relu( x2 )
+dummy_input := DummyInputForAffineTransformation( 3, 2, "w", "b", "z" );
+#! [ w1_1, w2_1, w3_1, b_1, w1_2, w2_2, w3_2, b_2, z1, z2, z3 ]
+affine_transformation := Para.AffineTransformation( 3, 2 );;
+Display( affine_transformation : dummy_input := dummy_input );
+#! ℝ^3 -> ℝ^2 defined by:
+#!
#! Underlying Object:
#! -----------------
-#! ℝ^0
-#!
+#! ℝ^8
+#!
#! Underlying Morphism:
#! -------------------
-#! ℝ^1 -> ℝ^2
-#!
-#! ‣ (1.47 * Exp( x1 ) + 5.84111) / (Exp( x1 ) + 5.10727)
-#! ‣ (1.16 * Exp( x1 ) + 3.32114) / (Exp( x1 ) + 5.10727)
-iota := NaturalEmbeddingIntoCategoryOfParametrisedMorphisms( Smooth, Para );
-#! Natural embedding into category of parametrised morphisms
-ApplyFunctor( iota, Smooth.( 1 ) );
-#! ℝ^1
-psi := ApplyFunctor( iota, Smooth.Sum( 2 ) );
-#! ℝ^2 -> ℝ^1 defined by:
+#! ℝ^11 -> ℝ^2
+#!
+#! ‣ w1_1 * z1 + w2_1 * z2 + w3_1 * z3 + b_1
+#! ‣ w1_2 * z1 + w2_2 * z2 + w3_2 * z3 + b_2
+"Let us convert these 2 togits to probabilities via softmax layer.";;
+softmax_layer := Para.Softmax( 2 );;
+Display( softmax_layer );
+#! ℝ^2 -> ℝ^2 defined by:
#!
#! Underlying Object:
#! -----------------
@@ -181,17 +200,30 @@ psi := ApplyFunctor( iota, Smooth.Sum( 2 ) );
#!
#! Underlying Morphism:
#! -------------------
-#! ℝ^2 -> ℝ^1
-Print( DisplayString( psi ) );
-#! ℝ^2 -> ℝ^1 defined by:
+#! ℝ^2 -> ℝ^2
#!
+#! ‣ Exp( x1 ) / (Exp( x1 ) + Exp( x2 ))
+#! ‣ Exp( x2 ) / (Exp( x1 ) + Exp( x2 ))
+probs := PreCompose( affine_transformation, softmax_layer );;
+Display( probs : dummy_input := dummy_input );
+#! ℝ^3 -> ℝ^2 defined by:
+#!
#! Underlying Object:
#! -----------------
-#! ℝ^0
-#!
+#! ℝ^8
+#!
#! Underlying Morphism:
#! -------------------
-#! ℝ^2 -> ℝ^1
-#!
-#! ‣ x1 + x2
+#! ℝ^11 -> ℝ^2
+#!
+#! ‣ Exp( w1_1 * z1 + w2_1 * z2 + w3_1 * z3 + b_1 )
+#! / (Exp( w1_1 * z1 + w2_1 * z2 + w3_1 * z3 + b_1 )
+#! + Exp( w1_2 * z1 + w2_2 * z2 + w3_2 * z3 + b_2 ))
+#! ‣ Exp( w1_2 * z1 + w2_2 * z2 + w3_2 * z3 + b_2 )
+#! / (Exp( w1_1 * z1 + w2_1 * z2 + w3_1 * z3 + b_1 )
+#! + Exp( w1_2 * z1 + w2_2 * z2 + w3_2 * z3 + b_2 ))
+parameters := [ 0.91, 0.24, 0.88, 0.59, 0.67, 0.05, 0.85, 0.31 ];;
+logits := [ 1.0, 2.0, 3.0 ];;
+Eval( probs, [ parameters, logits ] );
+#! [ 0.729088, 0.270912 ]
#! @EndExample
diff --git a/examples/CategoryOfSkeletalSmoothMaps.g b/examples/CategoryOfSkeletalSmoothMaps.g
index a1ea661..e5d09ed 100644
--- a/examples/CategoryOfSkeletalSmoothMaps.g
+++ b/examples/CategoryOfSkeletalSmoothMaps.g
@@ -1,12 +1,26 @@
-#! @Chapter Examples and Tests
+#! @Chapter Skeletal Category of Smooth Maps
-#! @Section Category of Smooth Maps
+#! @Section Examples
-LoadPackage( "GradientDescentForCAP" );
+LoadPackage( "GradientBasedLearningForCAP" );
+
+#! In this example, we demonstrate the usage of the SkeletalCategoryOfSmoothMaps
+#! by constructing objects and morphisms, performing various operations, and
+#! utilizing built-in functions.
#! @Example
-Smooth := CategoryOfSkeletalSmoothMaps( );
+Smooth := SkeletalCategoryOfSmoothMaps( );
#! SkeletalSmoothMaps
+Display( Smooth );
+#! A CAP category with name SkeletalSmoothMaps:
+#!
+#! 49 primitive operations were used to derive 92 operations for this category wh\
+#! ich algorithmically
+#! * IsCartesianCategory
+#! * IsLinearCategoryOverCommutativeRing
+#! * IsSymmetricMonoidalCategory
+#! and furthermore mathematically
+#! * IsStrictMonoidalCategory
R2 := ObjectConstructor( Smooth, 2 );
#! ℝ^2
R2 = Smooth.2;
@@ -30,7 +44,7 @@ Display( f );
#!
#! ‣ x1 ^ 2 + Sin( x2 )
#! ‣ Exp( x1 ) + 3 * x2
-dummy_input := ConvertToExpressions( [ "x1", "x2" ] );
+dummy_input := CreateContextualVariables( [ "x1", "x2" ] );
#! [ x1, x2 ]
Map( f )( dummy_input );
#! [ x1 ^ 2 + Sin( x2 ), Exp( x1 ) + 3 * x2 ]
@@ -130,6 +144,16 @@ PreCompose( Smooth, u, p1 ) = f;
#! true
PreCompose( Smooth, u, p2 ) = g;
#! true
+Display( f );
+#! ℝ^2 -> ℝ^2
+#!
+#! ‣ x1 ^ 2 + Sin( x2 )
+#! ‣ Exp( x1 ) + 3 * x2
+Display( g );
+#! ℝ^2 -> ℝ^3
+#! ‣ 3 * x1
+#! ‣ Exp( x2 )
+#! ‣ x1 ^ 3 + Log( x2 )
d := DirectProductFunctorial( Smooth, [ f, g ] );
#! ℝ^4 -> ℝ^5
Display( d );
@@ -250,19 +274,145 @@ Display( Smooth.SoftmaxCrossEntropyLoss( 3 ) );
#! ‣ ((Log( Exp( x1 ) + Exp( x2 ) + Exp( x3 ) ) - x1) * x4
#! + (Log( Exp( x1 ) + Exp( x2 ) + Exp( x3 ) ) - x2) * x5
#! + (Log( Exp( x1 ) + Exp( x2 ) + Exp( x3 ) ) - x3) * x6) / 3
-Display( Smooth.AffineTransformation( 2, 3 ) );
+#! @EndExample
+
+#! In this example, we illustrate the various convenience ways to construct smooth maps. For instance, the smooth map:
+#! @BeginLatexOnly
+#! \[
+#! \begin{array}{l}
+#! g:\mathbb{R}^{2}\rightarrow\mathbb{R}^{3}\\
+#! (x_{1}, x_{2}) \mapsto \left( 3 x_{1}, e^{x_{2}}, x_{1}^{3} + \log{\left(x_{2}\right)} \right) \\
+#! \end{array}
+#! \]
+#! whose Jacobian Matrix is:
+#! \[
+#! \left( \begin{array}{ll}
+#! 3 & 0 \\
+#! 0 & e^{x_{2}} \\
+#! 3 x_{1}^{2} & \frac{1}{x_{2}}
+#! \end{array} \right)
+#! \]
+#! @EndLatexOnly
+
+#! @Example
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+dummy_input := CreateContextualVariables( [ "x1", "x2" ] );
+#! [ x1, x2 ]
+g1 := MorphismConstructor( Smooth,
+ Smooth.2,
+ Pair(
+ x -> [ 3 * x[1], Exp( x[2] ), x[1] ^ 3 + Log( x[2] ) ],
+ x -> [ [ 3, 0 ],
+ [ 0, Exp( x[2] ) ],
+ [ 3 * x[1] ^ 2, 1 / x[2] ] ] ),
+ Smooth.3 );
+#! ℝ^2 -> ℝ^3
+Display( g1 );
+#! ℝ^2 -> ℝ^3
+#! ‣ 3 * x1
+#! ‣ Exp( x2 )
+#! ‣ x1 ^ 3 + Log( x2 )
+Map( g1 )( dummy_input );
+#! [ 3 * x1, Exp( x2 ), x1 ^ 3 + Log( x2 ) ]
+JacobianMatrix( g1 )( dummy_input );
+#! [ [ 3, 0 ], [ 0, Exp( x2 ) ], [ 3 * x1 ^ 2, 1 / x2 ] ]
+"# Use python to compute the Jacobian Matrix";;
+g2 := SmoothMap( Smooth,
+ Smooth.2,
+ x -> [ 3 * x[1], Exp( x[2] ), x[1] ^ 3 + Log( x[2] ) ],
+ Smooth.3,
+ true
+ );
+#! ℝ^2 -> ℝ^3
+g1 = g2;
+#! true
+"# Use python to compute the Jacobian Matrix";;
+g3 := SmoothMap( Smooth,
+ Smooth.2,
+ [ "3 * x1", "Exp( x2 )", "x1 ^ 3 + Log( x2 )" ],
+ Smooth.3,
+ true
+ );
+#! ℝ^2 -> ℝ^3
+g3 = g1;
+#! true
+"# Lazy evaluation of the Jacobian Matrix";;
+g4 := SmoothMap( Smooth,
+ Smooth.2,
+ [ "3 * x1", "Exp( x2 )", "x1 ^ 3 + Log( x2 )" ],
+ Smooth.3,
+ false
+ );
+#! ℝ^2 -> ℝ^3
+g4 = g1;
+#! true
+Map( g4 )( dummy_input );
+#! [ 3 * x1, Exp( x2 ), x1 ^ 3 + Log( x2 ) ]
+J := JacobianMatrix( g4 )( [ 1, 2 ] );
+#! [ [ Diff( [ "x1", "x2" ], "3 * x1", 1 )( [ 1, 2 ] ),
+#! Diff( [ "x1", "x2" ], "3 * x1", 2 )( [ 1, 2 ] ) ],
+#! [ Diff( [ "x1", "x2" ], "Exp( x2 )", 1 )( [ 1, 2 ] ),
+#! Diff( [ "x1", "x2" ], "Exp( x2 )", 2 )( [ 1, 2 ] ) ],
+#! [ Diff( [ "x1", "x2" ], "x1 ^ 3 + Log( x2 )", 1 )( [ 1, 2 ] ),
+#! Diff( [ "x1", "x2" ], "x1 ^ 3 + Log( x2 )", 2 )( [ 1, 2 ] ) ] ]
+"# Evaluation uses python to compute the derivatives";;
+Eval( J[3][2] );
+#! 1/2
+Diff( [ "x1", "x2" ], "x1 ^ 3 + Log( x2 )", 2 )( [ 1, 2 ] );
+#! 1/2
+Eval( J );
+#! [ [ 3, 0 ], [ 0, 7.38906 ], [ 3, 1/2 ] ]
+#! @EndExample
+
+#! In the following example, we demonstrate the construction and usage of an affine transformation
+#! morphism within the SkeletalCategoryOfSmoothMaps.
+
+#! @Example
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+affine_trans := Smooth.AffineTransformation( 2, 3 );
#! ℝ^11 -> ℝ^3
-#!
+Display( affine_trans );
+#! ℝ^11 -> ℝ^3
+#!
#! ‣ x1 * x10 + x2 * x11 + x3
#! ‣ x4 * x10 + x5 * x11 + x6
#! ‣ x7 * x10 + x8 * x11 + x9
-Display( Smooth.PolynomialTransformation( 2, 3, 2 ) );
-#! ℝ^20 -> ℝ^3
-#!
-#! ‣ x1 * x19 ^ 2 + x2 * (x19 ^ 1 * x20 ^ 1) + x3 * x19 ^ 1
-#! + x4 * x20 ^ 2 + x5 * x20 ^ 1 + x6 * 1
-#! ‣ x7 * x19 ^ 2 + x8 * (x19 ^ 1 * x20 ^ 1) + x9 * x19 ^ 1
-#! + x10 * x20 ^ 2 + x11 * x20 ^ 1 + x12 * 1
-#! ‣ x13 * x19 ^ 2 + x14 * (x19 ^ 1 * x20 ^ 1) + x15 * x19 ^ 1
-#! + x16 * x20 ^ 2 + x17 * x20 ^ 1 + x18 * 1
+dummy_input := DummyInputForAffineTransformation( 2, 3, "w", "b", "z" );
+#! [ w1_1, w2_1, b_1, w1_2, w2_2, b_2, w1_3, w2_3, b_3, z1, z2 ]
+Map( affine_trans )( dummy_input );
+#! [ w1_1 * z1 + w2_1 * z2 + b_1,
+#! w1_2 * z1 + w2_2 * z2 + b_2,
+#! w1_3 * z1 + w2_3 * z2 + b_3 ]
+JacobianMatrix( affine_trans )( dummy_input );
+#! [ [ z1, z2, 1, 0, 0, 0, 0, 0, 0, w1_1, w2_1 ],
+#! [ 0, 0, 0, z1, z2, 1, 0, 0, 0, w1_2, w2_2 ],
+#! [ 0, 0, 0, 0, 0, 0, z1, z2, 1, w1_3, w2_3 ] ]
#! @EndExample
+
+#! @BeginLatexOnly
+#! To view the affine transformation map and its Jacobian matrix in LaTeX format, we can use:
+#! \begin{center}
+#! \texttt{Show( LaTeXOutput( affine\_trans : dummy\_input := dummy\_input ) );}
+#! \end{center}
+#! which produces:
+#! \[
+#! \begin{array}{c}
+#! \mathbb{R}^{11}\rightarrow\mathbb{R}^{3}\\
+#! \hline \\
+#! \left( \begin{array}{l}
+#! b_{1} + w_{1 1} z_{1} + w_{2 1} z_{2} \\
+#! b_{2} + w_{1 2} z_{1} + w_{2 2} z_{2} \\
+#! b_{3} + w_{1 3} z_{1} + w_{2 3} z_{2}
+#! \end{array} \right)\\
+#! \\
+#! \hline \\
+#! \left( \begin{array}{lllllllllll}
+#! z_{1} & z_{2} & 1 & 0 & 0 & 0 & 0 & 0 & 0 & w_{1 1} & w_{2 1} \\
+#! 0 & 0 & 0 & z_{1} & z_{2} & 1 & 0 & 0 & 0 & w_{1 2} & w_{2 2} \\
+#! 0 & 0 & 0 & 0 & 0 & 0 & z_{1} & z_{2} & 1 & w_{1 3} & w_{2 3}
+#! \end{array} \right)
+#! \end{array}
+#! \]
+#! @EndLatexOnly
diff --git a/examples/ComputingTheNextLocalMimima/next_local_minima.g b/examples/ComputingTheNextLocalMimima/next_local_minima.g
index 59ffe25..6f45a40 100644
--- a/examples/ComputingTheNextLocalMimima/next_local_minima.g
+++ b/examples/ComputingTheNextLocalMimima/next_local_minima.g
@@ -1,9 +1,9 @@
-LoadPackage( "GradientDescentForCAP" );
+LoadPackage( "GradientBasedLearningForCAP" );
# the function f(x1,x2) = sin(x1)^2 + log(x2)^2 has local miminima at the points (πk, 1) where k ∈ ℤ
-Smooth := CategoryOfSkeletalSmoothMaps( );
+Smooth := SkeletalCategoryOfSmoothMaps( );
Lenses := CategoryOfLenses( Smooth );
Para := CategoryOfParametrisedMorphisms( Smooth );
diff --git a/examples/Expressions.g b/examples/Expressions.g
index 33347a3..a6170ca 100644
--- a/examples/Expressions.g
+++ b/examples/Expressions.g
@@ -1,16 +1,15 @@
-#! @Chapter Examples and Tests
-#! @Section Expressions
+#! @Chapter Expressions
-LoadPackage( "MachineLearning" );
+LoadPackage( "GradientBasedLearningForCAP" );;
+
+#! @Section Examples
#! @Example
-vars := [ "x", "y", "z" ];
-#! [ "x", "y", "z" ]
-e1 := Expression( vars, "x + Sin( y ) * Log( z )" );
-#! x + Sin( y ) * Log( z )
-e2 := Expression( vars, "( x * y + Sin( z ) ) ^ 2" );
-#! (x * y + Sin( z )) ^ 2
+e1 := Expression( ["x", "y"], "x + Sin( y )" );
+#! x + Sin( y )
+e2 := Expression( ["y", "z"], "( y + Sin( z ) ) ^ 2" );
+#! (y + Sin( z )) ^ 2
CategoriesOfObject( e1 );
#! [ "IsExtAElement", "IsNearAdditiveElement", "IsNearAdditiveElementWithZero",
#! "IsNearAdditiveElementWithInverse", "IsAdditiveElement", "IsExtLElement",
@@ -18,15 +17,28 @@ CategoriesOfObject( e1 );
KnownAttributesOfObject( e1 );
#! [ "String", "Variables" ]
String( e1 );
-#! "x + Sin( y ) * Log( z )"
+#! "x + Sin( y )"
Variables( e1 );
-#! [ "x", "y", "z" ]
+#! [ "x", "y" ]
+Variables( e2 );
+#! [ "y", "z" ]
e1 + e2;
-#! x + Sin( y ) * Log( z ) + (x * y + Sin( z )) ^ 2
+#! x + Sin( y ) + (y + Sin( z )) ^ 2
e1 * e2;
-#! (x + Sin( y ) * Log( z )) * (x * y + Sin( z )) ^ 2
+#! (x + Sin( y )) * (y + Sin( z )) ^ 2
e := Sin( e1 ) / e2;
-#! Sin( x + Sin( y ) * Log( z ) ) / (x * y + Sin( z )) ^ 2
+#! Sin( x + Sin( y ) ) / (y + Sin( z )) ^ 2
+Variables( e );
+#! [ "x", "y", "z" ]
+ConstantExpression( "tau" );
+#! #I MakeReadWriteGlobal: tau already read-write
+#! tau
+Variables( tau );
+#! [ ]
+e3 := e1 * Sin( tau );
+#! (x + Sin( y )) * Sin( tau )
+Variables( e3 );
+#! [ "x", "y" ]
f := AsFunction( e );
#! function( vec ) ... end
Display( f );
@@ -36,16 +48,16 @@ Display( f );
#! x := vec[1];
#! y := vec[2];
#! z := vec[3];
-#! return Sin( x + Sin( y ) * Log( z ) ) / (x * y + Sin( z )) ^ 2;
+#! return Sin( x + Sin( y ) ) / (y + Sin( z )) ^ 2;
#! end
x := [ 3, 2, 4 ];
#! [ 3, 2, 4 ]
f( x );
-#! -0.032725
-dummy_input := ConvertToExpressions( [ "x1", "x2", "x3" ] );
+#! -0.449348
+dummy_input := CreateContextualVariables( [ "x1", "x2", "x3" ] );
#! [ x1, x2, x3 ]
f( dummy_input );
-#! Sin( x1 + Sin( x2 ) * Log( x3 ) ) / (x1 * x2 + Sin( x3 )) ^ 2
+#! Sin( x1 + Sin( x2 ) ) / (x2 + Sin( x3 )) ^ 2
AssignExpressions( dummy_input );
#! #I MakeReadWriteGlobal: x1 already read-write
#! #I MakeReadWriteGlobal: x2 already read-write
@@ -56,23 +68,51 @@ Variables( x1 );
#! [ "x1", "x2", "x3" ]
[ [ x1, x2 ] ] * [ [ x3 ], [ -x3 ] ];
#! [ [ x1 * x3 + x2 * (- x3) ] ]
-e := Sin( x1 ) / Cos( x1 ) + Sin( x2 ) ^ 2 + Cos( x2 ) ^ 2;
-#! Sin( x1 ) / Cos( x1 ) + Sin( x2 ) ^ 2 + Cos( x2 ) ^ 2
-SimplifyExpressionUsingPython( [ e ] );
-#! [ "Tan(x1) + 1" ]
+#! @EndExample
+
+#! @Chapter Tools
+
+#! @Section Python Integration
+
+#! @Example
+dummy_input := CreateContextualVariables( [ "a", "b", "c" ] );
+#! [ a, b, c ]
+AssignExpressions( dummy_input );;
+#! #I MakeReadWriteGlobal: a already read-write
+#! #I MakeReadWriteGlobal: b already read-write
+#! #I MakeReadWriteGlobal: c already read-write
+e := Sin( a ) + Cos( b );
+#! Sin( a ) + Cos( b )
Diff( e, 1 )( dummy_input );
-#! Sin( x1 ) ^ 2 / Cos( x1 ) ^ 2 + 1
-LazyDiff( e, 1 )( dummy_input );;
-# Diff( [ "x1", "x2", "x3" ],
-# "(((Sin(x1))/(Cos(x1)))+((Sin(x2))^(2)))+((Cos(x2))^(2))", 1 )( [ x1, x2, x3 ] );
-JacobianMatrixUsingPython( [ x1*Cos(x2)+Exp(x3), x1*x2*x3 ], [ 1, 2, 3 ] );
-#! [ [ "Cos(x2)", "-x1*Sin(x2)", "Exp(x3)" ], [ "x2*x3", "x1*x3", "x1*x2" ] ]
-JacobianMatrix( [ "x1", "x2", "x3" ], [ "x1*Cos(x2)+Exp(x3)", "x1*x2*x3" ],
- [ 1, 2, 3 ] )(dummy_input);
-#! [ [ Cos(x2), (-x1)*Sin(x2), Exp(x3) ], [ x2*x3, x1*x3, x1*x2 ] ]
+#! Cos( a )
+LazyDiff( e, 1 )( dummy_input );
+#! Diff( [ "a", "b", "c" ], "(Sin(a))+(Cos(b))", 1 )( [ a, b, c ] )
+JacobianMatrixUsingPython( [ a*Cos(b)+Exp(c), a*b*c ], [ 1, 2, 3 ] );
+#! [ [ "Cos(b)", "-a*Sin(b)", "Exp(c)" ], [ "b*c", "a*c", "a*b" ] ]
+JacobianMatrix(
+ [ "a", "b", "c" ],
+ [ "a*Cos(b)+Exp(c)", "a*b*c" ],
+ [ 1, 2, 3 ] )(dummy_input);
+#! [ [ Cos(b), (-a)*Sin(b), Exp(c) ], [ b*c, a*c, a*b ] ]
+SimplifyExpressionUsingPython(
+ [ "a", "b" ],
+ [ "Sin(a)^2 + Cos(a)^2", "Exp(Log(b))" ] );
+#! [ "1", "b" ]
LaTeXOutputUsingPython( e );
-#! "\\frac{\\sin{\\left(x_{1} \\right)}}{\\cos{\\left(x_{1} \\right)}}
-#! + \\sin^{2}{\\left(x_{2} \\right)} + \\cos^{2}{\\left(x_{2} \\right)}"
+#! "\\sin{\\left(a \\right)} + \\cos{\\left(b \\right)}"
+AsCythonFunction( [[ "x", "y" ], [ "z" ]], ["f", "g"], ["x*y", "Sin(z)"] );;
+"""
+It will produce output similar to the following lines:
+$ cd /tmp/gaptempdirI6rq3l/
+$ python
+>>> from cython_functions import f, g
+>>> w = [ 2, 3 ] # or any other vector in R^2
+>>> f(w)
+6.0
+""";;
+#! @EndExample
+
+#! @Example
sigmoid := Expression( [ "x" ], "Exp(x)/(1+Exp(x))" );
#! Exp( x ) / (1 + Exp( x ))
sigmoid := AsFunction( sigmoid );
@@ -91,20 +131,15 @@ points := List( 0.1 * [ -20 .. 20 ], x -> [ x, sigmoid( [ x ] ) ] );
#! [ 1.2, 0.768525 ], [ 1.3, 0.785835 ], [ 1.4, 0.802184 ], [ 1.5, 0.817574 ],
#! [ 1.6, 0.832018 ], [ 1.7, 0.845535 ], [ 1.8, 0.858149 ], [ 1.9, 0.869892 ],
#! [ 2., 0.880797 ] ]
-labels := List( points, point -> SelectBasedOnCondition( point[2] < 0.5, 0, 1 ) );
-#! [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-#! 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
+labels := List( points, point -> 0 );;
ScatterPlotUsingPython( points, labels : size := "100", action := "save" );;
-# e.g, dir("/tmp/gaptempdirX7Qsal/")
-AsCythonFunction( [ [ "x", "y" ], [ "z" ] ], [ "f", "g" ], [ "x*y", "Sin(z)" ] );;
-# e.g.,
-# cd /tmp/gaptempdirI6rq3l/
-#
-# start python!
-#
-# from cython_functions import f, g;
-#
-# # w = [ 2 entries :) ]
-#
-# # f(w)
#! @EndExample
+
+#! @BeginLatexOnly
+#! \begin{figure}[ht]
+#! \centering
+#! \includegraphics[width=0.5\textwidth]{sigmoid.png}
+#! \caption{Sigmoid function plot.}
+#! \label{fig:sigmoid}
+#! \end{figure}
+#! @EndLatexOnly
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/neural_network.py b/examples/NeuralNetwork_BinaryCrossEntropy/.neural_network.py
similarity index 100%
rename from examples/NeuralNetwork_BinaryCrossEntropy/neural_network.py
rename to examples/NeuralNetwork_BinaryCrossEntropy/.neural_network.py
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/__pycache__/neural_network.cpython-310.pyc b/examples/NeuralNetwork_BinaryCrossEntropy/__pycache__/neural_network.cpython-310.pyc
deleted file mode 100644
index 92ce294..0000000
Binary files a/examples/NeuralNetwork_BinaryCrossEntropy/__pycache__/neural_network.cpython-310.pyc and /dev/null differ
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/data/create_train_test_examples.g b/examples/NeuralNetwork_BinaryCrossEntropy/data/create_train_test_examples.g
deleted file mode 100644
index 25c416f..0000000
--- a/examples/NeuralNetwork_BinaryCrossEntropy/data/create_train_test_examples.g
+++ /dev/null
@@ -1,57 +0,0 @@
-
-
-# locate the current dir
-current_dir := DirectoryCurrent( );
-
-# create a file for the training dataset
-
-# we have 4 classes in the plan
-
-# class 1: the union of the following sets:
-#
-# : everything inside the circle: (x1-0.5)^2 + (x2-0.5)^2 - 0.20 = 0
-# i.e., the solutions of: (x1-0.5)^2 + (x2-0.5)^2 - 0.20 <= 0
-
-# : everything inside the circle: (x1+0.5)^2 + (x2-0.5)^2 - 0.20 = 0
-# i.e., the solutions of: (x1+0.5)^2 + (x2-0.5)^2 - 0.20 <= 0
-
-# : the polytop defined by the points (0.5, 0), (-0.5, 0), (0, -1)
-# i.e., the common solutions of the inequalities: x2 <= 0, 1 - 2x1 + x2 >= 0, 1 + 2x1 + x2 >= 0
-
-# class 2: everything else
-
-files := [ "training_examples.txt", "test_examples.txt" ];
-nr_examples := [ 3000, 100 ];
-
-for i in [ 1, 2 ] do
-
- file := Filename( current_dir, files[i] );
-
- PrintTo( file, "[\n" );
-
- for j in [ 1 .. nr_examples[i] ] do
-
- # we want more centered training examples
- x1 := Random( [ -0.01, 0.01 ] ) * Random( Concatenation ( [ 1 .. 30 ], [ 1 .. 30 ], [ 30 .. 100 ] ) );
- x2 := Random( [ -0.01, 0.01 ] ) * Random( Concatenation ( [ 1 .. 30 ], [ 1 .. 30 ], [ 30 .. 100 ] ) );
-
- if (x1 - 0.5)^2 + (x2 - 0.5)^2 - 0.20 <= 0. or (x1 + 0.5)^2 + (x2 - 0.5)^2 - 0.20 <= 0. or (x2 <= 0. and 1 - 2 * x1 + x2 >= 0. and 1 + 2 * x1 + x2 >= 0.) then
-
- label := 1;
-
- else
-
- label := 0;
-
- fi;
-
- AppendTo( file, [ x1, x2 , label ], ",\n" );
-
- od;
-
- AppendTo( file, "]" );
-
-od;
-
-Display( "Done!" );
-QUIT;
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/data/generate_examples.g b/examples/NeuralNetwork_BinaryCrossEntropy/data/generate_examples.g
new file mode 100644
index 0000000..f456843
--- /dev/null
+++ b/examples/NeuralNetwork_BinaryCrossEntropy/data/generate_examples.g
@@ -0,0 +1,57 @@
+LoadPackage( "GradientBasedLearningForCAP" );
+
+# create a file for the training dataset
+
+# we have 2 classes in the plane
+
+# class 0:
+# everything inside the circle: x1^2 + (x2-0.5)^2 - 0.16 = 0
+# i.e., the solutions of: x1^2 + (x2-0.5)^2 - 0.16 <= 0
+
+# everything below the line: x2 = -0.5,
+# i.e., the solutions of: x2 + 0.5 <= 0.
+
+# class 1: everything else
+
+files_name := "training_examples.txt";
+nr_examples := 500;
+
+file := Filename( DirectoryCurrent( ), files_name );
+
+PrintTo( file, "[\n" );
+
+for j in [ 1 .. nr_examples ] do
+
+ x1 := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+ x2 := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+
+ if x1^2 + (x2 - 0.5)^2 - 0.16 <= 0. then
+
+ label := [ 0 ];
+
+ elif x2 + 0.5 <= 0. then
+
+ label := [ 0 ];
+
+ else
+
+ label := [ 1 ];
+
+ fi;
+
+ AppendTo( file, Concatenation( [ x1, x2 ], label ), ",\n" );
+
+od;
+
+AppendTo( file, "];" );
+
+
+# plotting the dataset
+file := Filename( DirectoryCurrent( ), "training_examples.txt" );
+data := EvalString( IO_ReadUntilEOF( IO_File( file ) ) );
+x := List( data, e -> [ e[1], e[2] ] );
+y := List( data, e -> e[3] );
+ScatterPlotUsingPython( x, y );
+
+Display( "Done!" );
+QUIT;
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/data/plot_training_test_examples.g b/examples/NeuralNetwork_BinaryCrossEntropy/data/plot_training_test_examples.g
deleted file mode 100644
index dfe10cd..0000000
--- a/examples/NeuralNetwork_BinaryCrossEntropy/data/plot_training_test_examples.g
+++ /dev/null
@@ -1,10 +0,0 @@
-LoadPackage( "GradientDescentForCAP" );
-
-file := IO_File( "training_examples.txt" );
-#file := IO_File( "test_examples.txt" );
-
-examples := EvalString( IO_ReadUntilEOF( file ) );
-
-points := List( examples, example -> example{[1, 2]} );
-labels := List( examples, example -> example[3] );
-ScatterPlotUsingPython( points, labels : size := "100" );
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/data/scatter_plot_training_examples.png b/examples/NeuralNetwork_BinaryCrossEntropy/data/scatter_plot_training_examples.png
index 7007e84..4a0d72a 100644
Binary files a/examples/NeuralNetwork_BinaryCrossEntropy/data/scatter_plot_training_examples.png and b/examples/NeuralNetwork_BinaryCrossEntropy/data/scatter_plot_training_examples.png differ
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/data/test_examples.txt b/examples/NeuralNetwork_BinaryCrossEntropy/data/test_examples.txt
deleted file mode 100644
index 590dacc..0000000
--- a/examples/NeuralNetwork_BinaryCrossEntropy/data/test_examples.txt
+++ /dev/null
@@ -1,102 +0,0 @@
-[
-[ 0.26, 0.77, 1 ],
-[ -0.13, -0.15, 1 ],
-[ 0.12, 0.84, 0 ],
-[ -0.11, -0.04, 1 ],
-[ -0.64, 0.26, 1 ],
-[ 0.74, -0.11, 0 ],
-[ 0.6900000000000001, 0.17, 1 ],
-[ -0.61, 0.6, 1 ],
-[ 0.32, -0.39, 0 ],
-[ -0.1, 0.1, 0 ],
-[ -0.09, 0.58, 1 ],
-[ -0.11, 0.21, 0 ],
-[ 0.3, -0.45, 0 ],
-[ 0.3, -0.15, 1 ],
-[ 0.8, -0.14, 0 ],
-[ 0.99, 0.05, 0 ],
-[ 0.54, 0.11, 1 ],
-[ -0.89, -0.24, 0 ],
-[ -0.5600000000000001, 0.04, 0 ],
-[ 0.07000000000000001, -0.88, 0 ],
-[ -0.9500000000000001, -0.53, 0 ],
-[ -0.91, -0.16, 0 ],
-[ 0.89, 0.12, 0 ],
-[ 0.9400000000000001, -0.63, 0 ],
-[ -0.14, -0.72, 1 ],
-[ -0.24, 0.78, 1 ],
-[ 0.28, 0.23, 1 ],
-[ -0.74, -0.19, 0 ],
-[ 0.36, 0.18, 1 ],
-[ 0.77, 0.35, 1 ],
-[ -0.9500000000000001, -0.07000000000000001, 0 ],
-[ 0.35, 0.77, 1 ],
-[ 0.31, -0.3, 1 ],
-[ 0.26, 0.9400000000000001, 0 ],
-[ 0.16, 0.64, 1 ],
-[ 0.9500000000000001, -0.05, 0 ],
-[ 0.68, -0.02, 0 ],
-[ 0.01, -0.23, 1 ],
-[ -0.06, -0.45, 1 ],
-[ 0.21, -0.93, 0 ],
-[ 0.92, 0.59, 1 ],
-[ 0.16, 0.3, 1 ],
-[ -0.96, 0.66, 0 ],
-[ 0.71, -0.43, 0 ],
-[ -0.06, -0.92, 0 ],
-[ -0.24, 0.55, 1 ],
-[ -0.19, -0.2, 1 ],
-[ 0.1, 0.17, 0 ],
-[ -0.47, -0.66, 0 ],
-[ -0.51, 0.19, 1 ],
-[ 0.13, -0.13, 1 ],
-[ -0.27, -0.54, 0 ],
-[ -0.17, -0.8300000000000001, 0 ],
-[ -0.27, -0.85, 0 ],
-[ 0.5, 0.3, 1 ],
-[ 0.16, 0.46, 1 ],
-[ -0.19, 0.15, 0 ],
-[ 0.1, -0.84, 0 ],
-[ -0.09, 0.25, 0 ],
-[ 0.54, 0.33, 1 ],
-[ 0.21, -0.13, 1 ],
-[ 0.18, -0.39, 1 ],
-[ 0.67, -0.49, 0 ],
-[ -0.66, -0.79, 0 ],
-[ 0.16, -0.15, 1 ],
-[ 0.84, -0.01, 0 ],
-[ 0.5600000000000001, 0.21, 1 ],
-[ -0.97, 0.46, 0 ],
-[ 0.01, -0.98, 1 ],
-[ -0.51, 0.7000000000000001, 1 ],
-[ 0.12, 0.08, 0 ],
-[ 0.24, -0.47, 1 ],
-[ -0.78, 0.15, 0 ],
-[ 0.84, -0.09, 0 ],
-[ 0.5600000000000001, -0.3, 0 ],
-[ 0.3, 0.75, 1 ],
-[ -0.39, -0.26, 0 ],
-[ -0.02, 0.55, 0 ],
-[ 0.27, 0.91, 0 ],
-[ -0.01, -0.5700000000000001, 1 ],
-[ -0.71, -0.34, 0 ],
-[ 0.47, -0.31, 0 ],
-[ 0.43, 0.12, 1 ],
-[ 0.08, -0.8300000000000001, 1 ],
-[ 0.5700000000000001, -0.3, 0 ],
-[ 0.6, -0.88, 0 ],
-[ -0.43, 0.29, 1 ],
-[ -0.13, 0.24, 0 ],
-[ 0.29, -0.87, 0 ],
-[ 0.3, 0.23, 1 ],
-[ 0.67, -0.2, 0 ],
-[ -0.11, 0.19, 0 ],
-[ -0.8200000000000001, -0.42, 0 ],
-[ -0.8200000000000001, 0.75, 1 ],
-[ -0.22, 0.98, 0 ],
-[ 0.88, 0.61, 1 ],
-[ 0.03, 0.58, 0 ],
-[ -0.79, 0.05, 0 ],
-[ 0.36, 0.74, 1 ],
-[ 0.28, -0.45, 0 ],
-]
\ No newline at end of file
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/data/training_examples.txt b/examples/NeuralNetwork_BinaryCrossEntropy/data/training_examples.txt
index ac53f46..6771a06 100644
--- a/examples/NeuralNetwork_BinaryCrossEntropy/data/training_examples.txt
+++ b/examples/NeuralNetwork_BinaryCrossEntropy/data/training_examples.txt
@@ -1,3002 +1,502 @@
[
-[ 0.29, -0.73, 0 ],
-[ -0.25, -0.68, 0 ],
+[ 0.59, -0.85, 0 ],
+[ -0.76, -0.72, 0 ],
+[ -0.55, -0.99, 0 ],
[ 0.12, -0.3, 1 ],
-[ -0.65, -0.11, 0 ],
-[ -0.86, 0.62, 1 ],
-[ -0.14, 0.38, 1 ],
-[ 0.39, -0.27, 0 ],
-[ -0.03, -0.63, 1 ],
-[ 0.28, 0.73, 1 ],
-[ -0.32, 0.93, 0 ],
-[ -0.32, 0.59, 1 ],
-[ -0.49, 0.19, 1 ],
-[ -0.92, -0.96, 0 ],
-[ -0.37, 0.3, 1 ],
-[ -0.19, 0.21, 1 ],
-[ -0.19, 0.22, 1 ],
-[ 0.06, -0.97, 0 ],
-[ 0.26, 0.42, 1 ],
-[ 0.65, 0.16, 1 ],
-[ -0.19, -0.52, 1 ],
-[ -0.7000000000000001, 0.63, 1 ],
-[ -0.21, 0.88, 0 ],
-[ -0.24, 0.91, 0 ],
-[ -0.11, 0.8100000000000001, 0 ],
-[ 0.62, 0.25, 1 ],
-[ -0.68, 0.08, 0 ],
-[ 0.74, -0.28, 0 ],
-[ 0.67, 0.18, 1 ],
-[ 0.65, 0.62, 1 ],
-[ 0.09, 0.27, 0 ],
-[ 0.71, 0.77, 1 ],
-[ -0.05, -0.07000000000000001, 1 ],
-[ -0.72, 0.06, 0 ],
-[ -0.1, 0.1, 0 ],
-[ -0.73, 0.48, 1 ],
-[ -0.18, -0.5700000000000001, 1 ],
-[ 0.11, 0.5700000000000001, 1 ],
-[ 0.4, -0.12, 1 ],
-[ 0.87, -0.14, 0 ],
-[ -0.61, -0.26, 0 ],
-[ -0.1, -0.42, 1 ],
-[ -0.65, 0.05, 0 ],
-[ -0.09, 0.36, 1 ],
-[ 0.15, 0.43, 1 ],
-[ -0.44, -0.06, 1 ],
-[ 0.8200000000000001, -0.85, 0 ],
-[ 0.21, -0.35, 1 ],
-[ -0.44, 0.15, 1 ],
-[ 0.02, 0.01, 0 ],
-[ -0.17, 0.16, 0 ],
-[ 0.72, -0.4, 0 ],
-[ 0.67, -0.05, 0 ],
-[ -0.86, 0.03, 0 ],
-[ -0.12, 0.65, 1 ],
-[ -0.23, 0.47, 1 ],
-[ -0.88, 0.71, 1 ],
-[ 0.09, 0.49, 1 ],
-[ 0.98, 0.17, 0 ],
-[ -0.54, 0.28, 1 ],
-[ 0.05, -0.84, 1 ],
-[ -0.19, -0.23, 1 ],
-[ -0.18, 0.14, 0 ],
-[ -0.06, 0.62, 0 ],
-[ -0.65, 0.04, 0 ],
-[ -0.02, 0.26, 0 ],
-[ 0.71, 0.04, 0 ],
-[ -0.62, -0.09, 0 ],
-[ -0.45, -0.16, 0 ],
-[ -0.23, 0.13, 0 ],
-[ 0.92, -0.25, 0 ],
-[ 0.6900000000000001, 0.5700000000000001, 1 ],
-[ 0.59, -0.23, 0 ],
-[ 0.28, -0.97, 0 ],
-[ 0.07000000000000001, -0.97, 0 ],
-[ -0.24, 0.09, 0 ],
-[ 0.25, -0.61, 0 ],
-[ 0.23, 0.2, 1 ],
-[ 0.16, -0.21, 1 ],
-[ 0.59, -0.12, 0 ],
-[ 0.08, 0.71, 0 ],
-[ 0.64, 0.99, 0 ],
-[ 0.16, -0.8, 0 ],
-[ 0.28, -0.28, 1 ],
-[ 0.36, 0.02, 0 ],
-[ -0.67, -0.19, 0 ],
-[ 0.71, 0.29, 1 ],
-[ -0.48, -0.9, 0 ],
-[ 0.25, 0.8100000000000001, 1 ],
-[ -0.67, 0.25, 1 ],
-[ -0.9500000000000001, 0.9, 0 ],
-[ 0.02, 0.22, 0 ],
-[ -0.25, 0.11, 0 ],
-[ -0.13, 0.86, 0 ],
-[ 0.14, 0.25, 1 ],
-[ -0.77, 0.27, 1 ],
-[ 0.9500000000000001, -0.14, 0 ],
-[ 0.15, 0.2, 0 ],
-[ 0.28, 0.99, 0 ],
-[ -0.03, 0.62, 0 ],
-[ -0.96, 0.87, 0 ],
-[ 0.24, 0.16, 1 ],
-[ -0.89, -0.34, 0 ],
-[ 0.6900000000000001, 0.62, 1 ],
-[ -0.66, -0.75, 0 ],
-[ -0.64, 0.66, 1 ],
-[ 0.53, 0.31, 1 ],
-[ -1., -0.31, 0 ],
-[ -0.89, 0.25, 0 ],
-[ -0.92, -0.28, 0 ],
-[ -0.04, -0.59, 1 ],
-[ 0.39, -0.04, 1 ],
-[ -0.43, 0.96, 0 ],
-[ 0.18, -0.18, 1 ],
-[ -0.19, 0.29, 1 ],
-[ 0.01, -0.05, 1 ],
-[ 0.87, 0.21, 0 ],
-[ 0.65, -0.29, 0 ],
-[ 0.37, 0.51, 1 ],
-[ -0.14, -0.99, 0 ],
-[ 0.36, -0.99, 0 ],
-[ -0.19, -0.17, 1 ],
-[ -0.05, 0.13, 0 ],
-[ 0.64, -0.9400000000000001, 0 ],
-[ 0.5, 0.84, 1 ],
-[ 0.67, -0.98, 0 ],
-[ 0.16, -0.8100000000000001, 0 ],
-[ 0.63, 0.37, 1 ],
-[ -0.29, 0.05, 0 ],
-[ 0.11, 0.86, 0 ],
-[ 0.61, 0.07000000000000001, 1 ],
-[ -0.6900000000000001, -0.62, 0 ],
-[ -0.73, -0.29, 0 ],
-[ -0.28, -0.44, 0 ],
-[ 0.16, -0.29, 1 ],
-[ -0.98, 0.8100000000000001, 0 ],
-[ 0.2, -0.1, 1 ],
-[ -0.28, 0.63, 1 ],
-[ -0.15, -0.6900000000000001, 1 ],
-[ 0.9500000000000001, -0.06, 0 ],
-[ 0.62, -0.53, 0 ],
-[ -0.29, -0.6900000000000001, 0 ],
-[ -0.97, 0.72, 0 ],
-[ -0.59, -0.2, 0 ],
-[ 0.22, 0.14, 0 ],
-[ -0.37, -0.98, 0 ],
-[ -0.21, 0.14, 0 ],
-[ 0.05, 0.68, 0 ],
-[ -0.02, 0.01, 0 ],
-[ -0.63, 0.4, 1 ],
-[ -0.4, -0.9, 0 ],
-[ 0.21, -0.76, 0 ],
-[ 0.28, -0.39, 1 ],
-[ -0.35, -0.13, 1 ],
-[ 0.19, 0.8, 1 ],
-[ -0.98, -0.4, 0 ],
-[ -0.51, -0.29, 0 ],
-[ 0.34, -0.18, 1 ],
-[ 0.12, 0.26, 0 ],
-[ -0.76, 0.64, 1 ],
-[ 0.3, 0.14, 1 ],
-[ -0.09, 0.14, 0 ],
-[ 0.8300000000000001, 0.17, 0 ],
-[ -0.14, -0.12, 1 ],
-[ -0.5600000000000001, -0.78, 0 ],
-[ -0.93, -0.09, 0 ],
-[ 0.74, 0.65, 1 ],
-[ 0.27, -0.76, 0 ],
-[ 0.08, -0.41, 1 ],
-[ -0.19, -0.28, 1 ],
-[ 0.43, -0.18, 0 ],
-[ 0.6, 0.9400000000000001, 0 ],
-[ 0.13, 0.85, 0 ],
-[ -0.26, -0.28, 1 ],
-[ -0.26, 0.39, 1 ],
-[ -0.84, -0.8100000000000001, 0 ],
-[ -0.05, 0.15, 0 ],
-[ 0.9500000000000001, -0.02, 0 ],
-[ -0.26, 0.13, 1 ],
-[ 0.47, 0.5, 1 ],
-[ 0.19, 0.92, 0 ],
-[ 0.29, -0.09, 1 ],
-[ -0.18, 0.11, 0 ],
-[ 0.29, -0.18, 1 ],
-[ -0.28, -0.4, 1 ],
-[ 0.33, 0.78, 1 ],
-[ 0.9500000000000001, -0.75, 0 ],
-[ 0.25, 0.75, 1 ],
-[ -0.55, -0.02, 0 ],
-[ -0.21, -0.07000000000000001, 1 ],
-[ -0.01, 0.8300000000000001, 0 ],
-[ -0.11, -0.44, 1 ],
-[ -0.75, -0.5700000000000001, 0 ],
-[ 0.45, -0.11, 0 ],
-[ -0.2, 0.11, 0 ],
-[ 0.03, -0.48, 1 ],
-[ -0.53, 0.8, 1 ],
-[ 0.09, -0.02, 1 ],
-[ 0.45, 0.02, 0 ],
-[ 0.27, -0.65, 0 ],
-[ -0.15, 0.29, 1 ],
-[ -0.97, -0.25, 0 ],
-[ 0.3, -0.04, 1 ],
-[ -0.45, -0.5, 0 ],
-[ 0.97, 0.67, 0 ],
-[ 0.8200000000000001, -0.74, 0 ],
-[ 0.37, 0.02, 0 ],
-[ 0.3, -0.11, 1 ],
-[ -0.87, 0.3, 1 ],
-[ -0.11, -0.85, 0 ],
-[ 0.72, 0.99, 0 ],
-[ 0.24, -0.15, 1 ],
-[ -0.99, -0.8200000000000001, 0 ],
-[ 0.5600000000000001, 0.51, 1 ],
-[ 0.1, 0.63, 1 ],
-[ -0.21, 0.11, 0 ],
-[ 0.43, 0.89, 1 ],
-[ -0.43, 0.07000000000000001, 1 ],
-[ 0.05, 0.6, 0 ],
-[ 0.05, -0.22, 1 ],
-[ -0.47, -0.52, 0 ],
-[ -0.16, 0.6, 1 ],
-[ -0.62, -0.4, 0 ],
-[ -0.06, 0.8200000000000001, 0 ],
-[ -0.3, -0.4, 1 ],
-[ 0.65, 0.04, 0 ],
-[ -0.02, 0.12, 0 ],
-[ -0.14, 0.15, 0 ],
-[ -0.6, -0.03, 0 ],
-[ 0.62, -0.99, 0 ],
-[ 0.21, -0.61, 0 ],
-[ -0.6, 0.74, 1 ],
-[ -0.66, -0.08, 0 ],
-[ -0.46, -0.29, 0 ],
-[ -0.93, -0.37, 0 ],
-[ 0.5700000000000001, -0.68, 0 ],
-[ -0.63, -0.8300000000000001, 0 ],
-[ 0.19, -0.71, 0 ],
-[ -0.09, 0.23, 0 ],
-[ 0.16, 0.32, 1 ],
-[ -0.64, -0.02, 0 ],
-[ -0.75, -0.64, 0 ],
-[ 0.17, -0.19, 1 ],
-[ 0.9400000000000001, -0.54, 0 ],
-[ -0.63, -0.16, 0 ],
-[ -0.21, -0.07000000000000001, 1 ],
-[ -0.28, -0.16, 1 ],
-[ 0.32, 0.09, 0 ],
-[ 0.92, -0.17, 0 ],
-[ -0.49, -0.68, 0 ],
-[ 0.41, -0.27, 0 ],
-[ 0.19, 0.28, 1 ],
-[ 0.19, -0.93, 0 ],
-[ -0.01, 0.88, 0 ],
-[ -0.76, 0.2, 1 ],
-[ 0.76, -0.28, 0 ],
-[ -0.14, 0.32, 1 ],
-[ 0.2, 0.11, 0 ],
-[ -0.34, 0.3, 1 ],
-[ 0.17, 0.01, 0 ],
-[ 0.6900000000000001, -0.17, 0 ],
-[ -0.3, 0.42, 1 ],
-[ 0.64, 0.64, 1 ],
-[ -0.49, -0.49, 0 ],
-[ -0.93, -0.31, 0 ],
-[ 0.73, -0.07000000000000001, 0 ],
-[ -0.36, 0.99, 0 ],
-[ 0.14, 0.32, 1 ],
-[ -0.33, 0.29, 1 ],
-[ 0.4, 0.3, 1 ],
-[ 0.72, 0.72, 1 ],
-[ 0.65, 0.32, 1 ],
-[ -0.01, 0.01, 0 ],
-[ 0.1, 0.28, 0 ],
-[ 0.76, -0.43, 0 ],
-[ -0.11, -0.28, 1 ],
-[ -0.2, 0.2, 1 ],
-[ 0.88, 0.16, 0 ],
-[ -0.1, 0.31, 1 ],
-[ 0.62, -0.11, 0 ],
-[ -0.86, -0.15, 0 ],
-[ 0.2, 0.67, 1 ],
-[ -0.06, -0.49, 1 ],
-[ 0.86, -0.02, 0 ],
-[ 0.22, 0.64, 1 ],
-[ -0.03, 0.3, 0 ],
-[ 0.64, -0.62, 0 ],
-[ -0.5600000000000001, -0.01, 0 ],
-[ -0.42, -0.71, 0 ],
-[ 0.22, 0.75, 1 ],
-[ -0.13, 0.08, 0 ],
-[ 0.98, 0.48, 0 ],
-[ 0.29, -0.51, 0 ],
-[ -0.22, -0.84, 0 ],
-[ 0.8300000000000001, 0.5600000000000001, 1 ],
-[ 0.5600000000000001, -0.63, 0 ],
-[ 0.25, -0.38, 1 ],
-[ -0.04, -0.42, 1 ],
-[ -0.8300000000000001, -0.68, 0 ],
-[ -0.21, 0.32, 1 ],
-[ 0.18, -0.08, 1 ],
-[ 0.13, 0.91, 0 ],
-[ 0.58, 0.05, 0 ],
-[ -0.37, 0.09, 1 ],
-[ -0.73, -0.93, 0 ],
-[ -0.9400000000000001, 0.02, 0 ],
-[ -0.04, 0.53, 0 ],
-[ 0.45, -0.53, 0 ],
-[ -0.02, -0.07000000000000001, 1 ],
-[ 0.05, -0.06, 1 ],
-[ -0.08, -0.04, 1 ],
-[ 0.3, -0.86, 0 ],
-[ 0.16, 0.61, 1 ],
-[ -0.47, -0.61, 0 ],
-[ 0.55, -0.97, 0 ],
-[ -0.46, 0.67, 1 ],
-[ 0.33, -0.07000000000000001, 1 ],
-[ 0.32, -0.11, 1 ],
-[ 0.13, -0.99, 0 ],
-[ 0.76, 0.09, 0 ],
-[ -0.6, 0.22, 1 ],
-[ 0.05, -0.08, 1 ],
-[ 0.1, -0.13, 1 ],
-[ 0.87, -0.18, 0 ],
-[ 0.28, -0.51, 0 ],
-[ 0.22, -0.38, 1 ],
-[ 0.2, 0.67, 1 ],
-[ -0.72, 0.11, 0 ],
-[ -0.37, -0.06, 1 ],
-[ 0.13, 0.79, 0 ],
-[ 0.85, 0.67, 1 ],
-[ -0.79, -0.77, 0 ],
-[ -0.38, -0.01, 1 ],
-[ -0.37, -0.17, 1 ],
-[ 0.58, -0.23, 0 ],
-[ -0.4, 0.86, 1 ],
-[ -0.01, -0.22, 1 ],
-[ 0.92, 0.9500000000000001, 0 ],
-[ -0.21, -0.67, 0 ],
-[ -0.49, -0.74, 0 ],
-[ 0.06, -0.32, 1 ],
-[ -0.61, -0.47, 0 ],
-[ -0.84, 0.05, 0 ],
-[ 0.04, 0.16, 0 ],
-[ -0.76, -0.1, 0 ],
-[ 0.03, 0.18, 0 ],
-[ -0.07000000000000001, -0.73, 1 ],
-[ -0.9500000000000001, 0.7000000000000001, 0 ],
-[ 0.08, 0.04, 0 ],
-[ 0.01, -0.44, 1 ],
-[ 0.49, -0.14, 0 ],
-[ 0.17, -0.22, 1 ],
-[ -0.08, -0.58, 1 ],
-[ 0.1, -0.78, 1 ],
-[ -0.65, -0.5, 0 ],
-[ 0.24, -0.5600000000000001, 0 ],
-[ -0.59, 0.67, 1 ],
-[ -0.8, 0.16, 0 ],
-[ 0.25, 0.74, 1 ],
-[ 0.96, -0.38, 0 ],
-[ 0.85, 0.31, 1 ],
-[ -0.78, 0.54, 1 ],
-[ 0.02, 0.9400000000000001, 0 ],
-[ 0.18, -0.53, 1 ],
-[ -0.18, 0.62, 1 ],
-[ -0.19, 0.3, 1 ],
-[ 0.19, -0.17, 1 ],
-[ -0.03, -0.55, 1 ],
-[ -0.01, -0.03, 1 ],
-[ 0.09, 0.59, 1 ],
-[ 0.08, -0.39, 1 ],
-[ -0.28, -0.17, 1 ],
-[ 0.03, -0.1, 1 ],
-[ 0.02, 0.53, 0 ],
-[ -0.3, -0.85, 0 ],
-[ 0.91, -0.53, 0 ],
-[ -0.63, 0.3, 1 ],
-[ 0.98, -0.33, 0 ],
-[ -0.84, 0.28, 1 ],
-[ 0.24, -0.97, 0 ],
-[ -0.72, 0.43, 1 ],
-[ 0.41, 0.16, 1 ],
-[ 0.09, 0.42, 1 ],
-[ 0.28, 0.19, 1 ],
-[ -0.78, -0.5, 0 ],
-[ -0.45, -0.22, 0 ],
-[ -0.87, 0.09, 0 ],
-[ -0.2, 0.25, 1 ],
-[ 0.85, 0.22, 0 ],
-[ -0.44, -0.63, 0 ],
-[ -0.08, 0.13, 0 ],
-[ 0.6, 0.14, 1 ],
-[ -0.88, -0.05, 0 ],
-[ -0.86, -0.72, 0 ],
-[ 0.21, -0.58, 1 ],
-[ 0.09, 0.32, 0 ],
-[ -0.58, -0.68, 0 ],
-[ 0.63, -0.12, 0 ],
-[ -0.51, -0.28, 0 ],
-[ 0.73, 0.8300000000000001, 1 ],
-[ 0.33, -0.41, 0 ],
-[ -0.8100000000000001, 0.87, 0 ],
-[ -0.75, -0.98, 0 ],
-[ 0.99, -0.5600000000000001, 0 ],
-[ 0.37, 0.21, 1 ],
-[ -0.9500000000000001, 0.13, 0 ],
-[ -0.2, -0.8300000000000001, 0 ],
-[ -0.08, 0.7000000000000001, 0 ],
-[ 0.6900000000000001, 0.05, 0 ],
-[ -0.75, 0.51, 1 ],
-[ -0.5700000000000001, 0.8, 1 ],
-[ -0.22, 0.72, 1 ],
-[ -0.27, -0.5600000000000001, 0 ],
-[ -0.46, -0.22, 0 ],
-[ 0.31, 0.21, 1 ],
-[ -0.11, 0.09, 0 ],
-[ -0.15, 0.76, 1 ],
-[ -0.47, -0.19, 0 ],
-[ -0.65, 0.26, 1 ],
-[ 0.73, 0.03, 0 ],
-[ 0.06, -0.06, 1 ],
-[ -0.51, -0.09, 0 ],
-[ 0.23, -0.91, 0 ],
-[ 0.25, -0.01, 1 ],
-[ 0.73, -0.5700000000000001, 0 ],
-[ 0.07000000000000001, 0.01, 0 ],
-[ 0.29, -0.46, 0 ],
-[ 0.67, 0.58, 1 ],
-[ 0.41, -0.15, 1 ],
-[ -0.32, -0.45, 0 ],
-[ -0.35, 0.8300000000000001, 1 ],
-[ -0.24, -0.42, 1 ],
-[ -0.23, -0.93, 0 ],
-[ 0.12, -0.55, 1 ],
-[ -0.26, -0.43, 1 ],
-[ -0.49, 0.38, 1 ],
-[ -0.3, -0.28, 1 ],
-[ 0.18, 0.38, 1 ],
-[ 0.24, 0.18, 1 ],
-[ 0.29, 0.08, 0 ],
-[ -0.75, -0.07000000000000001, 0 ],
-[ 0.12, -0.51, 1 ],
-[ 0.38, -0.28, 0 ],
-[ -0.49, 0.14, 1 ],
-[ -0.37, 0.87, 1 ],
-[ -0.87, -0.66, 0 ],
-[ -0.8200000000000001, 0.01, 0 ],
-[ 0.11, 0.13, 0 ],
-[ 0.93, 0.92, 0 ],
-[ -0.91, -0.5700000000000001, 0 ],
-[ -0.01, -0.07000000000000001, 1 ],
-[ -0.9500000000000001, 0.08, 0 ],
-[ 0.51, 0.04, 0 ],
-[ 0.27, 0.67, 1 ],
-[ -0.5, 0.33, 1 ],
-[ -0.28, 0.75, 1 ],
-[ 0.26, -0.06, 1 ],
-[ 0.68, -0.26, 0 ],
-[ -0.35, 0.54, 1 ],
-[ -0.43, -0.29, 0 ],
-[ -0.46, -0.28, 0 ],
-[ -0.35, -0.65, 0 ],
-[ -0.45, 0.32, 1 ],
-[ 0.5700000000000001, -0.06, 0 ],
-[ 0.19, -0.31, 1 ],
-[ 0.17, 0.75, 1 ],
-[ -0.04, 0.01, 0 ],
-[ 0.11, -0.31, 1 ],
-[ 0.22, -0.09, 1 ],
-[ 0.14, -0.11, 1 ],
-[ 0.5700000000000001, 0.68, 1 ],
-[ 0.05, 0.61, 0 ],
-[ 0.16, 0.73, 1 ],
-[ 0.5600000000000001, 0.97, 0 ],
-[ -0.62, -0.64, 0 ],
-[ 0.43, -0.08, 1 ],
-[ 0.62, 0.2, 1 ],
-[ -0.18, -0.18, 1 ],
-[ 0.3, 0.28, 1 ],
-[ -0.08, 0.17, 0 ],
-[ -0.02, -0.67, 1 ],
-[ -0.99, 0.29, 0 ],
-[ -0.09, -0.91, 0 ],
-[ -0.62, 0.33, 1 ],
-[ -0.96, -0.84, 0 ],
-[ -0.84, -0.09, 0 ],
-[ -0.51, 0.42, 1 ],
-[ 0.15, 0.65, 1 ],
-[ 0.14, -0.15, 1 ],
-[ -0.77, 0.8, 1 ],
-[ -0.44, 0.9, 1 ],
-[ 0.1, 0.07000000000000001, 0 ],
-[ -0.29, -0.06, 1 ],
-[ 0.73, 0.85, 1 ],
-[ 0.25, -0.52, 0 ],
-[ -0.43, -0.64, 0 ],
-[ -0.01, 0.6, 0 ],
-[ 0.34, -0.79, 0 ],
-[ 0.89, 0.92, 0 ],
-[ -0.7000000000000001, -0.75, 0 ],
-[ 0.13, 0.14, 0 ],
-[ -0.24, -0.59, 0 ],
-[ 0.53, -0.96, 0 ],
-[ 0.08, 0.45, 1 ],
-[ -0.9500000000000001, -0.26, 0 ],
-[ 0.06, -0.02, 1 ],
-[ 0.61, 0.59, 1 ],
-[ -0.8300000000000001, -0.8200000000000001, 0 ],
-[ 0.31, -0.88, 0 ],
-[ 0.48, 0.06, 1 ],
-[ 0.73, 0.84, 1 ],
-[ -0.98, 0.3, 0 ],
-[ -0.46, -0.19, 0 ],
+[ -0.96, -0.8100000000000001, 0 ],
+[ 0.97, 0.6900000000000001, 1 ],
+[ -0.45, -0.5600000000000001, 0 ],
+[ -0.36, -0.77, 0 ],
+[ 0.8, 0.6900000000000001, 1 ],
+[ 0.72, 0.37, 1 ],
+[ -0.33, -0.06, 1 ],
+[ 0.9400000000000001, 0.28, 1 ],
+[ 0.05, 0.8, 0 ],
+[ -0.04, 0.63, 0 ],
+[ 0.9, -0.13, 1 ],
+[ 0.49, -0.85, 0 ],
+[ -0.71, 0.6, 1 ],
+[ -0.68, 0.61, 1 ],
+[ -0.19, 0.8100000000000001, 0 ],
+[ 0.17, -0.09, 1 ],
+[ -0.49, 0.73, 1 ],
+[ -0.22, 0.29, 0 ],
+[ 0.27, -0.75, 0 ],
+[ 0.22, 0.53, 0 ],
+[ -0.78, -0.18, 1 ],
+[ 0.46, -0.19, 1 ],
+[ -0.33, -0.34, 1 ],
+[ 0.9400000000000001, -0.51, 0 ],
+[ 0.52, -0.24, 1 ],
+[ 0.46, -0.9500000000000001, 0 ],
+[ 0.8100000000000001, -0.38, 1 ],
+[ 0.39, 0.93, 1 ],
+[ 0.37, -0.99, 0 ],
+[ 0.38, 0.79, 1 ],
+[ -0.33, 0.9, 1 ],
+[ -0.98, 0.18, 1 ],
+[ 0.28, 0.61, 0 ],
+[ 0.96, 0.89, 1 ],
+[ -0.44, 0.11, 1 ],
[ 0.5700000000000001, 0.47, 1 ],
-[ -0.19, -0.41, 1 ],
-[ -0.7000000000000001, -0.8200000000000001, 0 ],
-[ 0.5600000000000001, 0.31, 1 ],
-[ -0.02, 0.85, 0 ],
-[ 0.75, 0.33, 1 ],
-[ 0.66, 0.11, 1 ],
-[ 0.89, -0.15, 0 ],
-[ 0.23, 0.07000000000000001, 0 ],
-[ -0.3, 0.45, 1 ],
-[ -0.19, 0.37, 1 ],
-[ -0.9400000000000001, 0.49, 1 ],
-[ 0.66, -0.6, 0 ],
-[ 0.25, 0.6900000000000001, 1 ],
-[ -0.23, -0.17, 1 ],
-[ -0.4, -0.08, 1 ],
-[ -0.67, -0.77, 0 ],
-[ -0.05, -0.28, 1 ],
-[ -0.05, -0.17, 1 ],
-[ 0.04, 0.66, 0 ],
-[ -0.39, -0.3, 0 ],
-[ 0.24, 0.5600000000000001, 1 ],
-[ 0.88, 0.13, 0 ],
-[ 0.2, -0.09, 1 ],
-[ 0.53, 0.62, 1 ],
-[ -0.5, -0.78, 0 ],
-[ -0.24, -0.48, 1 ],
-[ -0.47, 0.23, 1 ],
-[ 0.32, 0.85, 1 ],
-[ -0.37, -0.01, 1 ],
-[ 0.8, 0.44, 1 ],
-[ 0.2, 0.54, 1 ],
-[ 0.04, 0.79, 0 ],
-[ -0.24, -0.26, 1 ],
-[ 0.91, 0.84, 0 ],
-[ 0.86, 0.88, 0 ],
-[ 0.17, -0.21, 1 ],
-[ 0.47, -0.22, 0 ],
-[ 0.14, -0.2, 1 ],
-[ 0.09, 0.73, 0 ],
-[ 0.46, 0.33, 1 ],
-[ -0.38, -0.19, 1 ],
-[ 0.25, 0.08, 0 ],
-[ 0.89, -0.44, 0 ],
-[ -0.13, 0.16, 0 ],
-[ -0.6900000000000001, -0.06, 0 ],
-[ 1., 0.07000000000000001, 0 ],
-[ -0.47, -0.06, 0 ],
-[ -0.8, 0.37, 1 ],
-[ -0.19, -0.9500000000000001, 0 ],
-[ 0.55, 0.24, 1 ],
-[ 0.7000000000000001, -0.18, 0 ],
-[ -0.67, -0.45, 0 ],
-[ 0.7000000000000001, -0.65, 0 ],
-[ 0.11, -0.32, 1 ],
-[ 0.6900000000000001, 0.04, 0 ],
-[ -0.11, -0.27, 1 ],
-[ 0.19, -0.34, 1 ],
-[ -0.05, 0.86, 0 ],
-[ -0.2, 0.17, 1 ],
-[ -0.5, -0.8200000000000001, 0 ],
-[ 0.8300000000000001, -0.97, 0 ],
-[ -0.8300000000000001, 0.15, 0 ],
-[ -0.5700000000000001, -0.45, 0 ],
-[ -0.22, 0.19, 1 ],
-[ -0.44, 0.18, 1 ],
-[ -0.06, 0.17, 0 ],
-[ -0.97, -0.04, 0 ],
-[ 0.47, -0.19, 0 ],
-[ 0.46, -0.47, 0 ],
-[ 0.3, 0.66, 1 ],
-[ -0.5600000000000001, 0.01, 0 ],
-[ 0.99, 0.07000000000000001, 0 ],
-[ 0.7000000000000001, -0.07000000000000001, 0 ],
-[ 0.38, 0.86, 1 ],
-[ -0.21, -0.92, 0 ],
-[ -0.3, -0.28, 1 ],
-[ -0.3, -0.16, 1 ],
-[ 0.75, -0.52, 0 ],
-[ -0.05, -0.86, 1 ],
-[ -0.16, 0.8300000000000001, 0 ],
-[ 0.08, 0.9, 0 ],
-[ 0.03, -0.02, 1 ],
-[ -0.02, 0.02, 0 ],
-[ 0.2, 0.47, 1 ],
-[ 0.51, -0.19, 0 ],
-[ -0.05, 0.48, 0 ],
-[ 0.5700000000000001, 0.17, 1 ],
-[ 0.49, -0.77, 0 ],
-[ 0.05, -0.78, 1 ],
-[ -0.25, -0.08, 1 ],
-[ 0.48, -0.73, 0 ],
-[ 0.18, -0.99, 0 ],
-[ -0.89, 0.22, 0 ],
-[ 0.05, 0.84, 0 ],
-[ -0.11, -0.24, 1 ],
-[ -0.59, 0.22, 1 ],
-[ -0.09, -0.29, 1 ],
-[ -0.78, 0.21, 1 ],
-[ -0.4, 0.9500000000000001, 0 ],
-[ 0.72, -0.28, 0 ],
-[ -0.91, -0.9, 0 ],
-[ 0.97, -0.32, 0 ],
-[ -0.9, -0.8200000000000001, 0 ],
-[ 0.11, 0.51, 1 ],
-[ 0.01, 0.05, 0 ],
-[ 0.53, -0.12, 0 ],
-[ 0.38, -0.47, 0 ],
-[ 0.47, 0.51, 1 ],
-[ 0.22, 0.67, 1 ],
-[ 0.21, -0.32, 1 ],
-[ 0.09, 0.8100000000000001, 0 ],
-[ -0.03, -0.5700000000000001, 1 ],
-[ -0.8200000000000001, -0.91, 0 ],
-[ 0.02, -0.01, 1 ],
-[ -0.74, 0.07000000000000001, 0 ],
-[ 0.51, 0.54, 1 ],
-[ -0.77, 0.37, 1 ],
-[ -0.14, -0.86, 0 ],
-[ -0.26, 0.9, 0 ],
-[ -0.7000000000000001, 0.28, 1 ],
-[ 0.9, -0.12, 0 ],
-[ -0.46, 0.13, 1 ],
-[ -0.5600000000000001, 0.23, 1 ],
-[ -0.22, 0.07000000000000001, 0 ],
-[ -0.23, 0.9500000000000001, 0 ],
-[ 0.38, 1., 0 ],
-[ -0.74, -0.19, 0 ],
-[ -0.12, -0.75, 1 ],
-[ 0.12, 0.17, 0 ],
-[ -0.16, 0.5600000000000001, 1 ],
-[ -0.73, 0.28, 1 ],
-[ -0.3, 0.22, 1 ],
-[ 0.1, -0.3, 1 ],
-[ 0.24, -0.64, 0 ],
-[ 0.67, -0.54, 0 ],
-[ 0.61, -0.5, 0 ],
-[ -0.84, 0.01, 0 ],
-[ 0.9, 0.7000000000000001, 0 ],
-[ -0.14, 0.31, 1 ],
-[ 0.43, -0.36, 0 ],
-[ 0.2, 0.5600000000000001, 1 ],
-[ -0.75, 0.77, 1 ],
-[ -0.24, -0.54, 0 ],
-[ -0.32, 0.16, 1 ],
-[ -0.04, -0.52, 1 ],
-[ 0.85, 0.02, 0 ],
-[ -0.91, 0.8300000000000001, 0 ],
-[ -0.17, 0.66, 1 ],
-[ 0.88, -0.92, 0 ],
-[ 0.85, -0.3, 0 ],
-[ 0.3, 0.54, 1 ],
-[ -0.74, 0.43, 1 ],
-[ 0.27, -0.74, 0 ],
-[ -0.75, 0.25, 1 ],
-[ 0.19, 0.86, 0 ],
-[ 0.29, -0.3, 1 ],
-[ 0.06, -0.3, 1 ],
-[ -0.25, 0.84, 1 ],
-[ -1., 0.37, 0 ],
-[ 0.09, -0.3, 1 ],
-[ -0.17, 0.64, 1 ],
-[ 0.53, -0.48, 0 ],
-[ -0.18, 0.46, 1 ],
-[ -0.5600000000000001, 0.46, 1 ],
-[ 0.12, 0.68, 1 ],
-[ 0.73, -0.1, 0 ],
-[ -0.06, -0.08, 1 ],
-[ -0.78, 0.65, 1 ],
-[ -0.8300000000000001, -0.25, 0 ],
-[ -0.1, 0.64, 1 ],
-[ -0.32, 0.46, 1 ],
-[ 0.31, 0.77, 1 ],
-[ 0.18, -0.27, 1 ],
-[ 0.28, 0.76, 1 ],
-[ -0.87, 0.14, 0 ],
-[ -0.76, -0.27, 0 ],
-[ 0.21, 0.09, 0 ],
-[ 0.74, 0.44, 1 ],
-[ -0.27, -0.22, 1 ],
-[ 0.4, 0.28, 1 ],
-[ 0.2, -0.55, 1 ],
-[ -0.25, -0.53, 0 ],
-[ 0.8300000000000001, 0.64, 1 ],
-[ -0.07000000000000001, 0.86, 0 ],
-[ -0.74, 0.91, 0 ],
-[ -0.02, -0.9500000000000001, 1 ],
-[ -0.9500000000000001, 0.9400000000000001, 0 ],
-[ -0.34, -0.6900000000000001, 0 ],
-[ 0.49, -0.78, 0 ],
-[ -0.7000000000000001, -1., 0 ],
-[ -0.98, -0.05, 0 ],
-[ -0.24, -0.42, 1 ],
-[ -0.32, -0.36, 1 ],
-[ 0.14, -0.29, 1 ],
-[ 0.92, -0.86, 0 ],
-[ -0.87, -0.67, 0 ],
-[ -0.18, 0.98, 0 ],
-[ 0.45, -0.2, 0 ],
-[ -0.88, 0.16, 0 ],
-[ -0.77, 0.45, 1 ],
-[ 0.24, 0.06, 0 ],
-[ -0.76, 0.66, 1 ],
-[ -0.24, -0.5700000000000001, 0 ],
-[ -0.5, 0.38, 1 ],
-[ 0.2, -0.9400000000000001, 0 ],
-[ 0.4, -0.43, 0 ],
-[ 0.62, 0.8100000000000001, 1 ],
-[ 0.08, -0.31, 1 ],
-[ 0.66, -0.63, 0 ],
-[ -1., 0.7000000000000001, 0 ],
-[ 0.97, -0.89, 0 ],
-[ 0.05, 0.44, 0 ],
-[ -0.2, -0.93, 0 ],
-[ 0.9400000000000001, -0.44, 0 ],
-[ 0.64, -0.52, 0 ],
-[ -0.98, -0.09, 0 ],
-[ 0.96, -0.7000000000000001, 0 ],
-[ 0.2, 0.19, 1 ],
-[ -0.53, 0.26, 1 ],
-[ -0.08, 0.26, 0 ],
-[ -0.11, -1., 0 ],
-[ -0.73, 0.25, 1 ],
-[ 0.54, -0.92, 0 ],
-[ 0.9500000000000001, -0.41, 0 ],
-[ 0.43, -0.85, 0 ],
-[ 0.51, -0.68, 0 ],
-[ 0.68, 0.01, 0 ],
-[ -0.39, -0.42, 0 ],
-[ -0.11, 0.2, 0 ],
-[ -0.35, -0.51, 0 ],
-[ 0.3, 0.24, 1 ],
-[ 0.02, -0.15, 1 ],
-[ 0.24, -0.1, 1 ],
-[ 0.6, -0.27, 0 ],
-[ -0.08, -0.01, 1 ],
-[ 0.16, -0.99, 0 ],
-[ -0.25, 0.08, 0 ],
-[ -0.01, 0.11, 0 ],
-[ -0.99, -1., 0 ],
-[ 0.25, 0.02, 0 ],
-[ -0.3, 0.01, 0 ],
-[ -0.96, 0.78, 0 ],
-[ -0.55, -0.23, 0 ],
-[ 0.13, -0.09, 1 ],
-[ 0.25, 0.05, 0 ],
-[ 0.93, 0.8, 0 ],
-[ 0.45, -0.7000000000000001, 0 ],
-[ 0.5600000000000001, -0.88, 0 ],
-[ 0.26, 0.03, 0 ],
-[ -0.16, -0.21, 1 ],
-[ -0.8300000000000001, -0.85, 0 ],
-[ -0.55, -0.9, 0 ],
-[ 0.19, -0.17, 1 ],
-[ 0.24, 0.3, 1 ],
-[ 0.09, 0.47, 1 ],
-[ -0.6900000000000001, -0.91, 0 ],
-[ -0.44, 0.78, 1 ],
-[ -0.64, 0.38, 1 ],
-[ 0.24, -0.43, 1 ],
-[ 0.06, -0.58, 1 ],
-[ 0.41, 0.2, 1 ],
-[ -0.34, 0.06, 0 ],
-[ 0.37, -0.01, 1 ],
-[ -0.8, 0.92, 0 ],
-[ 0.46, 0.18, 1 ],
-[ -0.5700000000000001, -0.8200000000000001, 0 ],
-[ -0.09, -0.11, 1 ],
-[ -0.75, -0.62, 0 ],
-[ 0.53, 0.16, 1 ],
-[ 0.38, 0.52, 1 ],
-[ 0.18, -0.08, 1 ],
-[ -0.48, -0.18, 0 ],
-[ -0.1, -0.14, 1 ],
-[ 0.46, -0.29, 0 ],
-[ -0.67, 0.9, 1 ],
-[ -0.96, -0.42, 0 ],
-[ -0.68, -0.96, 0 ],
-[ -0.9400000000000001, -0.17, 0 ],
-[ -0.17, -0.99, 0 ],
-[ -0.04, 0.43, 0 ],
-[ 0.85, -0.28, 0 ],
-[ -0.72, -0.24, 0 ],
-[ 0.05, 0.44, 0 ],
-[ 0.66, 0.63, 1 ],
-[ -0.55, 0.09, 1 ],
-[ 0.7000000000000001, -0.23, 0 ],
-[ -0.55, 0.55, 1 ],
-[ -0.25, -0.8300000000000001, 0 ],
-[ 0.5, -0.58, 0 ],
-[ 0.48, -0.12, 0 ],
-[ 0.55, 0.65, 1 ],
-[ -0.12, 0.61, 1 ],
-[ -0.98, -0.45, 0 ],
-[ 0.87, 0.14, 0 ],
-[ 0.73, 0.8300000000000001, 1 ],
-[ 0.28, -0.62, 0 ],
-[ -0.11, -0.6900000000000001, 1 ],
-[ -0.36, 0.12, 1 ],
-[ -0.5700000000000001, -0.15, 0 ],
-[ 0.72, 0.13, 1 ],
-[ 0.14, 0.41, 1 ],
-[ 0.91, -0.14, 0 ],
-[ 0.58, -0.79, 0 ],
-[ -0.9400000000000001, -0.9400000000000001, 0 ],
-[ -0.9, 0.14, 0 ],
-[ -0.38, -0.87, 0 ],
-[ -0.2, -0.48, 1 ],
-[ 0.6900000000000001, -0.05, 0 ],
-[ 0.03, 0.55, 0 ],
-[ -0.77, -0.44, 0 ],
-[ -0.1, 0.18, 0 ],
-[ 0.6, 0.41, 1 ],
-[ -0.8200000000000001, -0.65, 0 ],
-[ 0.86, -0.58, 0 ],
-[ 0.49, 0.63, 1 ],
-[ -0.6, -0.52, 0 ],
-[ 0.05, 0.39, 0 ],
-[ -0.06, -0.86, 1 ],
-[ 0.8, -0.14, 0 ],
-[ -0.08, 0.27, 0 ],
-[ -0.5, -0.26, 0 ],
-[ 0.43, 0.8200000000000001, 1 ],
-[ 0.72, -0.59, 0 ],
-[ -0.44, 0.39, 1 ],
-[ 0.26, -0.03, 1 ],
-[ 0.9, 0.4, 1 ],
-[ -0.5, 0.35, 1 ],
-[ 0.05, 0.23, 0 ],
-[ -0.15, 0.21, 0 ],
-[ -0.17, -0.3, 1 ],
-[ 0.36, -0.1, 1 ],
-[ 0.59, -0.13, 0 ],
-[ -0.14, -0.53, 1 ],
-[ -0.16, -0.9500000000000001, 0 ],
-[ 0.52, 0.39, 1 ],
-[ 0.07000000000000001, 0.25, 0 ],
-[ -0.93, -0.37, 0 ],
-[ 0.37, 0.38, 1 ],
-[ -0.76, -0.08, 0 ],
-[ -0.64, -0.93, 0 ],
-[ 0.25, 0.01, 0 ],
-[ 0.42, -0.24, 0 ],
-[ -0.63, 0.07000000000000001, 0 ],
-[ 0.11, -0.18, 1 ],
-[ 0.68, -0.19, 0 ],
-[ 0.9500000000000001, 0.9500000000000001, 0 ],
-[ 0.09, -0.92, 0 ],
-[ 0.8200000000000001, 0.18, 0 ],
-[ 0.14, 0.07000000000000001, 0 ],
-[ 0.84, -0.26, 0 ],
-[ 0.08, -0.89, 0 ],
-[ -0.47, -0.71, 0 ],
-[ -0.12, -0.16, 1 ],
-[ 0.96, -0.67, 0 ],
-[ -0.14, -0.34, 1 ],
-[ -0.05, -0.1, 1 ],
-[ 0.38, -0.92, 0 ],
-[ -0.43, 0.4, 1 ],
-[ -0.1, 0.73, 0 ],
-[ 0.46, 0.89, 1 ],
-[ -0.47, 0.19, 1 ],
-[ 0.18, 0.23, 1 ],
-[ 1., -0.01, 0 ],
-[ -0.33, 0.43, 1 ],
-[ -0.24, -0.6900000000000001, 0 ],
-[ -0.96, -0.71, 0 ],
-[ -0.78, -0.04, 0 ],
-[ 0.78, 0.87, 0 ],
-[ 0.19, -0.7000000000000001, 0 ],
-[ 0.77, 0.04, 0 ],
-[ -0.92, 0.08, 0 ],
-[ 0.23, -0.3, 1 ],
-[ -0.14, -0.14, 1 ],
-[ -0.68, 0.3, 1 ],
-[ 0.67, -0.25, 0 ],
-[ -0.19, -0.21, 1 ],
-[ -0.63, -0.26, 0 ],
-[ 0.08, -0.07000000000000001, 1 ],
-[ 0.28, 0.5700000000000001, 1 ],
-[ -0.28, -0.47, 0 ],
-[ -0.15, 0.72, 1 ],
-[ 0.09, 0.14, 0 ],
-[ 0.17, -0.5600000000000001, 1 ],
-[ 0.96, 0.36, 0 ],
-[ -0.61, -0.55, 0 ],
-[ -0.65, 0.25, 1 ],
-[ -0.48, 0.5, 1 ],
-[ 0.04, 0.06, 0 ],
-[ -0.05, -0.11, 1 ],
-[ -0.17, -0.97, 0 ],
-[ -0.16, -0.92, 0 ],
-[ -0.25, 0.44, 1 ],
-[ 0.5, 0.54, 1 ],
-[ 0.25, 0.15, 1 ],
-[ -0.64, -0.67, 0 ],
-[ -0.52, -0.27, 0 ],
-[ 0.9400000000000001, 0.48, 1 ],
-[ 0.2, -0.18, 1 ],
-[ 0.44, -0.02, 1 ],
-[ -0.63, -0.44, 0 ],
-[ 0.28, 0.46, 1 ],
-[ 1., 0.8300000000000001, 0 ],
-[ -0.7000000000000001, 0.65, 1 ],
-[ -0.8300000000000001, -0.79, 0 ],
-[ 0.68, 0.09, 0 ],
-[ 0.19, -0.8100000000000001, 0 ],
-[ 0.5700000000000001, 0.93, 1 ],
-[ 0.04, 0.08, 0 ],
-[ 0.3, 0.9500000000000001, 0 ],
-[ 0.12, -0.99, 0 ],
-[ 0.93, 0.25, 0 ],
-[ 0.63, -0.1, 0 ],
-[ -0.48, 0.12, 1 ],
-[ -0.09, -0.14, 1 ],
-[ -0.13, -0.24, 1 ],
-[ 0.1, -0.09, 1 ],
-[ 0.63, 0.11, 1 ],
-[ -0.48, 0.16, 1 ],
-[ 0.11, -0.2, 1 ],
-[ -0.21, -0.27, 1 ],
-[ 0.77, -0.49, 0 ],
-[ -0.78, -0.64, 0 ],
-[ 0.23, 0.61, 1 ],
-[ -0.98, -0.13, 0 ],
-[ -0.76, 0.16, 1 ],
-[ 0.91, 0.28, 0 ],
-[ -0.04, -0.6, 1 ],
-[ 0.66, -0.02, 0 ],
-[ 0.27, 0.39, 1 ],
-[ -0.89, -0.64, 0 ],
-[ 0.3, -0.12, 1 ],
-[ 0.13, -0.5, 1 ],
-[ 0.55, -0.13, 0 ],
-[ 0.39, 0.18, 1 ],
-[ -0.9500000000000001, -0.27, 0 ],
-[ 0.14, 0.71, 1 ],
-[ -0.47, -0.9500000000000001, 0 ],
-[ 0.12, 0.26, 0 ],
-[ 0.26, -0.18, 1 ],
-[ 0.3, -0.25, 1 ],
-[ 0.88, 0.19, 0 ],
-[ -0.04, -0.19, 1 ],
-[ 0.43, -0.23, 0 ],
-[ 0.16, -0.9, 0 ],
-[ 0.71, -0.03, 0 ],
-[ 0.06, 0.07000000000000001, 0 ],
-[ -0.75, 0.67, 1 ],
-[ -0.45, 0.23, 1 ],
-[ -0.32, 0.65, 1 ],
-[ 0.8200000000000001, 0.04, 0 ],
-[ -0.13, 0.67, 1 ],
-[ 0.02, 0.01, 0 ],
-[ 0.71, -0.2, 0 ],
-[ 0.07000000000000001, 0.63, 0 ],
-[ 0.8100000000000001, -0.59, 0 ],
-[ -0.31, 0.07000000000000001, 0 ],
-[ 0.06, -0.77, 1 ],
-[ -0.07000000000000001, 0.86, 0 ],
-[ -0.34, -0.62, 0 ],
-[ -0.55, 0.08, 1 ],
-[ 0.3, -0.12, 1 ],
-[ -0.14, -0.06, 1 ],
-[ -0.91, 0.36, 1 ],
-[ -0.9, 0.01, 0 ],
-[ 0.45, -0.53, 0 ],
-[ -0.73, 0.8300000000000001, 1 ],
-[ 0.41, -0.97, 0 ],
-[ 0.9500000000000001, -0.46, 0 ],
-[ 0.43, 0.08, 1 ],
-[ -0.72, -0.5600000000000001, 0 ],
-[ -0.8100000000000001, 0.25, 1 ],
-[ -0.28, -0.99, 0 ],
-[ 0.4, 0.66, 1 ],
-[ -0.9500000000000001, -0.06, 0 ],
-[ 0.18, -0.3, 1 ],
-[ 0.47, 0.18, 1 ],
-[ 0.99, -0.92, 0 ],
-[ -0.17, -0.19, 1 ],
-[ -0.07000000000000001, 0.01, 0 ],
-[ 0.3, -0.34, 1 ],
-[ -0.03, -0.1, 1 ],
-[ -0.52, -0.04, 0 ],
-[ 0.8300000000000001, -0.15, 0 ],
-[ 0.08, -0.68, 1 ],
-[ -0.31, -0.79, 0 ],
-[ -0.51, -0.99, 0 ],
-[ 0.39, 0.9, 1 ],
-[ 0.06, -0.29, 1 ],
-[ 0.19, 0.17, 0 ],
-[ -0.31, -0.14, 1 ],
-[ -0.07000000000000001, -0.11, 1 ],
-[ 0.26, 0.37, 1 ],
-[ -0.8, -0.22, 0 ],
-[ 0.48, 0.37, 1 ],
-[ -0.5700000000000001, -0.73, 0 ],
-[ -0.8, -0.02, 0 ],
-[ 0.19, 0.8300000000000001, 0 ],
-[ 0.96, -0.2, 0 ],
-[ -0.08, -0.19, 1 ],
-[ -0.76, 0.3, 1 ],
-[ 0.29, 0.29, 1 ],
-[ 0.3, 0.19, 1 ],
-[ -0.2, -0.09, 1 ],
-[ -0.11, 0.88, 0 ],
-[ 0.9500000000000001, -0.03, 0 ],
-[ 0.16, 0.88, 0 ],
-[ -0.17, -0.17, 1 ],
-[ 0.28, -0.75, 0 ],
-[ -0.29, -0.03, 1 ],
-[ -0.65, -0.59, 0 ],
-[ -0.09, 0.29, 0 ],
-[ 0.01, 0.91, 0 ],
-[ 0.99, -0.6900000000000001, 0 ],
-[ -0.35, 0.09, 1 ],
-[ -0.09, 0.47, 1 ],
-[ 0.9400000000000001, 0.6, 0 ],
-[ 0.78, 0.12, 0 ],
-[ -0.39, -0.93, 0 ],
-[ 0.72, 0.86, 1 ],
-[ 0.9400000000000001, 0.24, 0 ],
-[ -0.13, -0.04, 1 ],
-[ 0.18, 0.27, 1 ],
-[ 0.07000000000000001, 0.19, 0 ],
-[ 0.91, 0.13, 0 ],
-[ -0.3, 0.67, 1 ],
-[ -0.8300000000000001, -0.15, 0 ],
-[ -0.66, -0.16, 0 ],
-[ -0.39, -0.29, 0 ],
-[ 0.1, -0.5, 1 ],
-[ -1., 0.71, 0 ],
-[ 0.3, -0.9, 0 ],
-[ 0.38, -0.7000000000000001, 0 ],
-[ -0.5700000000000001, -0.9, 0 ],
-[ 0.03, -0.35, 1 ],
-[ 0.98, 0.18, 0 ],
-[ -0.15, 0.74, 1 ],
-[ -0.31, -0.64, 0 ],
-[ 0.77, -0.4, 0 ],
-[ 0.47, -0.44, 0 ],
-[ -0.85, -0.4, 0 ],
-[ 0.9400000000000001, 0.21, 0 ],
-[ -0.64, 0.5, 1 ],
-[ 0.16, 0.14, 0 ],
-[ 0.35, -0.77, 0 ],
-[ 0.43, -0.02, 1 ],
-[ 0.03, 0.89, 0 ],
-[ 0.55, 0.76, 1 ],
-[ 0.26, -0.33, 1 ],
-[ 0.48, 0.3, 1 ],
-[ 0.46, -0.07000000000000001, 1 ],
-[ 0.89, 0.86, 0 ],
-[ -0.19, 0.25, 1 ],
-[ 0.1, 0.7000000000000001, 0 ],
-[ -0.45, 0.28, 1 ],
-[ 0.3, -0.79, 0 ],
-[ -0.46, -0.1, 0 ],
-[ 0.21, 0.91, 0 ],
-[ 0.28, -0.8100000000000001, 0 ],
-[ 0.92, 0.23, 0 ],
-[ 0.97, 0.07000000000000001, 0 ],
-[ -0.78, -0.2, 0 ],
-[ -0.79, -0.17, 0 ],
-[ -0.07000000000000001, 0.8200000000000001, 0 ],
-[ -0.23, -0.32, 1 ],
-[ 0.02, 0.1, 0 ],
-[ -0.8300000000000001, 0.07000000000000001, 0 ],
-[ 0.44, -0.46, 0 ],
-[ 0.86, 0.71, 1 ],
-[ 0.62, 0.76, 1 ],
-[ -0.33, -0.18, 1 ],
-[ 0.28, 0.47, 1 ],
-[ -0.77, -0.21, 0 ],
-[ 0.4, -0.74, 0 ],
-[ 0.68, -0.75, 0 ],
-[ -0.26, -0.16, 1 ],
-[ -0.74, -0.54, 0 ],
-[ 0.38, -0.06, 1 ],
-[ 0.64, -0.28, 0 ],
-[ 0.7000000000000001, 0.44, 1 ],
-[ 0.5, -0.08, 0 ],
-[ 0.49, -0.68, 0 ],
-[ -0.07000000000000001, 0.8100000000000001, 0 ],
-[ -0.6, -0.55, 0 ],
-[ 0.88, -0.84, 0 ],
-[ -0.67, -0.07000000000000001, 0 ],
-[ 0.02, 0.28, 0 ],
-[ -0.03, -0.2, 1 ],
-[ -0.78, -0.48, 0 ],
-[ -0.66, -0.19, 0 ],
-[ 0.02, 0.88, 0 ],
-[ 0.14, -0.07000000000000001, 1 ],
-[ -0.97, -0.91, 0 ],
-[ -0.9500000000000001, -0.11, 0 ],
-[ 0.12, 0.46, 1 ],
-[ 0.18, -0.35, 1 ],
-[ -0.49, -0.74, 0 ],
-[ 0.84, -0.12, 0 ],
-[ 0.99, 0.17, 0 ],
-[ 0.03, 0.15, 0 ],
-[ 0.68, -0.07000000000000001, 0 ],
-[ -0.76, 0.87, 0 ],
-[ -0.2, 0.3, 1 ],
-[ 0.19, -0.06, 1 ],
-[ -0.39, -0.84, 0 ],
-[ -0.14, 0.68, 1 ],
-[ 0.67, -0.28, 0 ],
-[ 0.25, 0.14, 1 ],
-[ 0.8300000000000001, 0.44, 1 ],
-[ -0.13, 0.1, 0 ],
-[ 0.28, -0.14, 1 ],
-[ -0.06, 0.9500000000000001, 0 ],
-[ 0.2, 0.27, 1 ],
-[ -0.73, -0.03, 0 ],
-[ 0.06, -0.23, 1 ],
-[ -0.34, 0.71, 1 ],
-[ -0.54, -0.58, 0 ],
-[ -0.6, 0.92, 1 ],
-[ 0.54, -0.22, 0 ],
-[ -0.11, 0.71, 1 ],
-[ 0.26, 0.71, 1 ],
-[ 0.44, 0.04, 0 ],
-[ 0.01, -0.08, 1 ],
-[ 0.08, 0.62, 1 ],
-[ -0.16, 0.44, 1 ],
-[ 0.5700000000000001, 0.05, 0 ],
-[ -0.16, 0.32, 1 ],
-[ 0.13, 0.63, 1 ],
-[ -0.5, 0.05, 0 ],
-[ -0.24, 0.18, 1 ],
-[ -0.18, -0.6900000000000001, 0 ],
-[ -0.18, -0.17, 1 ],
-[ -0.62, 0.35, 1 ],
-[ -0.25, -0.23, 1 ],
-[ 0.89, -0.31, 0 ],
-[ -0.03, 0.33, 0 ],
-[ -0.1, -0.33, 1 ],
-[ -0.8, 0.46, 1 ],
-[ 0.26, 0.86, 1 ],
-[ 0.51, 0.25, 1 ],
-[ 0.22, -0.27, 1 ],
-[ 0.09, -0.74, 1 ],
-[ -0.3, -0.3, 1 ],
-[ -0.07000000000000001, 0.38, 1 ],
-[ 0.01, -0.12, 1 ],
-[ 0.34, 0.24, 1 ],
-[ 0.21, 0.26, 1 ],
-[ 0.37, 0.5, 1 ],
-[ 0.9500000000000001, 0.08, 0 ],
-[ 0.26, -0.68, 0 ],
-[ 0.31, 0.06, 0 ],
-[ -0.5600000000000001, 0.09, 1 ],
-[ 0.53, -0.78, 0 ],
-[ -0.38, 0.11, 1 ],
-[ -0.34, 0.13, 1 ],
-[ -0.44, -0.32, 0 ],
-[ -0.12, 0.53, 1 ],
-[ -0.89, -0.61, 0 ],
-[ -0.9, -0.84, 0 ],
-[ 0.8200000000000001, 0.32, 1 ],
-[ 0.76, 0.25, 1 ],
-[ -0.01, 0.29, 0 ],
-[ -0.9500000000000001, 0.27, 0 ],
-[ -0.86, 0.09, 0 ],
-[ 0.27, 0.49, 1 ],
-[ 0.51, 0.24, 1 ],
-[ -0.87, 0.5600000000000001, 1 ],
-[ 0.3, -0.23, 1 ],
-[ 0.13, 0.25, 1 ],
-[ 0.74, -0.36, 0 ],
-[ 0.5600000000000001, 0.7000000000000001, 1 ],
-[ -0.42, 0.89, 1 ],
-[ 0.28, 0.42, 1 ],
-[ 0.14, -0.18, 1 ],
+[ 0.03, -0.05, 1 ],
+[ -0.76, -0.37, 1 ],
+[ -0.06, 0.06, 1 ],
+[ -0.1, 0.1, 1 ],
+[ -0.99, 0.79, 1 ],
+[ -0.18, -0.88, 0 ],
+[ 0.41, 0.31, 1 ],
+[ 0.71, -0.44, 1 ],
+[ 0.42, 0.85, 1 ],
+[ 0.39, -0.21, 1 ],
+[ -0.45, 0.85, 1 ],
+[ -0.27, 0.73, 0 ],
+[ -0.96, 0.35, 1 ],
+[ -0.48, -0.4, 1 ],
+[ -0.09, 0.85, 0 ],
+[ -0.67, 0.45, 1 ],
+[ 0.12, 0.9500000000000001, 1 ],
+[ -0.8200000000000001, -0.06, 1 ],
+[ 0.39, 0.72, 1 ],
+[ -0.65, 0.65, 1 ],
+[ 0.16, 0.87, 1 ],
+[ 0.21, -0.66, 0 ],
+[ -0.75, 0.15, 1 ],
+[ 0.32, 0.01, 1 ],
+[ -0.47, 0.16, 1 ],
+[ 0.6900000000000001, -0.96, 0 ],
+[ 0.98, -0.62, 0 ],
+[ -0.8200000000000001, -0.43, 1 ],
+[ 0.96, -0.75, 0 ],
+[ 0.76, 0.78, 1 ],
+[ -0.72, 0.67, 1 ],
+[ 0.09, 0.8, 0 ],
+[ 0.42, -0.01, 1 ],
+[ 0.47, -0.85, 0 ],
+[ 0.28, 0.05, 1 ],
+[ -0.15, -0.07000000000000001, 1 ],
+[ -0.45, -0.61, 0 ],
+[ -0.23, -0.18, 1 ],
+[ 0.14, -0.06, 1 ],
+[ 0.93, -0.62, 0 ],
+[ 0.34, -0.32, 1 ],
+[ 0.5600000000000001, 0.49, 1 ],
+[ 0.8300000000000001, -0.2, 1 ],
+[ -0.04, -0.61, 0 ],
+[ -0.25, -0.91, 0 ],
+[ -0.76, -0.16, 1 ],
+[ -0.53, 0.5700000000000001, 1 ],
+[ 0.72, -0.72, 0 ],
+[ -0.03, -0.55, 0 ],
+[ 1., 0.7000000000000001, 1 ],
+[ -0.88, 0.9, 1 ],
+[ -0.11, 0.31, 0 ],
+[ 0.97, 0.07000000000000001, 1 ],
+[ -0.07000000000000001, -0.47, 1 ],
+[ -0.54, 0.55, 1 ],
+[ 0.61, -0.36, 1 ],
+[ -0.62, 0.98, 1 ],
+[ -0.92, 0.5, 1 ],
+[ 0.16, -0.51, 0 ],
+[ 0.91, 0.73, 1 ],
+[ 0.12, 0.38, 0 ],
+[ 0.4, 0.92, 1 ],
+[ 1., 0.02, 1 ],
+[ 0.46, -0.22, 1 ],
+[ -0.28, -0.28, 1 ],
+[ 0.67, 0.02, 1 ],
+[ -0.43, 0.92, 1 ],
+[ -0.49, 0.2, 1 ],
+[ 0.47, 0.22, 1 ],
+[ -0.63, -0.79, 0 ],
+[ -0.85, -0.86, 0 ],
+[ 0.75, -0.98, 0 ],
+[ 0.55, -0.48, 1 ],
+[ 0.54, 0.5700000000000001, 1 ],
+[ 0.9500000000000001, 0.51, 1 ],
+[ -0.55, 0.41, 1 ],
+[ -0.43, 0.5, 1 ],
+[ 0.33, -0.75, 0 ],
+[ -0.42, -0.27, 1 ],
+[ 0.99, 0.85, 1 ],
+[ 0.5, 0.15, 1 ],
+[ 0.26, 0.58, 0 ],
+[ 0.8100000000000001, -0.46, 1 ],
+[ 0.33, 0.93, 1 ],
+[ -0.05, 0.9, 1 ],
+[ 0.16, -0.01, 1 ],
+[ -0.65, 0.92, 1 ],
+[ 0.93, -0.98, 0 ],
+[ 0.97, -0.51, 0 ],
+[ 0.92, -0.24, 1 ],
+[ 0.97, 0.06, 1 ],
+[ 0.8200000000000001, 0.84, 1 ],
+[ 0.62, -0.03, 1 ],
+[ -0.96, 0.55, 1 ],
+[ -0.46, -0.93, 0 ],
+[ -0.34, -0.9, 0 ],
+[ 0.52, 0.67, 1 ],
+[ -0.34, -0.85, 0 ],
+[ 0.35, 0.26, 1 ],
+[ -0.18, -0.5700000000000001, 0 ],
+[ -0.16, 0.29, 0 ],
+[ 0.8, 0.48, 1 ],
+[ -0.35, 0.98, 1 ],
+[ 0.62, -0.36, 1 ],
+[ 0.96, -0.5600000000000001, 0 ],
+[ 0.68, 0.8200000000000001, 1 ],
+[ -0.52, -0.02, 1 ],
+[ 0.9, -0.02, 1 ],
+[ -0.49, -0.17, 1 ],
+[ -0.35, 0.13, 1 ],
+[ 0.9500000000000001, -0.96, 0 ],
+[ -0.86, -0.8100000000000001, 0 ],
[ 0.34, -0.24, 1 ],
-[ 0.02, -1., 0 ],
-[ 0.88, -0.66, 0 ],
-[ -0.8, 0.54, 1 ],
-[ 0.45, -0.33, 0 ],
-[ -0.65, -0.5700000000000001, 0 ],
-[ 0.07000000000000001, -0.97, 0 ],
-[ -0.05, -0.9, 1 ],
-[ -0.07000000000000001, 0.39, 1 ],
-[ -0.61, -0.05, 0 ],
-[ 0.52, 0.71, 1 ],
-[ 0.06, -0.02, 1 ],
-[ -0.27, -0.25, 1 ],
-[ 0.44, 0.61, 1 ],
-[ -0.08, -0.49, 1 ],
-[ -0.12, -0.08, 1 ],
-[ 0.01, -0.06, 1 ],
-[ 0.1, 0.98, 0 ],
-[ 0.5600000000000001, -0.55, 0 ],
-[ 0.22, 0.8, 1 ],
-[ 0.11, 0.26, 0 ],
-[ 0.85, -0.65, 0 ],
-[ 0.43, -0.9500000000000001, 0 ],
-[ -0.33, -0.29, 1 ],
-[ 0.08, 0.13, 0 ],
-[ 0.9500000000000001, -0.39, 0 ],
-[ 0.18, -0.07000000000000001, 1 ],
-[ -0.17, -0.11, 1 ],
-[ -0.78, 0.22, 1 ],
-[ -0.28, -0.42, 1 ],
-[ -0.5600000000000001, -0.28, 0 ],
-[ -0.89, -0.9500000000000001, 0 ],
-[ -0.53, -0.17, 0 ],
-[ 0.91, -0.98, 0 ],
-[ -0.29, 0.14, 1 ],
-[ 0.22, 0.41, 1 ],
-[ -0.6, 0.25, 1 ],
-[ -0.31, 0.21, 1 ],
-[ -0.46, 0.21, 1 ],
-[ 0.45, -0.93, 0 ],
-[ -0.78, 0.03, 0 ],
-[ -0.28, -0.76, 0 ],
-[ 0.87, -0.89, 0 ],
-[ -0.8200000000000001, -0.23, 0 ],
-[ -0.23, -0.23, 1 ],
-[ -0.61, 0.18, 1 ],
-[ -0.04, 0.96, 0 ],
-[ -0.33, -0.22, 1 ],
-[ -0.07000000000000001, -0.02, 1 ],
-[ -0.71, -0.05, 0 ],
-[ 0.01, 0.77, 0 ],
-[ 0.3, 0.19, 1 ],
-[ -0.2, 0.71, 1 ],
-[ 0.24, 0.25, 1 ],
-[ 0.3, 0.18, 1 ],
-[ -0.25, -0.03, 1 ],
-[ -0.12, 0.24, 0 ],
-[ -0.1, 0.36, 1 ],
-[ -0.9500000000000001, -0.33, 0 ],
-[ -0.19, 0.98, 0 ],
-[ -0.86, -0.64, 0 ],
-[ 0.43, 0.01, 0 ],
-[ -0.13, 0.23, 0 ],
-[ -0.16, 0.38, 1 ],
-[ -0.28, 0.71, 1 ],
-[ 0.67, -0.79, 0 ],
-[ -0.01, 0.1, 0 ],
-[ -0.46, 0.97, 0 ],
-[ -0.21, -0.77, 0 ],
-[ -0.24, 0.18, 1 ],
-[ -0.42, -0.3, 0 ],
-[ 0.98, -0.45, 0 ],
-[ 0.08, 0.8300000000000001, 0 ],
-[ -0.4, -0.02, 1 ],
-[ -0.07000000000000001, 0.67, 0 ],
-[ 0.61, -0.11, 0 ],
-[ -0.09, -0.03, 1 ],
-[ 0.14, -0.54, 1 ],
-[ -0.18, -0.27, 1 ],
-[ 0.13, 0.16, 0 ],
-[ 0.23, -0.99, 0 ],
-[ -0.3, 0.6, 1 ],
-[ -0.02, 0.88, 0 ],
-[ -0.46, 0.61, 1 ],
-[ 0.09, -0.12, 1 ],
-[ 0.1, 0.7000000000000001, 0 ],
-[ -0.12, 0.1, 0 ],
-[ 0.33, -0.29, 1 ],
-[ 0.08, -0.13, 1 ],
-[ 0.64, 0.19, 1 ],
-[ 0.06, 0.29, 0 ],
-[ 0.2, -0.84, 0 ],
-[ 0.14, 0.43, 1 ],
-[ 0.28, 0.98, 0 ],
-[ 0.08, 0.16, 0 ],
-[ 0.24, -0.41, 1 ],
-[ 0.66, 0.58, 1 ],
-[ 0.28, 0.8, 1 ],
-[ 0.77, -0.72, 0 ],
-[ -0.25, 0.17, 1 ],
-[ 0.67, -0.74, 0 ],
-[ -0.38, 0.08, 1 ],
-[ -0.68, 0.42, 1 ],
-[ 0.34, -0.5700000000000001, 0 ],
-[ 0.07000000000000001, 0.07000000000000001, 0 ],
-[ 0.26, -0.04, 1 ],
-[ -0.28, -0.51, 0 ],
-[ -0.8100000000000001, 0.74, 1 ],
-[ 0.45, -0.24, 0 ],
-[ 0.08, -0.18, 1 ],
-[ -0.29, -0.08, 1 ],
-[ 0.37, 0.6, 1 ],
-[ -0.85, 0.86, 0 ],
-[ -0.89, 0.62, 1 ],
-[ -0.9, -0.59, 0 ],
-[ -0.63, -0.48, 0 ],
-[ -0.05, -0.3, 1 ],
-[ -0.16, 0.23, 1 ],
-[ 0.74, -0.21, 0 ],
-[ -0.3, -0.91, 0 ],
-[ 0.45, -0.87, 0 ],
-[ 0.23, -0.9500000000000001, 0 ],
-[ 0.7000000000000001, -0.21, 0 ],
-[ -0.44, -0.93, 0 ],
-[ -0.34, -0.3, 1 ],
-[ 0.53, 0.68, 1 ],
-[ -0.45, 0.93, 1 ],
-[ -0.84, -0.36, 0 ],
-[ -0.54, -0.71, 0 ],
-[ 0.11, 0.28, 0 ],
-[ -0.63, 0.09, 1 ],
-[ 0.14, -0.3, 1 ],
-[ -0.74, 0.43, 1 ],
-[ 0.08, -0.35, 1 ],
-[ -0.86, -0.16, 0 ],
-[ 0.68, 0.22, 1 ],
-[ -0.29, 0.85, 1 ],
-[ 0.23, 0.15, 1 ],
-[ -0.17, -0.68, 0 ],
-[ -0.21, 0.63, 1 ],
-[ 0.73, 0.26, 1 ],
-[ 0.89, -0.11, 0 ],
-[ 0.16, 0.18, 0 ],
-[ -0.66, -0.5, 0 ],
-[ 0.48, -0.13, 0 ],
-[ -0.3, 0.44, 1 ],
-[ -0.02, 0.9500000000000001, 0 ],
-[ -0.39, 0.18, 1 ],
-[ -0.7000000000000001, -0.34, 0 ],
-[ -0.8, 0.22, 1 ],
-[ 0.45, -0.16, 0 ],
-[ 0.49, -0.91, 0 ],
-[ 0.24, -0.04, 1 ],
-[ -0.79, -0.74, 0 ],
-[ -0.96, 0.42, 0 ],
-[ -0.61, -0.07000000000000001, 0 ],
-[ 0.02, 0.11, 0 ],
-[ -0.1, -0.08, 1 ],
-[ 0.85, -0.28, 0 ],
-[ -0.14, 0.8100000000000001, 0 ],
-[ -0.02, -0.24, 1 ],
-[ 0.8200000000000001, -0.46, 0 ],
-[ -0.7000000000000001, 0.5, 1 ],
-[ 0.8200000000000001, -0.48, 0 ],
-[ -0.8100000000000001, -0.63, 0 ],
-[ 0.85, 0.01, 0 ],
-[ 0.7000000000000001, -0.49, 0 ],
-[ -0.73, -0.23, 0 ],
-[ 0.06, -0.66, 1 ],
-[ -0.05, 0.06, 0 ],
-[ -0.38, 0.25, 1 ],
-[ -0.46, -0.28, 0 ],
-[ -0.25, -0.5700000000000001, 0 ],
-[ -0.44, 0.96, 0 ],
-[ 0.54, -0.28, 0 ],
-[ 0.66, 0.2, 1 ],
-[ -0.24, -0.72, 0 ],
-[ 0.61, -0.3, 0 ],
-[ 0.13, -0.8100000000000001, 0 ],
-[ 0.26, 0.1, 0 ],
-[ -0.98, 0.44, 0 ],
-[ -0.08, 0.52, 1 ],
-[ -0.16, 0.1, 0 ],
-[ -0.44, -0.23, 0 ],
-[ 0.51, 0.63, 1 ],
-[ -0.9500000000000001, -0.16, 0 ],
-[ -0.42, -0.87, 0 ],
-[ 0.23, 0.3, 1 ],
-[ 0.24, -0.68, 0 ],
-[ 0.5700000000000001, -0.47, 0 ],
-[ 0.74, 0.77, 1 ],
-[ -0.96, 0.98, 0 ],
-[ -0.99, 0.2, 0 ],
-[ -0.87, 0.92, 0 ],
-[ 0.58, -0.72, 0 ],
-[ -0.31, 0.05, 0 ],
-[ -0.49, 0.3, 1 ],
-[ -0.45, -0.92, 0 ],
-[ -0.01, 0.99, 0 ],
-[ -0.04, 0.89, 0 ],
-[ -0.88, 0.6900000000000001, 1 ],
-[ -0.34, -0.68, 0 ],
-[ -0.23, -0.64, 0 ],
-[ 0.6900000000000001, 0.19, 1 ],
-[ 0.09, -0.26, 1 ],
-[ 0.9400000000000001, -0.14, 0 ],
-[ 0.11, -0.3, 1 ],
-[ -0.3, -0.96, 0 ],
-[ 0.7000000000000001, 1., 0 ],
-[ 0.25, -0.26, 1 ],
-[ -0.03, 0.36, 0 ],
-[ -0.21, -0.01, 1 ],
-[ 0.18, 0.64, 1 ],
-[ 0.31, -0.64, 0 ],
-[ 0.55, 0.61, 1 ],
-[ -0.88, 0.74, 0 ],
-[ 0.09, 0.97, 0 ],
-[ -0.04, -0.71, 1 ],
-[ -0.07000000000000001, -0.4, 1 ],
-[ -0.5, 0.03, 0 ],
-[ -0.09, -0.18, 1 ],
-[ 0.15, 0.5600000000000001, 1 ],
-[ 0.12, 0.66, 1 ],
-[ -0.37, -0.46, 0 ],
-[ 0.22, 0.93, 0 ],
-[ -0.5700000000000001, 0.25, 1 ],
-[ -0.06, -0.11, 1 ],
-[ 0.58, -0.02, 0 ],
-[ 0.02, 0.09, 0 ],
-[ 0.21, -0.2, 1 ],
-[ 0.02, -0.96, 1 ],
-[ 0.8100000000000001, 0.8, 1 ],
-[ -0.88, -0.32, 0 ],
-[ 0.28, 0.29, 1 ],
-[ -0.68, -0.8100000000000001, 0 ],
-[ 0.38, 0.1, 1 ],
-[ 0.06, -0.27, 1 ],
-[ 0.22, 0.26, 1 ],
-[ -0.64, -0.8300000000000001, 0 ],
-[ -0.25, -0.85, 0 ],
-[ 0.92, -0.85, 0 ],
-[ -0.11, -0.2, 1 ],
-[ 0.19, 0.26, 1 ],
-[ 0.29, -0.19, 1 ],
-[ 0.16, -0.61, 1 ],
-[ 0.17, 0.54, 1 ],
-[ 0.18, 0.21, 1 ],
-[ 0.31, -0.6, 0 ],
-[ 0.79, -0.68, 0 ],
-[ -0.19, -0.04, 1 ],
-[ -0.3, -0.99, 0 ],
-[ 0.34, 0.14, 1 ],
-[ 0.97, -0.6, 0 ],
-[ -0.77, -0.5600000000000001, 0 ],
-[ 0.77, -0.47, 0 ],
-[ -0.84, 0.42, 1 ],
-[ 1., 0.8100000000000001, 0 ],
-[ -0.3, -0.61, 0 ],
-[ -0.24, -0.35, 1 ],
-[ -0.28, 0.88, 1 ],
-[ -0.9, 0.21, 0 ],
-[ -0.2, -0.12, 1 ],
-[ 0.88, -0.58, 0 ],
-[ 0.73, -0.74, 0 ],
-[ -0.06, -0.04, 1 ],
-[ -0.96, -0.97, 0 ],
-[ -0.25, 0.14, 1 ],
-[ -0.55, 0.58, 1 ],
-[ -0.01, -0.25, 1 ],
-[ -0.36, -0.12, 1 ],
-[ 0.12, 0.46, 1 ],
-[ 0.59, 0.24, 1 ],
-[ -0.46, -0.25, 0 ],
-[ 0.02, 0.3, 0 ],
-[ 0.58, 0.18, 1 ],
-[ -0.27, 0.88, 1 ],
-[ -0.05, -0.18, 1 ],
-[ 0.03, -0.12, 1 ],
-[ 0.27, 0.37, 1 ],
-[ -0.6, 0.71, 1 ],
-[ 0.35, -0.52, 0 ],
-[ -0.27, -0.64, 0 ],
-[ 0.9, 0.14, 0 ],
-[ -0.77, -0.58, 0 ],
-[ -0.18, 0.71, 1 ],
-[ 0.07000000000000001, 0.8200000000000001, 0 ],
-[ 0.8200000000000001, 0.15, 0 ],
-[ 0.36, -0.47, 0 ],
-[ 0.05, -0.37, 1 ],
-[ 0.23, 0.88, 0 ],
-[ 0.22, -0.06, 1 ],
-[ 0.99, 0.85, 0 ],
-[ -0.61, -0.89, 0 ],
-[ -0.55, -0.05, 0 ],
-[ -0.1, 0.75, 0 ],
-[ -0.28, -0.33, 1 ],
-[ 0.05, -0.24, 1 ],
-[ 0.71, 0.28, 1 ],
-[ -0.14, -0.18, 1 ],
-[ 0.02, -0.21, 1 ],
-[ -0.93, 0.74, 0 ],
-[ 0.14, 0.01, 0 ],
-[ 0.75, 0.22, 1 ],
-[ 0.18, 0.91, 0 ],
-[ 0.5700000000000001, 0.03, 0 ],
-[ 0.88, 0.1, 0 ],
-[ 0.64, 0.66, 1 ],
-[ 0.17, -0.04, 1 ],
-[ 0.4, -0.3, 0 ],
-[ 0.64, 0.29, 1 ],
-[ 0.14, -0.04, 1 ],
-[ -0.74, 0.39, 1 ],
-[ 0.59, -0.23, 0 ],
-[ 0.28, -0.9, 0 ],
-[ -0.24, -0.19, 1 ],
-[ 0.8100000000000001, 0.02, 0 ],
-[ -0.13, 0.47, 1 ],
-[ 0.08, -0.23, 1 ],
-[ -0.39, 0.05, 0 ],
-[ 0.18, 0.8, 1 ],
-[ -0.49, -0.01, 1 ],
-[ -0.22, 0.12, 0 ],
-[ 0.04, 0.12, 0 ],
-[ 0.4, -0.74, 0 ],
-[ 0.3, -0.21, 1 ],
-[ -0.98, 0.35, 0 ],
-[ -0.97, 0.52, 0 ],
-[ -0.03, 0.68, 0 ],
-[ 0.5700000000000001, 0.26, 1 ],
-[ 0.78, 0.86, 0 ],
-[ 0.2, -0.41, 1 ],
-[ -0.09, 0.14, 0 ],
-[ 0.34, 0.51, 1 ],
-[ 0.03, 0.08, 0 ],
-[ 0.61, 0.88, 1 ],
-[ 0.2, -0.23, 1 ],
-[ 0.06, -0.37, 1 ],
-[ 0.65, -0.36, 0 ],
-[ -0.14, 0.77, 0 ],
-[ -0.04, -0.85, 1 ],
-[ -0.4, -0.51, 0 ],
-[ -0.8200000000000001, 0.1, 0 ],
-[ -0.2, 0.27, 1 ],
-[ 0.13, 0.34, 1 ],
-[ 0.78, 0.29, 1 ],
-[ 0.91, -0.62, 0 ],
-[ 0.85, -0.73, 0 ],
-[ 0.58, 0.24, 1 ],
-[ -0.19, 0.06, 0 ],
-[ 0.09, 0.7000000000000001, 0 ],
-[ -0.04, 0.01, 0 ],
-[ -0.17, 0.29, 1 ],
-[ -0.07000000000000001, 0.27, 0 ],
-[ 0.66, 0.12, 1 ],
-[ 0.76, -0.28, 0 ],
-[ -0.09, 0.24, 0 ],
-[ 0.02, 0.52, 0 ],
-[ -0.8, 0.67, 1 ],
-[ 0.91, 0.63, 1 ],
-[ -0.24, 0.9400000000000001, 0 ],
-[ 0.12, 0.8, 0 ],
-[ -0.06, -0.2, 1 ],
-[ 0.28, 0.41, 1 ],
-[ 0.62, 0.03, 0 ],
-[ 0.03, 0.12, 0 ],
-[ -0.04, 0.54, 0 ],
-[ 0.96, -0.29, 0 ],
-[ 0.44, -0.06, 1 ],
-[ -0.55, -0.86, 0 ],
-[ 0.14, -0.92, 0 ],
-[ -0.76, -0.3, 0 ],
-[ -0.18, 0.05, 0 ],
-[ -0.77, 0.24, 1 ],
-[ 0.2, -0.6, 1 ],
-[ -0.67, -0.22, 0 ],
-[ -0.4, 0.03, 0 ],
-[ 0.96, 0.9500000000000001, 0 ],
-[ 0.99, 0.34, 0 ],
-[ -0.17, -0.29, 1 ],
-[ -0.48, 0.16, 1 ],
-[ -0.06, -0.07000000000000001, 1 ],
-[ 0.09, -0.06, 1 ],
-[ 0.24, -0.15, 1 ],
-[ 0.15, 0.11, 0 ],
-[ 0.91, -0.15, 0 ],
-[ 0.72, -0.12, 0 ],
-[ -0.12, 0.9500000000000001, 0 ],
-[ -0.14, 0.58, 1 ],
-[ -0.38, 0.01, 0 ],
-[ 0.39, 0.45, 1 ],
-[ 0.6, -0.17, 0 ],
-[ 0.22, -0.98, 0 ],
-[ -0.16, -0.88, 0 ],
-[ 0.43, 0.12, 1 ],
-[ -0.02, 0.43, 0 ],
-[ 0.28, 0.46, 1 ],
-[ -0.25, -0.23, 1 ],
-[ 0.99, -0.62, 0 ],
-[ -0.68, -0.11, 0 ],
-[ 0.5600000000000001, -0.68, 0 ],
-[ 0.6, -0.01, 0 ],
-[ -0.8300000000000001, -0.18, 0 ],
-[ 0.14, 0.05, 0 ],
-[ 0.67, 0.44, 1 ],
-[ 0.27, -0.3, 1 ],
-[ 0.65, 0.8, 1 ],
-[ 0.25, 0.01, 0 ],
-[ 0.53, -0.03, 0 ],
-[ 0.11, 0.03, 0 ],
-[ -0.73, 0.21, 1 ],
-[ 0.2, -0.3, 1 ],
-[ -0.3, -0.8, 0 ],
-[ -0.04, 0.74, 0 ],
-[ 0.07000000000000001, 0.7000000000000001, 0 ],
-[ 0.4, -0.5700000000000001, 0 ],
-[ -0.73, -0.74, 0 ],
-[ 0.19, 0.76, 1 ],
-[ 0.18, 0.01, 0 ],
-[ -0.72, -0.39, 0 ],
-[ -0.96, 0.09, 0 ],
-[ 0.14, 0.14, 0 ],
-[ -0.51, 0.37, 1 ],
-[ 0.3, 0.47, 1 ],
-[ 0.24, -0.39, 1 ],
-[ 0.49, -0.24, 0 ],
-[ 0.19, -0.91, 0 ],
-[ -0.5700000000000001, -0.73, 0 ],
-[ -0.43, -0.89, 0 ],
-[ 0.21, -0.16, 1 ],
-[ 0.73, 0.09, 0 ],
-[ 0.72, 0.02, 0 ],
-[ 0.49, -0.22, 0 ],
-[ 0.41, -0.84, 0 ],
-[ -0.25, 0.15, 1 ],
-[ -0.05, -0.1, 1 ],
-[ -0.34, -0.3, 1 ],
-[ 0.33, -0.05, 1 ],
-[ 0.97, -0.1, 0 ],
-[ 0.06, -0.04, 1 ],
-[ 0.97, 0.18, 0 ],
-[ 0.97, -0.16, 0 ],
-[ -0.9400000000000001, 0.6, 0 ],
-[ -0.2, -0.22, 1 ],
-[ -0.2, -0.79, 0 ],
-[ 0.77, -0.1, 0 ],
-[ 0.44, 0.58, 1 ],
-[ 0.93, 0.44, 1 ],
-[ 0.8, 0.86, 0 ],
-[ 0.4, -0.67, 0 ],
-[ -0.14, -0.08, 1 ],
-[ 0.8100000000000001, 0.25, 1 ],
-[ 0.8, -0.9400000000000001, 0 ],
-[ -0.18, -0.33, 1 ],
-[ -0.4, 0.75, 1 ],
-[ 0.21, 0.54, 1 ],
-[ -0.25, -0.64, 0 ],
-[ 0.91, -0.22, 0 ],
-[ 0.4, 0.35, 1 ],
-[ -0.17, 0.46, 1 ],
-[ 0.17, 0.55, 1 ],
-[ -0.55, 0.25, 1 ],
-[ 0.06, -0.5700000000000001, 1 ],
-[ 0.16, -0.86, 0 ],
-[ -0.06, -0.36, 1 ],
-[ 0.53, -0.26, 0 ],
-[ 0.19, -1., 0 ],
-[ 0.4, -0.42, 0 ],
-[ -0.31, 0.28, 1 ],
-[ -0.26, -0.31, 1 ],
-[ -0.19, 0.02, 0 ],
-[ 0.74, 0.8200000000000001, 1 ],
-[ 0.29, 0.32, 1 ],
-[ -0.07000000000000001, -0.98, 0 ],
-[ 0.17, -0.72, 0 ],
-[ 0.04, 0.87, 0 ],
-[ -0.03, -0.37, 1 ],
-[ -0.93, -0.29, 0 ],
-[ -0.6, 0.8100000000000001, 1 ],
-[ -0.08, -0.45, 1 ],
-[ -0.09, -0.23, 1 ],
-[ 0.68, 0.17, 1 ],
-[ 0.6, -0.73, 0 ],
-[ 0.93, -0.05, 0 ],
-[ 0.3, 0.21, 1 ],
-[ 0.89, -0.08, 0 ],
-[ 0.18, -0.37, 1 ],
-[ -0.39, 0.37, 1 ],
-[ 0.41, 0.72, 1 ],
-[ 0.13, -0.7000000000000001, 1 ],
-[ -0.5600000000000001, -0.66, 0 ],
-[ 0.54, 0.58, 1 ],
-[ 0.45, 0.73, 1 ],
-[ -0.26, -0.03, 1 ],
-[ 0.65, -0.71, 0 ],
-[ -0.5600000000000001, 0.45, 1 ],
-[ 0.5, 0.37, 1 ],
-[ -0.91, 0.01, 0 ],
-[ -0.93, 0.27, 0 ],
-[ -0.34, 0.34, 1 ],
-[ 0.05, -0.18, 1 ],
-[ 0.05, -0.76, 1 ],
-[ 0.17, -0.15, 1 ],
-[ 0.49, -0.28, 0 ],
-[ -0.58, 0.34, 1 ],
-[ 0.19, 0.67, 1 ],
-[ 0.06, -0.3, 1 ],
-[ 0.35, -0.05, 1 ],
-[ -0.87, 0.87, 0 ],
-[ -0.8, -0.02, 0 ],
-[ -0.66, -0.18, 0 ],
-[ -0.65, -0.24, 0 ],
-[ -0.8100000000000001, 0.33, 1 ],
-[ -0.55, -0.97, 0 ],
-[ -0.72, 0.23, 1 ],
-[ -0.4, -0.87, 0 ],
-[ 0.9500000000000001, 0.12, 0 ],
-[ 0.17, 0.16, 0 ],
-[ -0.8200000000000001, -0.06, 0 ],
-[ -0.3, 0.13, 1 ],
-[ -0.42, -0.03, 1 ],
-[ 0.25, 0.88, 0 ],
-[ 0.25, 0.8300000000000001, 1 ],
-[ -0.62, -0.3, 0 ],
-[ -0.34, -0.8200000000000001, 0 ],
-[ 0.66, -0.14, 0 ],
-[ -0.89, -0.72, 0 ],
-[ -0.73, -0.4, 0 ],
-[ -0.14, -0.84, 0 ],
-[ 0.27, -0.68, 0 ],
-[ 0.2, -0.24, 1 ],
-[ -0.73, 0.06, 0 ],
-[ 0.33, 0.48, 1 ],
-[ -0.5600000000000001, 0.29, 1 ],
-[ 0.16, 0.12, 0 ],
-[ -0.73, -0.66, 0 ],
-[ -0.33, 0.28, 1 ],
-[ 0.23, 0.46, 1 ],
-[ 0.02, 0.35, 0 ],
-[ 0.21, 0.64, 1 ],
-[ 0.14, -0.13, 1 ],
-[ -0.17, 0.28, 1 ],
-[ 0.28, -0.03, 1 ],
-[ -0.78, 0.22, 1 ],
-[ -0.44, 0.11, 1 ],
-[ -0.66, 0.65, 1 ],
-[ -0.09, 0.25, 0 ],
-[ -0.8100000000000001, 0.22, 1 ],
-[ -0.73, 0.12, 1 ],
-[ 0.06, 0.72, 0 ],
-[ 0.65, -0.79, 0 ],
-[ -0.88, 0.26, 0 ],
-[ 0.53, -0.36, 0 ],
-[ 0.85, -0.12, 0 ],
-[ 0.06, -0.29, 1 ],
-[ 0.17, 0.14, 0 ],
-[ -0.09, -0.07000000000000001, 1 ],
-[ 0.12, -0.15, 1 ],
-[ 0.93, 0.08, 0 ],
-[ -0.84, 0.19, 0 ],
-[ -0.25, -0.05, 1 ],
-[ -0.33, 0.8, 1 ],
-[ 0.43, -0.37, 0 ],
-[ 0.62, -0.4, 0 ],
-[ -0.48, 0.02, 0 ],
+[ 0.8200000000000001, 0.13, 1 ],
+[ -0.8300000000000001, 0.06, 1 ],
+[ -0.4, 0.9400000000000001, 1 ],
+[ 0.68, -0.29, 1 ],
+[ 0.35, 0.41, 0 ],
+[ 0.14, 0.92, 1 ],
+[ 0.09, -0.37, 1 ],
+[ -0.89, -0.93, 0 ],
+[ -0.41, -0.14, 1 ],
+[ -0.75, 0.91, 1 ],
+[ -0.77, -0.71, 0 ],
+[ -0.01, 0.7000000000000001, 0 ],
+[ -0.2, 0.01, 1 ],
+[ 0.28, 0.9400000000000001, 1 ],
+[ -0.84, -0.09, 1 ],
+[ 0.29, 0.9, 1 ],
+[ -0.6, -0.06, 1 ],
+[ -0.29, -0.47, 1 ],
+[ -0.86, -0.53, 0 ],
+[ 0.41, -0.98, 0 ],
+[ 0.2, 0.22, 0 ],
+[ 0.44, -0.68, 0 ],
+[ -0.01, -0.21, 1 ],
+[ 0.19, -0.44, 1 ],
+[ 0.05, 0.99, 1 ],
+[ -0.02, 0.32, 0 ],
+[ -0.9400000000000001, 0.71, 1 ],
+[ -0.71, -0.9, 0 ],
+[ 0.31, -0.21, 1 ],
+[ -0.54, 0.44, 1 ],
+[ -0.7000000000000001, -0.66, 0 ],
+[ -0.93, -0.43, 1 ],
+[ 0.49, 0.88, 1 ],
+[ -0.65, -0.91, 0 ],
+[ -0.71, -0.27, 1 ],
+[ 0.64, 0.65, 1 ],
+[ -0.8200000000000001, 0.18, 1 ],
+[ 0.12, 0.26, 0 ],
+[ -0.62, -0.39, 1 ],
+[ 0.5, -0.39, 1 ],
+[ 0.3, 0.08, 1 ],
+[ -0.44, -0.6, 0 ],
+[ 0.14, 0.18, 0 ],
+[ -0.72, 0.8200000000000001, 1 ],
+[ -0.14, -0.24, 1 ],
+[ -0.87, -0.37, 1 ],
+[ -0.39, 0.72, 1 ],
+[ 0.96, 0.5700000000000001, 1 ],
+[ -0.16, 0.38, 0 ],
+[ -0.8200000000000001, 0.92, 1 ],
+[ 0.72, -0.92, 0 ],
+[ -0.28, 0.29, 0 ],
+[ 0.16, -0.72, 0 ],
+[ 0.71, 0.75, 1 ],
+[ -0.89, -0.14, 1 ],
+[ -0.35, -0.28, 1 ],
+[ -0.48, -0.79, 0 ],
+[ 0.7000000000000001, -0.89, 0 ],
+[ -0.92, -0.05, 1 ],
+[ 0.13, -0.45, 1 ],
+[ 0.42, -0.63, 0 ],
+[ -0.52, -0.02, 1 ],
+[ -0.26, 0.54, 0 ],
+[ -0.38, 0.62, 0 ],
+[ -0.6, 0.19, 1 ],
[ 0.4, 0.29, 1 ],
-[ 0.84, -0.04, 0 ],
-[ -0.32, -0.36, 1 ],
-[ 0.98, -0.02, 0 ],
-[ -0.8300000000000001, -0.22, 0 ],
-[ -0.17, 0.02, 0 ],
-[ 0.74, -0.43, 0 ],
-[ -0.98, 0.75, 0 ],
-[ -0.87, 0.39, 1 ],
-[ -0.04, -0.39, 1 ],
-[ 0.27, 0.45, 1 ],
-[ 0.2, 0.67, 1 ],
-[ 0.39, -0.08, 1 ],
-[ 0.59, 0.02, 0 ],
-[ 0.34, -0.24, 1 ],
-[ -0.43, 0.64, 1 ],
-[ -0.05, -0.3, 1 ],
-[ -0.71, -0.23, 0 ],
-[ 0.93, 0.9, 0 ],
-[ 0.01, 0.18, 0 ],
-[ 0.63, -0.53, 0 ],
-[ -0.19, 0.16, 0 ],
-[ 0.51, 0.42, 1 ],
-[ -0.92, 0.77, 0 ],
-[ -0.38, -0.16, 1 ],
-[ 0.1, 0.44, 1 ],
-[ -0.8300000000000001, -0.29, 0 ],
-[ 0.04, 0.25, 0 ],
-[ -0.22, 0.13, 0 ],
-[ 0.8100000000000001, 0.17, 0 ],
-[ -0.3, 0.27, 1 ],
-[ -0.38, -0.26, 0 ],
-[ 0.84, -0.03, 0 ],
-[ 0.02, 0.2, 0 ],
-[ -0.73, -0.01, 0 ],
-[ -0.76, 0.29, 1 ],
-[ 0.42, -0.4, 0 ],
-[ 0.87, 0.3, 1 ],
-[ 0.2, 0.18, 1 ],
-[ -0.8100000000000001, -0.29, 0 ],
-[ 0.19, 0.98, 0 ],
+[ -0.39, -0.73, 0 ],
+[ 0.48, 0.41, 1 ],
+[ 0.39, -0.29, 1 ],
+[ -0.48, -0.25, 1 ],
+[ -0.97, 0.15, 1 ],
+[ 0.62, -0.84, 0 ],
+[ 0.66, -0.93, 0 ],
+[ -0.97, 0.26, 1 ],
+[ -0.74, 0.05, 1 ],
+[ 0.37, -0.86, 0 ],
+[ 0.64, 0.36, 1 ],
+[ -0.37, -0.51, 0 ],
+[ 0.88, -0.11, 1 ],
+[ -0.75, -0.53, 0 ],
+[ 0.8300000000000001, 0.76, 1 ],
+[ -0.11, -0.37, 1 ],
+[ 0.5, 0.9500000000000001, 1 ],
+[ -0.67, 0.03, 1 ],
+[ -0.79, -0.84, 0 ],
+[ 0.49, 0.39, 1 ],
+[ -0.61, 0.76, 1 ],
+[ 0.32, 0.05, 1 ],
+[ -0.5600000000000001, -0.52, 0 ],
+[ -0.02, -0.15, 1 ],
+[ -0.72, -0.25, 1 ],
+[ -0.55, 0.64, 1 ],
+[ 0.8300000000000001, -0.34, 1 ],
+[ -0.89, -0.8100000000000001, 0 ],
+[ 0.34, 0.66, 0 ],
+[ -0.27, -0.16, 1 ],
+[ 0.38, -0.32, 1 ],
+[ 0.6, -0.11, 1 ],
+[ -0.44, 0.54, 1 ],
+[ -0.41, -0.25, 1 ],
+[ 0.78, -0.9, 0 ],
+[ 0.23, 0.24, 0 ],
+[ -0.48, -0.36, 1 ],
+[ -0.32, -0.9400000000000001, 0 ],
+[ 0.84, -0.5, 0 ],
+[ -0.87, 0.8300000000000001, 1 ],
+[ 0.98, 0.9400000000000001, 1 ],
+[ -0.51, 0.78, 1 ],
+[ 0.41, 1., 1 ],
+[ 0.91, 0.25, 1 ],
+[ -0.74, 0.37, 1 ],
+[ 0.35, 0.91, 1 ],
+[ 0.35, -0.38, 1 ],
+[ -0.78, -0.78, 0 ],
+[ -0.8100000000000001, -0.46, 1 ],
+[ 0.91, -0.93, 0 ],
+[ -0.71, -0.06, 1 ],
+[ 0.76, -0.61, 0 ],
+[ -0.27, 0.96, 1 ],
+[ 0.04, -0.02, 1 ],
+[ 0.28, 0.39, 0 ],
+[ -0.4, 0.8100000000000001, 1 ],
+[ 0.45, -0.48, 1 ],
+[ -0.91, -0.68, 0 ],
+[ -0.33, 0.78, 1 ],
+[ 0.93, -0.02, 1 ],
+[ 0.14, -0.77, 0 ],
+[ -0.8200000000000001, -0.91, 0 ],
+[ 0.6, -0.71, 0 ],
+[ -0.15, -0.08, 1 ],
+[ -0.14, 0.28, 0 ],
+[ -0.77, -0.49, 1 ],
+[ -0.19, -0.68, 0 ],
[ 0.88, -0.99, 0 ],
-[ -0.35, 0.16, 1 ],
-[ 0.46, 0.5, 1 ],
-[ -0.09, 0.9, 0 ],
-[ -0.27, 0.96, 0 ],
-[ -0.19, 0.2, 1 ],
-[ 0.23, 0.31, 1 ],
-[ -0.12, -0.74, 1 ],
-[ -0.47, 0.5700000000000001, 1 ],
-[ -0.25, -0.62, 0 ],
-[ 0.54, 0.85, 1 ],
-[ -0.6900000000000001, 0.17, 1 ],
-[ -0.36, -0.89, 0 ],
-[ -0.49, -0.67, 0 ],
-[ -0.52, 0.05, 0 ],
-[ 0.72, -0.02, 0 ],
-[ 0.01, -0.18, 1 ],
-[ -0.01, 0.27, 0 ],
-[ 0.3, 0.27, 1 ],
-[ -0.47, 0.18, 1 ],
-[ 0.05, -0.45, 1 ],
-[ 0.02, -0.76, 1 ],
-[ 0.39, -0.19, 1 ],
-[ -0.5700000000000001, 0.1, 1 ],
-[ 0.35, 0.16, 1 ],
-[ 0.75, 0.42, 1 ],
-[ -0.17, 0.92, 0 ],
-[ 0.85, 0.07000000000000001, 0 ],
-[ 0.12, -0.89, 0 ],
-[ 0.45, 0.3, 1 ],
-[ -0.27, -0.06, 1 ],
-[ -0.43, 0.03, 0 ],
-[ -0.27, 0.28, 1 ],
-[ -0.67, 0.5600000000000001, 1 ],
-[ 0.6, -0.28, 0 ],
-[ 0.12, -0.74, 1 ],
-[ 0.91, 0.67, 1 ],
-[ 0.8300000000000001, -0.35, 0 ],
-[ 0.74, -0.18, 0 ],
-[ 0.96, -0.9500000000000001, 0 ],
-[ 0.25, -0.09, 1 ],
-[ -0.12, -0.12, 1 ],
-[ 0.01, -0.5700000000000001, 1 ],
-[ 0.86, -0.46, 0 ],
-[ 0.9500000000000001, 0.52, 0 ],
-[ -0.02, -0.31, 1 ],
-[ -0.1, -0.7000000000000001, 1 ],
-[ -0.6900000000000001, 0.24, 1 ],
-[ -0.48, -0.1, 0 ],
-[ -0.51, 0.88, 1 ],
-[ 0.01, 0.93, 0 ],
-[ 0.48, 0.03, 0 ],
-[ 0.55, -0.99, 0 ],
-[ 0.76, 0.73, 1 ],
-[ 0.9400000000000001, -0.22, 0 ],
-[ 0.17, -0.77, 0 ],
-[ -0.8200000000000001, -0.02, 0 ],
-[ 0.42, 0.54, 1 ],
-[ -0.16, -0.99, 0 ],
-[ 0.05, 0.55, 0 ],
-[ -0.47, -0.91, 0 ],
-[ -0.14, 0.75, 1 ],
-[ 0.1, -0.15, 1 ],
-[ 0.09, 0.17, 0 ],
-[ 0.48, -0.1, 0 ],
-[ -0.17, -0.18, 1 ],
-[ -0.31, 0.61, 1 ],
-[ -0.47, 0.03, 0 ],
-[ 0.59, -0.27, 0 ],
-[ 0.16, 0.88, 0 ],
-[ -0.92, -0.73, 0 ],
-[ -0.59, 0.08, 1 ],
-[ 0.67, -0.08, 0 ],
-[ -0.9400000000000001, -0.9400000000000001, 0 ],
-[ -0.04, 0.33, 0 ],
-[ -0.99, -0.18, 0 ],
-[ 0.73, -0.34, 0 ],
-[ -0.18, 0.79, 1 ],
-[ 0.65, -0.18, 0 ],
-[ -0.86, 0.26, 1 ],
-[ -0.63, 0.26, 1 ],
-[ 0.02, 0.15, 0 ],
-[ 0.52, 0.54, 1 ],
-[ 0.73, 0.04, 0 ],
-[ 0.13, -0.99, 0 ],
-[ 0.17, 0.21, 1 ],
-[ 0.28, 0.47, 1 ],
-[ -0.51, 0.47, 1 ],
-[ -0.16, 0.9, 0 ],
-[ 0.06, -0.35, 1 ],
-[ 0.33, 0.9, 1 ],
-[ -0.51, -0.17, 0 ],
-[ -1., -0.96, 0 ],
-[ 0.02, -0.62, 1 ],
-[ 0.02, 0.76, 0 ],
-[ 0.46, 0.26, 1 ],
-[ -0.28, -0.5700000000000001, 0 ],
-[ 0.27, -0.79, 0 ],
-[ -0.99, -0.96, 0 ],
-[ -0.7000000000000001, -0.87, 0 ],
-[ -0.4, 0.29, 1 ],
-[ 0.21, 0.55, 1 ],
-[ 0.24, -0.26, 1 ],
-[ -0.15, -0.46, 1 ],
-[ -0.21, 0.52, 1 ],
-[ -0.17, 0.58, 1 ],
-[ 0.23, 0.78, 1 ],
-[ 0.64, 0.87, 1 ],
-[ -0.7000000000000001, -0.92, 0 ],
-[ -0.07000000000000001, 0.98, 0 ],
-[ 0.07000000000000001, 0.08, 0 ],
-[ 0.22, -0.61, 0 ],
-[ -0.17, -0.33, 1 ],
-[ -0.37, 0.27, 1 ],
-[ 0.8300000000000001, 0.11, 0 ],
-[ 0.44, -0.15, 0 ],
-[ 0.7000000000000001, 0.33, 1 ],
-[ -0.42, 0.25, 1 ],
-[ -0.26, -0.68, 0 ],
-[ 0.1, 0.13, 0 ],
-[ 0.3, 0.23, 1 ],
-[ -0.38, 0.5, 1 ],
-[ 0.07000000000000001, -0.8, 1 ],
-[ -0.13, -0.07000000000000001, 1 ],
-[ -0.3, -0.73, 0 ],
-[ -0.13, 0.59, 1 ],
-[ 0.86, -1., 0 ],
-[ 0.5700000000000001, -0.45, 0 ],
-[ -0.04, 0.78, 0 ],
-[ -0.28, 0.14, 1 ],
-[ 0.64, 0.39, 1 ],
-[ -0.52, -0.17, 0 ],
-[ 0.06, -0.5700000000000001, 1 ],
-[ 0.36, 0.67, 1 ],
-[ -0.41, -0.04, 1 ],
-[ -0.1, -0.08, 1 ],
-[ 0.48, -0.59, 0 ],
-[ 0.78, 0.49, 1 ],
-[ 0.71, -0.27, 0 ],
-[ -0.27, -0.22, 1 ],
-[ -0.05, 0.47, 0 ],
-[ -0.67, -0.74, 0 ],
-[ 0.06, 0.18, 0 ],
-[ 0.12, -0.86, 0 ],
-[ 0.88, 0.88, 0 ],
-[ -0.05, 0.77, 0 ],
-[ -0.11, 0.71, 1 ],
-[ 0.55, 0.63, 1 ],
-[ -0.41, -0.26, 0 ],
-[ 0.59, -0.18, 0 ],
-[ -0.47, 0.8, 1 ],
-[ -0.26, -0.68, 0 ],
-[ -0.9500000000000001, -0.22, 0 ],
-[ -0.59, -0.6900000000000001, 0 ],
-[ 0.77, 0.8300000000000001, 1 ],
-[ 0.09, 0.65, 1 ],
-[ 0.43, 0.14, 1 ],
-[ -0.67, 0.96, 0 ],
-[ 0.22, -0.22, 1 ],
-[ 0.51, -0.38, 0 ],
-[ 0.6, -0.51, 0 ],
-[ -0.1, -0.3, 1 ],
-[ 0.1, -0.16, 1 ],
-[ -0.65, -0.28, 0 ],
-[ -0.8300000000000001, -0.66, 0 ],
-[ -0.05, 0.84, 0 ],
-[ 0.55, 0.49, 1 ],
-[ -0.59, 0.18, 1 ],
-[ 0.23, 0.97, 0 ],
-[ -0.2, -0.5700000000000001, 1 ],
-[ 0.36, -0.18, 1 ],
-[ 0.3, 0.14, 1 ],
-[ 0.5700000000000001, -0.9500000000000001, 0 ],
-[ -0.03, -0.96, 0 ],
-[ 0.63, -0.19, 0 ],
-[ 0.89, 0.99, 0 ],
-[ -0.09, 0.64, 1 ],
-[ 0.17, 0.3, 1 ],
-[ -0.64, -0.24, 0 ],
-[ 0.66, 0.28, 1 ],
-[ 0.37, 0.04, 0 ],
-[ -0.85, 0.22, 0 ],
-[ -0.04, 0.6, 0 ],
-[ 0.45, 0.04, 0 ],
-[ 0.03, -0.9500000000000001, 0 ],
-[ 0.78, 0.53, 1 ],
-[ 0.68, -0.29, 0 ],
-[ 0.3, -0.67, 0 ],
-[ 0.04, -0.15, 1 ],
-[ -0.7000000000000001, 0.2, 1 ],
-[ -0.29, 0.01, 0 ],
-[ -0.26, -0.01, 1 ],
-[ -0.58, 0.45, 1 ],
-[ -0.28, -0.67, 0 ],
-[ -0.8, 0.2, 1 ],
-[ -0.15, -0.62, 1 ],
-[ 0.28, 0.2, 1 ],
-[ 0.99, -0.63, 0 ],
-[ 0.19, 0.63, 1 ],
-[ 0.45, 0.04, 0 ],
-[ 0.8100000000000001, -0.38, 0 ],
-[ 0.13, 0.3, 1 ],
-[ -0.54, -0.38, 0 ],
-[ -0.47, 0.03, 0 ],
-[ -0.49, -0.4, 0 ],
-[ -0.19, 0.41, 1 ],
-[ 0.1, 0.9, 0 ],
-[ -0.23, -0.96, 0 ],
-[ 0.66, -0.08, 0 ],
-[ 0.34, 0.8200000000000001, 1 ],
-[ 0.29, -0.6900000000000001, 0 ],
-[ -0.05, 0.98, 0 ],
-[ -0.65, -0.35, 0 ],
-[ -0.18, 0.02, 0 ],
-[ 0.65, -0.04, 0 ],
-[ -0.86, -0.25, 0 ],
-[ -0.47, 0.66, 1 ],
-[ -0.7000000000000001, -0.9400000000000001, 0 ],
-[ -0.54, -0.97, 0 ],
-[ -0.74, 0.28, 1 ],
-[ 0.11, 0.37, 1 ],
-[ -0.73, 0.28, 1 ],
-[ 0.96, -0.86, 0 ],
-[ -0.15, 0.26, 1 ],
-[ -0.8300000000000001, -0.84, 0 ],
-[ 0.9500000000000001, -0.03, 0 ],
-[ 0.63, 0.53, 1 ],
-[ 0.06, 0.85, 0 ],
-[ -0.36, 0.36, 1 ],
-[ 0.3, -0.68, 0 ],
-[ -0.25, 0.04, 0 ],
-[ 0.13, 0.4, 1 ],
-[ -0.89, 0.51, 1 ],
-[ -0.01, -0.35, 1 ],
-[ -0.86, -0.34, 0 ],
-[ -0.21, -0.96, 0 ],
-[ 0.1, 0.13, 0 ],
-[ -0.22, -0.06, 1 ],
-[ 0.23, 0.02, 0 ],
-[ -0.5, -0.06, 0 ],
-[ 0.6, 0.2, 1 ],
-[ 0.47, 0.59, 1 ],
-[ -0.27, -0.89, 0 ],
-[ -0.17, 0.89, 0 ],
-[ -0.07000000000000001, -0.35, 1 ],
-[ 0.06, 0.8100000000000001, 0 ],
-[ -0.92, -0.9500000000000001, 0 ],
-[ -0.42, -0.48, 0 ],
-[ 0.75, -0.26, 0 ],
-[ 0.02, 0.73, 0 ],
-[ 0.9, 0.09, 0 ],
-[ -0.84, -0.8300000000000001, 0 ],
-[ -0.72, -0.3, 0 ],
-[ 0.32, -0.9400000000000001, 0 ],
-[ -0.01, 0.03, 0 ],
-[ -0.72, 0.8200000000000001, 1 ],
-[ 0.16, 0.91, 0 ],
-[ -0.65, -0.17, 0 ],
-[ 0.22, 0.24, 1 ],
-[ -0.24, 0.19, 1 ],
-[ 0.11, 0.3, 1 ],
-[ 0.55, -0.36, 0 ],
-[ 0.42, -0.02, 1 ],
-[ 0.79, -0.61, 0 ],
-[ -0.76, -0.67, 0 ],
-[ -0.04, 0.17, 0 ],
-[ -0.2, -0.5700000000000001, 1 ],
-[ -0.62, -0.98, 0 ],
-[ -0.24, -0.16, 1 ],
-[ -0.25, -0.5700000000000001, 0 ],
-[ 0.45, 0.7000000000000001, 1 ],
-[ -0.42, 0.03, 0 ],
-[ 0.45, -0.08, 1 ],
-[ -0.33, 0.19, 1 ],
-[ -0.08, 0.97, 0 ],
-[ 0.34, 0.87, 1 ],
-[ 0.23, -0.17, 1 ],
-[ -0.5700000000000001, -0.4, 0 ],
-[ 0.3, -0.42, 0 ],
-[ 0.42, 0.5600000000000001, 1 ],
-[ -0.23, -0.25, 1 ],
-[ 0.26, 0.14, 1 ],
-[ -0.02, -0.48, 1 ],
-[ 0.8, -0.9400000000000001, 0 ],
-[ -0.05, 0.11, 0 ],
-[ -0.43, 0.21, 1 ],
-[ -0.06, -0.03, 1 ],
-[ -0.18, 0.72, 1 ],
-[ 0.47, 0.19, 1 ],
-[ 0.18, 0.21, 1 ],
-[ 0.86, 0.02, 0 ],
-[ 0.53, 0.14, 1 ],
-[ -0.46, 0.39, 1 ],
-[ 0.98, -1., 0 ],
-[ -0.15, -0.98, 0 ],
-[ -0.98, 0.6900000000000001, 0 ],
-[ 0.35, 0.64, 1 ],
-[ 0.96, 0.05, 0 ],
-[ -0.01, 0.71, 0 ],
-[ -0.13, -0.39, 1 ],
-[ 0.48, 0.12, 1 ],
-[ -0.21, -0.1, 1 ],
-[ 0.7000000000000001, 0.8100000000000001, 1 ],
-[ -0.46, -0.22, 0 ],
-[ 0.13, 0.68, 1 ],
-[ -0.6, -0.3, 0 ],
-[ -0.27, 0.07000000000000001, 0 ],
-[ -0.3, -0.5700000000000001, 0 ],
-[ -0.28, 0.84, 1 ],
-[ -0.11, -0.85, 0 ],
-[ -0.23, 0.59, 1 ],
-[ -0.02, -0.22, 1 ],
-[ -0.42, 0.7000000000000001, 1 ],
-[ -0.16, -0.31, 1 ],
-[ -0.68, 0.4, 1 ],
-[ 0.7000000000000001, 0.41, 1 ],
-[ 0.71, -0.29, 0 ],
-[ 0.91, 0.48, 1 ],
-[ 0.36, -0.97, 0 ],
-[ -0.8300000000000001, -0.62, 0 ],
-[ 0.48, -0.2, 0 ],
-[ 0.52, -0.26, 0 ],
-[ -0.98, 0.03, 0 ],
-[ -0.29, 0.22, 1 ],
-[ -0.53, 1., 0 ],
-[ 0.05, 0.47, 0 ],
-[ -0.53, 0.09, 1 ],
-[ 0.11, -0.91, 0 ],
-[ -0.11, -0.44, 1 ],
-[ -0.87, 0.23, 0 ],
-[ 0.07000000000000001, 0.48, 1 ],
-[ 0.54, -0.29, 0 ],
-[ 0.59, -0.11, 0 ],
-[ -0.18, 0.9400000000000001, 0 ],
-[ -0.18, 0.79, 1 ],
-[ 0.22, -0.1, 1 ],
-[ 1., -0.99, 0 ],
-[ 0.16, 0.08, 0 ],
-[ -0.76, 0.27, 1 ],
-[ 0.65, -0.01, 0 ],
-[ -0.45, 0.38, 1 ],
-[ -0.24, -0.48, 1 ],
-[ -0.99, 0.16, 0 ],
-[ -0.6, 0.2, 1 ],
-[ 0.03, -0.18, 1 ],
+[ -0.67, 0.87, 1 ],
+[ -0.84, -0.5, 0 ],
+[ -0.35, -0.7000000000000001, 0 ],
+[ 0.6, -0.92, 0 ],
+[ -0.24, 0.47, 0 ],
+[ 0.26, 0.34, 0 ],
+[ 0.86, -0.63, 0 ],
+[ -0.9500000000000001, -0.02, 1 ],
+[ -0.31, 0.75, 0 ],
+[ -0.9500000000000001, 0.17, 1 ],
+[ -0.49, 0.8, 1 ],
+[ -0.24, -0.6900000000000001, 0 ],
+[ -0.37, 0.61, 0 ],
+[ 0.51, -0.37, 1 ],
+[ -0.58, -0.97, 0 ],
+[ 0.71, -0.4, 1 ],
+[ 0.06, 0.39, 0 ],
+[ 0.59, -0.17, 1 ],
+[ -0.98, -0.8, 0 ],
+[ -0.43, 0.72, 1 ],
+[ -0.5700000000000001, 0.49, 1 ],
+[ 0.58, 0.86, 1 ],
+[ -0.19, -0.51, 0 ],
+[ 0.43, -0.7000000000000001, 0 ],
+[ 0.76, 0.91, 1 ],
+[ -0.58, -0.38, 1 ],
+[ 0.44, 0.63, 1 ],
+[ 0.2, 0.97, 1 ],
+[ 0.11, -0.12, 1 ],
+[ -0.9, 0.3, 1 ],
+[ -0.17, 0.01, 1 ],
+[ 1., -0.47, 1 ],
+[ -0.3, 0.73, 0 ],
+[ 0.9500000000000001, 0.9500000000000001, 1 ],
+[ -0.72, -0.8, 0 ],
+[ -0.4, -0.62, 0 ],
+[ 0.8100000000000001, -0.21, 1 ],
+[ 0.47, 0.54, 1 ],
+[ -0.44, 0.5700000000000001, 1 ],
+[ -0.64, 0.22, 1 ],
+[ -0.68, 0.08, 1 ],
+[ 0.3, 0.86, 1 ],
+[ 0.96, 0.63, 1 ],
+[ -0.99, -0.47, 1 ],
+[ 0.01, 0.54, 0 ],
+[ 0.4, 0.85, 1 ],
+[ -0.97, -0.28, 1 ],
+[ 0.11, -0.41, 1 ],
+[ -0.42, 0.44, 1 ],
+[ -0.2, 0.5, 0 ],
+[ 0.77, 0.46, 1 ],
+[ -0.1, 0.62, 0 ],
+[ 0.87, -0.16, 1 ],
+[ -0.12, -0.37, 1 ],
[ -0.49, 0.2, 1 ],
-[ 0.75, 0.2, 1 ],
-[ 0.67, 0.55, 1 ],
-[ 0.51, 0.9500000000000001, 0 ],
-[ 0.15, -0.87, 0 ],
-[ -0.99, -0.28, 0 ],
-[ 0.9, -0.74, 0 ],
-[ 0.3, -0.07000000000000001, 1 ],
-[ -0.11, 0.53, 1 ],
-[ 0.13, -0.26, 1 ],
-[ 0.72, 0.04, 0 ],
-[ 0.36, 0.52, 1 ],
-[ 0.78, -0.85, 0 ],
-[ 0.51, 0.65, 1 ],
-[ -0.7000000000000001, 0.31, 1 ],
-[ 0.21, -0.66, 0 ],
-[ -0.58, -0.28, 0 ],
-[ 0.96, -0.26, 0 ],
-[ -0.09, -0.08, 1 ],
-[ -0.26, -0.14, 1 ],
-[ -0.86, 0.09, 0 ],
-[ 0.61, 0.23, 1 ],
-[ -0.1, -0.47, 1 ],
-[ 0.49, -0.76, 0 ],
-[ -0.03, 0.21, 0 ],
-[ 0.59, 0.26, 1 ],
-[ -0.64, -0.11, 0 ],
-[ -0.23, 0.47, 1 ],
-[ 0.19, 0.23, 1 ],
-[ -0.21, 0.34, 1 ],
-[ 0.14, 0.17, 0 ],
-[ 0.3, -0.03, 1 ],
-[ 0.24, 0.12, 0 ],
-[ -0.76, 0.19, 1 ],
-[ -0.27, -0.01, 1 ],
-[ -0.06, -0.72, 1 ],
-[ 0.3, 0.5600000000000001, 1 ],
-[ 0.66, -0.26, 0 ],
-[ 0.66, -0.2, 0 ],
-[ 0.59, -0.67, 0 ],
-[ 0.73, -0.06, 0 ],
-[ 0.89, -0.43, 0 ],
-[ 0.51, 0.3, 1 ],
-[ 0.12, 0.08, 0 ],
-[ 0.2, -0.62, 0 ],
-[ 0.14, -0.01, 1 ],
-[ -0.25, 0.34, 1 ],
-[ 0.52, -0.2, 0 ],
-[ 0.52, -0.29, 0 ],
-[ 0.9, 0.68, 1 ],
-[ 0.17, 0.33, 1 ],
-[ -0.54, 0.7000000000000001, 1 ],
-[ 0.17, 0.37, 1 ],
-[ 0.3, 0.25, 1 ],
-[ -0.76, -0.79, 0 ],
-[ 0.5600000000000001, 0.02, 0 ],
-[ -0.24, -0.93, 0 ],
-[ 0.54, 0.08, 1 ],
-[ -0.39, -0.16, 1 ],
-[ 0.87, 0.98, 0 ],
-[ -0.24, 0.5700000000000001, 1 ],
-[ -0.16, -0.5700000000000001, 1 ],
-[ -0.3, -0.06, 1 ],
-[ -0.28, 0.29, 1 ],
-[ -0.03, -0.13, 1 ],
-[ -0.8200000000000001, 0.1, 0 ],
-[ -0.09, 0.4, 1 ],
-[ -0.18, 0.8100000000000001, 1 ],
-[ 0.27, -0.98, 0 ],
-[ -1., -0.9400000000000001, 0 ],
-[ -0.15, -0.14, 1 ],
-[ 0.25, -0.9500000000000001, 0 ],
-[ -0.78, -0.25, 0 ],
-[ -0.05, -0.52, 1 ],
-[ 0.27, -0.96, 0 ],
-[ -0.85, 0.7000000000000001, 1 ],
-[ -0.18, 0.14, 0 ],
-[ -0.29, 0.42, 1 ],
-[ -0.85, 0.07000000000000001, 0 ],
-[ -0.16, -0.66, 1 ],
-[ 0.14, -0.42, 1 ],
-[ -0.44, -0.4, 0 ],
-[ 0.23, 0.13, 0 ],
-[ 0.87, -0.41, 0 ],
-[ 0.21, 0.98, 0 ],
-[ -0.6, -0.5, 0 ],
-[ -0.4, 0.24, 1 ],
-[ 0.04, 0.63, 0 ],
-[ -0.18, -0.64, 1 ],
-[ -0.38, -0.38, 0 ],
-[ 0.8200000000000001, -0.91, 0 ],
-[ 0.29, -0.92, 0 ],
-[ -0.65, -0.89, 0 ],
-[ -0.27, 0.88, 1 ],
-[ -0.27, 0.24, 1 ],
-[ -0.17, -0.24, 1 ],
-[ 0.08, -0.68, 1 ],
-[ -0.04, 0.6, 0 ],
-[ 0.67, -0.03, 0 ],
-[ 0.84, -1., 0 ],
-[ 0.22, 0.51, 1 ],
-[ -0.92, -0.36, 0 ],
-[ 0.26, 0.01, 0 ],
-[ -0.49, -0.8200000000000001, 0 ],
-[ -0.88, -0.05, 0 ],
-[ 0.19, -0.17, 1 ],
-[ 0.71, -0.18, 0 ],
-[ 0.89, -0.16, 0 ],
-[ 0.44, 0.3, 1 ],
-[ 0.05, -0.26, 1 ],
-[ 0.84, -0.06, 0 ],
-[ 0.49, -0.07000000000000001, 0 ],
-[ -0.8, 0.09, 0 ],
-[ 0.32, -0.52, 0 ],
-[ -0.36, 0.25, 1 ],
-[ -0.22, 0.84, 1 ],
-[ -0.46, 0.9500000000000001, 0 ],
-[ 0.85, 0.46, 1 ],
-[ 0.42, 0.17, 1 ],
-[ 0.34, 0.21, 1 ],
-[ -0.18, -0.16, 1 ],
-[ 0.14, -0.54, 1 ],
-[ -0.61, -0.79, 0 ],
-[ -0.26, 0.23, 1 ],
-[ 0.45, 0.61, 1 ],
-[ 0.02, 0.2, 0 ],
-[ -0.74, 0.41, 1 ],
-[ -0.23, -0.5700000000000001, 0 ],
-[ -0.96, -0.99, 0 ],
-[ -0.27, 0.29, 1 ],
-[ 0.12, -0.06, 1 ],
-[ -0.44, 0.53, 1 ],
-[ -0.46, -0.7000000000000001, 0 ],
-[ 0.04, 0.45, 0 ],
-[ -0.19, 0.63, 1 ],
-[ 0.88, 0.87, 0 ],
-[ -0.16, 0.25, 1 ],
-[ 0.09, 0.3, 0 ],
-[ -0.13, -0.33, 1 ],
-[ -0.14, 0.65, 1 ],
-[ 0.16, 0.03, 0 ],
-[ 0.39, -0.26, 0 ],
-[ 0.26, 0.86, 1 ],
-[ 0.14, 0.46, 1 ],
-[ -0.31, -0.12, 1 ],
-[ -0.62, 0.35, 1 ],
-[ -0.05, -0.04, 1 ],
-[ -0.8, -0.19, 0 ],
-[ -0.63, 0.01, 0 ],
-[ 0.99, 0.28, 0 ],
-[ 0.21, 0.06, 0 ],
-[ -0.2, -0.19, 1 ],
-[ -0.05, -0.03, 1 ],
-[ -0.24, 0.68, 1 ],
-[ 0.14, 0.88, 0 ],
-[ 0.97, 0.61, 0 ],
-[ -0.03, -0.62, 1 ],
-[ 0.48, 0.89, 1 ],
-[ -0.3, -0.91, 0 ],
-[ -0.04, -0.05, 1 ],
-[ -0.6900000000000001, 0.01, 0 ],
-[ -0.87, 0.52, 1 ],
-[ -0.26, 0.88, 0 ],
-[ 0.66, -0.5600000000000001, 0 ],
-[ -0.08, -0.23, 1 ],
-[ -0.16, 0.09, 0 ],
-[ 0.86, 0.18, 0 ],
-[ -0.28, -0.11, 1 ],
-[ 0.99, 0.15, 0 ],
-[ -0.25, -0.07000000000000001, 1 ],
-[ 0.22, 0.8100000000000001, 1 ],
-[ 0.76, 0.61, 1 ],
-[ 0.03, -0.41, 1 ],
-[ 0.5600000000000001, -0.23, 0 ],
-[ 0.48, -0.33, 0 ],
-[ 0.52, -0.67, 0 ],
-[ 0.5600000000000001, 0.79, 1 ],
-[ 0.45, -0.76, 0 ],
-[ 0.47, -0.08, 0 ],
-[ -0.85, -0.15, 0 ],
-[ 0.04, -0.3, 1 ],
-[ -0.31, -0.78, 0 ],
-[ -0.3, -0.5600000000000001, 0 ],
-[ 0.8200000000000001, 0.49, 1 ],
-[ 0.3, -0.24, 1 ],
-[ -0.01, 0.12, 0 ],
-[ -0.78, -0.73, 0 ],
-[ -0.12, -1., 0 ],
-[ 0.47, 0.3, 1 ],
-[ 0.27, -0.16, 1 ],
-[ -0.6900000000000001, -0.93, 0 ],
-[ -0.14, -0.26, 1 ],
-[ 0.34, -0.19, 1 ],
-[ 0.1, 0.2, 0 ],
-[ 0.6900000000000001, 0.15, 1 ],
-[ -0.44, 0.46, 1 ],
-[ 0.28, 0.22, 1 ],
-[ -0.05, 0.24, 0 ],
-[ 0.51, 0.39, 1 ],
-[ 0.49, -0.06, 0 ],
-[ -0.39, -0.3, 0 ],
-[ -0.17, 0.9, 0 ],
-[ 0.39, 0.29, 1 ],
-[ -0.03, 0.55, 0 ],
-[ -0.21, 0.8200000000000001, 1 ],
-[ -0.8100000000000001, -0.45, 0 ],
-[ -1., 0.8, 0 ],
-[ -0.89, -0.68, 0 ],
-[ -0.99, -0.87, 0 ],
-[ 0.19, 0.78, 1 ],
-[ -0.2, -0.88, 0 ],
-[ 0.29, -0.2, 1 ],
-[ 0.84, 0.39, 1 ],
-[ -0.96, 0.79, 0 ],
-[ -1., 0.8, 0 ],
-[ 0.97, 0.16, 0 ],
-[ 0.76, 0.74, 1 ],
-[ 0.22, -0.3, 1 ],
-[ 0.08, 0.38, 1 ],
-[ 0.08, -0.8100000000000001, 1 ],
-[ 0.29, 0.5600000000000001, 1 ],
-[ -0.16, -0.22, 1 ],
-[ -0.24, 0.29, 1 ],
-[ -0.05, -0.37, 1 ],
-[ -0.18, 0.8, 1 ],
-[ -0.12, -0.66, 1 ],
-[ 0.67, 0.34, 1 ],
-[ 0.23, 0.65, 1 ],
-[ -0.61, -0.62, 0 ],
-[ -0.26, -0.21, 1 ],
-[ 0.37, -0.05, 1 ],
-[ 0.01, 0.9500000000000001, 0 ],
-[ -0.6, -0.37, 0 ],
-[ -0.61, -0.09, 0 ],
+[ 0.98, -0.06, 1 ],
+[ -0.8, 0.1, 1 ],
+[ -0.36, -0.32, 1 ],
+[ 0.27, 0.32, 0 ],
+[ 0.9500000000000001, -0.03, 1 ],
+[ 0.17, 0.66, 0 ],
+[ -0.98, -0.07000000000000001, 1 ],
+[ -0.87, -0.9500000000000001, 0 ],
+[ -0.08, -0.44, 1 ],
+[ -0.48, -0.36, 1 ],
+[ 0.22, 0.17, 0 ],
+[ 0.38, 0.01, 1 ],
+[ 0.99, 1., 1 ],
+[ -0.5600000000000001, -0.73, 0 ],
+[ 0.8200000000000001, -0.09, 1 ],
+[ -0.62, -0.23, 1 ],
+[ 0.49, 0.07000000000000001, 1 ],
+[ 0.87, 0.87, 1 ],
+[ -0.48, 0.78, 1 ],
+[ -0.18, -0.5700000000000001, 0 ],
+[ 0.77, -0.61, 0 ],
+[ 0.45, -0.74, 0 ],
+[ -0.9, 0.18, 1 ],
+[ -0.13, 0.43, 0 ],
+[ 0.26, 0.89, 1 ],
+[ 0.35, -0.68, 0 ],
+[ 0.34, -0.39, 1 ],
+[ -0.1, -0.6, 0 ],
+[ -0.72, -0.32, 1 ],
+[ -0.68, 0.32, 1 ],
+[ 0.02, -0.45, 1 ],
+[ -0.65, 0.04, 1 ],
+[ 0.84, 0.76, 1 ],
+[ -0.84, -0.32, 1 ],
+[ -0.4, -0.07000000000000001, 1 ],
+[ 0.35, -0.19, 1 ],
+[ 0.36, -0.65, 0 ],
+[ 0.67, -0.3, 1 ],
+[ 0.86, 0.65, 1 ],
+[ -0.38, 0.9400000000000001, 1 ],
+[ -0.79, 0.17, 1 ],
+[ 0.86, -0.8100000000000001, 0 ],
+[ 0.77, 0.98, 1 ],
+[ 0.21, -0.07000000000000001, 1 ],
+[ 0.6900000000000001, -0.11, 1 ],
+[ 0.86, -0.8100000000000001, 0 ],
+[ 0.8, 0.02, 1 ],
+[ -0.09, -0.39, 1 ],
+[ -0.64, 0.85, 1 ],
+[ 0.52, 0.05, 1 ],
+[ -0.74, -0.27, 1 ],
+[ 0.43, 0.1, 1 ],
+[ -0.13, 0.68, 0 ],
+[ -0.93, -0.06, 1 ],
+[ 0.58, -0.48, 1 ],
+[ -0.46, 0.15, 1 ],
+[ 0.47, 0.22, 1 ],
+[ -0.6900000000000001, 0.2, 1 ],
+[ 0.98, -0.54, 0 ],
+[ -0.3, 0.07000000000000001, 1 ],
+[ -0.39, -0.68, 0 ],
+[ -0.32, 0.02, 1 ],
+[ -0.13, 0.02, 1 ],
+[ 0.53, 0.11, 1 ],
+[ -0.21, 0.41, 0 ],
+[ -0.6900000000000001, -0.62, 0 ],
+[ -0.01, -0.68, 0 ],
+[ -0.17, 0.29, 0 ],
+[ -0.72, -1., 0 ],
+[ -0.85, 0.6900000000000001, 1 ],
+[ -0.72, -0.78, 0 ],
+[ -0.22, 0.07000000000000001, 1 ],
+[ 0.21, -0.98, 0 ],
+[ -0.8, -0.51, 0 ],
+[ 0.06, -0.78, 0 ],
+[ -0.92, -0.78, 0 ],
+[ -0.59, 0.5600000000000001, 1 ],
+[ 0.05, 0.87, 0 ],
+[ 0.16, -0.3, 1 ],
+[ -0.36, -0.1, 1 ],
+[ 0.55, -0.8, 0 ],
+[ 0.75, -0.5600000000000001, 0 ],
+[ -0.97, -0.79, 0 ],
+[ 0.48, -0.84, 0 ],
+[ 0.38, 0.04, 1 ],
+[ 0.01, -0.3, 1 ],
+[ 0.32, -0.44, 1 ],
+[ 0.17, -0.22, 1 ],
[ -0.08, -0.89, 0 ],
-[ 0.9500000000000001, -0.3, 0 ],
-[ -0.77, -0.05, 0 ],
-[ 0.02, 0.45, 0 ],
-[ -0.97, 0.61, 0 ],
-[ -0.54, 0.2, 1 ],
-[ 0.19, -0.28, 1 ],
-[ -0.75, -0.8200000000000001, 0 ],
-[ -0.48, 0.97, 0 ],
-[ 0.64, 0.09, 1 ],
-[ -0.34, 0.47, 1 ],
-[ -0.88, 0.23, 0 ],
-[ 0.46, 0.71, 1 ],
-[ 0.39, 0.79, 1 ],
-[ -0.29, -0.3, 1 ],
-[ -0.45, -0.79, 0 ],
-[ -0.24, -0.79, 0 ],
-[ -0.17, 0.73, 1 ],
-[ 0.32, 0.08, 0 ],
-[ -0.11, 0.92, 0 ],
-[ 0.22, -0.48, 1 ],
-[ -1., 0.11, 0 ],
-[ -0.13, 0.04, 0 ],
-[ -0.5600000000000001, 0.76, 1 ],
-[ 0.37, -0.54, 0 ],
-[ -0.84, -0.43, 0 ],
-[ -0.88, 0.67, 1 ],
-[ -0.79, 0.14, 0 ],
-[ -0.09, -0.49, 1 ],
-[ 0.21, -0.91, 0 ],
-[ 0.6, 0.03, 0 ],
-[ 0.16, -0.72, 0 ],
-[ -0.78, 0.34, 1 ],
-[ -0.22, -0.1, 1 ],
-[ 0.02, -0.65, 1 ],
-[ -0.25, 0.14, 1 ],
-[ 0.42, -0.2, 0 ],
-[ -0.33, -0.68, 0 ],
-[ -0.55, -0.52, 0 ],
-[ 0.68, -0.79, 0 ],
-[ 0.6, -0.6900000000000001, 0 ],
-[ 0.14, -0.7000000000000001, 1 ],
-[ -0.24, 0.06, 0 ],
-[ 1., -0.96, 0 ],
-[ -0.72, -0.9500000000000001, 0 ],
-[ 0.25, 0.48, 1 ],
-[ 0.98, 0.19, 0 ],
-[ -0.77, 0.34, 1 ],
-[ -0.71, 0.22, 1 ],
-[ -0.2, 0.11, 0 ],
-[ -0.15, 0.02, 0 ],
-[ -0.02, -0.22, 1 ],
-[ 0.37, -0.07000000000000001, 1 ],
-[ -0.18, 1., 0 ],
-[ 0.79, 0.72, 1 ],
-[ 0.14, 0.22, 0 ],
-[ -0.64, -0.05, 0 ],
-[ 0.8300000000000001, -0.19, 0 ],
-[ -0.13, 0.73, 1 ],
-[ -0.88, -0.78, 0 ],
-[ 0.65, 0.38, 1 ],
-[ -0.3, 0.75, 1 ],
-[ -0.91, 0.53, 1 ],
-[ -0.42, 0.12, 1 ],
-[ 0.71, -0.32, 0 ],
-[ -0.22, 0.39, 1 ],
-[ 0.17, 0.24, 1 ],
-[ -0.29, -0.07000000000000001, 1 ],
-[ 0.2, -0.84, 0 ],
-[ -0.48, -0.08, 0 ],
-[ -0.33, 0.7000000000000001, 1 ],
-[ 0.38, -0.09, 1 ],
-[ 0.6900000000000001, -0.88, 0 ],
-[ 0.03, 0.45, 0 ],
-[ -0.41, 0.8100000000000001, 1 ],
-[ 0.61, 0.75, 1 ],
-[ -0.04, -0.5, 1 ],
-[ -0.45, -0.9400000000000001, 0 ],
-[ 0.08, -0.09, 1 ],
-[ 0.17, 0.22, 1 ],
-[ -0.52, -0.8300000000000001, 0 ],
-[ 0.29, 0.19, 1 ],
-[ -0.64, -0.06, 0 ],
-[ 0.47, -0.08, 0 ],
-[ -0.8, 0.8100000000000001, 1 ],
-[ 0.61, -0.01, 0 ],
-[ -0.85, -0.1, 0 ],
-[ 0.99, -0.52, 0 ],
-[ 0.16, -0.42, 1 ],
-[ 0.63, -0.3, 0 ],
-[ -0.74, -0.3, 0 ],
-[ 0.61, 0.12, 1 ],
-[ 0.98, -0.12, 0 ],
-[ 0.54, 1., 0 ],
-[ -0.45, 0.11, 1 ],
-[ -0.09, -0.33, 1 ],
-[ 0.62, 0.17, 1 ],
-[ -0.03, 0.1, 0 ],
-[ 0.79, -0.15, 0 ],
-[ 0.27, 0.17, 1 ],
-[ 0.06, -0.18, 1 ],
-[ -0.38, -0.71, 0 ],
-[ -0.18, -0.7000000000000001, 0 ],
-[ 0.9, 0.05, 0 ],
-[ -0.51, -0.73, 0 ],
-[ -0.48, 0.26, 1 ],
-[ 0.99, -0.41, 0 ],
-[ 0.64, -0.17, 0 ],
-[ 0.63, 0.23, 1 ],
-[ -0.37, 0.59, 1 ],
-[ -0.52, 0.14, 1 ],
-[ 0.09, -0.63, 1 ],
-[ -0.93, 0.28, 0 ],
-[ 0.02, -0.2, 1 ],
-[ -0.15, -0.39, 1 ],
-[ 0.4, 0.01, 0 ],
-[ 0.71, 0.2, 1 ],
-[ 0.9400000000000001, -0.18, 0 ],
-[ 0.32, -0.9, 0 ],
-[ 0.3, 0.52, 1 ],
-[ -0.51, -0.9500000000000001, 0 ],
-[ -0.26, -0.72, 0 ],
-[ -0.14, -0.17, 1 ],
-[ 0.97, 0.48, 0 ],
-[ 0.1, -0.08, 1 ],
-[ -0.71, -0.9500000000000001, 0 ],
-[ -0.44, 0.24, 1 ],
-[ -0.39, 0.41, 1 ],
-[ 0.13, -0.78, 0 ],
-[ -0.14, 0.65, 1 ],
-[ -0.71, 0.44, 1 ],
-[ -0.6900000000000001, 0.06, 0 ],
-[ -0.31, -0.32, 1 ],
-[ -0.55, -0.75, 0 ],
-[ -0.54, 0.07000000000000001, 1 ],
-[ 0.05, 0.74, 0 ],
-[ 0.58, 0.01, 0 ],
-[ -0.21, 1., 0 ],
-[ 0.97, -0.68, 0 ],
-[ 0.03, 0.21, 0 ],
-[ -0.86, -0.05, 0 ],
-[ 0.1, -0.98, 0 ],
-[ 0.28, 0.12, 1 ],
-[ 0.1, -0.3, 1 ],
-[ -0.14, 0.27, 1 ],
-[ -0.78, 0.27, 1 ],
-[ -0.17, -0.24, 1 ],
-[ 0.62, -0.42, 0 ],
-[ 0.77, -0.2, 0 ],
-[ 0.14, -0.8, 0 ],
-[ -0.78, -0.77, 0 ],
-[ -0.46, 0.88, 1 ],
-[ -0.52, 0.79, 1 ],
-[ -0.84, 0.5700000000000001, 1 ],
-[ -0.38, 0.22, 1 ],
-[ -0.4, 0.73, 1 ],
-[ -0.8300000000000001, -0.12, 0 ],
-[ 0.25, -0.28, 1 ],
-[ 0.27, -0.34, 1 ],
-[ 0.23, -0.03, 1 ],
-[ 0.14, 0.5600000000000001, 1 ],
-[ -0.04, -0.22, 1 ],
-[ 0.52, 0.5700000000000001, 1 ],
-[ -0.75, -0.5700000000000001, 0 ],
-[ -0.68, 0.27, 1 ],
-[ 0.54, 0.05, 0 ],
-[ 0.78, 0.11, 0 ],
-[ 0.02, -0.3, 1 ],
-[ -0.5700000000000001, 0.28, 1 ],
-[ 0.34, 0.39, 1 ],
-[ 0.46, -0.6900000000000001, 0 ],
-[ 0.93, -0.3, 0 ],
-[ 0.15, 0.12, 0 ],
-[ 0.85, 0.99, 0 ],
-[ 0.47, 0.01, 0 ],
-[ -0.55, 0.67, 1 ],
-[ -0.5, 0.49, 1 ],
-[ -0.51, -0.68, 0 ],
-[ 0.78, 0.05, 0 ],
-[ -0.96, 0.09, 0 ],
-[ -0.23, 0.68, 1 ],
-[ 0.28, -0.93, 0 ],
-[ -0.54, 0.52, 1 ],
-[ -0.24, 0.5600000000000001, 1 ],
-[ 0.61, -0.21, 0 ],
-[ -0.5700000000000001, 0.22, 1 ],
-[ -0.47, -0.35, 0 ],
-[ -0.97, 0.22, 0 ],
-[ -0.37, 0.16, 1 ],
-[ 0.65, -0.13, 0 ],
-[ -0.14, -0.26, 1 ],
-[ 0.88, -0.65, 0 ],
-[ 0.98, -0.08, 0 ],
-[ 0.19, -0.14, 1 ],
-[ 0.47, 0.03, 0 ],
-[ -0.24, -0.28, 1 ],
-[ -0.07000000000000001, 0.29, 0 ],
-[ 0.88, -0.97, 0 ],
-[ -0.11, 0.08, 0 ],
-[ -0.42, 0.2, 1 ],
-[ 0.8, 0.86, 0 ],
-[ -0.06, 0.55, 1 ],
-[ 0.34, -0.2, 1 ],
-[ 0.23, 0.14, 0 ],
-[ 0.6900000000000001, -0.59, 0 ],
-[ -0.26, -0.29, 1 ],
-[ -0.04, -0.8, 1 ],
-[ -0.42, -0.54, 0 ],
-[ -0.87, -0.03, 0 ],
-[ 0.72, 0.19, 1 ],
-[ -0.27, -0.73, 0 ],
-[ -0.92, 0.96, 0 ],
-[ 0.41, -0.64, 0 ],
-[ 0.58, -0.18, 0 ],
-[ 0.05, -0.24, 1 ],
-[ -0.85, 0.01, 0 ],
-[ -0.37, 0.53, 1 ],
-[ -0.02, -0.22, 1 ],
-[ 0.02, -0.19, 1 ],
-[ -0.66, 0.9, 1 ],
-[ -1., -0.9500000000000001, 0 ],
-[ 0.1, 0.76, 0 ],
-[ 0.74, 0.49, 1 ],
-[ -0.35, -0.23, 1 ],
-[ 0.75, 0.55, 1 ],
-[ 0.71, -0.61, 0 ],
-[ 0.33, -0.39, 0 ],
-[ -0.2, -0.01, 1 ],
-[ -0.24, 0.28, 1 ],
-[ 0.07000000000000001, 0.13, 0 ],
-[ 0.26, -0.76, 0 ],
-[ -0.29, 0.15, 1 ],
-[ -0.59, 0.26, 1 ],
-[ -0.76, 0.06, 0 ],
-[ 0.93, -0.48, 0 ],
-[ 0.05, 0.37, 0 ],
-[ 0.25, -0.88, 0 ],
-[ -0.2, 0.02, 0 ],
-[ -0.9500000000000001, 0.31, 0 ],
-[ 0.64, -0.49, 0 ],
-[ 0.53, -0.16, 0 ],
-[ -0.75, 0.96, 0 ],
-[ 0.03, 0.5700000000000001, 0 ],
-[ -0.27, 0.13, 1 ],
-[ 0.84, 0.02, 0 ],
-[ 0.89, 0.01, 0 ],
-[ 0.08, 0.63, 1 ],
-[ -0.58, 0.05, 0 ],
-[ -0.9, -0.62, 0 ],
-[ 0.06, 0.9400000000000001, 0 ],
-[ -0.9500000000000001, -0.27, 0 ],
-[ 0.09, 0.13, 0 ],
-[ 0.15, 0.16, 0 ],
-[ 0.92, 0.8, 0 ],
-[ -0.14, -0.32, 1 ],
-[ -0.18, -1., 0 ],
-[ -0.89, -0.42, 0 ],
-[ -0.14, 0.88, 0 ],
-[ -0.44, -0.17, 0 ],
-[ -0.21, -0.14, 1 ],
-[ -0.31, -0.85, 0 ],
-[ 0.7000000000000001, 0.4, 1 ],
-[ 0.88, 0.44, 1 ],
-[ -0.3, -0.02, 1 ],
-[ 0.89, 0.88, 0 ],
-[ -0.73, -0.3, 0 ],
-[ 0.03, -0.5, 1 ],
-[ 0.41, 0.61, 1 ],
-[ -0.15, -0.13, 1 ],
-[ -0.35, 0.37, 1 ],
-[ 0.8300000000000001, -0.04, 0 ],
-[ -0.37, 0.85, 1 ],
-[ -0.46, 0.19, 1 ],
-[ 0.04, 1., 0 ],
-[ -0.41, 0.75, 1 ],
-[ -0.5600000000000001, 0.26, 1 ],
+[ 0.92, -0.35, 1 ],
+[ 0.35, -0.96, 0 ],
+[ -0.78, 0.8100000000000001, 1 ],
+[ 0.39, 0.37, 1 ],
+[ -0.43, 0.66, 1 ],
+[ 0.98, -0.51, 0 ],
+[ -0.2, 0.89, 1 ],
+[ 1., 0.25, 1 ],
+[ 0.58, -0.6900000000000001, 0 ],
+[ 0.54, 0.22, 1 ],
+[ -0.7000000000000001, -0.86, 0 ],
+[ 0.52, -0.9, 0 ],
+[ 0.8, -0.38, 1 ],
+[ -0.46, 0.84, 1 ],
+[ -0.14, 0.48, 0 ],
+[ 0.66, -0.71, 0 ],
+[ -0.72, -0.76, 0 ],
+[ 0.28, 0.19, 1 ],
+[ -0.86, -0.03, 1 ],
+[ -0.11, 0.9500000000000001, 1 ],
+[ -0.01, -0.33, 1 ],
+[ 0.38, -0.91, 0 ],
+[ 0.9, 0.08, 1 ],
+[ -0.7000000000000001, -0.05, 1 ],
+[ -0.47, 0.33, 1 ],
+[ -0.1, 0.02, 1 ],
+[ 0.84, -0.61, 0 ],
+[ -0.93, -0.55, 0 ],
+[ 0.89, 0.43, 1 ],
+[ -0.9500000000000001, -0.84, 0 ],
+[ -0.9400000000000001, 0.6, 1 ],
+[ 0.01, -0.67, 0 ],
+[ -0.58, 0.72, 1 ],
+[ 0.5600000000000001, -0.8, 0 ],
+[ -0.19, 0.74, 0 ],
+[ 0.25, 0.77, 0 ],
+[ 0.09, 0.73, 0 ],
+[ 0.51, 0.9400000000000001, 1 ],
+[ -0.77, -0.45, 1 ],
+[ -0.52, -0.09, 1 ],
+[ -0.5, 0.12, 1 ],
+[ 0.19, 0.52, 0 ],
[ -0.32, -0.64, 0 ],
-[ -0.77, 0.6, 1 ],
-[ -0.01, 0.89, 0 ],
-[ -0.5600000000000001, 0.09, 1 ],
-[ -0.27, -0.33, 1 ],
-[ 0.16, -0.06, 1 ],
-[ 0.06, 0.33, 0 ],
-[ 0.7000000000000001, 0.02, 0 ],
-[ -0.8, 0.18, 1 ],
-[ 0.18, -0.74, 0 ],
-[ -0.21, 0.6, 1 ],
-[ 0.35, -0.62, 0 ],
-[ -0.2, -0.27, 1 ],
-[ 0.9400000000000001, -0.34, 0 ],
-[ 0.93, -0.26, 0 ],
-[ -0.99, 0.24, 0 ],
-[ -0.3, -0.88, 0 ],
-[ -0.59, 0.62, 1 ],
-[ -0.1, -0.41, 1 ],
-[ -0.15, -0.93, 0 ],
-[ 0.63, -0.22, 0 ],
-[ 0.26, 0.01, 0 ],
-[ -0.47, 0.35, 1 ],
-[ 0.11, -0.1, 1 ],
-[ 0.19, -0.23, 1 ],
-[ -0.41, 0.37, 1 ],
-[ -0.23, -0.14, 1 ],
-[ 0.15, 0.22, 0 ],
-[ -0.52, 0.54, 1 ],
-[ -0.61, -0.03, 0 ],
-[ -0.47, 0.91, 1 ],
-[ 0.97, 0.29, 0 ],
-[ 0.38, -0.52, 0 ],
-[ 0.04, 0.13, 0 ],
-[ -0.02, 0.01, 0 ],
-[ 0.28, -0.63, 0 ],
-[ -0.03, -0.1, 1 ],
-[ 0.42, -0.04, 1 ],
-[ 0.88, -0.92, 0 ],
-[ -0.02, -0.13, 1 ],
-[ -0.14, 0.12, 0 ],
-[ 0.17, 0.48, 1 ],
-[ -0.15, -0.11, 1 ],
-[ 0.9400000000000001, 0.7000000000000001, 0 ],
-[ -0.65, -0.23, 0 ],
-[ 0.26, -0.14, 1 ],
-[ 0.87, -0.3, 0 ],
-[ -0.98, -0.07000000000000001, 0 ],
-[ 0.04, 0.49, 0 ],
-[ 0.23, 0.89, 0 ],
-[ 0.24, 0.22, 1 ],
-[ 0.44, -0.5700000000000001, 0 ],
-[ -0.05, -0.47, 1 ],
-[ 0.8, 0.3, 1 ],
-[ -0.05, 0.39, 0 ],
-[ -0.98, 0.28, 0 ],
-[ -0.14, -0.2, 1 ],
-[ 0.99, -0.09, 0 ],
-[ -0.64, 0.88, 1 ],
-[ -0.4, 0.28, 1 ],
-[ -0.38, -0.5700000000000001, 0 ],
-[ -0.43, -0.06, 1 ],
-[ -0.33, 0.27, 1 ],
-[ -0.22, 0.12, 0 ],
-[ 0.71, -0.6, 0 ],
-[ -0.61, 0.9, 1 ],
-[ -0.32, 0.19, 1 ],
-[ -0.71, -0.5, 0 ],
-[ 0.3, -0.01, 1 ],
-[ -0.26, 0.55, 1 ],
-[ 0.99, 0.72, 0 ],
-[ 0.3, 0.46, 1 ],
-[ 0.5, 0.08, 1 ],
-[ 0.97, -0.97, 0 ],
-[ 0.14, 0.02, 0 ],
-[ -0.09, 0.04, 0 ],
-[ -0.46, 0.21, 1 ],
-[ -0.1, 0.08, 0 ],
-[ -0.29, 0.88, 1 ],
-[ 0.59, 0.22, 1 ],
-[ -0.73, 0.18, 1 ],
-[ 0.77, 0.16, 1 ],
-[ -0.21, -0.54, 1 ],
-[ 0.5, 0.37, 1 ],
-[ 0.09, 0.77, 0 ],
-[ -0.66, 0.38, 1 ],
-[ -0.98, -0.4, 0 ],
-[ 0.48, 0.21, 1 ],
-[ 0.01, -0.07000000000000001, 1 ],
-[ 0.96, -0.14, 0 ],
-[ 0.71, -0.24, 0 ],
-[ -0.28, 0.64, 1 ],
-[ 0.48, -0.58, 0 ],
-[ -0.63, 0.92, 1 ],
-[ 0.14, 0.04, 0 ],
-[ -0.11, 0.54, 1 ],
-[ -0.04, 0.31, 0 ],
-[ -0.15, 0.68, 1 ],
-[ -0.27, 0.12, 1 ],
-[ -0.29, 0.35, 1 ],
-[ 0.5, -0.44, 0 ],
-[ 0.24, 0.46, 1 ],
-[ -0.5600000000000001, 0.78, 1 ],
-[ -0.22, 0.41, 1 ],
-[ 0.2, 0.15, 0 ],
-[ -0.12, 0.36, 1 ],
-[ 0.49, 0.23, 1 ],
-[ -0.01, 0.08, 0 ],
-[ -0.85, 0.3, 1 ],
-[ 0.62, 0.06, 0 ],
-[ -0.86, -0.1, 0 ],
-[ 0.09, -0.16, 1 ],
-[ -0.48, -0.28, 0 ],
-[ 0.04, 0.62, 0 ],
-[ -0.02, -0.65, 1 ],
-[ 0.99, 0.9500000000000001, 0 ],
-[ -0.9400000000000001, 0.74, 0 ],
-[ 0.51, 0.01, 0 ],
-[ 0.54, 0.26, 1 ],
-[ 0.67, -0.74, 0 ],
-[ -0.51, -0.21, 0 ],
-[ 0.66, 0.18, 1 ],
-[ 0.6900000000000001, 0.4, 1 ],
-[ 0.79, -0.04, 0 ],
-[ 0.14, 0.68, 1 ],
-[ -0.27, 0.17, 1 ],
-[ 0.05, -0.64, 1 ],
-[ -0.28, 0.99, 0 ],
-[ 0.58, -0.23, 0 ],
-[ -0.5600000000000001, -0.07000000000000001, 0 ],
-[ 0.27, -0.58, 0 ],
-[ -0.47, -0.52, 0 ],
-[ -0.25, 0.29, 1 ],
-[ 0.03, -0.28, 1 ],
-[ -0.16, 0.66, 1 ],
-[ -0.68, 0.02, 0 ],
-[ 0.37, 0.19, 1 ],
-[ 0.05, 0.61, 0 ],
-[ 0.99, 0.7000000000000001, 0 ],
-[ 0.97, 0.59, 0 ],
-[ 0.32, 0.28, 1 ],
-[ -0.64, -0.23, 0 ],
-[ -0.99, -0.13, 0 ],
-[ -0.44, -0.4, 0 ],
-[ -0.8200000000000001, -0.61, 0 ],
-[ 0.44, 0.13, 1 ],
-[ 0.19, -0.77, 0 ],
-[ -0.49, 0.54, 1 ],
-[ 0.6900000000000001, -0.27, 0 ],
-[ 0.13, 0.39, 1 ],
-[ -0.05, 0.02, 0 ],
-[ -0.13, 0.65, 1 ],
-[ 0.09, 0.2, 0 ],
-[ -0.3, -0.44, 0 ],
-[ 0.6, -0.41, 0 ],
-[ 0.18, 0.24, 1 ],
-[ 0.45, 0.98, 0 ],
-[ 0.8200000000000001, 0.8300000000000001, 0 ],
-[ -0.31, -0.39, 0 ],
-[ 0.18, 0.21, 1 ],
-[ -0.9, -0.08, 0 ],
-[ -0.21, 0.91, 0 ],
-[ -0.26, -0.52, 0 ],
-[ -0.8100000000000001, -0.8300000000000001, 0 ],
-[ 0.32, 0.8300000000000001, 1 ],
-[ -0.65, -0.12, 0 ],
-[ 0.19, -0.03, 1 ],
-[ -0.01, 0.17, 0 ],
-[ -0.44, -0.26, 0 ],
-[ 0.87, -0.2, 0 ],
-[ 0.21, 0.5, 1 ],
-[ -0.17, 0.05, 0 ],
-[ 0.19, 0.39, 1 ],
-[ -0.91, -0.19, 0 ],
-[ 0.19, 0.46, 1 ],
-[ -0.31, -0.03, 1 ],
-[ 0.17, -0.53, 1 ],
-[ 0.36, 0.07000000000000001, 0 ],
-[ 0.07000000000000001, -1., 0 ],
-[ 0.71, -0.96, 0 ],
-[ -0.05, -0.38, 1 ],
-[ 0.96, -0.87, 0 ],
-[ -0.42, -0.66, 0 ],
-[ -0.41, -0.9400000000000001, 0 ],
-[ -0.71, -0.17, 0 ],
-[ -0.11, -0.7000000000000001, 1 ],
-[ 0.04, -0.3, 1 ],
-[ -0.07000000000000001, -0.27, 1 ],
-[ -0.1, 0.42, 1 ],
-[ -0.79, 0.51, 1 ],
-[ 0.36, 0.79, 1 ],
-[ 0.01, 0.71, 0 ],
-[ 0.49, -0.08, 0 ],
-[ -0.12, -0.8200000000000001, 0 ],
-[ -0.66, -0.96, 0 ],
-[ 0.26, -0.3, 1 ],
-[ 0.01, -0.29, 1 ],
-[ 0.27, 0.77, 1 ],
-[ 0.09, 0.42, 1 ],
-[ 0.11, 0.6900000000000001, 1 ],
-[ 0.4, 0.02, 0 ],
-[ -0.46, -0.5, 0 ],
-[ -0.7000000000000001, -1., 0 ],
-[ -0.8100000000000001, 0.87, 0 ],
-[ -0.2, 0.8, 1 ],
-[ 0.6900000000000001, 0.72, 1 ],
-[ -0.27, -0.38, 1 ],
-[ 0.74, -0.08, 0 ],
-[ -0.11, -0.23, 1 ],
-[ 0.2, 0.66, 1 ],
-[ -0.73, -0.66, 0 ],
-[ 0.65, -0.17, 0 ],
-[ -0.05, -0.02, 1 ],
-[ 0.17, 0.68, 1 ],
-[ -0.37, 0.02, 0 ],
-[ -0.45, -0.18, 0 ],
-[ -0.68, 0.18, 1 ],
-[ -0.12, 0.31, 1 ],
-[ 0.21, -0.87, 0 ],
-[ 0.9500000000000001, 0.07000000000000001, 0 ],
-[ 0.68, -0.5600000000000001, 0 ],
-[ 0.41, 0.2, 1 ],
-[ -0.09, 0.91, 0 ],
-[ -0.01, -0.8200000000000001, 1 ],
-[ -0.97, -0.1, 0 ],
-[ 0.4, -0.19, 1 ],
-[ -0.37, 0.13, 1 ],
-[ 0.4, 0.76, 1 ],
-[ -0.2, -0.37, 1 ],
-[ 0.8100000000000001, 0.15, 0 ],
-[ 0.07000000000000001, -0.24, 1 ],
-[ -0.34, -0.13, 1 ],
-[ -0.68, 0.24, 1 ],
-[ -0.5700000000000001, -0.21, 0 ],
-[ -0.09, -0.93, 0 ],
-[ -0.62, 0.61, 1 ],
-[ -0.27, -0.76, 0 ],
-[ -0.12, -0.05, 1 ],
-[ -0.65, 0.75, 1 ],
-[ -0.8, 0.75, 1 ],
-[ -0.28, 0.58, 1 ],
-[ 0.28, -0.5, 0 ],
-[ 0.1, 0.09, 0 ],
-[ -0.87, 0.58, 1 ],
-[ 0.09, -0.01, 1 ],
-[ -0.66, 0.01, 0 ],
-[ -0.42, 0.79, 1 ],
-[ -0.11, -0.42, 1 ],
-[ 0.52, -0.11, 0 ],
-[ -0.37, 0.3, 1 ],
-[ 0.27, -0.5, 0 ],
-[ 0.49, 0.9400000000000001, 1 ],
-[ 0.25, 0.02, 0 ],
-[ 0.08, 0.33, 0 ],
-[ 0.25, 0.05, 0 ],
-[ -0.87, -0.67, 0 ],
-[ 0.8200000000000001, -0.85, 0 ],
-[ 0.27, -0.02, 1 ],
-[ -0.39, -0.23, 0 ],
-[ -0.12, -0.13, 1 ],
-[ -0.36, -0.74, 0 ],
-[ 0.5600000000000001, 0.8100000000000001, 1 ],
-[ 0.2, 0.18, 1 ],
-[ -0.52, -0.04, 0 ],
-[ 0.11, 0.04, 0 ],
-[ 0.8200000000000001, 0.1, 0 ],
-[ -0.5700000000000001, 0.3, 1 ],
-[ 0.26, -0.29, 1 ],
-[ 0.14, 0.21, 0 ],
-[ 0.8200000000000001, -0.6900000000000001, 0 ],
-[ -0.06, -0.21, 1 ],
-[ 0.34, -0.35, 0 ],
-[ 0.24, 0.32, 1 ],
-[ 0.17, 0.08, 0 ],
-[ -0.5700000000000001, 0.38, 1 ],
-[ -0.62, 0.02, 0 ],
-[ 0.96, -0.15, 0 ],
-[ -0.3, -0.02, 1 ],
-[ 0.08, -0.28, 1 ],
-[ -0.25, 0.5, 1 ],
-[ -0.03, 0.86, 0 ],
-[ -0.27, 0.07000000000000001, 0 ],
-[ -0.65, -0.97, 0 ],
-[ 0.87, 0.37, 1 ],
-[ -0.49, 0.16, 1 ],
-[ 0.25, -0.2, 1 ],
-[ -0.86, -0.47, 0 ],
-[ 0.07000000000000001, -0.59, 1 ],
-[ -0.02, 0.17, 0 ],
-[ -0.05, -0.09, 1 ],
-[ -0.15, -0.76, 0 ],
-[ -0.21, 0.23, 1 ],
-[ -0.06, 0.68, 0 ],
-[ -0.96, -0.33, 0 ],
-[ -0.52, 0.09, 1 ],
-[ 0.92, 0.5700000000000001, 1 ],
-[ 0.9, -0.79, 0 ],
-[ -0.29, 0.72, 1 ],
-[ 0.3, -0.05, 1 ],
-[ 0.86, -0.12, 0 ],
-[ -0.44, 0.42, 1 ],
-[ 0.9400000000000001, 0.9500000000000001, 0 ],
-[ 0.9, 0.76, 0 ],
-[ -0.25, -0.55, 0 ],
-[ 0.26, -0.43, 1 ],
-[ -0.11, 0.35, 1 ],
-[ 0.24, 0.01, 0 ],
-[ 0.12, -0.01, 1 ],
-[ 0.1, 0.8100000000000001, 0 ],
-[ 0.21, 0.04, 0 ],
-[ 0.21, -0.04, 1 ],
-[ 0.12, 0.15, 0 ],
-[ -0.72, -0.11, 0 ],
-[ -0.8100000000000001, 0.03, 0 ],
-[ 0.02, 0.7000000000000001, 0 ],
-[ 1., -0.66, 0 ],
-[ 0.28, 0.42, 1 ],
-[ -0.86, 0.27, 1 ],
-[ 0.61, 0.62, 1 ],
-[ -0.29, 0.97, 0 ],
-[ 0.31, 0.29, 1 ],
-[ -0.05, 0.73, 0 ],
-[ -0.79, -0.28, 0 ],
-[ -0.03, 0.32, 0 ],
-[ 0.17, 0.18, 0 ],
-[ -0.08, 0.45, 1 ],
-[ 0.12, 0.25, 0 ],
-[ 0.44, -0.12, 1 ],
-[ -0.9500000000000001, 0.61, 0 ],
-[ -0.31, 0.17, 1 ],
-[ -0.58, 1., 0 ],
-[ 0.03, 0.58, 0 ],
-[ -0.2, -0.73, 0 ],
-[ 0.09, 0.61, 1 ],
-[ -0.3, -0.31, 1 ],
-[ 0.06, 0.71, 0 ],
-[ 0.9500000000000001, -0.15, 0 ],
-[ -0.85, 0.5, 1 ],
-[ -0.28, -0.86, 0 ],
-[ -0.7000000000000001, 0.4, 1 ],
-[ 0.68, 0.32, 1 ],
-[ -0.44, 0.24, 1 ],
-[ -0.89, 0.8200000000000001, 0 ],
-[ 0.9500000000000001, -0.97, 0 ],
-[ -0.35, 0.27, 1 ],
-[ -0.24, 0.3, 1 ],
-[ 0.5700000000000001, 0.96, 0 ],
-[ -0.3, 0.9, 0 ],
-[ 0.85, -0.1, 0 ],
-[ 0.32, 0.7000000000000001, 1 ],
-[ 0.73, 0.46, 1 ],
-[ 0.49, -0.25, 0 ],
-[ -0.97, -0.42, 0 ],
-[ -0.96, 0.74, 0 ],
-]
\ No newline at end of file
+[ 0.89, -0.38, 1 ],
+[ 0.93, -0.68, 0 ],
+[ -0.3, 0.44, 0 ],
+[ -0.24, -0.67, 0 ],
+[ -0.35, -0.23, 1 ],
+[ -0.37, 0.51, 0 ],
+[ -0.89, 0.09, 1 ],
+[ 0.72, -0.89, 0 ],
+[ -0.99, 0.9400000000000001, 1 ],
+[ -0.1, 0.13, 0 ],
+[ -0.8200000000000001, -0.58, 0 ],
+[ 0.39, 0.64, 1 ],
+[ -0.86, -0.7000000000000001, 0 ],
+[ -0.43, -0.63, 0 ],
+[ -0.01, 0.02, 1 ],
+[ -0.87, 0.9, 1 ],
+[ -0.68, 0.21, 1 ],
+[ -0.72, -0.72, 0 ],
+[ -0.47, -0.9500000000000001, 0 ],
+[ -0.91, 0.13, 1 ],
+[ 1., 0.9400000000000001, 1 ],
+[ -0.41, -0.8200000000000001, 0 ],
+[ -0.8300000000000001, 0.15, 1 ],
+[ -0.68, 0.49, 1 ],
+[ 0.5700000000000001, -0.72, 0 ],
+[ 0.87, -0.12, 1 ],
+];
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/neural_network.g b/examples/NeuralNetwork_BinaryCrossEntropy/neural_network.g
index 794dd0e..a6d2f1d 100644
--- a/examples/NeuralNetwork_BinaryCrossEntropy/neural_network.g
+++ b/examples/NeuralNetwork_BinaryCrossEntropy/neural_network.g
@@ -1,61 +1,213 @@
-LoadPackage( "GradientDescentForCAP" );
+#! @Chapter Examples for neural networks
+#! @Section Binary-class neural network with binary cross-entropy loss function
-Smooth := SkeletalSmoothMaps;
-Lenses := CategoryOfLenses( Smooth );
-Para := CategoryOfParametrisedMorphisms( Smooth );
+LoadPackage( "GradientBasedLearningForCAP" );
+#! This example demonstrates how to train a small feed-forward neural network
+#! for a binary classification task using the $\texttt{GradientBasedLearningForCAP}$
+#! package. We use the binary cross-entropy loss and optimise the network
+#! parameters with gradient descent.
+#!
+#! The dataset consists of points $(x_1, x_2) \in \mathbb{R}^2$ labelled by a
+#! non-linear decision rule describing two regions that form $\emph{class 0}$:
+#! @BeginLatexOnly
+#! \[
+#! x_1^2 + (x_2 - 0.5)^2 \le 0.16
+#! \qquad\text{(inside a circle of radius $0.4$ centred at $(0,0.5)$)}
+#! \]
+#! together with
+#! \[
+#! x_2 \le -0.5
+#! \qquad\text{(below the horizontal line)}
+#! \]
+#! @EndLatexOnly
+#!
+#! All remaining points belong to $\emph{class 1}$.
-## The function we are trying minimize
-f := LossMorphismOfNeuralNetwork( Para, 2, [ 6, 6, 6 ], 1, "Sigmoid" );
+#! @BeginLatexOnly
+#! \begin{center}
+#! \includegraphics[width=0.5\textwidth]{../examples/NeuralNetwork_BinaryCrossEntropy/data/scatter_plot_training_examples.png}
+#! \end{center}
+#! @EndLatexOnly
-## One epoch update of the parameters
-optimizer := Lenses.GradientDescentOptimizer( : learning_rate := 0.01 );
+#! Hence the classification
+#! boundary is not linearly separable and requires a non-linear model.
+#! We build a neural network with three hidden layers and a sigmoid output, fit
+#! it on the provided training examples for several epochs, and then evaluate
+#! the trained model on a grid of input points to visualise the learned
+#! decision regions.
-training_examples_path := "data/training_examples.txt";
-
-batch_size := 1;
-
-one_epoch_update := OneEpochUpdateLens( f, optimizer, training_examples_path, batch_size );
-
-## Initialize the parameters and apply updates nr_epochs times
-# Affine transformation 1:
-# [[-0.8253305 -0.5519657 0.5695067 -0.55729055 0.31805855 0.61735636]
-# [-0.35807824 0.5614671 0.22939318 0.35675365 -0.18503785 0.33399004]
-# [ 0. 0. 0. 0. 0. 0. ]]
-#
-# Affine transformation 2:
-# [[ 0.7025898 0.40877253 0.20995468 -0.44061258 -0.22173643 -0.13360518]
-# [ 0.07342285 -0.3142012 0.6646419 -0.41498005 -0.05349869 0.417332 ]
-# [ 0.55641776 0.18377596 -0.48378092 -0.21800154 -0.03115106 0.58989066]
-# [-0.6362306 0.62437385 -0.16361392 -0.5767567 -0.23374996 0.41223532]
-# [-0.51186776 -0.11312515 0.38304657 0.05555409 0.14883518 0.673979 ]
-# [ 0.5672495 -0.62053126 0.58642703 0.463453 -0.16229266 0.61534077]
-# [ 0. 0. 0. 0. 0. 0. ]]
-#
-# Affine transformation 3:
-# [[ 0.6278847 0.30747253 0.6831216 -0.00804061 0.2587368 0.58708113]
-# [ 0.25971985 -0.56481445 -0.05164099 0.5079209 -0.2920736 -0.44521216]
-# [ 0.6427106 -0.3777305 0.51464826 0.60129505 0.22585636 -0.4216755 ]
-# [ 0.37545222 -0.5169188 0.01861829 0.60314924 0.11511129 0.61970514]
-# [-0.33681872 0.6038982 0.16922754 0.01404923 0.6107089 0.12995255]
-# [-0.41838637 -0.00476396 -0.13253736 0.3037265 -0.5459578 0.13572395]
-# [ 0. 0. 0. 0. 0. 0. ]]
-#
-# Affine transformation 4:
-# [[ 0.5750419 ]
-# [-0.05423206]
-# [-0.7180484 ]
-# [ 0.41225922]
-# [ 0.43444705]
-# [ 0.6085528 ]
-# [ 0. ]]
-
-# Initial weights as vector:
-w :=[ 0.5750418901443481, -0.05423206090927124, -0.7180483937263489, 0.41225922107696533, 0.4344470500946045, 0.6085528135299683, 0.0, 0.6278846859931946, 0.2597198486328125, 0.6427106261253357, 0.3754522204399109, -0.33681872487068176, -0.4183863699436188, 0.0, 0.3074725270271301, -0.5648144483566284, -0.37773048877716064, -0.5169187784194946, 0.6038982272148132, -0.004763960838317871, 0.0, 0.6831216216087341, -0.051640987396240234, 0.5146482586860657, 0.018618285655975342, 0.16922754049301147, -0.1325373649597168, 0.0, -0.00804060697555542, 0.5079209208488464, 0.6012950539588928, 0.6031492352485657, 0.014049232006072998, 0.3037264943122864, 0.0, 0.258736789226532, -0.2920736074447632, 0.22585636377334595, 0.11511129140853882, 0.6107088923454285, -0.5459578037261963, 0.0, 0.5870811343193054, -0.44521215558052063, -0.42167550325393677, 0.6197051405906677, 0.1299525499343872, 0.13572394847869873, 0.0, 0.7025898098945618, 0.07342284917831421, 0.5564177632331848, -0.6362305879592896, -0.5118677616119385, 0.5672494769096375, 0.0, 0.4087725281715393, -0.3142012059688568, 0.18377596139907837, 0.6243738532066345, -0.11312514543533325, -0.6205312609672546, 0.0, 0.20995467901229858, 0.6646419167518616, -0.4837809205055237, -0.1636139154434204, 0.38304656744003296, 0.5864270329475403, 0.0, -0.4406125843524933, -0.41498005390167236, -0.21800154447555542, -0.5767567157745361, 0.055554091930389404, 0.4634529948234558, 0.0, -0.22173643112182617, -0.053498685359954834, -0.03115105628967285, -0.23374995589256287, 0.1488351821899414, -0.16229265928268433, 0.0, -0.13360518217086792, 0.4173319935798645, 0.5898906588554382, 0.4122353196144104, 0.6739789843559265, 0.6153407692909241, 0.0, -0.8253304958343506, -0.3580782413482666, 0.0, -0.5519657135009766, 0.5614671111106873, 0.0, 0.5695067048072815, 0.22939318418502808, 0.0, -0.5572905540466309, 0.3567536473274231, 0.0, 0.3180585503578186, -0.18503785133361816, 0.0, 0.6173563599586487, 0.33399003744125366, 0.0 ];
+#! @BeginLatexOnly
+#! Concretely, we choose three hidden layers, each with 6 neurons:
+#! \[
+#! \texttt{hidden\_layers} = [6,6,6].
+#! \]
+#! With input dimension \(2\) and output dimension \(1\), the affine maps between
+#! consecutive layers therefore have the following matrix dimensions (together
+#! bias vectors):
+#! \[
+#! \binom{W_1}{b_1} \in \mathbb{R}^{ 3 \times 6},\quad
+#! \binom{W_2}{b_2} \in \mathbb{R}^{ 7 \times 6},\quad
+#! \binom{W_3}{b_3} \in \mathbb{R}^{ 7 \times 6},\quad
+#! \binom{W_4}{b_4} \in \mathbb{R}^{ 7 \times 1}.
+#! \]
+#! Equivalently, each layer computes for an input $a_k$ the output \(z_{k+1} := (a_k\;\;1)\binom{W_{k+1}}{b_{k+1}}=a_k W_{k+1} + b_{k+1}\), where
+#! \(a_0 \in \mathbb{R}^2\), \(a_1,a_2,a_3 \in \mathbb{R}^6\), and the final output
+#! lies in \(\mathbb{R}^1\).
+#! The non-linear activation function ReLU is applied after each hidden layer.
+#! And Sigmoid is applied after the final layer to obtain a probability
+#! estimate for class \(1\).
+#! \[
+#! a_0 \mapsto
+#! \color{red}\mathbf{Sigmoid}\left(
+#! \color{blue}\left(
+#! \color{green}\mathbf{Relu}\left(
+#! \color{yellow}\left(
+#! \color{red}\mathbf{Relu}\left(
+#! \color{blue}\left(
+#! \color{green}\mathbf{Relu}\left(
+#! \color{yellow}\left(
+#! a_0\;\; 1
+#! \color{yellow}\right)
+#! \binom{W_1}{b_1}
+#! \color{green}\right)
+#! \;\; 1
+#! \color{blue}\right)
+#! \binom{W_2}{b_2}
+#! \color{red}\right)
+#! \;\; 1
+#! \color{yellow}\right)
+#! \binom{W_3}{b_3}
+#! \color{green}\right)
+#! \;\; 1
+#! \color{blue}\right)
+#! \binom{W_4}{b_4}
+#! \color{red}\right) \in \mathbb{R}^1
+#! \]
+#! If that probability is greater than \(0.5\), we predict class \(1\), otherwise class \(0\).
+#! That is, the total number of parameters (weights and biases) is \(109\).
+#! After training, we obtain a weight vector \(w \in \mathbb{R}^{109}\).
+#! The first $7$ entries of $w$ correspond to the column $\binom{W_4}{b_4}\in \mathbb{R}^{7\times 1}$,
+#! the next $42$ entries correspond to the concatenation of the columns of $\binom{W_3}{b_3}\in \mathbb{R}^{7\times 6}$, and so on.
+#! @EndLatexOnly
+#! @Example
+Smooth := SkeletalSmoothMaps;
+#! SkeletalSmoothMaps
+Lenses := CategoryOfLenses( Smooth );
+#! CategoryOfLenses( SkeletalSmoothMaps )
+Para := CategoryOfParametrisedMorphisms( Smooth );
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+hidden_layers := [ 6, 6, 6 ];;
+f := NeuralNetworkLossMorphism( Para, 2, hidden_layers, 1, "Sigmoid" );;
+optimizer := Lenses.GradientDescentOptimizer( : learning_rate := 0.01 );
+#! function( n ) ... end
+training_examples_path := Filename(
+ DirectoriesPackageLibrary("GradientBasedLearningForCAP", "examples")[1],
+ "NeuralNetwork_BinaryCrossEntropy/data/training_examples.txt" );;
+batch_size := 2;
+#! 2
+one_epoch_update := OneEpochUpdateLens( f, optimizer,
+ training_examples_path, batch_size );
+#! (ℝ^109, ℝ^109) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^109 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^109 -> ℝ^109
+nr_weights := RankOfObject( Source( PutMorphism( one_epoch_update ) ) );
+#! 109
+rs := RandomSource( IsMersenneTwister, 1 );;
+w := List( [ 1 .. nr_weights ], i -> 0.001 * Random( rs, [ -1000 .. 1000 ] ) );;
+w{[ 1 .. 5 ]};
+#! [ 0.789, -0.767, -0.613, -0.542, 0.301 ]
nr_epochs := 25;
+#! 25
+w := Fit( one_epoch_update, nr_epochs, w : verbose := true );;
+#! Epoch 0/25 - loss = 0.6274786697292678
+#! Epoch 1/25 - loss = 0.50764552556010512
+#! Epoch 2/25 - loss = 0.46701509497218296
+#! Epoch 3/25 - loss = 0.43998434603387304
+#! Epoch 4/25 - loss = 0.41390897205434185
+#! Epoch 5/25 - loss = 0.38668229524419645
+#! Epoch 6/25 - loss = 0.3615103023137366
+#! Epoch 7/25 - loss = 0.33852687543477167
+#! Epoch 8/25 - loss = 0.31713408584173464
+#! Epoch 9/25 - loss = 0.29842876608165969
+#! Epoch 10/25 - loss = 0.28310739567373933
+#! Epoch 11/25 - loss = 0.26735508537538627
+#! Epoch 12/25 - loss = 0.25227135017462571
+#! Epoch 13/25 - loss = 0.23858070423434527
+#! Epoch 14/25 - loss = 0.22557724727481232
+#! Epoch 15/25 - loss = 0.2151923109202202
+#! Epoch 16/25 - loss = 0.20589044111812799
+#! Epoch 17/25 - loss = 0.19857151366814263
+#! Epoch 18/25 - loss = 0.19229381748983518
+#! Epoch 19/25 - loss = 0.18814544378812006
+#! Epoch 20/25 - loss = 0.18465371077598913
+#! Epoch 21/25 - loss = 0.18166012790192537
+#! Epoch 22/25 - loss = 0.17685616213693178
+#! Epoch 23/25 - loss = 0.17665872918251943
+#! Epoch 24/25 - loss = 0.17073585936950184
+#! Epoch 25/25 - loss = 0.16744783175344116
+w;
+#! [ 1.47751, -0.285187, -1.87358, -1.87839, 0.687266,
+#! -0.88329, -0.607225, 0.57876, 0.084489, 1.1218,
+#! 0.289778, -1.15844, 0.562299, -0.725222, 0.724775,
+#! 0.643942, 0.202536, 0.131565, 0.768751, -0.345379,
+#! -0.147853, -1.52103, -1.26183, 1.39931, 0.00143737,
+#! -0.819752, -0.90015, -0.534457, 0.74204, -0.768,
+#! -1.85381, 0.225274, -0.384199, 1.1034, 0.82565,
+#! 0.423966, 0.719847, 0.487972, 0.266537, -0.442324,
+#! 0.520839, 0.306871, -0.205834, -0.314044, 0.0395323,
+#! -0.489954, -0.368816, 0.305383, -0.181872, 0.775344,
+#! -0.57507, -0.792, -0.937068, 1.39995, -0.0236236,
+#! 0.370827, -0.778542, -0.783943, 0.034, 0.343554,
+#! -1.00419, 0.857391, -1.07632, -0.677147, 0.839605,
+#! 0.719, 1.40418, -0.221851, 1.29824, 0.510027,
+#! 0.217811, 0.344086, 0.579, 0.576412, 0.070248,
+#! -0.145523, 0.468713, 0.680618, 0.199966, -0.497,
+#! -0.408801, 0.0519444, -0.597412, 0.137205, 1.25696,
+#! -0.0884903, -0.252, -0.721624, -1.25962, 0.894349,
+#! 0.447327, -1.00492, -1.54383, 0.464574, -0.723211,
+#! -0.108064, -0.486439, -0.385, -0.484, -0.862,
+#! -0.121845, 1.0856, 1.09068, 1.69466, 0.938733,
+#! 0.529301, -0.465345, 1.23872, 1.07609 ]
+predict := NeuralNetworkPredictionMorphism( Para, 2, hidden_layers, 1, "Sigmoid" );
+#! ℝ^2 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^109
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^111 -> ℝ^1
+predict_given_w := ReparametriseMorphism( predict, Smooth.Constant( w ) );
+#! ℝ^2 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^0
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^2 -> ℝ^1
+predict_using_w := UnderlyingMorphism( predict_given_w );
+#! ℝ^2 -> ℝ^1
+inputs := Cartesian( 0.1 * [ -10 .. 10 ], 0.1 * [ -10 .. 10 ] );;
+predictions := List( inputs, x ->
+ SelectBasedOnCondition( predict_using_w( x )[1] > 0.5, 1, 0 ) );;
+# ScatterPlotUsingPython( inputs, predictions );
+#! @EndExample
-w := Fit( one_epoch_update, nr_epochs, w );
-
-# After 10 epochs
+#! Executing the command $\texttt{ScatterPlotUsingPython( inputs, predictions );}$ produces the following plot:
+#! @BeginLatexOnly
+#! \begin{center}
+#! \includegraphics[width=0.5\textwidth]{../examples/NeuralNetwork_BinaryCrossEntropy/scatter_plot_predictions.png}
+#! \end{center}
+#! @EndLatexOnly
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/predict/predict.g b/examples/NeuralNetwork_BinaryCrossEntropy/predict/predict.g
deleted file mode 100644
index 0b9f73b..0000000
--- a/examples/NeuralNetwork_BinaryCrossEntropy/predict/predict.g
+++ /dev/null
@@ -1,29 +0,0 @@
-LoadPackage( "GradientDescentForCAP" );
-
-
-Smooth := SkeletalSmoothMaps;
-Para := CategoryOfParametrisedMorphisms( Smooth );
-
-
-## The function we are trying minimize
-predict := PredictionMorphismOfNeuralNetwork( Para, 2, [ 6, 6, 6 ], 1, "Sigmoid" );
-
-w :=
-[ -2.38307, 1.15643, -2.66248, 0.202663, 1.60522, 1.59726, 0.863359, -0.0672719, 1.01209, 0.917226, 0.37308, -0.874496, -1.47228, 2.07342,
- 0.89595, -0.680186, -0.231272, -0.517491, 1.06231, -0.272462, -0.0247753, 2.65761, 0.442513, 0.740077, 0.031517, 0.642174, 1.99124, -3.92456,
- -0.222431, 0.18775, 0.532778, 0.595967, 0.00709807, 0.0340034, -0.479791, 0.939005, -0.526106, 0.297613, 0.114837, 1.25861, -0.927486,
- -0.0156671, 1.22388, -0.931856, -0.538889, 0.612112, 1.06765, 1.34294, 1.26254, 1.28733, 0.102527, 0.283199, -2.90589, -0.00263669, 1.21756,
- 0.923131, 1.2773, -2.62932, -0.805291, 0.85247, -2.66598, -0.562073, -0.328354, 1.58834, 0.783209, -0.457507, -2.02291, 1.03056, 1.68808,
- 0.0119393, -0.440639, -0.411077, -0.285951, -0.575991, -0.109411, 0.369939, -0.166541, -1.02908, 0.0936507, -0.205397, -2.38514, 0.102405,
- -0.998627, 0.660356, 0.510562, 0.865732, 0.440473, 1.10545, 0.718466, 0.678435, -1.08617, -4.02352, -2.32914, -1.8468, -3.33328, -0.0866419,
- 0.0746266, 1.18941, 0.231497, -0.188211, -0.112001, 4.46077, 0.15901, 3.51666, -0.105457, -0.241886, 3.87409, -2.16175, -1.90423 ];
-
-## Let us use w to predict:
-predict_using_w := UnderlyingMorphism( ReparametriseMorphism( predict, Smooth.Constant( w ) ) );
-
-## create inputs:
-inputs := Cartesian( 0.01 * [ -100 .. 100 ], 0.01 * [ -100 .. 100 ] );
-predictions := List( inputs, x -> SelectBasedOnCondition( predict_using_w( x )[1] > 0.5, 1, 0 ) );
-
-
-ScatterPlotUsingPython( inputs, predictions );
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/predict/scatter_plot_predictions.png b/examples/NeuralNetwork_BinaryCrossEntropy/predict/scatter_plot_predictions.png
deleted file mode 100644
index 2fc0182..0000000
Binary files a/examples/NeuralNetwork_BinaryCrossEntropy/predict/scatter_plot_predictions.png and /dev/null differ
diff --git a/examples/NeuralNetwork_BinaryCrossEntropy/scatter_plot_predictions.png b/examples/NeuralNetwork_BinaryCrossEntropy/scatter_plot_predictions.png
new file mode 100644
index 0000000..74f63dd
Binary files /dev/null and b/examples/NeuralNetwork_BinaryCrossEntropy/scatter_plot_predictions.png differ
diff --git a/examples/NeuralNetwork_CrossEntropy/README.md b/examples/NeuralNetwork_CrossEntropy/README.md
deleted file mode 100644
index ec16912..0000000
--- a/examples/NeuralNetwork_CrossEntropy/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-1) To create train and test examples see "create_train_test_examples.g"
-2) To visualize the training (or test) examples see "plot_training_test_examples.g"
-3) To explore the neural network in CAP see "neural_network.g"
-4) To create the same neural network in TensorFlow see "neural_network.py"
diff --git a/examples/NeuralNetwork_CrossEntropy/__pycache__/neural_network_tf.cpython-310.pyc b/examples/NeuralNetwork_CrossEntropy/__pycache__/neural_network_tf.cpython-310.pyc
deleted file mode 100644
index 207e539..0000000
Binary files a/examples/NeuralNetwork_CrossEntropy/__pycache__/neural_network_tf.cpython-310.pyc and /dev/null differ
diff --git a/examples/NeuralNetwork_CrossEntropy/data/create_train_test_examples.g b/examples/NeuralNetwork_CrossEntropy/data/create_train_test_examples.g
deleted file mode 100644
index b047edd..0000000
--- a/examples/NeuralNetwork_CrossEntropy/data/create_train_test_examples.g
+++ /dev/null
@@ -1,64 +0,0 @@
-
-
-# locate the current dir
-current_dir := DirectoryCurrent( );
-
-# create a file for the training dataset
-
-# we have 4 classes in the plan
-
-# class 0: everything outside of classes 1, 2 or 4
-
-# class 1: everything inside the circle: (x1-0.5)^2 + (x2-0.5)^2 - 0.20 = 0
-# i.e., the solutions of: (x1-0.5)^2 + (x2-0.5)^2 - 0.20 <= 0
-
-# class 2: everything inside the circle: (x1+0.5)^2 + (x2-0.5)^2 - 0.20 = 0
-# i.e., the solutions of: (x1+0.5)^2 + (x2-0.5)^2 - 0.20 <= 0
-
-# class 3: the polytop defined by the points (0.5, 0), (-0.5, 0), (0, -1)
-# i.e., the common solutions of the inequalities: x2 <= 0, 1 - 2x1 + x2 >= 0, 1 + 2x1 + x2 >= 0
-
-files := [ "training_examples.txt", "test_examples.txt" ];
-nr_examples := [ 3000, 100 ];
-
-noise := 0.5;
-
-for i in [ 1, 2 ] do
-
- file := Filename( current_dir, files[i] );
-
- PrintTo( file, "[\n" );
-
- for j in [ 1 .. nr_examples[i] ] do
-
- x1 := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
- x2 := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
-
- if (x1 - 0.5)^2 + (x2 - 0.5)^2 - 0.20 <= 0. then
-
- label := [ 0, 1, 0, 0 ];
-
- elif (x1 + 0.5)^2 + (x2 - 0.5)^2 - 0.20 <= 0. then
-
- label := [ 0, 0, 1, 0 ];
-
- elif x2 <= 0. and 1 - 2 * x1 + x2 >= 0. and 1 + 2 * x1 + x2 >= 0. then
-
- label := [ 0, 0, 0, 1 ];
-
- else
-
- label := [ 1, 0, 0, 0 ];
-
- fi;
-
- AppendTo( file, Concatenation( [ x1, x2 ], label ), ",\n" );
-
- od;
-
- AppendTo( file, "]" );
-
-od;
-
-Display( "Done!" );
-QUIT;
diff --git a/examples/NeuralNetwork_CrossEntropy/data/generate_examples.g b/examples/NeuralNetwork_CrossEntropy/data/generate_examples.g
new file mode 100644
index 0000000..03350dc
--- /dev/null
+++ b/examples/NeuralNetwork_CrossEntropy/data/generate_examples.g
@@ -0,0 +1,74 @@
+LoadPackage( "GradientBasedLearningForCAP" );
+
+# create a file for the training dataset
+
+# we have 3 classes in the plan
+
+# class 0 ~ [1, 0, 0]:
+#! everything inside the circle: x1^2 + (x2-0.5)^2 - 0.16 = 0
+# i.e., the solutions of: x1^2 + (x2-0.5)^2 - 0.16 <= 0
+# class 1 ~ [0, 1, 0]:
+# everything below the line: x2 = -0.5,
+# i.e., the solutions of: x2 + 0.5 <= 0
+# class 2 ~ [0, 0, 1]: everything else
+
+
+nr_examples := 500;
+nr_examples_per_class := Int( nr_examples / 3 );
+
+class_0_count := 0;
+class_1_count := 0;
+class_2_count := 0;
+
+training_examples := [ ];
+
+while class_0_count < nr_examples_per_class do
+ x := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+ y := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+ if x^2 + (y - 0.5)^2 - 0.16 <= 0. then
+ Add( training_examples, Concatenation( [ x, y ], [ 1, 0, 0 ] ) );
+ class_0_count := class_0_count + 1;
+ fi;
+od;;
+
+while class_1_count < nr_examples_per_class do
+ x := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+ y := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+ if y + 0.5 <= 0. then
+ Add( training_examples, Concatenation( [ x, y ], [ 0, 1, 0 ] ) );
+ class_1_count := class_1_count + 1;
+ fi;
+od;;
+
+while class_2_count < nr_examples_per_class do
+ x := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+ y := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+ if not ( x^2 + (y - 0.5)^2 - 0.16 <= 0. or y + 0.5 <= 0. ) then
+ Add( training_examples, Concatenation( [ x, y ], [ 0, 0, 1 ] ) );
+ class_2_count := class_2_count + 1;
+ fi;
+od;;
+
+
+# shuffle the training examples
+training_examples := Shuffle( training_examples );
+
+# write the training examples to a file
+files_name := "training_examples.txt";
+file := Filename( DirectoryCurrent( ), files_name );
+
+PrintTo( file, "[\n" );
+for example in training_examples do
+ AppendTo( file, example, ",\n" );
+od;
+AppendTo( file, "];" );
+
+# plotting the dataset
+file := Filename( DirectoryCurrent( ), files_name );
+data := EvalString( IO_ReadUntilEOF( IO_File( file ) ) );
+x := List( data, e -> [ e[1], e[2] ] );
+y := List( data, e -> Position( e{[3,4,5]}, 1 ) - 1 );
+ScatterPlotUsingPython( x, y );
+
+Display( "Done!" );
+QUIT;
diff --git a/examples/NeuralNetwork_CrossEntropy/data/plot_training_test_examples.g b/examples/NeuralNetwork_CrossEntropy/data/plot_training_test_examples.g
deleted file mode 100644
index 22af161..0000000
--- a/examples/NeuralNetwork_CrossEntropy/data/plot_training_test_examples.g
+++ /dev/null
@@ -1,10 +0,0 @@
-LoadPackage( "GradientDescentForCAP" );
-
-file := IO_File( "training_examples.txt" );
-#file := IO_File( "test_examples.txt" );
-
-examples := EvalString( IO_ReadUntilEOF( file ) );
-
-points := List( examples, example -> example{[1, 2]} );
-labels := List( examples, example -> Position( example{[3 .. 6]}, 1 ) );
-ScatterPlotUsingPython( points, labels : size := "100" );
diff --git a/examples/NeuralNetwork_CrossEntropy/data/scatter_plot_training_examples.png b/examples/NeuralNetwork_CrossEntropy/data/scatter_plot_training_examples.png
index 688cca1..d01de8e 100644
Binary files a/examples/NeuralNetwork_CrossEntropy/data/scatter_plot_training_examples.png and b/examples/NeuralNetwork_CrossEntropy/data/scatter_plot_training_examples.png differ
diff --git a/examples/NeuralNetwork_CrossEntropy/data/test_examples.txt b/examples/NeuralNetwork_CrossEntropy/data/test_examples.txt
deleted file mode 100644
index 35953ef..0000000
--- a/examples/NeuralNetwork_CrossEntropy/data/test_examples.txt
+++ /dev/null
@@ -1,102 +0,0 @@
-[
-[ -0.35, 0.8300000000000001, 0, 0, 1, 0 ],
-[ -0.8200000000000001, -0.68, 1, 0, 0, 0 ],
-[ -0.51, 0.76, 0, 0, 1, 0 ],
-[ 0.84, -0.5600000000000001, 1, 0, 0, 0 ],
-[ -0.53, -0.19, 1, 0, 0, 0 ],
-[ -0.59, 0.12, 0, 0, 1, 0 ],
-[ 0.1, -0.17, 0, 0, 0, 1 ],
-[ -0.38, -0.45, 1, 0, 0, 0 ],
-[ 0.09, 0.73, 1, 0, 0, 0 ],
-[ 0.8200000000000001, -0.75, 1, 0, 0, 0 ],
-[ 0.38, 0.71, 0, 1, 0, 0 ],
-[ -0.7000000000000001, 0.72, 0, 0, 1, 0 ],
-[ 0.28, -0.13, 0, 0, 0, 1 ],
-[ -0.5700000000000001, -0.44, 1, 0, 0, 0 ],
-[ 0.96, -0.74, 1, 0, 0, 0 ],
-[ 0.75, -1., 1, 0, 0, 0 ],
-[ 0.06, -0.62, 0, 0, 0, 1 ],
-[ -0.63, -0.86, 1, 0, 0, 0 ],
-[ -0.71, 0.73, 0, 0, 1, 0 ],
-[ -0.07000000000000001, 0.35, 1, 0, 0, 0 ],
-[ 0.59, 0.18, 0, 1, 0, 0 ],
-[ -0.6, 0.31, 0, 0, 1, 0 ],
-[ -0.51, 0.32, 0, 0, 1, 0 ],
-[ 0.51, -0.99, 1, 0, 0, 0 ],
-[ 0.5700000000000001, 0.51, 0, 1, 0, 0 ],
-[ -0.77, -0.35, 1, 0, 0, 0 ],
-[ 0.4, -0.18, 0, 0, 0, 1 ],
-[ -0.9, 0.28, 1, 0, 0, 0 ],
-[ 0.12, 0.1, 1, 0, 0, 0 ],
-[ -0.3, -0.51, 1, 0, 0, 0 ],
-[ 0.5700000000000001, -0.17, 1, 0, 0, 0 ],
-[ -0.2, 0.8200000000000001, 0, 0, 1, 0 ],
-[ -0.5700000000000001, -0.17, 1, 0, 0, 0 ],
-[ -0.64, 0.2, 0, 0, 1, 0 ],
-[ 0.93, -0.73, 1, 0, 0, 0 ],
-[ 0.5, 0.14, 0, 1, 0, 0 ],
-[ -0.31, 0.39, 0, 0, 1, 0 ],
-[ -0.89, -0.33, 1, 0, 0, 0 ],
-[ -0.77, 0.53, 0, 0, 1, 0 ],
-[ 0.9500000000000001, -0.74, 1, 0, 0, 0 ],
-[ -0.6900000000000001, -0.5700000000000001, 1, 0, 0, 0 ],
-[ -0.8200000000000001, -0.78, 1, 0, 0, 0 ],
-[ 0.8300000000000001, -0.65, 1, 0, 0, 0 ],
-[ 0.74, 0.71, 0, 1, 0, 0 ],
-[ -0.61, 0.29, 0, 0, 1, 0 ],
-[ 0.25, -0.58, 1, 0, 0, 0 ],
-[ 0.07000000000000001, -0.27, 0, 0, 0, 1 ],
-[ -0.65, 0.64, 0, 0, 1, 0 ],
-[ -0.33, 0.5, 0, 0, 1, 0 ],
-[ 0.8100000000000001, -0.63, 1, 0, 0, 0 ],
-[ -0.04, -0.52, 0, 0, 0, 1 ],
-[ 0.8300000000000001, 0.64, 0, 1, 0, 0 ],
-[ -0.23, 0.24, 0, 0, 1, 0 ],
-[ 0.65, -0.24, 1, 0, 0, 0 ],
-[ -0.62, -0.53, 1, 0, 0, 0 ],
-[ 0.49, 0.85, 0, 1, 0, 0 ],
-[ 0.05, 0.04, 1, 0, 0, 0 ],
-[ 0.41, 0.02, 1, 0, 0, 0 ],
-[ -0.61, -0.88, 1, 0, 0, 0 ],
-[ 0.58, 0.12, 0, 1, 0, 0 ],
-[ -0.74, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.77, -1., 1, 0, 0, 0 ],
-[ 0.61, 0.45, 0, 1, 0, 0 ],
-[ 0.44, 0.64, 0, 1, 0, 0 ],
-[ 0.36, 0.31, 0, 1, 0, 0 ],
-[ 0.3, 0.9, 1, 0, 0, 0 ],
-[ 0.01, -0.88, 0, 0, 0, 1 ],
-[ 0.86, 0.16, 1, 0, 0, 0 ],
-[ 0.35, -0.8100000000000001, 1, 0, 0, 0 ],
-[ 0.8, -0.14, 1, 0, 0, 0 ],
-[ 0.51, -0.99, 1, 0, 0, 0 ],
-[ 0.91, 0.48, 0, 1, 0, 0 ],
-[ -0.59, 0.52, 0, 0, 1, 0 ],
-[ -0.22, 0.87, 1, 0, 0, 0 ],
-[ -0.32, 0.27, 0, 0, 1, 0 ],
-[ 0.28, -0.79, 1, 0, 0, 0 ],
-[ -0.34, 0.54, 0, 0, 1, 0 ],
-[ -0.88, 0.72, 0, 0, 1, 0 ],
-[ -0.54, 0.06, 0, 0, 1, 0 ],
-[ 0.08, -0.21, 0, 0, 0, 1 ],
-[ -0.88, 0.52, 0, 0, 1, 0 ],
-[ -0.78, -0.66, 1, 0, 0, 0 ],
-[ -0.99, 0.52, 1, 0, 0, 0 ],
-[ -0.51, 0.6, 0, 0, 1, 0 ],
-[ 0.65, 0.96, 1, 0, 0, 0 ],
-[ -0.13, -0.32, 0, 0, 0, 1 ],
-[ 0.75, -0.5600000000000001, 1, 0, 0, 0 ],
-[ 0.5600000000000001, -0.96, 1, 0, 0, 0 ],
-[ 0.73, -0.01, 1, 0, 0, 0 ],
-[ -0.36, 0.49, 0, 0, 1, 0 ],
-[ -0.44, 0.67, 0, 0, 1, 0 ],
-[ 0.62, -0.25, 1, 0, 0, 0 ],
-[ -0.58, -0.37, 1, 0, 0, 0 ],
-[ 0.42, 0.46, 0, 1, 0, 0 ],
-[ -0.41, 0.16, 0, 0, 1, 0 ],
-[ 0.32, -0.09, 0, 0, 0, 1 ],
-[ -0.63, 0.97, 1, 0, 0, 0 ],
-[ 0.79, -0.5, 1, 0, 0, 0 ],
-[ 0.62, -0.64, 1, 0, 0, 0 ],
-[ -0.06, 0.64, 1, 0, 0, 0 ],
-]
\ No newline at end of file
diff --git a/examples/NeuralNetwork_CrossEntropy/data/training_examples.txt b/examples/NeuralNetwork_CrossEntropy/data/training_examples.txt
index 7c857a3..9a05695 100644
--- a/examples/NeuralNetwork_CrossEntropy/data/training_examples.txt
+++ b/examples/NeuralNetwork_CrossEntropy/data/training_examples.txt
@@ -1,3002 +1,500 @@
[
-[ 0.59, -0.85, 1, 0, 0, 0 ],
-[ -0.76, -0.72, 1, 0, 0, 0 ],
-[ -0.55, -0.99, 1, 0, 0, 0 ],
-[ 0.12, -0.3, 0, 0, 0, 1 ],
-[ -0.96, -0.8100000000000001, 1, 0, 0, 0 ],
-[ 0.97, 0.6900000000000001, 1, 0, 0, 0 ],
-[ -0.45, -0.5600000000000001, 1, 0, 0, 0 ],
-[ -0.36, -0.77, 1, 0, 0, 0 ],
-[ 0.8, 0.6900000000000001, 0, 1, 0, 0 ],
-[ 0.72, 0.37, 0, 1, 0, 0 ],
-[ -0.33, -0.06, 0, 0, 0, 1 ],
-[ 0.9400000000000001, 0.28, 1, 0, 0, 0 ],
-[ 0.05, 0.8, 1, 0, 0, 0 ],
-[ -0.04, 0.63, 1, 0, 0, 0 ],
-[ 0.9, -0.13, 1, 0, 0, 0 ],
-[ 0.49, -0.85, 1, 0, 0, 0 ],
-[ -0.71, 0.6, 0, 0, 1, 0 ],
-[ -0.68, 0.61, 0, 0, 1, 0 ],
-[ -0.19, 0.8100000000000001, 0, 0, 1, 0 ],
-[ 0.17, -0.09, 0, 0, 0, 1 ],
-[ -0.49, 0.73, 0, 0, 1, 0 ],
-[ -0.22, 0.29, 0, 0, 1, 0 ],
-[ 0.27, -0.75, 1, 0, 0, 0 ],
-[ 0.22, 0.53, 0, 1, 0, 0 ],
-[ -0.78, -0.18, 1, 0, 0, 0 ],
-[ 0.46, -0.19, 1, 0, 0, 0 ],
-[ -0.33, -0.34, 1, 0, 0, 0 ],
-[ 0.9400000000000001, -0.51, 1, 0, 0, 0 ],
-[ 0.52, -0.24, 1, 0, 0, 0 ],
-[ 0.46, -0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.8100000000000001, -0.38, 1, 0, 0, 0 ],
-[ 0.39, 0.93, 0, 1, 0, 0 ],
-[ 0.37, -0.99, 1, 0, 0, 0 ],
-[ 0.38, 0.79, 0, 1, 0, 0 ],
-[ -0.33, 0.9, 0, 0, 1, 0 ],
-[ -0.98, 0.18, 1, 0, 0, 0 ],
-[ 0.28, 0.61, 0, 1, 0, 0 ],
-[ 0.96, 0.89, 1, 0, 0, 0 ],
-[ -0.44, 0.11, 0, 0, 1, 0 ],
-[ 0.5700000000000001, 0.47, 0, 1, 0, 0 ],
-[ 0.03, -0.05, 0, 0, 0, 1 ],
-[ -0.76, -0.37, 1, 0, 0, 0 ],
-[ -0.06, 0.06, 1, 0, 0, 0 ],
-[ -0.1, 0.1, 1, 0, 0, 0 ],
-[ -0.99, 0.79, 1, 0, 0, 0 ],
-[ -0.18, -0.88, 1, 0, 0, 0 ],
-[ 0.41, 0.31, 0, 1, 0, 0 ],
-[ 0.71, -0.44, 1, 0, 0, 0 ],
-[ 0.42, 0.85, 0, 1, 0, 0 ],
-[ 0.39, -0.21, 0, 0, 0, 1 ],
-[ -0.45, 0.85, 0, 0, 1, 0 ],
-[ -0.27, 0.73, 0, 0, 1, 0 ],
-[ -0.96, 0.35, 1, 0, 0, 0 ],
-[ -0.48, -0.4, 1, 0, 0, 0 ],
-[ -0.09, 0.85, 1, 0, 0, 0 ],
-[ -0.67, 0.45, 0, 0, 1, 0 ],
-[ 0.12, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.8200000000000001, -0.06, 1, 0, 0, 0 ],
-[ 0.39, 0.72, 0, 1, 0, 0 ],
-[ -0.65, 0.65, 0, 0, 1, 0 ],
-[ 0.16, 0.87, 1, 0, 0, 0 ],
-[ 0.21, -0.66, 1, 0, 0, 0 ],
-[ -0.75, 0.15, 0, 0, 1, 0 ],
-[ 0.32, 0.01, 1, 0, 0, 0 ],
-[ -0.47, 0.16, 0, 0, 1, 0 ],
-[ 0.6900000000000001, -0.96, 1, 0, 0, 0 ],
-[ 0.98, -0.62, 1, 0, 0, 0 ],
-[ -0.8200000000000001, -0.43, 1, 0, 0, 0 ],
-[ 0.96, -0.75, 1, 0, 0, 0 ],
-[ 0.76, 0.78, 0, 1, 0, 0 ],
-[ -0.72, 0.67, 0, 0, 1, 0 ],
-[ 0.09, 0.8, 1, 0, 0, 0 ],
-[ 0.42, -0.01, 0, 0, 0, 1 ],
-[ 0.47, -0.85, 1, 0, 0, 0 ],
-[ 0.28, 0.05, 1, 0, 0, 0 ],
-[ -0.15, -0.07000000000000001, 0, 0, 0, 1 ],
-[ -0.45, -0.61, 1, 0, 0, 0 ],
-[ -0.23, -0.18, 0, 0, 0, 1 ],
-[ 0.14, -0.06, 0, 0, 0, 1 ],
-[ 0.93, -0.62, 1, 0, 0, 0 ],
-[ 0.34, -0.32, 1, 0, 0, 0 ],
-[ 0.5600000000000001, 0.49, 0, 1, 0, 0 ],
-[ 0.8300000000000001, -0.2, 1, 0, 0, 0 ],
-[ -0.04, -0.61, 0, 0, 0, 1 ],
-[ -0.25, -0.91, 1, 0, 0, 0 ],
-[ -0.76, -0.16, 1, 0, 0, 0 ],
-[ -0.53, 0.5700000000000001, 0, 0, 1, 0 ],
-[ 0.72, -0.72, 1, 0, 0, 0 ],
-[ -0.03, -0.55, 0, 0, 0, 1 ],
-[ 1., 0.7000000000000001, 1, 0, 0, 0 ],
-[ -0.88, 0.9, 1, 0, 0, 0 ],
-[ -0.11, 0.31, 0, 0, 1, 0 ],
-[ 0.97, 0.07000000000000001, 1, 0, 0, 0 ],
-[ -0.07000000000000001, -0.47, 0, 0, 0, 1 ],
-[ -0.54, 0.55, 0, 0, 1, 0 ],
-[ 0.61, -0.36, 1, 0, 0, 0 ],
-[ -0.62, 0.98, 1, 0, 0, 0 ],
-[ -0.92, 0.5, 0, 0, 1, 0 ],
-[ 0.16, -0.51, 0, 0, 0, 1 ],
-[ 0.91, 0.73, 1, 0, 0, 0 ],
-[ 0.12, 0.38, 0, 1, 0, 0 ],
-[ 0.4, 0.92, 0, 1, 0, 0 ],
-[ 1., 0.02, 1, 0, 0, 0 ],
-[ 0.46, -0.22, 1, 0, 0, 0 ],
-[ -0.28, -0.28, 0, 0, 0, 1 ],
-[ 0.67, 0.02, 1, 0, 0, 0 ],
-[ -0.43, 0.92, 0, 0, 1, 0 ],
-[ -0.49, 0.2, 0, 0, 1, 0 ],
-[ 0.47, 0.22, 0, 1, 0, 0 ],
-[ -0.63, -0.79, 1, 0, 0, 0 ],
-[ -0.85, -0.86, 1, 0, 0, 0 ],
-[ 0.75, -0.98, 1, 0, 0, 0 ],
-[ 0.55, -0.48, 1, 0, 0, 0 ],
-[ 0.54, 0.5700000000000001, 0, 1, 0, 0 ],
-[ 0.9500000000000001, 0.51, 1, 0, 0, 0 ],
-[ -0.55, 0.41, 0, 0, 1, 0 ],
-[ -0.43, 0.5, 0, 0, 1, 0 ],
-[ 0.33, -0.75, 1, 0, 0, 0 ],
-[ -0.42, -0.27, 1, 0, 0, 0 ],
-[ 0.99, 0.85, 1, 0, 0, 0 ],
-[ 0.5, 0.15, 0, 1, 0, 0 ],
-[ 0.26, 0.58, 0, 1, 0, 0 ],
-[ 0.8100000000000001, -0.46, 1, 0, 0, 0 ],
-[ 0.33, 0.93, 1, 0, 0, 0 ],
-[ -0.05, 0.9, 1, 0, 0, 0 ],
-[ 0.16, -0.01, 0, 0, 0, 1 ],
-[ -0.65, 0.92, 0, 0, 1, 0 ],
-[ 0.93, -0.98, 1, 0, 0, 0 ],
-[ 0.97, -0.51, 1, 0, 0, 0 ],
-[ 0.92, -0.24, 1, 0, 0, 0 ],
-[ 0.97, 0.06, 1, 0, 0, 0 ],
-[ 0.8200000000000001, 0.84, 1, 0, 0, 0 ],
-[ 0.62, -0.03, 1, 0, 0, 0 ],
-[ -0.96, 0.55, 1, 0, 0, 0 ],
-[ -0.46, -0.93, 1, 0, 0, 0 ],
-[ -0.34, -0.9, 1, 0, 0, 0 ],
-[ 0.52, 0.67, 0, 1, 0, 0 ],
-[ -0.34, -0.85, 1, 0, 0, 0 ],
-[ 0.35, 0.26, 0, 1, 0, 0 ],
-[ -0.18, -0.5700000000000001, 0, 0, 0, 1 ],
-[ -0.16, 0.29, 0, 0, 1, 0 ],
-[ 0.8, 0.48, 0, 1, 0, 0 ],
-[ -0.35, 0.98, 1, 0, 0, 0 ],
-[ 0.62, -0.36, 1, 0, 0, 0 ],
-[ 0.96, -0.5600000000000001, 1, 0, 0, 0 ],
-[ 0.68, 0.8200000000000001, 0, 1, 0, 0 ],
-[ -0.52, -0.02, 1, 0, 0, 0 ],
-[ 0.9, -0.02, 1, 0, 0, 0 ],
-[ -0.49, -0.17, 1, 0, 0, 0 ],
-[ -0.35, 0.13, 0, 0, 1, 0 ],
-[ 0.9500000000000001, -0.96, 1, 0, 0, 0 ],
-[ -0.86, -0.8100000000000001, 1, 0, 0, 0 ],
-[ 0.34, -0.24, 0, 0, 0, 1 ],
-[ 0.8200000000000001, 0.13, 1, 0, 0, 0 ],
-[ -0.8300000000000001, 0.06, 1, 0, 0, 0 ],
-[ -0.4, 0.9400000000000001, 1, 0, 0, 0 ],
-[ 0.68, -0.29, 1, 0, 0, 0 ],
-[ 0.35, 0.41, 0, 1, 0, 0 ],
-[ 0.14, 0.92, 1, 0, 0, 0 ],
-[ 0.09, -0.37, 0, 0, 0, 1 ],
-[ -0.89, -0.93, 1, 0, 0, 0 ],
-[ -0.41, -0.14, 0, 0, 0, 1 ],
-[ -0.75, 0.91, 1, 0, 0, 0 ],
-[ -0.77, -0.71, 1, 0, 0, 0 ],
-[ -0.01, 0.7000000000000001, 1, 0, 0, 0 ],
-[ -0.2, 0.01, 1, 0, 0, 0 ],
-[ 0.28, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.84, -0.09, 1, 0, 0, 0 ],
-[ 0.29, 0.9, 1, 0, 0, 0 ],
-[ -0.6, -0.06, 1, 0, 0, 0 ],
-[ -0.29, -0.47, 1, 0, 0, 0 ],
-[ -0.86, -0.53, 1, 0, 0, 0 ],
-[ 0.41, -0.98, 1, 0, 0, 0 ],
-[ 0.2, 0.22, 0, 1, 0, 0 ],
-[ 0.44, -0.68, 1, 0, 0, 0 ],
-[ -0.01, -0.21, 0, 0, 0, 1 ],
-[ 0.19, -0.44, 0, 0, 0, 1 ],
-[ 0.05, 0.99, 1, 0, 0, 0 ],
-[ -0.02, 0.32, 1, 0, 0, 0 ],
-[ -0.9400000000000001, 0.71, 1, 0, 0, 0 ],
-[ -0.71, -0.9, 1, 0, 0, 0 ],
-[ 0.31, -0.21, 0, 0, 0, 1 ],
-[ -0.54, 0.44, 0, 0, 1, 0 ],
-[ -0.7000000000000001, -0.66, 1, 0, 0, 0 ],
-[ -0.93, -0.43, 1, 0, 0, 0 ],
-[ 0.49, 0.88, 0, 1, 0, 0 ],
-[ -0.65, -0.91, 1, 0, 0, 0 ],
-[ -0.71, -0.27, 1, 0, 0, 0 ],
-[ 0.64, 0.65, 0, 1, 0, 0 ],
-[ -0.8200000000000001, 0.18, 1, 0, 0, 0 ],
-[ 0.12, 0.26, 1, 0, 0, 0 ],
-[ -0.62, -0.39, 1, 0, 0, 0 ],
-[ 0.5, -0.39, 1, 0, 0, 0 ],
-[ 0.3, 0.08, 1, 0, 0, 0 ],
-[ -0.44, -0.6, 1, 0, 0, 0 ],
-[ 0.14, 0.18, 1, 0, 0, 0 ],
-[ -0.72, 0.8200000000000001, 0, 0, 1, 0 ],
-[ -0.14, -0.24, 0, 0, 0, 1 ],
-[ -0.87, -0.37, 1, 0, 0, 0 ],
-[ -0.39, 0.72, 0, 0, 1, 0 ],
-[ 0.96, 0.5700000000000001, 1, 0, 0, 0 ],
-[ -0.16, 0.38, 0, 0, 1, 0 ],
-[ -0.8200000000000001, 0.92, 1, 0, 0, 0 ],
-[ 0.72, -0.92, 1, 0, 0, 0 ],
-[ -0.28, 0.29, 0, 0, 1, 0 ],
-[ 0.16, -0.72, 1, 0, 0, 0 ],
-[ 0.71, 0.75, 0, 1, 0, 0 ],
-[ -0.89, -0.14, 1, 0, 0, 0 ],
-[ -0.35, -0.28, 0, 0, 0, 1 ],
-[ -0.48, -0.79, 1, 0, 0, 0 ],
-[ 0.7000000000000001, -0.89, 1, 0, 0, 0 ],
-[ -0.92, -0.05, 1, 0, 0, 0 ],
-[ 0.13, -0.45, 0, 0, 0, 1 ],
-[ 0.42, -0.63, 1, 0, 0, 0 ],
-[ -0.52, -0.02, 1, 0, 0, 0 ],
-[ -0.26, 0.54, 0, 0, 1, 0 ],
-[ -0.38, 0.62, 0, 0, 1, 0 ],
-[ -0.6, 0.19, 0, 0, 1, 0 ],
-[ 0.4, 0.29, 0, 1, 0, 0 ],
-[ -0.39, -0.73, 1, 0, 0, 0 ],
-[ 0.48, 0.41, 0, 1, 0, 0 ],
-[ 0.39, -0.29, 1, 0, 0, 0 ],
-[ -0.48, -0.25, 1, 0, 0, 0 ],
-[ -0.97, 0.15, 1, 0, 0, 0 ],
-[ 0.62, -0.84, 1, 0, 0, 0 ],
-[ 0.66, -0.93, 1, 0, 0, 0 ],
-[ -0.97, 0.26, 1, 0, 0, 0 ],
-[ -0.74, 0.05, 1, 0, 0, 0 ],
-[ 0.37, -0.86, 1, 0, 0, 0 ],
-[ 0.64, 0.36, 0, 1, 0, 0 ],
-[ -0.37, -0.51, 1, 0, 0, 0 ],
-[ 0.88, -0.11, 1, 0, 0, 0 ],
-[ -0.75, -0.53, 1, 0, 0, 0 ],
-[ 0.8300000000000001, 0.76, 0, 1, 0, 0 ],
-[ -0.11, -0.37, 0, 0, 0, 1 ],
-[ 0.5, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.67, 0.03, 1, 0, 0, 0 ],
-[ -0.79, -0.84, 1, 0, 0, 0 ],
-[ 0.49, 0.39, 0, 1, 0, 0 ],
-[ -0.61, 0.76, 0, 0, 1, 0 ],
-[ 0.32, 0.05, 1, 0, 0, 0 ],
-[ -0.5600000000000001, -0.52, 1, 0, 0, 0 ],
-[ -0.02, -0.15, 0, 0, 0, 1 ],
-[ -0.72, -0.25, 1, 0, 0, 0 ],
-[ -0.55, 0.64, 0, 0, 1, 0 ],
-[ 0.8300000000000001, -0.34, 1, 0, 0, 0 ],
-[ -0.89, -0.8100000000000001, 1, 0, 0, 0 ],
-[ 0.34, 0.66, 0, 1, 0, 0 ],
-[ -0.27, -0.16, 0, 0, 0, 1 ],
-[ 0.38, -0.32, 1, 0, 0, 0 ],
-[ 0.6, -0.11, 1, 0, 0, 0 ],
-[ -0.44, 0.54, 0, 0, 1, 0 ],
-[ -0.41, -0.25, 1, 0, 0, 0 ],
-[ 0.78, -0.9, 1, 0, 0, 0 ],
-[ 0.23, 0.24, 0, 1, 0, 0 ],
-[ -0.48, -0.36, 1, 0, 0, 0 ],
-[ -0.32, -0.9400000000000001, 1, 0, 0, 0 ],
-[ 0.84, -0.5, 1, 0, 0, 0 ],
-[ -0.87, 0.8300000000000001, 1, 0, 0, 0 ],
-[ 0.98, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.51, 0.78, 0, 0, 1, 0 ],
-[ 0.41, 1., 1, 0, 0, 0 ],
-[ 0.91, 0.25, 1, 0, 0, 0 ],
-[ -0.74, 0.37, 0, 0, 1, 0 ],
-[ 0.35, 0.91, 0, 1, 0, 0 ],
-[ 0.35, -0.38, 1, 0, 0, 0 ],
-[ -0.78, -0.78, 1, 0, 0, 0 ],
-[ -0.8100000000000001, -0.46, 1, 0, 0, 0 ],
-[ 0.91, -0.93, 1, 0, 0, 0 ],
-[ -0.71, -0.06, 1, 0, 0, 0 ],
-[ 0.76, -0.61, 1, 0, 0, 0 ],
-[ -0.27, 0.96, 1, 0, 0, 0 ],
-[ 0.04, -0.02, 0, 0, 0, 1 ],
-[ 0.28, 0.39, 0, 1, 0, 0 ],
-[ -0.4, 0.8100000000000001, 0, 0, 1, 0 ],
-[ 0.45, -0.48, 1, 0, 0, 0 ],
-[ -0.91, -0.68, 1, 0, 0, 0 ],
-[ -0.33, 0.78, 0, 0, 1, 0 ],
-[ 0.93, -0.02, 1, 0, 0, 0 ],
-[ 0.14, -0.77, 1, 0, 0, 0 ],
-[ -0.8200000000000001, -0.91, 1, 0, 0, 0 ],
-[ 0.6, -0.71, 1, 0, 0, 0 ],
-[ -0.15, -0.08, 0, 0, 0, 1 ],
-[ -0.14, 0.28, 0, 0, 1, 0 ],
-[ -0.77, -0.49, 1, 0, 0, 0 ],
-[ -0.19, -0.68, 1, 0, 0, 0 ],
-[ 0.88, -0.99, 1, 0, 0, 0 ],
-[ -0.67, 0.87, 0, 0, 1, 0 ],
-[ -0.84, -0.5, 1, 0, 0, 0 ],
-[ -0.35, -0.7000000000000001, 1, 0, 0, 0 ],
-[ 0.6, -0.92, 1, 0, 0, 0 ],
-[ -0.24, 0.47, 0, 0, 1, 0 ],
-[ 0.26, 0.34, 0, 1, 0, 0 ],
-[ 0.86, -0.63, 1, 0, 0, 0 ],
-[ -0.9500000000000001, -0.02, 1, 0, 0, 0 ],
-[ -0.31, 0.75, 0, 0, 1, 0 ],
-[ -0.9500000000000001, 0.17, 1, 0, 0, 0 ],
-[ -0.49, 0.8, 0, 0, 1, 0 ],
-[ -0.24, -0.6900000000000001, 1, 0, 0, 0 ],
-[ -0.37, 0.61, 0, 0, 1, 0 ],
-[ 0.51, -0.37, 1, 0, 0, 0 ],
-[ -0.58, -0.97, 1, 0, 0, 0 ],
-[ 0.71, -0.4, 1, 0, 0, 0 ],
-[ 0.06, 0.39, 1, 0, 0, 0 ],
-[ 0.59, -0.17, 1, 0, 0, 0 ],
-[ -0.98, -0.8, 1, 0, 0, 0 ],
-[ -0.43, 0.72, 0, 0, 1, 0 ],
-[ -0.5700000000000001, 0.49, 0, 0, 1, 0 ],
-[ 0.58, 0.86, 0, 1, 0, 0 ],
-[ -0.19, -0.51, 0, 0, 0, 1 ],
-[ 0.43, -0.7000000000000001, 1, 0, 0, 0 ],
-[ 0.76, 0.91, 1, 0, 0, 0 ],
-[ -0.58, -0.38, 1, 0, 0, 0 ],
-[ 0.44, 0.63, 0, 1, 0, 0 ],
-[ 0.2, 0.97, 1, 0, 0, 0 ],
-[ 0.11, -0.12, 0, 0, 0, 1 ],
-[ -0.9, 0.3, 1, 0, 0, 0 ],
-[ -0.17, 0.01, 1, 0, 0, 0 ],
-[ 1., -0.47, 1, 0, 0, 0 ],
-[ -0.3, 0.73, 0, 0, 1, 0 ],
-[ 0.9500000000000001, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.72, -0.8, 1, 0, 0, 0 ],
-[ -0.4, -0.62, 1, 0, 0, 0 ],
-[ 0.8100000000000001, -0.21, 1, 0, 0, 0 ],
-[ 0.47, 0.54, 0, 1, 0, 0 ],
-[ -0.44, 0.5700000000000001, 0, 0, 1, 0 ],
-[ -0.64, 0.22, 0, 0, 1, 0 ],
-[ -0.68, 0.08, 1, 0, 0, 0 ],
-[ 0.3, 0.86, 0, 1, 0, 0 ],
-[ 0.96, 0.63, 1, 0, 0, 0 ],
-[ -0.99, -0.47, 1, 0, 0, 0 ],
-[ 0.01, 0.54, 1, 0, 0, 0 ],
-[ 0.4, 0.85, 0, 1, 0, 0 ],
-[ -0.97, -0.28, 1, 0, 0, 0 ],
-[ 0.11, -0.41, 0, 0, 0, 1 ],
-[ -0.42, 0.44, 0, 0, 1, 0 ],
-[ -0.2, 0.5, 0, 0, 1, 0 ],
-[ 0.77, 0.46, 0, 1, 0, 0 ],
-[ -0.1, 0.62, 0, 0, 1, 0 ],
-[ 0.87, -0.16, 1, 0, 0, 0 ],
-[ -0.12, -0.37, 0, 0, 0, 1 ],
-[ -0.49, 0.2, 0, 0, 1, 0 ],
-[ 0.98, -0.06, 1, 0, 0, 0 ],
-[ -0.8, 0.1, 1, 0, 0, 0 ],
-[ -0.36, -0.32, 1, 0, 0, 0 ],
-[ 0.27, 0.32, 0, 1, 0, 0 ],
-[ 0.9500000000000001, -0.03, 1, 0, 0, 0 ],
-[ 0.17, 0.66, 0, 1, 0, 0 ],
-[ -0.98, -0.07000000000000001, 1, 0, 0, 0 ],
-[ -0.87, -0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.08, -0.44, 0, 0, 0, 1 ],
-[ -0.48, -0.36, 1, 0, 0, 0 ],
-[ 0.22, 0.17, 0, 1, 0, 0 ],
-[ 0.38, 0.01, 1, 0, 0, 0 ],
-[ 0.99, 1., 1, 0, 0, 0 ],
-[ -0.5600000000000001, -0.73, 1, 0, 0, 0 ],
-[ 0.8200000000000001, -0.09, 1, 0, 0, 0 ],
-[ -0.62, -0.23, 1, 0, 0, 0 ],
-[ 0.49, 0.07000000000000001, 0, 1, 0, 0 ],
-[ 0.87, 0.87, 1, 0, 0, 0 ],
-[ -0.48, 0.78, 0, 0, 1, 0 ],
-[ -0.18, -0.5700000000000001, 0, 0, 0, 1 ],
-[ 0.77, -0.61, 1, 0, 0, 0 ],
-[ 0.45, -0.74, 1, 0, 0, 0 ],
-[ -0.9, 0.18, 1, 0, 0, 0 ],
-[ -0.13, 0.43, 0, 0, 1, 0 ],
-[ 0.26, 0.89, 1, 0, 0, 0 ],
-[ 0.35, -0.68, 1, 0, 0, 0 ],
-[ 0.34, -0.39, 1, 0, 0, 0 ],
-[ -0.1, -0.6, 0, 0, 0, 1 ],
-[ -0.72, -0.32, 1, 0, 0, 0 ],
-[ -0.68, 0.32, 0, 0, 1, 0 ],
-[ 0.02, -0.45, 0, 0, 0, 1 ],
-[ -0.65, 0.04, 1, 0, 0, 0 ],
-[ 0.84, 0.76, 0, 1, 0, 0 ],
-[ -0.84, -0.32, 1, 0, 0, 0 ],
-[ -0.4, -0.07000000000000001, 0, 0, 0, 1 ],
-[ 0.35, -0.19, 0, 0, 0, 1 ],
-[ 0.36, -0.65, 1, 0, 0, 0 ],
-[ 0.67, -0.3, 1, 0, 0, 0 ],
-[ 0.86, 0.65, 0, 1, 0, 0 ],
-[ -0.38, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.79, 0.17, 0, 0, 1, 0 ],
-[ 0.86, -0.8100000000000001, 1, 0, 0, 0 ],
-[ 0.77, 0.98, 1, 0, 0, 0 ],
-[ 0.21, -0.07000000000000001, 0, 0, 0, 1 ],
-[ 0.6900000000000001, -0.11, 1, 0, 0, 0 ],
-[ 0.86, -0.8100000000000001, 1, 0, 0, 0 ],
-[ 0.8, 0.02, 1, 0, 0, 0 ],
-[ -0.09, -0.39, 0, 0, 0, 1 ],
-[ -0.64, 0.85, 0, 0, 1, 0 ],
-[ 0.52, 0.05, 1, 0, 0, 0 ],
-[ -0.74, -0.27, 1, 0, 0, 0 ],
-[ 0.43, 0.1, 0, 1, 0, 0 ],
-[ -0.13, 0.68, 0, 0, 1, 0 ],
-[ -0.93, -0.06, 1, 0, 0, 0 ],
-[ 0.58, -0.48, 1, 0, 0, 0 ],
-[ -0.46, 0.15, 0, 0, 1, 0 ],
-[ 0.47, 0.22, 0, 1, 0, 0 ],
-[ -0.6900000000000001, 0.2, 0, 0, 1, 0 ],
-[ 0.98, -0.54, 1, 0, 0, 0 ],
-[ -0.3, 0.07000000000000001, 1, 0, 0, 0 ],
-[ -0.39, -0.68, 1, 0, 0, 0 ],
-[ -0.32, 0.02, 1, 0, 0, 0 ],
-[ -0.13, 0.02, 1, 0, 0, 0 ],
-[ 0.53, 0.11, 0, 1, 0, 0 ],
-[ -0.21, 0.41, 0, 0, 1, 0 ],
-[ -0.6900000000000001, -0.62, 1, 0, 0, 0 ],
-[ -0.01, -0.68, 0, 0, 0, 1 ],
-[ -0.17, 0.29, 0, 0, 1, 0 ],
-[ -0.72, -1., 1, 0, 0, 0 ],
-[ -0.85, 0.6900000000000001, 0, 0, 1, 0 ],
-[ -0.72, -0.78, 1, 0, 0, 0 ],
-[ -0.22, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.21, -0.98, 1, 0, 0, 0 ],
-[ -0.8, -0.51, 1, 0, 0, 0 ],
-[ 0.06, -0.78, 0, 0, 0, 1 ],
-[ -0.92, -0.78, 1, 0, 0, 0 ],
-[ -0.59, 0.5600000000000001, 0, 0, 1, 0 ],
-[ 0.05, 0.87, 1, 0, 0, 0 ],
-[ 0.16, -0.3, 0, 0, 0, 1 ],
-[ -0.36, -0.1, 0, 0, 0, 1 ],
-[ 0.55, -0.8, 1, 0, 0, 0 ],
-[ 0.75, -0.5600000000000001, 1, 0, 0, 0 ],
-[ -0.97, -0.79, 1, 0, 0, 0 ],
-[ 0.48, -0.84, 1, 0, 0, 0 ],
-[ 0.38, 0.04, 1, 0, 0, 0 ],
-[ 0.01, -0.3, 0, 0, 0, 1 ],
-[ 0.32, -0.44, 1, 0, 0, 0 ],
-[ 0.17, -0.22, 0, 0, 0, 1 ],
-[ -0.08, -0.89, 1, 0, 0, 0 ],
-[ 0.92, -0.35, 1, 0, 0, 0 ],
-[ 0.35, -0.96, 1, 0, 0, 0 ],
-[ -0.78, 0.8100000000000001, 0, 0, 1, 0 ],
-[ 0.39, 0.37, 0, 1, 0, 0 ],
-[ -0.43, 0.66, 0, 0, 1, 0 ],
-[ 0.98, -0.51, 1, 0, 0, 0 ],
-[ -0.2, 0.89, 1, 0, 0, 0 ],
-[ 1., 0.25, 1, 0, 0, 0 ],
-[ 0.58, -0.6900000000000001, 1, 0, 0, 0 ],
-[ 0.54, 0.22, 0, 1, 0, 0 ],
-[ -0.7000000000000001, -0.86, 1, 0, 0, 0 ],
-[ 0.52, -0.9, 1, 0, 0, 0 ],
-[ 0.8, -0.38, 1, 0, 0, 0 ],
-[ -0.46, 0.84, 0, 0, 1, 0 ],
-[ -0.14, 0.48, 0, 0, 1, 0 ],
-[ 0.66, -0.71, 1, 0, 0, 0 ],
-[ -0.72, -0.76, 1, 0, 0, 0 ],
-[ 0.28, 0.19, 0, 1, 0, 0 ],
-[ -0.86, -0.03, 1, 0, 0, 0 ],
-[ -0.11, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.01, -0.33, 0, 0, 0, 1 ],
-[ 0.38, -0.91, 1, 0, 0, 0 ],
-[ 0.9, 0.08, 1, 0, 0, 0 ],
-[ -0.7000000000000001, -0.05, 1, 0, 0, 0 ],
-[ -0.47, 0.33, 0, 0, 1, 0 ],
-[ -0.1, 0.02, 1, 0, 0, 0 ],
-[ 0.84, -0.61, 1, 0, 0, 0 ],
-[ -0.93, -0.55, 1, 0, 0, 0 ],
-[ 0.89, 0.43, 0, 1, 0, 0 ],
-[ -0.9500000000000001, -0.84, 1, 0, 0, 0 ],
-[ -0.9400000000000001, 0.6, 1, 0, 0, 0 ],
-[ 0.01, -0.67, 0, 0, 0, 1 ],
-[ -0.58, 0.72, 0, 0, 1, 0 ],
-[ 0.5600000000000001, -0.8, 1, 0, 0, 0 ],
-[ -0.19, 0.74, 0, 0, 1, 0 ],
-[ 0.25, 0.77, 0, 1, 0, 0 ],
-[ 0.09, 0.73, 1, 0, 0, 0 ],
-[ 0.51, 0.9400000000000001, 0, 1, 0, 0 ],
-[ -0.77, -0.45, 1, 0, 0, 0 ],
-[ -0.52, -0.09, 1, 0, 0, 0 ],
-[ -0.5, 0.12, 0, 0, 1, 0 ],
-[ 0.19, 0.52, 0, 1, 0, 0 ],
-[ -0.32, -0.64, 1, 0, 0, 0 ],
-[ 0.89, -0.38, 1, 0, 0, 0 ],
-[ 0.93, -0.68, 1, 0, 0, 0 ],
-[ -0.3, 0.44, 0, 0, 1, 0 ],
-[ -0.24, -0.67, 1, 0, 0, 0 ],
-[ -0.35, -0.23, 0, 0, 0, 1 ],
-[ -0.37, 0.51, 0, 0, 1, 0 ],
-[ -0.89, 0.09, 1, 0, 0, 0 ],
-[ 0.72, -0.89, 1, 0, 0, 0 ],
-[ -0.99, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.1, 0.13, 1, 0, 0, 0 ],
-[ -0.8200000000000001, -0.58, 1, 0, 0, 0 ],
-[ 0.39, 0.64, 0, 1, 0, 0 ],
-[ -0.86, -0.7000000000000001, 1, 0, 0, 0 ],
-[ -0.43, -0.63, 1, 0, 0, 0 ],
-[ -0.01, 0.02, 1, 0, 0, 0 ],
-[ -0.87, 0.9, 1, 0, 0, 0 ],
-[ -0.68, 0.21, 0, 0, 1, 0 ],
-[ -0.72, -0.72, 1, 0, 0, 0 ],
-[ -0.47, -0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.91, 0.13, 1, 0, 0, 0 ],
-[ 1., 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.41, -0.8200000000000001, 1, 0, 0, 0 ],
-[ -0.8300000000000001, 0.15, 1, 0, 0, 0 ],
-[ -0.68, 0.49, 0, 0, 1, 0 ],
-[ 0.5700000000000001, -0.72, 1, 0, 0, 0 ],
-[ 0.87, -0.12, 1, 0, 0, 0 ],
-[ -0.75, -0.39, 1, 0, 0, 0 ],
-[ 0.93, 0.13, 1, 0, 0, 0 ],
-[ -0.31, 0.66, 0, 0, 1, 0 ],
-[ 0.03, 0.08, 1, 0, 0, 0 ],
-[ -0.31, -0.21, 0, 0, 0, 1 ],
-[ -0.9400000000000001, 0.18, 1, 0, 0, 0 ],
-[ -0.86, 0.16, 1, 0, 0, 0 ],
-[ -0.19, -0.27, 0, 0, 0, 1 ],
-[ 0.5600000000000001, 0.2, 0, 1, 0, 0 ],
-[ 0.98, 0.52, 1, 0, 0, 0 ],
-[ -0.06, -0.8200000000000001, 0, 0, 0, 1 ],
-[ -0.09, 0.23, 1, 0, 0, 0 ],
-[ -0.51, 0.49, 0, 0, 1, 0 ],
-[ 0.55, -0.87, 1, 0, 0, 0 ],
-[ 0.01, 0.05, 1, 0, 0, 0 ],
-[ -0.88, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.18, -0.19, 0, 0, 0, 1 ],
-[ 0.59, -0.77, 1, 0, 0, 0 ],
-[ 0.98, 0.89, 1, 0, 0, 0 ],
-[ 0.97, -0.21, 1, 0, 0, 0 ],
-[ -0.28, -0.63, 1, 0, 0, 0 ],
-[ -0.71, -0.76, 1, 0, 0, 0 ],
-[ -0.66, 0.67, 0, 0, 1, 0 ],
-[ 0.45, 0.89, 0, 1, 0, 0 ],
-[ -0.16, 0.73, 0, 0, 1, 0 ],
-[ -0.07000000000000001, -0.8, 0, 0, 0, 1 ],
-[ -0.1, 0.5, 0, 0, 1, 0 ],
-[ 0.05, -0.74, 0, 0, 0, 1 ],
-[ -0.8, 0.6900000000000001, 0, 0, 1, 0 ],
-[ -0.16, 0.64, 0, 0, 1, 0 ],
-[ -0.58, 0.48, 0, 0, 1, 0 ],
-[ 0.6900000000000001, 0.54, 0, 1, 0, 0 ],
-[ 0.18, 0.37, 0, 1, 0, 0 ],
-[ 0.29, 0.76, 0, 1, 0, 0 ],
-[ -0.16, 0.05, 1, 0, 0, 0 ],
-[ -0.37, 0.9, 0, 0, 1, 0 ],
-[ -0.8200000000000001, 0.39, 0, 0, 1, 0 ],
-[ -0.8100000000000001, -0.58, 1, 0, 0, 0 ],
-[ -0.8, 0.8300000000000001, 0, 0, 1, 0 ],
-[ -0.11, -0.59, 0, 0, 0, 1 ],
-[ 0.09, 0.97, 1, 0, 0, 0 ],
-[ 0.33, 0.5600000000000001, 0, 1, 0, 0 ],
-[ -0.78, 0.38, 0, 0, 1, 0 ],
-[ -0.1, -0.78, 0, 0, 0, 1 ],
-[ 0.43, 0.46, 0, 1, 0, 0 ],
-[ 0.34, -0.67, 1, 0, 0, 0 ],
-[ -0.88, -0.01, 1, 0, 0, 0 ],
-[ -0.07000000000000001, -0.25, 0, 0, 0, 1 ],
-[ 0.28, 0.38, 0, 1, 0, 0 ],
-[ 0.8200000000000001, 0.04, 1, 0, 0, 0 ],
-[ 0.27, 0.34, 0, 1, 0, 0 ],
-[ 0.13, -0.8100000000000001, 1, 0, 0, 0 ],
-[ 0.64, -0.28, 1, 0, 0, 0 ],
-[ 0.48, 0.85, 0, 1, 0, 0 ],
-[ 0.9500000000000001, -0.31, 1, 0, 0, 0 ],
-[ 0.36, 0.99, 1, 0, 0, 0 ],
-[ -0.26, -0.31, 0, 0, 0, 1 ],
-[ 0.62, 0.85, 0, 1, 0, 0 ],
-[ -0.74, -0.7000000000000001, 1, 0, 0, 0 ],
-[ -0.77, -0.21, 1, 0, 0, 0 ],
-[ 0.36, -0.66, 1, 0, 0, 0 ],
-[ 0.17, -0.76, 1, 0, 0, 0 ],
-[ 0.63, 0.88, 0, 1, 0, 0 ],
-[ -0.98, 0.19, 1, 0, 0, 0 ],
-[ -0.98, 0.43, 1, 0, 0, 0 ],
-[ -0.47, 0.23, 0, 0, 1, 0 ],
-[ -0.63, 0.88, 0, 0, 1, 0 ],
-[ 0.08, 0.31, 1, 0, 0, 0 ],
-[ 0.21, -0.25, 0, 0, 0, 1 ],
-[ -0.62, 0.52, 0, 0, 1, 0 ],
-[ -0.39, 0.93, 0, 0, 1, 0 ],
-[ 0.17, -0.91, 1, 0, 0, 0 ],
-[ 0.54, 0.99, 1, 0, 0, 0 ],
-[ 0.39, 0.05, 1, 0, 0, 0 ],
-[ 0.92, 0.46, 0, 1, 0, 0 ],
-[ 0.24, 0.87, 1, 0, 0, 0 ],
-[ 0.93, -0.22, 1, 0, 0, 0 ],
-[ 0.65, -0.74, 1, 0, 0, 0 ],
-[ -0.08, 0.9, 1, 0, 0, 0 ],
-[ -0.67, 0.2, 0, 0, 1, 0 ],
-[ -0.47, -0.85, 1, 0, 0, 0 ],
-[ 0.3, 0.37, 0, 1, 0, 0 ],
-[ 0.61, 0.1, 0, 1, 0, 0 ],
-[ 0.46, -0.5700000000000001, 1, 0, 0, 0 ],
-[ -0.41, 0.32, 0, 0, 1, 0 ],
-[ -0.98, -0.02, 1, 0, 0, 0 ],
-[ 0.72, -0.66, 1, 0, 0, 0 ],
-[ -0.48, 0.71, 0, 0, 1, 0 ],
-[ -0.88, -0.6, 1, 0, 0, 0 ],
-[ 0.59, -0.64, 1, 0, 0, 0 ],
-[ -0.01, -0.66, 0, 0, 0, 1 ],
-[ -0.36, 0.28, 0, 0, 1, 0 ],
-[ -0.28, -0.39, 0, 0, 0, 1 ],
-[ -0.8200000000000001, 0.46, 0, 0, 1, 0 ],
-[ 0.73, 0.92, 1, 0, 0, 0 ],
-[ 0.96, 0.33, 1, 0, 0, 0 ],
-[ 0.41, 0.45, 0, 1, 0, 0 ],
-[ -0.11, 0.75, 1, 0, 0, 0 ],
-[ 0.1, 0.37, 0, 1, 0, 0 ],
-[ -0.52, -0.4, 1, 0, 0, 0 ],
-[ -0.36, 1., 1, 0, 0, 0 ],
-[ 0.74, -0.87, 1, 0, 0, 0 ],
-[ -0.8300000000000001, -0.74, 1, 0, 0, 0 ],
-[ -0.45, -0.01, 0, 0, 0, 1 ],
-[ 0.91, 0.65, 0, 1, 0, 0 ],
-[ -1., 0.6, 1, 0, 0, 0 ],
-[ -0.96, -0.53, 1, 0, 0, 0 ],
-[ 0.71, 0.88, 0, 1, 0, 0 ],
-[ -0.13, 0.4, 0, 0, 1, 0 ],
-[ -0.54, -0.64, 1, 0, 0, 0 ],
-[ -0.9, 0.84, 1, 0, 0, 0 ],
-[ -0.9500000000000001, 0.86, 1, 0, 0, 0 ],
-[ 0.08, 0.52, 0, 1, 0, 0 ],
-[ 0.76, -1., 1, 0, 0, 0 ],
-[ -0.2, 0.88, 1, 0, 0, 0 ],
-[ 0.36, -0.02, 0, 0, 0, 1 ],
-[ 0.24, 0.9, 1, 0, 0, 0 ],
-[ -0.04, 1., 1, 0, 0, 0 ],
-[ -0.9500000000000001, -0.62, 1, 0, 0, 0 ],
-[ -0.46, 0.36, 0, 0, 1, 0 ],
-[ 0.9500000000000001, 0.08, 1, 0, 0, 0 ],
-[ -0.01, 0.15, 1, 0, 0, 0 ],
-[ -0.41, -0.2, 1, 0, 0, 0 ],
-[ -0.43, -0.49, 1, 0, 0, 0 ],
-[ 0.88, 0.78, 1, 0, 0, 0 ],
-[ -0.19, -0.71, 1, 0, 0, 0 ],
-[ -0.72, -0.63, 1, 0, 0, 0 ],
-[ -0.87, 0.87, 1, 0, 0, 0 ],
-[ 0.62, -0.02, 1, 0, 0, 0 ],
-[ 0.2, 0.5, 0, 1, 0, 0 ],
-[ 0.97, 0.11, 1, 0, 0, 0 ],
-[ 0.45, 0.53, 0, 1, 0, 0 ],
-[ 0.37, -0.66, 1, 0, 0, 0 ],
-[ 0.1, -0.49, 0, 0, 0, 1 ],
-[ 0.1, -0.31, 0, 0, 0, 1 ],
-[ 0.8, 0.86, 1, 0, 0, 0 ],
-[ -0.97, -0.6900000000000001, 1, 0, 0, 0 ],
-[ 0.84, -0.5, 1, 0, 0, 0 ],
-[ -0.07000000000000001, -0.17, 0, 0, 0, 1 ],
-[ -0.71, -0.08, 1, 0, 0, 0 ],
-[ -0.37, 0.28, 0, 0, 1, 0 ],
-[ -0.29, 0.47, 0, 0, 1, 0 ],
-[ -0.41, -0.72, 1, 0, 0, 0 ],
-[ 0.35, -0.92, 1, 0, 0, 0 ],
-[ 0.8200000000000001, 0.97, 1, 0, 0, 0 ],
-[ -0.64, 0.7000000000000001, 0, 0, 1, 0 ],
-[ -0.06, 0.29, 1, 0, 0, 0 ],
-[ 0.61, 0.05, 1, 0, 0, 0 ],
-[ 0.88, 0.99, 1, 0, 0, 0 ],
-[ 0.13, 0.68, 0, 1, 0, 0 ],
-[ -0.09, 0.84, 1, 0, 0, 0 ],
-[ 0.93, -0.12, 1, 0, 0, 0 ],
-[ -0.13, -0.31, 0, 0, 0, 1 ],
-[ -0.3, -0.79, 1, 0, 0, 0 ],
-[ -0.78, 0.39, 0, 0, 1, 0 ],
-[ -0.53, 0.5, 0, 0, 1, 0 ],
-[ 0.6900000000000001, -0.31, 1, 0, 0, 0 ],
-[ 0.74, 0.2, 0, 1, 0, 0 ],
-[ 1., -0.6900000000000001, 1, 0, 0, 0 ],
-[ -1., 0.12, 1, 0, 0, 0 ],
-[ -0.26, 0.52, 0, 0, 1, 0 ],
-[ -0.77, 0.5600000000000001, 0, 0, 1, 0 ],
-[ 0.43, 0.9, 0, 1, 0, 0 ],
-[ -0.92, 0.34, 1, 0, 0, 0 ],
-[ 0.04, -0.51, 0, 0, 0, 1 ],
-[ 0.78, -0.52, 1, 0, 0, 0 ],
-[ 0.14, -0.98, 1, 0, 0, 0 ],
-[ 0.76, 0.97, 1, 0, 0, 0 ],
-[ 0.18, -0.91, 1, 0, 0, 0 ],
-[ -0.91, -0.5, 1, 0, 0, 0 ],
-[ -0.36, -0.63, 1, 0, 0, 0 ],
-[ 0.49, 0.25, 0, 1, 0, 0 ],
-[ 0.38, 0.05, 1, 0, 0, 0 ],
-[ -0.43, 0.48, 0, 0, 1, 0 ],
-[ -1., -0.06, 1, 0, 0, 0 ],
-[ 0.03, 0.96, 1, 0, 0, 0 ],
-[ -0.78, -0.47, 1, 0, 0, 0 ],
-[ 0.5700000000000001, 0.68, 0, 1, 0, 0 ],
-[ -0.27, -0.36, 0, 0, 0, 1 ],
-[ 0.6900000000000001, 0.96, 1, 0, 0, 0 ],
-[ 0.2, -0.18, 0, 0, 0, 1 ],
-[ -0.98, -0.76, 1, 0, 0, 0 ],
-[ 0.49, -0.96, 1, 0, 0, 0 ],
-[ -0.96, 0.11, 1, 0, 0, 0 ],
-[ -0.63, 1., 1, 0, 0, 0 ],
-[ 0.78, 0.8100000000000001, 0, 1, 0, 0 ],
-[ -0.8100000000000001, -0.5700000000000001, 1, 0, 0, 0 ],
-[ 0.19, -0.65, 1, 0, 0, 0 ],
-[ -0.05, 0.2, 1, 0, 0, 0 ],
-[ 0.15, -0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.8, 0.17, 0, 0, 1, 0 ],
-[ -0.45, -0.88, 1, 0, 0, 0 ],
-[ -0.76, -0.22, 1, 0, 0, 0 ],
-[ 0.19, -0.75, 1, 0, 0, 0 ],
-[ 0.48, -0.9, 1, 0, 0, 0 ],
-[ 0.89, -0.17, 1, 0, 0, 0 ],
-[ -0.79, -0.45, 1, 0, 0, 0 ],
-[ 0.04, 0.78, 1, 0, 0, 0 ],
-[ -0.07000000000000001, 0.77, 1, 0, 0, 0 ],
-[ -0.43, 0.16, 0, 0, 1, 0 ],
-[ 0.91, -0.47, 1, 0, 0, 0 ],
-[ 0.6, 0.97, 1, 0, 0, 0 ],
-[ -0.87, 0.6, 0, 0, 1, 0 ],
-[ 0.02, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.12, -0.28, 0, 0, 0, 1 ],
-[ -0.52, -0.37, 1, 0, 0, 0 ],
-[ 0.6900000000000001, 0.31, 0, 1, 0, 0 ],
-[ -0.11, -0.01, 0, 0, 0, 1 ],
-[ 0.23, -0.6, 1, 0, 0, 0 ],
-[ -0.09, 0.42, 0, 0, 1, 0 ],
-[ -0.6900000000000001, 0.04, 1, 0, 0, 0 ],
-[ 0.01, -0.05, 0, 0, 0, 1 ],
-[ -0.5600000000000001, -0.16, 1, 0, 0, 0 ],
-[ 1., 0.99, 1, 0, 0, 0 ],
-[ -0.8, 0.15, 1, 0, 0, 0 ],
-[ 0.35, -0.5700000000000001, 1, 0, 0, 0 ],
-[ 0.17, -0.32, 0, 0, 0, 1 ],
-[ 0.62, 0.5, 0, 1, 0, 0 ],
-[ 0.78, 0.8200000000000001, 0, 1, 0, 0 ],
-[ -0.49, -0.3, 1, 0, 0, 0 ],
-[ -0.2, 0.79, 0, 0, 1, 0 ],
-[ 0.44, 0.17, 0, 1, 0, 0 ],
-[ 0.74, -0.43, 1, 0, 0, 0 ],
-[ 0.86, -0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.41, 0.9, 0, 0, 1, 0 ],
-[ -0.49, -0.4, 1, 0, 0, 0 ],
-[ 0.63, 0.79, 0, 1, 0, 0 ],
-[ -0.02, -0.43, 0, 0, 0, 1 ],
-[ 0.22, 0.35, 0, 1, 0, 0 ],
-[ 0.66, 0.8100000000000001, 0, 1, 0, 0 ],
-[ 0.11, -0.9, 1, 0, 0, 0 ],
-[ 0.71, -0.9, 1, 0, 0, 0 ],
-[ 0.22, -0.16, 0, 0, 0, 1 ],
-[ 0.09, -0.29, 0, 0, 0, 1 ],
-[ -0.12, -0.23, 0, 0, 0, 1 ],
-[ 0.42, -0.65, 1, 0, 0, 0 ],
-[ 0.9500000000000001, 0.79, 1, 0, 0, 0 ],
-[ 0.75, -0.28, 1, 0, 0, 0 ],
-[ -0.91, 0.08, 1, 0, 0, 0 ],
-[ -0.28, -0.45, 1, 0, 0, 0 ],
-[ -0.06, -0.5, 0, 0, 0, 1 ],
-[ 0.42, -0.15, 0, 0, 0, 1 ],
-[ 0.35, 0.84, 0, 1, 0, 0 ],
-[ -0.29, 0.59, 0, 0, 1, 0 ],
-[ 0.6900000000000001, -0.78, 1, 0, 0, 0 ],
-[ 0.78, 0.41, 0, 1, 0, 0 ],
-[ 0.8300000000000001, 0.62, 0, 1, 0, 0 ],
-[ 0.98, 0.41, 1, 0, 0, 0 ],
-[ -0.63, 0.09, 0, 0, 1, 0 ],
-[ 0.89, -0.33, 1, 0, 0, 0 ],
-[ -0.96, -0.38, 1, 0, 0, 0 ],
-[ -0.96, 0.32, 1, 0, 0, 0 ],
-[ -0.31, -0.72, 1, 0, 0, 0 ],
-[ 0.37, 0.75, 0, 1, 0, 0 ],
-[ 0.85, -0.4, 1, 0, 0, 0 ],
-[ 0.68, -0.44, 1, 0, 0, 0 ],
-[ -0.89, 0.93, 1, 0, 0, 0 ],
-[ -0.98, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.13, -0.8, 1, 0, 0, 0 ],
-[ 0.01, 0.45, 1, 0, 0, 0 ],
-[ -0.44, 0.43, 0, 0, 1, 0 ],
-[ -0.87, 0.23, 1, 0, 0, 0 ],
-[ -0.22, 0.66, 0, 0, 1, 0 ],
-[ 0.37, -0.84, 1, 0, 0, 0 ],
-[ 0.18, 0.77, 0, 1, 0, 0 ],
-[ 0.34, -0.47, 1, 0, 0, 0 ],
-[ 0.48, 0.19, 0, 1, 0, 0 ],
-[ -0.7000000000000001, 0.03, 1, 0, 0, 0 ],
-[ -0.61, -0.53, 1, 0, 0, 0 ],
-[ -0.22, -0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.96, 0.16, 1, 0, 0, 0 ],
-[ -0.09, 0.46, 0, 0, 1, 0 ],
-[ 0.87, -0.14, 1, 0, 0, 0 ],
-[ -0.87, -0.75, 1, 0, 0, 0 ],
-[ -0.3, 0.99, 1, 0, 0, 0 ],
-[ 0.47, 0.4, 0, 1, 0, 0 ],
-[ -0.3, 0.71, 0, 0, 1, 0 ],
-[ -0.77, 0.98, 1, 0, 0, 0 ],
-[ -0.85, 0.92, 1, 0, 0, 0 ],
-[ -0.8100000000000001, -0.48, 1, 0, 0, 0 ],
-[ 0.31, 0.66, 0, 1, 0, 0 ],
-[ -0.47, -0.48, 1, 0, 0, 0 ],
-[ 0.84, 0.74, 0, 1, 0, 0 ],
-[ 0.11, -0.67, 0, 0, 0, 1 ],
-[ 0.53, 0.34, 0, 1, 0, 0 ],
-[ -0.27, 0.1, 1, 0, 0, 0 ],
-[ -0.24, -0.85, 1, 0, 0, 0 ],
-[ -0.63, 0.46, 0, 0, 1, 0 ],
-[ -0.34, -0.8300000000000001, 1, 0, 0, 0 ],
-[ 0.97, 0.7000000000000001, 1, 0, 0, 0 ],
-[ 0.29, -0.73, 1, 0, 0, 0 ],
-[ 0.15, 0.21, 1, 0, 0, 0 ],
-[ -0.78, -0.25, 1, 0, 0, 0 ],
-[ 0.03, -0.74, 0, 0, 0, 1 ],
-[ 0.47, 0.3, 0, 1, 0, 0 ],
-[ -0.61, 0.75, 0, 0, 1, 0 ],
-[ -0.21, -0.19, 0, 0, 0, 1 ],
-[ 0.89, 0.71, 0, 1, 0, 0 ],
-[ 0.74, 0.27, 0, 1, 0, 0 ],
-[ -0.77, -0.04, 1, 0, 0, 0 ],
-[ -0.78, 0.19, 0, 0, 1, 0 ],
-[ 0.98, -0.31, 1, 0, 0, 0 ],
-[ 0.36, -0.3, 1, 0, 0, 0 ],
-[ -0.09, 0.5700000000000001, 0, 0, 1, 0 ],
-[ -0.03, 0.55, 1, 0, 0, 0 ],
-[ 0.8200000000000001, -0.73, 1, 0, 0, 0 ],
-[ -0.29, -0.52, 1, 0, 0, 0 ],
-[ -0.47, 0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.84, -0.79, 1, 0, 0, 0 ],
-[ -0.18, 0.77, 0, 0, 1, 0 ],
-[ -0.27, 0.59, 0, 0, 1, 0 ],
-[ -0.77, 0.12, 1, 0, 0, 0 ],
-[ 0.99, 0.28, 1, 0, 0, 0 ],
-[ -0.07000000000000001, -0.36, 0, 0, 0, 1 ],
-[ -0.38, -0.44, 1, 0, 0, 0 ],
-[ -0.98, -0.15, 1, 0, 0, 0 ],
-[ 0.24, -0.52, 0, 0, 0, 1 ],
-[ -0.55, -0.4, 1, 0, 0, 0 ],
-[ 0.91, -0.63, 1, 0, 0, 0 ],
-[ 0.77, 0.65, 0, 1, 0, 0 ],
-[ 0.59, 0.78, 0, 1, 0, 0 ],
-[ 0.48, -0.73, 1, 0, 0, 0 ],
-[ -0.29, 0.28, 0, 0, 1, 0 ],
-[ -0.5600000000000001, 0.71, 0, 0, 1, 0 ],
-[ -0.32, 0.75, 0, 0, 1, 0 ],
-[ -0.27, 0.21, 0, 0, 1, 0 ],
-[ 0.39, 0.74, 0, 1, 0, 0 ],
-[ -0.12, 0.75, 1, 0, 0, 0 ],
-[ -0.55, 0.67, 0, 0, 1, 0 ],
-[ -0.5700000000000001, -0.52, 1, 0, 0, 0 ],
-[ 0.8200000000000001, 0.71, 0, 1, 0, 0 ],
-[ 0.61, 0.2, 0, 1, 0, 0 ],
-[ -0.52, 0.45, 0, 0, 1, 0 ],
-[ -0.84, 0.19, 1, 0, 0, 0 ],
-[ 0.88, 0.61, 0, 1, 0, 0 ],
-[ -0.07000000000000001, 0.08, 1, 0, 0, 0 ],
-[ -0.6, 0.02, 1, 0, 0, 0 ],
-[ -0.24, 0.19, 0, 0, 1, 0 ],
-[ -0.22, -0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.65, -0.22, 1, 0, 0, 0 ],
-[ 0.8, -0.5600000000000001, 1, 0, 0, 0 ],
-[ -0.93, -1., 1, 0, 0, 0 ],
-[ -0.23, -0.03, 0, 0, 0, 1 ],
-[ -0.01, -0.61, 0, 0, 0, 1 ],
-[ -0.97, 0.54, 1, 0, 0, 0 ],
-[ -0.73, -0.16, 1, 0, 0, 0 ],
-[ -0.67, 0.44, 0, 0, 1, 0 ],
-[ -0.78, 0.61, 0, 0, 1, 0 ],
-[ 0.51, -0.48, 1, 0, 0, 0 ],
-[ 0.71, -0.17, 1, 0, 0, 0 ],
-[ 0.96, -0.13, 1, 0, 0, 0 ],
-[ 0.97, -0.8300000000000001, 1, 0, 0, 0 ],
-[ 0.2, -0.32, 0, 0, 0, 1 ],
-[ 0.97, 0.85, 1, 0, 0, 0 ],
-[ 0.39, -0.92, 1, 0, 0, 0 ],
-[ 0.76, 0.44, 0, 1, 0, 0 ],
-[ 0.36, -0.22, 0, 0, 0, 1 ],
-[ -0.37, 0.73, 0, 0, 1, 0 ],
-[ -0.8100000000000001, 0.6900000000000001, 0, 0, 1, 0 ],
-[ 0.27, 0.5, 0, 1, 0, 0 ],
-[ -0.92, 0.71, 1, 0, 0, 0 ],
-[ -0.74, 0.93, 1, 0, 0, 0 ],
-[ 0.22, 0.39, 0, 1, 0, 0 ],
-[ 0.29, 0.47, 0, 1, 0, 0 ],
-[ -0.62, 0.65, 0, 0, 1, 0 ],
-[ -0.52, -0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.03, 1., 1, 0, 0, 0 ],
-[ -0.51, 0.08, 0, 0, 1, 0 ],
-[ -0.03, 0.35, 1, 0, 0, 0 ],
-[ 0.3, 0.91, 1, 0, 0, 0 ],
-[ -0.75, -0.5, 1, 0, 0, 0 ],
-[ -0.98, -0.8100000000000001, 1, 0, 0, 0 ],
-[ 0.9500000000000001, -0.98, 1, 0, 0, 0 ],
-[ 0.1, 0.9, 1, 0, 0, 0 ],
-[ -0.34, 0.01, 1, 0, 0, 0 ],
-[ -0.39, 0.16, 0, 0, 1, 0 ],
-[ -0.22, 0.49, 0, 0, 1, 0 ],
-[ -0.04, 0.5600000000000001, 1, 0, 0, 0 ],
-[ -0.12, 0.17, 1, 0, 0, 0 ],
-[ -0.1, -0.5, 0, 0, 0, 1 ],
-[ -0.8, 0.25, 0, 0, 1, 0 ],
-[ 0.85, -0.08, 1, 0, 0, 0 ],
-[ 0.24, 0.79, 0, 1, 0, 0 ],
-[ -0.72, 0.74, 0, 0, 1, 0 ],
-[ -0.86, 0.87, 1, 0, 0, 0 ],
-[ 0.9400000000000001, 0.8200000000000001, 1, 0, 0, 0 ],
-[ -0.61, -0.49, 1, 0, 0, 0 ],
-[ -0.12, 0.99, 1, 0, 0, 0 ],
-[ 0.55, 0.01, 1, 0, 0, 0 ],
-[ -0.7000000000000001, -0.58, 1, 0, 0, 0 ],
-[ -0.5600000000000001, -0.53, 1, 0, 0, 0 ],
-[ -0.63, 0.49, 0, 0, 1, 0 ],
-[ 0.2, -0.29, 0, 0, 0, 1 ],
-[ 0.04, -0.8200000000000001, 0, 0, 0, 1 ],
-[ 0.61, 0.54, 0, 1, 0, 0 ],
-[ 0.02, -0.15, 0, 0, 0, 1 ],
-[ 0.54, -0.67, 1, 0, 0, 0 ],
-[ 0.26, 0.91, 1, 0, 0, 0 ],
-[ 0.91, -1., 1, 0, 0, 0 ],
-[ -0.29, -0.25, 0, 0, 0, 1 ],
-[ 0.59, -0.46, 1, 0, 0, 0 ],
-[ -0.85, 0.17, 1, 0, 0, 0 ],
-[ -0.02, 0.93, 1, 0, 0, 0 ],
-[ 0.41, -0.5, 1, 0, 0, 0 ],
-[ 0.52, -0.03, 1, 0, 0, 0 ],
-[ 0.25, 0.71, 0, 1, 0, 0 ],
-[ -0.6, 0.31, 0, 0, 1, 0 ],
-[ -0.52, 0.29, 0, 0, 1, 0 ],
-[ 0.08, -0.98, 1, 0, 0, 0 ],
-[ -0.28, 0.13, 0, 0, 1, 0 ],
-[ -0.39, 0.65, 0, 0, 1, 0 ],
-[ 0.05, 0.16, 1, 0, 0, 0 ],
-[ 0.9400000000000001, -0.96, 1, 0, 0, 0 ],
-[ -0.22, -0.24, 0, 0, 0, 1 ],
-[ -0.47, 0.5600000000000001, 0, 0, 1, 0 ],
-[ 0.8100000000000001, -0.37, 1, 0, 0, 0 ],
-[ 0.76, -0.51, 1, 0, 0, 0 ],
-[ -0.61, -0.53, 1, 0, 0, 0 ],
-[ 0.86, -0.45, 1, 0, 0, 0 ],
-[ -0.8200000000000001, 0.49, 0, 0, 1, 0 ],
-[ -0.45, -0.38, 1, 0, 0, 0 ],
-[ 0.6, 0.39, 0, 1, 0, 0 ],
-[ 0.66, 0.75, 0, 1, 0, 0 ],
-[ -0.97, -0.59, 1, 0, 0, 0 ],
-[ -0.75, 0.12, 1, 0, 0, 0 ],
-[ -0.9500000000000001, 0.6900000000000001, 1, 0, 0, 0 ],
-[ 0.31, -0.74, 1, 0, 0, 0 ],
-[ 0.06, -0.6900000000000001, 0, 0, 0, 1 ],
-[ -0.62, 0.27, 0, 0, 1, 0 ],
-[ 0.72, 0.29, 0, 1, 0, 0 ],
-[ -0.5, -0.8200000000000001, 1, 0, 0, 0 ],
-[ 0.68, -0.8200000000000001, 1, 0, 0, 0 ],
-[ -0.07000000000000001, -0.67, 0, 0, 0, 1 ],
-[ 0.77, 0.48, 0, 1, 0, 0 ],
-[ 0.2, -0.84, 1, 0, 0, 0 ],
-[ -0.89, -0.58, 1, 0, 0, 0 ],
-[ -0.09, -0.41, 0, 0, 0, 1 ],
-[ -0.63, -0.27, 1, 0, 0, 0 ],
-[ 0.51, -0.54, 1, 0, 0, 0 ],
-[ -0.86, 0.84, 1, 0, 0, 0 ],
-[ 0.3, 0.6900000000000001, 0, 1, 0, 0 ],
-[ 0.8300000000000001, 0.48, 0, 1, 0, 0 ],
-[ -0.08, -0.79, 0, 0, 0, 1 ],
-[ -0.48, -0.08, 1, 0, 0, 0 ],
-[ -0.47, 0.43, 0, 0, 1, 0 ],
-[ -0.04, -0.29, 0, 0, 0, 1 ],
-[ -0.98, 0.55, 1, 0, 0, 0 ],
-[ -0.73, -0.68, 1, 0, 0, 0 ],
-[ -0.99, -0.27, 1, 0, 0, 0 ],
-[ -0.47, -1., 1, 0, 0, 0 ],
-[ -0.79, 0.99, 1, 0, 0, 0 ],
-[ -0.04, 0.5600000000000001, 1, 0, 0, 0 ],
-[ -0.52, 0.28, 0, 0, 1, 0 ],
-[ 0.97, -0.86, 1, 0, 0, 0 ],
-[ -0.06, -0.85, 0, 0, 0, 1 ],
-[ 0.91, 0.13, 1, 0, 0, 0 ],
-[ 0.05, 0.75, 1, 0, 0, 0 ],
-[ 0.97, 0.64, 1, 0, 0, 0 ],
-[ 0.13, 0.73, 0, 1, 0, 0 ],
-[ -0.39, 0.63, 0, 0, 1, 0 ],
-[ -0.49, -0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.53, -0.75, 1, 0, 0, 0 ],
-[ 0.02, 0.31, 1, 0, 0, 0 ],
-[ -0.25, -0.91, 1, 0, 0, 0 ],
-[ 0.8100000000000001, -0.89, 1, 0, 0, 0 ],
-[ 0.79, -0.72, 1, 0, 0, 0 ],
-[ 0.66, -0.86, 1, 0, 0, 0 ],
-[ 0.8100000000000001, -0.96, 1, 0, 0, 0 ],
-[ -0.42, 0.52, 0, 0, 1, 0 ],
-[ -0.73, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.37, -0.76, 1, 0, 0, 0 ],
-[ 0.5, 0.8200000000000001, 0, 1, 0, 0 ],
-[ 0.66, 0.41, 0, 1, 0, 0 ],
-[ -0.88, -0.8, 1, 0, 0, 0 ],
-[ -0.41, -1., 1, 0, 0, 0 ],
-[ -0.97, 0.12, 1, 0, 0, 0 ],
-[ -0.93, -0.15, 1, 0, 0, 0 ],
-[ 0.05, 0.13, 1, 0, 0, 0 ],
-[ 0.74, 0.16, 0, 1, 0, 0 ],
-[ 0.72, 0.37, 0, 1, 0, 0 ],
-[ 0.33, 0.3, 0, 1, 0, 0 ],
-[ -0.28, -0.76, 1, 0, 0, 0 ],
-[ 0.6900000000000001, -0.85, 1, 0, 0, 0 ],
-[ -0.5, 0.8, 0, 0, 1, 0 ],
-[ 0.23, -0.63, 1, 0, 0, 0 ],
-[ -0.64, -0.88, 1, 0, 0, 0 ],
-[ -0.96, 0.39, 1, 0, 0, 0 ],
-[ -0.55, -0.16, 1, 0, 0, 0 ],
-[ -0.8, 0.8100000000000001, 0, 0, 1, 0 ],
-[ -0.18, 0.89, 1, 0, 0, 0 ],
-[ 1., -0.54, 1, 0, 0, 0 ],
-[ 0.3, -0.05, 0, 0, 0, 1 ],
-[ 0.03, 0.52, 1, 0, 0, 0 ],
-[ 0.08, -0.84, 0, 0, 0, 1 ],
-[ 0.13, -0.2, 0, 0, 0, 1 ],
-[ 0.74, 0.22, 0, 1, 0, 0 ],
-[ 0.61, -0.79, 1, 0, 0, 0 ],
-[ -0.73, 0.21, 0, 0, 1, 0 ],
-[ -0.71, 0.8, 0, 0, 1, 0 ],
-[ 0.3, -0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.18, -0.71, 1, 0, 0, 0 ],
-[ 0.35, 0.6, 0, 1, 0, 0 ],
-[ 0.7000000000000001, -0.78, 1, 0, 0, 0 ],
-[ 0.85, -0.5, 1, 0, 0, 0 ],
-[ -0.44, -0.15, 1, 0, 0, 0 ],
-[ 0.88, 0.8, 1, 0, 0, 0 ],
-[ 0.27, -0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.39, -0.25, 1, 0, 0, 0 ],
-[ -0.47, 0.74, 0, 0, 1, 0 ],
-[ 0.16, 0.91, 1, 0, 0, 0 ],
-[ -0.9, -0.75, 1, 0, 0, 0 ],
-[ 0.7000000000000001, 0.29, 0, 1, 0, 0 ],
-[ 0.8, -0.03, 1, 0, 0, 0 ],
-[ 0.88, 0.71, 0, 1, 0, 0 ],
-[ -0.8100000000000001, 0.8200000000000001, 0, 0, 1, 0 ],
-[ 0.66, 0.35, 0, 1, 0, 0 ],
-[ 0.53, -0.15, 1, 0, 0, 0 ],
-[ 0.5700000000000001, -0.16, 1, 0, 0, 0 ],
-[ -0.47, -0.63, 1, 0, 0, 0 ],
-[ -0.35, 0.7000000000000001, 0, 0, 1, 0 ],
-[ -0.05, 0.47, 1, 0, 0, 0 ],
-[ 0.9, -0.43, 1, 0, 0, 0 ],
-[ -0.48, -0.84, 1, 0, 0, 0 ],
-[ -0.16, -0.84, 1, 0, 0, 0 ],
-[ 0.8300000000000001, 0.38, 0, 1, 0, 0 ],
-[ 0.34, -0.37, 1, 0, 0, 0 ],
-[ 0.73, -0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.01, -0.68, 0, 0, 0, 1 ],
-[ 0.88, 0.68, 0, 1, 0, 0 ],
-[ 0.75, -0.96, 1, 0, 0, 0 ],
-[ -0.08, -0.6900000000000001, 0, 0, 0, 1 ],
-[ -0.9, 0.24, 1, 0, 0, 0 ],
-[ -0.14, -0.24, 0, 0, 0, 1 ],
-[ -0.5600000000000001, 0.37, 0, 0, 1, 0 ],
-[ 0.11, -0.77, 0, 0, 0, 1 ],
-[ 0.99, -0.19, 1, 0, 0, 0 ],
-[ 0.3, 0.26, 0, 1, 0, 0 ],
-[ -0.7000000000000001, 0.08, 1, 0, 0, 0 ],
-[ 0.19, 0.1, 1, 0, 0, 0 ],
-[ 0.44, 0.46, 0, 1, 0, 0 ],
-[ -0.91, -0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.99, -0.5600000000000001, 1, 0, 0, 0 ],
-[ 0.78, 0.91, 1, 0, 0, 0 ],
-[ -0.91, -0.78, 1, 0, 0, 0 ],
-[ -0.51, 0.86, 0, 0, 1, 0 ],
-[ 0.72, -0.52, 1, 0, 0, 0 ],
-[ 0.25, -0.46, 0, 0, 0, 1 ],
-[ 0.89, -0.22, 1, 0, 0, 0 ],
-[ 0.25, 0.25, 0, 1, 0, 0 ],
-[ 0.88, -0.05, 1, 0, 0, 0 ],
-[ -0.8, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.5, -0.6900000000000001, 1, 0, 0, 0 ],
-[ -0.32, -0.88, 1, 0, 0, 0 ],
-[ 0.38, -0.1, 0, 0, 0, 1 ],
-[ 0.46, -0.64, 1, 0, 0, 0 ],
-[ -0.77, -0.78, 1, 0, 0, 0 ],
-[ 0.32, -0.19, 0, 0, 0, 1 ],
-[ 0.8100000000000001, 0.23, 0, 1, 0, 0 ],
-[ 0.59, -0.03, 1, 0, 0, 0 ],
-[ -0.14, -0.46, 0, 0, 0, 1 ],
-[ 0.74, -0.51, 1, 0, 0, 0 ],
-[ -1., -0.9400000000000001, 1, 0, 0, 0 ],
-[ 0.28, -0.5700000000000001, 1, 0, 0, 0 ],
-[ 0.7000000000000001, -0.24, 1, 0, 0, 0 ],
-[ 0.7000000000000001, -0.17, 1, 0, 0, 0 ],
-[ -0.14, 0.04, 1, 0, 0, 0 ],
-[ -0.28, 0.48, 0, 0, 1, 0 ],
-[ 0.71, -0.3, 1, 0, 0, 0 ],
-[ -0.14, -0.14, 0, 0, 0, 1 ],
-[ -0.5600000000000001, 0.67, 0, 0, 1, 0 ],
-[ -0.31, 0.43, 0, 0, 1, 0 ],
-[ 0.98, -0.55, 1, 0, 0, 0 ],
-[ -0.71, -0.51, 1, 0, 0, 0 ],
-[ -0.9400000000000001, -0.59, 1, 0, 0, 0 ],
-[ 0.43, 0.08, 0, 1, 0, 0 ],
-[ -0.37, 0.04, 1, 0, 0, 0 ],
-[ 0.6, 0.88, 0, 1, 0, 0 ],
-[ -0.22, 0.09, 1, 0, 0, 0 ],
-[ 0.75, -0.86, 1, 0, 0, 0 ],
-[ 0.49, 1., 1, 0, 0, 0 ],
-[ -0.39, 0.14, 0, 0, 1, 0 ],
-[ 0.97, -0.37, 1, 0, 0, 0 ],
-[ 0.87, 0.5, 0, 1, 0, 0 ],
-[ 0.91, -0.67, 1, 0, 0, 0 ],
-[ -0.86, -0.8300000000000001, 1, 0, 0, 0 ],
-[ 0.26, 0.86, 0, 1, 0, 0 ],
-[ -0.31, -0.36, 0, 0, 0, 1 ],
-[ 0.25, -0.42, 0, 0, 0, 1 ],
-[ 0.66, -0.36, 1, 0, 0, 0 ],
-[ -0.05, -0.41, 0, 0, 0, 1 ],
-[ -0.47, -0.12, 1, 0, 0, 0 ],
-[ -0.16, -0.29, 0, 0, 0, 1 ],
-[ -0.55, 0.75, 0, 0, 1, 0 ],
-[ 0.8100000000000001, 0.85, 1, 0, 0, 0 ],
-[ 0.96, 0.79, 1, 0, 0, 0 ],
-[ -0.15, -0.09, 0, 0, 0, 1 ],
-[ -0.98, -0.59, 1, 0, 0, 0 ],
-[ 0.8300000000000001, -0.19, 1, 0, 0, 0 ],
-[ -0.12, 0.8, 1, 0, 0, 0 ],
-[ 0.73, -0.8200000000000001, 1, 0, 0, 0 ],
-[ 0.85, -0.46, 1, 0, 0, 0 ],
-[ 0.99, 0.96, 1, 0, 0, 0 ],
-[ -0.15, -0.5700000000000001, 0, 0, 0, 1 ],
-[ 0.41, 0.25, 0, 1, 0, 0 ],
-[ 0.6, 0.77, 0, 1, 0, 0 ],
-[ 0.47, 0.06, 0, 1, 0, 0 ],
-[ 0.76, -0.51, 1, 0, 0, 0 ],
-[ 0.96, -0.76, 1, 0, 0, 0 ],
-[ -0.49, 0.92, 0, 0, 1, 0 ],
-[ 0.99, 0.58, 1, 0, 0, 0 ],
-[ 0.19, -0.63, 1, 0, 0, 0 ],
-[ 0.37, 0.06, 1, 0, 0, 0 ],
-[ 0.45, 0.64, 0, 1, 0, 0 ],
-[ 0.68, 0.6, 0, 1, 0, 0 ],
-[ 0.26, -0.88, 1, 0, 0, 0 ],
-[ 0.42, -0.02, 0, 0, 0, 1 ],
-[ 0.05, 0.68, 1, 0, 0, 0 ],
-[ -0.25, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.13, 0.79, 1, 0, 0, 0 ],
-[ 0.12, -0.09, 0, 0, 0, 1 ],
-[ -0.44, -0.43, 1, 0, 0, 0 ],
-[ -0.89, 0.1, 1, 0, 0, 0 ],
-[ 0.13, 0.08, 1, 0, 0, 0 ],
-[ 0.76, -0.23, 1, 0, 0, 0 ],
-[ 0.79, 0.67, 0, 1, 0, 0 ],
-[ -0.27, -0.92, 1, 0, 0, 0 ],
-[ 0.16, 0.11, 1, 0, 0, 0 ],
-[ -1., -0.4, 1, 0, 0, 0 ],
-[ -0.5700000000000001, 0.8, 0, 0, 1, 0 ],
-[ -0.91, -0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.53, 0.92, 0, 1, 0, 0 ],
-[ -0.01, -0.47, 0, 0, 0, 1 ],
-[ -0.43, -0.12, 0, 0, 0, 1 ],
-[ 0.16, 0.16, 1, 0, 0, 0 ],
-[ 0.68, -0.04, 1, 0, 0, 0 ],
-[ -0.91, 0.71, 1, 0, 0, 0 ],
-[ -0.97, -0.17, 1, 0, 0, 0 ],
-[ 0.27, 0.7000000000000001, 0, 1, 0, 0 ],
-[ -0.35, -0.33, 1, 0, 0, 0 ],
-[ 0.3, -0.12, 0, 0, 0, 1 ],
-[ 0.78, -0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.8100000000000001, 0.36, 0, 0, 1, 0 ],
-[ 0.85, -0.87, 1, 0, 0, 0 ],
-[ 0.7000000000000001, 0.18, 0, 1, 0, 0 ],
-[ -0.32, -0.15, 0, 0, 0, 1 ],
-[ -0.73, 0.14, 0, 0, 1, 0 ],
-[ 0.05, 0.23, 1, 0, 0, 0 ],
-[ -0.84, 0.44, 0, 0, 1, 0 ],
-[ 0.12, 0.6, 0, 1, 0, 0 ],
-[ 0.5600000000000001, -0.63, 1, 0, 0, 0 ],
-[ -0.18, 0.6, 0, 0, 1, 0 ],
-[ -0.55, 0.5700000000000001, 0, 0, 1, 0 ],
-[ -0.03, 0.46, 1, 0, 0, 0 ],
-[ -0.41, 0.37, 0, 0, 1, 0 ],
-[ -0.7000000000000001, 0.27, 0, 0, 1, 0 ],
-[ -0.33, 0.74, 0, 0, 1, 0 ],
-[ -0.6, -0.98, 1, 0, 0, 0 ],
-[ 0.9500000000000001, 0.23, 1, 0, 0, 0 ],
-[ -0.46, -0.59, 1, 0, 0, 0 ],
-[ 0.73, -0.33, 1, 0, 0, 0 ],
-[ 0.06, 0.05, 1, 0, 0, 0 ],
-[ -0.37, -0.4, 1, 0, 0, 0 ],
-[ 0.98, -0.8300000000000001, 1, 0, 0, 0 ],
-[ 0.47, -0.8, 1, 0, 0, 0 ],
-[ -0.63, 0.96, 1, 0, 0, 0 ],
-[ 0.58, 0.97, 1, 0, 0, 0 ],
-[ -0.6, 0.9, 0, 0, 1, 0 ],
-[ -0.8200000000000001, 0.98, 1, 0, 0, 0 ],
-[ 0.24, -0.36, 0, 0, 0, 1 ],
-[ 0.98, -0.91, 1, 0, 0, 0 ],
-[ 0.49, 0.21, 0, 1, 0, 0 ],
-[ -0.07000000000000001, 0.04, 1, 0, 0, 0 ],
-[ 0.84, 0.42, 0, 1, 0, 0 ],
-[ -0.41, 0.77, 0, 0, 1, 0 ],
-[ 0.8200000000000001, 0.9, 1, 0, 0, 0 ],
-[ 0.36, -0.39, 1, 0, 0, 0 ],
-[ -0.74, -0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.85, -0.65, 1, 0, 0, 0 ],
-[ -1., -0.48, 1, 0, 0, 0 ],
-[ -0.3, -0.86, 1, 0, 0, 0 ],
-[ 0.9500000000000001, 0.11, 1, 0, 0, 0 ],
-[ -0.44, -0.12, 0, 0, 0, 1 ],
-[ -0.14, -0.26, 0, 0, 0, 1 ],
-[ -0.58, -0.2, 1, 0, 0, 0 ],
-[ -0.8300000000000001, 0.01, 1, 0, 0, 0 ],
-[ 0.76, -0.84, 1, 0, 0, 0 ],
-[ -0.06, 0.12, 1, 0, 0, 0 ],
-[ 0.72, -0.51, 1, 0, 0, 0 ],
-[ 0.99, -0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.46, 0.74, 0, 0, 1, 0 ],
-[ 0.38, -0.53, 1, 0, 0, 0 ],
-[ -0.44, 0.55, 0, 0, 1, 0 ],
-[ -0.28, 0.75, 0, 0, 1, 0 ],
-[ -0.02, 0.67, 1, 0, 0, 0 ],
-[ 0.71, 0.37, 0, 1, 0, 0 ],
-[ -0.76, -0.06, 1, 0, 0, 0 ],
-[ 0.12, -0.88, 1, 0, 0, 0 ],
-[ 0.5600000000000001, 0.61, 0, 1, 0, 0 ],
-[ 0.78, 0.44, 0, 1, 0, 0 ],
-[ -0.22, 0.02, 1, 0, 0, 0 ],
-[ -0.74, -0.66, 1, 0, 0, 0 ],
-[ -0.49, -0.85, 1, 0, 0, 0 ],
-[ -0.07000000000000001, 0.31, 1, 0, 0, 0 ],
-[ 0.44, -0.65, 1, 0, 0, 0 ],
-[ -0.08, -0.41, 0, 0, 0, 1 ],
-[ -0.4, -0.63, 1, 0, 0, 0 ],
-[ -0.34, 0.68, 0, 0, 1, 0 ],
-[ -0.15, 0.99, 1, 0, 0, 0 ],
-[ -0.99, -0.62, 1, 0, 0, 0 ],
-[ -0.11, -0.29, 0, 0, 0, 1 ],
-[ 0.04, -0.02, 0, 0, 0, 1 ],
-[ 0.77, 0.51, 0, 1, 0, 0 ],
-[ 0.35, -0.5, 1, 0, 0, 0 ],
-[ -0.59, 0.19, 0, 0, 1, 0 ],
-[ 0.5600000000000001, -0.77, 1, 0, 0, 0 ],
-[ 0.68, -0.44, 1, 0, 0, 0 ],
-[ -0.64, -0.73, 1, 0, 0, 0 ],
-[ -0.41, 0.26, 0, 0, 1, 0 ],
-[ 0.47, -0.54, 1, 0, 0, 0 ],
-[ -0.22, 0.79, 0, 0, 1, 0 ],
-[ 0.68, 0.9, 0, 1, 0, 0 ],
-[ 0.73, -0.88, 1, 0, 0, 0 ],
-[ -1., -0.93, 1, 0, 0, 0 ],
-[ -0.37, -1., 1, 0, 0, 0 ],
-[ 0.19, 0.79, 0, 1, 0, 0 ],
-[ 0.18, 0.91, 1, 0, 0, 0 ],
-[ -0.23, 0.61, 0, 0, 1, 0 ],
-[ -0.49, -0.16, 1, 0, 0, 0 ],
-[ 0.6, 0.49, 0, 1, 0, 0 ],
-[ 0.29, 0.59, 0, 1, 0, 0 ],
-[ 0.6, 0.49, 0, 1, 0, 0 ],
-[ -0.89, -0.09, 1, 0, 0, 0 ],
-[ -0.9, -0.09, 1, 0, 0, 0 ],
-[ 0.4, 0.59, 0, 1, 0, 0 ],
-[ -0.76, 0.16, 0, 0, 1, 0 ],
-[ 0.97, -0.17, 1, 0, 0, 0 ],
-[ -0.47, 0.58, 0, 0, 1, 0 ],
-[ -0.31, -0.29, 0, 0, 0, 1 ],
-[ -0.67, -0.42, 1, 0, 0, 0 ],
-[ -0.88, 0.88, 1, 0, 0, 0 ],
-[ -0.65, -0.75, 1, 0, 0, 0 ],
-[ 0.29, 0.17, 0, 1, 0, 0 ],
-[ 0.9400000000000001, -0.54, 1, 0, 0, 0 ],
-[ 1., -0.66, 1, 0, 0, 0 ],
-[ 0.39, -0.09, 0, 0, 0, 1 ],
-[ 0.78, 0.19, 0, 1, 0, 0 ],
-[ 0.42, 0.22, 0, 1, 0, 0 ],
-[ 0.12, -0.61, 0, 0, 0, 1 ],
-[ 0.28, 0.7000000000000001, 0, 1, 0, 0 ],
-[ -0.19, 0.6, 0, 0, 1, 0 ],
-[ -0.04, 0.55, 1, 0, 0, 0 ],
-[ -0.18, -0.34, 0, 0, 0, 1 ],
-[ -0.13, -0.8, 1, 0, 0, 0 ],
-[ 0.04, 0.48, 1, 0, 0, 0 ],
-[ 0.27, 0.37, 0, 1, 0, 0 ],
-[ 0.49, 0.16, 0, 1, 0, 0 ],
-[ 0.26, -0.13, 0, 0, 0, 1 ],
-[ -0.78, 0.8200000000000001, 0, 0, 1, 0 ],
-[ -0.08, 0.98, 1, 0, 0, 0 ],
-[ -0.65, -0.33, 1, 0, 0, 0 ],
-[ -0.46, -0.7000000000000001, 1, 0, 0, 0 ],
-[ -0.29, 0.93, 1, 0, 0, 0 ],
-[ -0.44, -0.61, 1, 0, 0, 0 ],
-[ -0.52, 0.03, 1, 0, 0, 0 ],
-[ 0.47, -0.61, 1, 0, 0, 0 ],
-[ -0.06, 0.12, 1, 0, 0, 0 ],
-[ 0.34, -0.62, 1, 0, 0, 0 ],
-[ 0.38, -0.91, 1, 0, 0, 0 ],
-[ -0.64, -0.88, 1, 0, 0, 0 ],
-[ -0.67, 0.33, 0, 0, 1, 0 ],
-[ -0.66, 0.01, 1, 0, 0, 0 ],
-[ 0.48, -0.78, 1, 0, 0, 0 ],
-[ -0.23, 0.06, 1, 0, 0, 0 ],
-[ -0.65, 0.5600000000000001, 0, 0, 1, 0 ],
-[ -0.36, 0.48, 0, 0, 1, 0 ],
-[ 0.9500000000000001, 0.88, 1, 0, 0, 0 ],
-[ -0.71, 0.78, 0, 0, 1, 0 ],
-[ -0.54, -0.05, 1, 0, 0, 0 ],
-[ 0.72, -0.74, 1, 0, 0, 0 ],
-[ 0.19, -0.84, 1, 0, 0, 0 ],
-[ -0.78, 0.92, 1, 0, 0, 0 ],
-[ 0.8100000000000001, 0.14, 1, 0, 0, 0 ],
-[ 0.28, 0.14, 0, 1, 0, 0 ],
-[ 0.66, -0.06, 1, 0, 0, 0 ],
-[ 0.93, 0.33, 1, 0, 0, 0 ],
-[ 0.52, 0.86, 0, 1, 0, 0 ],
-[ 0.7000000000000001, 0.6900000000000001, 0, 1, 0, 0 ],
-[ 0.4, 0.64, 0, 1, 0, 0 ],
-[ 0.6, 0.8200000000000001, 0, 1, 0, 0 ],
-[ 0.77, -0.66, 1, 0, 0, 0 ],
-[ 0.58, -0.13, 1, 0, 0, 0 ],
-[ -0.7000000000000001, 0.07000000000000001, 1, 0, 0, 0 ],
-[ -0.6, -0.49, 1, 0, 0, 0 ],
-[ 0.55, 0.1, 0, 1, 0, 0 ],
-[ 0.45, -0.75, 1, 0, 0, 0 ],
-[ -0.76, 0.58, 0, 0, 1, 0 ],
-[ 0.67, -0.8, 1, 0, 0, 0 ],
-[ -0.77, -0.4, 1, 0, 0, 0 ],
-[ 0.24, -0.06, 0, 0, 0, 1 ],
-[ 0.8300000000000001, 0.28, 0, 1, 0, 0 ],
-[ -0.06, -0.76, 0, 0, 0, 1 ],
-[ 0.23, 0.31, 0, 1, 0, 0 ],
-[ 0.07000000000000001, -0.84, 0, 0, 0, 1 ],
-[ -0.91, -0.5, 1, 0, 0, 0 ],
-[ -0.47, 0.89, 0, 0, 1, 0 ],
-[ -0.37, 0.76, 0, 0, 1, 0 ],
-[ -0.87, 0.6900000000000001, 0, 0, 1, 0 ],
-[ 0.53, -0.58, 1, 0, 0, 0 ],
-[ 0.02, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.4, -0.37, 1, 0, 0, 0 ],
-[ 0.75, -0.77, 1, 0, 0, 0 ],
-[ 0.71, 0.5700000000000001, 0, 1, 0, 0 ],
-[ -0.2, 0.29, 0, 0, 1, 0 ],
-[ -0.8100000000000001, 0.73, 0, 0, 1, 0 ],
-[ 0.71, 0.25, 0, 1, 0, 0 ],
-[ -0.33, -0.51, 1, 0, 0, 0 ],
-[ 0.54, -0.99, 1, 0, 0, 0 ],
-[ -0.85, 0.11, 1, 0, 0, 0 ],
-[ -0.39, -0.08, 0, 0, 0, 1 ],
-[ -0.99, -0.66, 1, 0, 0, 0 ],
-[ -0.19, -0.36, 0, 0, 0, 1 ],
-[ 0.9500000000000001, -0.98, 1, 0, 0, 0 ],
-[ -0.8, 0.28, 0, 0, 1, 0 ],
-[ 0.74, 0.8100000000000001, 0, 1, 0, 0 ],
-[ -0.31, 0.1, 0, 0, 1, 0 ],
-[ -0.08, 0.54, 0, 0, 1, 0 ],
-[ -0.72, -0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.65, -0.6900000000000001, 1, 0, 0, 0 ],
-[ -0.91, -0.53, 1, 0, 0, 0 ],
-[ 0.8200000000000001, 0.1, 1, 0, 0, 0 ],
-[ -0.67, -0.97, 1, 0, 0, 0 ],
-[ 0.98, -0.05, 1, 0, 0, 0 ],
-[ -0.31, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.02, 0.58, 1, 0, 0, 0 ],
-[ -0.78, 1., 1, 0, 0, 0 ],
-[ 0.03, -0.2, 0, 0, 0, 1 ],
-[ -0.8100000000000001, -0.62, 1, 0, 0, 0 ],
-[ -0.17, 0.02, 1, 0, 0, 0 ],
-[ 0.02, -0.07000000000000001, 0, 0, 0, 1 ],
-[ -0.11, -0.74, 0, 0, 0, 1 ],
-[ -0.05, -0.67, 0, 0, 0, 1 ],
-[ 0.68, -0.41, 1, 0, 0, 0 ],
-[ 0.42, 0.77, 0, 1, 0, 0 ],
-[ 0.48, -0.26, 1, 0, 0, 0 ],
-[ -0.66, -0.8, 1, 0, 0, 0 ],
-[ -0.73, -0.22, 1, 0, 0, 0 ],
-[ -0.48, -0.8200000000000001, 1, 0, 0, 0 ],
-[ -0.12, 0.02, 1, 0, 0, 0 ],
-[ 0.47, 0.03, 1, 0, 0, 0 ],
-[ 0.15, 0.32, 0, 1, 0, 0 ],
-[ -0.07000000000000001, -0.85, 0, 0, 0, 1 ],
-[ 0.29, 0.3, 0, 1, 0, 0 ],
-[ 0.8100000000000001, -0.06, 1, 0, 0, 0 ],
-[ -0.7000000000000001, -0.36, 1, 0, 0, 0 ],
-[ -0.14, 0.99, 1, 0, 0, 0 ],
-[ 0.06, 0.49, 0, 1, 0, 0 ],
-[ 0.25, 0.14, 0, 1, 0, 0 ],
-[ 0.8, 0.9, 1, 0, 0, 0 ],
-[ -0.43, 0.4, 0, 0, 1, 0 ],
-[ 0.28, -0.11, 0, 0, 0, 1 ],
-[ 0.63, -0.36, 1, 0, 0, 0 ],
-[ 0.7000000000000001, 0.59, 0, 1, 0, 0 ],
-[ 0.5, 0.5700000000000001, 0, 1, 0, 0 ],
-[ -0.38, -0.03, 0, 0, 0, 1 ],
-[ 0.93, 0.05, 1, 0, 0, 0 ],
-[ 0.97, -0.73, 1, 0, 0, 0 ],
-[ 0.65, 0.91, 0, 1, 0, 0 ],
-[ -0.85, -0.89, 1, 0, 0, 0 ],
-[ -0.91, 0.16, 1, 0, 0, 0 ],
-[ 0.9500000000000001, 0.85, 1, 0, 0, 0 ],
-[ -0.22, -0.58, 1, 0, 0, 0 ],
-[ 0.49, -0.66, 1, 0, 0, 0 ],
-[ 0.7000000000000001, -0.08, 1, 0, 0, 0 ],
-[ 0.47, 0.8200000000000001, 0, 1, 0, 0 ],
-[ 0.96, 0.7000000000000001, 1, 0, 0, 0 ],
-[ -0.63, 0.3, 0, 0, 1, 0 ],
-[ 0.25, -0.35, 0, 0, 0, 1 ],
-[ -0.16, 0.74, 0, 0, 1, 0 ],
-[ 0.46, 0.99, 1, 0, 0, 0 ],
-[ 0.35, -0.16, 0, 0, 0, 1 ],
-[ 0.26, 0.32, 0, 1, 0, 0 ],
-[ -0.63, 0.43, 0, 0, 1, 0 ],
-[ 0.34, 0.08, 1, 0, 0, 0 ],
-[ -0.47, -0.22, 1, 0, 0, 0 ],
-[ -0.24, 0.48, 0, 0, 1, 0 ],
-[ -0.48, -1., 1, 0, 0, 0 ],
-[ -0.93, -0.17, 1, 0, 0, 0 ],
-[ -0.93, 0.73, 1, 0, 0, 0 ],
-[ 0.37, -0.04, 0, 0, 0, 1 ],
-[ -0.23, 0.91, 1, 0, 0, 0 ],
-[ -0.62, -0.03, 1, 0, 0, 0 ],
-[ 0.39, -0.98, 1, 0, 0, 0 ],
-[ -0.4, -0.64, 1, 0, 0, 0 ],
-[ -0.66, 0.77, 0, 0, 1, 0 ],
-[ 0.27, 0.87, 0, 1, 0, 0 ],
-[ 0.8200000000000001, 0.76, 0, 1, 0, 0 ],
-[ 0.28, -0.76, 1, 0, 0, 0 ],
-[ 0.53, 0.92, 0, 1, 0, 0 ],
-[ -0.27, 0.39, 0, 0, 1, 0 ],
-[ -0.55, -0.88, 1, 0, 0, 0 ],
-[ -0.61, -0.79, 1, 0, 0, 0 ],
-[ -0.6, -0.37, 1, 0, 0, 0 ],
-[ 0.6900000000000001, 0.31, 0, 1, 0, 0 ],
-[ -0.12, 0.65, 0, 0, 1, 0 ],
-[ 0.73, -0.54, 1, 0, 0, 0 ],
-[ 0.64, -0.54, 1, 0, 0, 0 ],
-[ -0.14, -0.91, 1, 0, 0, 0 ],
-[ 0.17, 0.26, 0, 1, 0, 0 ],
-[ 0.04, 0.12, 1, 0, 0, 0 ],
-[ -0.8100000000000001, 0.8100000000000001, 0, 0, 1, 0 ],
-[ -0.36, 0.38, 0, 0, 1, 0 ],
-[ 0.27, 0.96, 1, 0, 0, 0 ],
-[ -0.99, 0.62, 1, 0, 0, 0 ],
-[ 0.5700000000000001, -0.88, 1, 0, 0, 0 ],
-[ 0.09, 0.84, 1, 0, 0, 0 ],
-[ -0.15, -0.88, 1, 0, 0, 0 ],
-[ -0.6900000000000001, 0.11, 0, 0, 1, 0 ],
-[ -0.65, 0.42, 0, 0, 1, 0 ],
-[ 0.47, -0.2, 1, 0, 0, 0 ],
-[ -0.25, 0.75, 0, 0, 1, 0 ],
-[ -0.63, -0.42, 1, 0, 0, 0 ],
-[ 0.84, -0.17, 1, 0, 0, 0 ],
-[ -0.29, 0.96, 1, 0, 0, 0 ],
-[ 0.74, 0.61, 0, 1, 0, 0 ],
-[ 0.25, -0.31, 0, 0, 0, 1 ],
-[ 0.29, -0.87, 1, 0, 0, 0 ],
-[ 0.26, -0.66, 1, 0, 0, 0 ],
-[ 0.76, -1., 1, 0, 0, 0 ],
-[ 0.8, 0.8200000000000001, 0, 1, 0, 0 ],
-[ 0.66, 0.15, 0, 1, 0, 0 ],
-[ -0.43, 0.87, 0, 0, 1, 0 ],
-[ 0.16, -0.97, 1, 0, 0, 0 ],
-[ 0.43, 0.36, 0, 1, 0, 0 ],
-[ 0.18, 0.09, 1, 0, 0, 0 ],
-[ 0.05, 0.59, 1, 0, 0, 0 ],
-[ -0.73, 0.64, 0, 0, 1, 0 ],
-[ 0.28, 0.73, 0, 1, 0, 0 ],
-[ 0.14, -0.28, 0, 0, 0, 1 ],
-[ 0.85, -0.41, 1, 0, 0, 0 ],
-[ 0.97, -0.2, 1, 0, 0, 0 ],
-[ 0.01, 0.87, 1, 0, 0, 0 ],
-[ 0.8, -0.97, 1, 0, 0, 0 ],
-[ -0.3, -0.84, 1, 0, 0, 0 ],
-[ 0.52, -0.76, 1, 0, 0, 0 ],
-[ -0.64, -0.6900000000000001, 1, 0, 0, 0 ],
-[ -0.9500000000000001, 0.37, 1, 0, 0, 0 ],
-[ -0.07000000000000001, -0.05, 0, 0, 0, 1 ],
-[ -0.75, -0.84, 1, 0, 0, 0 ],
-[ -0.37, 0.7000000000000001, 0, 0, 1, 0 ],
-[ -0.98, -0.49, 1, 0, 0, 0 ],
-[ -0.55, 0.8300000000000001, 0, 0, 1, 0 ],
-[ 0.67, -0.4, 1, 0, 0, 0 ],
-[ 0.13, 0.37, 0, 1, 0, 0 ],
-[ -0.73, -0.27, 1, 0, 0, 0 ],
-[ -0.25, 0.75, 0, 0, 1, 0 ],
-[ 0.92, -0.9, 1, 0, 0, 0 ],
-[ 0.7000000000000001, -0.08, 1, 0, 0, 0 ],
-[ -0.8, -0.41, 1, 0, 0, 0 ],
-[ 0.42, -0.08, 0, 0, 0, 1 ],
-[ 0.73, -0.97, 1, 0, 0, 0 ],
-[ -0.29, 0.1, 1, 0, 0, 0 ],
-[ 0.01, 0.87, 1, 0, 0, 0 ],
-[ -0.47, 0.23, 0, 0, 1, 0 ],
-[ 0.52, 1., 1, 0, 0, 0 ],
-[ 0.11, 0.26, 1, 0, 0, 0 ],
-[ 0.36, -0.96, 1, 0, 0, 0 ],
-[ 0.74, -0.91, 1, 0, 0, 0 ],
-[ -0.64, -0.29, 1, 0, 0, 0 ],
-[ 0.6, 0.43, 0, 1, 0, 0 ],
-[ 0.92, -0.45, 1, 0, 0, 0 ],
-[ 0.7000000000000001, 0.35, 0, 1, 0, 0 ],
-[ 0.07000000000000001, -0.17, 0, 0, 0, 1 ],
-[ -0.45, -0.28, 1, 0, 0, 0 ],
-[ -0.87, 0.52, 0, 0, 1, 0 ],
-[ -0.28, -0.73, 1, 0, 0, 0 ],
-[ -0.5600000000000001, 0.87, 0, 0, 1, 0 ],
-[ -0.9, 0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.79, -0.84, 1, 0, 0, 0 ],
-[ -0.44, 0.16, 0, 0, 1, 0 ],
-[ -0.01, -0.29, 0, 0, 0, 1 ],
-[ 0.16, 0.44, 0, 1, 0, 0 ],
-[ 0.22, 0.72, 0, 1, 0, 0 ],
-[ -0.91, 0.25, 1, 0, 0, 0 ],
-[ -1., 0.47, 1, 0, 0, 0 ],
-[ 0.21, -0.77, 1, 0, 0, 0 ],
-[ 0.35, -0.31, 1, 0, 0, 0 ],
-[ 0.37, -0.09, 0, 0, 0, 1 ],
-[ -0.73, -0.03, 1, 0, 0, 0 ],
-[ 0.12, -0.28, 0, 0, 0, 1 ],
-[ -0.74, 0.61, 0, 0, 1, 0 ],
-[ -0.8, -0.23, 1, 0, 0, 0 ],
-[ -0.87, -0.23, 1, 0, 0, 0 ],
-[ -0.12, 0.35, 0, 0, 1, 0 ],
-[ 0.48, -0.04, 0, 0, 0, 1 ],
-[ 0.19, -0.64, 1, 0, 0, 0 ],
-[ -0.52, -0.6, 1, 0, 0, 0 ],
-[ -0.02, -0.73, 0, 0, 0, 1 ],
-[ -0.9, -0.18, 1, 0, 0, 0 ],
-[ 0.6, 0.72, 0, 1, 0, 0 ],
-[ 0.11, -0.36, 0, 0, 0, 1 ],
-[ -0.09, 0.2, 1, 0, 0, 0 ],
-[ 0.91, 0.24, 1, 0, 0, 0 ],
-[ 0.05, 0.25, 1, 0, 0, 0 ],
-[ 1., 0.88, 1, 0, 0, 0 ],
-[ 0.18, -0.25, 0, 0, 0, 1 ],
-[ -0.03, -0.42, 0, 0, 0, 1 ],
-[ 0.66, 0.91, 0, 1, 0, 0 ],
-[ -0.4, 0.67, 0, 0, 1, 0 ],
-[ -0.13, -0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.85, -0.71, 1, 0, 0, 0 ],
-[ -0.04, -0.53, 0, 0, 0, 1 ],
-[ 0.1, -0.74, 0, 0, 0, 1 ],
-[ 0.31, -0.43, 1, 0, 0, 0 ],
-[ 0.53, 0.53, 0, 1, 0, 0 ],
-[ -0.85, 0.62, 0, 0, 1, 0 ],
-[ 0.13, -0.14, 0, 0, 0, 1 ],
-[ 0.66, 0.4, 0, 1, 0, 0 ],
-[ -0.35, -0.43, 1, 0, 0, 0 ],
-[ 0.67, -0.77, 1, 0, 0, 0 ],
-[ 0.51, -0.49, 1, 0, 0, 0 ],
-[ 0.63, 0.48, 0, 1, 0, 0 ],
-[ -0.73, -0.3, 1, 0, 0, 0 ],
-[ 0.01, -0.76, 0, 0, 0, 1 ],
-[ 0.38, 0.4, 0, 1, 0, 0 ],
-[ -0.63, -0.39, 1, 0, 0, 0 ],
-[ 0.32, -0.11, 0, 0, 0, 1 ],
-[ -0.62, -0.48, 1, 0, 0, 0 ],
-[ 0.54, -0.92, 1, 0, 0, 0 ],
-[ -0.35, -0.07000000000000001, 0, 0, 0, 1 ],
-[ -0.09, -0.71, 0, 0, 0, 1 ],
-[ -0.14, -0.45, 0, 0, 0, 1 ],
-[ -0.85, -0.75, 1, 0, 0, 0 ],
-[ -0.5700000000000001, 0.6, 0, 0, 1, 0 ],
-[ 0.46, 0.23, 0, 1, 0, 0 ],
-[ -0.4, 0.61, 0, 0, 1, 0 ],
-[ 0.52, -0.53, 1, 0, 0, 0 ],
-[ 0.58, 0.91, 0, 1, 0, 0 ],
-[ -0.66, 0.22, 0, 0, 1, 0 ],
-[ -0.77, 0.89, 1, 0, 0, 0 ],
-[ 0.87, -0.09, 1, 0, 0, 0 ],
-[ -0.6, 0.39, 0, 0, 1, 0 ],
-[ 0.49, -0.17, 1, 0, 0, 0 ],
-[ 0.86, 0.64, 0, 1, 0, 0 ],
-[ -0.46, -0.66, 1, 0, 0, 0 ],
-[ 0.08, -0.13, 0, 0, 0, 1 ],
-[ 0.9500000000000001, 0.19, 1, 0, 0, 0 ],
-[ 0.36, 0.29, 0, 1, 0, 0 ],
-[ 0.5, -0.06, 1, 0, 0, 0 ],
-[ 0.88, 0.74, 1, 0, 0, 0 ],
-[ 0.28, 0.01, 1, 0, 0, 0 ],
-[ 0.38, 0.51, 0, 1, 0, 0 ],
-[ 0.55, 0.08, 0, 1, 0, 0 ],
-[ 0.51, -0.54, 1, 0, 0, 0 ],
-[ 0.48, 0.99, 1, 0, 0, 0 ],
-[ -0.35, 0.86, 0, 0, 1, 0 ],
-[ -0.36, 0.28, 0, 0, 1, 0 ],
-[ 0.35, -0.46, 1, 0, 0, 0 ],
-[ -0.79, -0.55, 1, 0, 0, 0 ],
-[ 0.5, 0.98, 1, 0, 0, 0 ],
-[ -0.31, -0.6900000000000001, 1, 0, 0, 0 ],
-[ 0.38, -0.99, 1, 0, 0, 0 ],
-[ 0.73, 0.68, 0, 1, 0, 0 ],
-[ -0.47, -0.88, 1, 0, 0, 0 ],
-[ 0.97, -0.03, 1, 0, 0, 0 ],
-[ 0.9500000000000001, -0.9400000000000001, 1, 0, 0, 0 ],
-[ 0.5600000000000001, -0.04, 1, 0, 0, 0 ],
-[ -0.58, -0.64, 1, 0, 0, 0 ],
-[ 0.6, -0.68, 1, 0, 0, 0 ],
-[ 0.44, -0.55, 1, 0, 0, 0 ],
-[ 0.76, -0.88, 1, 0, 0, 0 ],
-[ 0.64, 0.87, 0, 1, 0, 0 ],
-[ -0.76, -0.98, 1, 0, 0, 0 ],
-[ -0.45, 0.68, 0, 0, 1, 0 ],
-[ 0.22, -0.8, 1, 0, 0, 0 ],
-[ 0.26, -0.07000000000000001, 0, 0, 0, 1 ],
-[ -0.11, 0.7000000000000001, 0, 0, 1, 0 ],
-[ 0.93, -0.54, 1, 0, 0, 0 ],
-[ 0.97, -0.9, 1, 0, 0, 0 ],
-[ -0.36, -0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.68, 0.79, 0, 0, 1, 0 ],
-[ -0.35, -0.43, 1, 0, 0, 0 ],
-[ 0.61, -0.16, 1, 0, 0, 0 ],
-[ 0.53, 0.64, 0, 1, 0, 0 ],
-[ -0.21, -0.14, 0, 0, 0, 1 ],
-[ -0.52, 0.76, 0, 0, 1, 0 ],
-[ -0.16, 0.2, 1, 0, 0, 0 ],
-[ 0.53, -0.9400000000000001, 1, 0, 0, 0 ],
-[ 0.98, -0.97, 1, 0, 0, 0 ],
-[ 0.06, -0.19, 0, 0, 0, 1 ],
-[ -0.75, -0.72, 1, 0, 0, 0 ],
-[ -0.24, -0.09, 0, 0, 0, 1 ],
-[ 0.34, 0.97, 1, 0, 0, 0 ],
-[ -0.5700000000000001, 0.64, 0, 0, 1, 0 ],
-[ -0.8300000000000001, -0.67, 1, 0, 0, 0 ],
-[ -0.15, -0.05, 0, 0, 0, 1 ],
-[ 0.18, -0.44, 0, 0, 0, 1 ],
-[ 0.28, -0.41, 0, 0, 0, 1 ],
-[ 0.9400000000000001, 0.39, 1, 0, 0, 0 ],
-[ 0.14, -0.6, 0, 0, 0, 1 ],
-[ -0.08, 0.8200000000000001, 1, 0, 0, 0 ],
-[ -0.74, 0.38, 0, 0, 1, 0 ],
-[ -0.53, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.2, -0.17, 0, 0, 0, 1 ],
-[ 0.88, 0.71, 0, 1, 0, 0 ],
-[ 0.99, 0.52, 1, 0, 0, 0 ],
-[ -0.29, 0.5, 0, 0, 1, 0 ],
-[ 0.32, -0.53, 1, 0, 0, 0 ],
-[ 0.15, -0.17, 0, 0, 0, 1 ],
-[ -0.99, -0.21, 1, 0, 0, 0 ],
-[ 0.9400000000000001, 0.26, 1, 0, 0, 0 ],
-[ 0.42, -0.5700000000000001, 1, 0, 0, 0 ],
-[ -0.41, 0.14, 0, 0, 1, 0 ],
-[ 0.18, -0.17, 0, 0, 0, 1 ],
-[ -0.97, -0.52, 1, 0, 0, 0 ],
-[ -0.15, 0.79, 1, 0, 0, 0 ],
-[ -0.64, 0.11, 0, 0, 1, 0 ],
-[ -0.32, -0.6, 1, 0, 0, 0 ],
-[ 0.75, -0.66, 1, 0, 0, 0 ],
-[ 0.61, 0.14, 0, 1, 0, 0 ],
-[ 0.45, -0.44, 1, 0, 0, 0 ],
-[ 0.23, 0.72, 0, 1, 0, 0 ],
-[ 0.04, -0.46, 0, 0, 0, 1 ],
-[ 0.06, -0.12, 0, 0, 0, 1 ],
-[ 0.54, -0.49, 1, 0, 0, 0 ],
-[ 0.34, -0.29, 0, 0, 0, 1 ],
-[ -0.5600000000000001, -0.52, 1, 0, 0, 0 ],
-[ -0.46, -0.6, 1, 0, 0, 0 ],
-[ -0.92, -0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.32, 0.41, 0, 1, 0, 0 ],
-[ -0.37, 0.87, 0, 0, 1, 0 ],
-[ -0.15, 0.73, 0, 0, 1, 0 ],
-[ -0.38, 0.19, 0, 0, 1, 0 ],
-[ 0.65, 0.29, 0, 1, 0, 0 ],
-[ 0.25, -0.32, 0, 0, 0, 1 ],
-[ -0.05, 0.77, 1, 0, 0, 0 ],
-[ -0.59, 0.8100000000000001, 0, 0, 1, 0 ],
-[ 0.25, -0.61, 1, 0, 0, 0 ],
-[ 0.42, -0.41, 1, 0, 0, 0 ],
-[ 0.63, 0.8, 0, 1, 0, 0 ],
-[ 0.22, -0.5, 0, 0, 0, 1 ],
-[ 0.37, -0.98, 1, 0, 0, 0 ],
-[ -0.73, 0.91, 1, 0, 0, 0 ],
-[ -0.97, -0.61, 1, 0, 0, 0 ],
-[ -0.35, 0.36, 0, 0, 1, 0 ],
-[ -0.6900000000000001, 0.68, 0, 0, 1, 0 ],
-[ -0.35, -0.77, 1, 0, 0, 0 ],
-[ -0.28, -0.71, 1, 0, 0, 0 ],
-[ -0.5700000000000001, -0.54, 1, 0, 0, 0 ],
-[ 0.07000000000000001, 0.85, 1, 0, 0, 0 ],
-[ -0.17, 0.97, 1, 0, 0, 0 ],
-[ 0.08, -0.27, 0, 0, 0, 1 ],
-[ 0.67, -0.92, 1, 0, 0, 0 ],
-[ -0.36, 0.64, 0, 0, 1, 0 ],
-[ 0.13, -0.45, 0, 0, 0, 1 ],
-[ 0.26, 0.51, 0, 1, 0, 0 ],
-[ 0.29, -0.79, 1, 0, 0, 0 ],
-[ 0.75, -0.45, 1, 0, 0, 0 ],
-[ 0.02, -0.71, 0, 0, 0, 1 ],
-[ 0.4, -0.75, 1, 0, 0, 0 ],
-[ -0.29, 0.8300000000000001, 0, 0, 1, 0 ],
-[ -0.8300000000000001, 0.8200000000000001, 1, 0, 0, 0 ],
-[ 0.9400000000000001, -0.46, 1, 0, 0, 0 ],
-[ -0.19, 0.12, 1, 0, 0, 0 ],
-[ -0.88, -0.5600000000000001, 1, 0, 0, 0 ],
-[ 0.23, 0.25, 0, 1, 0, 0 ],
-[ 0.6, 0.24, 0, 1, 0, 0 ],
-[ -0.99, 0.55, 1, 0, 0, 0 ],
-[ -0.78, 0.5700000000000001, 0, 0, 1, 0 ],
-[ -0.34, 0.47, 0, 0, 1, 0 ],
-[ -0.01, -0.96, 0, 0, 0, 1 ],
-[ -0.34, 0.59, 0, 0, 1, 0 ],
-[ 0.68, -0.97, 1, 0, 0, 0 ],
-[ 0.6, 0.46, 0, 1, 0, 0 ],
-[ -0.89, -0.8, 1, 0, 0, 0 ],
-[ -0.21, -0.62, 1, 0, 0, 0 ],
-[ 0.64, -0.72, 1, 0, 0, 0 ],
-[ -0.8, 0.97, 1, 0, 0, 0 ],
-[ -0.3, -0.47, 1, 0, 0, 0 ],
-[ -0.8100000000000001, -0.22, 1, 0, 0, 0 ],
-[ -0.02, -0.04, 0, 0, 0, 1 ],
-[ 0.31, -0.5, 1, 0, 0, 0 ],
-[ 0.16, -0.65, 0, 0, 0, 1 ],
-[ -0.99, -0.21, 1, 0, 0, 0 ],
-[ -0.37, -0.19, 0, 0, 0, 1 ],
-[ -0.59, 1., 1, 0, 0, 0 ],
-[ 0.88, -0.19, 1, 0, 0, 0 ],
-[ 0.13, -0.63, 0, 0, 0, 1 ],
-[ -0.61, 0.78, 0, 0, 1, 0 ],
-[ -0.5600000000000001, 0.18, 0, 0, 1, 0 ],
-[ -0.45, 0.43, 0, 0, 1, 0 ],
-[ 0.8300000000000001, -0.6, 1, 0, 0, 0 ],
-[ -0.49, -0.72, 1, 0, 0, 0 ],
-[ -0.36, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.03, 0.16, 1, 0, 0, 0 ],
-[ -0.55, -0.51, 1, 0, 0, 0 ],
-[ -0.39, 0.67, 0, 0, 1, 0 ],
-[ -0.21, -0.01, 0, 0, 0, 1 ],
-[ 0.21, -0.18, 0, 0, 0, 1 ],
-[ 0.9500000000000001, 0.5600000000000001, 1, 0, 0, 0 ],
-[ 0.9, 0.86, 1, 0, 0, 0 ],
-[ 0.92, -0.88, 1, 0, 0, 0 ],
-[ 0.32, 0.2, 0, 1, 0, 0 ],
-[ 0.99, -0.34, 1, 0, 0, 0 ],
-[ -0.5700000000000001, -0.71, 1, 0, 0, 0 ],
-[ -0.8100000000000001, 0.65, 0, 0, 1, 0 ],
-[ 0.77, -0.89, 1, 0, 0, 0 ],
-[ -0.24, 0.1, 1, 0, 0, 0 ],
-[ -0.15, 0.87, 1, 0, 0, 0 ],
-[ 0.23, 0.8300000000000001, 0, 1, 0, 0 ],
-[ -0.68, -0.08, 1, 0, 0, 0 ],
-[ -0.44, 0.78, 0, 0, 1, 0 ],
-[ 0.9500000000000001, 0.04, 1, 0, 0, 0 ],
-[ -0.88, 0.55, 0, 0, 1, 0 ],
-[ -0.36, -0.41, 1, 0, 0, 0 ],
-[ 0.89, -0.67, 1, 0, 0, 0 ],
-[ 0.38, 0.66, 0, 1, 0, 0 ],
-[ 0.09, 0.21, 1, 0, 0, 0 ],
-[ -0.58, 0.32, 0, 0, 1, 0 ],
-[ -0.34, 0.25, 0, 0, 1, 0 ],
-[ 0.89, 0.58, 0, 1, 0, 0 ],
-[ 0.29, -0.99, 1, 0, 0, 0 ],
-[ -0.3, -0.19, 0, 0, 0, 1 ],
-[ 0.1, 0.36, 0, 1, 0, 0 ],
-[ -0.6900000000000001, 0.76, 0, 0, 1, 0 ],
-[ 0.92, 0.5600000000000001, 0, 1, 0, 0 ],
-[ -0.39, -0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.45, -0.62, 1, 0, 0, 0 ],
-[ -0.28, -0.41, 0, 0, 0, 1 ],
-[ -0.5, 0.07000000000000001, 0, 0, 1, 0 ],
-[ 0.06, 0.5600000000000001, 0, 1, 0, 0 ],
-[ 0.29, -0.2, 0, 0, 0, 1 ],
-[ -0.63, 0.46, 0, 0, 1, 0 ],
-[ -0.92, 0.17, 1, 0, 0, 0 ],
-[ 0.49, -0.39, 1, 0, 0, 0 ],
-[ 0.48, 0.51, 0, 1, 0, 0 ],
-[ 0.76, -0.62, 1, 0, 0, 0 ],
-[ -0.88, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.37, -0.67, 1, 0, 0, 0 ],
-[ -0.5700000000000001, -0.04, 1, 0, 0, 0 ],
-[ -0.3, -0.59, 1, 0, 0, 0 ],
-[ 0.11, 0.02, 1, 0, 0, 0 ],
-[ 0.65, 0.91, 0, 1, 0, 0 ],
-[ 0.76, -0.44, 1, 0, 0, 0 ],
-[ 1., -0.84, 1, 0, 0, 0 ],
-[ -0.91, -0.37, 1, 0, 0, 0 ],
-[ 0.87, 0.11, 1, 0, 0, 0 ],
-[ -0.84, 0.5700000000000001, 0, 0, 1, 0 ],
-[ -0.42, -0.73, 1, 0, 0, 0 ],
-[ 0.48, 0.25, 0, 1, 0, 0 ],
-[ -0.73, -0.92, 1, 0, 0, 0 ],
-[ -0.54, -0.66, 1, 0, 0, 0 ],
-[ -0.28, 0.76, 0, 0, 1, 0 ],
-[ -0.9400000000000001, -0.79, 1, 0, 0, 0 ],
-[ -0.5700000000000001, 0.07000000000000001, 0, 0, 1, 0 ],
-[ 0.3, -0.44, 1, 0, 0, 0 ],
-[ -0.08, 0.67, 1, 0, 0, 0 ],
-[ -0.77, 0.63, 0, 0, 1, 0 ],
-[ -0.34, -0.16, 0, 0, 0, 1 ],
-[ -0.6900000000000001, 0.44, 0, 0, 1, 0 ],
-[ -0.86, 0.89, 1, 0, 0, 0 ],
-[ -0.47, -0.25, 1, 0, 0, 0 ],
-[ -0.61, -0.46, 1, 0, 0, 0 ],
-[ 0.42, 0.42, 0, 1, 0, 0 ],
-[ 0.43, -0.16, 1, 0, 0, 0 ],
-[ 0.92, 0.86, 1, 0, 0, 0 ],
-[ -0.54, -0.47, 1, 0, 0, 0 ],
-[ -0.8100000000000001, -0.98, 1, 0, 0, 0 ],
-[ 0.06, 0.12, 1, 0, 0, 0 ],
-[ 0.89, 0.98, 1, 0, 0, 0 ],
-[ 0.65, 0.35, 0, 1, 0, 0 ],
-[ -0.48, 0.03, 1, 0, 0, 0 ],
-[ -0.42, 0.5700000000000001, 0, 0, 1, 0 ],
-[ 0.68, -0.06, 1, 0, 0, 0 ],
-[ -0.91, 0.18, 1, 0, 0, 0 ],
-[ 0.91, 0.48, 0, 1, 0, 0 ],
-[ 0.99, 0.17, 1, 0, 0, 0 ],
-[ -0.27, -0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.66, -0.62, 1, 0, 0, 0 ],
-[ -0.92, -0.1, 1, 0, 0, 0 ],
-[ -0.89, -0.48, 1, 0, 0, 0 ],
-[ 0.3, -0.5, 1, 0, 0, 0 ],
-[ -0.34, -0.35, 1, 0, 0, 0 ],
-[ 0.45, 0.43, 0, 1, 0, 0 ],
-[ -0.78, 0.52, 0, 0, 1, 0 ],
-[ -0.35, -0.68, 1, 0, 0, 0 ],
-[ 0.23, 0.55, 0, 1, 0, 0 ],
-[ 0.06, 0.5700000000000001, 0, 1, 0, 0 ],
-[ 0.23, 0.6900000000000001, 0, 1, 0, 0 ],
-[ -0.86, 0.22, 1, 0, 0, 0 ],
-[ 0.64, -0.78, 1, 0, 0, 0 ],
-[ -0.4, -0.86, 1, 0, 0, 0 ],
-[ -0.35, -0.48, 1, 0, 0, 0 ],
-[ 0.1, 0.08, 1, 0, 0, 0 ],
-[ -0.28, -0.64, 1, 0, 0, 0 ],
-[ 0.8300000000000001, -0.54, 1, 0, 0, 0 ],
-[ 0.06, -0.14, 0, 0, 0, 1 ],
-[ -0.78, -0.18, 1, 0, 0, 0 ],
-[ 0.02, -0.21, 0, 0, 0, 1 ],
-[ -0.9400000000000001, -0.62, 1, 0, 0, 0 ],
-[ -0.44, 0.31, 0, 0, 1, 0 ],
-[ 0.91, -0.98, 1, 0, 0, 0 ],
-[ 0.34, -0.37, 1, 0, 0, 0 ],
-[ 0.25, 0.76, 0, 1, 0, 0 ],
-[ 0.8100000000000001, 0.8200000000000001, 0, 1, 0, 0 ],
-[ -0.51, 0.87, 0, 0, 1, 0 ],
-[ 0.16, -0.1, 0, 0, 0, 1 ],
-[ 0.9500000000000001, 0.53, 1, 0, 0, 0 ],
-[ -0.98, -0.04, 1, 0, 0, 0 ],
-[ -0.34, 0.71, 0, 0, 1, 0 ],
-[ -0.59, 0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.29, 0.72, 0, 1, 0, 0 ],
-[ -0.9500000000000001, -0.05, 1, 0, 0, 0 ],
-[ -0.16, 0.7000000000000001, 0, 0, 1, 0 ],
-[ 0.9, -0.23, 1, 0, 0, 0 ],
-[ 0.89, -0.05, 1, 0, 0, 0 ],
-[ 0.01, -0.98, 0, 0, 0, 1 ],
-[ 0.32, -0.13, 0, 0, 0, 1 ],
-[ 0.78, 0.38, 0, 1, 0, 0 ],
-[ -0.23, -0.7000000000000001, 1, 0, 0, 0 ],
-[ 0.44, 0.05, 1, 0, 0, 0 ],
-[ 0.62, 0.18, 0, 1, 0, 0 ],
-[ 0.76, -0.8, 1, 0, 0, 0 ],
-[ -0.31, -0.52, 1, 0, 0, 0 ],
-[ 0.12, 0.04, 1, 0, 0, 0 ],
-[ 0.42, 0.71, 0, 1, 0, 0 ],
-[ -1., 0.3, 1, 0, 0, 0 ],
-[ -0.99, -0.01, 1, 0, 0, 0 ],
-[ 0.76, 0.5600000000000001, 0, 1, 0, 0 ],
-[ 0.85, 0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.03, 0.99, 1, 0, 0, 0 ],
-[ 0.51, 0.26, 0, 1, 0, 0 ],
-[ 0.43, -0.52, 1, 0, 0, 0 ],
-[ 0.74, -0.5, 1, 0, 0, 0 ],
-[ -0.38, -0.53, 1, 0, 0, 0 ],
-[ 0.44, 0.13, 0, 1, 0, 0 ],
-[ -0.42, 0.33, 0, 0, 1, 0 ],
-[ 0.38, 0.05, 1, 0, 0, 0 ],
-[ 0.96, 0.5, 1, 0, 0, 0 ],
-[ -0.23, 0.36, 0, 0, 1, 0 ],
-[ -0.68, 0.96, 1, 0, 0, 0 ],
-[ -0.67, -0.14, 1, 0, 0, 0 ],
-[ 0.12, 0.45, 0, 1, 0, 0 ],
-[ 0.63, -0.04, 1, 0, 0, 0 ],
-[ -0.9, -0.47, 1, 0, 0, 0 ],
-[ -0.71, -0.8200000000000001, 1, 0, 0, 0 ],
-[ -0.47, -0.34, 1, 0, 0, 0 ],
-[ 0.35, -0.2, 0, 0, 0, 1 ],
-[ 0.27, 0.16, 0, 1, 0, 0 ],
-[ 0.65, 0.19, 0, 1, 0, 0 ],
-[ -0.07000000000000001, 0.87, 1, 0, 0, 0 ],
-[ 0.72, -0.42, 1, 0, 0, 0 ],
-[ 0.68, 0.05, 1, 0, 0, 0 ],
-[ -0.46, -0.2, 1, 0, 0, 0 ],
-[ 0.34, -0.98, 1, 0, 0, 0 ],
-[ -0.49, 0.36, 0, 0, 1, 0 ],
-[ 0.39, 0.41, 0, 1, 0, 0 ],
-[ -0.04, -0.45, 0, 0, 0, 1 ],
-[ 0.3, -0.05, 0, 0, 0, 1 ],
-[ -0.38, -0.17, 0, 0, 0, 1 ],
-[ -0.5, -0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.75, -0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.96, 0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.5, -0.78, 1, 0, 0, 0 ],
-[ -0.09, 0.54, 0, 0, 1, 0 ],
-[ 0.02, 0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.34, 0.98, 1, 0, 0, 0 ],
-[ 0.59, 0.63, 0, 1, 0, 0 ],
-[ 0.86, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.24, 0.22, 0, 0, 1, 0 ],
-[ 0.5700000000000001, 0.48, 0, 1, 0, 0 ],
-[ -0.43, -0.14, 0, 0, 0, 1 ],
-[ 0.72, 0.28, 0, 1, 0, 0 ],
-[ 0.72, 0.93, 1, 0, 0, 0 ],
-[ 0.86, -0.35, 1, 0, 0, 0 ],
-[ 0.93, -0.12, 1, 0, 0, 0 ],
-[ -0.34, 0.6, 0, 0, 1, 0 ],
-[ 0.79, -0.35, 1, 0, 0, 0 ],
-[ 0.73, -0.32, 1, 0, 0, 0 ],
-[ 0.38, -0.06, 0, 0, 0, 1 ],
-[ -0.26, 0.64, 0, 0, 1, 0 ],
-[ -0.8200000000000001, -0.14, 1, 0, 0, 0 ],
-[ -0.03, -0.01, 0, 0, 0, 1 ],
-[ -0.6, -0.48, 1, 0, 0, 0 ],
-[ 0.33, -0.05, 0, 0, 0, 1 ],
-[ -0.58, -0.86, 1, 0, 0, 0 ],
-[ 0.54, 0.37, 0, 1, 0, 0 ],
-[ -0.91, -0.68, 1, 0, 0, 0 ],
-[ 0.33, -0.22, 0, 0, 0, 1 ],
-[ -0.2, 0.33, 0, 0, 1, 0 ],
-[ 0.66, 0.02, 1, 0, 0, 0 ],
-[ 0.26, -0.85, 1, 0, 0, 0 ],
-[ -0.19, -0.79, 1, 0, 0, 0 ],
-[ 0.54, 0.16, 0, 1, 0, 0 ],
-[ -0.06, -0.72, 0, 0, 0, 1 ],
-[ -0.7000000000000001, -0.25, 1, 0, 0, 0 ],
-[ -0.36, 0.54, 0, 0, 1, 0 ],
-[ -0.06, 0.15, 1, 0, 0, 0 ],
-[ 0.47, -0.52, 1, 0, 0, 0 ],
-[ 0.54, 0.49, 0, 1, 0, 0 ],
-[ 0.89, 0.87, 1, 0, 0, 0 ],
-[ -0.73, 0.98, 1, 0, 0, 0 ],
-[ -0.42, 0.39, 0, 0, 1, 0 ],
-[ 0.31, -0.8200000000000001, 1, 0, 0, 0 ],
-[ 0.89, -0.6900000000000001, 1, 0, 0, 0 ],
-[ 0.22, -0.06, 0, 0, 0, 1 ],
-[ 0.02, -0.25, 0, 0, 0, 1 ],
-[ 0.91, -0.12, 1, 0, 0, 0 ],
-[ -0.31, 0.22, 0, 0, 1, 0 ],
-[ -0.86, -0.87, 1, 0, 0, 0 ],
-[ -0.16, -0.84, 1, 0, 0, 0 ],
-[ 0.3, 0.04, 1, 0, 0, 0 ],
-[ 0.12, -0.32, 0, 0, 0, 1 ],
-[ 0.74, 0.5700000000000001, 0, 1, 0, 0 ],
-[ -0.58, 0.28, 0, 0, 1, 0 ],
-[ 0.77, -0.55, 1, 0, 0, 0 ],
-[ -0.53, 0.02, 1, 0, 0, 0 ],
-[ -0.93, -0.99, 1, 0, 0, 0 ],
-[ -0.32, 0.9, 0, 0, 1, 0 ],
-[ -0.17, -0.87, 1, 0, 0, 0 ],
-[ -0.99, 0.91, 1, 0, 0, 0 ],
-[ -0.01, -0.84, 0, 0, 0, 1 ],
-[ -0.48, 0.44, 0, 0, 1, 0 ],
-[ 0.35, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.55, 0.6, 0, 1, 0, 0 ],
-[ 0.75, -0.75, 1, 0, 0, 0 ],
-[ 0.5700000000000001, -0.3, 1, 0, 0, 0 ],
-[ 0.48, 0.62, 0, 1, 0, 0 ],
-[ -0.1, 0.76, 1, 0, 0, 0 ],
-[ -0.3, -0.84, 1, 0, 0, 0 ],
-[ -0.79, -0.92, 1, 0, 0, 0 ],
-[ -0.66, 0.03, 1, 0, 0, 0 ],
-[ -0.16, 0.71, 0, 0, 1, 0 ],
-[ 0.91, -0.16, 1, 0, 0, 0 ],
-[ 0.22, 0.92, 1, 0, 0, 0 ],
-[ -0.6, -0.61, 1, 0, 0, 0 ],
-[ -0.76, -0.04, 1, 0, 0, 0 ],
-[ 0.8300000000000001, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.67, -0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.71, -0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.21, -0.54, 0, 0, 0, 1 ],
-[ 0.23, 0.97, 1, 0, 0, 0 ],
-[ 0.91, 0.74, 1, 0, 0, 0 ],
-[ -0.45, -0.05, 0, 0, 0, 1 ],
-[ 0.05, -0.31, 0, 0, 0, 1 ],
-[ 0.39, 0.44, 0, 1, 0, 0 ],
-[ 0.44, -0.8200000000000001, 1, 0, 0, 0 ],
-[ 0.68, 0.61, 0, 1, 0, 0 ],
-[ 0.78, 0.9400000000000001, 1, 0, 0, 0 ],
-[ 0.24, -0.7000000000000001, 1, 0, 0, 0 ],
-[ 0.8, -0.08, 1, 0, 0, 0 ],
-[ 0.06, -0.85, 0, 0, 0, 1 ],
-[ 0.8100000000000001, -0.73, 1, 0, 0, 0 ],
-[ -0.74, -0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.51, -0.46, 1, 0, 0, 0 ],
-[ 0.62, -0.14, 1, 0, 0, 0 ],
-[ 0.35, 0.54, 0, 1, 0, 0 ],
-[ -0.52, 0.72, 0, 0, 1, 0 ],
-[ -0.46, -0.16, 1, 0, 0, 0 ],
-[ -0.55, 0.75, 0, 0, 1, 0 ],
-[ 0.4, -0.35, 1, 0, 0, 0 ],
-[ -0.84, 0.97, 1, 0, 0, 0 ],
-[ 0.65, -0.24, 1, 0, 0, 0 ],
-[ -0.6, 0.28, 0, 0, 1, 0 ],
-[ -0.35, 0.19, 0, 0, 1, 0 ],
-[ -0.1, 0.36, 0, 0, 1, 0 ],
-[ -0.04, 0.38, 1, 0, 0, 0 ],
-[ 0.64, 0.18, 0, 1, 0, 0 ],
-[ 0.97, -0.63, 1, 0, 0, 0 ],
-[ 0.91, -0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.74, -0.5, 1, 0, 0, 0 ],
-[ -0.45, -0.4, 1, 0, 0, 0 ],
-[ 0.97, -0.06, 1, 0, 0, 0 ],
-[ 0.18, 0.89, 1, 0, 0, 0 ],
-[ 0.15, 0.39, 0, 1, 0, 0 ],
-[ 0.29, 0.62, 0, 1, 0, 0 ],
-[ 0.36, 0.71, 0, 1, 0, 0 ],
-[ -0.98, -0.14, 1, 0, 0, 0 ],
-[ -0.38, 0.13, 0, 0, 1, 0 ],
-[ 0.25, 0.61, 0, 1, 0, 0 ],
-[ 0.36, -1., 1, 0, 0, 0 ],
-[ -0.71, 0.7000000000000001, 0, 0, 1, 0 ],
-[ 0.31, 0.52, 0, 1, 0, 0 ],
-[ -0.53, -0.75, 1, 0, 0, 0 ],
-[ -0.9500000000000001, 0.11, 1, 0, 0, 0 ],
-[ -0.22, 0.71, 0, 0, 1, 0 ],
-[ 0.09, 0.29, 1, 0, 0, 0 ],
-[ -0.17, 0.12, 1, 0, 0, 0 ],
-[ -0.46, 0.43, 0, 0, 1, 0 ],
-[ 0.55, -0.86, 1, 0, 0, 0 ],
-[ 0.25, 0.06, 1, 0, 0, 0 ],
-[ -0.1, -0.11, 0, 0, 0, 1 ],
-[ 0.53, 0.64, 0, 1, 0, 0 ],
-[ -0.03, -0.06, 0, 0, 0, 1 ],
-[ -0.14, -0.67, 0, 0, 0, 1 ],
-[ 0.23, -0.49, 0, 0, 0, 1 ],
-[ 0.62, -0.51, 1, 0, 0, 0 ],
-[ -0.03, 0.71, 1, 0, 0, 0 ],
-[ -0.48, 0.98, 1, 0, 0, 0 ],
-[ -0.6900000000000001, -0.62, 1, 0, 0, 0 ],
-[ 0.28, -0.91, 1, 0, 0, 0 ],
-[ 0.09, 0.51, 0, 1, 0, 0 ],
-[ -0.13, -0.98, 1, 0, 0, 0 ],
-[ 0.32, 0.15, 0, 1, 0, 0 ],
-[ 0.59, 0.63, 0, 1, 0, 0 ],
-[ -0.37, -0.01, 0, 0, 0, 1 ],
-[ 0.47, -0.71, 1, 0, 0, 0 ],
-[ -0.08, 0.04, 1, 0, 0, 0 ],
-[ 0.92, 0.84, 1, 0, 0, 0 ],
-[ -0.33, -0.68, 1, 0, 0, 0 ],
-[ -0.07000000000000001, -0.29, 0, 0, 0, 1 ],
-[ -0.91, 0.45, 0, 0, 1, 0 ],
-[ 0.31, -0.38, 0, 0, 0, 1 ],
-[ -0.14, -0.39, 0, 0, 0, 1 ],
-[ -0.06, -0.23, 0, 0, 0, 1 ],
-[ 0.99, 0.85, 1, 0, 0, 0 ],
-[ -0.86, -0.91, 1, 0, 0, 0 ],
-[ -0.7000000000000001, -0.93, 1, 0, 0, 0 ],
-[ 0.61, 0.32, 0, 1, 0, 0 ],
-[ 0.25, 0.08, 1, 0, 0, 0 ],
-[ 0.18, -0.98, 1, 0, 0, 0 ],
-[ -0.7000000000000001, 0.74, 0, 0, 1, 0 ],
-[ 0.6, 0.24, 0, 1, 0, 0 ],
-[ 0.16, -0.33, 0, 0, 0, 1 ],
-[ -0.72, -0.87, 1, 0, 0, 0 ],
-[ -0.97, 0.27, 1, 0, 0, 0 ],
-[ -0.9, -0.06, 1, 0, 0, 0 ],
-[ 0.85, 0.39, 0, 1, 0, 0 ],
-[ -0.26, 0.76, 0, 0, 1, 0 ],
-[ -0.48, -0.87, 1, 0, 0, 0 ],
-[ 0.44, 0.76, 0, 1, 0, 0 ],
-[ 0.32, -0.66, 1, 0, 0, 0 ],
-[ -0.65, 0.35, 0, 0, 1, 0 ],
-[ 0.02, 0.47, 1, 0, 0, 0 ],
-[ -0.05, -0.3, 0, 0, 0, 1 ],
-[ 0.5700000000000001, -0.85, 1, 0, 0, 0 ],
-[ -0.66, -0.65, 1, 0, 0, 0 ],
-[ 0.28, 0.35, 0, 1, 0, 0 ],
-[ -0.48, 0.05, 1, 0, 0, 0 ],
-[ -0.8200000000000001, 0.3, 0, 0, 1, 0 ],
-[ -0.13, 0.28, 0, 0, 1, 0 ],
-[ -0.28, -0.89, 1, 0, 0, 0 ],
-[ 0.08, -0.75, 0, 0, 0, 1 ],
-[ 0.19, 0.98, 1, 0, 0, 0 ],
-[ 0.47, -0.6, 1, 0, 0, 0 ],
-[ 0.66, -0.35, 1, 0, 0, 0 ],
-[ -0.7000000000000001, 0.85, 0, 0, 1, 0 ],
-[ -1., -0.02, 1, 0, 0, 0 ],
-[ -0.97, -0.48, 1, 0, 0, 0 ],
-[ -0.96, -0.54, 1, 0, 0, 0 ],
-[ -1., 0.64, 1, 0, 0, 0 ],
-[ -0.86, -0.97, 1, 0, 0, 0 ],
-[ -0.55, 0.23, 0, 0, 1, 0 ],
-[ -0.71, -0.46, 1, 0, 0, 0 ],
-[ -0.23, 0.15, 0, 0, 1, 0 ],
-[ 0.6900000000000001, 0.47, 0, 1, 0, 0 ],
-[ 0.46, -0.07000000000000001, 0, 0, 0, 1 ],
-[ -0.16, -0.48, 0, 0, 0, 1 ],
-[ -0.06, -0.3, 0, 0, 0, 1 ],
-[ 0.91, -0.73, 1, 0, 0, 0 ],
-[ -0.37, 0.48, 0, 0, 1, 0 ],
-[ -0.34, 0.74, 0, 0, 1, 0 ],
-[ -0.22, 0.19, 0, 0, 1, 0 ],
-[ -0.93, -0.6, 1, 0, 0, 0 ],
-[ -0.1, -0.74, 0, 0, 0, 1 ],
-[ -0.49, -0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.68, -0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.05, -0.99, 1, 0, 0, 0 ],
-[ -0.68, 0.19, 0, 0, 1, 0 ],
-[ -0.8200000000000001, -0.27, 1, 0, 0, 0 ],
-[ -0.99, 0.2, 1, 0, 0, 0 ],
-[ -0.24, -0.11, 0, 0, 0, 1 ],
-[ -0.2, 0.64, 0, 0, 1, 0 ],
-[ 0.79, -0.87, 1, 0, 0, 0 ],
-[ 0.36, -0.29, 1, 0, 0, 0 ],
-[ 0.46, 0.52, 0, 1, 0, 0 ],
-[ 0.53, -0.61, 1, 0, 0, 0 ],
-[ 0.07000000000000001, -0.47, 0, 0, 0, 1 ],
-[ -0.64, 0.58, 0, 0, 1, 0 ],
-[ 0.53, 0.35, 0, 1, 0, 0 ],
-[ -0.16, 0.49, 0, 0, 1, 0 ],
-[ 0.66, 0.51, 0, 1, 0, 0 ],
-[ 0.7000000000000001, 0.63, 0, 1, 0, 0 ],
-[ 0.14, -0.93, 1, 0, 0, 0 ],
-[ -0.86, -0.5, 1, 0, 0, 0 ],
-[ -0.62, 0.58, 0, 0, 1, 0 ],
-[ 0.51, -0.33, 1, 0, 0, 0 ],
-[ -0.22, 0.52, 0, 0, 1, 0 ],
-[ -0.62, 0.75, 0, 0, 1, 0 ],
-[ 0.1, -0.19, 0, 0, 0, 1 ],
-[ -0.97, 0.29, 1, 0, 0, 0 ],
-[ -0.72, 0.25, 0, 0, 1, 0 ],
-[ -0.5600000000000001, 0.52, 0, 0, 1, 0 ],
-[ -0.13, -0.4, 0, 0, 0, 1 ],
-[ 0.62, 0.06, 1, 0, 0, 0 ],
-[ 0.15, 0.96, 1, 0, 0, 0 ],
-[ -0.87, -0.76, 1, 0, 0, 0 ],
-[ 0.44, 0.6, 0, 1, 0, 0 ],
-[ 0.84, -0.99, 1, 0, 0, 0 ],
-[ 0.9500000000000001, -0.11, 1, 0, 0, 0 ],
-[ 0.9, 0.06, 1, 0, 0, 0 ],
-[ -0.68, -0.99, 1, 0, 0, 0 ],
-[ 0.47, 0.44, 0, 1, 0, 0 ],
-[ -0.66, -0.48, 1, 0, 0, 0 ],
-[ -0.07000000000000001, 0.42, 0, 0, 1, 0 ],
-[ -0.75, 0.8300000000000001, 0, 0, 1, 0 ],
-[ 0.38, -0.99, 1, 0, 0, 0 ],
-[ 0.19, -0.62, 0, 0, 0, 1 ],
-[ -0.97, -0.77, 1, 0, 0, 0 ],
-[ 0.32, 0.35, 0, 1, 0, 0 ],
-[ 0.3, 0.8200000000000001, 0, 1, 0, 0 ],
-[ -0.19, 0.93, 1, 0, 0, 0 ],
-[ -0.46, -0.71, 1, 0, 0, 0 ],
-[ -0.79, 0.02, 1, 0, 0, 0 ],
-[ 0.71, 0.59, 0, 1, 0, 0 ],
-[ 0.5700000000000001, -0.04, 1, 0, 0, 0 ],
-[ -0.4, -0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.73, -0.41, 1, 0, 0, 0 ],
-[ 0.75, -0.87, 1, 0, 0, 0 ],
-[ -0.52, -0.16, 1, 0, 0, 0 ],
-[ 0.28, 0.59, 0, 1, 0, 0 ],
-[ 0.25, 0.8100000000000001, 0, 1, 0, 0 ],
-[ -0.96, 0.1, 1, 0, 0, 0 ],
-[ 0.33, 0.71, 0, 1, 0, 0 ],
-[ 0.84, -0.7000000000000001, 1, 0, 0, 0 ],
-[ -0.04, -0.7000000000000001, 0, 0, 0, 1 ],
-[ 0.27, 0.76, 0, 1, 0, 0 ],
-[ 0.2, 0.98, 1, 0, 0, 0 ],
-[ 0.7000000000000001, -0.38, 1, 0, 0, 0 ],
-[ 0.34, 0.8300000000000001, 0, 1, 0, 0 ],
-[ 0.8100000000000001, -0.32, 1, 0, 0, 0 ],
-[ 0.89, 0.65, 0, 1, 0, 0 ],
-[ -0.8, -0.05, 1, 0, 0, 0 ],
-[ 0.08, -0.74, 0, 0, 0, 1 ],
-[ 0.9500000000000001, -0.31, 1, 0, 0, 0 ],
-[ -0.88, -0.58, 1, 0, 0, 0 ],
-[ -0.39, 0.51, 0, 0, 1, 0 ],
-[ -0.3, 0.39, 0, 0, 1, 0 ],
-[ -0.8, 0.6, 0, 0, 1, 0 ],
-[ -0.23, 0.75, 0, 0, 1, 0 ],
-[ 0.92, -0.42, 1, 0, 0, 0 ],
-[ 0.59, 0.73, 0, 1, 0, 0 ],
-[ 0.12, -0.84, 1, 0, 0, 0 ],
-[ -0.49, 0.46, 0, 0, 1, 0 ],
-[ 0.07000000000000001, 0.8200000000000001, 1, 0, 0, 0 ],
-[ 0.05, 0.45, 1, 0, 0, 0 ],
-[ -1., -0.8, 1, 0, 0, 0 ],
-[ 0.93, -0.6900000000000001, 1, 0, 0, 0 ],
-[ -0.46, 0.45, 0, 0, 1, 0 ],
-[ 0.75, -0.16, 1, 0, 0, 0 ],
-[ -0.59, 0.34, 0, 0, 1, 0 ],
-[ 0.25, -0.54, 1, 0, 0, 0 ],
-[ 0.71, -0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.22, 0.5700000000000001, 0, 0, 1, 0 ],
-[ -0.6900000000000001, -0.68, 1, 0, 0, 0 ],
-[ 0.5600000000000001, 0.8100000000000001, 0, 1, 0, 0 ],
-[ 0.58, 0.13, 0, 1, 0, 0 ],
-[ -0.32, 0.41, 0, 0, 1, 0 ],
-[ -0.31, -0.1, 0, 0, 0, 1 ],
-[ -0.39, 0.73, 0, 0, 1, 0 ],
-[ -0.6, 1., 1, 0, 0, 0 ],
-[ 0.38, 0.63, 0, 1, 0, 0 ],
-[ 0.6, 0.48, 0, 1, 0, 0 ],
-[ -0.53, 0.58, 0, 0, 1, 0 ],
-[ 0.49, 0.01, 1, 0, 0, 0 ],
-[ 0.88, -0.65, 1, 0, 0, 0 ],
-[ 0.02, -0.66, 0, 0, 0, 1 ],
-[ 0.64, 0.06, 1, 0, 0, 0 ],
-[ -0.2, -0.77, 1, 0, 0, 0 ],
-[ 0.4, -0.72, 1, 0, 0, 0 ],
-[ -0.36, -0.7000000000000001, 1, 0, 0, 0 ],
-[ -0.25, -0.89, 1, 0, 0, 0 ],
-[ -0.91, 0.45, 0, 0, 1, 0 ],
-[ 0.49, 0.2, 0, 1, 0, 0 ],
-[ 0.53, 0.62, 0, 1, 0, 0 ],
-[ -0.42, -0.92, 1, 0, 0, 0 ],
-[ -0.78, 0.88, 1, 0, 0, 0 ],
-[ -0.55, -0.93, 1, 0, 0, 0 ],
-[ 1., -0.46, 1, 0, 0, 0 ],
-[ -1., 0.17, 1, 0, 0, 0 ],
-[ -0.67, -0.15, 1, 0, 0, 0 ],
-[ -0.77, -0.74, 1, 0, 0, 0 ],
-[ 0.99, -0.5, 1, 0, 0, 0 ],
-[ 0.92, -0.35, 1, 0, 0, 0 ],
-[ 0.98, -0.62, 1, 0, 0, 0 ],
-[ 0.31, -0.78, 1, 0, 0, 0 ],
-[ -0.18, -0.01, 0, 0, 0, 1 ],
-[ 0.27, 0.61, 0, 1, 0, 0 ],
-[ 0.5700000000000001, -0.78, 1, 0, 0, 0 ],
-[ 0.18, 0.05, 1, 0, 0, 0 ],
-[ -0.76, 0.32, 0, 0, 1, 0 ],
-[ -0.8300000000000001, 0.76, 0, 0, 1, 0 ],
-[ 0.17, -0.49, 0, 0, 0, 1 ],
-[ -0.88, 0.23, 1, 0, 0, 0 ],
-[ 0.1, 0.66, 0, 1, 0, 0 ],
-[ 0.42, 0.06, 0, 1, 0, 0 ],
-[ -0.52, 0.04, 1, 0, 0, 0 ],
-[ 0.24, -0.73, 1, 0, 0, 0 ],
-[ 0.42, 0.37, 0, 1, 0, 0 ],
-[ 0.5700000000000001, 0.12, 0, 1, 0, 0 ],
-[ -0.6, -0.34, 1, 0, 0, 0 ],
-[ 0.76, 0.3, 0, 1, 0, 0 ],
-[ -0.5700000000000001, -0.35, 1, 0, 0, 0 ],
-[ -0.68, 0.8, 0, 0, 1, 0 ],
-[ 0.71, 0.62, 0, 1, 0, 0 ],
-[ -0.12, -0.92, 1, 0, 0, 0 ],
-[ 0.52, -0.89, 1, 0, 0, 0 ],
-[ 0.73, 0.38, 0, 1, 0, 0 ],
-[ -0.51, 0.91, 0, 0, 1, 0 ],
-[ -0.39, 0.12, 0, 0, 1, 0 ],
-[ -0.27, -0.54, 1, 0, 0, 0 ],
-[ 0.86, 0.23, 1, 0, 0, 0 ],
-[ -0.66, 0.73, 0, 0, 1, 0 ],
-[ 0.77, 0.38, 0, 1, 0, 0 ],
-[ -0.11, -0.39, 0, 0, 0, 1 ],
-[ -0.51, 0.51, 0, 0, 1, 0 ],
-[ -0.12, 0.01, 1, 0, 0, 0 ],
-[ -0.88, 0.11, 1, 0, 0, 0 ],
-[ 0.2, -0.65, 1, 0, 0, 0 ],
-[ -0.48, 0.32, 0, 0, 1, 0 ],
-[ 0.1, -0.02, 0, 0, 0, 1 ],
-[ -0.62, -0.4, 1, 0, 0, 0 ],
-[ -0.76, -0.99, 1, 0, 0, 0 ],
-[ -1., 0.52, 1, 0, 0, 0 ],
-[ 0.4, -0.79, 1, 0, 0, 0 ],
-[ -0.1, -0.33, 0, 0, 0, 1 ],
-[ -0.66, 0.8200000000000001, 0, 0, 1, 0 ],
-[ 0.1, -0.02, 0, 0, 0, 1 ],
-[ 0.9500000000000001, -0.5600000000000001, 1, 0, 0, 0 ],
-[ 0.38, 0.25, 0, 1, 0, 0 ],
-[ 0.35, 0.66, 0, 1, 0, 0 ],
-[ 0.33, 0.77, 0, 1, 0, 0 ],
-[ -0.72, -0.39, 1, 0, 0, 0 ],
-[ -0.44, -0.93, 1, 0, 0, 0 ],
-[ 0.78, -0.06, 1, 0, 0, 0 ],
-[ -0.85, -0.16, 1, 0, 0, 0 ],
-[ -0.02, 0.79, 1, 0, 0, 0 ],
-[ -0.42, -0.49, 1, 0, 0, 0 ],
-[ 0.99, 0.93, 1, 0, 0, 0 ],
-[ -0.87, 0.2, 1, 0, 0, 0 ],
-[ -0.3, 0.54, 0, 0, 1, 0 ],
-[ 0.1, -0.45, 0, 0, 0, 1 ],
-[ 0.45, -0.24, 1, 0, 0, 0 ],
-[ 0.93, 0.79, 1, 0, 0, 0 ],
-[ -0.4, -0.2, 1, 0, 0, 0 ],
-[ 0.17, -0.48, 0, 0, 0, 1 ],
-[ -0.73, 0.06, 1, 0, 0, 0 ],
-[ 0.92, -0.78, 1, 0, 0, 0 ],
-[ 0.33, 0.9, 0, 1, 0, 0 ],
-[ -0.31, -0.27, 0, 0, 0, 1 ],
-[ 0.8100000000000001, 0.67, 0, 1, 0, 0 ],
-[ -0.89, -0.39, 1, 0, 0, 0 ],
-[ 0.67, 0.38, 0, 1, 0, 0 ],
-[ 0.63, 0.07000000000000001, 1, 0, 0, 0 ],
-[ -0.42, -0.51, 1, 0, 0, 0 ],
-[ 0.98, -0.17, 1, 0, 0, 0 ],
-[ -0.31, -0.04, 0, 0, 0, 1 ],
-[ 0.13, -0.02, 0, 0, 0, 1 ],
-[ -0.19, 0.92, 1, 0, 0, 0 ],
-[ 0.9, -0.04, 1, 0, 0, 0 ],
-[ -0.65, -0.44, 1, 0, 0, 0 ],
-[ 0.96, -0.15, 1, 0, 0, 0 ],
-[ 0.15, -0.48, 0, 0, 0, 1 ],
-[ 0.9500000000000001, 0.5600000000000001, 1, 0, 0, 0 ],
-[ -0.09, 0.26, 1, 0, 0, 0 ],
-[ 0.02, 0.18, 1, 0, 0, 0 ],
-[ -0.8300000000000001, 0.44, 0, 0, 1, 0 ],
-[ -0.67, 0.8300000000000001, 0, 0, 1, 0 ],
-[ 0.92, 0.53, 0, 1, 0, 0 ],
-[ 0.43, -0.02, 0, 0, 0, 1 ],
-[ 0.17, 0.51, 0, 1, 0, 0 ],
-[ 0.58, 0.78, 0, 1, 0, 0 ],
-[ -0.8200000000000001, 0.78, 0, 0, 1, 0 ],
-[ -0.32, 0.6, 0, 0, 1, 0 ],
-[ 0.36, -0.42, 1, 0, 0, 0 ],
-[ 0.5600000000000001, 0.08, 0, 1, 0, 0 ],
-[ 0.89, 0.09, 1, 0, 0, 0 ],
-[ -0.5600000000000001, -0.51, 1, 0, 0, 0 ],
-[ -0.02, -0.5600000000000001, 0, 0, 0, 1 ],
-[ -1., 0.13, 1, 0, 0, 0 ],
-[ 0.9400000000000001, 0.14, 1, 0, 0, 0 ],
-[ -0.9, 0.84, 1, 0, 0, 0 ],
-[ -0.73, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.6900000000000001, -0.88, 1, 0, 0, 0 ],
-[ 0.61, -0.65, 1, 0, 0, 0 ],
-[ 1., -0.17, 1, 0, 0, 0 ],
-[ -0.85, 0.55, 0, 0, 1, 0 ],
-[ 0.29, 0.51, 0, 1, 0, 0 ],
-[ 0.86, 0.54, 0, 1, 0, 0 ],
-[ -0.84, -0.27, 1, 0, 0, 0 ],
-[ -0.71, 0.27, 0, 0, 1, 0 ],
-[ -0.41, -0.21, 1, 0, 0, 0 ],
-[ 0.8300000000000001, -0.17, 1, 0, 0, 0 ],
-[ 0.48, -0.14, 1, 0, 0, 0 ],
-[ -0.68, 0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.41, -0.8100000000000001, 1, 0, 0, 0 ],
-[ 0.9400000000000001, -0.09, 1, 0, 0, 0 ],
-[ 0.37, 0.01, 1, 0, 0, 0 ],
-[ 0.37, 0.48, 0, 1, 0, 0 ],
-[ 0.52, -0.92, 1, 0, 0, 0 ],
-[ -0.17, -0.26, 0, 0, 0, 1 ],
-[ -0.64, -0.58, 1, 0, 0, 0 ],
-[ 0.4, -0.06, 0, 0, 0, 1 ],
-[ 0.12, 0.5600000000000001, 0, 1, 0, 0 ],
-[ 0.75, -0.45, 1, 0, 0, 0 ],
-[ 0.34, -0.73, 1, 0, 0, 0 ],
-[ 0.32, -0.55, 1, 0, 0, 0 ],
-[ -0.55, -0.99, 1, 0, 0, 0 ],
-[ 0.1, 0.43, 0, 1, 0, 0 ],
-[ 0.61, 0.86, 0, 1, 0, 0 ],
-[ -0.53, -0.48, 1, 0, 0, 0 ],
-[ 0.8100000000000001, 0.92, 1, 0, 0, 0 ],
-[ -0.05, -0.13, 0, 0, 0, 1 ],
-[ -0.07000000000000001, -0.61, 0, 0, 0, 1 ],
-[ -0.47, -0.59, 1, 0, 0, 0 ],
-[ 0.87, 0.1, 1, 0, 0, 0 ],
-[ -0.84, -0.03, 1, 0, 0, 0 ],
-[ 0.88, -0.8, 1, 0, 0, 0 ],
-[ -0.8200000000000001, -0.34, 1, 0, 0, 0 ],
-[ 0.75, 0.44, 0, 1, 0, 0 ],
-[ 0.9500000000000001, 0.7000000000000001, 1, 0, 0, 0 ],
-[ -0.8300000000000001, -0.72, 1, 0, 0, 0 ],
-[ -0.78, 0.44, 0, 0, 1, 0 ],
-[ 0.88, 0.67, 0, 1, 0, 0 ],
-[ 0.74, 0.63, 0, 1, 0, 0 ],
-[ 0.89, -0.34, 1, 0, 0, 0 ],
-[ -0.4, -0.38, 1, 0, 0, 0 ],
-[ 0.24, -0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.8200000000000001, -0.52, 1, 0, 0, 0 ],
-[ 0.65, -0.71, 1, 0, 0, 0 ],
-[ -0.85, -0.58, 1, 0, 0, 0 ],
-[ -0.16, -0.67, 0, 0, 0, 1 ],
-[ 0.52, -0.09, 1, 0, 0, 0 ],
-[ 0.53, 0.24, 0, 1, 0, 0 ],
-[ -0.5600000000000001, 0.04, 1, 0, 0, 0 ],
-[ -0.38, -0.36, 1, 0, 0, 0 ],
-[ 0.06, 0.18, 1, 0, 0, 0 ],
-[ 0.43, -0.3, 1, 0, 0, 0 ],
-[ 0.9400000000000001, 0.6, 1, 0, 0, 0 ],
-[ -0.05, 0.04, 1, 0, 0, 0 ],
-[ -0.41, 0.43, 0, 0, 1, 0 ],
-[ 0.66, 0.4, 0, 1, 0, 0 ],
-[ 0.92, 0.62, 0, 1, 0, 0 ],
-[ 0.7000000000000001, 0.05, 1, 0, 0, 0 ],
-[ 0.49, -0.72, 1, 0, 0, 0 ],
-[ -0.74, 0.35, 0, 0, 1, 0 ],
-[ 0.19, 0.03, 1, 0, 0, 0 ],
-[ -0.68, -0.48, 1, 0, 0, 0 ],
-[ -0.78, 0.21, 0, 0, 1, 0 ],
-[ -0.99, -0.47, 1, 0, 0, 0 ],
-[ -0.53, 0.23, 0, 0, 1, 0 ],
-[ -0.9, -0.64, 1, 0, 0, 0 ],
-[ 0.43, 0.63, 0, 1, 0, 0 ],
-[ 0.8, -0.96, 1, 0, 0, 0 ],
-[ 0.96, 0.74, 1, 0, 0, 0 ],
-[ 0.44, -0.98, 1, 0, 0, 0 ],
-[ 0.38, 0.24, 0, 1, 0, 0 ],
-[ -0.06, 0.8200000000000001, 1, 0, 0, 0 ],
-[ -0.8200000000000001, -0.34, 1, 0, 0, 0 ],
-[ -0.8200000000000001, -0.51, 1, 0, 0, 0 ],
-[ 0.1, -0.85, 1, 0, 0, 0 ],
-[ -0.97, -0.6, 1, 0, 0, 0 ],
-[ 0.1, -0.16, 0, 0, 0, 1 ],
-[ -0.96, -0.28, 1, 0, 0, 0 ],
-[ -0.97, -0.23, 1, 0, 0, 0 ],
-[ -0.05, 0.62, 1, 0, 0, 0 ],
-[ 0.7000000000000001, 0.64, 0, 1, 0, 0 ],
-[ 0.8, -0.9, 1, 0, 0, 0 ],
-[ 0.18, 0.1, 1, 0, 0, 0 ],
-[ -0.92, -0.64, 1, 0, 0, 0 ],
-[ 0.93, 0.25, 1, 0, 0, 0 ],
-[ 0.08, -0.96, 1, 0, 0, 0 ],
-[ -0.18, 0.61, 0, 0, 1, 0 ],
-[ 0.14, 0.88, 1, 0, 0, 0 ],
-[ -0.01, 0.33, 1, 0, 0, 0 ],
-[ -0.7000000000000001, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.19, 0.06, 1, 0, 0, 0 ],
-[ 0.02, -0.64, 0, 0, 0, 1 ],
-[ 0.23, 0.17, 0, 1, 0, 0 ],
-[ 0.6, -0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.54, 0.06, 0, 0, 1, 0 ],
-[ 0.97, 0.58, 1, 0, 0, 0 ],
-[ 0.68, 0.04, 1, 0, 0, 0 ],
-[ -0.5, 0.22, 0, 0, 1, 0 ],
-[ -0.66, 0.8200000000000001, 0, 0, 1, 0 ],
-[ 0.05, 0.76, 1, 0, 0, 0 ],
-[ 0.04, 0.03, 1, 0, 0, 0 ],
-[ -0.07000000000000001, 0.84, 1, 0, 0, 0 ],
-[ 1., -0.84, 1, 0, 0, 0 ],
-[ -0.66, -0.98, 1, 0, 0, 0 ],
-[ 0.34, -0.45, 1, 0, 0, 0 ],
-[ -0.5, 0.8100000000000001, 0, 0, 1, 0 ],
-[ -0.22, 0.31, 0, 0, 1, 0 ],
-[ -0.26, -0.31, 0, 0, 0, 1 ],
-[ -0.21, -0.38, 0, 0, 0, 1 ],
-[ -0.08, -0.66, 0, 0, 0, 1 ],
-[ 0.6, -0.53, 1, 0, 0, 0 ],
-[ 0.2, -0.15, 0, 0, 0, 1 ],
-[ -0.62, -0.18, 1, 0, 0, 0 ],
-[ 0.87, 0.02, 1, 0, 0, 0 ],
-[ -0.84, -0.9400000000000001, 1, 0, 0, 0 ],
-[ 0.49, 0.9400000000000001, 0, 1, 0, 0 ],
-[ 0.76, 0.78, 0, 1, 0, 0 ],
-[ 0.04, 0.65, 1, 0, 0, 0 ],
-[ -0.22, 0.13, 1, 0, 0, 0 ],
-[ 0.3, -0.13, 0, 0, 0, 1 ],
-[ 0.85, -0.6900000000000001, 1, 0, 0, 0 ],
-[ -0.78, 0.33, 0, 0, 1, 0 ],
-[ -0.8, -0.08, 1, 0, 0, 0 ],
-[ -0.91, 0.49, 0, 0, 1, 0 ],
-[ 0.72, 0.6900000000000001, 0, 1, 0, 0 ],
-[ 0.55, 0.65, 0, 1, 0, 0 ],
-[ -0.04, 0.8, 1, 0, 0, 0 ],
-[ -0.85, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.6900000000000001, 0.38, 0, 0, 1, 0 ],
-[ 0.65, 0.7000000000000001, 0, 1, 0, 0 ],
-[ -0.8200000000000001, 1., 1, 0, 0, 0 ],
-[ -0.35, 0.01, 1, 0, 0, 0 ],
-[ -0.96, -0.66, 1, 0, 0, 0 ],
-[ -0.48, 0.32, 0, 0, 1, 0 ],
-[ 0.96, -0.04, 1, 0, 0, 0 ],
-[ -0.2, -0.46, 0, 0, 0, 1 ],
-[ -0.48, -0.78, 1, 0, 0, 0 ],
-[ 0.97, -0.71, 1, 0, 0, 0 ],
-[ -0.53, -0.23, 1, 0, 0, 0 ],
-[ -0.64, 0.28, 0, 0, 1, 0 ],
-[ 0.93, -0.11, 1, 0, 0, 0 ],
-[ 0.68, -0.36, 1, 0, 0, 0 ],
-[ 0.37, 0.38, 0, 1, 0, 0 ],
-[ -0.99, -0.49, 1, 0, 0, 0 ],
-[ -0.37, 0.26, 0, 0, 1, 0 ],
-[ -0.85, 0.09, 1, 0, 0, 0 ],
-[ 0.88, -0.03, 1, 0, 0, 0 ],
-[ 0.9400000000000001, 0.84, 1, 0, 0, 0 ],
-[ 0.06, 0.67, 1, 0, 0, 0 ],
-[ -0.85, 0.39, 0, 0, 1, 0 ],
-[ 0.67, 0.6, 0, 1, 0, 0 ],
-[ -0.99, -0.55, 1, 0, 0, 0 ],
-[ 0.04, 0.43, 1, 0, 0, 0 ],
-[ 0.71, -0.97, 1, 0, 0, 0 ],
-[ 0.9400000000000001, 0.71, 1, 0, 0, 0 ],
-[ -0.01, -0.11, 0, 0, 0, 1 ],
-[ 0.8, -0.72, 1, 0, 0, 0 ],
-[ -0.65, -0.21, 1, 0, 0, 0 ],
-[ -0.93, 0.85, 1, 0, 0, 0 ],
-[ 0.42, -0.09, 0, 0, 0, 1 ],
-[ -0.22, -0.36, 0, 0, 0, 1 ],
-[ -0.36, 0.23, 0, 0, 1, 0 ],
-[ 0.96, -0.44, 1, 0, 0, 0 ],
-[ -0.09, 0.6, 0, 0, 1, 0 ],
-[ 0.91, 0.5, 0, 1, 0, 0 ],
-[ 0.65, -0.78, 1, 0, 0, 0 ],
-[ 0.79, -0.1, 1, 0, 0, 0 ],
-[ 0.43, -0.5700000000000001, 1, 0, 0, 0 ],
-[ -0.3, -0.52, 1, 0, 0, 0 ],
-[ -0.47, 0.79, 0, 0, 1, 0 ],
-[ -0.07000000000000001, -0.66, 0, 0, 0, 1 ],
-[ 0.06, 0.13, 1, 0, 0, 0 ],
-[ -0.34, -0.41, 1, 0, 0, 0 ],
-[ 0.37, -0.44, 1, 0, 0, 0 ],
-[ -0.73, 0.8100000000000001, 0, 0, 1, 0 ],
-[ 0.26, 0.06, 1, 0, 0, 0 ],
-[ 0.42, -0.89, 1, 0, 0, 0 ],
-[ 0.51, -0.55, 1, 0, 0, 0 ],
-[ -0.34, 0.63, 0, 0, 1, 0 ],
-[ -0.33, -0.3, 0, 0, 0, 1 ],
-[ 0.97, 0.63, 1, 0, 0, 0 ],
-[ -0.97, -0.6900000000000001, 1, 0, 0, 0 ],
-[ 0.45, -0.24, 1, 0, 0, 0 ],
-[ 0.33, -0.75, 1, 0, 0, 0 ],
-[ 1., -0.34, 1, 0, 0, 0 ],
-[ 0.68, 0.25, 0, 1, 0, 0 ],
-[ -0.72, 1., 1, 0, 0, 0 ],
-[ 0.84, 0.72, 0, 1, 0, 0 ],
-[ 0.5700000000000001, 0.66, 0, 1, 0, 0 ],
-[ 0.19, 0.41, 0, 1, 0, 0 ],
-[ 0.59, 0.86, 0, 1, 0, 0 ],
-[ -0.67, 0.73, 0, 0, 1, 0 ],
-[ -0.13, 0.26, 0, 0, 1, 0 ],
-[ 0.09, 0.92, 1, 0, 0, 0 ],
-[ -0.49, -0.28, 1, 0, 0, 0 ],
-[ 0.37, -0.15, 0, 0, 0, 1 ],
-[ 0.17, -0.84, 1, 0, 0, 0 ],
-[ -0.05, -0.22, 0, 0, 0, 1 ],
-[ -0.46, -0.43, 1, 0, 0, 0 ],
-[ -0.54, -0.16, 1, 0, 0, 0 ],
-[ -0.25, -0.2, 0, 0, 0, 1 ],
-[ 0.76, 0.89, 1, 0, 0, 0 ],
-[ -0.91, -0.51, 1, 0, 0, 0 ],
-[ -0.46, 0.76, 0, 0, 1, 0 ],
-[ -0.08, -0.64, 0, 0, 0, 1 ],
-[ 0.19, -0.38, 0, 0, 0, 1 ],
-[ 0.58, 0.65, 0, 1, 0, 0 ],
-[ 0.3, -0.17, 0, 0, 0, 1 ],
-[ -0.88, -0.71, 1, 0, 0, 0 ],
-[ 0.61, -0.73, 1, 0, 0, 0 ],
-[ 0.73, 0.87, 0, 1, 0, 0 ],
-[ -0.23, -0.55, 1, 0, 0, 0 ],
-[ 0.45, 0.62, 0, 1, 0, 0 ],
-[ -0.47, -0.2, 1, 0, 0, 0 ],
-[ 0.07000000000000001, -0.37, 0, 0, 0, 1 ],
-[ 0.11, -0.74, 0, 0, 0, 1 ],
-[ 0.21, -0.06, 0, 0, 0, 1 ],
-[ -0.33, -0.05, 0, 0, 0, 1 ],
-[ 0.67, -0.5600000000000001, 1, 0, 0, 0 ],
-[ 0.6, 0.2, 0, 1, 0, 0 ],
-[ 0.2, -0.36, 0, 0, 0, 1 ],
-[ 0.76, -1., 1, 0, 0, 0 ],
-[ 0.48, 0.21, 0, 1, 0, 0 ],
-[ 0.9400000000000001, -0.04, 1, 0, 0, 0 ],
-[ 0.02, 0.84, 1, 0, 0, 0 ],
-[ 0.44, -0.77, 1, 0, 0, 0 ],
-[ 0.7000000000000001, 0.01, 1, 0, 0, 0 ],
-[ -0.03, -0.53, 0, 0, 0, 1 ],
-[ -0.01, -0.53, 0, 0, 0, 1 ],
-[ 0.23, -0.55, 1, 0, 0, 0 ],
-[ 1., 0.66, 1, 0, 0, 0 ],
-[ 0.45, 0.8300000000000001, 0, 1, 0, 0 ],
-[ 0.34, -0.78, 1, 0, 0, 0 ],
-[ -0.97, -0.21, 1, 0, 0, 0 ],
-[ 0.96, 0.62, 1, 0, 0, 0 ],
-[ 0.12, -0.51, 0, 0, 0, 1 ],
-[ -0.22, -0.4, 0, 0, 0, 1 ],
-[ 0.35, -0.46, 1, 0, 0, 0 ],
-[ -0.84, -0.79, 1, 0, 0, 0 ],
-[ -0.09, 0.52, 0, 0, 1, 0 ],
-[ 0.87, -0.23, 1, 0, 0, 0 ],
-[ 0.99, -0.91, 1, 0, 0, 0 ],
-[ -0.3, -0.5700000000000001, 1, 0, 0, 0 ],
-[ 0.07000000000000001, -0.61, 0, 0, 0, 1 ],
-[ -0.88, -0.28, 1, 0, 0, 0 ],
-[ 0.4, -0.71, 1, 0, 0, 0 ],
-[ 0.21, -0.54, 0, 0, 0, 1 ],
-[ -0.34, 0.93, 1, 0, 0, 0 ],
-[ 0.9, -0.32, 1, 0, 0, 0 ],
-[ -0.74, -0.73, 1, 0, 0, 0 ],
-[ 0.77, -0.88, 1, 0, 0, 0 ],
-[ -0.15, 0.97, 1, 0, 0, 0 ],
-[ 0.62, -0.49, 1, 0, 0, 0 ],
-[ 0.71, 0.28, 0, 1, 0, 0 ],
-[ -0.9, 0.59, 0, 0, 1, 0 ],
-[ 0.44, 0.85, 0, 1, 0, 0 ],
-[ 0.01, 0.29, 1, 0, 0, 0 ],
-[ 0.58, 0.67, 0, 1, 0, 0 ],
-[ -0.8100000000000001, 0.8, 0, 0, 1, 0 ],
-[ -0.16, 0.93, 1, 0, 0, 0 ],
-[ 0.79, -0.39, 1, 0, 0, 0 ],
-[ 0.23, 0.8300000000000001, 0, 1, 0, 0 ],
-[ -0.5600000000000001, -0.01, 1, 0, 0, 0 ],
-[ 0.5700000000000001, -0.29, 1, 0, 0, 0 ],
-[ 0.4, 0.28, 0, 1, 0, 0 ],
-[ -0.84, 0.59, 0, 0, 1, 0 ],
-[ 0.47, 0.4, 0, 1, 0, 0 ],
-[ 0.13, 0.78, 1, 0, 0, 0 ],
-[ 0.11, -0.13, 0, 0, 0, 1 ],
-[ -0.49, -0.76, 1, 0, 0, 0 ],
-[ 0.53, 0.86, 0, 1, 0, 0 ],
-[ 0.79, 0.85, 1, 0, 0, 0 ],
-[ -0.59, 0.9, 0, 0, 1, 0 ],
-[ -0.5700000000000001, -0.48, 1, 0, 0, 0 ],
-[ 0.88, -0.18, 1, 0, 0, 0 ],
-[ 0.37, -0.45, 1, 0, 0, 0 ],
-[ 0.5, 0.92, 0, 1, 0, 0 ],
-[ -0.63, 0.11, 0, 0, 1, 0 ],
-[ 0.1, 0.16, 1, 0, 0, 0 ],
-[ -0.51, 0.6, 0, 0, 1, 0 ],
-[ 0.11, 0.08, 1, 0, 0, 0 ],
-[ -0.6, 0.34, 0, 0, 1, 0 ],
-[ -0.5700000000000001, 0.96, 1, 0, 0, 0 ],
-[ -0.01, -0.76, 0, 0, 0, 1 ],
-[ 0.9400000000000001, -0.36, 1, 0, 0, 0 ],
-[ -0.47, -0.34, 1, 0, 0, 0 ],
-[ -0.42, 0.79, 0, 0, 1, 0 ],
-[ -0.28, 0.38, 0, 0, 1, 0 ],
-[ -0.91, 0.2, 1, 0, 0, 0 ],
-[ 0.03, -0.16, 0, 0, 0, 1 ],
-[ -0.59, 0.2, 0, 0, 1, 0 ],
-[ 0.62, 0.51, 0, 1, 0, 0 ],
-[ 0.66, 0.1, 0, 1, 0, 0 ],
-[ 0.68, 0.86, 0, 1, 0, 0 ],
-[ 0.43, 0.98, 1, 0, 0, 0 ],
-[ 0.34, 0.19, 0, 1, 0, 0 ],
-[ 0.28, -0.9500000000000001, 1, 0, 0, 0 ],
-[ 0.84, 0.55, 0, 1, 0, 0 ],
-[ -0.04, -0.68, 0, 0, 0, 1 ],
-[ 0.18, -0.96, 1, 0, 0, 0 ],
-[ -0.62, -0.1, 1, 0, 0, 0 ],
-[ 0.6, -0.37, 1, 0, 0, 0 ],
-[ -0.89, -0.02, 1, 0, 0, 0 ],
-[ 0.11, -0.43, 0, 0, 0, 1 ],
-[ -0.5600000000000001, 0.93, 0, 0, 1, 0 ],
-[ -0.46, 0.8100000000000001, 0, 0, 1, 0 ],
-[ -0.67, 0.8300000000000001, 0, 0, 1, 0 ],
-[ 0.12, -0.5600000000000001, 0, 0, 0, 1 ],
-[ 0.79, -0.8200000000000001, 1, 0, 0, 0 ],
-[ 0.96, -0.26, 1, 0, 0, 0 ],
-[ 0.71, -0.19, 1, 0, 0, 0 ],
-[ 0.98, 0.19, 1, 0, 0, 0 ],
-[ -0.09, -0.91, 1, 0, 0, 0 ],
-[ -0.28, 0.79, 0, 0, 1, 0 ],
-[ -0.17, 0.79, 0, 0, 1, 0 ],
-[ -0.09, -0.38, 0, 0, 0, 1 ],
-[ -0.26, -0.98, 1, 0, 0, 0 ],
-[ -0.28, 0.09, 1, 0, 0, 0 ],
-[ 0.92, 0.23, 1, 0, 0, 0 ],
-[ -0.4, -0.78, 1, 0, 0, 0 ],
-[ 0.97, -0.53, 1, 0, 0, 0 ],
-[ -0.03, 0.2, 1, 0, 0, 0 ],
-[ -0.62, 0.9, 0, 0, 1, 0 ],
-[ 0.86, -0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.39, -0.2, 0, 0, 0, 1 ],
-[ -0.23, 0.6, 0, 0, 1, 0 ],
-[ 0.38, -0.49, 1, 0, 0, 0 ],
-[ 0.53, -0.51, 1, 0, 0, 0 ],
-[ 0.65, 0.74, 0, 1, 0, 0 ],
-[ 0.1, -0.16, 0, 0, 0, 1 ],
-[ 0.6900000000000001, 0.61, 0, 1, 0, 0 ],
-[ -0.33, 0.24, 0, 0, 1, 0 ],
-[ 0.12, -0.8, 1, 0, 0, 0 ],
-[ 0.49, -0.27, 1, 0, 0, 0 ],
-[ -0.31, -0.36, 0, 0, 0, 1 ],
-[ -0.04, 0.3, 1, 0, 0, 0 ],
-[ 0.87, 0.62, 0, 1, 0, 0 ],
-[ -0.11, -0.9, 1, 0, 0, 0 ],
-[ -0.5600000000000001, 0.97, 1, 0, 0, 0 ],
-[ -0.88, 0.2, 1, 0, 0, 0 ],
-[ 0.9, -0.98, 1, 0, 0, 0 ],
-[ 0.16, -0.44, 0, 0, 0, 1 ],
-[ 0.06, 0.74, 1, 0, 0, 0 ],
-[ 0.8200000000000001, 0.39, 0, 1, 0, 0 ],
-[ -0.3, 0.42, 0, 0, 1, 0 ],
-[ 0.5600000000000001, -0.08, 1, 0, 0, 0 ],
-[ 0.2, -0.93, 1, 0, 0, 0 ],
-[ 0.44, -0.01, 0, 0, 0, 1 ],
-[ -0.55, 0.65, 0, 0, 1, 0 ],
-[ 0.8300000000000001, -0.27, 1, 0, 0, 0 ],
-[ 0.38, -0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.59, 0.68, 0, 0, 1, 0 ],
-[ 0.99, 0.47, 1, 0, 0, 0 ],
-[ 0.6, 0.64, 0, 1, 0, 0 ],
-[ -0.62, -0.8200000000000001, 1, 0, 0, 0 ],
-[ -0.32, 0.59, 0, 0, 1, 0 ],
-[ -0.48, 0.13, 0, 0, 1, 0 ],
-[ -0.68, 0.3, 0, 0, 1, 0 ],
-[ 0.8300000000000001, -0.85, 1, 0, 0, 0 ],
-[ -0.28, 0.87, 0, 0, 1, 0 ],
-[ 0.32, -0.54, 1, 0, 0, 0 ],
-[ -0.91, 0.85, 1, 0, 0, 0 ],
-[ 0.59, -0.34, 1, 0, 0, 0 ],
-[ 0.61, 0.47, 0, 1, 0, 0 ],
-[ 0.45, -0.01, 0, 0, 0, 1 ],
-[ -0.21, 0.48, 0, 0, 1, 0 ],
-[ 0.98, 0.88, 1, 0, 0, 0 ],
-[ -0.16, -0.88, 1, 0, 0, 0 ],
-[ -0.5700000000000001, -0.52, 1, 0, 0, 0 ],
-[ -0.36, -0.11, 0, 0, 0, 1 ],
-[ 0.23, 0.29, 0, 1, 0, 0 ],
-[ -0.47, -0.6900000000000001, 1, 0, 0, 0 ],
-[ -0.21, -0.5, 0, 0, 0, 1 ],
-[ 0.4, -0.39, 1, 0, 0, 0 ],
-[ 0.8, -0.24, 1, 0, 0, 0 ],
-[ -0.48, 0.48, 0, 0, 1, 0 ],
-[ 0.46, -0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.45, -0.11, 1, 0, 0, 0 ],
-[ -0.23, 0.15, 0, 0, 1, 0 ],
-[ -0.14, 0.55, 0, 0, 1, 0 ],
-[ -0.5700000000000001, 0.92, 0, 0, 1, 0 ],
-[ -0.74, -0.15, 1, 0, 0, 0 ],
-[ -0.44, -0.45, 1, 0, 0, 0 ],
-[ -0.8300000000000001, 0.5700000000000001, 0, 0, 1, 0 ],
-[ -0.11, -0.12, 0, 0, 0, 1 ],
-[ 0.93, -0.5700000000000001, 1, 0, 0, 0 ],
-[ 0.87, 0.44, 0, 1, 0, 0 ],
-[ -0.53, 0.73, 0, 0, 1, 0 ],
-[ -0.33, 0.07000000000000001, 1, 0, 0, 0 ],
-[ -0.6, -0.46, 1, 0, 0, 0 ],
-[ -0.97, 0.14, 1, 0, 0, 0 ],
-[ -0.12, -1., 1, 0, 0, 0 ],
-[ -0.05, -0.71, 0, 0, 0, 1 ],
-[ 0.53, 0.43, 0, 1, 0, 0 ],
-[ 0.72, -0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.72, 0.76, 0, 0, 1, 0 ],
-[ -0.14, -0.08, 0, 0, 0, 1 ],
-[ -0.91, -0.2, 1, 0, 0, 0 ],
-[ -0.8100000000000001, -0.71, 1, 0, 0, 0 ],
-[ 0.54, 0.34, 0, 1, 0, 0 ],
-[ 0.9400000000000001, -0.48, 1, 0, 0, 0 ],
-[ 0.71, -0.67, 1, 0, 0, 0 ],
-[ -0.26, -0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.53, -0.76, 1, 0, 0, 0 ],
-[ 0.6, -0.25, 1, 0, 0, 0 ],
-[ -0.06, -0.59, 0, 0, 0, 1 ],
-[ -0.25, -0.64, 1, 0, 0, 0 ],
-[ -0.55, 0.6900000000000001, 0, 0, 1, 0 ],
-[ 0.48, -1., 1, 0, 0, 0 ],
-[ -0.5700000000000001, 0.22, 0, 0, 1, 0 ],
-[ -0.47, -0.24, 1, 0, 0, 0 ],
-[ 0.38, -0.31, 1, 0, 0, 0 ],
-[ -0.42, -0.04, 0, 0, 0, 1 ],
-[ 0.91, 0.25, 1, 0, 0, 0 ],
-[ 0.33, 0.13, 0, 1, 0, 0 ],
-[ 0.47, -0.52, 1, 0, 0, 0 ],
-[ 0.8200000000000001, -0.67, 1, 0, 0, 0 ],
-[ 0.26, 0.01, 1, 0, 0, 0 ],
-[ -0.8, -0.44, 1, 0, 0, 0 ],
-[ -0.41, 0.35, 0, 0, 1, 0 ],
-[ 0.34, -0.85, 1, 0, 0, 0 ],
-[ 0.47, 0.15, 0, 1, 0, 0 ],
-[ -0.84, -0.08, 1, 0, 0, 0 ],
-[ 0.78, 0.18, 0, 1, 0, 0 ],
-[ 0.23, -0.16, 0, 0, 0, 1 ],
-[ 0.21, 0.75, 0, 1, 0, 0 ],
-[ 0.19, 0.6, 0, 1, 0, 0 ],
-[ 0.35, -0.5600000000000001, 1, 0, 0, 0 ],
-[ 0.1, -0.36, 0, 0, 0, 1 ],
-[ 0.11, 0.8, 1, 0, 0, 0 ],
-[ -0.5600000000000001, -0.23, 1, 0, 0, 0 ],
-[ 0.49, -0.02, 0, 0, 0, 1 ],
-[ 0.9400000000000001, -0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.67, 0.55, 0, 0, 1, 0 ],
-[ -0.92, 0.73, 1, 0, 0, 0 ],
-[ -0.24, 0.91, 1, 0, 0, 0 ],
-[ 0.51, 0.35, 0, 1, 0, 0 ],
-[ 0.77, 0.73, 0, 1, 0, 0 ],
-[ 0.17, 0.65, 0, 1, 0, 0 ],
-[ 0.51, -0.18, 1, 0, 0, 0 ],
-[ -0.16, 0.44, 0, 0, 1, 0 ],
-[ -0.85, -0.92, 1, 0, 0, 0 ],
-[ -0.68, 0.23, 0, 0, 1, 0 ],
-[ -0.49, 0.11, 0, 0, 1, 0 ],
-[ 0.76, 0.92, 1, 0, 0, 0 ],
-[ 0.02, 0.2, 1, 0, 0, 0 ],
-[ -0.8300000000000001, 0.72, 0, 0, 1, 0 ],
-[ -0.53, -0.25, 1, 0, 0, 0 ],
-[ -0.39, -0.02, 0, 0, 0, 1 ],
-[ -0.5700000000000001, 0.5, 0, 0, 1, 0 ],
-[ -0.5, 0.24, 0, 0, 1, 0 ],
-[ -0.06, -0.54, 0, 0, 0, 1 ],
-[ 0.5, 0.84, 0, 1, 0, 0 ],
-[ -0.77, -0.14, 1, 0, 0, 0 ],
-[ 0.76, -0.18, 1, 0, 0, 0 ],
-[ -0.5600000000000001, 0.54, 0, 0, 1, 0 ],
-[ 0.32, 0.77, 0, 1, 0, 0 ],
-[ 0.8200000000000001, 0.25, 0, 1, 0, 0 ],
-[ 0.09, 0.17, 1, 0, 0, 0 ],
-[ 0.88, -0.79, 1, 0, 0, 0 ],
-[ -0.26, 0.13, 0, 0, 1, 0 ],
-[ -0.64, -0.66, 1, 0, 0, 0 ],
-[ 0.96, 0.16, 1, 0, 0, 0 ],
-[ 0.03, 0.54, 1, 0, 0, 0 ],
-[ 0.09, 0.26, 1, 0, 0, 0 ],
-[ 0.48, -0.14, 1, 0, 0, 0 ],
-[ 0.77, -0.72, 1, 0, 0, 0 ],
-[ 0.39, -0.42, 1, 0, 0, 0 ],
-[ -0.93, 0.66, 1, 0, 0, 0 ],
-[ -0.98, -0.49, 1, 0, 0, 0 ],
-[ 0.24, -0.13, 0, 0, 0, 1 ],
-[ -0.72, -0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.9400000000000001, 0.88, 1, 0, 0, 0 ],
-[ -0.26, 0.15, 0, 0, 1, 0 ],
-[ 0.55, 0.12, 0, 1, 0, 0 ],
-[ 0.51, 0.36, 0, 1, 0, 0 ],
-[ -0.28, -0.5, 1, 0, 0, 0 ],
-[ -0.19, -0.49, 0, 0, 0, 1 ],
-[ 0.5700000000000001, -0.79, 1, 0, 0, 0 ],
-[ -0.2, -0.54, 0, 0, 0, 1 ],
-[ 0.99, 0.14, 1, 0, 0, 0 ],
-[ 0.14, 0.92, 1, 0, 0, 0 ],
-[ -1., -0.05, 1, 0, 0, 0 ],
-[ -0.79, -0.12, 1, 0, 0, 0 ],
-[ 0.49, 0.92, 0, 1, 0, 0 ],
-[ -0.61, -0.49, 1, 0, 0, 0 ],
-[ -0.41, -0.53, 1, 0, 0, 0 ],
-[ 0.9400000000000001, -0.63, 1, 0, 0, 0 ],
-[ 0.14, -0.26, 0, 0, 0, 1 ],
-[ 0.89, 0.78, 1, 0, 0, 0 ],
-[ -0.01, -0.37, 0, 0, 0, 1 ],
-[ -0.08, -0.53, 0, 0, 0, 1 ],
-[ -0.46, 0.09, 0, 0, 1, 0 ],
-[ 0.88, 0.48, 0, 1, 0, 0 ],
-[ -0.66, 0.73, 0, 0, 1, 0 ],
-[ -0.02, 0.15, 1, 0, 0, 0 ],
-[ -0.55, -0.37, 1, 0, 0, 0 ],
-[ 0.22, 0.98, 1, 0, 0, 0 ],
-[ -0.9500000000000001, 0.41, 1, 0, 0, 0 ],
-[ 0.92, 0.33, 1, 0, 0, 0 ],
-[ -0.9500000000000001, 0.58, 1, 0, 0, 0 ],
-[ -0.53, 0.72, 0, 0, 1, 0 ],
-[ -0.36, 0.8300000000000001, 0, 0, 1, 0 ],
-[ -0.38, 0.42, 0, 0, 1, 0 ],
-[ 0.5600000000000001, 0.76, 0, 1, 0, 0 ],
-[ -0.64, 0.78, 0, 0, 1, 0 ],
-[ -0.08, -0.39, 0, 0, 0, 1 ],
-[ -0.96, -0.28, 1, 0, 0, 0 ],
-[ 0.21, 0.34, 0, 1, 0, 0 ],
-[ -0.58, -0.35, 1, 0, 0, 0 ],
-[ -0.44, 0.49, 0, 0, 1, 0 ],
-[ 0.29, -0.25, 0, 0, 0, 1 ],
-[ -0.6900000000000001, 0.93, 1, 0, 0, 0 ],
-[ -0.26, -0.8, 1, 0, 0, 0 ],
-[ 0.65, -0.21, 1, 0, 0, 0 ],
-[ 0.77, -0.66, 1, 0, 0, 0 ],
-[ 0.63, -0.76, 1, 0, 0, 0 ],
-[ -0.85, -0.42, 1, 0, 0, 0 ],
-[ -0.92, 0.16, 1, 0, 0, 0 ],
-[ 0.1, 0.72, 1, 0, 0, 0 ],
-[ -0.98, 0.53, 1, 0, 0, 0 ],
-[ 0.46, -1., 1, 0, 0, 0 ],
-[ -0.76, -0.44, 1, 0, 0, 0 ],
-[ -0.5700000000000001, 0.3, 0, 0, 1, 0 ],
-[ -0.45, 0.78, 0, 0, 1, 0 ],
-[ -0.91, 0.4, 0, 0, 1, 0 ],
-[ 0.5, 1., 1, 0, 0, 0 ],
-[ 0.58, -0.53, 1, 0, 0, 0 ],
-[ 0.71, 0.77, 0, 1, 0, 0 ],
-[ 0.92, 0.22, 1, 0, 0, 0 ],
-[ -0.61, -0.71, 1, 0, 0, 0 ],
-[ 0.24, 0.5, 0, 1, 0, 0 ],
-[ 0.7000000000000001, 0.8, 0, 1, 0, 0 ],
-[ -0.32, -0.15, 0, 0, 0, 1 ],
-[ 0.3, -0.17, 0, 0, 0, 1 ],
-[ 0.88, 0.53, 0, 1, 0, 0 ],
-[ 0.17, 0.31, 0, 1, 0, 0 ],
-[ 0.29, -0.62, 1, 0, 0, 0 ],
-[ 0.03, 0.86, 1, 0, 0, 0 ],
-[ -0.51, 0.21, 0, 0, 1, 0 ],
-[ 0.01, -0.76, 0, 0, 0, 1 ],
-[ -0.03, 0.84, 1, 0, 0, 0 ],
-[ -0.5700000000000001, 0.23, 0, 0, 1, 0 ],
-[ -0.02, -0.6, 0, 0, 0, 1 ],
-[ 0.88, -0.43, 1, 0, 0, 0 ],
-[ -0.5, -0.35, 1, 0, 0, 0 ],
-[ 0.7000000000000001, -0.01, 1, 0, 0, 0 ],
-[ -0.2, 0.32, 0, 0, 1, 0 ],
-[ -0.17, 0.32, 0, 0, 1, 0 ],
-[ 0.93, 0.23, 1, 0, 0, 0 ],
-[ 0.64, -0.03, 1, 0, 0, 0 ],
-[ 0.04, 0.34, 1, 0, 0, 0 ],
-[ 0.27, 0.44, 0, 1, 0, 0 ],
-[ -0.03, 0.12, 1, 0, 0, 0 ],
-[ 0.11, -0.88, 1, 0, 0, 0 ],
-[ -0.38, 0.6900000000000001, 0, 0, 1, 0 ],
-[ 0.36, -0.2, 0, 0, 0, 1 ],
-[ 0.43, -0.54, 1, 0, 0, 0 ],
-[ -0.16, -0.08, 0, 0, 0, 1 ],
-[ 0.54, 0.29, 0, 1, 0, 0 ],
-[ -0.35, -0.51, 1, 0, 0, 0 ],
-[ -0.32, 0.8300000000000001, 0, 0, 1, 0 ],
-[ -0.42, -0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.34, -0.63, 1, 0, 0, 0 ],
-[ -0.98, 0.1, 1, 0, 0, 0 ],
-[ -0.91, 0.07000000000000001, 1, 0, 0, 0 ],
-[ 0.96, -0.76, 1, 0, 0, 0 ],
-[ -0.93, -0.5600000000000001, 1, 0, 0, 0 ],
-[ -0.21, 0.68, 0, 0, 1, 0 ],
-[ -0.35, 0.97, 1, 0, 0, 0 ],
-[ -0.64, 0.84, 0, 0, 1, 0 ],
-[ -0.91, -0.68, 1, 0, 0, 0 ],
-[ -0.92, -0.39, 1, 0, 0, 0 ],
-[ -0.16, -0.08, 0, 0, 0, 1 ],
-[ -0.96, 0.21, 1, 0, 0, 0 ],
-[ -0.17, -0.93, 1, 0, 0, 0 ],
-[ 0.05, 0.32, 1, 0, 0, 0 ],
-[ 0.76, -0.9, 1, 0, 0, 0 ],
-[ -0.85, 0.2, 1, 0, 0, 0 ],
-[ 0.49, -0.58, 1, 0, 0, 0 ],
-[ -0.93, 0.89, 1, 0, 0, 0 ],
-[ 0.79, 0.7000000000000001, 0, 1, 0, 0 ],
-[ -0.9500000000000001, 0.18, 1, 0, 0, 0 ],
-[ -0.2, -0.65, 1, 0, 0, 0 ],
-[ 0.12, 0.78, 1, 0, 0, 0 ],
-[ -0.84, 0.23, 0, 0, 1, 0 ],
-[ 0.77, 0.99, 1, 0, 0, 0 ],
-[ 0.62, 0.7000000000000001, 0, 1, 0, 0 ],
-[ 0.93, -0.6, 1, 0, 0, 0 ],
-[ -0.76, -0.75, 1, 0, 0, 0 ],
-[ 0.12, -0.54, 0, 0, 0, 1 ],
-[ -0.52, -0.72, 1, 0, 0, 0 ],
-[ -0.62, 0.63, 0, 0, 1, 0 ],
-[ 0.28, 0.38, 0, 1, 0, 0 ],
-[ -0.61, -0.14, 1, 0, 0, 0 ],
-[ -0.43, 0.84, 0, 0, 1, 0 ],
-[ -0.79, -0.98, 1, 0, 0, 0 ],
-[ 0.5, -0.13, 1, 0, 0, 0 ],
-[ 0.34, -0.87, 1, 0, 0, 0 ],
-[ 0.43, 0.43, 0, 1, 0, 0 ],
-[ 0.68, -0.96, 1, 0, 0, 0 ],
-[ -0.85, -0.07000000000000001, 1, 0, 0, 0 ],
-[ -0.74, -0.98, 1, 0, 0, 0 ],
-[ -0.22, 0.52, 0, 0, 1, 0 ],
-[ -0.09, -0.8, 0, 0, 0, 1 ],
-[ 0.51, -0.55, 1, 0, 0, 0 ],
-[ 0.26, 0.03, 1, 0, 0, 0 ],
-[ 0.46, -0.9, 1, 0, 0, 0 ],
-[ -0.74, 0.65, 0, 0, 1, 0 ],
-[ -0.85, -0.99, 1, 0, 0, 0 ],
-[ 0.11, -0.4, 0, 0, 0, 1 ],
-[ 0.02, -0.38, 0, 0, 0, 1 ],
-[ -0.79, -1., 1, 0, 0, 0 ],
-[ 0.44, 0.19, 0, 1, 0, 0 ],
-[ -0.19, -0.73, 1, 0, 0, 0 ],
-[ -0.2, -0.64, 1, 0, 0, 0 ],
-[ -0.5, -0.99, 1, 0, 0, 0 ],
-[ -0.24, 0.92, 1, 0, 0, 0 ],
-[ -0.61, 0.99, 1, 0, 0, 0 ],
-[ -0.65, 0.11, 0, 0, 1, 0 ],
-[ 0.87, -0.8300000000000001, 1, 0, 0, 0 ],
-[ -0.9400000000000001, 0.08, 1, 0, 0, 0 ],
-[ 0.32, -0.54, 1, 0, 0, 0 ],
-[ 0.54, 0.03, 1, 0, 0, 0 ],
-[ -0.75, -1., 1, 0, 0, 0 ],
-[ -0.77, 0.78, 0, 0, 1, 0 ],
-[ 0.79, 0.22, 0, 1, 0, 0 ],
-[ -0.46, -0.1, 1, 0, 0, 0 ],
-[ -0.12, -0.26, 0, 0, 0, 1 ],
-[ -0.2, 0.18, 0, 0, 1, 0 ],
-[ -0.15, 0.32, 0, 0, 1, 0 ],
-[ -0.5, 0.02, 1, 0, 0, 0 ],
-[ -0.22, 0.52, 0, 0, 1, 0 ],
-[ 0.68, -0.9, 1, 0, 0, 0 ],
-[ 0.26, -0.92, 1, 0, 0, 0 ],
-[ -0.59, 0.03, 1, 0, 0, 0 ],
-[ 0.7000000000000001, -0.18, 1, 0, 0, 0 ],
-[ 0.34, 0.22, 0, 1, 0, 0 ],
-[ -0.53, -0.05, 1, 0, 0, 0 ],
-[ 0.09, 0.49, 0, 1, 0, 0 ],
-[ -0.84, 0.68, 0, 0, 1, 0 ],
-[ -0.36, -0.66, 1, 0, 0, 0 ],
-[ 0.6900000000000001, -0.61, 1, 0, 0, 0 ],
-[ 0.75, -0.75, 1, 0, 0, 0 ],
-[ 0.84, -0.87, 1, 0, 0, 0 ],
-[ 0.37, -0.5, 1, 0, 0, 0 ],
-[ 0.78, 0.79, 0, 1, 0, 0 ],
-[ 0.67, -0.32, 1, 0, 0, 0 ],
-[ 0.76, -0.74, 1, 0, 0, 0 ],
-[ 0.92, 0.7000000000000001, 1, 0, 0, 0 ],
-[ 0.17, 0.48, 0, 1, 0, 0 ],
-[ -0.33, 0.29, 0, 0, 1, 0 ],
-[ -0.37, 0.5, 0, 0, 1, 0 ],
-[ -0.36, -0.79, 1, 0, 0, 0 ],
-[ -0.4, 0.39, 0, 0, 1, 0 ],
-[ -0.47, 0.16, 0, 0, 1, 0 ],
-[ 0.6900000000000001, -0.09, 1, 0, 0, 0 ],
-[ 0.46, -0.22, 1, 0, 0, 0 ],
-[ -0.52, 0.76, 0, 0, 1, 0 ],
-[ -0.72, 1., 1, 0, 0, 0 ],
-[ 0.42, 0.6, 0, 1, 0, 0 ],
-[ -0.15, -0.34, 0, 0, 0, 1 ],
-[ -0.75, -0.8100000000000001, 1, 0, 0, 0 ],
-[ -0.78, -0.38, 1, 0, 0, 0 ],
-[ -0.09, 0.17, 1, 0, 0, 0 ],
-[ 0.84, 0.03, 1, 0, 0, 0 ],
-[ -0.8300000000000001, -0.16, 1, 0, 0, 0 ],
-[ 0.97, -0.52, 1, 0, 0, 0 ],
-[ 0.59, 0.53, 0, 1, 0, 0 ],
-[ -0.27, -0.55, 1, 0, 0, 0 ],
-[ 0.75, 0.14, 0, 1, 0, 0 ],
-[ 0.38, -0.84, 1, 0, 0, 0 ],
-[ 0.88, 0.92, 1, 0, 0, 0 ],
-[ -0.01, -0.71, 0, 0, 0, 1 ],
-[ -0.5700000000000001, 0.12, 0, 0, 1, 0 ],
-[ 0.02, -0.88, 0, 0, 0, 1 ],
-[ 0.16, -0.08, 0, 0, 0, 1 ],
-[ -0.08, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.61, -0.17, 1, 0, 0, 0 ],
-[ 0.39, 0.6900000000000001, 0, 1, 0, 0 ],
-[ 0.12, 0.01, 1, 0, 0, 0 ],
-[ -0.16, -0.68, 1, 0, 0, 0 ],
-[ 0.85, 0.55, 0, 1, 0, 0 ],
-[ -0.03, -0.66, 0, 0, 0, 1 ],
-[ 0.41, 0.11, 0, 1, 0, 0 ],
-[ -0.38, 0.8200000000000001, 0, 0, 1, 0 ],
-[ -0.73, 0.93, 1, 0, 0, 0 ],
-[ 0.17, -0.33, 0, 0, 0, 1 ],
-[ 0.88, 0.63, 0, 1, 0, 0 ],
-[ -0.27, -0.38, 0, 0, 0, 1 ],
-[ -0.5700000000000001, 0.47, 0, 0, 1, 0 ],
-[ 0.46, 1., 1, 0, 0, 0 ],
-[ -0.48, -0.6900000000000001, 1, 0, 0, 0 ],
-[ -0.85, 0.8200000000000001, 1, 0, 0, 0 ],
-[ -0.28, 0.05, 1, 0, 0, 0 ],
-[ -0.96, 0.21, 1, 0, 0, 0 ],
-[ -0.79, 0.63, 0, 0, 1, 0 ],
-[ -0.5600000000000001, 0.02, 1, 0, 0, 0 ],
-[ -0.72, 0.9500000000000001, 1, 0, 0, 0 ],
-[ -0.17, 0.9400000000000001, 1, 0, 0, 0 ],
-[ 0.39, -0.48, 1, 0, 0, 0 ],
-[ -0.54, 0.68, 0, 0, 1, 0 ],
-[ 0.9, -0.37, 1, 0, 0, 0 ],
-[ 0.28, 0.79, 0, 1, 0, 0 ],
-[ 0.09, -0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.1, 0.18, 1, 0, 0, 0 ],
-[ 0.54, -0.79, 1, 0, 0, 0 ],
-[ 0.34, 0.51, 0, 1, 0, 0 ],
-[ -0.33, 0.13, 0, 0, 1, 0 ],
-[ -0.7000000000000001, 0.71, 0, 0, 1, 0 ],
-[ 0.01, 0.9400000000000001, 1, 0, 0, 0 ],
-[ -0.18, 0.79, 0, 0, 1, 0 ],
-[ 0.73, 0.92, 1, 0, 0, 0 ],
-[ 0.86, 0.63, 0, 1, 0, 0 ],
-[ -0.91, -0.26, 1, 0, 0, 0 ],
-]
\ No newline at end of file
+[ -0.42, -0.73, 0, 1, 0 ],
+[ -0.62, 0.58, 0, 0, 1 ],
+[ -0.41, -0.21, 0, 0, 1 ],
+[ 0.08, -0.75, 0, 1, 0 ],
+[ 0.35, 0.6, 1, 0, 0 ],
+[ -0.22, 0.66, 1, 0, 0 ],
+[ -0.88, 0.11, 0, 0, 1 ],
+[ -0.36, -0.9400000000000001, 0, 1, 0 ],
+[ 0.97, -0.9, 0, 1, 0 ],
+[ -0.78, 0.44, 0, 0, 1 ],
+[ 0.14, -0.6, 0, 1, 0 ],
+[ 0.73, -0.97, 0, 1, 0 ],
+[ -0.76, -0.98, 0, 1, 0 ],
+[ 0.13, 0.73, 1, 0, 0 ],
+[ -0.88, -0.5600000000000001, 0, 1, 0 ],
+[ -0.3, 0.71, 1, 0, 0 ],
+[ 0.55, -0.86, 0, 1, 0 ],
+[ 0.36, -1., 0, 1, 0 ],
+[ -0.04, 0.5600000000000001, 1, 0, 0 ],
+[ 0.47, -0.71, 0, 1, 0 ],
+[ 0.76, 0.3, 0, 0, 1 ],
+[ -0.1, 0.62, 1, 0, 0 ],
+[ 0.33, 0.5600000000000001, 1, 0, 0 ],
+[ -0.09, 0.85, 1, 0, 0 ],
+[ -0.22, 0.49, 1, 0, 0 ],
+[ 0.38, 0.25, 0, 0, 1 ],
+[ 0.24, 0.79, 1, 0, 0 ],
+[ -0.62, -0.4, 0, 0, 1 ],
+[ -0.37, -0.67, 0, 1, 0 ],
+[ 0.25, 0.25, 1, 0, 0 ],
+[ -0.96, 0.1, 0, 0, 1 ],
+[ 0.05, 0.8, 1, 0, 0 ],
+[ -0.32, -0.6, 0, 1, 0 ],
+[ 0.28, 0.39, 1, 0, 0 ],
+[ -0.67, -0.15, 0, 0, 1 ],
+[ -0.5700000000000001, -0.35, 0, 0, 1 ],
+[ 0.9400000000000001, 0.14, 0, 0, 1 ],
+[ 0.89, 0.65, 0, 0, 1 ],
+[ -0.12, 0.75, 1, 0, 0 ],
+[ -0.19, 0.6, 1, 0, 0 ],
+[ 0.61, 0.86, 0, 0, 1 ],
+[ -0.01, -0.84, 0, 1, 0 ],
+[ -0.49, -0.72, 0, 1, 0 ],
+[ 0.5, -0.78, 0, 1, 0 ],
+[ 0.9, 0.06, 0, 0, 1 ],
+[ -0.03, 0.46, 1, 0, 0 ],
+[ 0.33, 0.9, 0, 0, 1 ],
+[ 0.68, -0.97, 0, 1, 0 ],
+[ -0.58, -0.64, 0, 1, 0 ],
+[ -0.19, 0.93, 0, 0, 1 ],
+[ -1., 0.52, 0, 0, 1 ],
+[ -0.31, -0.6900000000000001, 0, 1, 0 ],
+[ 0.92, -0.88, 0, 1, 0 ],
+[ -0.97, -0.52, 0, 1, 0 ],
+[ 0.25, 0.71, 1, 0, 0 ],
+[ 0.29, -0.99, 0, 1, 0 ],
+[ -0.01, 0.15, 1, 0, 0 ],
+[ -0.79, -0.55, 0, 1, 0 ],
+[ -0.97, 0.29, 0, 0, 1 ],
+[ 0.5600000000000001, 0.08, 0, 0, 1 ],
+[ 0.9500000000000001, 0.7000000000000001, 0, 0, 1 ],
+[ 0.53, 0.62, 0, 0, 1 ],
+[ 0.89, 0.09, 0, 0, 1 ],
+[ 0.12, 0.26, 1, 0, 0 ],
+[ -0.25, 0.75, 1, 0, 0 ],
+[ 0.3, -0.5, 0, 1, 0 ],
+[ 0.17, 0.26, 1, 0, 0 ],
+[ -0.17, -0.26, 0, 0, 1 ],
+[ -0.11, -0.39, 0, 0, 1 ],
+[ 0.9, -0.04, 0, 0, 1 ],
+[ 0.5600000000000001, 0.8100000000000001, 0, 0, 1 ],
+[ 0.31, -0.5, 0, 1, 0 ],
+[ -0.79, -0.84, 0, 1, 0 ],
+[ 0.87, 0.1, 0, 0, 1 ],
+[ 0.75, -0.75, 0, 1, 0 ],
+[ -0.27, 0.21, 1, 0, 0 ],
+[ 0.12, 0.38, 1, 0, 0 ],
+[ -0.1, 0.13, 1, 0, 0 ],
+[ 0.28, 0.61, 1, 0, 0 ],
+[ 0.8100000000000001, 0.67, 0, 0, 1 ],
+[ 0.15, 0.96, 0, 0, 1 ],
+[ 0.51, -0.54, 0, 1, 0 ],
+[ -0.29, 0.59, 1, 0, 0 ],
+[ 0.99, 0.93, 0, 0, 1 ],
+[ 0.59, 0.73, 0, 0, 1 ],
+[ 0.62, 0.06, 0, 0, 1 ],
+[ 0.89, -0.6900000000000001, 0, 1, 0 ],
+[ 0.15, -0.48, 0, 0, 1 ],
+[ 0.89, -0.67, 0, 1, 0 ],
+[ 0.66, 0.51, 0, 0, 1 ],
+[ 0.98, -0.97, 0, 1, 0 ],
+[ -0.75, -0.72, 0, 1, 0 ],
+[ -0.52, 0.04, 0, 0, 1 ],
+[ 0.27, 0.5, 1, 0, 0 ],
+[ -0.31, -0.27, 0, 0, 1 ],
+[ 0.96, -0.15, 0, 0, 1 ],
+[ -0.3, 0.73, 1, 0, 0 ],
+[ 0.37, 0.01, 0, 0, 1 ],
+[ -0.16, -0.84, 0, 1, 0 ],
+[ -0.19, 0.8100000000000001, 1, 0, 0 ],
+[ -0.13, 0.43, 1, 0, 0 ],
+[ 0.01, -0.76, 0, 1, 0 ],
+[ -0.23, -0.7000000000000001, 0, 1, 0 ],
+[ 0.9500000000000001, 0.5600000000000001, 0, 0, 1 ],
+[ -0.26, 0.52, 1, 0, 0 ],
+[ 0.66, -0.62, 0, 1, 0 ],
+[ -0.46, -0.66, 0, 1, 0 ],
+[ 0.91, -0.8300000000000001, 0, 1, 0 ],
+[ -0.17, 0.29, 1, 0, 0 ],
+[ -0.3, -0.59, 0, 1, 0 ],
+[ 0.28, -0.91, 0, 1, 0 ],
+[ -0.12, 0.17, 1, 0, 0 ],
+[ -0.51, 0.91, 0, 0, 1 ],
+[ 0.1, -0.45, 0, 0, 1 ],
+[ -0.97, -0.61, 0, 1, 0 ],
+[ 1., -0.46, 0, 0, 1 ],
+[ 0.05, 0.87, 1, 0, 0 ],
+[ 0.8, -0.97, 0, 1, 0 ],
+[ 0.53, -0.9400000000000001, 0, 1, 0 ],
+[ -0.12, 0.8, 1, 0, 0 ],
+[ -0.35, -0.77, 0, 1, 0 ],
+[ -1., 0.17, 0, 0, 1 ],
+[ -0.7000000000000001, -0.93, 0, 1, 0 ],
+[ 0.1, -0.02, 0, 0, 1 ],
+[ 0.2, 0.98, 0, 0, 1 ],
+[ 0.22, 0.17, 1, 0, 0 ],
+[ -0.24, 0.48, 1, 0, 0 ],
+[ -0.49, 0.46, 0, 0, 1 ],
+[ -0.8200000000000001, -0.34, 0, 0, 1 ],
+[ 0.75, -0.8300000000000001, 0, 1, 0 ],
+[ 0.45, -0.24, 0, 0, 1 ],
+[ -0.46, -0.6, 0, 1, 0 ],
+[ 0.91, -0.98, 0, 1, 0 ],
+[ -0.02, 0.58, 1, 0, 0 ],
+[ -0.79, 0.02, 0, 0, 1 ],
+[ -0.89, -0.8, 0, 1, 0 ],
+[ -0.9400000000000001, -0.62, 0, 1, 0 ],
+[ 0.43, -0.02, 0, 0, 1 ],
+[ 0.73, 0.38, 0, 0, 1 ],
+[ 0.09, 0.84, 1, 0, 0 ],
+[ 0.24, -0.7000000000000001, 0, 1, 0 ],
+[ -0.01, 0.7000000000000001, 1, 0, 0 ],
+[ -0.11, 0.31, 1, 0, 0 ],
+[ -0.68, 0.9500000000000001, 0, 0, 1 ],
+[ -0.85, 0.55, 0, 0, 1 ],
+[ -0.16, 0.73, 1, 0, 0 ],
+[ 0.74, -0.5, 0, 1, 0 ],
+[ 0.8300000000000001, -0.54, 0, 1, 0 ],
+[ -0.37, 0.61, 1, 0, 0 ],
+[ -0.91, 0.45, 0, 0, 1 ],
+[ 0.1, -0.02, 0, 0, 1 ],
+[ 0.42, -0.5700000000000001, 0, 1, 0 ],
+[ 0.5700000000000001, -0.85, 0, 1, 0 ],
+[ -0.71, 0.27, 0, 0, 1 ],
+[ -0.3, 0.44, 1, 0, 0 ],
+[ 0.05, 0.59, 1, 0, 0 ],
+[ -0.46, 0.45, 0, 0, 1 ],
+[ -0.21, -0.62, 0, 1, 0 ],
+[ -0.68, 0.8, 0, 0, 1 ],
+[ 0.18, 0.37, 1, 0, 0 ],
+[ -0.59, 0.34, 0, 0, 1 ],
+[ -0.12, 0.65, 1, 0, 0 ],
+[ -0.73, 0.06, 0, 0, 1 ],
+[ 0.93, -0.54, 0, 1, 0 ],
+[ 0.92, 0.53, 0, 0, 1 ],
+[ 0.23, 0.31, 1, 0, 0 ],
+[ -0.4, -0.86, 0, 1, 0 ],
+[ 0.71, 0.62, 0, 0, 1 ],
+[ 0.28, 0.7000000000000001, 1, 0, 0 ],
+[ -0.3, -0.84, 0, 1, 0 ],
+[ -0.6, -0.61, 0, 1, 0 ],
+[ -0.23, 0.61, 1, 0, 0 ],
+[ 0.04, 0.12, 1, 0, 0 ],
+[ -0.47, -0.88, 0, 1, 0 ],
+[ -0.29, 0.28, 1, 0, 0 ],
+[ 0.19, -0.64, 0, 1, 0 ],
+[ -0.31, -0.04, 0, 0, 1 ],
+[ 0.08, 0.31, 1, 0, 0 ],
+[ 0.38, 0.63, 0, 0, 1 ],
+[ 0.14, 0.18, 1, 0, 0 ],
+[ -0.14, 0.48, 1, 0, 0 ],
+[ 0.38, -0.99, 0, 1, 0 ],
+[ -0.48, 0.32, 0, 0, 1 ],
+[ -0.27, 0.59, 1, 0, 0 ],
+[ -0.28, 0.48, 1, 0, 0 ],
+[ 0.67, 0.38, 0, 0, 1 ],
+[ 0.53, 0.35, 0, 0, 1 ],
+[ -0.8300000000000001, 0.76, 0, 0, 1 ],
+[ -0.18, -0.01, 0, 0, 1 ],
+[ -0.78, 0.88, 0, 0, 1 ],
+[ 0.92, -0.42, 0, 0, 1 ],
+[ -0.04, -0.53, 0, 1, 0 ],
+[ 0.05, 0.75, 1, 0, 0 ],
+[ 0.22, -0.5, 0, 1, 0 ],
+[ -0.28, -0.73, 0, 1, 0 ],
+[ 0.02, 0.31, 1, 0, 0 ],
+[ 0.01, -0.98, 0, 1, 0 ],
+[ -0.38, 0.62, 1, 0, 0 ],
+[ 0.8100000000000001, -0.73, 0, 1, 0 ],
+[ 0.27, 0.37, 1, 0, 0 ],
+[ -0.13, -0.98, 0, 1, 0 ],
+[ -0.31, 0.43, 1, 0, 0 ],
+[ -0.67, 0.8300000000000001, 0, 0, 1 ],
+[ -0.2, 0.29, 1, 0, 0 ],
+[ 0.36, -0.29, 0, 0, 1 ],
+[ 0.27, 0.34, 1, 0, 0 ],
+[ -0.48, -0.87, 0, 1, 0 ],
+[ 0.2, 0.5, 1, 0, 0 ],
+[ -0.88, 0.23, 0, 0, 1 ],
+[ 0.43, -0.52, 0, 1, 0 ],
+[ -0.39, 0.12, 0, 0, 1 ],
+[ -0.9, 0.84, 0, 0, 1 ],
+[ -0.17, -0.87, 0, 1, 0 ],
+[ 0.92, -0.9, 0, 1, 0 ],
+[ 0.91, -0.73, 0, 1, 0 ],
+[ 0.12, 0.6, 1, 0, 0 ],
+[ -0.74, -0.5, 0, 1, 0 ],
+[ 0.54, -0.92, 0, 1, 0 ],
+[ -0.85, -0.16, 0, 0, 1 ],
+[ 0.4, -0.75, 0, 1, 0 ],
+[ -0.53, -0.48, 0, 0, 1 ],
+[ 0.13, -0.63, 0, 1, 0 ],
+[ 0.75, -0.45, 0, 0, 1 ],
+[ 0.3, 0.8200000000000001, 0, 0, 1 ],
+[ -0.31, -0.1, 0, 0, 1 ],
+[ -0.84, -0.27, 0, 0, 1 ],
+[ -0.11, 0.75, 1, 0, 0 ],
+[ 0.17, 0.66, 1, 0, 0 ],
+[ 0.9500000000000001, -0.11, 0, 0, 1 ],
+[ -0.89, -0.39, 0, 0, 1 ],
+[ 0.21, -0.77, 0, 1, 0 ],
+[ 0.18, -0.98, 0, 1, 0 ],
+[ -0.8, 0.6, 0, 0, 1 ],
+[ 0.86, 0.54, 0, 0, 1 ],
+[ 0.76, -0.62, 0, 1, 0 ],
+[ -0.21, -0.54, 0, 1, 0 ],
+[ 0.34, 0.8300000000000001, 0, 0, 1 ],
+[ -0.16, 0.74, 1, 0, 0 ],
+[ -0.22, 0.29, 1, 0, 0 ],
+[ 0.29, 0.47, 1, 0, 0 ],
+[ 0.73, -0.41, 0, 0, 1 ],
+[ 0.04, 0.48, 1, 0, 0 ],
+[ -0.87, 0.2, 0, 0, 1 ],
+[ -0.71, -0.8200000000000001, 0, 1, 0 ],
+[ -0.18, 0.6, 1, 0, 0 ],
+[ 0.44, -0.8200000000000001, 0, 1, 0 ],
+[ -0.8300000000000001, 0.44, 0, 0, 1 ],
+[ -0.07000000000000001, 0.77, 1, 0, 0 ],
+[ 0.67, -0.92, 0, 1, 0 ],
+[ 0.09, 0.73, 1, 0, 0 ],
+[ -0.22, 0.79, 1, 0, 0 ],
+[ 0.64, -0.72, 0, 1, 0 ],
+[ -0.13, -0.4, 0, 0, 1 ],
+[ 0.2, 0.22, 1, 0, 0 ],
+[ 0.1, -0.19, 0, 0, 1 ],
+[ 0.1, -0.74, 0, 1, 0 ],
+[ 0.47, -0.52, 0, 1, 0 ],
+[ -0.5700000000000001, -0.71, 0, 1, 0 ],
+[ 0.28, 0.73, 1, 0, 0 ],
+[ -0.07000000000000001, 0.31, 1, 0, 0 ],
+[ -0.27, 0.39, 1, 0, 0 ],
+[ 0.52, -0.76, 0, 1, 0 ],
+[ -0.31, -0.52, 0, 1, 0 ],
+[ -0.36, 0.48, 1, 0, 0 ],
+[ 0.76, -0.88, 0, 1, 0 ],
+[ -0.31, 0.75, 1, 0, 0 ],
+[ 0.3, 0.6900000000000001, 1, 0, 0 ],
+[ -0.3, -0.84, 0, 1, 0 ],
+[ 0.62, -0.51, 0, 1, 0 ],
+[ 0.34, 0.66, 1, 0, 0 ],
+[ -0.14, 0.28, 1, 0, 0 ],
+[ -0.2, 0.5, 1, 0, 0 ],
+[ 0.51, -0.33, 0, 0, 1 ],
+[ -0.12, 0.01, 0, 0, 1 ],
+[ 0.16, 0.16, 1, 0, 0 ],
+[ 0.33, 0.3, 1, 0, 0 ],
+[ -0.21, 0.41, 1, 0, 0 ],
+[ -0.73, -0.92, 0, 1, 0 ],
+[ 0.85, -0.71, 0, 1, 0 ],
+[ 0.25, -0.61, 0, 1, 0 ],
+[ 0.31, -0.8200000000000001, 0, 1, 0 ],
+[ -0.64, 0.58, 0, 0, 1 ],
+[ -0.06, 0.12, 1, 0, 0 ],
+[ 0.06, -0.85, 0, 1, 0 ],
+[ 0.22, 0.53, 1, 0, 0 ],
+[ 0.1, 0.37, 1, 0, 0 ],
+[ 0.49, 0.2, 0, 0, 1 ],
+[ -0.06, 0.12, 1, 0, 0 ],
+[ 0.23, 0.24, 1, 0, 0 ],
+[ 0.13, -0.02, 0, 0, 1 ],
+[ 0.01, 0.87, 1, 0, 0 ],
+[ -0.39, -0.9500000000000001, 0, 1, 0 ],
+[ 0.33, 0.77, 0, 0, 1 ],
+[ -0.5600000000000001, 0.52, 0, 0, 1 ],
+[ 0.8300000000000001, -0.6, 0, 1, 0 ],
+[ -0.34, 0.68, 1, 0, 0 ],
+[ 0.25, 0.77, 1, 0, 0 ],
+[ -0.66, 0.8200000000000001, 0, 0, 1 ],
+[ -0.06, -0.72, 0, 1, 0 ],
+[ -0.5600000000000001, -0.52, 0, 1, 0 ],
+[ -0.5700000000000001, -0.54, 0, 1, 0 ],
+[ 0.05, 0.13, 1, 0, 0 ],
+[ -0.72, -0.87, 0, 1, 0 ],
+[ -0.28, -0.89, 0, 1, 0 ],
+[ -0.8100000000000001, -0.98, 0, 1, 0 ],
+[ 0.46, 0.52, 0, 0, 1 ],
+[ 0.9400000000000001, -0.09, 0, 0, 1 ],
+[ -0.31, 0.66, 1, 0, 0 ],
+[ 0.64, -0.78, 0, 1, 0 ],
+[ -0.6, -0.34, 0, 0, 1 ],
+[ -0.39, 0.73, 0, 0, 1 ],
+[ 0.26, -0.85, 0, 1, 0 ],
+[ -0.33, -0.68, 0, 1, 0 ],
+[ 0.35, 0.41, 1, 0, 0 ],
+[ -0.24, 0.19, 1, 0, 0 ],
+[ 0.19, 0.79, 1, 0, 0 ],
+[ 0.67, -0.77, 0, 1, 0 ],
+[ -0.75, -0.84, 0, 1, 0 ],
+[ -0.13, 0.79, 1, 0, 0 ],
+[ 0.44, -0.55, 0, 1, 0 ],
+[ 0.15, 0.21, 1, 0, 0 ],
+[ 0.58, 0.13, 0, 0, 1 ],
+[ -0.28, -0.71, 0, 1, 0 ],
+[ 0.29, -0.79, 0, 1, 0 ],
+[ -0.51, 0.51, 0, 0, 1 ],
+[ 0.29, 0.59, 1, 0, 0 ],
+[ 0.71, -0.9500000000000001, 0, 1, 0 ],
+[ 0.02, -0.71, 0, 1, 0 ],
+[ 0.36, -0.42, 0, 0, 1 ],
+[ 0.6, 0.48, 0, 0, 1 ],
+[ -0.37, 0.51, 1, 0, 0 ],
+[ 0.05, 0.23, 1, 0, 0 ],
+[ 0.37, -0.98, 0, 1, 0 ],
+[ -0.8, -0.05, 0, 0, 1 ],
+[ 0.15, 0.32, 1, 0, 0 ],
+[ -0.1, -0.33, 0, 0, 1 ],
+[ 0.7000000000000001, -0.38, 0, 0, 1 ],
+[ -0.06, 0.29, 1, 0, 0 ],
+[ -0.29, 0.47, 1, 0, 0 ],
+[ -0.75, 0.8300000000000001, 0, 0, 1 ],
+[ 0.19, 0.52, 1, 0, 0 ],
+[ 0.49, 0.01, 0, 0, 1 ],
+[ 0.64, 0.06, 0, 0, 1 ],
+[ 0.58, 0.78, 0, 0, 1 ],
+[ -0.66, -0.65, 0, 1, 0 ],
+[ 0.77, -0.89, 0, 1, 0 ],
+[ 0.52, -0.53, 0, 1, 0 ],
+[ -0.04, 0.63, 1, 0, 0 ],
+[ 0.22, 0.39, 1, 0, 0 ],
+[ -0.09, -0.71, 0, 1, 0 ],
+[ 0.42, 0.37, 0, 0, 1 ],
+[ -0.16, 0.64, 1, 0, 0 ],
+[ 0.75, -0.66, 0, 1, 0 ],
+[ 0.01, 0.54, 1, 0, 0 ],
+[ -0.02, 0.32, 1, 0, 0 ],
+[ -0.4, -0.2, 0, 0, 1 ],
+[ 0.06, 0.39, 1, 0, 0 ],
+[ 0.22, 0.35, 1, 0, 0 ],
+[ -0.84, -0.03, 0, 0, 1 ],
+[ -0.6, 1., 0, 0, 1 ],
+[ -0.79, -0.92, 0, 1, 0 ],
+[ 0.27, 0.7000000000000001, 1, 0, 0 ],
+[ 0.3, 0.26, 1, 0, 0 ],
+[ 0.75, 0.44, 0, 0, 1 ],
+[ 1., -0.17, 0, 0, 1 ],
+[ 0.06, 0.49, 1, 0, 0 ],
+[ 0.75, -0.16, 0, 0, 1 ],
+[ 0.03, 0.52, 1, 0, 0 ],
+[ -0.13, -0.9500000000000001, 0, 1, 0 ],
+[ 0.07000000000000001, -0.47, 0, 0, 1 ],
+[ -0.54, -0.66, 0, 1, 0 ],
+[ -0.04, 0.5600000000000001, 1, 0, 0 ],
+[ 0.26, 0.32, 1, 0, 0 ],
+[ -0.28, -0.64, 0, 1, 0 ],
+[ 0.78, -0.06, 0, 0, 1 ],
+[ -0.62, 0.75, 0, 0, 1 ],
+[ -0.52, -0.16, 0, 0, 1 ],
+[ -0.38, -0.53, 0, 1, 0 ],
+[ -0.01, -0.96, 0, 1, 0 ],
+[ 0.04, 0.78, 1, 0, 0 ],
+[ -0.53, 0.58, 0, 0, 1 ],
+[ -0.99, 0.2, 0, 0, 1 ],
+[ 0.3, 0.37, 1, 0, 0 ],
+[ 0.26, 0.34, 1, 0, 0 ],
+[ 0.31, 0.66, 1, 0, 0 ],
+[ -0.86, -0.91, 0, 1, 0 ],
+[ -0.13, 0.68, 1, 0, 0 ],
+[ 0.29, 0.3, 1, 0, 0 ],
+[ -0.66, -0.48, 0, 0, 1 ],
+[ -0.68, 0.19, 0, 0, 1 ],
+[ -0.96, -0.54, 0, 1, 0 ],
+[ -0.19, 0.92, 0, 0, 1 ],
+[ 0.18, 0.77, 1, 0, 0 ],
+[ -0.64, -0.6900000000000001, 0, 1, 0 ],
+[ 0.42, 0.06, 0, 0, 1 ],
+[ -0.72, 0.25, 0, 0, 1 ],
+[ -0.05, 0.47, 1, 0, 0 ],
+[ -0.13, 0.4, 1, 0, 0 ],
+[ -0.14, -0.67, 0, 1, 0 ],
+[ -0.93, -0.6, 0, 1, 0 ],
+[ 0.74, -0.91, 0, 1, 0 ],
+[ -0.19, -0.79, 0, 1, 0 ],
+[ -1., 0.13, 0, 0, 1 ],
+[ 0.8300000000000001, -0.17, 0, 0, 1 ],
+[ 0.09, 0.8, 1, 0, 0 ],
+[ -0.16, 0.29, 1, 0, 0 ],
+[ -0.09, 0.42, 1, 0, 0 ],
+[ -0.53, -0.75, 0, 1, 0 ],
+[ -0.08, 0.54, 1, 0, 0 ],
+[ 0.01, 0.45, 1, 0, 0 ],
+[ -0.73, 0.9500000000000001, 0, 0, 1 ],
+[ 0.4, -0.06, 0, 0, 1 ],
+[ 0.6, -0.68, 0, 1, 0 ],
+[ 0.26, 0.58, 1, 0, 0 ],
+[ -0.05, 0.2, 1, 0, 0 ],
+[ -0.27, -0.9500000000000001, 0, 1, 0 ],
+[ 0.36, -0.96, 0, 1, 0 ],
+[ -0.8200000000000001, 0.78, 0, 0, 1 ],
+[ -0.1, -0.74, 0, 1, 0 ],
+[ 0.5700000000000001, 0.12, 0, 0, 1 ],
+[ -0.24, -0.11, 0, 0, 1 ],
+[ 0.47, -0.6, 0, 1, 0 ],
+[ 0.32, -0.66, 0, 1, 0 ],
+[ 0.76, -0.8, 0, 1, 0 ],
+[ 0.05, 0.68, 1, 0, 0 ],
+[ 0.92, -0.35, 0, 0, 1 ],
+[ 0.08, 0.52, 1, 0, 0 ],
+[ 0.47, 0.44, 0, 0, 1 ],
+[ 0.28, 0.38, 1, 0, 0 ],
+[ -0.8300000000000001, -0.67, 0, 1, 0 ],
+[ -0.18, 0.77, 1, 0, 0 ],
+[ 0.86, 0.23, 0, 0, 1 ],
+[ -0.86, -0.87, 0, 1, 0 ],
+[ -0.16, 0.38, 1, 0, 0 ],
+[ 0.9500000000000001, -0.9400000000000001, 0, 1, 0 ],
+[ 0.93, 0.79, 0, 0, 1 ],
+[ -0.03, 0.55, 1, 0, 0 ],
+[ -0.72, -0.39, 0, 0, 1 ],
+[ -0.8200000000000001, -0.27, 0, 0, 1 ],
+[ 0.17, -0.49, 0, 0, 1 ],
+[ 0.34, -0.98, 0, 1, 0 ],
+[ 0.18, 0.05, 0, 0, 1 ],
+[ 0.97, -0.63, 0, 1, 0 ],
+[ 0.44, 0.6, 0, 0, 1 ],
+[ -0.49, -0.07000000000000001, 0, 0, 1 ],
+[ -0.09, 0.46, 1, 0, 0 ],
+[ -0.26, 0.54, 1, 0, 0 ],
+[ 0.16, -0.65, 0, 1, 0 ],
+[ -0.58, -0.86, 0, 1, 0 ],
+[ -0.52, -0.6, 0, 1, 0 ],
+[ 0.05, 0.16, 1, 0, 0 ],
+[ -0.55, -0.51, 0, 1, 0 ],
+[ -0.19, 0.74, 1, 0, 0 ],
+[ 0.48, -0.14, 0, 0, 1 ],
+[ -0.05, -0.13, 0, 0, 1 ],
+[ -0.28, 0.29, 1, 0, 0 ],
+[ -0.36, 0.38, 1, 0, 0 ],
+[ 0.9500000000000001, -0.31, 0, 0, 1 ],
+[ -0.66, 0.73, 0, 0, 1 ],
+[ -0.6900000000000001, -0.62, 0, 1, 0 ],
+[ 0.17, -0.48, 0, 0, 1 ],
+[ -0.1, 0.5, 1, 0, 0 ],
+[ 0.13, 0.68, 1, 0, 0 ],
+[ -0.02, 0.67, 1, 0, 0 ],
+[ -0.2, 0.79, 1, 0, 0 ],
+[ -0.02, -0.73, 0, 1, 0 ],
+[ -0.91, -0.68, 0, 1, 0 ],
+[ -0.04, 0.55, 1, 0, 0 ],
+[ -0.09, 0.5700000000000001, 1, 0, 0 ],
+[ 0.77, 0.38, 0, 0, 1 ],
+[ -0.03, 0.35, 1, 0, 0 ],
+[ 0.8100000000000001, -0.32, 0, 0, 1 ],
+[ -0.42, -0.49, 0, 0, 1 ],
+[ -0.35, -0.68, 0, 1, 0 ],
+[ -0.09, 0.23, 1, 0, 0 ],
+[ 0.71, 0.59, 0, 0, 1 ],
+[ -0.09, 0.84, 1, 0, 0 ],
+[ 0.63, 0.07000000000000001, 0, 0, 1 ],
+[ 0.22, -0.8, 0, 1, 0 ],
+[ 0.29, 0.76, 1, 0, 0 ],
+[ -0.86, -0.97, 0, 1, 0 ],
+[ -0.45, -0.62, 0, 1, 0 ],
+[ 0.77, -0.55, 0, 1, 0 ],
+[ 1., -0.84, 0, 1, 0 ],
+[ 0.5700000000000001, -0.04, 0, 0, 1 ],
+[ -0.76, 0.32, 0, 0, 1 ],
+[ 0.8100000000000001, 0.92, 0, 0, 1 ],
+[ 0.27, 0.32, 1, 0, 0 ],
+[ -0.28, 0.75, 1, 0, 0 ],
+[ -0.9400000000000001, -0.79, 0, 1, 0 ],
+[ -0.24, 0.47, 1, 0, 0 ],
+[ 0.32, -0.53, 0, 1, 0 ],
+[ -0.27, 0.73, 1, 0, 0 ],
+[ 0.7000000000000001, 0.63, 0, 0, 1 ],
+[ -0.65, -0.44, 0, 0, 1 ],
+[ -0.93, -0.99, 0, 1, 0 ],
+[ -0.85, -0.75, 0, 1, 0 ],
+[ 0.98, -0.17, 0, 0, 1 ],
+];
diff --git a/examples/NeuralNetwork_CrossEntropy/neural_network.g b/examples/NeuralNetwork_CrossEntropy/neural_network.g
index c5d9d1c..f280f0e 100644
--- a/examples/NeuralNetwork_CrossEntropy/neural_network.g
+++ b/examples/NeuralNetwork_CrossEntropy/neural_network.g
@@ -1,63 +1,199 @@
-LoadPackage( "GradientDescentForCAP" );
-
-
+#! @Chapter Examples for neural networks
+
+#! @Section Multi-class neural network with cross-entropy loss function
+
+LoadPackage( "GradientBasedLearningForCAP" );
+
+#! This example demonstrates how to train a small feed-forward neural network
+#! for a multi-class classification task using the $\texttt{GradientBasedLearningForCAP}$
+#! package. We employ the cross-entropy loss function and optimise the network
+#! parameters with gradient descent.
+#!
+#! The dataset consists of points $(x_1, x_2) \in \mathbb{R}^2$ labelled by a
+#! non-linear decision rule describing three regions that form
+#! @BeginLatexOnly
+#! \begin{itemize}
+#! \item $\emph{class 0}$:
+#! \[
+#! x_1^2 + (x_2 - 0.5)^2 \le 0.16
+#! \qquad\text{(inside a circle of radius $0.4$ centred at $(0,0.5)$)}
+#! \]
+#! \item \emph{class 1}:
+#! \[
+#! x_2 \le -0.5
+#! \qquad\text{(below the horizontal line)}
+#! \]
+#! \item $\emph{class 2}$: everything else.
+#! \end{itemize}
+#! @EndLatexOnly
+
+#! @BeginLatexOnly
+#! \begin{center}
+#! \includegraphics[width=0.5\textwidth]{../examples/NeuralNetwork_CrossEntropy/data/scatter_plot_training_examples.png}
+#! \end{center}
+#! @EndLatexOnly
+
+#! We build a neural network with three hidden layers and a Softmax output, fit
+#! it on the provided training examples for several epochs, and then evaluate
+#! the trained model on a grid of input points to visualise the learned
+#! decision regions.
+
+#! @BeginLatexOnly
+#! Concretely, we choose three hidden layers, each with 6 neurons:
+#! \[
+#! \texttt{hidden\_layers} = [6,6,6].
+#! \]
+#! With input dimension \(2\) (representing point coordinates) and output dimension \(3\) (the probability of each class), the affine maps between
+#! consecutive layers therefore have the following matrix dimensions (together
+#! bias vectors):
+#! \[
+#! \binom{W_1}{b_1} \in \mathbb{R}^{ 3 \times 6},\quad
+#! \binom{W_2}{b_2} \in \mathbb{R}^{ 7 \times 6},\quad
+#! \binom{W_3}{b_3} \in \mathbb{R}^{ 7 \times 6},\quad
+#! \binom{W_4}{b_4} \in \mathbb{R}^{ 7 \times 3}.
+#! \]
+#! Equivalently, each layer computes for an input $a_k$ the output \(z_{k+1} := (a_k\;\;1)\binom{W_{k+1}}{b_{k+1}}=a_k W_{k+1} + b_{k+1}\), where
+#! \(a_0 \in \mathbb{R}^2\), \(a_1,a_2,a_3 \in \mathbb{R}^6\), and the final output
+#! lies in \(\mathbb{R}^3\).
+#! The non-linear activation function ReLU is applied after each hidden layer.
+#! And Softmax is applied after the final layer to obtain a probability
+#! estimate for the classes.
+#! \[
+#! a_0 \mapsto
+#! \color{red}\mathbf{Softmax}\left(
+#! \color{blue}\left(
+#! \color{green}\mathbf{Relu}\left(
+#! \color{yellow}\left(
+#! \color{red}\mathbf{Relu}\left(
+#! \color{blue}\left(
+#! \color{green}\mathbf{Relu}\left(
+#! \color{yellow}\left(
+#! a_0\;\; 1
+#! \color{yellow}\right)
+#! \binom{W_1}{b_1}
+#! \color{green}\right)
+#! \;\; 1
+#! \color{blue}\right)
+#! \binom{W_2}{b_2}
+#! \color{red}\right)
+#! \;\; 1
+#! \color{yellow}\right)
+#! \binom{W_3}{b_3}
+#! \color{green}\right)
+#! \;\; 1
+#! \color{blue}\right)
+#! \binom{W_4}{b_4}
+#! \color{red}\right) \in \mathbb{R}^3
+#! \]
+#! The predicted class is the one with the highest probability.
+#! That is, the total number of parameters (weights and biases) is \(123\).
+#! After training, we obtain a weight vector \(w \in \mathbb{R}^{123}\).
+#! The first $21$ entries of $w$ correspond to the concatenation of the columns of $\binom{W_4}{b_4}\in \mathbb{R}^{7\times 3}$,
+#! the next $42$ entries correspond to the concatenation of the columns of $\binom{W_3}{b_3}\in \mathbb{R}^{7\times 6}$, and so on.
+#! @EndLatexOnly
+
+#! @Example
Smooth := SkeletalSmoothMaps;
+#! SkeletalSmoothMaps
Lenses := CategoryOfLenses( Smooth );
+#! CategoryOfLenses( SkeletalSmoothMaps )
Para := CategoryOfParametrisedMorphisms( Smooth );
-
-
-## The function we are trying minimize
-f := LossMorphismOfNeuralNetwork( Para, 2, [ 5, 5 ], 4, "Softmax" );
-
-## One epoch update of the parameters
-optimizer := Lenses.GradientDescentOptimizer( : learning_rate := 0.01 );
-
-training_examples_path := "data/training_examples.txt";
-
-batch_size := 1;
-
-one_epoch_update := OneEpochUpdateLens( f, optimizer, training_examples_path, batch_size );
-
-## Initialize the parameters and apply updates nr_epochs times
-
-# initial values for the parameters of the first affine transformation w1 (as 3x5 matrix)
-w1 := [[-0.61789644, 0.56407845, -0.5965204, -0.85063416, 0.770488 ],
- [-0.13079625, 0.47618425, 0.8807312, 0.24377191, 0.18529081],
- [0., 0., 0., 0., 0. ] ];
-
-# as vector
-w1 := Concatenation( TransposedMat( w1 ) );
-
-# initial weights for the parameters of the second affine transformation w2 (as 6x5 matrix)
-w2 :=
-[[-0.52913845, 0.524745, 0.67446196, -0.13036567, -0.5108599 ],
- [-0.12336099, 0.7475884, -0.18031466, 0.30409217, -0.5017855 ],
- [-0.5523451, 0.74021363, -0.38746935, -0.2771675, 0.6162708 ],
- [-0.24399745, 0.523523, 0.31327105, -0.5376833, -0.4945482 ],
- [ 0.33063114, -0.10083395, 0.13537377, 0.671383, -0.2012583 ],
- [ 0., 0., 0., 0., 0. ] ];
-
-# as vector
-w2 := Concatenation( TransposedMat( w2 ) );
-
-# initial weights for the parameters of the third affine transformation w3 (as 6x4 matrix)
-w3 :=
-[[-0.05885905, -0.81396204, 0.00370395, -0.42547446],
- [-0.39928403, 0.56314194, 0.6614479 , 0.5060446 ],
- [ 0.6662301, -0.2800727 , 0.1187852 , -0.27065504],
- [ 0.15874296, -0.6039741 , -0.7533438 , -0.33242884],
- [ 0.26578736, -0.45036432, -0.61879224, 0.8060001 ],
- [ 0., 0., 0., 0., ] ];
-
-# as vector
-w3 := Concatenation( TransposedMat( w3 ) );
-
-# creating a vector of initial values
-w := Concatenation( [ w3, w2, w1 ] );
-
-nr_epochs := 50;
-
-w := Fit( one_epoch_update, nr_epochs, w );
-
-# After 50 epochs:
-# w = [ 0.927523, -1.16534, 3.09767, 1.21408, 0.666635, -1.43307, -0.985489, 0.871045, -1.92995, 0.786445, -1.90583, 0.40558, -0.0727751, 2.23415, 0.54885, -2.54374, -2.19966, -0.398129, -1.16385, -0.608512, -1.48229, -0.987787, 3.44148, 1.42562, -0.465934, -0.356098, -1.33342, -0.236309, 0.961528, 0.644209, 0.809773, -0.881621, 2.03238, -0.870562, -1.20672, 1.29646, 2.97375, -0.133015, -1.56653, 2.90988, 0.817293, 1.46626, -0.262231, 0.301989, -0.500305, -1.36048, 2.25753, 1.28782, -0.0197388, -3.45074, 1.58903, -0.815923, -1.0852, 2.2728, -2.66226, 1.12052, 1.03489, 0.085673, 3.31336, 0.29301, 0.110178, 2.22798, 2.15017, -1.25682, 2.86108, -1.89215, 2.74446, 1.19491, 1.01804 ]
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+hidden_layers := [ 6, 6, 6 ];;
+f := NeuralNetworkLossMorphism( Para, 2, hidden_layers, 3, "Softmax" );;
+optimizer := Lenses.GradientDescentOptimizer( : learning_rate := 0.1 );
+#! function( n ) ... end
+training_examples_path := Filename(
+ DirectoriesPackageLibrary("GradientBasedLearningForCAP", "examples")[1],
+ "NeuralNetwork_CrossEntropy/data/training_examples.txt" );;
+batch_size := 4;
+#! 4
+one_epoch_update := OneEpochUpdateLens( f, optimizer,
+ training_examples_path, batch_size );
+#! (ℝ^123, ℝ^123) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^123 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^123 -> ℝ^123
+nr_weights := RankOfObject( Source( PutMorphism( one_epoch_update ) ) );
+#! 123
+rs := RandomSource( IsMersenneTwister, 1 );;
+w := List( [ 1 .. nr_weights ], i -> 0.001 * Random( rs, [ -1000 .. 1000 ] ) );;
+Display( w{[ 1 .. 5 ]} );
+#! [ 0.789, -0.767, -0.613, -0.542, 0.301 ]
+nr_epochs := 16;
+#! 16
+w := Fit( one_epoch_update, nr_epochs, w : verbose := true );;
+#! Epoch 0/16 - loss = 0.80405334335407785
+#! Epoch 1/16 - loss = 0.18338542093217905
+#! Epoch 2/16 - loss = 0.1491650040794873
+#! Epoch 3/16 - loss = 0.13186409729963983
+#! Epoch 4/16 - loss = 0.12293129048146505
+#! Epoch 5/16 - loss = 0.11742704538825839
+#! Epoch 6/16 - loss = 0.11191588532335346
+#! Epoch 7/16 - loss = 0.10441947487056685
+#! Epoch 8/16 - loss = 0.095102838431592687
+#! Epoch 9/16 - loss = 0.092441708967385072
+#! Epoch 10/16 - loss = 0.097057579505470393
+#! Epoch 11/16 - loss = 0.093295953606638768
+#! Epoch 12/16 - loss = 0.082114375099200984
+#! Epoch 13/16 - loss = 0.082910416530212819
+#! Epoch 14/16 - loss = 0.082815082271383303
+#! Epoch 15/16 - loss = 0.085405485529683856
+#! Epoch 16/16 - loss = 0.087825108242740729
+w;
+#! [ 0.789, -1.09294, -1.43008, -0.66714, 1.27126, -1.12774, -0.240397, 0.213,
+#! -0.382376, 1.42204, 0.300837, -1.79451, 0.392967, -0.868913, 0.858,
+#! 1.16231, 0.769031, 0.309303, 0.555253, -0.142223, 0.0703106, -0.997,
+#! -0.746, 0.9, -0.248, -0.801, -0.317, -0.826, 0.0491083, -1.51073, -1.01246,
+#! 0.371752, -0.852, 0.342548, 1.01666, 1.39005, 0.958034, 0.357176, 0.3225,
+#! -0.29, -1.0095, 0.154876, -0.460859, -0.582425, 0.223943, -0.402, -0.368,
+#! 0.275911, -0.0791975, 0.0986371, -0.487903, -0.699542, -0.553485, 0.766,
+#! 1.88163, 0.903741, -0.895688, -0.949546, 0.034, 0.13, -0.91, 0.67043,
+#! -0.784672, -0.195688, 1.49813, 0.881451, 0.679593, -0.380004, 0.743062,
+#! 0.529804, 0.221497, 0.487694, 1.12092, 1.38134, -0.313891, 0.780071,
+#! 0.00526383, 0.422997, 0.287254, -0.42555, -0.0525988, -0.159442, -0.256285,
+#! -0.296361, 0.822117, -0.23663, -0.252, -0.986452, -0.955211, 0.52727,
+#! 0.261295, -0.867, -0.787, -0.395, -0.871, -0.205, -0.315, -0.385,
+#! -0.292919, -1.46115, -0.634953, 0.818446, 0.903525, 0.833456, 1.59504,
+#! -0.500531, -0.191608, 0.390861, 0.808496, -1.94883, 0.445591, -1.62511,
+#! -0.601054, -0.154008, -1.20266, -0.255521, 0.989522, 0.29963, 0.372084,
+#! 1.07529, -0.909025, 0.454265, 0.539106 ]
+predict := NeuralNetworkPredictionMorphism( Para, 2, hidden_layers, 3, "Softmax" );
+#! ℝ^2 -> ℝ^3 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^123
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^125 -> ℝ^3
+predict_given_w := ReparametriseMorphism( predict, Smooth.Constant( w ) );
+#! ℝ^2 -> ℝ^3 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^0
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^2 -> ℝ^3
+predict_using_w := UnderlyingMorphism( predict_given_w );
+#! ℝ^2 -> ℝ^3
+inputs := Cartesian( 0.1 * [ -10 .. 10 ], 0.1 * [ -10 .. 10 ] );;
+predictions := List( inputs, x ->
+ -1 + Position( predict_using_w( x ), Maximum( predict_using_w( x ) ) ) );;
+# ScatterPlotUsingPython( inputs, predictions );
+#! @EndExample
+
+#! Executing the command $\texttt{ScatterPlotUsingPython( inputs, predictions );}$ produces the following plot:
+#! @BeginLatexOnly
+#! \begin{center}
+#! \includegraphics[width=0.5\textwidth]{../examples/NeuralNetwork_CrossEntropy/scatter_plot_predictions.png}
+#! \end{center}
+#! @EndLatexOnly
diff --git a/examples/NeuralNetwork_CrossEntropy/neural_network.py b/examples/NeuralNetwork_CrossEntropy/neural_network.py
deleted file mode 100644
index 399120d..0000000
--- a/examples/NeuralNetwork_CrossEntropy/neural_network.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import numpy as np
-from tensorflow.keras.utils import to_categorical
-from tensorflow.keras.models import Sequential
-from tensorflow.keras.layers import Input, Dense
-from tensorflow.keras.optimizers import Adam, SGD
-from sklearn.model_selection import train_test_split
-
-with open("data/training_examples.txt", "r") as f:
- f = f.read()
-
-data = np.array(eval(f))
-
-# create the data
-X = data[:, :2]
-y = data[:, 2:]
-
-# Convert the labels to class indices
-y = np.argmax(y, axis=1)
-
-# Convert the labels to categorical format
-y = to_categorical(y)
-
-# Split the data into training and testing sets
-X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
-
-model = Sequential()
-
-# Add an Input layer
-model.add(Input(shape=(2,)))
-
-# Add hidden layers
-model.add(Dense(5, activation='relu'))
-model.add(Dense(5, activation='relu'))
-
-# Add output layer
-model.add(Dense(4, activation='softmax'))
-
-# Build the model with the input shape to initialize the weights
-model.build()
-
-# Summary of the model
-model.summary()
-
-# Specify the optimizer
-#optimizer = Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-07)
-optimizer = SGD(learning_rate=0.01)
-
-model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])
-
-# View the initial weights
-initial_weights = model.get_weights()
-initial_weights_vec = []
-
-for i in range(0, len(initial_weights), 2):
- w = initial_weights[i]
- b = initial_weights[i+1]
- b = b[np.newaxis, :]
- m = np.concatenate([w, b])
- print( f"affine matrix of layer {i//2 + 1}:\n{m}\n" )
- initial_weights_vec = m.flatten('F').tolist() + initial_weights_vec
-
-print(f"initial weights as vector: \n{initial_weights_vec}")
-
-# View the parameters of the Adam optimizer
-print(f'Learning rate: {optimizer.learning_rate.numpy()}')
-#print(f'Beta_1: {optimizer.beta_1}')
-#print(f'Beta_2: {optimizer.beta_2}')
-#print(f'Epsilon: {optimizer.epsilon}')
-
-# Train the model
-model.fit(X_train, y_train, epochs=50, batch_size=1)
-
-# Evaluate the model. Accuracy should less or more than 83%
-loss, accuracy = model.evaluate(X_test, y_test)
-print(f'Accuracy: {accuracy}')
-
-# View the learnt weights
-weights = model.get_weights()
-
-weights_vec = []
-for i in range(0, len(weights), 2):
- w = weights[i]
- b = weights[i+1]
- b = b[np.newaxis, :]
- m = np.concatenate([w, b])
- print( f"affine matrix of layer {i//2 + 1}:\n{m}\n" )
- weights_vec = m.flatten('F').tolist() + weights_vec
-
-print(f"weights as vector: \n{weights_vec}")
diff --git a/examples/NeuralNetwork_CrossEntropy/predict/predict.g b/examples/NeuralNetwork_CrossEntropy/predict/predict.g
deleted file mode 100644
index cc6e6c2..0000000
--- a/examples/NeuralNetwork_CrossEntropy/predict/predict.g
+++ /dev/null
@@ -1,30 +0,0 @@
-LoadPackage( "GradientDescentForCAP" );
-
-
-Smooth := SkeletalSmoothMaps;
-Para := CategoryOfParametrisedMorphisms( Smooth );
-
-
-## The function we are trying minimize
-predict := PredictionMorphismOfNeuralNetwork( Para, 2, [ 5, 5 ], 4, "Softmax" );
-
-## After 50 epochs, we got the following weights:
-w := [ 0.927523, -1.16534, 3.09767, 1.21408, 0.666635, -1.43307, -0.985489, 0.871045,
- -1.92995, 0.786445, -1.90583, 0.40558, -0.0727751, 2.23415, 0.54885, -2.54374,
- -2.19966, -0.398129, -1.16385, -0.608512, -1.48229, -0.987787, 3.44148, 1.42562,
- -0.465934, -0.356098, -1.33342, -0.236309, 0.961528, 0.644209, 0.809773, -0.881621,
- 2.03238, -0.870562, -1.20672, 1.29646, 2.97375, -0.133015, -1.56653, 2.90988,
- 0.817293, 1.46626, -0.262231, 0.301989, -0.500305, -1.36048, 2.25753, 1.28782,
- -0.0197388, -3.45074, 1.58903, -0.815923, -1.0852, 2.2728, -2.66226, 1.12052,
- 1.03489, 0.085673, 3.31336, 0.29301, 0.110178, 2.22798, 2.15017, -1.25682, 2.86108,
- -1.89215, 2.74446, 1.19491, 1.01804 ];
-
-## Let us use w to predict:
-predict_using_w := UnderlyingMorphism( ReparametriseMorphism( predict, Smooth.Constant( w ) ) );
-
-## create inputs:
-inputs := Cartesian( 0.01 * [ -100 .. 100 ], 0.01 * [ -100 .. 100 ] );
-predictions := List( inputs, x -> PositionMaximum( predict_using_w( x ) ) );
-
-
-ScatterPlotUsingPython( inputs, predictions );
diff --git a/examples/NeuralNetwork_CrossEntropy/predict/scatter_plot_predictions.png b/examples/NeuralNetwork_CrossEntropy/predict/scatter_plot_predictions.png
deleted file mode 100644
index b48a28e..0000000
Binary files a/examples/NeuralNetwork_CrossEntropy/predict/scatter_plot_predictions.png and /dev/null differ
diff --git a/examples/NeuralNetwork_CrossEntropy/scatter_plot_predictions.png b/examples/NeuralNetwork_CrossEntropy/scatter_plot_predictions.png
new file mode 100644
index 0000000..d6f3867
Binary files /dev/null and b/examples/NeuralNetwork_CrossEntropy/scatter_plot_predictions.png differ
diff --git a/examples/NeuralNetwork_QuadraticLoss/data/create_train_test_examples.g b/examples/NeuralNetwork_QuadraticLoss/data/create_train_test_examples.g
deleted file mode 100644
index ac7d326..0000000
--- a/examples/NeuralNetwork_QuadraticLoss/data/create_train_test_examples.g
+++ /dev/null
@@ -1,41 +0,0 @@
-
-
-# locate the current dir
-current_dir := DirectoryCurrent( );
-
-# create a file for the training dataset
-
-## we want to approximate these secret perfect weights!
-perfect_weights := [ 2, -3, 1 ];
-
-## The input dimension is 2 and the output dimension is 1 --> an example dimension is 2 + 1 = 3.
-## Each training example is of the form [x1, x2, y ] where y := 2x1 - 3x2 + 1 + some_error.
-
-files := [ "training_examples.txt", "test_examples.txt" ];
-nr_examples := [ 100, 20 ];
-
-noise := 0.5;
-
-for i in [ 1, 2 ] do
-
- file := Filename( current_dir, files[i] );
-
- PrintTo( file, "[\n" );
-
- for j in [ 1 .. nr_examples[i] ] do
-
- x1 := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
- x2 := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
-
- error := Random( [ -0.001, 0.001 ] ) * Random( [ 1 .. 100 ] );
-
- AppendTo( file, [ x1, x2, [ x1, x2, 1 ] * perfect_weights + noise * error ], ",\n" );
-
- od;
-
- AppendTo( file, "]" );
-
-od;
-
-Display( "Done!" );
-QUIT;
diff --git a/examples/NeuralNetwork_QuadraticLoss/data/generate_examples.g b/examples/NeuralNetwork_QuadraticLoss/data/generate_examples.g
new file mode 100644
index 0000000..59e29e6
--- /dev/null
+++ b/examples/NeuralNetwork_QuadraticLoss/data/generate_examples.g
@@ -0,0 +1,37 @@
+LoadPackage( "GradientBasedLearningForCAP" );
+
+## we want to approximate these secret perfect weights!
+perfect_weights := [ 2, -3, 1 ];
+
+## The input dimension is 2 and the output dimension is 1 --> an example dimension is 2 + 1 = 3.
+## Each training example is of the form [x1, x2, y ] where y := 2x1 - 3x2 + 1 + some_error.
+
+nr_examples := 100;
+
+noise := 0.5;
+
+training_examples := [ ];
+
+for j in [ 1 .. nr_examples ] do
+
+ x1 := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+ x2 := Random( [ -0.01, 0.01 ] ) * Random( [ 1 .. 100 ] );
+
+ error := Random( [ -0.001, 0.001 ] ) * Random( [ 1 .. 100 ] );
+
+ y := perfect_weights[1] * x1 + perfect_weights[2] * x2 + perfect_weights[3] + error;
+
+ Add( training_examples, [ x1, x2, y ] );
+
+od;
+
+file := Filename( DirectoryCurrent( ), "training_examples.txt" );
+
+PrintTo( file, "[\n" );
+for example in training_examples do
+ AppendTo( file, example, ",\n" );
+od;
+AppendTo( file, "]" );
+
+Display( "Done!" );
+QUIT;
diff --git a/examples/NeuralNetwork_QuadraticLoss/data/scatter_plot_training_examples.png b/examples/NeuralNetwork_QuadraticLoss/data/scatter_plot_training_examples.png
new file mode 100644
index 0000000..7996190
Binary files /dev/null and b/examples/NeuralNetwork_QuadraticLoss/data/scatter_plot_training_examples.png differ
diff --git a/examples/NeuralNetwork_QuadraticLoss/data/test_examples.txt b/examples/NeuralNetwork_QuadraticLoss/data/test_examples.txt
deleted file mode 100644
index 4a94774..0000000
--- a/examples/NeuralNetwork_QuadraticLoss/data/test_examples.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-[
-[ 0.9500000000000001, -0.96, 5.737 ],
-[ -0.8100000000000001, 0.34, -1.652 ],
-[ 0.8200000000000001, 0.13, 2.2085 ],
-[ 0.06, -0.4, 2.367 ],
-[ 0.68, -0.29, 3.2475 ],
-[ 0.41, 0.14, 1.446 ],
-[ 0.09, -0.37, 2.2455 ],
-[ -0.93, -0.41, 0.3629999999999999 ],
-[ -0.75, 0.91, -3.2685 ],
-[ -0.71, -0.01, -0.3549999999999999 ],
-[ -0.2, 0.01, 0.584 ],
-[ 0.9400000000000001, -0.84, 5.3955 ],
-[ 0.29, 0.9, -1.15 ],
-[ -0.06, -0.29, 1.7265 ],
-[ -0.86, -0.53, 0.8905000000000001 ],
-[ -0.98, 0.2, -1.549 ],
-[ 0.44, -0.68, 3.9195 ],
-[ -0.21, 0.19, -0.01199999999999999 ],
-[ 0.05, 0.99, -1.871 ],
-[ 0.32, -0.9400000000000001, 4.495500000000001 ],
-]
\ No newline at end of file
diff --git a/examples/NeuralNetwork_QuadraticLoss/data/training_examples.txt b/examples/NeuralNetwork_QuadraticLoss/data/training_examples.txt
index c35b54f..0f95f05 100644
--- a/examples/NeuralNetwork_QuadraticLoss/data/training_examples.txt
+++ b/examples/NeuralNetwork_QuadraticLoss/data/training_examples.txt
@@ -1,102 +1,102 @@
[
-[ 0.59, -0.85, 4.691999999999999 ],
-[ -0.72, -0.55, 1.1605 ],
-[ 0.12, -0.3, 2.092 ],
-[ -0.8100000000000001, 0.97, -3.4955 ],
-[ -0.45, -0.5600000000000001, 1.762 ],
-[ -0.77, 0.8, -2.9055 ],
-[ 0.72, 0.37, 1.3135 ],
-[ -0.06, 0.9400000000000001, -1.926 ],
-[ 0.05, 0.8, -1.302 ],
-[ 0.63, 0.9, -0.4465000000000002 ],
-[ 0.49, -0.85, 4.494499999999999 ],
-[ 0.6, -0.68, 4.2705 ],
-[ -0.19, 0.8100000000000001, -1.8015 ],
-[ -0.09, -0.49, 2.3265 ],
-[ -0.22, 0.29, -0.2964999999999998 ],
-[ -0.75, 0.22, -1.1335 ],
-[ -0.78, -0.18, 0.002999999999999982 ],
-[ -0.19, -0.33, 1.593 ],
-[ 0.9400000000000001, -0.51, 4.436 ],
-[ -0.24, 0.46, -0.9075000000000001 ],
-[ 0.8100000000000001, -0.38, 3.7795 ],
-[ 0.93, 0.37, 1.7005 ],
-[ 0.38, 0.79, -0.6265000000000001 ],
-[ 0.9, -0.98, 5.749000000000001 ],
-[ 0.28, 0.61, -0.222 ],
-[ 0.89, -0.44, 4.105499999999999 ],
-[ 0.5700000000000001, 0.47, 0.7314999999999999 ],
-[ -0.05, -0.76, 3.1615 ],
-[ -0.06, 0.06, 0.695 ],
-[ 0.1, -0.99, 4.2095 ],
-[ -0.18, -0.88, 3.3005 ],
-[ 0.31, 0.71, -0.5319999999999998 ],
-[ 0.42, 0.85, -0.6905 ],
-[ -0.21, -0.45, 1.9725 ],
-[ -0.27, 0.73, -1.778 ],
-[ 0.35, -0.48, 3.12 ],
-[ -0.09, 0.85, -1.7635 ],
-[ 0.45, 0.12, 1.5875 ],
-[ -0.8200000000000001, -0.06, -0.4405000000000002 ],
-[ 0.72, -0.65, 4.4225 ],
-[ 0.16, 0.87, -1.2795 ],
-[ -0.66, -0.75, 1.9375 ],
-[ 0.32, 0.01, 1.5865 ],
-[ 0.16, 0.6900000000000001, -0.7980000000000003 ],
-[ 0.98, -0.62, 4.779 ],
-[ -0.43, 0.96, -2.7775 ],
-[ 0.76, 0.78, 0.1440000000000002 ],
-[ 0.67, 0.09, 2.11 ],
-[ 0.42, -0.01, 1.8935 ],
-[ -0.85, 0.28, -1.5375 ],
-[ -0.15, -0.07000000000000001, 0.8875000000000001 ],
-[ -0.61, -0.23, 0.4610000000000001 ],
-[ 0.14, -0.06, 1.5065 ],
-[ -0.62, 0.34, -1.276 ],
-[ 0.5600000000000001, 0.49, 0.6915000000000001 ],
-[ -0.2, -0.04, 0.6895 ],
-[ -0.25, -0.91, 3.192 ],
-[ -0.16, -0.53, 2.2985 ],
-[ 0.72, -0.72, 4.5985 ],
-[ -0.55, 1., -3.065 ],
-[ -0.88, 0.9, -3.4655 ],
-[ 0.31, 0.97, -1.2865 ],
-[ -0.07000000000000001, -0.47, 2.243 ],
-[ 0.55, 0.61, 0.252 ],
-[ -0.62, 0.98, -3.226 ],
-[ 0.5, 0.16, 1.4945 ],
-[ 0.91, 0.73, 0.6360000000000001 ],
-[ 0.38, 0.4, 0.6059999999999999 ],
-[ 1., 0.02, 2.963 ],
-[ -0.22, -0.28, 1.386 ],
-[ 0.67, 0.02, 2.2585 ],
-[ 0.92, -0.49, 4.32 ],
-[ 0.47, 0.22, 1.2485 ],
-[ -0.79, -0.85, 1.927 ],
-[ 0.75, -0.98, 5.467499999999999 ],
-[ -0.48, 0.54, -1.5515 ],
-[ 0.9500000000000001, 0.51, 1.3425 ],
-[ 0.41, -0.43, 3.135 ],
-[ 0.33, -0.75, 3.889 ],
-[ -0.27, 0.99, -2.4675 ],
-[ 0.5, 0.15, 1.563 ],
-[ 0.58, 0.8100000000000001, -0.2930000000000003 ],
-[ 0.33, 0.93, -1.1325 ],
-[ 0.9, 0.16, 2.3195 ],
-[ -0.65, 0.92, -3.013500000000001 ],
-[ -0.98, 0.97, -3.8955 ],
-[ 0.92, -0.24, 3.6085 ],
-[ 0.06, 0.8200000000000001, -1.298 ],
-[ 0.62, -0.03, 2.282 ],
-[ 0.55, -0.46, 3.4335 ],
-[ -0.34, -0.9, 3.046 ],
-[ 0.67, -0.34, 3.3175 ],
-[ 0.35, 0.26, 0.911 ],
-[ -0.5700000000000001, -0.16, 0.3544999999999999 ],
-[ 0.8, 0.48, 1.1425 ],
-[ 0.98, 0.62, 1.082 ],
-[ 0.96, -0.5600000000000001, 4.633999999999999 ],
-[ 0.8200000000000001, -0.52, 4.199 ],
-[ 0.9, -0.02, 2.8355 ],
-[ -0.17, -0.35, 1.7165 ],
+[ 0.59, -0.85, 4.654 ],
+[ -0.72, -0.55, 1.111 ],
+[ 0.12, -0.3, 2.044 ],
+[ -0.8100000000000001, 0.97, -3.461 ],
+[ -0.45, -0.5600000000000001, 1.744 ],
+[ -0.77, 0.8, -2.871 ],
+[ 0.72, 0.37, 1.297 ],
+[ -0.06, 0.9400000000000001, -1.912 ],
+[ 0.05, 0.8, -1.304 ],
+[ 0.63, 0.9, -0.4530000000000002 ],
+[ 0.49, -0.85, 4.459 ],
+[ 0.6, -0.68, 4.301 ],
+[ -0.19, 0.8100000000000001, -1.793 ],
+[ -0.09, -0.49, 2.363 ],
+[ -0.22, 0.29, -0.2829999999999998 ],
+[ -0.75, 0.22, -1.107 ],
+[ -0.78, -0.18, 0.02599999999999998 ],
+[ -0.19, -0.33, 1.576 ],
+[ 0.9400000000000001, -0.51, 4.462 ],
+[ -0.24, 0.46, -0.9550000000000001 ],
+[ 0.8100000000000001, -0.38, 3.799 ],
+[ 0.93, 0.37, 1.651 ],
+[ 0.38, 0.79, -0.6430000000000001 ],
+[ 0.9, -0.98, 5.758 ],
+[ 0.28, 0.61, -0.174 ],
+[ 0.89, -0.44, 4.111 ],
+[ 0.5700000000000001, 0.47, 0.733 ],
+[ -0.05, -0.76, 3.143 ],
+[ -0.06, 0.06, 0.6899999999999999 ],
+[ 0.1, -0.99, 4.249 ],
+[ -0.18, -0.88, 3.321 ],
+[ 0.31, 0.71, -0.5539999999999998 ],
+[ 0.42, 0.85, -0.6709999999999999 ],
+[ -0.21, -0.45, 2.015 ],
+[ -0.27, 0.73, -1.826 ],
+[ 0.35, -0.48, 3.1 ],
+[ -0.09, 0.85, -1.797 ],
+[ 0.45, 0.12, 1.635 ],
+[ -0.8200000000000001, -0.06, -0.4210000000000002 ],
+[ 0.72, -0.65, 4.455000000000001 ],
+[ 0.16, 0.87, -1.269 ],
+[ -0.66, -0.75, 1.945 ],
+[ 0.32, 0.01, 1.563 ],
+[ 0.16, 0.6900000000000001, -0.8460000000000002 ],
+[ 0.98, -0.62, 4.738 ],
+[ -0.43, 0.96, -2.815 ],
+[ 0.76, 0.78, 0.1080000000000002 ],
+[ 0.67, 0.09, 2.15 ],
+[ 0.42, -0.01, 1.917 ],
+[ -0.85, 0.28, -1.535 ],
+[ -0.15, -0.07000000000000001, 0.865 ],
+[ -0.61, -0.23, 0.4520000000000001 ],
+[ 0.14, -0.06, 1.553 ],
+[ -0.62, 0.34, -1.292 ],
+[ 0.5600000000000001, 0.49, 0.7330000000000001 ],
+[ -0.2, -0.04, 0.659 ],
+[ -0.25, -0.91, 3.154 ],
+[ -0.16, -0.53, 2.327 ],
+[ 0.72, -0.72, 4.597 ],
+[ -0.55, 1., -3.03 ],
+[ -0.88, 0.9, -3.471 ],
+[ 0.31, 0.97, -1.283 ],
+[ -0.07000000000000001, -0.47, 2.216 ],
+[ 0.55, 0.61, 0.234 ],
+[ -0.62, 0.98, -3.272 ],
+[ 0.5, 0.16, 1.469 ],
+[ 0.91, 0.73, 0.6420000000000001 ],
+[ 0.38, 0.4, 0.6519999999999998 ],
+[ 1., 0.02, 2.986 ],
+[ -0.22, -0.28, 1.372 ],
+[ 0.67, 0.02, 2.237 ],
+[ 0.92, -0.49, 4.33 ],
+[ 0.47, 0.22, 1.217 ],
+[ -0.79, -0.85, 1.884 ],
+[ 0.75, -0.98, 5.494999999999999 ],
+[ -0.48, 0.54, -1.523 ],
+[ 0.9500000000000001, 0.51, 1.315 ],
+[ 0.41, -0.43, 3.16 ],
+[ 0.33, -0.75, 3.868 ],
+[ -0.27, 0.99, -2.425 ],
+[ 0.5, 0.15, 1.576 ],
+[ 0.58, 0.8100000000000001, -0.3160000000000002 ],
+[ 0.33, 0.93, -1.135 ],
+[ 0.9, 0.16, 2.319 ],
+[ -0.65, 0.92, -2.967000000000001 ],
+[ -0.98, 0.97, -3.921 ],
+[ 0.92, -0.24, 3.657 ],
+[ 0.06, 0.8200000000000001, -1.256 ],
+[ 0.62, -0.03, 2.234 ],
+[ 0.55, -0.46, 3.387 ],
+[ -0.34, -0.9, 3.072 ],
+[ 0.67, -0.34, 3.275 ],
+[ 0.35, 0.26, 0.902 ],
+[ -0.5700000000000001, -0.16, 0.3689999999999999 ],
+[ 0.8, 0.48, 1.125 ],
+[ 0.98, 0.62, 1.064 ],
+[ 0.96, -0.5600000000000001, 4.667999999999999 ],
+[ 0.8200000000000001, -0.52, 4.198 ],
+[ 0.9, -0.02, 2.811 ],
+[ -0.17, -0.35, 1.723 ],
]
diff --git a/examples/NeuralNetwork_QuadraticLoss/neural_network.g b/examples/NeuralNetwork_QuadraticLoss/neural_network.g
index 26b7c76..f37746a 100644
--- a/examples/NeuralNetwork_QuadraticLoss/neural_network.g
+++ b/examples/NeuralNetwork_QuadraticLoss/neural_network.g
@@ -1,29 +1,124 @@
-LoadPackage( "GradientDescentForCAP" );
+LoadPackage( "GradientBasedLearningForCAP" );
+#! @Chapter Examples for neural networks
-Smooth := SkeletalSmoothMaps;
-Lenses := CategoryOfLenses( Smooth );
-Para := CategoryOfParametrisedMorphisms( Smooth );
+#! @Section Neural network with quadratic loss function
-## the perfect weights are [ 2, -3, 1 ]
+#! This example demonstrates how to train a small feed-forward neural network
+#! for a regression task using the $\texttt{GradientBasedLearningForCAP}$ package. We employ
+#! the quadratic loss function and optimise the network parameters with
+#! gradient descent.
+#! The dataset consists of points $(x_1, x_2) \in \mathbb{R}^2$ with corresponding
+#! outputs $y \in \mathbb{R}$ generated by a linear function with some added noise.
+#! Concretely, the outputs are generated according to the formula
+#! @BeginLatexOnly
+#! \[
+#! y = 2x_1 - 3x_2 + 1 + \text{some small random error}.
+#! \]
-## The function we are trying minimize
-f := LossMorphismOfNeuralNetwork( Para, 2, [], 1, "IdFunc" );
+#! @EndLatexOnly
+#! @BeginLatexOnly
+#! \begin{center}
+#! \includegraphics[width=0.5\textwidth]{../examples/NeuralNetwork_QuadraticLoss/data/scatter_plot_training_examples.png}
+#! \end{center}
+#! @EndLatexOnly
-## One epoch update of the parameters
-optimizer := Lenses.AdamOptimizer();
+#! We build a neural network with input dimension 2, no hidden layers, and output dimension 1.
+#! Hence, the affine map between input and output layer has the following matrix dimensions (together with bias vector):
+#! @BeginLatexOnly
+#! \[
+#! \binom{W_1}{b_1} \in \mathbb{R}^{ 3 \times 1 }.
+#! \]
+#! @EndLatexOnly
+#! Where $W_1 \in \mathbb{R}^{2 \times 1}$ and $b_1 \in \mathbb{R}^1$ are the weights and bias to be learned.
+#! Equivalently, the network computes for an input $a_0 \in \mathbb{R}^2$ the output
+#! @BeginLatexOnly
+#! \[
+#! z_{1} := (a_0\;1)\binom{W_{1}}{b_{1}}=a_0 W_{1} + b_{1}\in \mathbb{R}^1.
+#! \]
+#! @EndLatexOnly
+#! Hence, the number of parameters to learn is 3 (two weights and one bias).
+#! We fit the neural network on the provided training examples for 30 epochs, and then compare the learned parameters
+#! to the perfect weights used to generate the dataset.
+#! We use the Adam optimiser for gradient descent. Hence, the initiat weights vector $(t, m_1, m_2, m_3, v_1, v_2, v_3, w_1, w_2, b_1) \in \mathbb{R}^{1+3+3+3}$
+#! contains additional parameters for the optimiser (the $m$'s and $v$'s). We initialise $t$ to $1$ and $m$'s and $v$'s to $0$.
-training_examples_path := "data/training_examples.txt";
+#! @Example
+Smooth := SkeletalSmoothMaps;
+#! SkeletalSmoothMaps
+Lenses := CategoryOfLenses( Smooth );
+#! CategoryOfLenses( SkeletalSmoothMaps )
+Para := CategoryOfParametrisedMorphisms( Smooth );
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+f := NeuralNetworkLossMorphism( Para, 2, [ ], 1, "IdFunc" );
+#! ℝ^3 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^3
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^6 -> ℝ^1
+optimizer := Lenses.AdamOptimizer();
+#! function( n ) ... end
+training_examples_path := Filename(
+ DirectoriesPackageLibrary("GradientBasedLearningForCAP", "examples")[1],
+ "NeuralNetwork_QuadraticLoss/data/training_examples.txt" );;
batch_size := 5;
-
-one_epoch_update := OneEpochUpdateLens( f, optimizer, training_examples_path, batch_size );
-
-## initialize the parameters and apply updates nr_epochs times
+#! 5
+one_epoch_update := OneEpochUpdateLens( f, optimizer,
+ training_examples_path, batch_size );
+#! (ℝ^10, ℝ^10) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^10 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^10 -> ℝ^10
w := [ 1, 0, 0, 0, 0, 0, 0, 0.21, -0.31, 0.7 ];
-nr_epochs := 50;
-
-w := Fit( one_epoch_update, nr_epochs, w );
+#! [ 1, 0, 0, 0, 0, 0, 0, 0.21, -0.31, 0.7 ]
+nr_epochs := 30;
+#! 30
+w := Fit( one_epoch_update, nr_epochs, w );;
+#! Epoch 0/30 - loss = 4.4574869198
+#! Epoch 1/30 - loss = 1.0904439656285798
+#! Epoch 2/30 - loss = 0.44893422753741707
+#! Epoch 3/30 - loss = 0.24718222552679428
+#! Epoch 4/30 - loss = 0.15816538314892969
+#! Epoch 5/30 - loss = 0.11009214898573197
+#! Epoch 6/30 - loss = 0.080765189573546586
+#! Epoch 7/30 - loss = 0.061445427900729599
+#! Epoch 8/30 - loss = 0.04803609207319106
+#! Epoch 9/30 - loss = 0.038370239087861441
+#! Epoch 10/30 - loss = 0.031199992288917108
+#! Epoch 11/30 - loss = 0.025760084031019172
+#! Epoch 12/30 - loss = 0.021557800050973547
+#! Epoch 13/30 - loss = 0.018263315597330656
+#! Epoch 14/30 - loss = 0.01564869258749324
+#! Epoch 15/30 - loss = 0.013552162640841157
+#! Epoch 16/30 - loss = 0.011856309185255345
+#! Epoch 17/30 - loss = 0.010474254262187581
+#! Epoch 18/30 - loss = 0.0093406409193010267
+#! Epoch 19/30 - loss = 0.008405587711401704
+#! Epoch 20/30 - loss = 0.0076305403249797375
+#! Epoch 21/30 - loss = 0.0069853659369945552
+#! Epoch 22/30 - loss = 0.0064462805409909937
+#! Epoch 23/30 - loss = 0.0059943461353685126
+#! Epoch 24/30 - loss = 0.0056143650058947617
+#! Epoch 25/30 - loss = 0.0052940553411779294
+#! Epoch 26/30 - loss = 0.0050234291867088457
+#! Epoch 27/30 - loss = 0.0047943179297568897
+#! Epoch 28/30 - loss = 0.0046000067074985669
+#! Epoch 29/30 - loss = 0.004434950161766555
+#! Epoch 30/30 - loss = 0.0042945495896027528
+w;
+#! [ 601, -0.00814765, -0.0328203, 0.00154532, 0.0208156, 0.0756998,
+#! 0.047054, 2.01399, -2.9546, 0.989903 ]
+#! @EndExample
-# after 5 epochs w = [ 1021, -0.00236067, -0.00633157, 0.000258869, 0.0135747, 0.0500079, 0.0310695, 2.00197, -2.99162, 0.997524 ]
-# ca. 2 ca. -3 ca. 1
+#! We notice that the learned weights $w_1 \approx 2.01399$, $w_2 \approx -2.9546$, and $b_1 \approx 0.989903$ are close to the
+#! perfect weights $2$, $-3$, and $1$ used to generate the dataset.
diff --git a/examples/NeuralNetworks.g b/examples/NeuralNetworks.g
new file mode 100644
index 0000000..7e32ebb
--- /dev/null
+++ b/examples/NeuralNetworks.g
@@ -0,0 +1,286 @@
+#! @Chapter Neural Networks
+
+#! @Section Examples
+
+LoadPackage( "GradientBasedLearningForCAP" );
+
+#! @Example
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+Para := CategoryOfParametrisedMorphisms( Smooth );
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+N213_Logits := NeuralNetworkLogitsMorphism( Para, 2, [ 1 ], 3 );
+#! ℝ^2 -> ℝ^3 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^11 -> ℝ^3
+dummy_input := DummyInputForNeuralNetwork( 2, [ 1 ], 3 );
+#! [ w2_1_1, b2_1, w2_1_2, b2_2, w2_1_3, b2_3, w1_1_1, w1_2_1, b1_1, z1, z2 ]
+Display( N213_Logits : dummy_input := dummy_input );
+#! ℝ^2 -> ℝ^3 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^11 -> ℝ^3
+#!
+#! ‣ w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1
+#! ‣ w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2
+#! ‣ w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3
+N213_Pred := NeuralNetworkPredictionMorphism( Para, 2, [ 1 ], 3, "IdFunc" );
+#! ℝ^2 -> ℝ^3 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^11 -> ℝ^3
+N213_Pred = N213_Logits;
+#! true
+N213_Loss := NeuralNetworkLossMorphism( Para, 2, [ 1 ], 3, "IdFunc" );
+#! ℝ^5 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^14 -> ℝ^1
+vars := Concatenation(
+ DummyInputStringsForNeuralNetwork( 2, [ 1 ], 3 ),
+ DummyInputStrings( "y", 3 ) );
+#! [ "w2_1_1", "b2_1", "w2_1_2", "b2_2", "w2_1_3", "b2_3", "w1_1_1", "w1_2_1",
+#! "b1_1", "z1", "z2", "y1", "y2", "y3" ]
+dummy_input := CreateContextualVariables( vars );
+#! [ w2_1_1, b2_1, w2_1_2, b2_2, w2_1_3, b2_3, w1_1_1, w1_2_1, b1_1, z1, z2,
+#! y1, y2, y3 ]
+Display( N213_Loss : dummy_input := dummy_input );
+#! ℝ^5 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^14 -> ℝ^1
+#!
+#! ‣ ((w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1 - y1) ^ 2
+#! + (w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2 - y2) ^ 2
+#! + (w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3 - y3) ^ 2) / 3
+#! @EndExample
+
+#! @BeginExample
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+Para := CategoryOfParametrisedMorphisms( Smooth );
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+N213_Logits := NeuralNetworkLogitsMorphism( Para, 1, [ ], 1 );
+#! ℝ^1 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^2
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^3 -> ℝ^1
+dummy_input := DummyInputForNeuralNetwork( 1, [ ], 1 );
+#! [ w1_1_1, b1_1, z1 ]
+Display( N213_Logits : dummy_input := dummy_input );
+#! ℝ^1 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^2
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^3 -> ℝ^1
+#!
+#! ‣ w1_1_1 * z1 + b1_1
+N213_Pred := PreCompose( N213_Logits, Para.Sigmoid( 1 ) );
+#! ℝ^1 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^2
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^3 -> ℝ^1
+N213_Pred = NeuralNetworkPredictionMorphism( Para, 1, [ ], 1, "Sigmoid" );
+#! true
+Display( N213_Pred : dummy_input := dummy_input );
+#! ℝ^1 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^2
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^3 -> ℝ^1
+#!
+#! ‣ 1 / (1 + Exp( - (w1_1_1 * z1 + b1_1) ))
+N213_Loss := NeuralNetworkLossMorphism( Para, 1, [ ], 1, "Sigmoid" );
+#! ℝ^2 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^2
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^4 -> ℝ^1
+vars := Concatenation(
+ DummyInputStringsForNeuralNetwork( 1, [ ], 1 ),
+ [ "y1" ] );
+#! [ "w1_1_1", "b1_1", "z1", "y1" ]
+dummy_input := CreateContextualVariables( vars );
+#! [ w1_1_1, b1_1, z1, y1 ]
+Display( N213_Loss : dummy_input := dummy_input );
+#! ℝ^2 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^2
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^4 -> ℝ^1
+#!
+#! ‣ Log( 1 + Exp( - (w1_1_1 * z1 + b1_1) ) ) + (1 - y1) * (w1_1_1 * z1 + b1_1)
+#! @EndExample
+
+#! @Example
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+Para := CategoryOfParametrisedMorphisms( Smooth );
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+N213_Logits := NeuralNetworkLogitsMorphism( Para, 2, [ 1 ], 3 );
+#! ℝ^2 -> ℝ^3 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^11 -> ℝ^3
+dummy_input := DummyInputForNeuralNetwork( 2, [ 1 ], 3 );
+#! [ w2_1_1, b2_1, w2_1_2, b2_2, w2_1_3, b2_3, w1_1_1, w1_2_1, b1_1, z1, z2 ]
+Display( N213_Logits : dummy_input := dummy_input );
+#! ℝ^2 -> ℝ^3 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^11 -> ℝ^3
+#!
+#! ‣ w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1
+#! ‣ w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2
+#! ‣ w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3
+N213_Pred := PreCompose( N213_Logits, Para.Softmax( 3 ) );
+#! ℝ^2 -> ℝ^3 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^11 -> ℝ^3
+N213_Pred = NeuralNetworkPredictionMorphism( Para, 2, [ 1 ], 3, "Softmax" );
+#! true
+Display( N213_Pred : dummy_input := dummy_input );
+#! ℝ^2 -> ℝ^3 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^11 -> ℝ^3
+#! ‣ Exp( w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1 )
+#! / (Exp( w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1 )
+#! + Exp( w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2 )
+#! + Exp( w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3 ))
+#! ‣ Exp( w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2 )
+#! / (Exp( w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1 )
+#! + Exp( w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2 )
+#! + Exp( w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3 ))
+#! ‣ Exp( w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3 )
+#! / (Exp( w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1 )
+#! + Exp( w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2 )
+#! + Exp( w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3 ))
+N213_Loss := NeuralNetworkLossMorphism( Para, 2, [ 1 ], 3, "Softmax" );
+#! ℝ^5 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^14 -> ℝ^1
+vars := Concatenation(
+ DummyInputStringsForNeuralNetwork( 2, [ 1 ], 3 ),
+ DummyInputStrings( "y", 3 ) );
+#! [ "w2_1_1", "b2_1", "w2_1_2", "b2_2", "w2_1_3", "b2_3", "w1_1_1", "w1_2_1",
+#! "b1_1", "z1", "z2", "y1", "y2", "y3" ]
+dummy_input := CreateContextualVariables( vars );
+#! [ w2_1_1, b2_1, w2_1_2, b2_2, w2_1_3, b2_3, w1_1_1, w1_2_1, b1_1, z1, z2,
+#! y1, y2, y3 ]
+Display( N213_Loss : dummy_input := dummy_input );
+#! ℝ^5 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^9
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^14 -> ℝ^1
+#!
+#! ‣ (
+#! (
+#! Log(
+#! Exp( w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1 ) +
+#! Exp( w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2 ) +
+#! Exp( w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3 )
+#! )
+#! - (w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1)
+#! ) * y1 +
+#! (
+#! Log( Exp( w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1 ) +
+#! Exp( w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2 ) +
+#! Exp( w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3 )
+#! )
+#! - (w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2)
+#! ) * y2
+#! +
+#! (
+#! Log( Exp( w2_1_1 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_1 ) +
+#! Exp( w2_1_2 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_2 ) +
+#! Exp( w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3 )
+#! )
+#! - (w2_1_3 * Relu( w1_1_1 * z1 + w1_2_1 * z2 + b1_1 ) + b2_3)
+#! ) * y3
+#! ) / 3
+#! @EndExample
diff --git a/examples/OneEpochUpdateLens.g b/examples/OneEpochUpdateLens.g
new file mode 100644
index 0000000..69d1143
--- /dev/null
+++ b/examples/OneEpochUpdateLens.g
@@ -0,0 +1,388 @@
+#! @Chapter Fitting Parameters
+
+#! @Section Examples
+
+LoadPackage( "GradientBasedLearningForCAP" );
+
+#! @Example
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+Para := CategoryOfParametrisedMorphisms( Smooth );
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+Lenses := CategoryOfLenses( Smooth );
+#! CategoryOfLenses( SkeletalSmoothMaps )
+D := [ Smooth.1, Smooth.1, Smooth.1, Smooth.1 ];
+#! [ ℝ^1, ℝ^1, ℝ^1, ℝ^1 ]
+p1 := ProjectionInFactorOfDirectProduct( Smooth, D, 1 );
+#! ℝ^4 -> ℝ^1
+p2 := ProjectionInFactorOfDirectProduct( Smooth, D, 2 );
+#! ℝ^4 -> ℝ^1
+p3 := ProjectionInFactorOfDirectProduct( Smooth, D, 3 );
+#! ℝ^4 -> ℝ^1
+p4 := ProjectionInFactorOfDirectProduct( Smooth, D, 4 );
+#! ℝ^4 -> ℝ^1
+f := PreCompose( (p3 - p1), Smooth.Power(2) )
+ + PreCompose( (p4 - p2), Smooth.Power(2) );
+#! ℝ^4 -> ℝ^1
+dummy_input := CreateContextualVariables( [ "theta_1", "theta_2", "x1", "x2" ] );
+#! [ theta_1, theta_2, x1, x2 ]
+Display( f : dummy_input := dummy_input );
+#! ℝ^4 -> ℝ^1
+#!
+#! ‣ (x1 + (- theta_1)) ^ 2 + (x2 + (- theta_2)) ^ 2
+f := MorphismConstructor( Para, Para.2, [ Smooth.2, f ], Para.1 );
+#! ℝ^2 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^2
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^4 -> ℝ^1
+Display( f : dummy_input := dummy_input );
+#! ℝ^2 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^2
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^4 -> ℝ^1
+#!
+#! ‣ (x1 + (- theta_1)) ^ 2 + (x2 + (- theta_2)) ^ 2
+optimizer := Lenses.GradientDescentOptimizer( :learning_rate := 0.01 );
+#! function( n ) ... end
+dummy_input := CreateContextualVariables( [ "theta_1", "theta_2", "g1", "g2" ] );
+#! [ theta_1, theta_2, g1, g2 ]
+Display( optimizer( 2 ) : dummy_input := dummy_input );
+#! (ℝ^2, ℝ^2) -> (ℝ^2, ℝ^2) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+#!
+#! ‣ theta_1
+#! ‣ theta_2
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^4 -> ℝ^2
+#!
+#! ‣ theta_1 + 0.01 * g1
+#! ‣ theta_2 + 0.01 * g2
+update_lens_1 := OneEpochUpdateLens( f, optimizer, [ [ 1, 2 ] ], 1 );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+dummy_input := CreateContextualVariables( [ "theta_1", "theta_2" ] );
+#! [ theta_1, theta_2 ]
+Display( update_lens_1 : dummy_input := dummy_input );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! ‣ ((1 + (- theta_1)) ^ 2 + (2 + (- theta_2)) ^ 2) / 1 / 1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+#!
+#! ‣ theta_1 + 0.01 * (-1 * (0 + 0 + (1 * ((2 * (1 + (- theta_1)) ^ 1 * -1 + 0) * 1
+#! + 0 + 0 + 0) * 1 + 0 + 0 + 0) * 1 + 0))
+#! ‣ theta_2 + 0.01 * (-1 * (0 + 0 + 0 + (0 + 1 * (0 +
+#! (0 + 2 * (2 + (- theta_2)) ^ 1 * -1) * 1 + 0 + 0) * 1 + 0 + 0) * 1))
+update_lens_1 := SimplifyMorphism( update_lens_1, infinity );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+Display( update_lens_1 : dummy_input := dummy_input );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! ‣ (theta_1 - 1) ^ 2 + (theta_2 - 2) ^ 2
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+#!
+#! ‣ 0.98 * theta_1 + 0.02
+#! ‣ 0.98 * theta_2 + 0.04
+update_lens_2 := OneEpochUpdateLens( f, optimizer, [ [ 3, 4 ] ], 1 );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+Display( update_lens_2 : dummy_input := dummy_input );
+#!
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! ‣ ((3 + (- theta_1)) ^ 2 + (4 + (- theta_2)) ^ 2) / 1 / 1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+#!
+#! ‣ theta_1 + 0.01 * (-1 * (0 + 0 + (1 * ((2 * (3 + (- theta_1)) ^ 1 * -1 + 0) * 1
+#! + 0 + 0 + 0) * 1 + 0 + 0 + 0) * 1 + 0))
+#! ‣ theta_2 + 0.01 * (-1 * (0 + 0 + 0 + (0 + 1 * (0 +
+#! (0 + 2 * (4 + (- theta_2)) ^ 1 * -1) * 1 + 0 + 0) * 1 + 0 + 0) * 1))
+update_lens_2 := SimplifyMorphism( update_lens_2, infinity );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+Display( update_lens_2 : dummy_input := dummy_input );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! ‣ (theta_1 - 3) ^ 2 + (theta_2 - 4) ^ 2
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+#!
+#! ‣ 0.98 * theta_1 + 0.06
+#! ‣ 0.98 * theta_2 + 0.08
+update_lens := OneEpochUpdateLens( f, optimizer, [ [ 1, 2 ], [ 3, 4 ] ], 1 );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+Display( update_lens : dummy_input := dummy_input );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! ‣ (
+#! ((1 + (- theta_1)) ^ 2 + (2 + (- theta_2)) ^ 2) / 1 +
+#! ((3 + (- theta_1)) ^ 2 + (4 + (- theta_2)) ^ 2) / 1
+#! ) / 2
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+#!
+#! ‣ theta_1 + 0.01 * (-1 * (0 + 0 + (1 * ((2 * (1 + (- theta_1)) ^ 1 * -1 + 0) * 1
+#! + 0 + 0 + 0) * 1 + 0 + 0 + 0) * 1 + 0)) + 0.01 * (-1 * (0 + 0 +
+#! (1 * ((2 * (3 + (- (theta_1 + 0.01 * (-1 * (0 + 0 +
+#! (1 * ((2 * (1 + (- theta_1)) ^ 1 * -1 + 0) * 1 + 0 + 0 + 0) * 1
+#! + 0 + 0 + 0) * 1 + 0))))) ^ 1 * -1 + 0) * 1 + 0 + 0 + 0) * 1 + 0 + 0 + 0) * 1
+#! + 0))
+#! ‣ theta_2 + 0.01 * (-1 * (0 + 0 + 0 + (0 + 1 * (0 + (0 + 2 * (2 +
+#! (- theta_2)) ^ 1 * -1) * 1 + 0 + 0) * 1 + 0 + 0) * 1)) + 0.01
+#! * (-1 * (0 + 0 + 0 + (0 + 1 * (0 + (0 + 2 * (4 +
+#! (- (theta_2 + 0.01 * (-1 * (0 + 0 + 0 + (0 + 1 * (0 + (0 + 2 * (2 +
+#! (- theta_2)) ^ 1 * -1) * 1 + 0 + 0) * 1 + 0 + 0) * 1))))) ^ 1 * -1) * 1
+#! + 0 + 0) * 1 + 0 + 0) * 1))
+update_lens := SimplifyMorphism( update_lens, infinity );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+Display( update_lens : dummy_input := dummy_input );
+#! (ℝ^2, ℝ^2) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^1
+#!
+#! ‣ theta_1 ^ 2 - 4 * theta_1 + theta_2 ^ 2 - 6 * theta_2 + 15
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^2 -> ℝ^2
+#!
+#! ‣ 0.9604 * theta_1 + 0.0796
+#! ‣ 0.9604 * theta_2 + 0.1192
+"If we used only update_lens_1, the parameters converge to (1,2)";;
+theta := [ 0, 0 ];;
+for i in [ 1 .. 1000 ] do theta := PutMorphism( update_lens_1 )( theta ); od;
+theta;
+#! [ 1., 2. ]
+"If we used only update_lens_2, the parameters converge to (3,4)";;
+theta := [ 0, 0 ];;
+for i in [ 1 .. 1000 ] do theta := PutMorphism( update_lens_2 )( theta ); od;
+theta;
+#! [ 3., 4. ]
+"If we use the combined update_lens, the parameters converge to (2,3)";;
+theta := [ 0, 0 ];;
+for i in [ 1 .. 1000 ] do theta := PutMorphism( update_lens )( theta ); od;
+theta;
+#! [ 2.0101, 3.0101 ]
+"Instead of manually applying the put-morphism, we can use the Fit operation:";;
+"For example, to fit theta = (0,0) using 10 epochs:";;
+theta := [ 0, 0 ];;
+theta := Fit( update_lens, 10, theta );
+#! Epoch 0/10 - loss = 15
+#! Epoch 1/10 - loss = 13.9869448
+#! Epoch 2/10 - loss = 13.052687681213568
+#! Epoch 3/10 - loss = 12.19110535502379
+#! Epoch 4/10 - loss = 11.39655013449986
+#! Epoch 5/10 - loss = 10.663813003077919
+#! Epoch 6/10 - loss = 9.9880895506637923
+#! Epoch 7/10 - loss = 9.3649485545394704
+#! Epoch 8/10 - loss = 8.790302999738083
+#! Epoch 9/10 - loss = 8.2603833494932317
+#! Epoch 10/10 - loss = 7.7717128910720641
+#! [ 0.668142, 1.00053 ]
+#! @EndExample
+
+
+#! Let us in this example find a solution to the equation $\theta^3-\theta^2-4=0$. We can reframe this
+#! as a minimization problem by considering the parametrised morphism
+#! $(\mathbb{R}^1, f):\mathbb{R}^0 \to \mathbb{R}^1$ where $f(\theta) = (\theta^3-\theta^2-4)^2$.
+
+#! @BeginExample
+Smooth := SkeletalCategoryOfSmoothMaps( );
+#! SkeletalSmoothMaps
+Para := CategoryOfParametrisedMorphisms( Smooth );
+#! CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
+Lenses := CategoryOfLenses( Smooth );
+#! CategoryOfLenses( SkeletalSmoothMaps )
+f := Smooth.Power( 3 ) - Smooth.Power( 2 ) - Smooth.Constant([ 4 ]);
+#! ℝ^1 -> ℝ^1
+Display( f );
+#! ℝ^1 -> ℝ^1
+#! ‣ x1 ^ 3 + (- x1 ^ 2) + - 4
+f := PreCompose( f, Smooth.Power( 2 ) );
+#! ℝ^1 -> ℝ^1
+Display( f );
+#! ℝ^1 -> ℝ^1
+#!
+#! ‣ (x1 ^ 3 + (- x1 ^ 2) + - 4) ^ 2
+f := MorphismConstructor( Para, Para.0, [ Smooth.1, f ], Para.1 );
+#! ℝ^0 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^1
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^1 -> ℝ^1
+dummy_input := CreateContextualVariables( [ "theta" ] );
+#! [ theta ]
+Display( f : dummy_input := dummy_input );
+#! ℝ^0 -> ℝ^1 defined by:
+#!
+#! Underlying Object:
+#! -----------------
+#! ℝ^1
+#!
+#! Underlying Morphism:
+#! -------------------
+#! ℝ^1 -> ℝ^1
+#!
+#! ‣ (theta ^ 3 + (- theta ^ 2) + -4) ^ 2
+optimizer := Lenses.AdamOptimizer( :learning_rate := 0.01,
+ beta1 := 0.9, beta2 := 0.999, epsilon := 1.e-7 );
+#! function( n ) ... end
+dummy_input := CreateContextualVariables( [ "t", "m", "v", "theta", "g" ] );
+#! [ t, m, v, theta, g ]
+Display( optimizer( 1 ) : dummy_input := dummy_input );
+#! (ℝ^4, ℝ^4) -> (ℝ^1, ℝ^1) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^4 -> ℝ^1
+#!
+#! ‣ theta
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^5 -> ℝ^4
+#!
+#! ‣ t + 1
+#! ‣ 0.9 * m + 0.1 * g
+#! ‣ 0.999 * v + 0.001 * g ^ 2
+#! ‣ theta + 0.01 / (1 - 0.999 ^ t) * ((0.9 * m + 0.1 * g) /
+#! (1.e-07 + Sqrt( (0.999 * v + 0.001 * g ^ 2) / (1 - 0.999 ^ t) )))
+update_lens := OneEpochUpdateLens( f, optimizer, [ [ ] ], 1 );
+#! (ℝ^4, ℝ^4) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^4 -> ℝ^1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^4 -> ℝ^4
+dummy_input := CreateContextualVariables( [ "t", "m", "v", "theta" ] );
+#! [ t, m, v, theta ]
+Display( update_lens : dummy_input := dummy_input );
+#! (ℝ^4, ℝ^4) -> (ℝ^1, ℝ^0) defined by:
+#!
+#! Get Morphism:
+#! ------------
+#! ℝ^4 -> ℝ^1
+#!
+#! ‣ (theta ^ 3 + (- theta ^ 2) + -4) ^ 2 / 1 / 1
+#!
+#! Put Morphism:
+#! ------------
+#! ℝ^4 -> ℝ^4
+#!
+#! ‣ t + 1
+#! ‣ 0.9 * m + 0.1 * (-1 * (1 * (2 * (theta ^ 3 + (- theta ^ 2) + -4) ^ 1 *
+#! (3 * theta ^ 2 + (- 2 * theta ^ 1)) * 1) * 1 * 1))
+#! ‣ 0.999 * v + 0.001 * (-1 * (1 * (2 * (theta ^ 3 + (- theta ^ 2) + -4) ^ 1 *
+#! (3 * theta ^ 2 + (- 2 * theta ^ 1)) * 1) * 1 * 1)) ^ 2
+#! ‣ theta + 0.01 / (1 - 0.999 ^ t) * ((0.9 * m + 0.1 *
+#! (-1 * (1 * (2 * (theta ^ 3 + (- theta ^ 2) + -4) ^ 1 *
+#! (3 * theta ^ 2 + (- 2 * theta ^ 1)) * 1) * 1 * 1))) /
+#! (1.e-07 + Sqrt( (0.999 * v + 0.001 * (-1 * (1 * (2 *
+#! (theta ^ 3 + (- theta ^ 2) + -4) ^ 1 * (3 * theta ^ 2 + (- 2 * theta ^ 1)) * 1)
+#! * 1 * 1)) ^ 2) / (1 - 0.999 ^ t) )))
+Fit( update_lens, 10000, [ 1, 0, 0, 8 ] : verbose := false );
+#! [ 10001, 4.11498e-13, 1463.45, 2. ]
+UnderlyingMorphism( f )( [ 2. ] );
+#! [ 0. ]
+#! @EndExample
diff --git a/examples/generate_supported_cap_operations.g b/examples/generate_supported_cap_operations.g
new file mode 100644
index 0000000..d1ec9f4
--- /dev/null
+++ b/examples/generate_supported_cap_operations.g
@@ -0,0 +1,57 @@
+#! @Chapter Skeletal Category of Smooth Maps
+
+#! @Section Supported CAP Operations
+
+#! @Subsection Generate Documentation
+
+#! @Example
+
+LoadPackage( "GradientBasedLearningForCAP", false );
+#! true
+
+CAP_INTERNAL_GENERATE_DOCUMENTATION_FOR_CATEGORY_INSTANCES(
+ [
+ [ SkeletalSmoothMaps, "SkeletalSmoothMaps", 0 ],
+ ],
+ "GradientBasedLearningForCAP",
+ "SkeletalCategoryOfSmoothMaps.autogen.gd",
+ "Skeletal Category of Smooth Maps",
+ "Supported CAP Operations"
+);
+#! @EndExample
+
+
+#! @Chapter Category of Parametrised Morphisms
+
+#! @Section Supported CAP Operations
+
+#! @Subsection Generate Documentation
+
+#! @Example
+LoadPackage( "GradientBasedLearningForCAP", false );
+#! true
+
+Para := CategoryOfParametrisedMorphisms( SkeletalSmoothMaps );;
+
+CAP_INTERNAL_GENERATE_DOCUMENTATION_FOR_CATEGORY_INSTANCES(
+ [
+ [ Para, "Category of Parametrised Smooth Maps", 0 ],
+ ],
+ "GradientBasedLearningForCAP",
+ "CategoryOfParametrisedMorphisms.autogen.gd",
+ "Category of Parametrised Morphisms",
+ "Supported CAP Operations"
+);
+
+Lenses := CategoryOfLenses( SkeletalSmoothMaps );;
+
+CAP_INTERNAL_GENERATE_DOCUMENTATION_FOR_CATEGORY_INSTANCES(
+ [
+ [ Lenses, "Category of Lenses of Smooth Maps", 0 ],
+ ],
+ "GradientBasedLearningForCAP",
+ "CategoryOfLenses.autogen.gd",
+ "Category of Lenses",
+ "Supported CAP Operations"
+);
+#! @EndExample
diff --git a/gap/CategoryOfLenses.autogen.gd b/gap/CategoryOfLenses.autogen.gd
new file mode 100644
index 0000000..3beb142
--- /dev/null
+++ b/gap/CategoryOfLenses.autogen.gd
@@ -0,0 +1,50 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
+#
+# Declarations
+#
+# THIS FILE IS AUTOMATICALLY GENERATED, SEE CAP_project/CAP/gap/MethodRecord.gi
+
+#! @Chapter Category of Lenses
+
+#! @Section Supported CAP Operations
+
+#! @Subsection Category of Lenses of Smooth Maps
+
+#! The following CAP operations are supported:
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
diff --git a/gap/CategoryOfLenses.gd b/gap/CategoryOfLenses.gd
index 3bc540d..456fa68 100644
--- a/gap/CategoryOfLenses.gd
+++ b/gap/CategoryOfLenses.gd
@@ -1,27 +1,191 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Declarations
#
+#! @Chapter Category of Lenses
+
+#! @Section Definition
+#! Let $C$ be a cartesian category. The category of lenses $\mathbf{Lenses}(C)$ has:
+#! - Objects: pairs of objects $(S_1, S_2)$ in $C$.
+#! - Morphisms: a morphism from $f:(S_1, S_2) \to (T_1, T_2)$ is given by a pair of morphisms in $C$:
+#! - A "get" (or "forward") morphism $f_g: S_1 \to T_1$ in $C$.
+#! - A "put" (or "backward") morphism $f_p: S_1 \times T_2 \to S_2$ in $C$.
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}
+#! % Nodes
+#! \node (A) at (-3, 1) {$S_1$};
+#! \node (B) at ( 3, 1) {$T_1$};
+#! \node (Ap) at (-3,-1) {$S_2$};
+#! \node (Bp) at ( 3,-1) {$T_2$};
+#! \draw (-1.5,-1.8) rectangle (1.5,1.8);
+#! \draw[->] (A) -- node[above] {$f_g$} (B);
+#! \draw[->] (Bp) -- node[midway, below] {$f_p$} (Ap);
+#! \draw[-] (-1,1) to[out=-90, in=90] (1,-1);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#! - Composition: given lenses $f: (S_1, S_2) \to (T_1, T_2)$ and $h: (T_1, T_2) \to (U_1, U_2)$,
+#! their composition $f \cdot h: (S_1, S_2) \to (U_1, U_2)$ is defined by:
+#! - Get morphism: $f_g \cdot h_g: S_1 \to U_1$.
+#! - Put morphism: $\langle \pi_{S_1}, (f_g \times U_2)\cdot h_p\rangle \cdot f_p:S_1 \times U_2 \to S_2$,
+#! where $\langle -, -\rangle$ denotes the universal morphism into the product object.
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}[node distance=3cm, auto]
+#! \node (S1U2) {$S_1 \times U_2$};
+#! \node (S1T2) [right of=S1U2, node distance=5cm] {$S_1 \times T_2$};
+#! \node (T1U2) [below of=S1U2] {$T_1 \times U_2$};
+#! \node (S1) [above of=S1T2] {$S_1$};
+#! \node (T2) [below of=S1T2] {$T_2$};
+#! \node (S2) [right of=S1T2] {$S_2$};
+#! \draw[->] (S1U2) -- node {$\langle \pi_{S_1}, (f_g \times U_2)\cdot h_p\rangle$} (S1T2);
+#! \draw[->] (S1U2) -- node {$\pi_{S_1}$} (S1);
+#! \draw[->] (S1U2) -- node [left] {$f_g\times U_2$} (T1U2);
+#! \draw[->] (T1U2) -- node {$h_p$} (T2);
+#! \draw[->] (S1T2) -- node {$f_p$} (S2);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#! - Identity: the identity lens on $(S_1, S_2)$ has:
+#! - Get morphism: $id_{S_1}: S_1 \to S_1$.
+#! - Put morphism: the projection morphism $\pi_{S_2}: S_1 \times S_2 \to S_2$.
+
+#! @Section GAP Categories
+
+#! @Description
+#! The &GAP; category of a category of lenses.
DeclareCategory( "IsCategoryOfLenses",
IsCapCategory );
#! @Description
-#! The &GAP; category of objects in path categories.
+#! The &GAP; category of objects in a category of lenses.
DeclareCategory( "IsObjectInCategoryOfLenses",
IsCapCategoryObject );
#! @Description
-#! The &GAP; category of morphisms in path categories.
+#! The &GAP; category of morphisms in a category of lenses.
DeclareCategory( "IsMorphismInCategoryOfLenses",
IsCapCategoryMorphism );
+#! @Section Constructors
+
+#! @Description
+#! Construct the category of lenses over the category C.
+#! A lens is a pair of morphisms: a "get" morphism and a "put" morphism,
+#! which together model bidirectional data flow used in automatic differentiation.
+#! @Arguments C
+#! @Returns a category
DeclareOperation( "CategoryOfLenses", [ IsCapCategory ] );
+if false then
+
+#! @Description
+#! Construct an object in the category of lenses given a pair of objects.
+#! @Arguments Lens, obj_list
+#! @Returns an object in the category of lenses
+DeclareOperation( "ObjectConstructor", [ IsCategoryOfLenses, IsDenseList ] );
+
+#! @Description
+#! Construct a morphism in the category of lenses given a pair of morphisms:
+#! a "get" morphism and a "put" morphism.
+#! @Arguments Lens, source_obj, morphism_list, target_obj
+#! @Returns a morphism in the category of lenses
+DeclareOperation( "MorphismConstructor", [ IsCategoryOfLenses, IsObjectInCategoryOfLenses, IsDenseList, IsObjectInCategoryOfLenses ] );
+
+fi;
+
+#! @Section Attributes
+
+#! @Description
+#! Returns the underlying pair of objects $(S_1, S_2)$ for an object in the category of lenses.
+#! This operation is a synonym for ObjectDatum.
+#! @Arguments obj
+#! @Returns a pair of objects
DeclareAttribute( "UnderlyingPairOfObjects", IsObjectInCategoryOfLenses );
+
+#! @Description
+#! Returns the underlying pair of morphisms for a morphism in the category of lenses.
+#! This operation is a synonym for MorphismDatum.
+#! @Arguments f
+#! @Returns a pair of morphisms
DeclareAttribute( "UnderlyingPairOfMorphisms", IsMorphismInCategoryOfLenses );
+
+#! @Description
+#! Returns the "get" morphism of a lens.
+#! For a lens $f: (S_1, S_2) \to (T_1, T_2)$, the output is the get morphism $S_1 \to T_1$.
+#! @Arguments f
+#! @Returns a morphism
DeclareAttribute( "GetMorphism", IsMorphismInCategoryOfLenses );
+
+#! @Description
+#! Returns the "put" morphism of a lens.
+#! For a lens $f: (S_1, S_2) \to (T_1, T_2)$, the output is the put morphism $S_1 \times T_2 \to S_2$.
+#! @Arguments f
+#! @Returns a morphism
DeclareAttribute( "PutMorphism", IsMorphismInCategoryOfLenses );
-DeclareOperation( "EmbeddingIntoCategoryOfLenses", [ IsCapCategory, IsCategoryOfLenses ] );
+#! @Section Operations
+
+#! @Description
+#! Embedding functor from the category Smooth of smooth maps into its category of lenses Lenses.
+#! An object $\mathbb{R}^m$ in Smooth is mapped to the lens $(\mathbb{R}^m, \mathbb{R}^m)$,
+#! and a morphism $f: \mathbb{R}^m \to \mathbb{R}^n$ is mapped to the lens
+#! with get morphism $f: \mathbb{R}^m \to \mathbb{R}^n$ and put morphism
+#! $Rf: \mathbb{R}^m \times \mathbb{R}^n \to \mathbb{R}^m$ given by
+#! $(u,v) \mapsto vJ_f(u)$, where $J_f(u) \in \mathbb{R}^{n \times m}$ is the Jacobian matrix of $f$ evaluated
+#! at $u \in \mathbb{R}^m$. This functor might be defined for any cartesian reverse-differentiable category,
+#! but the category of smooth maps is the only such category currently implemented in this package.
+#! For example, if $f: \mathbb{R}^2 \to \mathbb{R}$ is defined by
+#! $f(x_1, x_2) = (x_1^3 + x_2^2)$, then the corresponding $Rf:\mathbb{R}^2 \times \mathbb{R} \to \mathbb{R}^2$ is given by
+#! $Rf((x_1, x_2), y) = y(3x_1^2, 2x_2) = (3x_1^2y, 2x_2y)$.
+#! @Arguments Smooth, Lenses
+#! @Returns a functor
+DeclareOperation( "ReverseDifferentialLensFunctor", [ IsSkeletalCategoryOfSmoothMaps, IsCategoryOfLenses ] );
+
+#! @Section Optimizers
+
+#! @BeginLatexOnly
+#! In this section we document the optimizers provided via the dot operator
+#! $\mathrm{Lenses}.( .. )$ when the underlying category is the skeletal category of smooth maps.
+#! The optimizers are lenses whose get-part reads the current parameters and whose put-part
+#! performs the parameter/state update.
+#! In the formulas below we use the following conventions:
+#! \begin{itemize}
+#! \item $\theta_t \in \mathbb{R}^n$: the current parameter vector.
+#! \item $g_t \in \mathbb{R}^n$: the current gradient (coming from backpropagation).
+#! \item $\eta > 0$: learning rate.
+#! \item All operations on vectors are meant componentwise.
+#! \end{itemize}
+#! \begin{itemize}
+#! \item
+#! \textbf{GradientDescentOptimizer}:
+#! This is plain gradient descent with update
+#! $$(\theta_t,g_t) \mapsto \theta_t + \eta g_t =: \theta_{t+1}.$$
+#! \item
+#! \textbf{GradientDescentWithMomentumOptimizer}:
+#! This optimizer maintains a momentum vector $s_t \in \mathbb{R}^n$.
+#! With momentum parameter $\mu \in [0,1)$ the update is
+#! $$(s_t, \theta_t, g_t) \mapsto (\mu\, s_t + \eta\, g_t, \theta_t + \mu\, s_t + \eta\, g_t) =: (s_{t+1}, \theta_{t+1}).$$
+#! Note that the \textbf{GradientDescentOptimizer} is a special case of this optimizer with $\mu = 0$.
+#! \item
+#! \textbf{AdagradOptimizer}:
+#! This optimizer maintains an accumulator $s_t \in \mathbb{R}^n$ of squared gradients.
+#! With $\epsilon > 0$ the update is
+#! $$ (s_t, \theta_t, g_t) \mapsto (s_t + g_t^2, \theta_t + \eta\, \frac{g_t}{\epsilon + \sqrt{s_t + g_t^2}}) =: (s_{t+1}, \theta_{t+1}).$$
+#! \item
+#! \textbf{AdamOptimizer}:
+#! This optimizer maintains first and second moment estimates $m_t, v_t \in \mathbb{R}^n$.
+#! With parameters $\beta_1,\beta_2 \in [0,1)$ and $\epsilon > 0$ the update is
+#! $$ (t, m_t, v_t, \theta_t, g_t) \mapsto (t+1, \beta_1 m_t + (1-\beta_1) g_t, \beta_2 v_t + (1-\beta_2) g_t^2, \theta_t + \eta\, \frac{\hat m_{t+1}}{\epsilon + \sqrt{\hat v_{t+1}}}) =: (t+1, m_{t+1}, v_{t+1}, \theta_{t+1})$$
+#! where
+#! \begin{align*}
+#! \hat m_{t+1} &= \frac{\beta_1 m_t + (1-\beta_1) g_t}{1-\beta_1^{t+1}}, \\
+#! \hat v_{t+1} &= \frac{\beta_2 v_t + (1-\beta_2) g_t^2}{1-\beta_2^{t+1}}. \\
+#! \end{align*}
+#! The iteration counter $t$ is included in the input because the update formulas depend on it ($\beta_1^{t+1}$ and $\beta_2^{t+1}$),
+#! while in the other optimizers $t$ was merely an index and not needed for the update.
+#! \end{itemize}
+#! @EndLatexOnly
diff --git a/gap/CategoryOfLenses.gi b/gap/CategoryOfLenses.gi
index a22b711..c78b8f5 100644
--- a/gap/CategoryOfLenses.gi
+++ b/gap/CategoryOfLenses.gi
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Implementations
#
@@ -414,15 +414,16 @@ InstallOtherMethod( IsCongruentForMorphisms,
end );
##
-InstallMethod( EmbeddingIntoCategoryOfLenses,
- [ IsCategoryOfSkeletalSmoothMaps, IsCategoryOfLenses ],
+InstallMethod( ReverseDifferentialLensFunctor,
+ [ IsSkeletalCategoryOfSmoothMaps, IsCategoryOfLenses ],
- function ( C, Lenses )
+ function ( Smooth, Lenses )
local F;
- Assert( 0, IsIdenticalObj( C, UnderlyingCategory( Lenses ) ) );
+ Assert( 0, IsIdenticalObj( UnderlyingCategory( Lenses ), Smooth ),
+ "the underlying category of the category of lenses must be the category of smooth maps!" );
- F := CapFunctor( "Embedding functor into category of lenses", C, Lenses );
+ F := CapFunctor( "Embedding functor into category of lenses", Smooth, Lenses );
AddObjectFunction( F,
function ( A )
@@ -435,7 +436,7 @@ InstallMethod( EmbeddingIntoCategoryOfLenses,
function ( source, f, target )
- return MorphismConstructor( Lenses, source, Pair( f, ReverseDifferential( C, f ) ), target );
+ return MorphismConstructor( Lenses, source, Pair( f, ReverseDifferential( Smooth, f ) ), target );
end );
@@ -443,6 +444,19 @@ InstallMethod( EmbeddingIntoCategoryOfLenses,
end );
+##
+InstallOtherMethod( ReverseDifferentialLensFunctor,
+ [ IsSkeletalCategoryOfSmoothMaps ],
+
+ function ( Smooth )
+ local Lenses;
+
+ Lenses := CategoryOfLenses( Smooth );
+
+ return ReverseDifferentialLensFunctor( Smooth, Lenses );
+
+end );
+
## TxP3 P
## ------------>
@@ -467,7 +481,7 @@ InstallOtherMethod( \.,
C := UnderlyingCategory( Lenses );
- if not IsCategoryOfSkeletalSmoothMaps( C ) then
+ if not IsSkeletalCategoryOfSmoothMaps( C ) then
TryNextMethod( );
fi;
@@ -613,10 +627,10 @@ InstallOtherMethod( \.,
PreCompose( Smooth, p3, DirectProductFunctorial( Smooth, ListWithIdenticalEntries( n, Smooth.Power( 2 ) ) ) ) );
t := MultiplicationForMorphisms( Smooth,
- SmoothMorphism( Smooth, P3, ListWithIdenticalEntries( n, learning_rate ), Smooth.( n ) ),
+ SmoothMap( Smooth, P3, ListWithIdenticalEntries( n, learning_rate ), Smooth.( n ) ),
PreCompose( Smooth,
AdditionForMorphisms( Smooth,
- SmoothMorphism( Smooth, P3, ListWithIdenticalEntries( n, epsilon ), Smooth.( n ) ),
+ SmoothMap( Smooth, P3, ListWithIdenticalEntries( n, epsilon ), Smooth.( n ) ),
PreCompose( Smooth,
s,
DirectProductFunctorial( Smooth, ListWithIdenticalEntries( n, Smooth.Sqrt ) ) ) ),
@@ -821,7 +835,7 @@ InstallOtherMethod( \.,
p4 := ProjectionInFactorOfDirectProductWithGivenDirectProduct( Smooth, diagram, 4, TxP4 );
p5 := ProjectionInFactorOfDirectProductWithGivenDirectProduct( Smooth, diagram, 5, TxP4 );
- put_1 := AdditionForMorphisms( Smooth, p1, SmoothMorphism( Smooth, TxP4, [ 1 ], Smooth.( 1 ) ) );
+ put_1 := AdditionForMorphisms( Smooth, p1, SmoothMap( Smooth, TxP4, [ 1 ], Smooth.( 1 ) ) );
m := AdditionForMorphisms( Smooth,
MultiplyWithElementOfCommutativeRingForMorphisms( Smooth, beta_1, p2 ),
@@ -832,7 +846,7 @@ InstallOtherMethod( \.,
b := PreComposeList( Smooth,
[ p1,
SubtractionForMorphisms( Smooth,
- SmoothMorphism( Smooth, Smooth.( 1 ), [ 1 ], Smooth.( 1 ) ),
+ SmoothMap( Smooth, Smooth.( 1 ), [ 1 ], Smooth.( 1 ) ),
Smooth.PowerBase( beta_2 ) ),
Smooth.Power( -1 ) ] );
@@ -852,7 +866,7 @@ InstallOtherMethod( \.,
b := PreComposeList( Smooth,
[ p1,
SubtractionForMorphisms( Smooth,
- SmoothMorphism( Smooth, Smooth.( 1 ), [ 1 ], Smooth.( 1 ) ),
+ SmoothMap( Smooth, Smooth.( 1 ), [ 1 ], Smooth.( 1 ) ),
Smooth.PowerBase( beta_2 ) ),
Smooth.Power( -1 ) ] );
@@ -861,7 +875,7 @@ InstallOtherMethod( \.,
v_hat := MultiplicationForMorphisms( Smooth, v, b );
- delta_n := SmoothMorphism( Smooth,
+ delta_n := SmoothMap( Smooth,
TxP4,
ListWithIdenticalEntries( n, epsilon ),
Smooth.( n ) );
@@ -1021,13 +1035,13 @@ InstallOtherMethod( \.,
return
function ( arg... )
- return ApplyFunctor( EmbeddingIntoCategoryOfLenses( C, Lenses ), CallFuncList( C.( f ), arg ) );
+ return ApplyFunctor( ReverseDifferentialLensFunctor( C ), CallFuncList( C.( f ), arg ) );
end;
elif f in [ "Sqrt", "Exp", "Log", "Sin", "Cos", "BinaryCrossEntropyLoss_", "BinaryCrossEntropyLoss" ] then
- return ApplyFunctor( EmbeddingIntoCategoryOfLenses( C, Lenses ), C.( f ) );
+ return ApplyFunctor( ReverseDifferentialLensFunctor( C ), C.( f ) );
fi;
end );
@@ -1055,9 +1069,9 @@ InstallMethod( ViewString,
" -> ",
ViewString( Target( f ) ),
" defined by:",
- "\n\nGet Morphism:\n----------\n",
+ "\n\nGet Morphism:\n------------\n",
ViewString( GetMorphism( f ) ),
- "\n\nPut Morphism:\n----------\n",
+ "\n\nPut Morphism:\n------------\n",
ViewString( PutMorphism( f ) )
);
@@ -1068,36 +1082,25 @@ InstallMethod( DisplayString,
[ IsMorphismInCategoryOfLenses ],
function ( f )
+ local rank, get_morphism, put_morphism, get_input, put_input;
- return Concatenation(
- ViewString( Source( f ) ),
- " -> ",
- ViewString( Target( f ) ),
- " defined by:\n",
- "\nGet Morphism:\n----------\n",
- DisplayString( GetMorphism( f ) ),
- "\nPut Morphism:\n----------\n",
- DisplayString( PutMorphism( f ) ) );
+ get_morphism := GetMorphism( f );
+ put_morphism := PutMorphism( f );
-end );
-
-##
-InstallMethod( Display,
- [ IsMorphismInCategoryOfLenses ],
-
- function ( f )
+ put_input := CAP_INTERNAL_RETURN_OPTION_OR_DEFAULT( "dummy_input", DummyInput( put_morphism ) );
- Print( Concatenation(
+ rank := RankOfObject( Source( get_morphism ) );
+
+ get_input := put_input{ [ 1 .. rank ] };
+
+ return Concatenation(
ViewString( Source( f ) ),
" -> ",
ViewString( Target( f ) ),
" defined by:\n",
- "\nGet Morphism:\n------------\n" ) );
-
- Display( GetMorphism( f ) );
-
- Print( "\nPut Morphism:\n------------\n" );
-
- Display( PutMorphism( f ) );
+ "\nGet Morphism:\n------------\n",
+ DisplayString( GetMorphism( f ) : dummy_input := get_input ),
+ "\n\nPut Morphism:\n------------\n",
+ DisplayString( PutMorphism( f ) : dummy_input := put_input ) );
end );
diff --git a/gap/CategoryOfParametrisedMorphisms.autogen.gd b/gap/CategoryOfParametrisedMorphisms.autogen.gd
new file mode 100644
index 0000000..530d9b5
--- /dev/null
+++ b/gap/CategoryOfParametrisedMorphisms.autogen.gd
@@ -0,0 +1,35 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
+#
+# Declarations
+#
+# THIS FILE IS AUTOMATICALLY GENERATED, SEE CAP_project/CAP/gap/MethodRecord.gi
+
+#! @Chapter Category of Parametrised Morphisms
+
+#! @Section Supported CAP Operations
+
+#! @Subsection Category of Parametrised Smooth Maps
+
+#! The following CAP operations are supported:
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
diff --git a/gap/CategoryOfParametrisedMorphisms.gd b/gap/CategoryOfParametrisedMorphisms.gd
index 4ec530b..fa3166e 100644
--- a/gap/CategoryOfParametrisedMorphisms.gd
+++ b/gap/CategoryOfParametrisedMorphisms.gd
@@ -1,33 +1,224 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Declarations
#
+#! @Chapter Category of Parametrised Morphisms
+
+#! @Section Definition
+
+#! Let $(C, \otimes, I)$ be a strict symmetric monoidal category. The category $P(C)$ of parametrized morphisms
+#! is given by the following data:
+#!
+#! * **Objects**: $\mathrm{Obj}(P(C)) := \mathrm{Obj}(C)$
+#!
+#! * **Morphisms**: For two objects $A$ and $B$ in $P(C)$, a morphism $f \colon A \to B$ in $P(C)$ consists of:
+#! * A parameter object $P \in \mathrm{Obj}(C)$
+#! * A morphism $f_P \colon P \otimes A \to B$ in $C$
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}[baseline=-2pt]
+#! \node[draw, minimum width=2cm, minimum height=1cm] (F) {$f_P$};
+#! \node[left=1.2cm of F] (A) {$A$};
+#! \node[right=1.2cm of F] (B) {$B$};
+#! \node[above=1cm of F] (P) {$P$};
+#!
+#! \draw[->] (A) -- (F);
+#! \draw[->] (F) -- (B);
+#! \draw[->] (P) -- (F);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#! * **Composition**: Given two morphisms $f=(P,f_P) \colon A \to B$ and $g =(Q,g_Q) \colon B \to C$ in $P(C)$,
+#! their composition $f \cdot g \colon A \to C$ is given by the pair
+#! @BeginLatexOnly
+#! \[
+#! \left( Q \otimes P, (\mathrm{id}_Q \otimes f_P) \cdot g_Q \colon (Q \otimes P) \otimes A \to C \right).
+#! \]
+#! \[
+#! \begin{tikzpicture}[baseline=-2pt]
+#! \node[draw, minimum width=2cm, minimum height=1cm] (F) {$Q \otimes P \otimes A \xrightarrow{\mathrm{id}_Q \otimes f_P} Q \otimes B \xrightarrow{g_Q} C$};
+#! \node[left=1.2cm of F] (A) {$A$};
+#! \node[right=1.2cm of F] (C) {$C$};
+#! \node[above=1cm of F] (QP) {$Q \otimes P$};
+#!
+#! \draw[->] (A) -- (F);
+#! \draw[->] (F) -- (C);
+#! \draw[->] (QP) -- (F);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#! * **Identity morphisms**: For each object $A$ in $P(C)$, the identity morphism is given by
+#! the pair $\mathrm{id}_A = \left( I, (\mathrm{id}_A)_I := \mathrm{id}_A \in \mathbf{Hom}_C(A, A) \right)$.
+
+#! @Section GAP Categories
+
+#! @Description
+#! The &GAP; category of a category of parametrised morphisms.
DeclareCategory( "IsCategoryOfParametrisedMorphisms",
IsCapCategory );
+#! @Description
+#! The &GAP; category of objects in a category of parametrised morphisms.
DeclareCategory( "IsObjectInCategoryOfParametrisedMorphisms",
IsCapCategoryObject );
+#! @Description
+#! The &GAP; category of morphisms in a category of parametrised morphisms.
DeclareCategory( "IsMorphismInCategoryOfParametrisedMorphisms",
IsCapCategoryMorphism );
+#! @Section Constructors
+#! @Description
+#! Construct the category of parametrised morphisms over the category C.
+#! @Arguments C
+#! @Returns a category
DeclareOperation( "CategoryOfParametrisedMorphisms", [ IsCapCategory ] );
+if false then
+#! @Description
+#! Construct an object in the category of parametrised morphisms.
+#! @Arguments Para, A
+#! @Returns an object in the category of parametrised morphisms
+DeclareOperation( "ObjectConstructor", [ IsCategoryOfParametrisedMorphisms, IsCapCategoryObject ] );
+
+
+#! @Description
+#! Construct a morphism in the category of parametrised morphisms.
+#! The datum is a pair consisting of the parameter object $P$ and the underlying morphism $f_P \colon P \otimes A \to B$.
+#! @Arguments Para, A, datum, B
+#! @Returns a morphism in the category of parametrised morphisms
+DeclareOperation( "MorphismConstructor",
+ [ IsCategoryOfParametrisedMorphisms,
+ IsObjectInCategoryOfParametrisedMorphisms,
+ IsDenseList,
+ IsObjectInCategoryOfParametrisedMorphisms ] );
+fi;
+
+#! @Section Attributes
+
+#! @Description
+#! Returns the underlying category of a category of parametrised morphisms.
+#! @Arguments Para
+#! @Returns a category
DeclareAttribute( "UnderlyingCategory", IsCategoryOfParametrisedMorphisms );
+#! @Description
+#! Returns the underlying object for an object in the category of parametrised morphisms.
+#! @Arguments A
+#! @Returns an object in the underlying category
DeclareAttribute( "UnderlyingObject", IsObjectInCategoryOfParametrisedMorphisms );
+#! @Description
+#! Returns the parameter object (underlying object) of a parametrised morphism.
+#! @Arguments f
+#! @Returns an object in the underlying category
DeclareAttribute( "UnderlyingObject", IsMorphismInCategoryOfParametrisedMorphisms );
+
+#! @Description
+#! Returns the underlying morphism $f_p:P \otimes A \to B \in C$ in $C$ of a parametrised morphism
+#! $f = (P, f_p:P \otimes A \to B):A \to B$ in $P(C)$.
+#! @Arguments f
+#! @Returns a morphism in the underlying category
DeclareAttribute( "UnderlyingMorphism", IsMorphismInCategoryOfParametrisedMorphisms );
+#! @Section Operations
+
+#! @Description
+#! The input is a parametrised morphism $f=(P,f_P: P\otimes A \to B): A \to B$ and a morphism $r: Q \to P$ in $C$.
+#! The output is the reparametrised morphism $\hat{f}=(Q,\hat{f}_Q: Q \otimes A \to B): A \to B$ where $\hat{f}_Q := (r \otimes \mathrm{id}_A) \cdot f_P \colon Q \otimes A \to B$.
+#! @Arguments f, r
+#! @Returns a parametrised morphism
DeclareOperation( "ReparametriseMorphism", [ IsMorphismInCategoryOfParametrisedMorphisms, IsCapCategoryMorphism ] );
-DeclareOperation( "SwitchSourceAndUnderlyingObject", [ IsMorphismInCategoryOfParametrisedMorphisms ] );
-DeclareOperation( "AdjustToBatchSize", [ IsMorphismInCategoryOfParametrisedMorphisms, IsInt ] );
-DeclareOperation( "NaturalEmbeddingIntoCategoryOfParametrisedMorphisms", [ IsCapCategory, IsCategoryOfParametrisedMorphisms ] );
+#! @Description
+#! The input is a parametrised morphism $f=(P,f_P: P\otimes A \to B): A \to B$.
+#! The output is the parametrised morphism $\hat{f}=(A,\hat{f}_A: A \otimes P \to B): P \to B$ where $\hat{f}_A := \sigma_{A,P} \cdot f_P \colon A \otimes P \to B$,
+#! with $\sigma_{A,P}: A \otimes P \to P \otimes A$ being the symmetry isomorphism (braiding) in the underlying symmetric monoidal category $C$.
+#! @Arguments f
+#! @Returns a parametrised morphism
+DeclareOperation( "FlipParameterAndSource", [ IsMorphismInCategoryOfParametrisedMorphisms ] );
+
+DeclareSynonym( "FlipSourceAndParameter", FlipParameterAndSource );
+
+#! @Description
+#! Adjusts a parametrised morphism to process a batch of n inputs simultaneously.
+#! Given a parametrised morphism $f = (P, f_P \colon P \otimes A \to B): A \to B$ where the target $B$ has rank 1,
+#! this operation produces a new parametrised morphism that can handle $n$ copies of $A$ at once (a batch of size $n$).
+#!
+#! The construction works as follows:
+#! * The source becomes the direct product of $n$ copies of $A$: $A^n = A \times \cdots \times A$
+#! * The parameter object remains $P$
+#! * The underlying morphism is constructed by: $\alpha \cdot f_P^{(n)} \cdot \mu_n \colon P \otimes A^n \to B$
+#! where:
+#! - $\alpha:P \otimes A^n \to (P \otimes A)^n$ is the morphism that reuses the parameter $P$ across the $n$ components.
+#! For example, when $n=2$, $\alpha$ sends $(p, a_1, a_2) \in P \otimes A^2$ to $(p, a_1, p, a_2) \in (P \otimes A)^2$, i.e., the same parameter $p$ is paired with each input (training example) $a_i$.
+#! - $f_P^{(n)}: (P \otimes A)^n \to B^n$ is the direct product of $n$ copies of $f_P$,
+#! - $\mu_n: B^n \to B$ is the mean aggregator that averages the $n$ outputs into a single output in $B$ (since $B$ has rank 1).
+#! @Arguments f, n
+#! @Returns a parametrised morphism
+DeclareOperation( "Batchify", [ IsMorphismInCategoryOfParametrisedMorphisms, IsInt ] );
+
+#! @Description
+#! Natural embedding functor from category C into category of parametrised morphisms P (of C).
+#! Objects are mapped to themselves, and a morphism $f: A \to B$ in C is mapped to the parametrised morphism
+#! $(I, f_I: I \otimes A \xrightarrow{f} B): A \to B$ in P. Note that $I \otimes A = A$ by the strict monoidal unit property.
+#! This functor reflects the fact that ordinary morphisms can be viewed as parametrised morphisms with a trivial (unit) parameter.
+#! @Arguments C, P
+#! @Returns a functor
+DeclareOperation( "NaturalEmbedding", [ IsCapCategory, IsCategoryOfParametrisedMorphisms ] );
+
+#! @Description
+#! Embedding functor from category of parametrised morphisms Para into another category of parametrised morphisms Para_Lenses.
+#! @Arguments Para, Para_Lenses
+#! @Returns a functor
DeclareOperation( "EmbeddingIntoCategoryOfParametrisedMorphisms", [ IsCategoryOfParametrisedMorphisms, IsCategoryOfParametrisedMorphisms ] );
+#! @Section Available Parametrised Morphisms
+
+#! All available smooth maps can be lifted to parametrised morphisms in the category of parametrised morphisms
+#! by using one of the following two methods:
+#! - Using the natural embedding functor from the category of smooth maps into the category of parametrised morphisms,
+#! which associates to each smooth map $f: A \to B$ the parametrised morphism
+#! $(I, f_I: I \otimes A \xrightarrow{f} B): A \to B$ where $I$ is the monoidal unit. For example, $\mathrm{Cos}$, $\mathrm{Exp}$, $\mathrm{Log}$ etc.
+#! - Constructing parametrised morphisms directly by specifying the parameter object and the underlying morphism.
+#! For instance, the construction of an affine transformation as a parametrised morphism:
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}[baseline=-2pt]
+#! \node[draw, minimum width=2cm, minimum height=1cm] (F) {$f_P:\mathbb{R}^{(m+1)n+m} \to \mathbb{R}^n$};
+#! \node[left=1.2cm of F] (A) {$\mathbb{R}^m$};
+#! \node[right=1.2cm of F] (B) {$\mathbb{R}^n$};
+#! \node[above=1cm of F] (P) {$\mathbb{R}^{(m+1)n}$};
+#!
+#! \draw[->] (A) -- (F);
+#! \draw[->] (F) -- (B);
+#! \draw[->] (P) -- (F);
+#! \end{tikzpicture}
+#! \]
+#! where $f_P:\mathbb{R}^{(m+1)n+m} \to \mathbb{R}^n$ is the standard smooth affine transformation map that
+#! computes $z W + b$, where
+#! $W \in \mathbb{R}^{m \times n}$ is the weight matrix, $b \in \mathbb{R}^{1 \times n}$
+#! is the bias row vector and $z \in \mathbb{R}^{1 \times m}$ is the logits row-vector.
+#! More explicitly, the input to this morphism consists of $(m+1)n + m$ components
+#! structured as follows:
+#! \begin{itemize}
+#! \item The first $(m+1)n$ components encode the weight matrix $W$ and bias vector $b$
+#! by concatenating the columns of the augmented matrix
+#! $\begin{pmatrix} W \\ b \end{pmatrix} \in \mathbb{R}^{(m+1) \times n}$.
+#! Explicitly, for each output dimension $i \in \{1,\ldots,n\}$, we have
+#! the $i$-th column $(w_{1,i}, w_{2,i}, \ldots, w_{m,i}, b_i)^T$, i.e., $(m+1)$ parameters.
+#! Thus, the parameter object $P$ is $\mathbb{R}^{(m+1)n}$.
+#! \item The last $m$ components represent the logits $z = (z_1, \ldots, z_m)$ to be transformed.
+#! Usually, these correspond to the activations from the previous layer in a neural network.
+#! \end{itemize}
+#! For example, for $m=3$ and $n=2$, the morphism $f_P$ maps
+#! $$(w_{1,1}, w_{2,1}, w_{3,1}, b_1, w_{1,2}, w_{2,2}, w_{3,2}, b_2, z_1, z_2, z_3) \in \mathbb{R}^{(3+1) \cdot 2 + 3} = \mathbb{R}^{8+3}=\mathbb{R}^{11}$$
+#! to
+#! $\begin{pmatrix} z_1 & z_2 & z_3 & 1 \end{pmatrix} \cdot \begin{pmatrix} w_{1,1} & w_{1,2} \\ w_{2,1} & w_{2,2} \\ w_{3,1} & w_{3,2} \\ b_1 & b_2 \end{pmatrix} = $
+#! $\begin{pmatrix} z_1 & z_2 & z_3 \end{pmatrix} \cdot \begin{pmatrix} w_{1,1} & w_{1,2} \\ w_{2,1} & w_{2,2} \\ w_{3,1} & w_{3,2} \end{pmatrix} + \begin{pmatrix} b_1 & b_2 \end{pmatrix}$ \\
+#! which compiles to $$(w_{1,1} z_1 + w_{2,1} z_2 + w_{3,1} z_3 + b_1,\; w_{1,2} z_1 + w_{2,2} z_2 + w_{3,2} z_3 + b_2) \in \mathbb{R}^2.$$
+#! @EndLatexOnly
diff --git a/gap/CategoryOfParametrisedMorphisms.gi b/gap/CategoryOfParametrisedMorphisms.gi
index ca56673..0a1df00 100644
--- a/gap/CategoryOfParametrisedMorphisms.gi
+++ b/gap/CategoryOfParametrisedMorphisms.gi
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Implementations
#
@@ -234,7 +234,7 @@ end );
#
# a direct implementation:
#
-InstallMethod( SwitchSourceAndUnderlyingObject,
+InstallMethod( FlipParameterAndSource,
[ IsMorphismInCategoryOfParametrisedMorphisms ],
function ( f )
@@ -308,14 +308,19 @@ end );
InstallOtherMethod( Eval,
[ IsMorphismInCategoryOfParametrisedMorphisms, IsDenseList ],
- function( f, pair )
+ function( f, input_list )
- return Eval( UnderlyingMorphism( f ), Concatenation( pair ) );
+ # if input_list is a pair of parameter_vector and input_vector, concatenate them
+ if Length( input_list ) = 2 and IsDenseList( input_list[1] ) and IsDenseList( input_list[2] ) then
+ input_list := Concatenation( input_list[1], input_list[2] );
+ fi;
+
+ return Eval( UnderlyingMorphism( f ), input_list );
end );
##
-InstallMethod( NaturalEmbeddingIntoCategoryOfParametrisedMorphisms,
+InstallMethod( NaturalEmbedding,
[ IsCapCategory, IsCategoryOfParametrisedMorphisms ],
function ( C, Para )
@@ -356,7 +361,7 @@ InstallMethod( EmbeddingIntoCategoryOfParametrisedMorphisms,
Lenses := UnderlyingCategory( Para_Lenses );
- iota := EmbeddingIntoCategoryOfLenses( C, Lenses );
+ iota := ReverseDifferentialLensFunctor( C );
delta := CapFunctor( "Embedding into category of parametrised morphisms", Para, Para_Lenses );
@@ -391,7 +396,7 @@ InstallOtherMethod( \.,
C := UnderlyingCategory( Para );
- if not IsCategoryOfSkeletalSmoothMaps( C ) then
+ if not IsSkeletalCategoryOfSmoothMaps( C ) then
TryNextMethod( );
fi;
@@ -462,7 +467,7 @@ InstallOtherMethod( \.,
end );
##
-InstallMethod( AdjustToBatchSize,
+InstallMethod( Batchify,
[ IsMorphismInCategoryOfParametrisedMorphisms, IsInt ],
function ( f, n )
diff --git a/gap/CategoryOfSkeletalSmoothMaps.gd b/gap/CategoryOfSkeletalSmoothMaps.gd
deleted file mode 100644
index 56e25ab..0000000
--- a/gap/CategoryOfSkeletalSmoothMaps.gd
+++ /dev/null
@@ -1,36 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
-#
-# Declarations
-#
-
-
-DeclareCategory( "IsCategoryOfSkeletalSmoothMaps",
- IsCapCategory );
-
-DeclareCategory( "IsObjectInCategoryOfSkeletalSmoothMaps",
- IsCapCategoryObject );
-
-DeclareCategory( "IsMorphismInCategoryOfSkeletalSmoothMaps",
- IsCapCategoryMorphism );
-
-
-DeclareGlobalFunction( "CategoryOfSkeletalSmoothMaps" );
-
-DeclareOperation( "SmoothMorphism",
- [ IsCategoryOfSkeletalSmoothMaps, IsObjectInCategoryOfSkeletalSmoothMaps, IsDenseList, IsObjectInCategoryOfSkeletalSmoothMaps ] );
-
-DeclareAttribute( "RankOfObject", IsObjectInCategoryOfSkeletalSmoothMaps );
-
-DeclareAttribute( "Map", IsMorphismInCategoryOfSkeletalSmoothMaps );
-DeclareAttribute( "JacobianMatrix", IsMorphismInCategoryOfSkeletalSmoothMaps );
-
-DeclareOperation( "Eval", [ IsMorphismInCategoryOfSkeletalSmoothMaps, IsDenseList ] );
-DeclareOperation( "EvalJacobianMatrix", [ IsMorphismInCategoryOfSkeletalSmoothMaps, IsDenseList ] );
-
-DeclareGlobalVariable( "GradientDescentForCAP" );
-
-DeclareGlobalFunction( "DummyInputStringsForAffineTransformation" );
-DeclareGlobalFunction( "DummyInputForAffineTransformation" );
-DeclareGlobalFunction( "DummyInputStringsForPolynomialTransformation" );
-DeclareGlobalFunction( "DummyInputForPolynomialTransformation" );
diff --git a/gap/Expressions.gd b/gap/Expressions.gd
index 0c7dd4c..e42ef96 100644
--- a/gap/Expressions.gd
+++ b/gap/Expressions.gd
@@ -1,25 +1,103 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Declarations
#
+#! @Chapter Expressions
+
+#! @Section GAP Categories
+
+#! @Description
+#! The &GAP; category of symbolic expressions.
+#! Expressions are used to represent mathematical formulas symbolically,
+#! allowing operations like differentiation to be performed symbolically before evaluation.
+#! @Arguments e
DeclareCategory( "IsExpression", IsNearAdditiveElementWithInverse and IsAdditiveElement and IsMultiplicativeElement );
+
+#! @Description
+#! The &GAP; category of constant expressions (expressions with no variables).
+#! Constant expressions represent fixed numerical values and can be evaluated directly.
+#! @Arguments e
DeclareCategory( "IsConstantExpression", IsExpression );
+#! @Section Constructors
+
+#! @Description
+#! Create an expression from a list of variables and a string representation.
+#! @Arguments variables, string
+#! @Returns an expression
DeclareOperation( "Expression", [ IsDenseList, IsString ] );
-DeclareOperation( "Expression", [ IsString ] );
+#! @Description
+#! Create a constant expression from a string.
+#! The expression will have no variables and will also be registered as a global constant.
+#! @Arguments string
+#! @Returns a constant expression
+DeclareOperation( "ConstantExpression", [ IsString ] );
+
+#! @Section Attributes
+
+#! @Description
+#! The list of variables of an expression.
+#! @Arguments e
+#! @Returns a list of strings
DeclareAttribute( "Variables", IsExpression );
+
+#! @Description
+#! The string representation of an expression.
+#! @Arguments e
+#! @Returns a string
DeclareAttribute( "String", IsExpression );
+
+#! @Description
+#! Convert an expression to a &GAP; function that can be evaluated numerically.
+#! @Arguments e
+#! @Returns a function
DeclareAttribute( "AsFunction", IsExpression );
+
DeclareOperation( "AsFunction", [ IsDenseList, IsString ] );
+#! @Section Operations
+
DeclareOperation( "DummyInputStrings", [ IsString, IsInt ] );
+
+#! @Description
+#! Generate a list of dummy input expressions using base name var with indices from 1 to n.
+#! For example, DummyInput("x", 3) returns a list of three expressions x1, x2, x3.
+#! Those can be used as input to smooth morphisms constructed from expressions.
+#! @Arguments var, n
+#! @Returns a list of expressions
DeclareOperation( "DummyInput", [ IsString, IsInt ] );
+#! @Section Global Functions
+
+#! @Description
+#! Assign to each string in constants a global constant expression defined by that string.
+#! The variables of these expressions will be empty.
+#! For example, ConvertToConstantExpressions( ["Pi"] ) returns [ Pi ],
+#! and Variables(Pi)=[].
+#! @Arguments constants
+#! @Returns a list of constant expressions
DeclareGlobalFunction( "ConvertToConstantExpressions" );
-DeclareGlobalFunction( "ConvertToExpressions" );
+
+#! @Description
+#! Assigns to each string in variables an expression with the same name.
+#! The variables of these expressions equals the list variables itself.
+#! After that, one would be able for example to construct more complex expressions using these variables,
+#! e.g., x1 + Sin(x2).
+#! For example, ConvertToExpressions( ["x1", "x2"] ) returns a pair [ x1, x2 ],
+#! where x1 and x2 are expressions with string representations "x1" and "x2",
+#! and Variables(x1) = ["x1", "x2"], Variables(x2) = ["x1", "x2"].
+#! @Arguments variables
+#! @Returns a list of expressions
+DeclareGlobalFunction( "CreateContextualVariables" );
+
+#! @Description
+#! Assign each expression in vars to a global variable with the same name.
+#! For example, if vars = [ x1, x2 ], this function will create global variables x1 and x2
+#! corresponding to the expressions in the list.
+#! @Arguments vars
DeclareGlobalFunction( "AssignExpressions" );
DeclareGlobalVariable( "LIST_OF_GLOBAL_CONSTANT_EXPRESSIONS" );
diff --git a/gap/Expressions.gi b/gap/Expressions.gi
index 1c2069f..882680e 100644
--- a/gap/Expressions.gi
+++ b/gap/Expressions.gi
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Implementations
#
@@ -36,7 +36,7 @@ InstallMethod( Expression,
end );
##
-InstallMethod( Expression,
+InstallMethod( ConstantExpression,
[ IsString ],
function ( string )
@@ -48,6 +48,10 @@ InstallMethod( Expression,
MakeReadWriteGlobal( string );
+ if IsBoundGlobal( string ) then
+ UnbindGlobal( string );
+ fi;
+
DeclareSynonym( string, constant );
Add( LIST_OF_GLOBAL_CONSTANT_EXPRESSIONS, constant );
@@ -74,7 +78,7 @@ InstallOtherMethod( ViewString,
end );
##
-InstallGlobalFunction( ConvertToExpressions,
+InstallGlobalFunction( CreateContextualVariables,
variables -> List( variables, var -> Expression( variables, var ) )
);
@@ -101,13 +105,13 @@ InstallMethod( DummyInput,
function ( var, r )
- return ConvertToExpressions( DummyInputStrings( var, r ) );
+ return CreateContextualVariables( DummyInputStrings( var, r ) );
end );
##
InstallOtherMethod( DummyInput,
- [ IsString, IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsString, IsMorphismInSkeletalCategoryOfSmoothMaps ],
function ( var, f )
@@ -117,7 +121,7 @@ end );
##
InstallOtherMethod( DummyInput,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps ],
function ( f )
@@ -131,6 +135,8 @@ InstallGlobalFunction( AssignExpressions,
function ( vars )
local func;
+ Assert( 0, ForAll( vars, IsExpression ) );
+
func :=
function ( e )
local name;
diff --git a/gap/FitParameters.gd b/gap/FitParameters.gd
index a3d33f9..903f926 100644
--- a/gap/FitParameters.gd
+++ b/gap/FitParameters.gd
@@ -1,8 +1,224 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Declarations
#
+#! @Chapter Fitting Parameters
+
+#! @Section Introduction
+
+#! Suppose we have a parametrised morphism $(\mathbb{R}^p, f):\mathbb{R}^n \to \mathbb{R}$
+#! where $\mathbb{R}^p$ is the parameters of the morphism and
+#! $f:\mathbb{R}^{p+n} \to \mathbb{R}$ is a morphism in a skeletal category of smooth maps
+#! (It represents a loss function over an input in $\mathbb{R}^n$ and parameter vector in $\mathbb{R}^p$).
+#! Given a set of training examples $\{X_1, \ldots, X_m\}$ where each $X_i \in \mathbb{R}^n$,
+#! we want to fit a parameter vector $\Theta \in \mathbb{R}^p$ such that the output of $f$ is minimized on the training examples.
+#!
+#! We can achieve this by creating an update-lens for each training example.
+#! This update-lens reads the current parameters $\Theta$ and updates it according to the gradient of the loss function $f$ at the example $X_i$.
+#! We start by substituting the training example $X_i$ into $f$ resulting in a morphism $f_i:\mathbb{R}^p \to \mathbb{R}$
+#! defined by $f_i(\Theta) = f(\Theta, X_i)$.
+#! By applying the reverse differential lens functor ReverseDifferentialLensFunctor
+#! $$\mathbf{R}: \mathrm{Smooth} \to \mathrm{Lenses}(\mathrm{Smooth}),$$
+#! on $f_i$,
+#! we obtain a lens $\mathbf{R}(f_i):(\mathbb{R}^p, \mathbb{R}^p) \to (\mathbb{R}^1, \mathbb{R}^1)$.
+#! The get-morphism of this lens reads the current parameters $\Theta$ and computes the loss $f_i(\Theta)$,
+#! while the put-morphism $Rf_i:\mathbb{R}^p \times \mathbb{R}^1 \to \mathbb{R}^p$
+#! is given by $(\Theta, r) \mapsto rJ_{f_i}(\Theta)$ where $J_{f_i}(\Theta) \in \mathbb{R}^{1 \times p}$ is the Jacobian matrix of $f_i$ evaluated at $\Theta$.
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}
+#! % Nodes
+#! \node (A) at (-3, 1) {$\mathbb{R}^p$};
+#! \node (B) at ( 3, 1) {$\mathbb{R}^1$};
+#! \node (Ap) at (-3,-1) {$\mathbb{R}^p$};
+#! \node (Bp) at ( 3,-1) {$\mathbb{R}^1$};
+#! \draw (-1.5,-1.8) rectangle (1.5,1.8);
+#! \draw[->] (A) -- node[below] {$f_i$} (B);
+#! \draw[->] (A) -- node[above] {$\Theta \mapsto f_i(\Theta)$} (B);
+#! \draw[->] (Bp) -- node[midway, above] {$Rf_i$} (Ap);
+#! \draw[->] (Bp) -- node[midway, below] {$rJ_{f_i}(\Theta) \mapsfrom (\Theta, r)$} (Ap);
+#! \draw[-] (-1,1) to[out=-90, in=90] (1,-1);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#!
+#! The One-Epoch update lens for the example $X_i$ is then obtained by precomposing an optimizer lens (e.g., gradient descent, Adam, etc.)
+#! to the following lens $\mathbf{R}(f_i) \cdot \varepsilon$ where
+#! $\varepsilon:(\mathbb{R}^1, \mathbb{R}^1) \to (\mathbb{R}^1, \mathbb{R}^0)$ is the lens defined by:
+#! - Get morphism: the identity morphism on $\mathbb{R}^1$.
+#! - Put morphism: the morphism $\mathbb{R}^1 \times \mathbb{R}^0 \cong \mathbb{R}^1 \to \mathbb{R}^1$ defined by $r \mapsto -r$.
+#! This lens merely negates the gradient signal.
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}
+#! \node (A) at (-3, 1) {$\mathbb{R}^p$};
+#! \node (B) at ( 3, 1) {$\mathbb{R}^1$};
+#! \node (C) at (9, 1) {$\mathbb{R}^1$};
+#! \node (Ap) at (-3,-1) {$\mathbb{R}^p$};
+#! \node (Bp) at ( 3,-1) {$\mathbb{R}^1$};
+#! \node (Cp) at (9,-1) {$\mathbb{R}^0$};
+#! \draw (-1.5,-1.8) rectangle (1.5,1.8);
+#! \draw[->] (A) -- node[below] {$f_i$} (B);
+#! \draw[->] (A) -- node[above] {$\Theta \mapsto f_i(\Theta)$} (B);
+#! \draw[->] (Bp) -- node[midway, above] {$Rf_i$} (Ap);
+#! \draw[->] (Bp) -- node[midway, below] {$rJ_{f_i}(\Theta) \mapsfrom (\Theta, r)$} (Ap);
+#! \draw[-] (-1,1) to[out=-90, in=90] (1,-1);
+#! \draw[-] (5,1) to[out=-90, in=90] (7,-1);
+#! \draw (4.5,-1.8) rectangle (7.5,1.8);
+#! \draw[->] (B) -- node[above] {$r \mapsto r$} (C);
+#! \draw[->] (Cp) -- node[midway, below] {$-r \mapsfrom r$} (Bp);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#!
+#! For example, if we choose the optimizer to be the gradient descent optimizer with learning rate $\eta=0.01$:
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}
+#! % Nodes
+#! \node (A) at (-3, 1) {$\mathbb{R}^p$};
+#! \node (B) at ( 3, 1) {$\mathbb{R}^p$};
+#! \node (Ap) at (-3,-1) {$\mathbb{R}^p$};
+#! \node (Bp) at ( 3,-1) {$\mathbb{R}^p$};
+#! \draw (-1.5,-1.8) rectangle (1.5,1.8);
+#! \draw[->] (A) -- node[above] {$\Theta \mapsto f_i(\Theta)$} (B);
+#! \draw[->] (Bp) -- node[midway, below] {$\Theta + \eta g \mapsfrom (\Theta, g)$} (Ap);
+#! \draw[-] (-1,1) to[out=-90, in=90] (1,-1);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#! The resulting One-Epoch update lens for the example $X_i$ is given by:
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}
+#! % Nodes
+#! \node (A) at (-3, 1) {$\mathbb{R}^p$};
+#! \node (B) at ( 3, 1) {$\mathbb{R}^p$};
+#! \node (Ap) at (-3,-1) {$\mathbb{R}^p$};
+#! \node (Bp) at ( 3,-1) {$\mathbb{R}^0$};
+#! \draw (-1.5,-1.8) rectangle (1.5,1.8);
+#! \draw[->] (A) -- node[above] {$\Theta \mapsto \Theta$} (B);
+#! \draw[->] (Bp) -- node[midway, below] {$\Theta - \eta J_{f_i}(\Theta) \mapsfrom \Theta$} (Ap);
+#! \draw[-] (-1,1) to[out=-90, in=90] (1,-1);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#! Now, we can start by a random parameter vector $\Theta_0 \in \mathbb{R}^p$
+#! and apply the update morphism of the One-Epoch update lens for $X_1$ to obtain a new parameter vector $\Theta_1$,
+#! then use $\Theta_1$ and the One-Epoch update lens for $X_2$ to obtain $\Theta_2$, and so on.
+#! After going through all training examples, we have completed one epoch of training.
+#! To perform multiple epochs of training, we can simply repeat the process.
+#!
+#! For example, suppose we start with the parametrised morphism $(\mathbb{R}^2, f):\mathbb{R}^2 \to \mathbb{R}$
+#! where $f:\mathbb{R}^{2+2} \to \mathbb{R}$ is
+#! defined by $f(\theta_1, \theta_2, x_1, x_2) = (x_1-\theta_1)^2 + (x_2-\theta_2)^2$ where $\Theta := (\theta_1, \theta_2) \in \mathbb{R}^2$ represents the parameters
+#! and $x = (x_1, x_2) \in \mathbb{R}^2$ is the input.
+#! Given training examples $X_1 = (1,2)$ and $X_2 = (3,4)$,
+#! the morphism $f_1:\mathbb{R}^2 \to \mathbb{R}$ is defined by $f_1(\theta_1, \theta_2) = (1 - \theta_1)^2 + (2 - \theta_2)^2$
+#! with Jacobian matrix
+#! @BeginLatexOnly
+#! \[
+#! J_{f_1}(\theta_1, \theta_2) = \begin{pmatrix}-2(1 - \theta_1) & -2(2 - \theta_2)\end{pmatrix}.
+#! \]
+#! @EndLatexOnly
+#! Thus, the One-Epoch update lens for $X_1$ is given by:
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}
+#! % Nodes
+#! \node (A) at (-5, 1) {$\mathbb{R}^2$};
+#! \node (B) at ( 5, 1) {$\mathbb{R}^2$};
+#! \node (Ap) at (-5,-1) {$\mathbb{R}^2$};
+#! \node (Bp) at ( 5,-1) {$\mathbb{R}^0$};
+#! \draw (-4,-2.5) rectangle (4,2);
+#! \draw[->] (A) -- node[above] {$(\theta_1, \theta_2) \mapsto (1 - \theta_1)^2 + (2 - \theta_2)^2$} (B);
+#! \draw[->] (Bp) -- node[midway, below] {$(\underbrace{\theta_1 + \eta \cdot 2(1-\theta_1)}_{0.98\theta_1 + 0.02}, \underbrace{\theta_2 + \eta \cdot 2(2-\theta_2)}_{0.98\theta_2 + 0.04}) \mapsfrom (\theta_1, \theta_2)$} (Ap);
+#! \draw[-] (-1,1) to[out=-90, in=90] (1,-1);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#! and the One-Epoch update lens for $X_2$ is given by:
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}
+#! % Nodes
+#! \node (A) at (-5, 1) {$\mathbb{R}^2$};
+#! \node (B) at ( 5, 1) {$\mathbb{R}^2$};
+#! \node (Ap) at (-5,-1) {$\mathbb{R}^2$};
+#! \node (Bp) at ( 5,-1) {$\mathbb{R}^0$};
+#! \draw (-4,-2.5) rectangle (4,2);
+#! \draw[->] (A) -- node[above] {$(\theta_1, \theta_2) \mapsto (3 - \theta_1)^2 + (4 - \theta_2)^2$} (B);
+#! \draw[->] (Bp) -- node[midway, below] {$(\underbrace{\theta_1 + \eta \cdot 2(3-\theta_1)}_{0.98\theta_1 + 0.06}, \underbrace{\theta_2 + \eta \cdot 2(4-\theta_2)}_{0.98\theta_2 + 0.08}) \mapsfrom (\theta_1, \theta_2)$} (Ap);
+#! \draw[-] (-1,1) to[out=-90, in=90] (1,-1);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
+#! Suppose we start with the parameter vector $\Theta = (0,0)$. Then:
+#! - After applying the update lens for $X_1$: $\Theta_1 = (0.98 \cdot 0 + 0.02, 0.98 \cdot 0 + 0.04) = (0.02, 0.04)$.
+#! - After applying the update lens for $X_2$: $\Theta_2 = (0.98 \cdot 0.02 + 0.06, 0.98 \cdot 0.04 + 0.08) = (0.0796, 0.1192)$.
+#! Thus, after one epoch of training, the updated parameters are $\Theta_2 = (0.0796, 0.1192)$.
+#! Repeating this process for multiple epochs will further refine the parameters to minimize the loss function over the training examples.
+#! Eventually, we expect the parameters to converge to $\Theta = [2, 3]$ which minimizes the loss function.
+#! Indeed, the point whose sum of squared distances to $[1, 2]$ and $[3, 4]$ is minimized is the midpoint $[2, 3]$.
+#! See the examples section for the implementation of this process in &GAP;.
+
+
+#! @Section Notes on Batching
+#! Given a parametrised (loss) morphism $(\mathbb{R}^p, f):\mathbb{R}^n \to \mathbb{R}$
+#! and a set of training examples $\{X_1, \ldots, X_m\}$ where each $X_i \in \mathbb{R}^n$.
+#! If the number of training examples $m$ is large, it may be beneficial to use mini-batches during training.
+#! Given a positive integer batch_size, the loss morphism is first batched using Batchify.
+#! This means, we create a new parametrised morphism $(\mathbb{R}^p, f_{batch}):\mathbb{R}^{batch\_size \cdot n} \to \mathbb{R}$
+#! where $f_{batch}(\Theta, X_{i_1}, \ldots, X_{i_{batch\_size}}) = \frac{1}{batch\_size} \sum_{j=1}^{batch\_size} f(\Theta, X_{i_j})$.
+#! We divide the training examples into mini-batches of size batch_size
+#! (padding the list by repeating randomly chosen examples if necessary to make its length divisible by batch_size).
+#! And then we consider each mini-batch as a single training example. Now, we can repeat the training process described above using the
+#! batched loss morphism and the new training examples. For example, if the parametrised morphism is
+#! $(\mathbb{R}^p, f):\mathbb{R}^2 \to \mathbb{R}$
+#! where $f(\theta_1, \theta_2, x_1, x_2) = (x_1-\theta_1)^2 + (x_2-\theta_2)^2$,
+#! and we have training examples $[[1,2], [3,4], [5,6], [7,8], [9,10]]$, then for batch_size = $2$,
+#! the batched loss morphism is $(\mathbb{R}^p, f_{batch}):\mathbb{R}^4 \to \mathbb{R}$
+#! where $f_{batch}(\theta_1, \theta_2, x_1, x_2, x_3, x_4) = \frac{1}{2} \left( (x_1-\theta_1)^2 + (x_2-\theta_2)^2 + (x_3-\theta_1)^2 + (x_4-\theta_2)^2 \right)$
+#! (See Batchify operation).
+#! Since the number of training examples is not divisible by batch_size,
+#! we pad the list by randomly choosing an example (say, $[1,2]$) and appending it to the list.
+#! Then the new training examples set would be $[[1,2,3,4], [5,6,7,8], [9,10,1,2]]$.
+
+#! @Section Operations
+
+#! @Description
+#! Create an update lens for one epoch of training.
+#!
+#! The argument parametrised_morphism must be a morphism in a category of parametrised morphisms
+#! whose target has rank $1$ (a scalar loss).
+#!
+#! The argument optimizer is a function which takes the number of parameters p and returns
+#! an optimizer lens in the category of lenses over Smooth.
+#! Typical examples are Lenses.GradientDescentOptimizer, Lenses.AdamOptimizer, etc.
+#!
+#! The list training_examples must contain at least one example; each example is a dense list
+#! representing a vector in $\mathbb{R}^n$.
+#!
+#! @Arguments parametrised_morphism, optimizer, training_examples, batch_size
+#! @Returns a morphism in a category of lenses (the epoch update lens)
DeclareOperation( "OneEpochUpdateLens", [ IsMorphismInCategoryOfParametrisedMorphisms, IsFunction, IsDenseList, IsPosInt ] );
+
+#! @Description
+#! Same as OneEpochUpdateLens, but reads the training examples from a file.
+#! The file is evaluated using EvalString and is expected to contain a GAP expression
+#! evaluating to a dense list of examples.
+#! @Arguments parametrised_morphism, optimizer, training_examples_path, batch_size
+#! @Returns a morphism in a category of lenses (the epoch update lens)
+DeclareOperation( "OneEpochUpdateLens", [ IsMorphismInCategoryOfParametrisedMorphisms, IsFunction, IsString, IsPosInt ] );
+
+#! @Description
+#! Perform nr_epochs epochs of training using the given one_epoch_update_lens and initial weights initial_weights.
+#!
+#! The lens one_epoch_update_lens must have get-morphism $\mathbb{R}^p \to \mathbb{R}^1$ and
+#! put-morphism $\mathbb{R}^p \to \mathbb{R}^p$ for the same $p$ as the length of initial_weights.
+#! The option verbose controls whether to print the loss at each epoch.
+#! @Arguments one_epoch_update_lens, nr_epochs, initial_weights
+#! @Returns a list of final weights
DeclareOperation( "Fit", [ IsMorphismInCategoryOfLenses, IsPosInt, IsDenseList ] );
diff --git a/gap/FitParameters.gi b/gap/FitParameters.gi
index 8dbff63..72354d5 100644
--- a/gap/FitParameters.gi
+++ b/gap/FitParameters.gi
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Implementations
#
@@ -43,9 +43,9 @@ InstallMethod( OneEpochUpdateLens,
fi;
- parametrised_morphism := AdjustToBatchSize( parametrised_morphism, batch_size );
+ parametrised_morphism := Batchify( parametrised_morphism, batch_size );
- cost := SwitchSourceAndUnderlyingObject( parametrised_morphism );
+ cost := FlipParameterAndSource( parametrised_morphism );
costs :=
List( SplitDenseList( training_examples, batch_size ),
@@ -55,7 +55,7 @@ InstallMethod( OneEpochUpdateLens,
Lenses := CapCategory( optimizer );
- L := EmbeddingIntoCategoryOfLenses( Smooth, Lenses );
+ L := ReverseDifferentialLensFunctor( Smooth, Lenses );
costs := List( costs, cost -> ApplyFunctor( L, cost ) );
@@ -116,11 +116,14 @@ InstallMethod( Fit,
[ IsMorphismInCategoryOfLenses, IsPosInt, IsDenseList ],
function( epoch_lens, n, w )
- local MOD, get, put, get_source, get_target, put_source, put_target, l, l_n, str_i, l_i, spaces, loss, i;
+ local verbose, MOD, get, put, get_source, get_target, put_source, put_target, l, l_n, str_i, l_i, spaces, loss, i;
- MOD := GradientDescentForCAP.MOD;
+ # get the option to print training progress
+ verbose := CAP_INTERNAL_RETURN_OPTION_OR_DEFAULT( "verbose", true );
- GradientDescentForCAP.MOD := "train";
+ MOD := GradientBasedLearningForCAP.MOD;
+
+ GradientBasedLearningForCAP.MOD := "train";
get := GetMorphism( epoch_lens );
put := PutMorphism( epoch_lens );
@@ -139,27 +142,33 @@ InstallMethod( Fit,
l_n := Length( String( n ) );
- Print( "Epoch ", JoinStringsWithSeparator( ListWithIdenticalEntries( l_n - 1, " " ), "" ), "0/", String( n ), " - loss = ", String( get( w )[1] ), "\n" );
+ if verbose then
+ Print( "Epoch ", JoinStringsWithSeparator( ListWithIdenticalEntries( l_n - 1, " " ), "" ), "0/", String( n ), " - loss = ", String( get( w )[1] ), "\n" );
+ fi;
for i in [ 1 .. n ] do
- str_i := String( i );
-
- l_i := Length( str_i );
-
- spaces := JoinStringsWithSeparator( ListWithIdenticalEntries( l_n - l_i, " " ), "" );
+ if verbose then
+
+ l_i := Length( String( i ) );
+
+ spaces := JoinStringsWithSeparator( ListWithIdenticalEntries( l_n - l_i, " " ), "" );
+
+ fi;
w := put( w );
- loss := get( w );
-
- Print( "Epoch ", spaces, String( i ), "/", String( n ), " - loss = ", String( loss[1] ), "\n" );
-
- #Display( w );
+ if verbose then
+
+ loss := get( w );
+
+ Print( "Epoch ", spaces, String( i ), "/", String( n ), " - loss = ", String( loss[1] ), "\n" );
+
+ fi;
od;
- GradientDescentForCAP.MOD := MOD;
+ GradientBasedLearningForCAP.MOD := MOD;
return w;
diff --git a/gap/MethodRecord.Declarations.autogen.gd b/gap/MethodRecord.Declarations.autogen.gd
index bbe0e5f..ca8e562 100644
--- a/gap/MethodRecord.Declarations.autogen.gd
+++ b/gap/MethodRecord.Declarations.autogen.gd
@@ -1,13 +1,13 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Declarations
#
# THIS FILE IS AUTOMATICALLY GENERATED, SEE CAP_project/CAP/gap/MethodRecordTools.gi
-#! @Chapter operations for machine learning in CAP
+#! @Chapter CAP Operations for GradientBasedLearningForCAP
-#! @Section Add-methods
+#! @Section Add-Methods
#! @BeginGroup
#! @Description
diff --git a/gap/MethodRecord.Installations.autogen.gi b/gap/MethodRecord.Installations.autogen.gi
index e913372..9e0614b 100644
--- a/gap/MethodRecord.Installations.autogen.gi
+++ b/gap/MethodRecord.Installations.autogen.gi
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Implementations
#
diff --git a/gap/MethodRecord.gd b/gap/MethodRecord.gd
index 99e661f..8f4a0f6 100644
--- a/gap/MethodRecord.gd
+++ b/gap/MethodRecord.gd
@@ -1,16 +1,36 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Declarations
#
-
+#! @Chapter CAP Operations for GradientBasedLearningForCAP
DeclareGlobalVariable( "CRDC_INTERNAL_METHOD_NAME_RECORD" );
+#! @Section Basic Operations
+
+#! @Description
+#! Compute the pointwise multiplication of two morphisms with the same source and target.
+#! For morphisms $f, g: A \to B$, returns a morphism whose output at each component
+#! is the product of the outputs of $f$ and $g$.
+#! @Arguments alpha, beta
+#! @Returns a morphism
+DeclareOperation( "MultiplicationForMorphisms", [ IsCapCategoryMorphism, IsCapCategoryMorphism ] );
+
+#! @Description
+#! Compute the reverse differential of a morphism alpha with given source and target objects.
+#! For a morphism $f: \mathbb{R}^m \to \mathbb{R}^n$, the reverse differential is a morphism
+#! $Df: \mathbb{R}^m \times \mathbb{R}^n \to \mathbb{R}^m$ that computes $y \cdot J_f(x)$
+#! where $J_f$ is the Jacobian matrix of $f$.
+#! @Arguments source, alpha, range
+#! @Returns a morphism
DeclareOperation( "ReverseDifferentialWithGivenObjects",
[ IsCapCategoryObject, IsCapCategoryMorphism, IsCapCategoryObject ] );
+#! @Description
+#! Compute the reverse differential of a morphism alpha.
+#! This is equivalent to ReverseDifferentialWithGivenObjects with automatically computed source and target.
+#! @Arguments alpha
+#! @Returns a morphism
DeclareAttribute( "ReverseDifferential", IsCapCategoryMorphism );
-
-DeclareOperation( "MultiplicationForMorphisms", [ IsCapCategoryMorphism, IsCapCategoryMorphism ] );
diff --git a/gap/MethodRecord.gi b/gap/MethodRecord.gi
index 56bdf57..c2e3edc 100644
--- a/gap/MethodRecord.gi
+++ b/gap/MethodRecord.gi
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Implementations
#
@@ -65,12 +65,12 @@ CAP_INTERNAL_ENHANCE_NAME_RECORD( CRDC_INTERNAL_METHOD_NAME_RECORD );
CAP_INTERNAL_GENERATE_DECLARATIONS_AND_INSTALLATIONS_FROM_METHOD_NAME_RECORD(
CRDC_INTERNAL_METHOD_NAME_RECORD,
- "GradientDescentForCAP",
+ "GradientBasedLearningForCAP",
"MethodRecord.",
- "operations for machine learning in CAP",
- "Add-methods"
+ "CAP Operations for GradientBasedLearningForCAP",
+ "Add-Methods"
);
-CAP_INTERNAL_REGISTER_METHOD_NAME_RECORD_OF_PACKAGE( CRDC_INTERNAL_METHOD_NAME_RECORD, "GradientDescentForCAP" );
+CAP_INTERNAL_REGISTER_METHOD_NAME_RECORD_OF_PACKAGE( CRDC_INTERNAL_METHOD_NAME_RECORD, "GradientBasedLearningForCAP" );
CAP_INTERNAL_INSTALL_ADDS_FROM_RECORD( CRDC_INTERNAL_METHOD_NAME_RECORD );
diff --git a/gap/NeuralNetworks.gd b/gap/NeuralNetworks.gd
index 7489e42..2d0c9af 100644
--- a/gap/NeuralNetworks.gd
+++ b/gap/NeuralNetworks.gd
@@ -1,11 +1,113 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Declarations
#
+#! @Chapter Neural Networks
+#! @Section Definition
+#! A neural network can be viewed as a composition of parametrised affine transformations
+#! and non-linear activation functions (such as ReLU, Sigmoid, Softmax, etc.).
+#! @BeginLatexOnly
+#! \[
+#! \begin{tikzpicture}[baseline=-2pt]
+#! \node[draw, minimum width=1cm, minimum height=1cm] (F1) {affine-trans};
+#! \node[left=0.75cm of F1] (A1) {$\mathbb{R}^{m_1}$};
+#! \node[right=0.3cm of F1] (A2) {$\mathbb{R}^{m_2}$};
+#! \node[above=0.75cm of F1] (P1) {$\mathbb{R}^{(m_1+1)m_2}$};
+#! \node[draw, minimum width=1cm, minimum height=1cm, right=0.3cm of A2] (F2) {ReLU};
+#! \node[right=0.3cm of F2] (A3) {$\mathbb{R}^{m_2}$};
+#! \node[above=0.75cm of F2] (P2) {$\mathbb{R}^0$};
+#! \node[draw, minimum width=1cm, minimum height=1cm, right=0.3cm of A3] (F3) {affine-trans};
+#! \node[right=0.3cm of F3] (A4) {$\mathbb{R}^{m_3}$};
+#! \node[above=0.75cm of F3] (P3) {$\mathbb{R}^{(m_2+1)m_3}$};
+#! \node[draw, minimum width=1cm, minimum height=1cm, right=0.3cm of A4] (F4) {ReLU};
+#! \node[right=0.3cm of F4] (A5) {$\mathbb{R}^{m_3}$};
+#! \node[above=0.75cm of F4] (P4) {$\mathbb{R}^0$};
+#! \node[right=0.3cm of A5] (F5) {$\cdots$};
+#! \draw[->] (A1) -- (F1);
+#! \draw[->] (F1) -- (A2);
+#! \draw[->] (P1) -- (F1);
+#! \draw[->] (A2) -- (F2);
+#! \draw[->] (F2) -- (A3);
+#! \draw[->] (P2) -- (F2);
+#! \draw[->] (A3) -- (F3);
+#! \draw[->] (F3) -- (A4);
+#! \draw[->] (P3) -- (F3);
+#! \draw[->] (A4) -- (F4);
+#! \draw[->] (F4) -- (A5);
+#! \draw[->] (P4) -- (F4);
+#! \draw[->] (A5) -- (F5);
+#! \end{tikzpicture}
+#! \]
+#! @EndLatexOnly
-DeclareOperation( "LogitsMorphismOfNeuralNetwork", [ IsCategoryOfParametrisedMorphisms, IsPosInt, IsDenseList, IsPosInt ] );
-DeclareOperation( "PredictionMorphismOfNeuralNetwork", [ IsCategoryOfParametrisedMorphisms, IsPosInt, IsDenseList, IsPosInt, IsString ] );
-DeclareOperation( "LossMorphismOfNeuralNetwork", [ IsCategoryOfParametrisedMorphisms, IsPosInt, IsDenseList, IsPosInt, IsString ] );
+
+#! @Section Operations
+
+#! @Description
+#! The arguments are Para, a parametrised morphism category,
+#! s, a positive integer giving the input dimension,
+#! hidden_layers_dims, a list of positive integers giving the sizes of
+#! the hidden layers in order, and t, a positive integer giving the output dimension.
+#! This operation constructs a parametrised morphism that computes the logits (pre-activation outputs)
+#! of a fully-connected feed-forward neural network. The signature of the parametrised morphism is
+#! $\mathbb{R}^s \to \mathbb{R}^t$ and is parameterised by the network weights and biases.
+#! More specifically, the parametrised morphism represents the function that maps an input vector
+#! $x \in \mathbb{R}^s$ and a parameter vector $p \in \mathbb{R}^d$ to the output vector $y \in \mathbb{R}^t$,
+#! where $d$ is the total number of weights and biases in the network defined by the given architecture.
+#! - For a layer with input dimension $m_i$ and output dimension $m_{i+1}$, the parameter object has dimension
+#! $(m_i + 1) \times m_{i+1}$, accounting for both the $m_i \times m_{i+1}$ weights matrix and the $m_{i+1}$ biases.
+#! - Hidden layers use ReLU nonlinearity between linear layers. The final layer
+#! is linear (no activation) so the returned morphism produces logits suitable
+#! for subsequent application of a loss or classification activation.
+#! @Arguments Para, s, hidden_layers_dims, t
+#! @Returns a parametrised morphism
+DeclareOperation( "NeuralNetworkLogitsMorphism", [ IsCategoryOfParametrisedMorphisms, IsPosInt, IsDenseList, IsPosInt ] );
+
+#! @Description
+#! It composes the logits morphism with the specified activation function to create a
+#! parametrised morphism representing the predictions of a neural network.
+#! The network has the architecture specified by s, hidden_layers_dims, and t,
+#! i.e., the source and target of the parametrised morphism are $\mathbb{R}^{s}$ and $\mathbb{R}^{t}$, respectively.
+#! The activation determines the final activation function:
+#! * $\mathbf{Softmax}$: applies the softmax activation to turn logits into probabilities for multi-class classification.
+#! * $\mathbf{Sigmoid}$: applies the sigmoid activation to turn logits into probabilities for binary classification.
+#! * $\mathbf{IdFunc}$: applies the identity function (no activation) for regression tasks.
+#! @Arguments Para, s, hidden_layers_dims, t, activation
+#! @Returns a parametrised morphism
+DeclareOperation( "NeuralNetworkPredictionMorphism", [ IsCategoryOfParametrisedMorphisms, IsPosInt, IsDenseList, IsPosInt, IsString ] );
+
+#! @Description
+#! Construct a parametrised morphism representing the training loss of a fully-connected
+#! feed-forward neural network with architecture given by s, hidden_layers_dims
+#! and t. The returned parametrised morphism is parameterised by the network
+#! weights and biases and maps a pair (input, target) to a scalar loss:
+#! its source is $\mathbb{R}^s \times \mathbb{R}^t$ (an input vector $x$ and a target vector $y$)
+#! and its target is $\mathbb{R}$ (the scalar loss).
+#!
+#! The behaviour of the loss depends on the activation argument:
+#! - $\mathbf{Softmax}$:
+#! * Used for multi-class classification.
+#! * Softmax is applied to the logits to convert them into a probability distribution.
+#! * The loss is the (negative) cross-entropy between the predicted probabilities and the target distribution.
+#! * Targets y may be one-hot vectors or probability distributions over classes.
+#! - $\mathbf{Sigmoid}$:
+#! * Used for binary classification. Requires $t = 1$.
+#! * Applies the logistic sigmoid to the single logit to obtain a probability $\hat{y}$ in $[0,1]$.
+#! * The loss is binary cross-entropy: $\mathrm{loss} = - ( y\log(\hat{y}) + (1-y)\log(1-\hat{y}) )$.
+#! - $\mathbf{IdFunc}$:
+#! * Used for regression.
+#! * No final activation is applied. The loss is the mean squared error (MSE).
+#!
+#! @Arguments Para, s, hidden_layers_dims, t, activation
+#! @Returns a parametrised morphism
+DeclareOperation( "NeuralNetworkLossMorphism", [ IsCategoryOfParametrisedMorphisms, IsPosInt, IsDenseList, IsPosInt, IsString ] );
+
+
+##
+DeclareGlobalFunction( "DummyInputStringsForNeuralNetwork" );
+
+##
+DeclareGlobalFunction( "DummyInputForNeuralNetwork" );
diff --git a/gap/NeuralNetworks.gi b/gap/NeuralNetworks.gi
index c312082..dee39c2 100644
--- a/gap/NeuralNetworks.gi
+++ b/gap/NeuralNetworks.gi
@@ -1,9 +1,9 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Implementations
#
-InstallMethod( LogitsMorphismOfNeuralNetwork,
+InstallMethod( NeuralNetworkLogitsMorphism,
[ IsCategoryOfParametrisedMorphisms, IsPosInt, IsDenseList, IsPosInt ],
function ( Para, input_layer_dim, hidden_layers_dims, output_layer_dim )
@@ -38,13 +38,13 @@ InstallMethod( LogitsMorphismOfNeuralNetwork,
end );
##
-InstallMethod( PredictionMorphismOfNeuralNetwork,
+InstallMethod( NeuralNetworkPredictionMorphism,
[ IsCategoryOfParametrisedMorphisms, IsPosInt, IsDenseList, IsPosInt, IsString ],
function ( Para, input_layer_dim, hidden_layers_dims, output_layer_dim, activation )
local logits;
- logits := LogitsMorphismOfNeuralNetwork( Para, input_layer_dim, hidden_layers_dims, output_layer_dim );
+ logits := NeuralNetworkLogitsMorphism( Para, input_layer_dim, hidden_layers_dims, output_layer_dim );
if not activation in [ "Softmax", "Sigmoid", "IdFunc" ] then
Error( "unrecognized activation functions!\n" );
@@ -55,7 +55,7 @@ InstallMethod( PredictionMorphismOfNeuralNetwork,
end );
##
-InstallMethod( LossMorphismOfNeuralNetwork,
+InstallMethod( NeuralNetworkLossMorphism,
[ IsCategoryOfParametrisedMorphisms, IsPosInt, IsDenseList, IsPosInt, IsString ],
function ( Para, input_layer_dim, hidden_layers_dims, output_layer_dim, activation )
@@ -63,7 +63,7 @@ InstallMethod( LossMorphismOfNeuralNetwork,
Smooth := UnderlyingCategory( Para );
- logits := LogitsMorphismOfNeuralNetwork( Para, input_layer_dim, hidden_layers_dims, output_layer_dim );
+ logits := NeuralNetworkLogitsMorphism( Para, input_layer_dim, hidden_layers_dims, output_layer_dim );
paramter_obj := UnderlyingObject( logits );
@@ -104,3 +104,30 @@ InstallMethod( LossMorphismOfNeuralNetwork,
ObjectConstructor( Para, Smooth.( 1 ) ) );
end );
+
+
+##
+InstallGlobalFunction( DummyInputStringsForNeuralNetwork,
+
+ function ( input_layer_dim, hidden_layers_dims, output_layer_dim )
+ local Smooth, dims, N, weights_strings, input_strings;
+
+ dims := Concatenation( [ input_layer_dim ], hidden_layers_dims, [ output_layer_dim ] );
+
+ N := Length( dims );
+
+ weights_strings := List( [ 1 .. N - 1 ], i -> DummyInputStringsForAffineTransformation( dims[i], dims[i + 1], Concatenation( "w", String( i ), "_" ), Concatenation( "b", String( i ) ) ){ [ 1 .. ( dims[i] + 1 ) * dims[i + 1] ] } );
+
+ input_strings := List( [ 1 .. input_layer_dim ], j -> Concatenation( "z", String( j ) ) );
+
+ return Concatenation( Concatenation( Reversed( weights_strings ) ), input_strings );
+
+end );
+
+##
+InstallGlobalFunction( DummyInputForNeuralNetwork,
+ function ( input_layer_dim, hidden_layers_dims, output_layer_dim )
+
+ return CreateContextualVariables( DummyInputStringsForNeuralNetwork( input_layer_dim, hidden_layers_dims, output_layer_dim ) );
+
+end );
diff --git a/gap/SkeletalCategoryOfSmoothMaps.autogen.gd b/gap/SkeletalCategoryOfSmoothMaps.autogen.gd
new file mode 100644
index 0000000..6034837
--- /dev/null
+++ b/gap/SkeletalCategoryOfSmoothMaps.autogen.gd
@@ -0,0 +1,106 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
+#
+# Declarations
+#
+# THIS FILE IS AUTOMATICALLY GENERATED, SEE CAP_project/CAP/gap/MethodRecord.gi
+
+#! @Chapter Skeletal Category of Smooth Maps
+
+#! @Section Supported CAP Operations
+
+#! @Subsection SkeletalSmoothMaps
+
+#! The following CAP operations are supported:
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
+#! *
diff --git a/gap/SkeletalCategoryOfSmoothMaps.gd b/gap/SkeletalCategoryOfSmoothMaps.gd
new file mode 100644
index 0000000..7d904d0
--- /dev/null
+++ b/gap/SkeletalCategoryOfSmoothMaps.gd
@@ -0,0 +1,258 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
+#
+# Declarations
+#
+
+#! @Chapter Skeletal Category of Smooth Maps
+
+#! @Section GAP Categories
+
+#! @Description
+#! The &GAP; category of a category of skeletal smooth maps.
+DeclareCategory( "IsSkeletalCategoryOfSmoothMaps",
+ IsCapCategory );
+
+#! @Description
+#! The &GAP; category of objects in a category of skeletal smooth maps.
+DeclareCategory( "IsObjectInSkeletalCategoryOfSmoothMaps",
+ IsCapCategoryObject );
+
+#! @Description
+#! The &GAP; category of morphisms in a category of skeletal smooth maps.
+DeclareCategory( "IsMorphismInSkeletalCategoryOfSmoothMaps",
+ IsCapCategoryMorphism );
+
+#! @Section Constructors
+
+#! @Description
+#! Construct the category of skeletal smooth maps.
+#! Objects in this category are the Euclidean spaces $\mathbb{R}^n$ for non-negative integers $n$.
+#! Morphisms are smooth maps between these spaces, represented by a map function and its Jacobian matrix.
+#! @Returns a category
+DeclareGlobalFunction( "SkeletalCategoryOfSmoothMaps" );
+
+if false then
+
+#! @Description
+#! Construct the object representing the Euclidean space $\mathbb{R}^{n}$ in the category of skeletal smooth maps.
+#! Points in Euclidean spaces are represented as row vectors.
+#! @Arguments Smooth, n
+#! @Returns an object in the category of skeletal smooth maps
+DeclareOperation( "ObjectConstructor",
+ [ IsSkeletalCategoryOfSmoothMaps, IsInt ] );
+
+#! @Description
+#! Construct a smooth morphism from source to target using the given datum.
+#! The datum should be a pair [ map_func, jacobian_func ] where:
+#!
+#! - map_func is a function that takes a list of $m$ elements (where $m$ is the rank of source)
+#! and returns a list of $n$ elements (where $n$ is the rank of target).
+#! This represents the smooth map $f: \mathbb{R}^m \to \mathbb{R}^n$,
+#! $[x_1, x_2, \ldots, x_m] \mapsto [f_1(x_1, \ldots, x_m), f_2(x_1, \ldots, x_m), \ldots, f_n(x_1, \ldots, x_m)]$.
+#!
+#! - jacobian_func is a function that takes a list of $m$ elements
+#! and returns an $n \times m$ matrix (represented as a list of $n$ lists, each containing $m$ elements).
+#! This matrix represents the Jacobian $Df(x)$ at point $x$, where entry $(i,j)$ is $\frac{\partial f_i}{\partial x_j}(x)$.
+#!
+#! @Arguments Smooth, source, datum, target
+#! @Returns a morphism in the category of skeletal smooth maps
+DeclareOperation( "MorphismConstructor",
+ [ IsSkeletalCategoryOfSmoothMaps,
+ IsObjectInSkeletalCategoryOfSmoothMaps,
+ IsDenseList,
+ IsObjectInSkeletalCategoryOfSmoothMaps ] );
+fi;
+
+#! @Description
+#! Delegates to MorphismConstructor to create a smooth morphism.
+#! @Arguments Smooth, source, datum, target
+#! @Returns a morphism in the category of skeletal smooth maps
+DeclareOperation( "SmoothMap",
+ [ IsSkeletalCategoryOfSmoothMaps, IsObjectInSkeletalCategoryOfSmoothMaps, IsDenseList, IsObjectInSkeletalCategoryOfSmoothMaps ] );
+
+#! @Section Attributes
+
+#! @Description
+#! The rank (dimension) of the Euclidean space represented by the object obj.
+#! For an object representing $\mathbb{R}^n$, this returns $n$.
+#! @Arguments obj
+#! @Returns a non-negative integer
+DeclareAttribute( "RankOfObject", IsObjectInSkeletalCategoryOfSmoothMaps );
+
+#! @Description
+#! The underlying map function of a smooth morphism f.
+#! For a morphism $f: \mathbb{R}^m \to \mathbb{R}^n$, this is a function that takes a list of $m$ elements
+#! and returns a list of $n$ elements.
+#! @Arguments f
+#! @Returns a function
+DeclareAttribute( "Map", IsMorphismInSkeletalCategoryOfSmoothMaps );
+
+#! @Description
+#! The Jacobian matrix function of a smooth morphism f.
+#! For a morphism $f: \mathbb{R}^m \to \mathbb{R}^n$, this is a function that takes a list of $m$ elements
+#! and returns an $n \times m$ matrix representing the partial derivatives.
+#! @Arguments f
+#! @Returns a function
+DeclareAttribute( "JacobianMatrix", IsMorphismInSkeletalCategoryOfSmoothMaps );
+
+#! @Section Operations
+
+#! @Description
+#! Evaluate the smooth morphism f at the point x.
+#! @Arguments f, x
+#! @Returns a dense list
+DeclareOperation( "Eval", [ IsMorphismInSkeletalCategoryOfSmoothMaps, IsDenseList ] );
+
+#! @Description
+#! Evaluate the Jacobian matrix of the smooth morphism f at the point x.
+#! @Arguments f, x
+#! @Returns a matrix (list of lists)
+DeclareOperation( "EvalJacobianMatrix", [ IsMorphismInSkeletalCategoryOfSmoothMaps, IsDenseList ] );
+
+DeclareGlobalVariable( "GradientBasedLearningForCAP" );
+
+#! @Section Available Smooth Maps
+
+#! @BeginLatexOnly
+#! The category of skeletal smooth maps offers several pre-implemented helper functions:
+#! \begin{itemize}
+#! \item \textbf{Sqrt}: The square root function. Returns the square root map $\mathbb{R}^1 \to \mathbb{R}^1$,
+#! given by $x \to \sqrt{x}$. Its Jacobian matrix is given by $\frac{1}{2 \sqrt{x}}$.
+#! \item \textbf{Exp}: The exponential function. Returns the map $\mathbb{R}^1 \to \mathbb{R}^1$ given by $x \mapsto e^x$.
+#! Its Jacobian matrix is $e^x$.
+#! \item \textbf{Log}: The natural logarithm function. Returns the map $\mathbb{R}^1 \to \mathbb{R}^1$ given by $x \mapsto \log(x)$.
+#! Its Jacobian matrix is $\frac{1}{x}$.
+#! \item \textbf{Sin}: The sine function. Returns the map $\mathbb{R}^1 \to \mathbb{R}^1$ given by $x \mapsto \sin(x)$.
+#! Its Jacobian matrix is $\cos(x)$.
+#! \item \textbf{Cos}: The cosine function. Returns the map $\mathbb{R}^1 \to \mathbb{R}^1$ given by $x \mapsto \cos(x)$.
+#! Its Jacobian matrix is $-\sin(x)$.
+#! \item \textbf{Constant(rank\_s, l)} or \textbf{Constant(l)}: Returns a constant morphism $\mathbb{R}^{\text{rank\_s}} \to \mathbb{R}^m$
+#! that maps any input to the fixed list $l$ (where $m = |l|$). If rank\_s is omitted, it defaults to 0.
+#! \item \textbf{Zero(s, t)}: Returns the zero morphism $\mathbb{R}^s \to \mathbb{R}^t$.
+#! \item \textbf{IdFunc(n)}: Returns the identity morphism $\mathbb{R}^n \to \mathbb{R}^n$.
+#! \item \textbf{Relu(n)}: Returns the rectified linear unit function $\mathbb{R}^n \to \mathbb{R}^n$ applied componentwise,
+#! given by $x_i \mapsto \max(0, x_i)$.
+#! \item \textbf{Sigmoid(n)}: Returns the sigmoid activation function $\mathbb{R}^n \to \mathbb{R}^n$ applied componentwise,
+#! given by $x_i \mapsto \frac{1}{1 + e^{-x_i}}$.
+#! \item \textbf{Softmax(n)}: Returns the softmax function $\mathbb{R}^n \to \mathbb{R}^n$ given by
+#! $x_i \mapsto \frac{e^{x_i}}{\sum_{j=1}^n e^{x_j}}$.
+#! \item \textbf{Sum(n)}: Returns the sum function $\mathbb{R}^n \to \mathbb{R}^1$ given by $(x_1, \ldots, x_n) \mapsto \sum_{i=1}^n x_i$.
+#! \item \textbf{Mean(n)}: Returns the mean function $\mathbb{R}^n \to \mathbb{R}^1$ given by $(x_1, \ldots, x_n) \mapsto \frac{1}{n}\sum_{i=1}^n x_i$.
+#! \item \textbf{Variance(n)}: Returns the variance function $\mathbb{R}^n \to \mathbb{R}^1$ given by
+#! $(x_1, \ldots, x_n) \mapsto \frac{1}{n}\sum_{i=1}^n (x_i - \mu)^2$ where $\mu$ is the mean.
+#! \item \textbf{StandardDeviation(n)}: Returns the standard deviation function $\mathbb{R}^n \to \mathbb{R}^1$
+#! given by the square root of the variance.
+#! \item \textbf{Mul(n)}: Returns the multiplication function $\mathbb{R}^n \to \mathbb{R}^1$ given by
+#! $(x_1, \ldots, x_n) \mapsto \prod_{i=1}^n x_i$.
+#! \item \textbf{Power(n)}: Returns the power function $\mathbb{R}^1 \to \mathbb{R}^1$ given by $x \mapsto x^n$.
+#! Its Jacobian matrix is $n \cdot x^{n-1}$.
+#! \item \textbf{PowerBase(n)}: Returns the exponential function with base $n$, i.e., $\mathbb{R}^1 \to \mathbb{R}^1$
+#! given by $x \mapsto n^x$. Its Jacobian matrix is $\log(n) \cdot n^x$.
+#! \item \textbf{QuadraticLoss(n)}: Returns the quadratic loss function $\mathbb{R}^{2n} \to \mathbb{R}^1$
+#! given by $(\hat{y}_1, \ldots, \hat{y}_n, y_1, \ldots, y_n) \mapsto \frac{1}{n}\sum_{i=1}^n (\hat{y}_i-y_i)^2$,
+#! where the first $n$ components are the predicted values and the last $n$ components are the ground truth values.
+#! \item \textbf{BinaryCrossEntropyLoss()}: Returns the binary cross-entropy loss function $\mathbb{R}^2 \to \mathbb{R}^1$
+#! (requires $n=1$) given by $(\hat{y}, y) \mapsto -(y \log(\hat{y}) + (1-y) \log(1-\hat{y}))$, where $\hat{y}$ is the predicted probability
+#! and $y$ is the ground truth label.
+#! \item \textbf{SigmoidBinaryCrossEntropyLoss()}: Returns the loss obtained by
+#! precomposing the binary cross-entropy loss with a functorial direct product of
+#! the sigmoid function (applied to the predicted logit) and the identity (applied
+#! to the ground-truth label). Formally, this is implemented as
+#! \[
+#! \text{SigmoidBinaryCrossEntropyLoss()}
+#! \;=\;
+#! \bigl( \,\sigma \times \mathrm{Id}\, \bigr)
+#! \cdot
+#! \text{BinaryCrossEntropyLoss}
+#! \]
+#! This corresponds to applying a sigmoid to the prediction component while leaving
+#! the label component unchanged, followed by a numerically stable binary
+#! cross-entropy evaluation. In particular,
+#! $(\hat{y}, y) \mapsto \log( 1 + e^{ -\hat{y} } ) + ( 1 - y ) \hat{y}$.
+#! \item \textbf{CrossEntropyLoss(n)}: Returns the cross-entropy loss function $\mathbb{R}^{2n} \to \mathbb{R}^1$
+#! given by $(\hat{y}_1, \ldots, \hat{y}_n, y_1, \ldots, y_n) \mapsto -\frac{1}{n}\sum_{i=1}^n y_i \log(\hat{y}_i)$, where
+#! the first $n$ components are predicted probabilities and the last $n$ components are ground truth labels.
+#! \item \textbf{SoftmaxCrossEntropyLoss(n)}: Returns the loss obtained by
+#! applying a softmax transformation to the predicted logits and then evaluating
+#! the multi-class cross-entropy with respect to the ground-truth labels.
+#! Formally, the construction proceeds categorically as follows:
+#! \begin{itemize}
+#! \item The predicted logits are extracted using the projection
+#! $p_1 : \mathbb{R}^n \times \mathbb{R}^n \to \mathbb{R}^n$.
+#! \item These logits are mapped to class probabilities via the softmax
+#! morphism $\mathrm{Softmax}_n$, yielding
+#! $p_1 \cdot \mathrm{Softmax}_n$.
+#! \item The ground-truth label vector is extracted by the second projection
+#! $p_2 : \mathbb{R}^n \times \mathbb{R}^n \to \mathbb{R}^n$.
+#! \item The softmax-transformed predictions and the ground-truth vector are
+#! recombined using the universal morphism into the direct product,
+#! forming $(\,p_1 \cdot \mathrm{Softmax}_n,\; p_2\,)$.
+#! \item Finally, this pair is composed with the multi-class cross-entropy loss
+#! $\mathrm{CrossEntropyLoss}_n$.
+#! \end{itemize}
+#! Altogether, the resulting morphism is
+#! \[
+#! \text{SoftmaxCrossEntropyLoss}(n)
+#! \;=\;
+#! \bigl(\,p_1 \cdot \mathrm{Softmax}_n,\; p_2\,\bigr)
+#! \cdot
+#! \text{CrossEntropyLoss}_n.
+#! \]
+#! \item \textbf{AffineTransformation(m, n)}: Returns an affine transformation
+#! $\mathbb{R}^{(m+1)n+m} \to \mathbb{R}^n$ implementing the standard linear layer
+#! operation $z W + b$, where $z \in \mathbb{R}^{1 \times m}$ is the logits row vector,
+#! $W \in \mathbb{R}^{m \times n}$ is the weight matrix, and $b \in \mathbb{R}^{1 \times n}$
+#! is the bias row vector. The input to this morphism consists of $(m+1)n + m$ components
+#! structured as follows:
+#! \begin{itemize}
+#! \item The first $(m+1)n$ components encode the weight matrix $W$ and bias vector $b$
+#! by concatenating the columns of the augmented matrix
+#! $\begin{pmatrix} W \\ b \end{pmatrix} \in \mathbb{R}^{(m+1) \times n}$.
+#! Explicitly, for each output dimension $i \in \{1,\ldots,n\}$, we store
+#! the $i$-th column $(w_{1,i}, w_{2,i}, \ldots, w_{m,i}, b_i)^T$.
+#! \item The last $m$ components represent the logits $z = (z_1, \ldots, z_m)$ to be transformed.
+#! Usually, these correspond to the activations from the previous layer in a neural network.
+#! \end{itemize}
+#! The output is the $n$-dimensional vector
+#! \[
+#! \begin{pmatrix} z & 1 \end{pmatrix} \cdot \begin{pmatrix} W \\ b \end{pmatrix} = z\cdot W + b
+#! \]
+#! whose $i$-th component is given by
+#! \[
+#! \sum_{j=1}^m w_{j,i} \, z_j \;+\; b_i.
+#! \]
+#! For example, with $m=2$ and $n=3$, the input structure is
+#! $$(w_{1,1}, w_{2,1}, b_1, w_{1,2}, w_{2,2}, b_2, w_{1,3}, w_{2,3}, b_3, z_1, z_2) \in \mathbb{R}^{(2+1)3 + 2} = \mathbb{R}^{11},$$
+#! and the output is
+#! $\begin{pmatrix} z_1 & z_2 \end{pmatrix} \cdot \begin{pmatrix} w_{1,1} & w_{1,2} & w_{1,3} \\ w_{2,1} & w_{2,2} & w_{2,3} \end{pmatrix} + \begin{pmatrix} b_1 & b_2 & b_3 \end{pmatrix}$,
+#! which compiles to
+#! \[
+#! (w_{1,1} z_1 + w_{2,1} z_2 + b_1,\;
+#! w_{1,2} z_1 + w_{2,2} z_2 + b_2,\;
+#! w_{1,3} z_1 + w_{2,3} z_2 + b_3) \in \mathbb{R}^3.
+#! \]
+#! \end{itemize}
+#! @EndLatexOnly
+
+#! @Description
+#! List the names of available skeletal smooth maps.
+#! @Returns a list of strings
+#! @Arguments Smooth
+DeclareGlobalFunction( "AvailableSkeletalSmoothMaps" );
+
+DeclareGlobalFunction( "DummyInputStringsForAffineTransformation" );
+
+#! @Description
+#! Generate dummy input expressions for an affine transformation with $m$ inputs and $n$ outputs.
+#! Its length is $(m+1)n + m$.
+#! Additional arguments specify the weight string, bias string, and a logits string.
+#! For example, for $m=2$ and $n=3$, with weight string "w", bias string "b", and logits string "z", the output is
+#! [ w1_1, w2_1, b_1, w1_2, w2_2, b_2, w1_3, w2_3, b_3, z1, z2 ].
+#! @Arguments m, n, weight_str, bias_str, logits_str
+#! @Returns a list of expressions
+DeclareGlobalFunction( "DummyInputForAffineTransformation" );
+
+DeclareGlobalFunction( "DummyInputStringsForPolynomialTransformation" );
+
+DeclareGlobalFunction( "DummyInputForPolynomialTransformation" );
diff --git a/gap/CategoryOfSkeletalSmoothMaps.gi b/gap/SkeletalCategoryOfSmoothMaps.gi
similarity index 89%
rename from gap/CategoryOfSkeletalSmoothMaps.gi
rename to gap/SkeletalCategoryOfSmoothMaps.gi
index 5c99e40..d95dd6d 100644
--- a/gap/CategoryOfSkeletalSmoothMaps.gi
+++ b/gap/SkeletalCategoryOfSmoothMaps.gi
@@ -1,15 +1,15 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Implementations
#
##
-InstallValue( GradientDescentForCAP,
+InstallValue( GradientBasedLearningForCAP,
rec( MOD := "basic" ) ); # or train
##
-InstallGlobalFunction( CategoryOfSkeletalSmoothMaps,
+InstallGlobalFunction( SkeletalCategoryOfSmoothMaps,
function ( )
local name, Smooth, reals;
@@ -17,14 +17,14 @@ InstallGlobalFunction( CategoryOfSkeletalSmoothMaps,
name := "SkeletalSmoothMaps";
Smooth := CreateCapCategory( name,
- IsCategoryOfSkeletalSmoothMaps,
- IsObjectInCategoryOfSkeletalSmoothMaps,
- IsMorphismInCategoryOfSkeletalSmoothMaps,
+ IsSkeletalCategoryOfSmoothMaps,
+ IsObjectInSkeletalCategoryOfSmoothMaps,
+ IsMorphismInSkeletalCategoryOfSmoothMaps,
IsCapCategoryTwoCell
: overhead := false
);
- Smooth!.is_computable := false;
+ #Smooth!.is_computable := false;
SetIsCartesianCategory( Smooth, true );
SetIsStrictMonoidalCategory( Smooth, true );
@@ -114,6 +114,47 @@ InstallGlobalFunction( CategoryOfSkeletalSmoothMaps,
end );
+ ##
+ AddIsCongruentForMorphisms( Smooth,
+
+ function ( Smooth, f, g )
+ local rank_S, 1000_random_inputs, compare_maps, compare_jacobian_matrices;
+
+ # DisplayString operation applies both maps on dummy inputs
+ if DisplayString( f ) = DisplayString( g ) then
+
+ return true;
+
+ else
+
+ # If the DisplayString check fails, we do numerical tests
+ rank_S := RankOfObject( Source( f ) );
+
+ 1000_random_inputs := List( [ 1 .. 1000 ], i -> List( [ 1 .. rank_S ], j -> 0.01 * Random( [ 1 .. 100 ] ) ) );
+
+ compare_maps :=
+ ForAll( 1000_random_inputs, x -> ForAll( ListN( Eval( f, x ), Eval( g, x ), { a, b } -> AbsoluteValue(a - b) < 1.e-6 ), IdFunc ) );
+
+ compare_jacobian_matrices :=
+ ForAll( 1000_random_inputs, x -> ForAll( ListN( EvalJacobianMatrix( f, x ), EvalJacobianMatrix( g, x ), { a, b } -> AbsoluteValue( Sum(a - b) ) < 1.e-6 ), IdFunc ) );
+
+ Info( InfoWarning, 2, "Based on numerical tests with 1000 random inputs and error tolerance 1.e-6, the output seems to be:" );
+
+ return compare_maps and compare_jacobian_matrices;
+
+ fi;
+
+ end );
+
+ ##
+ AddIsEqualForMorphisms( Smooth,
+
+ function ( Smooth, f, g )
+
+ return DisplayString( f ) = DisplayString( g );
+
+ end );
+
##
AddIdentityMorphism( Smooth,
@@ -651,7 +692,7 @@ InstallGlobalFunction( CategoryOfSkeletalSmoothMaps,
1, [ x{[ rank_S + 1 .. rank_S + rank_T ]} ], rank_T,
rank_T, JacobianMatrix( f )( x{[ 1 .. rank_S ]} ), rank_S )[1];
- return SmoothMorphism( Smooth, source, map, target, false );
+ return SmoothMap( Smooth, source, map, target, false );
end );
@@ -706,42 +747,11 @@ InstallGlobalFunction( CategoryOfSkeletalSmoothMaps,
end );
##
-BindGlobal( "SkeletalSmoothMaps", CategoryOfSkeletalSmoothMaps( ) );
-
-##
-InstallOtherMethod( IsCongruentForMorphisms,
- [ IsCategoryOfSkeletalSmoothMaps, IsMorphismInCategoryOfSkeletalSmoothMaps, IsMorphismInCategoryOfSkeletalSmoothMaps ],
-
- function ( Smooth, f, g )
- local rank_S, 100_random_inputs, compare_maps, compare_jacobian_matrices;
-
- rank_S := RankOfObject( Source( f ) );
-
- 100_random_inputs := List( [ 1 .. 100 ], i -> List( [ 1 .. rank_S ], j -> 0.001 * Random( [ 1 .. 100 ] ) ) );
-
- compare_maps :=
- ForAll( 100_random_inputs, x -> ForAll( ListN( Eval( f, x ), Eval( g, x ), { a, b } -> (a - b) - 1.e-10 < 0. ), IdFunc ) );
-
- compare_jacobian_matrices :=
- ForAll( 100_random_inputs, x -> ForAll( ListN( EvalJacobianMatrix( f, x ), EvalJacobianMatrix( g, x ), { a, b } -> Sum( a - b ) - 1.e-10 < 0. ), IdFunc ) );
-
- return compare_maps and compare_jacobian_matrices;
-
-end );
-
-##
-InstallOtherMethod( IsEqualForMorphisms,
- [ IsCategoryOfSkeletalSmoothMaps, IsMorphismInCategoryOfSkeletalSmoothMaps, IsMorphismInCategoryOfSkeletalSmoothMaps ],
-
- function ( Smooth, f, g )
-
- return Map( f ) = Map( g ) and JacobianMatrix( f ) = JacobianMatrix( g );
-
-end );
+BindGlobal( "SkeletalSmoothMaps", SkeletalCategoryOfSmoothMaps( ) );
##
InstallMethod( Eval,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps, IsDenseList ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps, IsDenseList ],
function ( f, x )
local y;
@@ -758,7 +768,7 @@ end );
##
InstallOtherMethod( CallFuncList,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps, IsDenseList ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps, IsDenseList ],
function ( f, L )
@@ -768,14 +778,14 @@ end );
##
InstallOtherMethod( Eval,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps ],
f -> Eval( f, DummyInput( f ) )
);
##
InstallMethod( EvalJacobianMatrix,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps, IsDenseList ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps, IsDenseList ],
function ( f, x )
@@ -787,14 +797,15 @@ end );
##
InstallOtherMethod( EvalJacobianMatrix,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps ],
f -> EvalJacobianMatrix( f, DummyInput( f ) )
);
##
-InstallMethod( SmoothMorphism,
- [ IsCategoryOfSkeletalSmoothMaps, IsObjectInCategoryOfSkeletalSmoothMaps, IsDenseList, IsObjectInCategoryOfSkeletalSmoothMaps ],
+InstallMethod( SmoothMap,
+ "datum as in MorphismConstructor",
+ [ IsSkeletalCategoryOfSmoothMaps, IsObjectInSkeletalCategoryOfSmoothMaps, IsDenseList, IsObjectInSkeletalCategoryOfSmoothMaps ],
function ( Smooth, S, datum, T )
@@ -803,8 +814,9 @@ InstallMethod( SmoothMorphism,
end );
##
-InstallOtherMethod( SmoothMorphism,
- [ IsCategoryOfSkeletalSmoothMaps, IsObjectInCategoryOfSkeletalSmoothMaps, IsDenseList, IsObjectInCategoryOfSkeletalSmoothMaps, IsBool ],
+InstallOtherMethod( SmoothMap,
+ "datum is a list of strings in variables x1, x2, ...; and a boolean use_python",
+ [ IsSkeletalCategoryOfSmoothMaps, IsObjectInSkeletalCategoryOfSmoothMaps, IsDenseList, IsObjectInSkeletalCategoryOfSmoothMaps, IsBool ],
function ( Smooth, S, maps, T, use_python )
local rank_S, rank_T, vars, jacobian_matrix, map;
@@ -818,7 +830,7 @@ InstallOtherMethod( SmoothMorphism,
Assert( 0, Length( maps ) = rank_T );
- vars := List( [ 1 .. rank_S ], i -> Concatenation( "x", String( i ) ) );
+ vars := DummyInputStrings( "x", rank_S );
if use_python then
@@ -837,8 +849,9 @@ InstallOtherMethod( SmoothMorphism,
end );
##
-InstallOtherMethod( SmoothMorphism,
- [ IsCategoryOfSkeletalSmoothMaps, IsObjectInCategoryOfSkeletalSmoothMaps, IsDenseList, IsObjectInCategoryOfSkeletalSmoothMaps ],
+InstallOtherMethod( SmoothMap,
+ "datum is a list of strings in variables x1, x2, ...",
+ [ IsSkeletalCategoryOfSmoothMaps, IsObjectInSkeletalCategoryOfSmoothMaps, IsDenseList, IsObjectInSkeletalCategoryOfSmoothMaps ],
function ( Smooth, S, maps, T )
@@ -846,13 +859,16 @@ InstallOtherMethod( SmoothMorphism,
TryNextMethod( );
fi;
- return SmoothMorphism( Smooth, S, maps, T, false );
+ Assert( 0, Length( maps ) = RankOfObject( T ) );
+
+ return SmoothMap( Smooth, S, maps, T, false );
end );
##
-InstallOtherMethod( SmoothMorphism,
- [ IsCategoryOfSkeletalSmoothMaps, IsObjectInCategoryOfSkeletalSmoothMaps, IsFunction, IsObjectInCategoryOfSkeletalSmoothMaps, IsBool ],
+InstallOtherMethod( SmoothMap,
+ "datum is a function and a boolean use_python",
+ [ IsSkeletalCategoryOfSmoothMaps, IsObjectInSkeletalCategoryOfSmoothMaps, IsFunction, IsObjectInSkeletalCategoryOfSmoothMaps, IsBool ],
function ( Smooth, S, map, T, use_python )
local rank_S, rank_T, vars, jacobian_matrix;
@@ -860,8 +876,10 @@ InstallOtherMethod( SmoothMorphism,
rank_S := RankOfObject( S );
rank_T := RankOfObject( T );
- vars := List( [ 1 .. rank_S ], i -> Concatenation( "x", String( i ) ) );
+ vars := DummyInputStrings( "x", rank_S );
+ # Remark: The map takes a row-vector and returns a row-vector, e.g., map := vec -> [ vec[1]^2 + vec[2], Sin( vec[1] ) ]
+ #
if use_python then
jacobian_matrix := JacobianMatrix( vars, map, [ 1 .. rank_S ] );
@@ -877,18 +895,19 @@ InstallOtherMethod( SmoothMorphism,
end );
##
-InstallOtherMethod( SmoothMorphism,
- [ IsCategoryOfSkeletalSmoothMaps, IsObjectInCategoryOfSkeletalSmoothMaps, IsFunction, IsObjectInCategoryOfSkeletalSmoothMaps ],
+InstallOtherMethod( SmoothMap,
+ "datum is a function",
+ [ IsSkeletalCategoryOfSmoothMaps, IsObjectInSkeletalCategoryOfSmoothMaps, IsFunction, IsObjectInSkeletalCategoryOfSmoothMaps ],
function ( Smooth, S, map, T )
- return SmoothMorphism( Smooth, S, map, T, true );
+ return SmoothMap( Smooth, S, map, T, true );
end );
##
-InstallOtherMethod( SmoothMorphism,
- [ IsCategoryOfSkeletalSmoothMaps, IsObjectInCategoryOfSkeletalSmoothMaps, IsDenseList, IsObjectInCategoryOfSkeletalSmoothMaps ],
+InstallOtherMethod( SmoothMap,
+ [ IsSkeletalCategoryOfSmoothMaps, IsObjectInSkeletalCategoryOfSmoothMaps, IsDenseList, IsObjectInSkeletalCategoryOfSmoothMaps ],
function ( Smooth, S, maps, T )
@@ -896,13 +915,13 @@ InstallOtherMethod( SmoothMorphism,
TryNextMethod( );
fi;
- return SmoothMorphism( Smooth, S, maps, T, false );
+ return SmoothMap( Smooth, S, maps, T, false );
end );
##
-InstallOtherMethod( SmoothMorphism,
- [ IsCategoryOfSkeletalSmoothMaps, IsObjectInCategoryOfSkeletalSmoothMaps, IsDenseList, IsObjectInCategoryOfSkeletalSmoothMaps ],
+InstallOtherMethod( SmoothMap,
+ [ IsSkeletalCategoryOfSmoothMaps, IsObjectInSkeletalCategoryOfSmoothMaps, IsDenseList, IsObjectInSkeletalCategoryOfSmoothMaps ],
function ( Smooth, S, constants, T )
local rank_S, rank_T, map, jacobian_matrix;
@@ -924,7 +943,7 @@ end );
##
InstallOtherMethod( \.,
- [ IsCategoryOfSkeletalSmoothMaps, IsPosInt ],
+ [ IsSkeletalCategoryOfSmoothMaps, IsPosInt ],
function ( Smooth, string_as_int )
local i;
@@ -943,7 +962,7 @@ end );
##
InstallOtherMethod( \.,
- [ IsCategoryOfSkeletalSmoothMaps, IsPosInt ],
+ [ IsSkeletalCategoryOfSmoothMaps, IsPosInt ],
function ( Smooth, string_as_int )
local f, l1, l2;
@@ -992,7 +1011,7 @@ InstallOtherMethod( \.,
Assert( 0, IsDenseList( l ) );
- return SmoothMorphism( Smooth, Smooth.( rank_S ), l, Smooth.( Length( l ) ) );
+ return SmoothMap( Smooth, Smooth.( rank_S ), l, Smooth.( Length( l ) ) );
end;
@@ -1223,7 +1242,7 @@ InstallOtherMethod( \.,
jacobian_matrix :=
function ( x )
- return DiagonalMat( List( List( [ 1 .. n ], i -> Exp( -x[i] ) ), exp -> exp / ( 1 - exp ) ^ 2 ) );
+ return DiagonalMat( List( List( [ 1 .. n ], i -> Exp( -x[i] ) ), exp -> exp / ( 1 + exp ) ^ 2 ) );
end;
@@ -1264,7 +1283,7 @@ InstallOtherMethod( \.,
local max, exp_x, s;
# standard trick to avoid numerical overflow
- if GradientDescentForCAP.MOD = "train" then
+ if GradientBasedLearningForCAP.MOD = "train" then
max := Maximum( x );
@@ -1286,7 +1305,7 @@ InstallOtherMethod( \.,
local max, exp_x, s, d;
# standard trick to avoid numerical overflow
- if GradientDescentForCAP.MOD = "train" then
+ if GradientBasedLearningForCAP.MOD = "train" then
max := Maximum( x );
@@ -1372,10 +1391,10 @@ InstallOtherMethod( \.,
elif f = "BinaryCrossEntropyLoss_" then
return
- function ( n )
+ function ( arg... )
local l1, l2;
- if n <> 1 then
+ if Length( arg ) >= 1 and arg[1] <> 1 then
Error( "the passed argument 'n' must be equal to 1!\n" );
fi;
@@ -1393,9 +1412,9 @@ InstallOtherMethod( \.,
elif f = "BinaryCrossEntropyLoss" then
return
- function ( n )
+ function ( arg... )
- if n <> 1 then
+ if Length( arg ) >= 1 and arg[1] <> 1 then
Error( "the passed argument 'n' must be equal to 1!\n" );
fi;
@@ -1411,15 +1430,15 @@ InstallOtherMethod( \.,
elif f = "SigmoidBinaryCrossEntropyLoss_" then
return
- function ( n )
+ function ( arg... )
- if n <> 1 then
+ if Length( arg ) >= 1 and arg[1] <> 1 then
Error( "the passed argument 'n' must be equal to 1!\n" );
fi;
return PreCompose( Smooth,
DirectProductFunctorial( Smooth, [ Smooth.Sigmoid_( 1 ), Smooth.IdFunc( 1 ) ] ),
- Smooth.BinaryCrossEntropyLoss_( n ) );
+ Smooth.BinaryCrossEntropyLoss_() );
end;
@@ -1427,9 +1446,9 @@ InstallOtherMethod( \.,
elif f = "SigmoidBinaryCrossEntropyLoss" then
return
- function ( n )
+ function ( arg... )
- if n <> 1 then
+ if Length( arg ) >= 1 and arg[1] <> 1 then
Error( "the passed argument 'n' must be equal to 1!\n" );
fi;
@@ -1524,7 +1543,7 @@ InstallOtherMethod( \.,
local max, l;
# standard trick to avoid numerical overflow
- if GradientDescentForCAP.MOD = "train" then
+ if GradientBasedLearningForCAP.MOD = "train" then
max := Maximum( List( [ 1 .. n ], i -> x[i] ) );
@@ -1544,7 +1563,7 @@ InstallOtherMethod( \.,
local max, exp_x, s, l, c;
# standard trick to avoid numerical overflow
- if GradientDescentForCAP.MOD = "train" then
+ if GradientBasedLearningForCAP.MOD = "train" then
max := Maximum( List( [ 1 .. n ], i -> x[i] ) );
@@ -1637,12 +1656,12 @@ InstallOtherMethod( \.,
function ( x )
local i;
- if GradientDescentForCAP.MOD = "basic" then
+ if GradientBasedLearningForCAP.MOD = "basic" then
# dropout is activated only while training
return ListWithIdenticalEntries( n, 1 );
- elif GradientDescentForCAP.MOD = "train" then
+ elif GradientBasedLearningForCAP.MOD = "train" then
i := Int( percentage * n );
@@ -1777,12 +1796,46 @@ InstallGlobalFunction( DummyInputStringsForAffineTransformation,
end );
+##
+InstallGlobalFunction( AvailableSkeletalSmoothMaps,
+
+ function ( arg... )
+
+ return [
+ "Sqrt",
+ "Exp",
+ "Log",
+ "Sin",
+ "Cos",
+ "Constant",
+ "Zero",
+ "IdFunc",
+ "Relu",
+ "Sum",
+ "Mean",
+ "Variance",
+ "StandardDeviation",
+ "Mul",
+ "Power",
+ "PowerBase",
+ "Sigmoid",
+ "Softmax",
+ "QuadraticLoss",
+ "BinaryCrossEntropyLoss",
+ "SigmoidBinaryCrossEntropyLoss",
+ "CrossEntropyLoss",
+ "SoftmaxCrossEntropyLoss",
+ "AffineTransformation",
+ ];
+
+end );
+
##
InstallGlobalFunction( DummyInputForAffineTransformation,
function ( arg... )
- return ConvertToExpressions( CallFuncList( DummyInputStringsForAffineTransformation, arg ) );
+ return CreateContextualVariables( CallFuncList( DummyInputStringsForAffineTransformation, arg ) );
end );
@@ -1830,13 +1883,13 @@ InstallGlobalFunction( DummyInputForPolynomialTransformation,
function ( arg... )
- return ConvertToExpressions( CallFuncList( DummyInputStringsForPolynomialTransformation, arg ) );
+ return CreateContextualVariables( CallFuncList( DummyInputStringsForPolynomialTransformation, arg ) );
end );
##
InstallOtherMethod( \^,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps, IsAdditiveElement ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps, IsAdditiveElement ],
function ( f, n )
local Smooth, p;
@@ -1851,7 +1904,7 @@ end );
##
InstallOtherMethod( \*,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps, IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps, IsMorphismInSkeletalCategoryOfSmoothMaps ],
function ( f, g )
@@ -1861,7 +1914,7 @@ end );
##
InstallOtherMethod( \/,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps, IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps, IsMorphismInSkeletalCategoryOfSmoothMaps ],
function ( f, g )
@@ -1872,7 +1925,7 @@ end );
##
InstallMethod( LaTeXOutput,
- [ IsObjectInCategoryOfSkeletalSmoothMaps ],
+ [ IsObjectInSkeletalCategoryOfSmoothMaps ],
function ( U )
@@ -1882,7 +1935,7 @@ end );
##
InstallMethod( LaTeXOutput,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps ],
function ( f )
local dummy_input, rank_S, rank_T, vars, all, maps, jacobian_matrix;
@@ -1922,7 +1975,7 @@ end );
##
InstallMethod( ViewString,
- [ IsObjectInCategoryOfSkeletalSmoothMaps ],
+ [ IsObjectInSkeletalCategoryOfSmoothMaps ],
function ( U )
@@ -1932,7 +1985,7 @@ end );
##
InstallMethod( ViewString,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps ],
function ( f )
@@ -1945,7 +1998,7 @@ end );
##
InstallMethod( DisplayString,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps ],
function ( f )
local dummy_input, maps;
@@ -1965,7 +2018,7 @@ end );
##
InstallMethod( Display,
- [ IsMorphismInCategoryOfSkeletalSmoothMaps ],
+ [ IsMorphismInSkeletalCategoryOfSmoothMaps ],
function ( f )
local dummy_input, m;
diff --git a/gap/Tools.gd b/gap/Tools.gd
index b3ecedd..be4605a 100644
--- a/gap/Tools.gd
+++ b/gap/Tools.gd
@@ -1,26 +1,137 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Declarations
#
+#! @Chapter Tools
+#! @Section Mathematical Operations
+
+#! @Description
+#! The ReLU (Rectified Linear Unit) activation function.
+#! Returns Maximum(a, 0) for a float a.
+#! @Arguments a
+#! @Returns a float
DeclareOperation( "Relu", [ IsFloat ] );
+
+#! @Description
+#! The Kronecker delta function. Returns 1 if a equals b, otherwise 0.
+#! @Arguments a, b
+#! @Returns 0 or 1
+DeclareGlobalFunction( "KroneckerDelta" );
+
+#! @Description
+#! Multiply two matrices with explicit dimensions.
+#! mat_1 is an m_1 x n_1 matrix,
+#! mat_2 is an m_2 x n_2 matrix.
+#! Requires n_1 = m_2.
+#! @Arguments m_1, mat_1, n_1, m_2, mat_2, n_2
+#! @Returns a matrix
+DeclareGlobalFunction( "MultiplyMatrices" );
+
+#! @Section List Operations
+
+#! @Description
+#! Create enumerated pairs from a list.
+#! Returns a list of pairs [i, l[i]] for each element in l.
+#! @Arguments l
+#! @Returns a list of pairs
DeclareOperation( "Enumerate", [ IsDenseList ] );
+
+#! @Description
+#! Split a dense list l according to the dimensions in dims.
+#! The sum of dimensions must equal the length of the list.
+#! @Arguments l, dims
+#! @Returns a list of lists
DeclareOperation( "SplitDenseList", [ IsDenseList, IsDenseList ] );
+
+#! @Description
+#! Split a dense list l into sublists of size n.
+#! If the length of l is not a multiple of n,
+#! the last sublist will contain the remaining elements (i.e., will contain fewer than n elements).
+#! @Arguments l, n
+#! @Returns a list of lists
DeclareOperation( "SplitDenseList", [ IsDenseList, IsPosInt ] );
+#! @Section Helper Functions
+
+#! @Description
+#! Select an element based on a condition.
+#! If cond is true, returns val_true, otherwise returns val_false.
+#! @Arguments cond, val_true, val_false
+#! @Returns an element
DeclareGlobalFunction( "SelectBasedOnCondition" );
+
+#! @Description
+#! Call one of two functions based on a condition.
+#! If cond is true, calls func_1, otherwise calls func_2
+#! on the arguments in args. For example,
+#! CallFuncListBasedOnCondition( true, x -> x^2, x -> x^3, [ 2 ] ); returns $4$,
+#! while CallFuncListBasedOnCondition( false, x -> x^2, x -> x^3, [ 2 ] ); returns $8$.
+#! @Arguments cond, func_1, func_2, args
+#! @Returns the result of the called function
DeclareGlobalFunction( "CallFuncListBasedOnCondition" );
-DeclareGlobalFunction( "KroneckerDelta" );
-DeclareGlobalFunction( "MultiplyMatrices" );
-DeclareOperation( "ScatterPlotUsingPython", [ IsDenseList, IsDenseList ] );
-DeclareOperation( "SimplifyExpressionUsingPython", [ IsDenseList, IsDenseList ] );
+#! @Section Python Integration
+
+#! @Description
+#! Compute the partial derivative of an expression with respect to the i-th variable.
+#! vars is a list of variable names, str is the expression string.
+#! See the example in the Expressions chapter.
+#! @Arguments vars, str, i
+#! @Returns a function
+DeclareOperation( "Diff", [ IsDenseList, IsString, IsPosInt ] );
+
+#! @Description
+#! Compute the lazy partial derivative of an expression with respect to the i-th variable.
+#! vars is a list of variable names, str is the expression string.
+#! See the example in the Expressions chapter.
+#! @Arguments vars, str, i
+#! @Returns a function
+DeclareOperation( "LazyDiff", [ IsDenseList, IsString, IsPosInt ] );
+
+#! @Description
+#! Compute the Jacobian matrix using Python's SymPy library.
+#! vars is a list of variable names, exps is a list of expression strings,
+#! indices specifies which variables to differentiate with respect to.
+#! @Arguments vars, exps, indices
+#! @Returns a matrix of derivative expressions
DeclareOperation( "JacobianMatrixUsingPython", [ IsDenseList, IsDenseList, IsDenseList ] );
+
+#! @Description
+#! Compute a lazy Jacobian matrix (deferred computation).
+#! Returns a function that computes the Jacobian when evaluated.
+#! @Arguments vars, exps, indices
+#! @Returns a function
DeclareOperation( "LazyJacobianMatrix", [ IsDenseList, IsDenseList, IsDenseList ] );
+
+#! @Description
+#! Create a scatter plot using Python's matplotlib.
+#! points is a list of 2D points, labels is a list of class labels.
+#! @Arguments points, labels
+#! @Returns the directory containing the plot
+DeclareOperation( "ScatterPlotUsingPython", [ IsDenseList, IsDenseList ] );
+
+#! @Description
+#! Simplify expressions using Python's SymPy library.
+#! vars is a list of variable names, exps is a list of expression strings.
+#! @Arguments vars, exps
+#! @Returns a list of simplified expression strings
+DeclareOperation( "SimplifyExpressionUsingPython", [ IsDenseList, IsDenseList ] );
+
+#! @Description
+#! Convert expressions to LaTeX format using Python's SymPy library.
+#! vars is a list of variable names, exps is a list of expression strings.
+#! @Arguments vars, exps
+#! @Returns a list of LaTeX strings
DeclareOperation( "LaTeXOutputUsingPython", [ IsDenseList, IsDenseList ] );
-DeclareOperation( "LazyDiff", [ IsDenseList, IsString, IsPosInt ] );
-DeclareOperation( "Diff", [ IsDenseList, IsString, IsPosInt ] );
+#! @Description
+#! Compile functions to Cython for improved performance.
+#! vars is a list of lists of variable names (one per function),
+#! function_names is a list of function names,
+#! functions is a list of function body strings.
+#! @Arguments vars, function_names, functions
+#! @Returns a string with instructions to use the compiled functions
DeclareOperation( "AsCythonFunction", [ IsDenseList, IsDenseList, IsDenseList ] );
diff --git a/gap/Tools.gi b/gap/Tools.gi
index 9d5a6aa..05f5041 100644
--- a/gap/Tools.gi
+++ b/gap/Tools.gi
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Implementations
#
@@ -88,13 +88,14 @@ InstallMethod( SplitDenseList,
[ IsDenseList, IsPosInt ],
function ( l, n )
- local N;
+ local a, b, dims;
- if Length( l ) mod n <> 0 then
- Error( "the length of the passed list 'l' must be divisible by passed positive integers 'n'!\n" );
- fi;
+ a := QuoInt( Length( l ), n );
+ b := RemInt( Length( l ), n );
+
+ dims := Concatenation( ListWithIdenticalEntries( a, n ), SelectBasedOnCondition( b = 0, [ ], [ b ] ) );
- return SplitDenseList( l, ListWithIdenticalEntries( Length( l ) / n, n ) );
+ return SplitDenseList( l, dims );
end );
@@ -157,7 +158,7 @@ InstallMethod( LazyDiff,
return
function ( vec )
- local vec_vars;
+ local vec_vars, expressions;
if ForAll( vec, e -> IsFloat( e ) or IsRat( e ) ) then
vec := List( vec, e -> Expression( [ ], String( e ) ) );
@@ -166,8 +167,9 @@ InstallMethod( LazyDiff,
# obviously
Assert( 0, IsDenseList( vec ) and Length( vars ) = Length( vec ) );
- # all entries of vec must be expressions defined by the same variables
- Assert( 0, Length( Set( List( vec, Variables ) ) ) = 1 );
+ # all non-float expressions should have the same variables if any expression is non-float
+ expressions := Filtered( vec, e -> IsExpression( e ) );
+ Assert( 0, IsEmpty( expressions ) or ForAll( expressions, e -> Variables( e ) = Variables( expressions[1] ) ) );
if not IsEmpty( vec ) then
vec_vars := Variables( vec[1] );
@@ -200,10 +202,10 @@ InstallOtherMethod( LazyDiff,
InstallMethod( SimplifyExpressionUsingPython,
[ IsDenseList, IsDenseList ],
- function ( vars, exps )
- local constants, dir, input_path, input_file, output_path, import, symbols, functions, g_ops, p_ops, define_exps, simplify, write_output, stream, err, output_file, outputs, j, i, exp, o;
+ function ( vars, expressions )
+ local constants, indices, exps, dir, input_path, input_file, output_path, import, symbols, functions, g_ops, p_ops, define_exps, simplify, write_output, stream, err, output_file, raw_outputs, outputs, j, i;
- if not ( ForAll( exps, IsString ) and ForAll( vars, IsString ) ) then
+ if not ( ForAll( expressions, IsString ) and ForAll( vars, IsString ) ) then
TryNextMethod( );
fi;
@@ -212,7 +214,17 @@ InstallMethod( SimplifyExpressionUsingPython,
vars := Concatenation( vars, constants );
# create a copy
- exps := List( [ 1 .. Length( exps ) ], i -> exps[i] );
+ expressions := List( [ 1 .. Length( expressions ) ], i -> expressions[i] );
+
+ # skip expressions that contain Diff
+ indices := PositionsProperty( expressions, exp -> PositionSublist( exp, "Diff" ) = fail );
+
+ # nothing to simplify (all expressions contain Diff)
+ if IsEmpty( indices ) then
+ return expressions;
+ fi;
+
+ exps := expressions{ indices };
dir := DirectoryTemporary( );
@@ -267,18 +279,21 @@ InstallMethod( SimplifyExpressionUsingPython,
output_file := IO_File( output_path, "r" );
- outputs := IO_ReadLines( output_file );
+ raw_outputs := IO_ReadLines( output_file );
IO_Close( output_file );
- Assert( 0, Length( outputs ) = Length( exps ) );
+ Assert( 0, Length( raw_outputs ) = Length( exps ) );
- for j in [ 1 .. Length( outputs ) ] do
+ # start with the original expressions; overwrite only those that were simplified
+ outputs := ShallowCopy( expressions );
+
+ for j in [ 1 .. Length( indices ) ] do
- outputs[j] := ReplacedString( outputs[j], "\n", "" );
+ outputs[ indices[j] ] := ReplacedString( raw_outputs[j], "\n", "" );
for i in [ 1 .. Length( g_ops ) ] do
- outputs[j] := ReplacedString( outputs[j], p_ops[i], g_ops[i] );
+ outputs[ indices[j] ] := ReplacedString( outputs[ indices[j] ], p_ops[i], g_ops[i] );
od;
od;
@@ -424,7 +439,7 @@ InstallOtherMethod( JacobianMatrix,
function ( vars, map_func, indices )
- return JacobianMatrix( map_func( ConvertToExpressions( vars ) ), indices );
+ return JacobianMatrix( map_func( CreateContextualVariables( vars ) ), indices );
end );
@@ -460,7 +475,7 @@ InstallOtherMethod( LazyJacobianMatrix,
function( vec )
if exps = fail then
- exps := map_func( ConvertToExpressions( vars ) );
+ exps := map_func( CreateContextualVariables( vars ) );
fi;
return LazyJacobianMatrix( exps, indices )( vec );
@@ -704,108 +719,96 @@ end );
##
InstallMethod( ScatterPlotUsingPython,
- [ IsDenseList, IsDenseList ],
+ [ IsDenseList, IsDenseList ],
function ( points, labels )
- local dir, path, file, size, action, stream, err, p;
-
- dir := DirectoryTemporary( );
-
- Info( InfoPython, 1, dir );
-
- path := Filename( dir, "plot.py" );
-
- file := IO_File( path, "w" );
-
- size := CAP_INTERNAL_RETURN_OPTION_OR_DEFAULT( "size", "20" );
-
- action := CAP_INTERNAL_RETURN_OPTION_OR_DEFAULT( "action", "show" );
-
- IO_Write( file,
- Concatenation(
- "import matplotlib.pyplot as plt\n",
- "import matplotlib.patches as patches\n\n",
-
- "points =", String( points ), "\n",
- "labels = [ str(label) for label in ", String( labels ), "]\n",
-
- #"#test_points =", String( test_points ), "\n",
- #"#test_labels = [ str(label) for label in ", String( test_labels ), "]\n",
-
- "# Convert the points to separate x and y lists\n",
- "x1 = [p[0] for p in points]\n",
- "y1 = [p[1] for p in points]\n",
-
- "#x2 = [p[0] for p in test_points]\n",
- "#y2 = [p[1] for p in test_points]\n",
-
- "# Unique classes\n",
- "unique_classes = list(set(labels))\n",
-
- "# Number of unique classes\n",
- "num_classes = len(unique_classes)\n",
-
- "markers = ['+', '*', 'o', 'v', '^', '<', '>', '1', '2', '3', '4', 's', 'p', 'x', 'h', 'H', '.', ',', 'D', 'd', '|', '_']\n",
-
- "# Generate a list of colors using a colormap\n",
- "colormap = plt.get_cmap('rainbow')\n",
- "colors = [colormap(i / num_classes) for i in range(num_classes)]",
-
- "# Map classes to colors\n",
- "style_map = {cls: colors[i] for i, cls in enumerate(unique_classes)}\n",
- "markers = {cls : markers[i] for i, cls in enumerate(unique_classes)}\n",
- "# Create a figure and an axes\n",
- "fig, ax = plt.subplots()\n",
-
- "# Create a scatter plots\n",
- "for cls in unique_classes:",
- "\n ",
- "x1_class = [x1[i] for i in range(len(x1)) if labels[i] == cls]",
- "\n ",
- "y1_class = [y1[i] for i in range(len(y1)) if labels[i] == cls]",
- "\n ",
- "scatter1 = plt.scatter(x1_class, y1_class, color=style_map[cls], marker=markers[cls], s=", size, ", label=cls)\n",
-
- "\n ",
- "#x2_class = [x2[i] for i in range(len(x2)) if test_labels[i] == cls]",
- "\n ",
- "#y2_class = [y2[i] for i in range(len(y2)) if test_labels[i] == cls]",
- "\n ",
- "#scatter2 = plt.scatter(x2_class, y2_class, color=style_map[cls], marker='v', s=100, label=cls)\n",
-
- "# Set the limits of the plot based on min and max values of x and y\n",
- "plt.xlim(min(x1) - 0.1, max(x1) + 0.1)\n",
- "plt.ylim(min(y1) - 0.1, max(y1) + 0.1)\n",
-
- "plt.xlabel('X-axis')\n",
- "plt.ylabel('Y-axis')\n",
- "plt.title('Scatter Plot using Matplotlib')\n",
- "plt.legend()\n",
- SelectBasedOnCondition(
- action = "save",
- Concatenation( "plt.savefig('", Filename( dir, "plot.png" ), "', dpi=400)\n" ),
- "plt.show()\n" ) ) );
-
- IO_Close( file );
-
- stream := IO_Popen3( IO_FindExecutable( "python" ), [ path, "&" ] );
-
- err := Concatenation( IO_ReadLines( stream.stderr ) );
-
- IO_ReadLines( stream.stdout );
-
- IO_Close( stream.stdin );
-
- IO_Close( stream.stdout );
-
- IO_Close( stream.stderr );
-
- if not IsEmpty( err ) then
-
- Error( err, "\n" );
-
- fi;
+ local dir, path, file, size, action, stream, err, p;
+
+ dir := DirectoryTemporary( );
+
+ Info( InfoPython, 1, dir );
+
+ path := Filename( dir, "plot.py" );
+
+ file := IO_File( path, "w" );
+
+ size := CAP_INTERNAL_RETURN_OPTION_OR_DEFAULT( "size", "100" );
+
+ action := CAP_INTERNAL_RETURN_OPTION_OR_DEFAULT( "action", "show" );
+
+ IO_Write( file,
+ Concatenation(
+ "import matplotlib.pyplot as plt\n",
+ "import matplotlib.patches as patches\n\n",
+
+ "points =", String( points ), "\n",
+ "labels = [ str(label) for label in ", String( labels ), "]\n",
+
+ "# Convert the points to separate x and y lists\n",
+ "x1 = [p[0] for p in points]\n",
+ "y1 = [p[1] for p in points]\n",
+
+ "# Unique classes (use sorted order to make mapping deterministic)\n",
+ "unique_classes = sorted(set(labels))\n",
+
+ "# Number of unique classes\n",
+ "num_classes = len(unique_classes)\n",
+
+ "marker_list = ['+', '*', 'o', 'v', '^', '<', '>', '1', '2', '3', '4', 's', 'p', 'x', 'h', 'H', '.', ',', 'D', 'd', '|', '_']\n",
+
+ "# Generate a deterministic list of colors from a fixed colormap\n",
+ "colormap = plt.get_cmap('viridis')\n",
+ "colors = [colormap(i / max(num_classes - 1, 1)) for i in range(num_classes)]\n",
+
+ "# Map classes to styles deterministically (by sorted class order)\n",
+ "style_map = {cls: colors[i] for i, cls in enumerate(unique_classes)}\n",
+ "markers = {cls: marker_list[i % len(marker_list)] for i, cls in enumerate(unique_classes)}\n",
+
+ "# Create a figure and an axes\n",
+ "fig, ax = plt.subplots()\n",
+
+ "# Create scatter plots\n",
+ "for cls in unique_classes:",
+ "\n ",
+ "x1_class = [x1[i] for i in range(len(x1)) if labels[i] == cls]",
+ "\n ",
+ "y1_class = [y1[i] for i in range(len(y1)) if labels[i] == cls]",
+ "\n ",
+ "scatter1 = plt.scatter(x1_class, y1_class, color=style_map[cls], marker=markers[cls], s=", size, ", label=cls)\n",
+
+ "# Set the limits of the plot based on min and max values of x and y\n",
+ "plt.xlim(min(x1) - 0.1, max(x1) + 0.1)\n",
+ "plt.ylim(min(y1) - 0.1, max(y1) + 0.1)\n",
+
+ "plt.xlabel('X-axis')\n",
+ "plt.ylabel('Y-axis')\n",
+ "plt.title('Scatter Plot using Matplotlib')\n",
+ "plt.legend()\n",
+ SelectBasedOnCondition(
+ action = "save",
+ Concatenation( "plt.savefig('", Filename( dir, "plot.png" ), "', dpi=400)\n" ),
+ "plt.show()\n" ) ) );
+
+ IO_Close( file );
+
+ stream := IO_Popen3( IO_FindExecutable( "python" ), [ path, "&" ] );
+
+ err := Concatenation( IO_ReadLines( stream.stderr ) );
+
+ IO_ReadLines( stream.stdout );
+
+ IO_Close( stream.stdin );
+
+ IO_Close( stream.stdout );
+
+ IO_Close( stream.stderr );
+
+ if not IsEmpty( err ) then
- return dir;
+ Error( err, "\n" );
+ fi;
+
+ return dir;
+
end );
diff --git a/init.g b/init.g
index 6da5ae4..716a66c 100644
--- a/init.g
+++ b/init.g
@@ -1,15 +1,17 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Reading the declaration part of the package.
#
-ReadPackage( "GradientDescentForCAP", "gap/Tools.gd" );
-ReadPackage( "GradientDescentForCAP", "gap/Expressions.gd" );
-ReadPackage( "GradientDescentForCAP", "gap/MethodRecord.gd" );
-ReadPackage( "GradientDescentForCAP", "gap/MethodRecord.Declarations.autogen.gd" );
-ReadPackage( "GradientDescentForCAP", "gap/CategoryOfSkeletalSmoothMaps.gd" );
-ReadPackage( "GradientDescentForCAP", "gap/CategoryOfParametrisedMorphisms.gd" );
-ReadPackage( "GradientDescentForCAP", "gap/CategoryOfLenses.gd" );
-ReadPackage( "GradientDescentForCAP", "gap/FitParameters.gd" );
-ReadPackage( "GradientDescentForCAP", "gap/NeuralNetworks.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/Tools.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/Expressions.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/MethodRecord.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/MethodRecord.Declarations.autogen.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/SkeletalCategoryOfSmoothMaps.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/CategoryOfParametrisedMorphisms.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/CategoryOfParametrisedMorphisms.autogen.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/CategoryOfLenses.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/CategoryOfLenses.autogen.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/FitParameters.gd" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/NeuralNetworks.gd" );
diff --git a/makedoc.g b/makedoc.g
index 867b82e..f9a6ca0 100644
--- a/makedoc.g
+++ b/makedoc.g
@@ -1,18 +1,22 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# This file is a script which compiles the package manual.
#
-if fail = LoadPackage( "AutoDoc", "2019.05.20" ) then
+if fail = LoadPackage( "AutoDoc", "2025.12.19" ) then
- Error( "AutoDoc version 2019.05.20 or newer is required." );
+ Error( "AutoDoc version 2025.12.19 or newer is required." );
fi;
AutoDoc( rec(
autodoc := rec(
files := [ "doc/Doc.autodoc" ],
- scan_dirs := [ "doc", "gap", "examples", "examples/doc" ],
+ scan_dirs := [ "doc", "gap", "examples", "examples/doc",
+ "examples/NeuralNetwork_BinaryCrossEntropy",
+ "examples/NeuralNetwork_CrossEntropy",
+ "examples/NeuralNetwork_QuadraticLoss",
+ ],
),
extract_examples := rec(
units := "Single",
@@ -20,14 +24,17 @@ AutoDoc( rec(
gapdoc := rec(
LaTeXOptions := rec(
LateExtraPreamble := """
+ \usepackage{tikz}
+ \usetikzlibrary{positioning}
\usepackage{mathtools}
+ \usepackage{stmaryrd}
\DeclareUnicodeCharacter{211D}{\ensuremath{\mathbb{R}}}
\DeclareUnicodeCharacter{2023}{\ensuremath{\blacktriangleright}}
""",
),
),
scaffold := rec(
- entities := [ "homalg", "CAP" ],
+ entities := rec( homalg := "homalg", CAP := "CAP" ),
),
) );
diff --git a/makedoc_with_overfull_hbox_warnings.g b/makedoc_with_overfull_hbox_warnings.g
index 4373425..1eeea61 100644
--- a/makedoc_with_overfull_hbox_warnings.g
+++ b/makedoc_with_overfull_hbox_warnings.g
@@ -1,11 +1,11 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# This file is a script which compiles the package manual and prints overfull hbox warnings.
#
-if fail = LoadPackage( "AutoDoc", "2019.05.20" ) then
+if fail = LoadPackage( "AutoDoc", "2025.12.19" ) then
- Error( "AutoDoc version 2019.05.20 or newer is required." );
+ Error( "AutoDoc version 2025.12.19 or newer is required." );
fi;
@@ -18,7 +18,10 @@ AutoDoc( rec(
gapdoc := rec(
LaTeXOptions := rec(
LateExtraPreamble := """
+ \usepackage{tikz}
+ \usetikzlibrary{positioning}
\usepackage{mathtools}
+ \usepackage{stmaryrd}
\DeclareUnicodeCharacter{211D}{\ensuremath{\mathbb{R}}}
\DeclareUnicodeCharacter{2023}{\ensuremath{\blacktriangleright}}
% Many thanks to https://tex.stackexchange.com/questions/22466/how-to-convince-fancyvrb-to-give-overfull-warnings/534486#534486
@@ -38,7 +41,7 @@ AutoDoc( rec(
),
),
scaffold := rec(
- entities := [ "homalg", "CAP" ],
+ entities := rec( homalg := "homalg", CAP := "CAP" ),
),
) );
diff --git a/makefile b/makefile
index 6ccb687..652cd7e 100644
--- a/makefile
+++ b/makefile
@@ -18,9 +18,9 @@ test: doc
test-basic-spacing:
# exit code 1 means no match, which is what we want here (exit code 2 signals an error)
- grep -RPl "\t" examples/*.g gap/*.g*; test $$? -eq 1 || (echo "Tabs found" && exit 1)
- grep -RPl "\r" examples/*.g gap/*.g*; test $$? -eq 1 || (echo "Windows line-endings found" && exit 1)
- grep -RPzL "\n\z" examples/*.g gap/*.g* | grep ""; test $$? -eq 1 || (echo "File with no newline at end of file found" && exit 1)
+ grep -RPl "\t" examples/ gap/ --exclude="*.png"; test $$? -eq 1 || (echo "Tabs found" && exit 1)
+ grep -RPl "\r" examples/ gap/ --exclude="*.png"; test $$? -eq 1 || (echo "Windows line-endings found" && exit 1)
+ grep -RPzL "\n\z" examples/ gap/ --exclude="*.png" | grep ""; test $$? -eq 1 || (echo "File with no newline at end of file found" && exit 1)
test-doc: doc
cp -aT doc/ doc_tmp/
@@ -37,7 +37,7 @@ test-spacing:
grep -RE '[^ ] +$$' gap/*; test $$? -eq 1 || (echo "Trailing whitespace found" && exit 1)
for filename in gap/*; do \
echo $$filename; \
- gap --quitonbreak --norepl --banner -c "LoadPackage(\"GradientDescentForCAP\"); SizeScreen([4096]); func := ReadAsFunction(\"$$filename\"); FileString(\"gap_spacing\", DisplayString(func));"; \
+ gap --quitonbreak --norepl --banner -c "LoadPackage(\"GradientBasedLearningForCAP\"); SizeScreen([4096]); func := ReadAsFunction(\"$$filename\"); FileString(\"gap_spacing\", DisplayString(func));"; \
# In a perfect world, the DisplayString of a function would exactly match our code. However, our line breaks and indentation might differ from the GAP ones, \
# so we remove all indentation, line breaks, and empty lines, and afterwards insert line breaks at semicolons again for better readability. \
cat "gap_spacing" | tail -n +2 | head -n -2 | sed 's/\[ \]/[ ]/g' | sed 's/( )/( )/g' | sed 's/( :/( :/g' | sed 's/ *$$//' | sed 's/^ *//' | grep -v "^$$" | tr "\n" " " | sed 's/;/;\n/g' > modified_gap_spacing; \
diff --git a/read.g b/read.g
index 5c4f7d6..b97149c 100644
--- a/read.g
+++ b/read.g
@@ -1,15 +1,15 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# Reading the implementation part of the package.
#
-ReadPackage( "GradientDescentForCAP", "gap/Tools.gi");
-ReadPackage( "GradientDescentForCAP", "gap/Expressions.gi");
-ReadPackage( "GradientDescentForCAP", "gap/MethodRecord.gi");
-ReadPackage( "GradientDescentForCAP", "gap/MethodRecord.Installations.autogen.gi");
-ReadPackage( "GradientDescentForCAP", "gap/CategoryOfSkeletalSmoothMaps.gi");
-ReadPackage( "GradientDescentForCAP", "gap/CategoryOfParametrisedMorphisms.gi" );
-ReadPackage( "GradientDescentForCAP", "gap/CategoryOfLenses.gi" );
-ReadPackage( "GradientDescentForCAP", "gap/FitParameters.gi" );
-ReadPackage( "GradientDescentForCAP", "gap/NeuralNetworks.gi" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/Tools.gi");
+ReadPackage( "GradientBasedLearningForCAP", "gap/Expressions.gi");
+ReadPackage( "GradientBasedLearningForCAP", "gap/MethodRecord.gi");
+ReadPackage( "GradientBasedLearningForCAP", "gap/MethodRecord.Installations.autogen.gi");
+ReadPackage( "GradientBasedLearningForCAP", "gap/SkeletalCategoryOfSmoothMaps.gi");
+ReadPackage( "GradientBasedLearningForCAP", "gap/CategoryOfParametrisedMorphisms.gi" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/CategoryOfLenses.gi" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/FitParameters.gi" );
+ReadPackage( "GradientBasedLearningForCAP", "gap/NeuralNetworks.gi" );
diff --git a/stats.514058 b/stats.514058
new file mode 100644
index 0000000..f83f144
--- /dev/null
+++ b/stats.514058
@@ -0,0 +1 @@
+{ "Type": "_", "Version":1, "IsCover": true, "TimeType": "Memory"}
diff --git a/tst/100_LoadPackage.tst b/tst/100_LoadPackage.tst
index fc663ef..4885d54 100644
--- a/tst/100_LoadPackage.tst
+++ b/tst/100_LoadPackage.tst
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# This file tests if the package can be loaded without errors or warnings.
#
@@ -7,9 +7,9 @@
gap> PushOptions( rec( OnlyNeeded := true ) );
gap> package_loading_info_level := InfoLevel( InfoPackageLoading );;
gap> SetInfoLevel( InfoPackageLoading, PACKAGE_ERROR );;
-gap> LoadPackage( "GradientDescentForCAP", false );
+gap> LoadPackage( "GradientBasedLearningForCAP", false );
true
gap> SetInfoLevel( InfoPackageLoading, PACKAGE_INFO );;
-gap> LoadPackage( "GradientDescentForCAP" );
+gap> LoadPackage( "GradientBasedLearningForCAP" );
true
gap> SetInfoLevel( InfoPackageLoading, package_loading_info_level );;
diff --git a/tst/example-in-readme-1.tst b/tst/example-in-readme-1.tst
index d083f99..72ef08f 100644
--- a/tst/example-in-readme-1.tst
+++ b/tst/example-in-readme-1.tst
@@ -1,4 +1,4 @@
-gap> LoadPackage( "GradientDescentForCAP" );
+gap> LoadPackage( "GradientBasedLearningForCAP" );
true
gap> Para := CategoryOfParametrisedMorphisms( SkeletalSmoothMaps );
CategoryOfParametrisedMorphisms( SkeletalSmoothMaps )
@@ -8,7 +8,7 @@ gap> training_set := [ [ 1, 2.9 ], [ 2, 5.1 ], [ 3, 7.05 ] ];
gap> input_dim := 1;; output_dim := 1;; hidden_dims := [ ];;
-gap> f := PredictionMorphismOfNeuralNetwork( Para, input_dim, hidden_dims, output_dim, "IdFunc" );;
+gap> f := NeuralNetworkPredictionMorphism( Para, input_dim, hidden_dims, output_dim, "IdFunc" );;
gap> Display( f );
ℝ^1 -> ℝ^1 defined by:
@@ -28,7 +28,7 @@ gap> parameters := [ 2, 1 ];; x := [ 2 ];;
gap> Eval( f, [ parameters, x ] );
[ 5 ]
-gap> ell := LossMorphismOfNeuralNetwork( Para, input_dim, hidden_dims, output_dim, "IdFunc" );;
+gap> ell := NeuralNetworkLossMorphism( Para, input_dim, hidden_dims, output_dim, "IdFunc" );;
gap> Display( ell );
ℝ^2 -> ℝ^1 defined by:
diff --git a/tst/example-in-readme-2.tst b/tst/example-in-readme-2.tst
index bb4a40c..c37261f 100644
--- a/tst/example-in-readme-2.tst
+++ b/tst/example-in-readme-2.tst
@@ -1,4 +1,4 @@
-gap> LoadPackage( "GradientDescentForCAP" );
+gap> LoadPackage( "GradientBasedLearningForCAP" );
true
gap> Para := CategoryOfParametrisedMorphisms( SkeletalSmoothMaps );
@@ -18,9 +18,9 @@ gap> training_set := Concatenation( class_1, class_2, class_3 );
gap> input_dim := 2;; output_dim := 3;; hidden_dims := [ ];;
-gap> f := PredictionMorphismOfNeuralNetwork( Para, input_dim, hidden_dims, output_dim, "Softmax" );;
+gap> f := NeuralNetworkPredictionMorphism( Para, input_dim, hidden_dims, output_dim, "Softmax" );;
-gap> input := ConvertToExpressions( [ "theta_1", "theta_2", "theta_3", "theta_4", "theta_5", "theta_6", "theta_7", "theta_8", "theta_9", "x1", "x2" ] );;
+gap> input := CreateContextualVariables( [ "theta_1", "theta_2", "theta_3", "theta_4", "theta_5", "theta_6", "theta_7", "theta_8", "theta_9", "x1", "x2" ] );;
gap> Display( f : dummy_input := input );
ℝ^2 -> ℝ^3 defined by:
@@ -50,9 +50,9 @@ gap> prediction_x := Eval( f, [ parameters, x ] );
gap> PositionMaximum( prediction_x );
2
-gap> ell := LossMorphismOfNeuralNetwork( Para, input_dim, hidden_dims, output_dim, "Softmax" );;
+gap> ell := NeuralNetworkLossMorphism( Para, input_dim, hidden_dims, output_dim, "Softmax" );;
-gap> input := ConvertToExpressions( [ "theta_1", "theta_2", "theta_3", "theta_4", "theta_5", "theta_6", "theta_7", "theta_8", "theta_9", "x1", "x2", "y1", "y2", "y3" ] );;
+gap> input := CreateContextualVariables( [ "theta_1", "theta_2", "theta_3", "theta_4", "theta_5", "theta_6", "theta_7", "theta_8", "theta_9", "x1", "x2", "y1", "y2", "y3" ] );;
gap> Display( ell : dummy_input := input );
ℝ^5 -> ℝ^1 defined by:
@@ -82,11 +82,11 @@ gap> one_epoch_update := OneEpochUpdateLens( ell, optimizer, training_set, batch
(ℝ^28, ℝ^28) -> (ℝ^1, ℝ^0) defined by:
Get Morphism:
-----------
+------------
ℝ^28 -> ℝ^1
Put Morphism:
-----------
+------------
ℝ^28 -> ℝ^28
gap> parameters := [ 0.1, -0.1, 0, 0.1, 0.2, 0, -0.2, 0.3, 0 ];;
diff --git a/tst/functor.tst b/tst/functor.tst
index b4da613..a5a2ddd 100644
--- a/tst/functor.tst
+++ b/tst/functor.tst
@@ -4,8 +4,8 @@ gap> Smooth := SkeletalSmoothMaps;;
gap> Lenses := CategoryOfLenses( Smooth );;
gap> Para := CategoryOfParametrisedMorphisms( Smooth );;
gap> Para_Lenses := CategoryOfParametrisedMorphisms( Lenses );;
-gap> ell := LossMorphismOfNeuralNetwork( Para, 2, [], 1, "IdFunc" );;
-gap> dummy_input := ConvertToExpressions( [ "w1", "w2", "b1", "x1", "x2", "y" ] );
+gap> ell := NeuralNetworkLossMorphism( Para, 2, [], 1, "IdFunc" );;
+gap> dummy_input := CreateContextualVariables( [ "w1", "w2", "b1", "x1", "x2", "y" ] );
[ w1, w2, b1, x1, x2, y ]
gap> Display( ell : dummy_input := dummy_input );
ℝ^3 -> ℝ^1 defined by:
@@ -33,11 +33,11 @@ Underlying Morphism:
(ℝ^6, ℝ^6) -> (ℝ^1, ℝ^1) defined by:
Get Morphism:
-----------
+------------
ℝ^6 -> ℝ^1
Put Morphism:
-----------
+------------
ℝ^7 -> ℝ^6
gap> Display( Rf );
(ℝ^3, ℝ^3) -> (ℝ^1, ℝ^1) defined by:
diff --git a/tst/neural-network-1.tst b/tst/neural-network-1.tst
index 5390ad9..dd35446 100644
--- a/tst/neural-network-1.tst
+++ b/tst/neural-network-1.tst
@@ -1,7 +1,7 @@
gap> Smooth := SkeletalSmoothMaps;;
gap> Lenses := CategoryOfLenses( Smooth );;
gap> Para := CategoryOfParametrisedMorphisms( Smooth );;
-gap> f := LossMorphismOfNeuralNetwork( Para, 2, [], 1, "IdFunc" );;
+gap> f := NeuralNetworkLossMorphism( Para, 2, [], 1, "IdFunc" );;
gap> optimizer := Lenses.AdamOptimizer( );;
gap> training_examples_path := SelectBasedOnCondition( IsExistingFile( "data-1.txt" ), "data-1.txt", "tst/data-1.txt" );;
gap> batch_size := 5;;
diff --git a/tst/neural-network-2.tst b/tst/neural-network-2.tst
index 0d64663..2ed60e7 100644
--- a/tst/neural-network-2.tst
+++ b/tst/neural-network-2.tst
@@ -1,7 +1,7 @@
gap> Smooth := SkeletalSmoothMaps;;
gap> Lenses := CategoryOfLenses( Smooth );;
gap> Para := CategoryOfParametrisedMorphisms( Smooth );;
-gap> f := LossMorphismOfNeuralNetwork( Para, 2, [ 5, 5 ], 4, "Softmax" );;
+gap> f := NeuralNetworkLossMorphism( Para, 2, [ 5, 5 ], 4, "Softmax" );;
gap> optimizer := Lenses.GradientDescentOptimizer( : learning_rate := 0.01 );;
gap> training_examples_path := SelectBasedOnCondition( IsExistingFile( "data-2.txt" ), "data-2.txt", "tst/data-2.txt" );;
gap> batch_size := 1;;
diff --git a/tst/simplify-expression-using-python.tst b/tst/simplify-expression-using-python.tst
new file mode 100644
index 0000000..00c9282
--- /dev/null
+++ b/tst/simplify-expression-using-python.tst
@@ -0,0 +1,60 @@
+#############################################################################
+# Tests for SimplifyExpressionUsingPython
+#############################################################################
+
+gap> LoadPackage( "GradientBasedLearningForCAP" );
+true
+
+gap> vars := [ "x1", "x2", "x3" ];;
+
+gap> exprs := [
+> "x1 + 0",
+> "0 + x2",
+> "x1 * 1",
+> "1 * x2",
+> "x1 - x1",
+> "x1^2 * x1^3",
+> "Exp(Log(x1))",
+> "Sin(x1)^2 + Cos(x1)^2",
+> "Relu(x1)" ];;
+
+gap> out := SimplifyExpressionUsingPython( vars, exprs );;
+gap> Assert( 0, IsDenseList( out ) );
+gap> Assert( 0, Length( out ) = Length( exprs ) );
+gap> Assert( 0, ForAll( out, IsString ) );
+
+# Spot checks that are stable across SymPy versions.
+gap> Assert( 0, out[1] = "x1" );
+gap> Assert( 0, out[2] = "x2" );
+gap> Assert( 0, out[3] = "x1" );
+gap> Assert( 0, out[4] = "x2" );
+gap> Assert( 0, out[5] = "0" );
+gap> Assert( 0, out[6] in [ "x1**5", "x1^5" ] );
+gap> Assert( 0, out[8] in [ "1", "1.0" ] );
+
+# Expressions containing Diff are intentionally not passed through python and must be preserved.
+gap> exprs2 := [ "Diff( [\"x1\"], \"x1^2\", 1 )( [x1] )", "x1 + 0" ];;
+gap> out2 := SimplifyExpressionUsingPython( ["x1"], exprs2 );;
+gap> Assert( 0, out2[1] = exprs2[1] );
+gap> Assert( 0, out2[2] = "x1" );
+
+# A small randomized batch: ensure it returns without error and preserves length.
+gap> RandSeed := 12345;;
+gap> Reset( GlobalMersenneTwister, RandSeed );;
+gap> ops := [ "+", "-", "*" ];;
+gap> rand_exprs := List( [ 1 .. 20 ], i ->
+> Concatenation(
+> "(",
+> Random( vars ),
+> Random( ops ),
+> String( Random( [ 0, 1, 2, 3 ] ) ),
+> ")",
+> Random( ops ),
+> "(",
+> Random( vars ),
+> Random( ops ),
+> "0",
+> ")" ) );;
+gap> rand_out := SimplifyExpressionUsingPython( vars, rand_exprs );;
+gap> Assert( 0, Length( rand_out ) = Length( rand_exprs ) );
+gap> Assert( 0, ForAll( rand_out, IsString ) );
diff --git a/tst/testall.g b/tst/testall.g
index ae003a2..38070d2 100644
--- a/tst/testall.g
+++ b/tst/testall.g
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-# GradientDescentForCAP: Exploring categorical machine learning in CAP
+# GradientBasedLearningForCAP: Gradient Based Learning via Category Theory
#
# This file runs package tests. It is also referenced in the package
# metadata in PackageInfo.g.
@@ -30,6 +30,6 @@ else
fi;
-TestDirectory( DirectoriesPackageLibrary( "GradientDescentForCAP", "tst" ), options );
+TestDirectory( DirectoriesPackageLibrary( "GradientBasedLearningForCAP", "tst" ), options );
FORCE_QUIT_GAP( 1 ); # if we ever get here, there was an error