Skip to content
This repository was archived by the owner on May 11, 2023. It is now read-only.

Commit 46f4c06

Browse files
committed
Issue #29: Added more code examples and a better structured menu for the demo application.
1 parent 0211811 commit 46f4c06

File tree

5 files changed

+327
-19
lines changed

5 files changed

+327
-19
lines changed
Lines changed: 200 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,200 @@
1+
using RCNet.CsvTools;
2+
using RCNet.Extensions;
3+
using RCNet.MathTools;
4+
using RCNet.Neural.Activation;
5+
using RCNet.Neural.Data;
6+
using RCNet.Neural.Data.Filter;
7+
using RCNet.Neural.Network.NonRecurrent;
8+
using RCNet.Neural.Network.NonRecurrent.FF;
9+
using System;
10+
using System.Collections.Generic;
11+
using System.Globalization;
12+
using System.Text;
13+
14+
namespace Demo.DemoConsoleApp.Examples.NonRecurrent
{
    /// <summary>
    /// Shows how to use the TNRNetClusterChain and the TNRNetClusterChainBuilder as standalone
    /// components for classification.
    /// The example expects the following csv datafiles in the ./Data subfolder:
    /// LibrasMovement_train.csv and LibrasMovement_verify.csv,
    /// ProximalPhalanxOutlineAgeGroup_train.csv and ProximalPhalanxOutlineAgeGroup_verify.csv.
    /// </summary>
    class Classification_TNRNetClusterChain_FromScratch : NonRecurrentExampleBase
    {
        //Constructor
        public Classification_TNRNetClusterChain_FromScratch()
            : base()
        {
        }

        //Methods
        /// <summary>
        /// Displays information about the network cluster chain build process progress.
        /// </summary>
        /// <param name="buildProgress">The current state of the build process.</param>
        protected void OnClusterChainBuildProgressChanged(TNRNetClusterChainBuilder.BuildProgress buildProgress)
        {
            //Report every 5th epoch, or whenever the builder flags the state as worth reporting.
            const int ReportEpochsInterval = 5;
            bool periodicReport = (buildProgress.EndNetworkEpochNum % ReportEpochsInterval == 0);
            if (!buildProgress.ShouldBeReported && !periodicReport)
            {
                return;
            }
            //NOTE(review): the second argument appears to control in-place overwriting of the
            //previously written console line - confirm against the logger implementation.
            _log.Write(buildProgress.GetInfoText(4), !(buildProgress.NewEndNetwork));
        }

        /// <summary>
        /// Trains a network cluster chain to perform the classification task and then verifies its performance.
        /// </summary>
        /// <param name="name">The name of a classification task.</param>
        /// <param name="trainDataFile">The name of a csv datafile containing the training data.</param>
        /// <param name="verifyDataFile">The name of a csv datafile containing the verification data.</param>
        /// <param name="numOfClasses">The number of classes.</param>
        /// <param name="foldDataRatio">The part of the training data reserved for testing. It determines the size of a data fold and thereby the number of networks within each cluster.</param>
        private void PerformClassification(string name, string trainDataFile, string verifyDataFile, int numOfClasses, double foldDataRatio)
        {
            _log.Write($"{name} classification performed by the Probabilistic cluster chain ({numOfClasses.ToString(CultureInfo.InvariantCulture)} classes).");
            //Load the csv data and create the vector bundles
            _log.Write($"Loading {trainDataFile}...");
            VectorBundle trainingData = VectorBundle.Load(new CsvDataHolder(trainDataFile), numOfClasses);
            _log.Write($"Loading {verifyDataFile}...");
            VectorBundle verificationData = VectorBundle.Load(new CsvDataHolder(verifyDataFile), numOfClasses);
            //Standardize the input data of both bundles using filters prepared on the training data
            FeatureFilterBase[] inputFilters = PrepareInputFeatureFilters(trainingData);
            StandardizeInputVectors(trainingData, inputFilters);
            StandardizeInputVectors(verificationData, inputFilters);
            //Output data is already in the 0/1 form requested by the SoftMax activation, so it needs
            //no modification. Only the binary feature filters requested by the cluster chain builder
            //have to be allocated.
            FeatureFilterBase[] outputFilters = new BinFeatureFilter[numOfClasses];
            for (int classIdx = 0; classIdx < numOfClasses; classIdx++)
            {
                outputFilters[classIdx] = new BinFeatureFilter(Interval.IntZP1);
            }
            //Cluster chain configuration (two chained clusters)
            //End-networks of the first cluster: two networks with different structures are trained
            //for every testing fold. The output activation must be SoftMax because the
            //Probabilistic cluster is used.
            List<FeedForwardNetworkSettings> firstClusterNetCfgs = new List<FeedForwardNetworkSettings>
            {
                //Two hidden layers of 30 TanH activated neurons.
                new FeedForwardNetworkSettings(
                    new AFAnalogSoftMaxSettings(),
                    new HiddenLayersSettings(
                        new HiddenLayerSettings(30, new AFAnalogTanHSettings()),
                        new HiddenLayerSettings(30, new AFAnalogTanHSettings())
                        ),
                    new RPropTrainerSettings(3, 1000)
                    ),
                //Two hidden layers of 30 LeakyReLU activated neurons.
                new FeedForwardNetworkSettings(
                    new AFAnalogSoftMaxSettings(),
                    new HiddenLayersSettings(
                        new HiddenLayerSettings(30, new AFAnalogLeakyReLUSettings()),
                        new HiddenLayerSettings(30, new AFAnalogLeakyReLUSettings())
                        ),
                    new RPropTrainerSettings(3, 1000)
                    )
            };
            //The first probabilistic network cluster configuration
            TNRNetClusterProbabilisticSettings firstClusterCfg =
                new TNRNetClusterProbabilisticSettings(
                    new TNRNetClusterProbabilisticNetworksSettings(firstClusterNetCfgs),
                    new TNRNetClusterProbabilisticWeightsSettings()
                    );
            //End-network of the second cluster: one network is trained for every testing fold.
            List<FeedForwardNetworkSettings> secondClusterNetCfgs = new List<FeedForwardNetworkSettings>
            {
                //Two hidden layers of 30 Elliot activated neurons.
                new FeedForwardNetworkSettings(
                    new AFAnalogSoftMaxSettings(),
                    new HiddenLayersSettings(
                        new HiddenLayerSettings(30, new AFAnalogElliotSettings()),
                        new HiddenLayerSettings(30, new AFAnalogElliotSettings())
                        ),
                    new RPropTrainerSettings(3, 1000)
                    )
            };
            //The second probabilistic network cluster configuration
            TNRNetClusterProbabilisticSettings secondClusterCfg =
                new TNRNetClusterProbabilisticSettings(
                    new TNRNetClusterProbabilisticNetworksSettings(secondClusterNetCfgs),
                    new TNRNetClusterProbabilisticWeightsSettings()
                    );
            //Probabilistic network cluster chain configuration
            ITNRNetClusterChainSettings chainCfg =
                new TNRNetClusterChainProbabilisticSettings(
                    new CrossvalidationSettings(foldDataRatio),
                    new TNRNetClustersProbabilisticSettings(firstClusterCfg, secondClusterCfg)
                    );

            //Training
            _log.Write($"Cluster chain training on {trainDataFile}...");
            TNRNetClusterChainBuilder builder = new TNRNetClusterChainBuilder("Probabilistic Cluster Chain", chainCfg);
            //Register the progress event handler
            builder.ChainBuildProgressChanged += OnClusterChainBuildProgressChanged;
            //Build the trained network cluster chain.
            TNRNetClusterChain trainedChain = builder.Build(trainingData, outputFilters);

            //Verification
            _log.Write(string.Empty);
            _log.Write(string.Empty);
            _log.Write($"Cluster chain verification on {verifyDataFile}...");
            _log.Write(string.Empty);
            int errorCount = 0;
            int sampleCount = verificationData.InputVectorCollection.Count;
            for (int sampleIdx = 0; sampleIdx < sampleCount; sampleIdx++)
            {
                double[] computedVector = trainedChain.Compute(verificationData.InputVectorCollection[sampleIdx], out _);
                //Compare the winner class index computed by the chain against the real one
                if (computedVector.MaxIdx() != verificationData.OutputVectorCollection[sampleIdx].MaxIdx())
                {
                    ++errorCount;
                }
                _log.Write($"({sampleIdx + 1}/{sampleCount}) Errors: {errorCount}", true);
            }
            _log.Write(string.Empty);
            double accuracy = 1d - (double)errorCount / (double)sampleCount;
            _log.Write($"Accuracy {accuracy.ToString(CultureInfo.InvariantCulture)}");
            _log.Write(string.Empty);
        }

        /// <summary>
        /// Runs the example code.
        /// </summary>
        public void Run()
        {
            Console.Clear();
            //Libras Movement: 15 classes. The requested testing data fold ratio is deliberately
            //too small and will be corrected automatically to the minimum viable ratio.
            PerformClassification("Libras Movement",
                                  "./Data/LibrasMovement_train.csv",
                                  "./Data/LibrasMovement_verify.csv",
                                  15,
                                  1e-20
                                  );
            _log.Write(string.Empty);
            _log.Write("Press Enter to continue with the next classification case...");
            Console.ReadLine();
            _log.Write(string.Empty);
            //Proximal Phalanx Outline Age Group: 3 classes, 10% of the training data per fold.
            PerformClassification("Proximal Phalanx Outline Age Group",
                                  "./Data/ProximalPhalanxOutlineAgeGroup_train.csv",
                                  "./Data/ProximalPhalanxOutlineAgeGroup_verify.csv",
                                  3,
                                  0.1d
                                  );
        }

    }//Classification_TNRNetClusterChain_FromScratch

}//Namespace

Demo/DemoConsoleApp/Examples/NonRecurrent/Classification_TNRNetCluster_FromScratch.cs

Lines changed: 24 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ namespace Demo.DemoConsoleApp.Examples.NonRecurrent
1919
/// LibrasMovement_train.csv and LibrasMovement_verify.csv
2020
/// ProximalPhalanxOutlineAgeGroup_train.csv and ProximalPhalanxOutlineAgeGroup_verify.csv
2121
/// </summary>
22-
class Classification_TNRNetCluster_FromScratch : ExampleBase
22+
class Classification_TNRNetCluster_FromScratch : NonRecurrentExampleBase
2323
{
2424
//Constructor
2525
public Classification_TNRNetCluster_FromScratch()
@@ -58,15 +58,29 @@ protected void OnClusterBuildProgressChanged(TNRNetClusterBuilder.BuildProgress
5858
private void PerformClassification(string name, string trainDataFile, string verifyDataFile, int numOfClasses, double foldDataRatio)
5959
{
6060
_log.Write($"{name} classification performed by the Probabilistic cluster ({numOfClasses.ToString(CultureInfo.InvariantCulture)} classes).");
61-
//Load csv data and create vector bundle
61+
//Load csv data and create vector bundles
6262
_log.Write($"Loading {trainDataFile}...");
6363
CsvDataHolder trainCsvData = new CsvDataHolder(trainDataFile);
6464
VectorBundle trainData = VectorBundle.Load(trainCsvData, numOfClasses);
6565
_log.Write($"Loading {verifyDataFile}...");
6666
CsvDataHolder verifyCsvData = new CsvDataHolder(verifyDataFile);
6767
VectorBundle verifyData = VectorBundle.Load(verifyCsvData, numOfClasses);
68-
_log.Write($"Cluster training on {trainDataFile}...");
69-
//Training
68+
//Input data standardization
69+
//Allocation and preparation of the input feature filters
70+
FeatureFilterBase[] inputFeatureFilters = PrepareInputFeatureFilters(trainData);
71+
//Standardize training input data
72+
StandardizeInputVectors(trainData, inputFeatureFilters);
73+
//Standardize verification input data
74+
StandardizeInputVectors(verifyData, inputFeatureFilters);
75+
//Output data
76+
//Output data is already in the 0/1 form requested by the SoftMax activation so we don't
77+
//need to modify it. We only allocate the binary feature filters requested by the cluster builder.
78+
FeatureFilterBase[] outputFeatureFilters = new BinFeatureFilter[numOfClasses];
79+
for (int i = 0; i < numOfClasses; i++)
80+
{
81+
outputFeatureFilters[i] = new BinFeatureFilter(Interval.IntZP1);
82+
}
83+
//Cluster configuration
7084
//End-networks configuration. For every testing fold will be trained two end-networks with different structure.
7185
List<FeedForwardNetworkSettings> netCfgs = new List<FeedForwardNetworkSettings>
7286
{
@@ -78,19 +92,21 @@ private void PerformClassification(string name, string trainDataFile, string ver
7892
),
7993
new RPropTrainerSettings(3, 1000)
8094
),
81-
//The second FF network will have two hidden layers, one of 30 LeakyReLU activated neurons and one of 30 TanH activated neurons.
95+
//The second FF network will have two hidden layers of 30 LeakyReLU activated neurons.
8296
//Output layer will have the SoftMax activation (it must be SoftMax because we will use the Probabilistic cluster).
8397
new FeedForwardNetworkSettings(new AFAnalogSoftMaxSettings(),
8498
new HiddenLayersSettings(new HiddenLayerSettings(30, new AFAnalogLeakyReLUSettings()),
85-
new HiddenLayerSettings(30, new AFAnalogTanHSettings())
99+
new HiddenLayerSettings(30, new AFAnalogLeakyReLUSettings())
86100
),
87101
new RPropTrainerSettings(3, 1000)
88102
)
89103
};
90-
//Probabilistic network cluster configuration
104+
//Probabilistic network cluster configuration instance
91105
ITNRNetClusterSettings clusterCfg = new TNRNetClusterProbabilisticSettings(new TNRNetClusterProbabilisticNetworksSettings(netCfgs),
92106
new TNRNetClusterProbabilisticWeightsSettings()
93107
);
108+
//Training
109+
_log.Write($"Cluster training on {trainDataFile}...");
94110
//An instance of network cluster builder.
95111
TNRNetClusterBuilder builder =
96112
new TNRNetClusterBuilder("Probabilistic Cluster",
@@ -102,12 +118,7 @@ private void PerformClassification(string name, string trainDataFile, string ver
102118
//Register progress event handler
103119
builder.ClusterBuildProgressChanged += OnClusterBuildProgressChanged;
104120
//Build the trained network cluster.
105-
FeatureFilterBase[] filters = new BinFeatureFilter[numOfClasses];
106-
for (int i = 0; i < numOfClasses; i++)
107-
{
108-
filters[i] = new BinFeatureFilter(Interval.IntZP1);
109-
}
110-
TNRNetCluster trainedCluster = builder.Build(trainData, filters);
121+
TNRNetCluster trainedCluster = builder.Build(trainData, outputFeatureFilters);
111122

112123
//Verification
113124
_log.Write(string.Empty);

0 commit comments

Comments
 (0)