class net:
    """MC-dropout neural network for binary classification.

    Dropout is kept active at prediction time (``training=True``) so that
    repeated stochastic forward passes approximate the Bayesian predictive
    distribution (Gal & Ghahramani, "Dropout as a Bayesian Approximation").
    """

    def __init__(self, X_train, y_train, n_hidden, n_epochs=40,
                 normalize=False, tau=1.0, dropout=0.05):
        """Build and train the dropout network.

        @param X_train   Matrix with the features for the training data.
        @param y_train   Vector with the integer class labels (0/1) for the
                         training data.
        @param n_hidden  Vector with the number of neurons for each hidden
                         layer.
        @param n_epochs  Number of epochs for which to train the network.
                         The recommended value 40 should be enough.
        @param normalize Whether to normalize the input features.  NOTE: the
                         normalization code was disabled (commented out) in
                         the original, so this flag currently has no effect;
                         it is kept for interface compatibility.
        @param tau       Tau value used for regularization.
        @param dropout   Dropout rate for all the dropout layers in the
                         network.
        """
        N = X_train.shape[0]
        batch_size = 128
        lengthscale = 1e-2
        # L2 weight-decay strength implied by the dropout/GP correspondence.
        reg = lengthscale**2 * (1 - dropout) / (2. * N * tau)

        inputs = Input(shape=(X_train.shape[1],))
        inter = Dropout(dropout)(inputs, training=True)
        inter = Dense(n_hidden[0], activation='relu', kernel_regularizer=l2(reg))(inter)
        # BUG FIX: the original looped over range(1, len(n_hidden) - 1) and
        # silently dropped the last hidden layer (with two hidden layers it
        # built only one); iterate over every remaining hidden layer.
        for i in range(1, len(n_hidden)):
            inter = Dropout(dropout)(inter, training=True)
            inter = Dense(n_hidden[i], activation='relu', kernel_regularizer=l2(reg))(inter)
        inter = Dropout(dropout)(inter, training=True)
        outputs = Dense(2, kernel_regularizer=l2(reg))(inter)
        outputs = Softmax()(outputs)
        model = Model(inputs, outputs)

        # BUG FIX: integer labels against a 2-way softmax require
        # sparse_categorical_crossentropy; binary_crossentropy expects a
        # single sigmoid output and mis-pairs the targets here.
        model.compile(loss='sparse_categorical_crossentropy', optimizer='adam')

        # We iterate the learning process, timing it for reporting.
        start_time = time.time()
        model.fit(X_train, y_train, batch_size=batch_size, epochs=n_epochs, verbose=0)
        self.model = model
        self.tau = tau
        self.running_time = time.time() - start_time

    def predict(self, X_test, y_test):
        """Evaluate the network on held-out data.

        @param X_test  The matrix of features for the test data.
        @param y_test  Vector of integer class labels for the test data.

        @return accuracy_standard_pred  Accuracy of a single forward pass
                                        (dropout is still active, so this is
                                        one stochastic sample).
        @return mc_accuracy             Accuracy of the MC-dropout average of
                                        T stochastic forward passes.
        @return test_ll                 Average predictive log-likelihood
                                        log p(y|x) under the MC predictive
                                        distribution (higher is better).
        """
        X_test = np.array(X_test, ndmin=2)
        y_test = np.array(y_test).astype(int).squeeze()

        model = self.model
        standard_pred_probs = model.predict(X_test, batch_size=500, verbose=1)
        standard_pred = np.argmax(standard_pred_probs, axis=1)
        accuracy_standard_pred = np.mean(y_test == standard_pred)
        print(f'Standard Accuracy: {accuracy_standard_pred}')

        # Number of stochastic forward passes which will then be averaged.
        # Originally 10_000 in the paper's code; set to 100 here for speed.
        T = 100
        Yt_hat = np.array([model.predict(X_test, batch_size=64, verbose=0)
                           for _ in range(T)])
        MC_probs = np.mean(Yt_hat, axis=0)
        MC_pred = np.argmax(MC_probs, axis=1)
        mc_accuracy = np.mean(y_test == MC_pred)
        print(f'MC Accuracy: {mc_accuracy}')

        # BUG FIX: the original (flagged "# double check this!") paired the
        # one-hot columns backwards -- np.hstack((y, 1 - y)) puts the label in
        # the class-0 probability column -- and returned a *positive*
        # cross-entropy-style value, so the grid search, which maximizes this
        # quantity, selected the worst model.  Report the mean log-probability
        # assigned to the true class under the MC predictive instead.
        eps = 1e-12  # guards log(0) when the network is overconfident
        test_ll = np.mean(np.log(MC_probs[np.arange(y_test.shape[0]), y_test] + eps))

        print(f'Test LL: {test_ll}')
        return accuracy_standard_pred, mc_accuracy, test_ll
# Copyright 2016, Yarin Gal, All rights reserved.
# This code is based on the code by Jose Miguel Hernandez-Lobato used for his
# paper "Probabilistic Backpropagation for Scalable Learning of Bayesian Neural Networks".
#
# Protocol:
#   1. Create random splits of the training-test dataset.
#   2. For each split, carve a validation set out of the training set,
#      grid-search dropout_rate and tau on it, retrain on the full training
#      set with the best pair, and report test accuracy / log-likelihood.

import math
import numpy as np
from sklearn.model_selection import train_test_split

# We fix the random seed for reproducibility.
np.random.seed(1)


def write_to_file(filename, txt, dropout_rate, tau):
    """Append one 'Dropout_Rate: <d> Tau: <t> :: <value>' line to *filename*."""
    with open(filename, "a") as myfile:
        myfile.write('Dropout_Rate: ' + repr(dropout_rate) + ' Tau: ' + repr(tau) + ' :: ')
        myfile.write(repr(txt) + '\n')


def run_half_moons_experiment(X, y, hidden_layers, n_epochs, epoch_multiplier,
                              dropout_rates, taus, normalize):
    """Grid-search (dropout_rate, tau) on a validation split, then retrain.

    The last 20% of (X, y) is used as the validation set.  For every
    hyperparameter pair a network is trained on the remaining 80% and scored
    on the validation set; the pair with the highest validation
    log-likelihood wins.  A final network is then trained on the same 80%
    with the winning pair and returned.

    @param X, y             Training features / integer labels.
    @param hidden_layers    Neurons per hidden layer, e.g. [50, 50].
    @param n_epochs         Base number of training epochs.
    @param epoch_multiplier Factor applied to n_epochs for every training run.
    @param dropout_rates    Candidate dropout rates for the grid search.
    @param taus             Candidate tau values for the grid search.
    @param normalize        Passed through to `net`.
    @return                 The trained `net` with the best hyperparameters.
    """
    num_training_examples = int(0.8 * X.shape[0])
    X_validation = X[num_training_examples:, :]
    y_validation = y[num_training_examples:]
    X_train = X[0:num_training_examples, :]
    y_train = y[0:num_training_examples]

    # BUG FIX: the original also printed X_test.shape here, but X_test is not
    # defined in this function (it only worked via an accidental global).
    print('Number of training examples: ' + str(X_train.shape[0]))
    print('Number of validation examples: ' + str(X_validation.shape[0]))
    print('Number of train_original examples: ' + str(X.shape[0]))
    print(f'Dropout rates: {dropout_rates}')
    print(f'Taus: {taus}')

    # Grid search: keep the hyperparameters with the best validation
    # log-likelihood.
    best_ll = -float('inf')
    best_tau = None
    best_dropout = None
    for dropout_rate in dropout_rates:
        for tau in taus:
            print('Grid search step: Tau: ' + str(tau) + ' Dropout rate: ' + str(dropout_rate))
            # BUG FIX: use the epoch_multiplier *parameter*; the original read
            # the global `epochs_multiplier` and silently ignored the argument.
            network = net(X_train, y_train, hidden_layers,
                          normalize=normalize,
                          n_epochs=int(n_epochs * epoch_multiplier),
                          tau=tau,
                          dropout=dropout_rate)
            print('DONE TRAINING')

            # We obtain the test accuracy and the test ll from the validation set.
            accuracy, MC_accuracy, ll = network.predict(X_validation, y_validation)
            print('DONE PREDICTING')
            if ll > best_ll:
                best_ll = ll
                best_tau = tau
                best_dropout = dropout_rate
                print('Best log_likelihood changed to: ' + str(best_ll))
                print('Best tau changed to: ' + str(best_tau))
                print('Best dropout rate changed to: ' + str(best_dropout))

            # Storing validation results
            write_to_file(_RESULTS_VALIDATION_ACC, accuracy, dropout_rate, tau)
            write_to_file(_RESULTS_VALIDATION_MC_ACC, MC_accuracy, dropout_rate, tau)
            write_to_file(_RESULTS_VALIDATION_LL, ll, dropout_rate, tau)

    # Retrain on the full training split with the winning hyperparameters.
    # BUG FIX: honour the caller's `normalize` flag (was hard-coded to True,
    # inconsistent with the grid-search runs above).
    best_network = net(X_train, y_train, hidden_layers,
                       normalize=normalize,
                       n_epochs=int(n_epochs * epoch_multiplier),
                       tau=best_tau,
                       dropout=best_dropout)

    return best_network
0.01, 0.05, 0.1]\n", + "taus = [0.25, 0.5, 0.75]\n", + "\n", + "_RESULTS_VALIDATION_LL = \"./UCI_Datasets/\" + data_directory + \"/results/validation_ll_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_VALIDATION_RMSE = \"./UCI_Datasets/\" + data_directory + \"/results/validation_rmse_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_VALIDATION_MC_RMSE = \"./UCI_Datasets/\" + data_directory + \"/results/validation_MC_rmse_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_VALIDATION_ACC = \"./UCI_Datasets/\" + data_directory + \"/results/validation_acc_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_VALIDATION_MC_ACC = \"./UCI_Datasets/\" + data_directory + \"/results/validation_MC_acc_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "\n", + "_RESULTS_TEST_LL = \"./UCI_Datasets/\" + data_directory + \"/results/test_ll_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_TEST_TAU = \"./UCI_Datasets/\" + data_directory + \"/results/test_tau_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_TEST_RMSE = \"./UCI_Datasets/\" + data_directory + \"/results/test_rmse_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_TEST_MC_RMSE = \"./UCI_Datasets/\" + data_directory + \"/results/test_MC_rmse_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_TEST_LOG = \"./UCI_Datasets/\" + data_directory + \"/results/log_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_TEST_ACC = \"./UCI_Datasets/\" + 
data_directory + \"/results/test_acc_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "_RESULTS_TEST_MC_ACC = \"./UCI_Datasets/\" + data_directory + \"/results/test_MC_acc_\" + str(epochs_multiplier) + \"_xepochs_\" + str(num_hidden_layers) + \"_hidden_layers.txt\"\n", + "\n", + "_DATA_DIRECTORY_PATH = \"./UCI_Datasets/\" + data_directory + \"/data/\"\n", + "_DROPOUT_RATES_FILE = _DATA_DIRECTORY_PATH + \"dropout_rates.txt\"\n", + "_TAU_VALUES_FILE = _DATA_DIRECTORY_PATH + \"tau_values.txt\"\n", + "_DATA_FILE = _DATA_DIRECTORY_PATH + \"data.txt\"\n", + "_HIDDEN_UNITS_FILE = _DATA_DIRECTORY_PATH + \"n_hidden.txt\"\n", + "_EPOCHS_FILE = _DATA_DIRECTORY_PATH + \"n_epochs.txt\"\n", + "_INDEX_FEATURES_FILE = _DATA_DIRECTORY_PATH + \"index_features.txt\"\n", + "_INDEX_TARGET_FILE = _DATA_DIRECTORY_PATH + \"index_target.txt\"\n", + "_N_SPLITS_FILE = _DATA_DIRECTORY_PATH + \"n_splits.txt\"\n", + "\n", + "print (\"Removing existing result files...\")\n", + "call([\"rm\", _RESULTS_VALIDATION_LL])\n", + "call([\"rm\", _RESULTS_VALIDATION_RMSE])\n", + "call([\"rm\", _RESULTS_VALIDATION_MC_RMSE])\n", + "call([\"rm\", _RESULTS_VALIDATION_ACC])\n", + "call([\"rm\", _RESULTS_VALIDATION_MC_ACC])\n", + "call([\"rm\", _RESULTS_TEST_LL])\n", + "call([\"rm\", _RESULTS_TEST_TAU])\n", + "call([\"rm\", _RESULTS_TEST_RMSE])\n", + "call([\"rm\", _RESULTS_TEST_MC_RMSE])\n", + "call([\"rm\", _RESULTS_TEST_ACC])\n", + "call([\"rm\", _RESULTS_TEST_MC_ACC])\n", + "call([\"rm\", _RESULTS_TEST_LOG])\n", + "print (\"Result files removed.\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ab60b8ce", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "from sklearn.datasets import make_moons\n", + "import matplotlib.pyplot as plt\n", + "\n", + "X, y = make_moons(500, noise=0.1)\n", + "plt.scatter(X[:, 0], X[:, 1], c=y)\n", + "plt.show()\n", + "\n", + "from sklearn.preprocessing import LabelEncoder\n", + 
import math

import matplotlib.pyplot as plt
import numpy as np
from sklearn.datasets import make_moons
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder

# Generate and visualize the two-moons dataset.
X, y = make_moons(500, noise=0.1)
plt.scatter(X[:, 0], X[:, 1], c=y)
plt.show()

# encode class values as integers
encoder = LabelEncoder()
encoder.fit(y)
encoded_Y = encoder.transform(y)
print(encoded_Y)

accuracies, MC_accuracies, lls = [], [], []
for i in range(n_splits):
    # BUG FIX: random_state was hard-coded to 100, so all n_splits "random"
    # splits were byte-identical and the split loop was pointless.  Seed with
    # the split index so the splits differ while staying reproducible.
    X_train, X_test, y_train, y_test = train_test_split(
        X, encoded_Y, train_size=0.8, random_state=i)
    print(y_train.mean())
    print(y_test.mean())

    # Tune on train/validation sets, returning the best trained network.
    best_network_trained = run_half_moons_experiment(X_train,
                                                     y_train,
                                                     hidden_layers,
                                                     n_epochs,
                                                     epochs_multiplier,
                                                     dropout_rates,
                                                     taus,
                                                     normalize=False)

    # Predict on the held-out test set.
    accuracy, MC_accuracy, ll = best_network_trained.predict(X_test, y_test)

    with open(_RESULTS_TEST_ACC, "a") as myfile:
        myfile.write(repr(accuracy) + '\n')

    with open(_RESULTS_TEST_MC_ACC, "a") as myfile:
        myfile.write(repr(MC_accuracy) + '\n')

    with open(_RESULTS_TEST_LL, "a") as myfile:
        myfile.write(repr(ll) + '\n')

    with open(_RESULTS_TEST_TAU, "a") as myfile:
        myfile.write(repr(best_network_trained.tau) + '\n')

    print("Tests on split " + str(i) + " complete.")

    accuracies.append(accuracy)
    MC_accuracies.append(MC_accuracy)
    lls.append(ll)


def _summary_line(label, values):
    # One aggregate line in exactly the format the original wrote.
    return ('%s %f +- %f (stddev) +- %f (std error), median %f 25p %f 75p %f \n' % (
        label, np.mean(values), np.std(values),
        np.std(values) / math.sqrt(n_splits),
        np.percentile(values, 50), np.percentile(values, 25),
        np.percentile(values, 75)))


with open(_RESULTS_TEST_LOG, "a") as myfile:
    myfile.write(_summary_line('accuracies', accuracies))
    myfile.write(_summary_line('MC accuracies', MC_accuracies))
    myfile.write(_summary_line('lls', lls))
+0.9679484224283447 -0.4994862162006879 1 +0.28452758663103267 0.9586678530366606 0 +1.032051577571655 -0.4994862162006879 1 +-0.5183925683105249 0.8551427630053462 0 +1.8713187041233892 0.009282447996061816 1 +1.2845275866310324 -0.4586678530366607 1 +1.0 0.0 0 +0.3765101981412664 -0.2818314824680297 1 +0.3453650544213078 0.9384684220497603 0 +-0.900968867902419 0.43388373911755823 0 +0.09903113209758085 0.06611626088244194 1 +0.7614459583691344 0.6482283953077884 0 +1.3453650544213076 -0.43846842204976044 1 +1.2225209339563143 -0.4749279121818236 1 +1.9490557470106686 0.18489178197637912 1 +0.9039769740923181 -0.49537911294919823 1 +0.03205157757165533 0.9994862162006879 0 +-0.15959989503337899 0.9871817834144502 0 +0.8014136218679567 0.5981105304912159 0 +0.5721166601221698 0.8201722545969559 0 +-0.28452758663103234 0.9586678530366607 0 +0.1619118951081593 -0.045534901210548595 1 +1.9269167573460217 0.12473299512062552 1 +0.6723008902613169 0.7402779970753155 0 +0.1286812958766106 0.009282447996062149 1 +1.5721166601221694 -0.3201722545969561 1 +1.8014136218679564 -0.0981105304912161 1 +0.40478334312239395 0.9144126230158124 0 +0.5183925683105252 0.8551427630053461 0 +1.0960230259076815 -0.49537911294919823 1 +0.46253829024083537 0.886599306373 0 +0.0 0.5 1 +0.09602302590768187 0.9953791129491982 0 +1.6723008902613166 -0.24027799707531572 1 +-0.5721166601221694 0.8201722545969561 0 +1.9672948630390295 0.2463454160904922 1 +-0.9269167573460216 0.3752670048793745 0 +-0.9672948630390293 0.2536545839095078 0 +1.9815591569910653 0.30884137129862743 1 +0.9815591569910653 0.1911586287013723 0 +-0.9979453927503363 0.06407021998071323 0 +1.6234898018587334 -0.2818314824680299 1 +0.8380881048918407 0.5455349012105486 0 +-0.22252093395631434 0.9749279121818236 0 +0.6234898018587336 0.7818314824680297 0 +-0.7614459583691342 0.6482283953077888 0 +2.0 0.4999999999999999 1 +0.03270513696097055 0.24634541609049265 1 +1.9979453927503363 0.43592978001928673 1 +-0.8380881048918404 
0.545534901210549 0 +0.9979453927503363 0.06407021998071291 0 +0.9490557470106686 0.31510821802362066 0 +0.9269167573460217 0.3752670048793741 0 +0.5374617097591646 -0.386599306373 1 +-0.09602302590768155 0.9953791129491982 0 +0.8713187041233894 0.49071755200393785 0 +-0.4625382902408351 0.8865993063730001 0 +-0.9490557470106686 0.3151082180236208 0 +-0.9815591569910653 0.19115862870137254 0 +1.159599895033379 -0.4871817834144502 1 +-0.991790013823246 0.1278771616845065 0 +0.9009688679024191 0.4338837391175581 0 +0.8404001049666208 -0.48718178341445006 1 +-0.6723008902613165 0.7402779970753157 0 +-0.7183493500977275 0.6956825506034865 0 +1.991790013823246 0.37212283831549353 1 +0.4278833398778302 -0.32017225459695586 1 +0.9672948630390295 0.25365458390950735 0 +1.5183925683105248 -0.3551427630053462 1 +0.01844084300893467 0.30884137129862776 1 +-0.40478334312239367 0.9144126230158125 0 +-0.8713187041233892 0.49071755200393813 0 +0.22252093395631445 0.9749279121818236 0 +1.462538290240835 -0.3865993063730001 1 +0.05094425298933136 0.18489178197637934 1 +1.7614459583691342 -0.14822839530778875 1 +-0.8014136218679565 0.5981105304912161 0 +-0.6234898018587334 0.7818314824680299 0 +0.7154724133689674 -0.4586678530366606 1 +0.7183493500977276 0.6956825506034864 0 +0.0020546072496636647 0.43592978001928706 1 +0.2816506499022724 -0.19568255060348638 1 +1.4047833431223937 -0.4144126230158125 1 +0.23855404163086558 -0.14822839530778842 1 diff --git a/UCI_Datasets/half_moons/data/dropout_rates.txt b/UCI_Datasets/half_moons/data/dropout_rates.txt new file mode 100644 index 0000000..fcac110 --- /dev/null +++ b/UCI_Datasets/half_moons/data/dropout_rates.txt @@ -0,0 +1,4 @@ +0.005 +0.01 +0.05 +0.1 \ No newline at end of file diff --git a/UCI_Datasets/half_moons/data/index_features.txt b/UCI_Datasets/half_moons/data/index_features.txt new file mode 100644 index 0000000..736a5fe --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_features.txt @@ -0,0 +1,4 @@ +0 +1 +2 + diff 
--git a/UCI_Datasets/half_moons/data/index_target.txt b/UCI_Datasets/half_moons/data/index_target.txt new file mode 100644 index 0000000..0cfbf08 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_target.txt @@ -0,0 +1 @@ +2 diff --git a/UCI_Datasets/half_moons/data/index_test_0.txt b/UCI_Datasets/half_moons/data/index_test_0.txt new file mode 100644 index 0000000..947a992 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_0.txt @@ -0,0 +1,10 @@ +1 +16 +64 +79 +5 +75 +9 +72 +12 +37 diff --git a/UCI_Datasets/half_moons/data/index_test_1.txt b/UCI_Datasets/half_moons/data/index_test_1.txt new file mode 100644 index 0000000..f733e26 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_1.txt @@ -0,0 +1,10 @@ +7 +75 +21 +91 +76 +2 +70 +85 +52 +6 diff --git a/UCI_Datasets/half_moons/data/index_test_10.txt b/UCI_Datasets/half_moons/data/index_test_10.txt new file mode 100644 index 0000000..84066ae --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_10.txt @@ -0,0 +1,10 @@ +10 +22 +51 +99 +58 +86 +64 +28 +95 +88 diff --git a/UCI_Datasets/half_moons/data/index_test_11.txt b/UCI_Datasets/half_moons/data/index_test_11.txt new file mode 100644 index 0000000..5e7711f --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_11.txt @@ -0,0 +1,10 @@ +52 +42 +48 +57 +41 +95 +18 +38 +64 +99 diff --git a/UCI_Datasets/half_moons/data/index_test_12.txt b/UCI_Datasets/half_moons/data/index_test_12.txt new file mode 100644 index 0000000..053dc2d --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_12.txt @@ -0,0 +1,10 @@ +45 +71 +40 +51 +27 +41 +59 +7 +61 +99 diff --git a/UCI_Datasets/half_moons/data/index_test_13.txt b/UCI_Datasets/half_moons/data/index_test_13.txt new file mode 100644 index 0000000..0a86987 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_13.txt @@ -0,0 +1,10 @@ +79 +58 +24 +77 +54 +59 +57 +20 +5 +42 diff --git a/UCI_Datasets/half_moons/data/index_test_14.txt b/UCI_Datasets/half_moons/data/index_test_14.txt new file mode 
100644 index 0000000..e1666ef --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_14.txt @@ -0,0 +1,10 @@ +83 +49 +18 +68 +88 +20 +42 +55 +76 +90 diff --git a/UCI_Datasets/half_moons/data/index_test_15.txt b/UCI_Datasets/half_moons/data/index_test_15.txt new file mode 100644 index 0000000..5f4301d --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_15.txt @@ -0,0 +1,10 @@ +61 +33 +22 +34 +51 +25 +35 +41 +97 +15 diff --git a/UCI_Datasets/half_moons/data/index_test_16.txt b/UCI_Datasets/half_moons/data/index_test_16.txt new file mode 100644 index 0000000..6b5c805 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_16.txt @@ -0,0 +1,10 @@ +76 +99 +21 +73 +1 +88 +28 +80 +48 +0 diff --git a/UCI_Datasets/half_moons/data/index_test_17.txt b/UCI_Datasets/half_moons/data/index_test_17.txt new file mode 100644 index 0000000..e7c0e04 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_17.txt @@ -0,0 +1,10 @@ +63 +42 +38 +32 +93 +74 +46 +9 +69 +11 diff --git a/UCI_Datasets/half_moons/data/index_test_18.txt b/UCI_Datasets/half_moons/data/index_test_18.txt new file mode 100644 index 0000000..3bea00c --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_18.txt @@ -0,0 +1,10 @@ +35 +44 +56 +59 +78 +16 +99 +18 +92 +68 diff --git a/UCI_Datasets/half_moons/data/index_test_19.txt b/UCI_Datasets/half_moons/data/index_test_19.txt new file mode 100644 index 0000000..69a88b9 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_19.txt @@ -0,0 +1,10 @@ +36 +45 +87 +76 +73 +88 +47 +97 +84 +86 diff --git a/UCI_Datasets/half_moons/data/index_test_2.txt b/UCI_Datasets/half_moons/data/index_test_2.txt new file mode 100644 index 0000000..2a71547 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_2.txt @@ -0,0 +1,10 @@ +82 +55 +28 +32 +54 +48 +83 +84 +2 +33 diff --git a/UCI_Datasets/half_moons/data/index_test_3.txt b/UCI_Datasets/half_moons/data/index_test_3.txt new file mode 100644 index 0000000..fb25d8a --- /dev/null +++ 
b/UCI_Datasets/half_moons/data/index_test_3.txt @@ -0,0 +1,10 @@ +12 +76 +54 +91 +22 +37 +44 +29 +85 +58 diff --git a/UCI_Datasets/half_moons/data/index_test_4.txt b/UCI_Datasets/half_moons/data/index_test_4.txt new file mode 100644 index 0000000..294fa1e --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_4.txt @@ -0,0 +1,10 @@ +87 +14 +6 +22 +11 +37 +92 +16 +58 +21 diff --git a/UCI_Datasets/half_moons/data/index_test_5.txt b/UCI_Datasets/half_moons/data/index_test_5.txt new file mode 100644 index 0000000..a414314 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_5.txt @@ -0,0 +1,10 @@ +95 +69 +40 +8 +91 +5 +81 +48 +38 +82 diff --git a/UCI_Datasets/half_moons/data/index_test_6.txt b/UCI_Datasets/half_moons/data/index_test_6.txt new file mode 100644 index 0000000..09de0c9 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_6.txt @@ -0,0 +1,10 @@ +0 +88 +17 +16 +44 +40 +82 +22 +42 +63 diff --git a/UCI_Datasets/half_moons/data/index_test_7.txt b/UCI_Datasets/half_moons/data/index_test_7.txt new file mode 100644 index 0000000..01a96d5 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_7.txt @@ -0,0 +1,10 @@ +28 +3 +13 +7 +31 +51 +65 +6 +33 +32 diff --git a/UCI_Datasets/half_moons/data/index_test_8.txt b/UCI_Datasets/half_moons/data/index_test_8.txt new file mode 100644 index 0000000..20c3dda --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_8.txt @@ -0,0 +1,10 @@ +89 +96 +57 +80 +65 +38 +52 +20 +90 +32 diff --git a/UCI_Datasets/half_moons/data/index_test_9.txt b/UCI_Datasets/half_moons/data/index_test_9.txt new file mode 100644 index 0000000..2397fe0 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_test_9.txt @@ -0,0 +1,10 @@ +3 +39 +42 +0 +48 +62 +77 +87 +71 +51 diff --git a/UCI_Datasets/half_moons/data/index_train_0.txt b/UCI_Datasets/half_moons/data/index_train_0.txt new file mode 100644 index 0000000..a1eed93 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_0.txt @@ -0,0 +1,90 @@ +80 +84 +33 +81 +93 
+17 +36 +82 +69 +65 +92 +39 +56 +52 +51 +32 +31 +44 +78 +10 +2 +73 +97 +62 +19 +35 +94 +27 +46 +38 +67 +99 +54 +95 +88 +40 +48 +59 +23 +34 +86 +53 +77 +15 +83 +41 +45 +91 +26 +98 +43 +55 +24 +4 +58 +49 +21 +87 +3 +74 +30 +66 +70 +42 +47 +89 +8 +60 +0 +90 +57 +22 +61 +63 +7 +96 +13 +68 +85 +14 +29 +28 +11 +18 +20 +50 +25 +6 +71 +76 diff --git a/UCI_Datasets/half_moons/data/index_train_1.txt b/UCI_Datasets/half_moons/data/index_train_1.txt new file mode 100644 index 0000000..df25f71 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_1.txt @@ -0,0 +1,90 @@ +44 +96 +1 +35 +26 +11 +38 +82 +87 +3 +50 +0 +29 +16 +46 +61 +28 +51 +31 +8 +47 +4 +98 +56 +78 +58 +9 +83 +53 +27 +67 +34 +59 +97 +80 +14 +40 +19 +62 +92 +25 +63 +69 +49 +33 +89 +37 +79 +55 +88 +42 +17 +5 +15 +64 +48 +39 +74 +66 +99 +22 +18 +41 +71 +54 +86 +95 +73 +60 +65 +12 +32 +84 +24 +81 +23 +10 +13 +57 +68 +45 +90 +36 +30 +20 +43 +94 +93 +72 +77 diff --git a/UCI_Datasets/half_moons/data/index_train_10.txt b/UCI_Datasets/half_moons/data/index_train_10.txt new file mode 100644 index 0000000..42ba1a9 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_10.txt @@ -0,0 +1,90 @@ +54 +27 +23 +37 +75 +33 +41 +46 +52 +24 +90 +17 +40 +62 +92 +15 +47 +45 +96 +68 +3 +66 +91 +26 +77 +39 +1 +48 +72 +50 +63 +7 +32 +2 +29 +89 +5 +61 +25 +4 +83 +19 +57 +38 +49 +60 +0 +98 +82 +71 +56 +55 +44 +34 +70 +81 +97 +11 +35 +80 +13 +94 +16 +43 +69 +78 +76 +20 +14 +85 +21 +84 +79 +8 +59 +36 +93 +87 +42 +53 +65 +74 +73 +12 +18 +67 +30 +6 +9 +31 diff --git a/UCI_Datasets/half_moons/data/index_train_11.txt b/UCI_Datasets/half_moons/data/index_train_11.txt new file mode 100644 index 0000000..1dee7de --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_11.txt @@ -0,0 +1,90 @@ +54 +45 +28 +44 +15 +1 +93 +12 +88 +40 +53 +43 +75 +83 +87 +67 +7 +35 +16 +37 +9 +89 +14 +34 +33 +81 +5 +31 +78 +92 +47 +3 +2 +6 +94 +84 +74 +72 +24 +90 +11 +73 +58 +30 +71 +25 +10 +29 +39 +63 +76 +4 +46 +80 +55 +8 +32 +49 +79 +51 +56 +91 +13 +21 
+17 +36 +19 +66 +86 +69 +68 +23 +97 +62 +59 +85 +96 +77 +65 +82 +20 +50 +26 +0 +27 +60 +61 +70 +98 +22 diff --git a/UCI_Datasets/half_moons/data/index_train_12.txt b/UCI_Datasets/half_moons/data/index_train_12.txt new file mode 100644 index 0000000..38c3a34 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_12.txt @@ -0,0 +1,90 @@ +70 +75 +5 +68 +36 +79 +72 +78 +57 +44 +28 +80 +17 +95 +87 +50 +6 +77 +52 +37 +49 +32 +74 +81 +38 +11 +13 +33 +1 +83 +25 +69 +53 +2 +20 +89 +66 +60 +39 +84 +43 +14 +93 +73 +16 +76 +92 +65 +47 +90 +64 +26 +86 +63 +24 +15 +62 +4 +91 +48 +55 +3 +96 +30 +34 +23 +98 +8 +97 +12 +31 +42 +10 +56 +22 +29 +35 +67 +18 +85 +94 +82 +19 +0 +54 +21 +58 +46 +88 +9 diff --git a/UCI_Datasets/half_moons/data/index_train_13.txt b/UCI_Datasets/half_moons/data/index_train_13.txt new file mode 100644 index 0000000..6f82fcd --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_13.txt @@ -0,0 +1,90 @@ +41 +9 +30 +89 +40 +49 +3 +22 +8 +16 +81 +25 +61 +29 +4 +37 +88 +0 +71 +52 +74 +95 +99 +62 +94 +13 +18 +50 +48 +44 +96 +97 +90 +38 +14 +72 +10 +85 +64 +60 +65 +98 +6 +84 +12 +28 +15 +31 +35 +7 +86 +76 +19 +75 +32 +2 +34 +11 +43 +33 +56 +67 +47 +21 +87 +45 +1 +17 +92 +26 +66 +39 +46 +93 +68 +69 +53 +23 +27 +70 +78 +55 +82 +83 +73 +63 +51 +91 +80 +36 diff --git a/UCI_Datasets/half_moons/data/index_train_14.txt b/UCI_Datasets/half_moons/data/index_train_14.txt new file mode 100644 index 0000000..09936f8 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_14.txt @@ -0,0 +1,90 @@ +82 +28 +69 +81 +56 +71 +85 +27 +37 +66 +34 +98 +75 +47 +26 +8 +62 +22 +84 +14 +65 +46 +25 +13 +1 +95 +79 +70 +21 +54 +23 +41 +19 +53 +87 +99 +94 +45 +96 +3 +67 +12 +89 +57 +6 +39 +91 +92 +78 +10 +58 +63 +40 +80 +29 +72 +48 +86 +43 +44 +11 +38 +60 +24 +73 +74 +9 +0 +33 +5 +51 +50 +32 +35 +31 +59 +36 +52 +30 +93 +77 +16 +4 +2 +97 +64 +7 +61 +17 +15 diff --git a/UCI_Datasets/half_moons/data/index_train_15.txt b/UCI_Datasets/half_moons/data/index_train_15.txt new file mode 
100644 index 0000000..97d0f3d --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_15.txt @@ -0,0 +1,90 @@ +60 +13 +26 +12 +1 +28 +88 +86 +11 +43 +48 +75 +87 +55 +24 +82 +66 +50 +52 +90 +45 +80 +23 +17 +74 +68 +6 +69 +95 +19 +31 +63 +30 +93 +83 +39 +91 +70 +2 +99 +56 +81 +85 +84 +9 +20 +37 +44 +65 +0 +76 +72 +73 +54 +38 +59 +8 +16 +53 +29 +92 +79 +36 +98 +5 +4 +49 +96 +10 +18 +94 +14 +42 +71 +27 +47 +77 +62 +89 +40 +64 +58 +21 +46 +57 +67 +3 +32 +7 +78 diff --git a/UCI_Datasets/half_moons/data/index_train_16.txt b/UCI_Datasets/half_moons/data/index_train_16.txt new file mode 100644 index 0000000..c05d4be --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_16.txt @@ -0,0 +1,90 @@ +93 +16 +72 +75 +10 +4 +38 +71 +42 +62 +55 +5 +91 +14 +15 +60 +19 +39 +27 +79 +84 +2 +64 +51 +47 +18 +95 +11 +35 +68 +24 +94 +56 +9 +54 +59 +69 +8 +26 +78 +45 +50 +74 +98 +77 +7 +92 +36 +23 +61 +13 +87 +17 +66 +58 +41 +25 +31 +52 +63 +67 +85 +43 +20 +57 +96 +46 +29 +44 +89 +97 +6 +65 +90 +12 +86 +37 +70 +49 +34 +3 +22 +82 +33 +53 +30 +83 +81 +40 +32 diff --git a/UCI_Datasets/half_moons/data/index_train_17.txt b/UCI_Datasets/half_moons/data/index_train_17.txt new file mode 100644 index 0000000..43b0dad --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_17.txt @@ -0,0 +1,90 @@ +36 +47 +1 +64 +57 +97 +29 +6 +67 +92 +12 +66 +33 +82 +26 +15 +24 +91 +16 +84 +37 +53 +0 +94 +75 +19 +39 +89 +85 +44 +81 +61 +86 +65 +3 +70 +22 +78 +43 +71 +58 +8 +77 +48 +72 +28 +60 +55 +27 +62 +98 +17 +73 +2 +45 +52 +99 +80 +56 +50 +35 +41 +34 +20 +59 +54 +7 +4 +10 +30 +14 +51 +76 +21 +13 +96 +49 +68 +25 +83 +95 +79 +5 +88 +18 +31 +87 +90 +40 +23 diff --git a/UCI_Datasets/half_moons/data/index_train_18.txt b/UCI_Datasets/half_moons/data/index_train_18.txt new file mode 100644 index 0000000..dd56240 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_18.txt @@ -0,0 +1,90 @@ +48 +87 +17 +57 +22 +81 +4 +51 +94 +63 +20 +98 +49 +83 +65 +77 +23 +24 +11 +28 +12 +55 +76 +58 +15 +19 +97 +6 
+50 +74 +90 +91 +66 +42 +84 +2 +47 +41 +93 +40 +14 +54 +7 +79 +71 +37 +13 +85 +96 +72 +60 +45 +43 +89 +70 +46 +52 +9 +36 +80 +5 +73 +31 +26 +21 +69 +38 +64 +3 +30 +0 +95 +88 +10 +29 +27 +82 +1 +86 +25 +33 +34 +8 +75 +62 +53 +67 +32 +61 +39 diff --git a/UCI_Datasets/half_moons/data/index_train_19.txt b/UCI_Datasets/half_moons/data/index_train_19.txt new file mode 100644 index 0000000..cfd9fe4 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_19.txt @@ -0,0 +1,90 @@ +9 +17 +59 +11 +2 +95 +68 +28 +60 +14 +21 +66 +61 +74 +18 +56 +80 +12 +23 +72 +96 +15 +35 +64 +69 +62 +79 +54 +13 +90 +94 +57 +81 +98 +41 +52 +75 +22 +20 +24 +30 +89 +5 +51 +44 +67 +33 +77 +25 +31 +70 +99 +42 +46 +85 +49 +53 +16 +27 +63 +10 +3 +65 +50 +93 +38 +7 +32 +40 +26 +78 +83 +71 +4 +19 +0 +82 +34 +8 +91 +55 +48 +29 +1 +58 +43 +6 +37 +39 +92 diff --git a/UCI_Datasets/half_moons/data/index_train_2.txt b/UCI_Datasets/half_moons/data/index_train_2.txt new file mode 100644 index 0000000..307738c --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_2.txt @@ -0,0 +1,90 @@ +95 +67 +96 +71 +1 +80 +99 +45 +52 +27 +97 +63 +91 +70 +43 +11 +46 +94 +21 +89 +61 +36 +57 +90 +58 +9 +12 +18 +29 +16 +51 +25 +6 +13 +69 +22 +88 +40 +35 +56 +76 +73 +0 +4 +17 +59 +66 +62 +98 +10 +42 +65 +23 +49 +75 +5 +39 +68 +38 +87 +37 +81 +78 +3 +72 +85 +34 +60 +47 +53 +7 +26 +19 +14 +30 +15 +44 +20 +24 +64 +41 +79 +50 +77 +86 +93 +8 +74 +92 +31 diff --git a/UCI_Datasets/half_moons/data/index_train_3.txt b/UCI_Datasets/half_moons/data/index_train_3.txt new file mode 100644 index 0000000..9477579 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_3.txt @@ -0,0 +1,90 @@ +40 +10 +96 +66 +89 +32 +95 +55 +25 +5 +0 +7 +92 +61 +60 +15 +99 +86 +72 +70 +80 +50 +49 +62 +65 +34 +8 +75 +63 +14 +57 +4 +46 +21 +53 +17 +35 +20 +83 +16 +77 +11 +51 +18 +68 +97 +45 +24 +41 +9 +31 +42 +28 +2 +67 +23 +36 +13 +6 +48 +47 +19 +82 +98 +90 +3 +74 +94 +64 +30 +78 +1 +79 +27 +52 +73 +81 +56 +88 +84 +33 +69 +38 +87 +43 +39 +71 +93 
+26 +59 diff --git a/UCI_Datasets/half_moons/data/index_train_4.txt b/UCI_Datasets/half_moons/data/index_train_4.txt new file mode 100644 index 0000000..326710b --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_4.txt @@ -0,0 +1,90 @@ +89 +99 +45 +96 +72 +9 +74 +88 +1 +38 +20 +65 +53 +86 +44 +59 +47 +77 +31 +95 +85 +80 +54 +43 +56 +15 +34 +60 +64 +62 +66 +26 +3 +81 +79 +24 +4 +73 +13 +25 +98 +19 +67 +55 +76 +90 +69 +83 +42 +51 +57 +5 +71 +41 +82 +10 +33 +17 +12 +39 +84 +78 +2 +23 +68 +48 +40 +8 +30 +91 +61 +94 +29 +97 +7 +46 +50 +52 +28 +27 +18 +36 +70 +93 +0 +49 +32 +63 +35 +75 diff --git a/UCI_Datasets/half_moons/data/index_train_5.txt b/UCI_Datasets/half_moons/data/index_train_5.txt new file mode 100644 index 0000000..59b2d48 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_5.txt @@ -0,0 +1,90 @@ +0 +86 +4 +23 +24 +1 +62 +83 +66 +93 +55 +14 +21 +25 +76 +75 +16 +44 +15 +53 +11 +92 +80 +2 +47 +34 +35 +60 +6 +85 +13 +20 +41 +94 +31 +45 +90 +22 +96 +49 +98 +97 +65 +19 +70 +50 +89 +17 +12 +42 +84 +26 +57 +58 +74 +72 +39 +56 +61 +36 +68 +29 +59 +73 +79 +32 +10 +52 +18 +71 +64 +87 +28 +63 +54 +99 +3 +67 +9 +46 +43 +30 +77 +51 +27 +33 +88 +37 +78 +7 diff --git a/UCI_Datasets/half_moons/data/index_train_6.txt b/UCI_Datasets/half_moons/data/index_train_6.txt new file mode 100644 index 0000000..15e3576 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_6.txt @@ -0,0 +1,90 @@ +6 +59 +72 +30 +91 +43 +75 +76 +15 +61 +29 +46 +81 +69 +9 +92 +23 +93 +56 +37 +8 +87 +24 +85 +60 +86 +62 +10 +90 +3 +2 +19 +35 +73 +49 +31 +94 +55 +20 +98 +27 +53 +38 +13 +83 +65 +41 +25 +47 +51 +99 +28 +57 +79 +84 +70 +12 +50 +39 +54 +5 +4 +80 +66 +58 +33 +77 +7 +95 +64 +45 +78 +52 +74 +11 +18 +89 +67 +68 +21 +34 +97 +48 +1 +32 +26 +36 +14 +71 +96 diff --git a/UCI_Datasets/half_moons/data/index_train_7.txt b/UCI_Datasets/half_moons/data/index_train_7.txt new file mode 100644 index 0000000..49ab6f2 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_7.txt @@ 
-0,0 +1,90 @@ +44 +88 +74 +48 +59 +79 +25 +52 +45 +55 +85 +89 +95 +54 +78 +17 +19 +72 +24 +56 +75 +18 +80 +26 +12 +35 +69 +11 +92 +62 +96 +1 +86 +53 +99 +73 +8 +64 +27 +47 +60 +14 +91 +81 +16 +71 +76 +10 +2 +36 +90 +84 +67 +87 +63 +39 +42 +22 +23 +97 +5 +93 +77 +94 +68 +43 +37 +38 +98 +70 +21 +0 +9 +4 +50 +40 +20 +41 +46 +49 +57 +30 +34 +82 +58 +83 +61 +66 +29 +15 diff --git a/UCI_Datasets/half_moons/data/index_train_8.txt b/UCI_Datasets/half_moons/data/index_train_8.txt new file mode 100644 index 0000000..d54095c --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_8.txt @@ -0,0 +1,90 @@ +3 +60 +35 +33 +22 +66 +45 +68 +67 +75 +6 +87 +78 +16 +92 +2 +61 +56 +88 +54 +40 +63 +79 +5 +26 +53 +83 +24 +48 +10 +29 +34 +11 +82 +8 +28 +91 +98 +25 +69 +14 +97 +94 +86 +55 +0 +76 +58 +23 +13 +4 +9 +27 +44 +46 +59 +85 +71 +31 +62 +17 +49 +81 +72 +51 +74 +41 +43 +73 +70 +50 +7 +30 +99 +84 +36 +37 +15 +39 +93 +77 +18 +12 +95 +1 +21 +47 +42 +64 +19 diff --git a/UCI_Datasets/half_moons/data/index_train_9.txt b/UCI_Datasets/half_moons/data/index_train_9.txt new file mode 100644 index 0000000..b109e81 --- /dev/null +++ b/UCI_Datasets/half_moons/data/index_train_9.txt @@ -0,0 +1,90 @@ +64 +91 +12 +52 +16 +65 +50 +68 +33 +30 +14 +55 +78 +6 +92 +21 +72 +27 +24 +99 +70 +40 +69 +61 +45 +89 +84 +49 +23 +34 +81 +97 +22 +35 +95 +36 +41 +29 +10 +94 +58 +17 +19 +28 +25 +60 +82 +73 +15 +20 +63 +9 +47 +37 +93 +44 +18 +76 +46 +32 +2 +86 +7 +79 +96 +98 +80 +4 +38 +8 +56 +83 +13 +1 +54 +90 +11 +31 +67 +26 +5 +85 +66 +59 +75 +74 +88 +53 +43 +57 diff --git a/UCI_Datasets/half_moons/data/n_epochs.txt b/UCI_Datasets/half_moons/data/n_epochs.txt new file mode 100644 index 0000000..425151f --- /dev/null +++ b/UCI_Datasets/half_moons/data/n_epochs.txt @@ -0,0 +1 @@ +40 diff --git a/UCI_Datasets/half_moons/data/n_hidden.txt b/UCI_Datasets/half_moons/data/n_hidden.txt new file mode 100644 index 0000000..e373ee6 --- /dev/null +++ b/UCI_Datasets/half_moons/data/n_hidden.txt @@ -0,0 +1 @@ +50 diff --git 
a/UCI_Datasets/half_moons/data/n_splits.txt b/UCI_Datasets/half_moons/data/n_splits.txt new file mode 100644 index 0000000..209e3ef --- /dev/null +++ b/UCI_Datasets/half_moons/data/n_splits.txt @@ -0,0 +1 @@ +20 diff --git a/UCI_Datasets/half_moons/data/split_data_train_test.py b/UCI_Datasets/half_moons/data/split_data_train_test.py new file mode 100644 index 0000000..22f0f8c --- /dev/null +++ b/UCI_Datasets/half_moons/data/split_data_train_test.py @@ -0,0 +1,47 @@ + +import numpy as np + +# We set the random seed + +np.random.seed(1) + +# We load the data + +data = np.loadtxt('data.txt') +n = data.shape[ 0 ] + +# We generate the training test splits + +n_splits = 20 +for i in range(n_splits): + + permutation = np.random.choice(range(n), n, replace = False) + + end_train = round(n * 9.0 / 10) + end_test = n + + index_train = permutation[ 0 : end_train ] + index_test = permutation[ end_train : n ] + + np.savetxt("index_train_{}.txt".format(i), index_train, fmt = '%d') + np.savetxt("index_test_{}.txt".format(i), index_test, fmt = '%d') + + print(i) + +np.savetxt("n_splits.txt", np.array([ n_splits ]), fmt = '%d') + +# We store the index to the features and to the target + +index_features = np.array(range(data.shape[ 1 ] - 1), dtype = int) +index_target = np.array([ data.shape[ 1 ] - 1 ]) + +np.savetxt("index_features.txt", index_features, fmt = '%d') +np.savetxt("index_target.txt", index_target, fmt = '%d') + +# We store the number of hidden neurons to use + +np.savetxt("n_hidden.txt", np.array([ 50 ]), fmt = '%d') + +# We store the number of epochs to use + +np.savetxt("n_epochs.txt", np.array([ 40 ]), fmt = '%d') diff --git a/UCI_Datasets/half_moons/data/tau_values.txt b/UCI_Datasets/half_moons/data/tau_values.txt new file mode 100644 index 0000000..9eb18e1 --- /dev/null +++ b/UCI_Datasets/half_moons/data/tau_values.txt @@ -0,0 +1,3 @@ +150 +200 +250 diff --git a/UCI_Datasets/half_moons/results/log_1_xepochs_2_hidden_layers.txt 
b/UCI_Datasets/half_moons/results/log_1_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..44572ea --- /dev/null +++ b/UCI_Datasets/half_moons/results/log_1_xepochs_2_hidden_layers.txt @@ -0,0 +1,3 @@ +accuracies 0.501000 +- 0.067963 (stddev) +- 0.015197 (std error), median 0.485000 25p 0.460000 75p 0.537500 +MC accuracies 0.503500 +- 0.104989 (stddev) +- 0.023476 (std error), median 0.505000 25p 0.420000 75p 0.590000 +lls -0.346656 +- 0.000387 (stddev) +- 0.000087 (std error), median -0.346576 25p -0.346748 75p -0.346436 diff --git a/UCI_Datasets/half_moons/results/test_MC_acc_1_xepochs_2_hidden_layers.txt b/UCI_Datasets/half_moons/results/test_MC_acc_1_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..1985fe6 --- /dev/null +++ b/UCI_Datasets/half_moons/results/test_MC_acc_1_xepochs_2_hidden_layers.txt @@ -0,0 +1,24 @@ +0.63 +0.57 +0.52 +0.5 +0.59 +0.48 +0.64 +0.72 +0.36 +0.49 +0.39 +0.39 +0.53 +0.64 +0.33 +0.52 +0.6 +0.59 +0.53 +0.45 +0.56 +0.47 +0.36 +0.43 diff --git a/UCI_Datasets/half_moons/results/test_acc_1_xepochs_2_hidden_layers.txt b/UCI_Datasets/half_moons/results/test_acc_1_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..14e0973 --- /dev/null +++ b/UCI_Datasets/half_moons/results/test_acc_1_xepochs_2_hidden_layers.txt @@ -0,0 +1,24 @@ +0.44 +0.58 +0.55 +0.45 +0.52 +0.52 +0.61 +0.48 +0.33 +0.51 +0.46 +0.45 +0.45 +0.44 +0.46 +0.56 +0.6 +0.58 +0.46 +0.47 +0.62 +0.48 +0.49 +0.53 diff --git a/UCI_Datasets/half_moons/results/test_ll_1_xepochs_2_hidden_layers.txt b/UCI_Datasets/half_moons/results/test_ll_1_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..5d133a0 --- /dev/null +++ b/UCI_Datasets/half_moons/results/test_ll_1_xepochs_2_hidden_layers.txt @@ -0,0 +1,24 @@ +-0.3464298915863037 +-0.34659339129924777 +-0.34676177114248274 +-0.3456023409962654 +-0.34644147217273713 +-0.3464530447125435 +-0.3473271337151527 +-0.3460888424515724 +-0.34611385345458984 +-0.34660767555236816 +-0.3465374258160591 
+-0.3462721824645996 +-0.3465443634986877 +-0.3463971373438835 +-0.34641920268535614 +-0.34674194097518923 +-0.34702659517526624 +-0.3473024022579193 +-0.3466244202852249 +-0.3466678449511528 +-0.3476290738582611 +-0.346765795648098 +-0.34650048732757566 +-0.3466621682047844 diff --git a/UCI_Datasets/half_moons/results/test_tau_1_xepochs_2_hidden_layers.txt b/UCI_Datasets/half_moons/results/test_tau_1_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..79eec85 --- /dev/null +++ b/UCI_Datasets/half_moons/results/test_tau_1_xepochs_2_hidden_layers.txt @@ -0,0 +1,21 @@ +0.75 +0.25 +0.25 +0.25 +0.25 +0.75 +0.5 +0.75 +0.75 +0.5 +0.5 +0.5 +0.25 +0.75 +0.75 +0.25 +0.75 +0.25 +0.25 +0.75 +0.5 diff --git a/UCI_Datasets/half_moons/results/validation_MC_acc_100_xepochs_2_hidden_layers.txt b/UCI_Datasets/half_moons/results/validation_MC_acc_100_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..d7ef8d5 --- /dev/null +++ b/UCI_Datasets/half_moons/results/validation_MC_acc_100_xepochs_2_hidden_layers.txt @@ -0,0 +1,3 @@ +Dropout_Rate: 0.005 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.4375 diff --git a/UCI_Datasets/half_moons/results/validation_MC_acc_1_xepochs_2_hidden_layers.txt b/UCI_Datasets/half_moons/results/validation_MC_acc_1_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..ac03ad2 --- /dev/null +++ b/UCI_Datasets/half_moons/results/validation_MC_acc_1_xepochs_2_hidden_layers.txt @@ -0,0 +1,308 @@ +Dropout_Rate: 0 Tau: 1 :: 0.5875 +Dropout_Rate: 0 Tau: 25 :: 0.525 +Dropout_Rate: 0 Tau: 50 :: 0.6 +Dropout_Rate: 0 Tau: 100 :: 0.4 +Dropout_Rate: 0 Tau: 125 :: 0.4875 +Dropout_Rate: 0 Tau: 150 :: 0.575 +Dropout_Rate: 0 Tau: 250 :: 0.5125 +Dropout_Rate: 0.005 Tau: 1 :: 0.7125 +Dropout_Rate: 0.005 Tau: 25 :: 0.6 +Dropout_Rate: 0.005 Tau: 50 :: 0.4 +Dropout_Rate: 0.005 Tau: 100 :: 0.5875 +Dropout_Rate: 0.005 Tau: 125 :: 0.5375 +Dropout_Rate: 0.005 Tau: 150 :: 0.525 +Dropout_Rate: 0.005 Tau: 
250 :: 0.5 +Dropout_Rate: 0.05 Tau: 1 :: 0.6125 +Dropout_Rate: 0.05 Tau: 25 :: 0.275 +Dropout_Rate: 0.05 Tau: 50 :: 0.475 +Dropout_Rate: 0.05 Tau: 100 :: 0.375 +Dropout_Rate: 0.05 Tau: 125 :: 0.55 +Dropout_Rate: 0.05 Tau: 150 :: 0.525 +Dropout_Rate: 0.05 Tau: 250 :: 0.5875 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.6375 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.4875 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.6375 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.3875 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4125 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.575 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.35 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.625 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.35 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.35 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.6375 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.575 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.45 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4375 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.475 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.6 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.4 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.3875 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.625 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.3375 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4125 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.625 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.7125 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.6125 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.45 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.575 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4375 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.6375 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.3875 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5875 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5 +Dropout_Rate: 
0.1 Tau: 0.25 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.6 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5875 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.3625 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.4375 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.575 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.5875 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.55 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.425 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.6125 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.6375 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.35 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.3 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.475 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5375 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5875 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.35 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.2875 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4375 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.5875 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.3 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.2875 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5875 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.4375 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.6125 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5625 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.675 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.25 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4375 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.3875 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.7 
+Dropout_Rate: 0.005 Tau: 0.5 :: 0.55 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.3 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.425 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4125 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.475 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.55 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.225 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.3625 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.3875 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.3625 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.675 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.425 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.4375 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.35 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.3875 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.35 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.55 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4125 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5125 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.6375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.6 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.6375 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.55 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.45 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.6125 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.6125 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.3875 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4375 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.6 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.6125 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.75 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.3375 +Dropout_Rate: 0.01 Tau: 
0.5 :: 0.6125 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.4375 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4375 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.6125 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4125 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.575 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.35 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.4125 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.725 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.65 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.3375 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4125 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5875 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.575 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.3625 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5625 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.3625 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.5375 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.55 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5875 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.45 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.65 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4125 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.425 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.425 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.425 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.55 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.4625 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.775 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.325 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.4625 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.6 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.525 
+Dropout_Rate: 0.05 Tau: 0.75 :: 0.4375 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.6125 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.425 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.425 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.6 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.4 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.5375 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.3625 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.6 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.6125 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.6 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4125 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.6 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.475 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5125 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.6125 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.425 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.5375 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.65 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4625 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.575 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.35 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5375 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.375 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.6125 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.575 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.625 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4 +Dropout_Rate: 0.1 Tau: 0.75 :: 
0.4375 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5875 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.6125 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.65 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4375 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.65 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.7625 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.6625 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.3875 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.6625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.325 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.35 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.625 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.7125 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4125 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4375 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.3875 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4375 diff --git a/UCI_Datasets/half_moons/results/validation_acc_100_xepochs_2_hidden_layers.txt b/UCI_Datasets/half_moons/results/validation_acc_100_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..d7ef8d5 --- /dev/null +++ b/UCI_Datasets/half_moons/results/validation_acc_100_xepochs_2_hidden_layers.txt @@ -0,0 +1,3 @@ +Dropout_Rate: 0.005 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.4375 diff --git a/UCI_Datasets/half_moons/results/validation_acc_1_xepochs_2_hidden_layers.txt 
b/UCI_Datasets/half_moons/results/validation_acc_1_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..d832763 --- /dev/null +++ b/UCI_Datasets/half_moons/results/validation_acc_1_xepochs_2_hidden_layers.txt @@ -0,0 +1,308 @@ +Dropout_Rate: 0 Tau: 1 :: 0.5875 +Dropout_Rate: 0 Tau: 25 :: 0.525 +Dropout_Rate: 0 Tau: 50 :: 0.6 +Dropout_Rate: 0 Tau: 100 :: 0.4 +Dropout_Rate: 0 Tau: 125 :: 0.4875 +Dropout_Rate: 0 Tau: 150 :: 0.575 +Dropout_Rate: 0 Tau: 250 :: 0.5125 +Dropout_Rate: 0.005 Tau: 1 :: 0.725 +Dropout_Rate: 0.005 Tau: 25 :: 0.5125 +Dropout_Rate: 0.005 Tau: 50 :: 0.4375 +Dropout_Rate: 0.005 Tau: 100 :: 0.55 +Dropout_Rate: 0.005 Tau: 125 :: 0.625 +Dropout_Rate: 0.005 Tau: 150 :: 0.525 +Dropout_Rate: 0.005 Tau: 250 :: 0.475 +Dropout_Rate: 0.05 Tau: 1 :: 0.4875 +Dropout_Rate: 0.05 Tau: 25 :: 0.4 +Dropout_Rate: 0.05 Tau: 50 :: 0.5875 +Dropout_Rate: 0.05 Tau: 100 :: 0.4125 +Dropout_Rate: 0.05 Tau: 125 :: 0.4875 +Dropout_Rate: 0.05 Tau: 150 :: 0.4625 +Dropout_Rate: 0.05 Tau: 250 :: 0.55 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.675 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.6625 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.6 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5625 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5875 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.35 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.35 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.55 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.425 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4375 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.6125 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.3875 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.45 +Dropout_Rate: 0.005 
Tau: 0.75 :: 0.45 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.3625 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.6375 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.35 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.425 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4125 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.475 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4375 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4125 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.375 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5375 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.675 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5125 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.6 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.45 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.6125 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.55 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.375 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.5875 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5875 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.375 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.3875 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.5375 
+Dropout_Rate: 0.05 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.375 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.6125 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5875 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.7 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4375 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.475 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4375 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.625 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.3 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4375 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.4625 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.575 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.6 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.55 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.575 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.2375 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.45 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.3875 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.5625 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.3875 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.3875 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.4375 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.4125 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.425 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4125 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.375 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.575 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.475 +Dropout_Rate: 0.1 
Tau: 0.25 :: 0.475 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.575 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4125 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.6875 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.6 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4375 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5875 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.6125 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.775 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.5625 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.575 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.6875 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.575 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.45 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.3875 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.575 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5625 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.6 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.4125 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.575 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.3875 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4875 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.6 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.575 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.425 
+Dropout_Rate: 0.005 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5625 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.425 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4125 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.4625 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.6125 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.4625 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.6125 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5875 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.55 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.575 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.4125 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.6125 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4875 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.625 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.575 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5375 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4875 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.45 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4625 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.55 
+Dropout_Rate: 0.01 Tau: 0.5 :: 0.475 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.65 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5125 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.525 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.425 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.3625 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5875 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5375 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.4875 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.475 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.5875 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.5375 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.425 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.525 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.5375 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.5125 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5625 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4875 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.5 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.425 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.4875 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.525 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4125 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.55 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.45 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.7 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.4875 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.6375 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.425 +Dropout_Rate: 0.05 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.575 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.375 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.5125 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.3875 +Dropout_Rate: 0.005 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.01 Tau: 0.25 :: 0.5 +Dropout_Rate: 0.01 Tau: 0.5 :: 0.5625 +Dropout_Rate: 0.01 Tau: 0.75 :: 0.55 +Dropout_Rate: 0.05 Tau: 0.25 :: 0.4625 +Dropout_Rate: 0.05 
Tau: 0.5 :: 0.4625 +Dropout_Rate: 0.05 Tau: 0.75 :: 0.4625 +Dropout_Rate: 0.1 Tau: 0.25 :: 0.45 +Dropout_Rate: 0.1 Tau: 0.5 :: 0.425 +Dropout_Rate: 0.1 Tau: 0.75 :: 0.4875 diff --git a/UCI_Datasets/half_moons/results/validation_ll_100_xepochs_2_hidden_layers.txt b/UCI_Datasets/half_moons/results/validation_ll_100_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..387b387 --- /dev/null +++ b/UCI_Datasets/half_moons/results/validation_ll_100_xepochs_2_hidden_layers.txt @@ -0,0 +1,3 @@ +Dropout_Rate: 0.005 Tau: 0.25 :: 0.3465735912322998 +Dropout_Rate: 0.005 Tau: 0.5 :: 0.346571946144104 +Dropout_Rate: 0.005 Tau: 0.25 :: 0.3465733639895916 diff --git a/UCI_Datasets/half_moons/results/validation_ll_1_xepochs_2_hidden_layers.txt b/UCI_Datasets/half_moons/results/validation_ll_1_xepochs_2_hidden_layers.txt new file mode 100644 index 0000000..26f65f7 --- /dev/null +++ b/UCI_Datasets/half_moons/results/validation_ll_1_xepochs_2_hidden_layers.txt @@ -0,0 +1,308 @@ +Dropout_Rate: 0 Tau: 1 :: -0.3466836057603359 +Dropout_Rate: 0 Tau: 25 :: -0.346605121716857 +Dropout_Rate: 0 Tau: 50 :: -0.34658363871276376 +Dropout_Rate: 0 Tau: 100 :: -0.3465467721223831 +Dropout_Rate: 0 Tau: 125 :: -0.34647271521389483 +Dropout_Rate: 0 Tau: 150 :: -0.34661598578095437 +Dropout_Rate: 0 Tau: 250 :: -0.34653557240962984 +Dropout_Rate: 0.005 Tau: 1 :: -0.34717724472284317 +Dropout_Rate: 0.005 Tau: 25 :: -0.34672541357576847 +Dropout_Rate: 0.005 Tau: 50 :: -0.34628817699849607 +Dropout_Rate: 0.005 Tau: 100 :: -0.3473296403884888 +Dropout_Rate: 0.005 Tau: 125 :: -0.34714771024882796 +Dropout_Rate: 0.005 Tau: 150 :: -0.3463339384645224 +Dropout_Rate: 0.005 Tau: 250 :: -0.34685495011508466 +Dropout_Rate: 0.05 Tau: 1 :: -0.34665039703249934 +Dropout_Rate: 0.05 Tau: 25 :: -0.3462770849466324 +Dropout_Rate: 0.05 Tau: 50 :: -0.3467899922281504 +Dropout_Rate: 0.05 Tau: 100 :: -0.3464151293039322 +Dropout_Rate: 0.05 Tau: 125 :: -0.34650530107319355 +Dropout_Rate: 0.05 Tau: 150 :: 
-0.3463297415524721 +Dropout_Rate: 0.05 Tau: 250 :: -0.34680024683475497 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34699486047029493 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3463199879974127 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34690054357051847 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.346193103864789 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3464956134557724 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3467700034379959 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.34692928194999695 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3458757184445858 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3462131340056658 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3472572948783636 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3467133704572916 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3465996216982603 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34642413035035136 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34615080431103706 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3467773586511612 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3467583481222391 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3460072297602892 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3470633927732706 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34665543846786023 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34611622989177704 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34671687744557855 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3461932383477688 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.347238477319479 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.346513881161809 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3461102910339832 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34617616720497607 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3467296525835991 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.346453608199954 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.34645775593817235 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3464324861764908 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34641936905682086 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3463302865624428 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34694451093673706 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3465205654501915 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.34582316167652605 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3469465795904398 +Dropout_Rate: 
0.005 Tau: 0.5 :: -0.346104783564806 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3469864074140787 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.34581903368234634 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.34623598158359525 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3471024978905916 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3468089412897825 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34696037583053113 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3465699031949043 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3460914984345436 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.34702632687985896 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3457239862531424 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3464178916066885 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3464764501899481 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.346565131098032 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3468821968883276 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3467242386192083 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.34727925918996333 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3469079375267029 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3468026205897331 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34659505598247053 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3464108228683472 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3467845108360052 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3466466303914785 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3458918254822493 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34664883874356744 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34662418700754644 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3465169046074152 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.34649418070912363 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.34595331698656084 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3467000737786293 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34610548950731757 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34711107350885867 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3469919640570879 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3469326075166464 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3469910632818937 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34618609510362147 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3463544636964798 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3466890797019005 
+Dropout_Rate: 0.01 Tau: 0.25 :: -0.34655297100543975 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3465709798038006 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3465559482574463 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.346388516202569 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34734711684286596 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3462859369814396 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34707540161907674 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.34665617607533933 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3465985286980867 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3460391853004694 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34656115509569646 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3469294313341379 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.34646120145916937 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3467305120080709 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3470541924238205 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.345957126095891 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34694809168577195 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3463377468287945 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3457860566675663 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.34627183750271795 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.34634662941098215 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3466090589761734 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3465397901833057 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3464791029691696 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.34642426706850526 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3462828084826469 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.34616789035499096 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3465654544532299 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3467980854213238 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3467655897140503 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3471120990812778 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.34685371071100235 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3466965574771166 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34712737128138543 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3454545486718416 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34665803201496603 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3456067755818367 +Dropout_Rate: 0.01 Tau: 0.5 :: 
-0.3463506203144789 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.34641386941075325 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3466039802879095 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34660629406571386 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34578905552625655 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34663726277649404 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3467108953744173 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.34634965285658836 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34641935788094996 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34651664905250074 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34623385630548 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3465264491736889 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3465410638600588 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3466545145958662 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.34651716724038123 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3471034325659275 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3461551729589701 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34626157097518445 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.34630154743790625 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3466997124254704 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34661211892962457 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34678608514368536 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34615298360586166 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3463785041123629 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3466461103409529 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3467607721686363 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3471034713089466 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3463727418333292 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3464520011097193 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34681324288249016 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3470913216471672 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3467838019132614 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34663039110600946 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.346434934809804 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3467945884913206 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.346267818659544 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.34645369462668896 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.34658289328217506 +Dropout_Rate: 
0.05 Tau: 0.25 :: -0.34647830985486505 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34623763859272005 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3468649756163359 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34671794474124906 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.34664780646562576 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3464776571840048 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3466873299330473 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3465245392173529 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34692115150392056 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3468290250748396 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.34686060026288035 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3466766491532326 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3461505424231291 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3460772879421711 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34620724357664584 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34646394550800325 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3468620330095291 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3466245044022799 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3470295317471027 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34638204611837864 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3468045022338629 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3464427195489407 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3464981704950333 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3463736534118652 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.34657030142843726 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.346526300907135 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34703086614608764 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34713640324771405 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3462482884526253 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3464803613722324 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34631955772638323 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3468379482626915 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.347236206009984 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.34673136584460734 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.346717880666256 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3463709570467472 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3462802428752184 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34648931585252285 
+Dropout_Rate: 0.05 Tau: 0.75 :: -0.34663389101624487 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34606084525585173 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.34622066356241704 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.34644843377172946 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3462905392050743 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34683530405163765 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3465385515242815 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.34662299379706385 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3464580923318863 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.34577418118715286 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.34679550789296626 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3461578439921141 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3463717639446259 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3468873679637909 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.34612959884107114 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.34653776288032534 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3467176154255867 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3467625372111797 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34702627807855607 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3467619515955448 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.34663346633315084 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3462414491921663 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3467740651220083 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34671177305281164 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3466541189700365 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3464986845850945 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.346916738525033 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.345203161239624 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34669242016971114 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34698508903384206 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34653791710734366 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3465434968471527 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.34646271243691446 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3461048223078251 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.34625414833426477 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3465048748999834 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34599420577287676 +Dropout_Rate: 0.1 Tau: 0.25 :: 
-0.34599030390381813 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3467887230217457 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.34673017300665376 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34630779139697554 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3464944824576378 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.34660492949187754 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.34733968526124953 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3467565529048443 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.34658047184348106 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.34695841148495676 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3467958550900221 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3465621072798967 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3464288357645273 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.346516503021121 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.34609606564044954 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3466106869280338 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.3464171174913645 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3464305207133293 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3465668585151434 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.34629966244101523 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.34643234945833684 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3466232355684042 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34709969274699687 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.3466952074319124 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34619239494204523 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3462679602205753 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.34680564850568774 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3466111931949854 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34648084230721 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3463556457310915 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.346172408759594 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.34641889072954657 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3463332485407591 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.34600970074534415 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34639174304902554 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34709956869482994 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3472308967262506 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.34687735326588154 +Dropout_Rate: 0.1 
Tau: 0.75 :: -0.34633342288434504 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.34670224562287333 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34656532295048237 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3472887210547924 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.34672834649682044 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.34686562344431876 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3472907543182373 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.3463377974927425 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.3467037495225668 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34680660627782345 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.34663738310337067 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3465137243270874 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.34622916355729105 +Dropout_Rate: 0.005 Tau: 0.25 :: -0.3463946424424648 +Dropout_Rate: 0.005 Tau: 0.5 :: -0.34642546325922013 +Dropout_Rate: 0.005 Tau: 0.75 :: -0.3466039888560772 +Dropout_Rate: 0.01 Tau: 0.25 :: -0.3466950587928295 +Dropout_Rate: 0.01 Tau: 0.5 :: -0.3465811923146248 +Dropout_Rate: 0.01 Tau: 0.75 :: -0.3469836264848709 +Dropout_Rate: 0.05 Tau: 0.25 :: -0.34639448039233683 +Dropout_Rate: 0.05 Tau: 0.5 :: -0.34667249731719496 +Dropout_Rate: 0.05 Tau: 0.75 :: -0.34644710160791875 +Dropout_Rate: 0.1 Tau: 0.25 :: -0.3467780970036983 +Dropout_Rate: 0.1 Tau: 0.5 :: -0.3459491074085236 +Dropout_Rate: 0.1 Tau: 0.75 :: -0.3463404431939125 diff --git a/experiment.py b/experiment.py index 40f30be..d324bb5 100644 --- a/experiment.py +++ b/experiment.py @@ -112,8 +112,13 @@ def _get_index_train_test_path(split_num, train = True): n_splits = np.loadtxt(_N_SPLITS_FILE) print ("Done.") -errors, MC_errors, lls = [], [], [] -for split in range(int(n_splits)): +print(f"Parameters: {epochs_multiplier} {num_hidden_layers}") + +accuracies, MC_accuracies, lls = [], [], [] +# int(n_splits) +for split in range(2): + + print(f"Split: {split}") # We load the indexes of the training and test sets print ('Loading file: ' + _get_index_train_test_path(split, train=True)) @@ -156,10 +161,12 @@ def _get_index_train_test_path(split_num, 
train = True): network = net.net(X_train, y_train, ([ int(n_hidden) ] * num_hidden_layers), normalize = True, n_epochs = int(n_epochs * epochs_multiplier), tau = tau, dropout = dropout_rate) + print('DONE TRAINING') # We obtain the test RMSE and the test ll from the validation sets error, MC_error, ll = network.predict(X_validation, y_validation) + print('DONE PREDICTING') if (ll > best_ll): best_ll = ll best_network = network @@ -186,13 +193,14 @@ def _get_index_train_test_path(split_num, train = True): best_network = net.net(X_train_original, y_train_original, ([ int(n_hidden) ] * num_hidden_layers), normalize = True, n_epochs = int(n_epochs * epochs_multiplier), tau = best_tau, dropout = best_dropout) - error, MC_error, ll = best_network.predict(X_test, y_test) + accuracy, MC_accuracy, ll = best_network.predict(X_test, y_test) + print("DONE WITH BEST NETWORK") with open(_RESULTS_TEST_RMSE, "a") as myfile: - myfile.write(repr(error) + '\n') + myfile.write(repr(accuracy) + '\n') with open(_RESULTS_TEST_MC_RMSE, "a") as myfile: - myfile.write(repr(MC_error) + '\n') + myfile.write(repr(MC_accuracy) + '\n') with open(_RESULTS_TEST_LL, "a") as myfile: myfile.write(repr(ll) + '\n') @@ -201,17 +209,17 @@ def _get_index_train_test_path(split_num, train = True): myfile.write(repr(best_network.tau) + '\n') print ("Tests on split " + str(split) + " complete.") - errors += [error] - MC_errors += [MC_error] + accuracies += [accuracy] + MC_accuracies += [MC_accuracy] lls += [ll] with open(_RESULTS_TEST_LOG, "a") as myfile: - myfile.write('errors %f +- %f (stddev) +- %f (std error), median %f 25p %f 75p %f \n' % ( - np.mean(errors), np.std(errors), np.std(errors)/math.sqrt(n_splits), - np.percentile(errors, 50), np.percentile(errors, 25), np.percentile(errors, 75))) - myfile.write('MC errors %f +- %f (stddev) +- %f (std error), median %f 25p %f 75p %f \n' % ( - np.mean(MC_errors), np.std(MC_errors), np.std(MC_errors)/math.sqrt(n_splits), - np.percentile(MC_errors, 50), 
np.percentile(MC_errors, 25), np.percentile(MC_errors, 75))) + myfile.write('accuracies %f +- %f (stddev) +- %f (std error), median %f 25p %f 75p %f \n' % ( + np.mean(accuracies), np.std(accuracies), np.std(accuracies)/math.sqrt(n_splits), + np.percentile(accuracies, 50), np.percentile(accuracies, 25), np.percentile(accuracies, 75))) + myfile.write('MC accuracies %f +- %f (stddev) +- %f (std error), median %f 25p %f 75p %f \n' % ( + np.mean(MC_accuracies), np.std(MC_accuracies), np.std(MC_accuracies)/math.sqrt(n_splits), + np.percentile(MC_accuracies, 50), np.percentile(MC_accuracies, 25), np.percentile(MC_accuracies, 75))) myfile.write('lls %f +- %f (stddev) +- %f (std error), median %f 25p %f 75p %f \n' % ( np.mean(lls), np.std(lls), np.std(lls)/math.sqrt(n_splits), np.percentile(lls, 50), np.percentile(lls, 25), np.percentile(lls, 75))) diff --git a/net/net.py b/net/net.py index e12033a..4bc4da7 100644 --- a/net/net.py +++ b/net/net.py @@ -6,14 +6,16 @@ warnings.filterwarnings("ignore") import math -from scipy.misc import logsumexp +from scipy.special import logsumexp import numpy as np from keras.regularizers import l2 from keras import Input from keras.layers import Dropout from keras.layers import Dense +from keras.layers import Softmax from keras import Model +import tensorflow as tf import time @@ -73,19 +75,20 @@ def __init__(self, X_train, y_train, n_hidden, n_epochs = 40, inputs = Input(shape=(X_train.shape[1],)) inter = Dropout(dropout)(inputs, training=True) - inter = Dense(n_hidden[0], activation='relu', W_regularizer=l2(reg))(inter) + inter = Dense(n_hidden[0], activation='relu', kernel_regularizer=l2(reg))(inter) for i in range(len(n_hidden) - 1): inter = Dropout(dropout)(inter, training=True) - inter = Dense(n_hidden[i+1], activation='relu', W_regularizer=l2(reg))(inter) + inter = Dense(n_hidden[i+1], activation='relu', kernel_regularizer=l2(reg))(inter) inter = Dropout(dropout)(inter, training=True) - outputs = Dense(y_train_normalized.shape[1], 
W_regularizer=l2(reg))(inter) + outputs = Dense(2, kernel_regularizer=l2(reg))(inter) + outputs = Softmax()(outputs) model = Model(inputs, outputs) - model.compile(loss='mean_squared_error', optimizer='adam') - + model.compile(loss='binary_crossentropy', optimizer='adam') + # print(model.summary()) # We iterate the learning process start_time = time.time() - model.fit(X_train, y_train_normalized, batch_size=batch_size, nb_epoch=n_epochs, verbose=0) + model.fit(X_train, y_train_normalized, batch_size=batch_size, epochs=n_epochs, verbose=0) self.model = model self.tau = tau self.running_time = time.time() - start_time @@ -119,21 +122,33 @@ def predict(self, X_test, y_test): # of the test data model = self.model - standard_pred = model.predict(X_test, batch_size=500, verbose=1) - standard_pred = standard_pred * self.std_y_train + self.mean_y_train - rmse_standard_pred = np.mean((y_test.squeeze() - standard_pred.squeeze())**2.)**0.5 - - T = 10000 + standard_pred_probs = model.predict(X_test, batch_size=500, verbose=1) + standard_pred = tf.math.argmax(standard_pred_probs, axis=1).numpy() + # standard_pred = standard_pred * self.std_y_train + self.mean_y_train + # rmse_standard_pred = np.mean((y_test.squeeze() - standard_pred.squeeze())**2.)**0.5 + accuracy_standard_pred = np.mean((y_test.squeeze() == standard_pred.squeeze())) + print(f'Standard Accuracy: {accuracy_standard_pred}') + + T = 100 Yt_hat = np.array([model.predict(X_test, batch_size=500, verbose=0) for _ in range(T)]) - Yt_hat = Yt_hat * self.std_y_train + self.mean_y_train - MC_pred = np.mean(Yt_hat, 0) - rmse = np.mean((y_test.squeeze() - MC_pred.squeeze())**2.)**0.5 + # Yt_hat = Yt_hat * self.std_y_train + self.mean_y_train + MC_pred = tf.math.argmax(np.mean(Yt_hat, axis=0), axis=1).numpy() + # print(MC_pred.shape) + mc_accuracy = np.mean((y_test.squeeze() == MC_pred.squeeze())) + print(f'MC Accuracy: {mc_accuracy}') # We compute the test log-likelihood - ll = (logsumexp(-0.5 * self.tau * (y_test[None] - 
Yt_hat)**2., 0) - np.log(T) - - 0.5*np.log(2*np.pi) + 0.5*np.log(self.tau)) - test_ll = np.mean(ll) + # ll = (logsumexp(-0.5 * self.tau * (y_test[None] - Yt_hat)**2., 0) - np.log(T) + # - 0.5*np.log(2*np.pi) + 0.5*np.log(self.tau)) + # test_ll = np.mean(ll) + # ll = np.sum(y_test[]) + + # double check this! + y_test = y_test.astype(int) + y_test_2d = np.hstack((y_test, 1-y_test)) + test_ll = np.mean(np.log(standard_pred_probs) * y_test_2d) # We are done! - return rmse_standard_pred, rmse, test_ll + print(f'Test LL: {test_ll}') + return accuracy_standard_pred, mc_accuracy, test_ll