From 29837e66070f78bc6f98b4116c5792d163c2767b Mon Sep 17 00:00:00 2001
From: Martin Patz
Date: Wed, 8 May 2024 17:26:48 +0200
Subject: [PATCH 1/5] use proper string comparison

---
 Tutorial.ipynb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Tutorial.ipynb b/Tutorial.ipynb
index a8f958f..1ca019c 100644
--- a/Tutorial.ipynb
+++ b/Tutorial.ipynb
@@ -611,9 +611,9 @@
    "source": [
     "# Runs the appropriate method\n",
     "\n",
-    "if model.model_type is 'MM_QP':\n",
+    "if model.model_type == 'MM_QP':\n",
     "    Ypred, Stats = MM_QP(model, verbose=True)\n",
-    "if model.model_type is 'MM_LP':\n",
+    "if model.model_type == 'MM_LP':\n",
     "    Ypred, Stats = MM_LP(model, verbose=True)\n",
     "\n",
     "# Printing results\n",

From 82aa9ac0d95d5fe220370b2752ce77dfe45934a7 Mon Sep 17 00:00:00 2001
From: Martin Patz
Date: Wed, 8 May 2024 17:27:53 +0200
Subject: [PATCH 2/5] fix Keras layer imports

---
 Library/Build_Model.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/Library/Build_Model.py b/Library/Build_Model.py
index 195b549..1dd898f 100644
--- a/Library/Build_Model.py
+++ b/Library/Build_Model.py
@@ -43,9 +43,7 @@
 from keras.layers import Lambda, Reshape, multiply
 from keras.layers import concatenate, add, subtract, dot
 from keras.wrappers.scikit_learn import KerasRegressor
-from keras.layers.core import Activation
-from keras.utils.generic_utils import get_custom_objects
-from keras.utils.generic_utils import CustomObjectScope
+from keras.utils import get_custom_objects, CustomObjectScope
 from keras.callbacks import EarlyStopping
 
 from sklearn import linear_model

From 988399b9cd49bf468ee3edd935b52684cfb2353b Mon Sep 17 00:00:00 2001
From: Martin Patz
Date: Wed, 8 May 2024 17:28:12 +0200
Subject: [PATCH 3/5] migrate to scikeras

---
 Library/Build_Model.py | 2 +-
 environment_amn.yml    | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/Library/Build_Model.py b/Library/Build_Model.py
index 1dd898f..8d4337c 100644
--- a/Library/Build_Model.py
+++ b/Library/Build_Model.py
@@ -42,10 +42,10 @@
 from keras.layers import Input, Dense, LSTM, Dropout, Flatten, Activation
 from keras.layers import Lambda, Reshape, multiply
 from keras.layers import concatenate, add, subtract, dot
-from keras.wrappers.scikit_learn import KerasRegressor
 from keras.utils import get_custom_objects, CustomObjectScope
 from keras.callbacks import EarlyStopping
 
+from scikeras.wrappers import KerasRegressor
 from sklearn import linear_model
 from sklearn.model_selection import cross_val_score, KFold
 from sklearn.pipeline import Pipeline
diff --git a/environment_amn.yml b/environment_amn.yml
index e821ecb..a5307f7 100644
--- a/environment_amn.yml
+++ b/environment_amn.yml
@@ -16,4 +16,5 @@ dependencies:
   - pip
   - ipykernel
   - pip:
-    - silence-tensorflow
\ No newline at end of file
+    - silence-tensorflow
+    - scikeras
\ No newline at end of file

From fe761cd96457642e143e48c7590454d78e3430ce Mon Sep 17 00:00:00 2001
From: Martin Patz
Date: Wed, 8 May 2024 17:28:40 +0200
Subject: [PATCH 4/5] remove unused "parameter", fixes the tutorial as well

---
 Library/Build_Model.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/Library/Build_Model.py b/Library/Build_Model.py
index 8d4337c..c2ca2f9 100644
--- a/Library/Build_Model.py
+++ b/Library/Build_Model.py
@@ -154,14 +154,13 @@ def Loss_SV(V, S, gradient=False):
         dLoss = 0 * V
     return Loss_norm, dLoss
 
-def Loss_Vin(V, Pin, Vin, bound, parameter, gradient=False):
+def Loss_Vin(V, Pin, Vin, bound, gradient=False):
     # Gradient for input boundary constraint
     # Loss = ReLU(Pin . V - Vin)
     # dLoss = ∂(ReLU(Pin . V - Vin)^2/∂V
     # Input: Cf. Gradient_Descent
     Pin = tf.convert_to_tensor(np.float32(Pin))
     Loss = tf.linalg.matmul(V, tf.transpose(Pin), b_is_sparse=True) - Vin
-    # tf.cast(tf.multiply(Vin, parameter.scaler), tf.float32)
     Loss = tf.keras.activations.relu(Loss) if bound == 'UB' else Loss
     Loss_norm = tf.norm(Loss, axis=1, keepdims=True)/Pin.shape[0] # rescaled
     if gradient:
@@ -191,7 +190,7 @@ def Loss_constraint(V, Vin, parameter, gradient=False):
     # mean squared sum L2+L3+L4
     L2, dL2 = Loss_SV(V, parameter.S, gradient=gradient)
     L3, dL3 = Loss_Vin(V, parameter.Pin, Vin,
-                       parameter.mediumbound, parameter, gradient=gradient)
+                       parameter.mediumbound, gradient=gradient)
     L4, dL4 = Loss_Vpos(V, parameter, gradient=gradient)
     # square sum of L2, L3, L4
     L2 = tf.math.square(L2)
@@ -211,7 +210,7 @@ def Loss_all(V, Vin, Vout, parameter, gradient=False):
     L1, dL1 = Loss_Vout(V, parameter.Pout, Vout, gradient=gradient)
     L2, dL2 = Loss_SV(V, parameter.S, gradient=gradient)
     L3, dL3 = Loss_Vin(V, parameter.Pin, Vin,
-                       parameter.mediumbound, parameter, gradient=gradient)
+                       parameter.mediumbound, gradient=gradient)
     L4, dL4 = Loss_Vpos(V, parameter, gradient=gradient)
     # square sum of L1, L2, L3, L4
     L1 = tf.math.square(L1)

From dbec6b0f875df96ed3ac409122cf79bf7f2721b3 Mon Sep 17 00:00:00 2001
From: Martin Patz
Date: Wed, 8 May 2024 17:32:56 +0200
Subject: [PATCH 5/5] add empty line at EOL

---
 environment_amn.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/environment_amn.yml b/environment_amn.yml
index a5307f7..9b5ca6d 100644
--- a/environment_amn.yml
+++ b/environment_amn.yml
@@ -17,4 +17,4 @@ dependencies:
   - ipykernel
   - pip:
     - silence-tensorflow
-    - scikeras
\ No newline at end of file
+    - scikeras
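Note (not part of the patches above): moving from keras.wrappers.scikit_learn to scikeras in PATCH 3/5 also changes how KerasRegressor is constructed, since scikeras takes the model-building callable through "model" rather than "build_fn". The snippet below is a minimal, hypothetical sketch of that call-site change; "build_model" and its hyperparameters are placeholders, not code taken from Build_Model.py.

# Hypothetical sketch of the call-site change implied by PATCH 3/5.
# "build_model" and the hyperparameter values are placeholders, not code
# from Build_Model.py.

from keras.models import Sequential
from keras.layers import Dense
from scikeras.wrappers import KerasRegressor

def build_model():
    # Minimal stand-in for the real model builder in Build_Model.py.
    model = Sequential([Dense(10, activation="relu", input_shape=(5,)),
                        Dense(1)])
    model.compile(optimizer="adam", loss="mse")
    return model

# Before (keras.wrappers.scikit_learn):
#     estimator = KerasRegressor(build_fn=build_model, epochs=10, verbose=0)
# After (scikeras): the builder is passed as "model"; fit/predict usage is unchanged.
estimator = KerasRegressor(model=build_model, epochs=10, verbose=0)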