@@ -103,7 +103,6 @@ Our default value of $k$ will be 10.
103103``` {code-cell} ipython3
104104class Config(NamedTuple):
105105 epochs: int = 4000 # Number of passes through the data set
106- num_layers: int = 4 # Depth of the network
107106 output_dim: int = 10 # Output dimension of input and hidden layers
108107 learning_rate: float = 0.001 # Learning rate for gradient descent
109108 layer_sizes: tuple = (1, 10, 10, 10, 1) # Sizes of each layer in the network
@@ -167,15 +166,15 @@ def build_keras_model(
167166 ):
168167 model = Sequential()
169168 # Add layers to the network sequentially, from inputs towards outputs
170- for i in range(config.num_layers- 1):
169+    for i in range(len(config.layer_sizes) - 1):
171170 model.add(
172171 Dense(units=config.output_dim, activation=activation_function)
173172 )
174173 # Add a final layer that maps to a scalar value, for regression.
175174 model.add(Dense(units=1))
176175 # Embed training configurations
177176 model.compile(
178- optimizer=keras.optimizers.SGD(),
177+ optimizer=keras.optimizers.SGD(),
179178 loss='mean_squared_error'
180179 )
181180 return model
@@ -214,10 +213,10 @@ The next function extracts and visualizes a prediction from the trained model.
214213
215214``` {code-cell} ipython3
216215def plot_keras_output(model, x, y, x_validate, y_validate):
217- y_predict = model.predict(x , verbose=2)
216+    y_predict = model.predict(x_validate, verbose=2)
218217 fig, ax = plt.subplots()
219- ax.scatter(x, y )
220- ax.plot(x , y_predict, label="fitted model", color='black')
218+    ax.scatter(x_validate, y_validate, color='red', alpha=0.5)
219+    ax.plot(x_validate, y_predict, label="fitted model", color='black')
221220 ax.set_xlabel('x')
222221 ax.set_ylabel('y')
223222 plt.show()
@@ -495,8 +494,8 @@ Here's a visualization of the quality of our fit.
495494
496495``` {code-cell} ipython3
497496fig, ax = plt.subplots()
498- ax.scatter(x_train, y_train )
499- ax.plot(x_train .flatten(), f(θ, x_train ).flatten(),
497+ax.scatter(x_validate, y_validate, color='red', alpha=0.5)
498+ax.plot(x_validate.flatten(), f(θ, x_validate).flatten(),
500499 label="fitted model", color='black')
501500ax.set_xlabel('x')
502501ax.set_ylabel('y')
@@ -566,8 +565,8 @@ print(f"Final MSE on validation data = {optax_sgd_mse:.6f}")
566565
567566``` {code-cell} ipython3
568567fig, ax = plt.subplots()
569- ax.scatter(x_train, y_train )
570- ax.plot(x_train .flatten(), f(θ, x_train ).flatten(),
568+ax.scatter(x_validate, y_validate, color='red', alpha=0.5)
569+ax.plot(x_validate.flatten(), f(θ, x_validate).flatten(),
571570 label="fitted model", color='black')
572571ax.set_xlabel('x')
573572ax.set_ylabel('y')
@@ -633,8 +632,8 @@ Here's a visualization of the result.
633632
634633``` {code-cell} ipython3
635634fig, ax = plt.subplots()
636- ax.scatter(x_train, y_train )
637- ax.plot(x_train .flatten(), f(θ, x_train ).flatten(),
635+ax.scatter(x_validate, y_validate, color='red', alpha=0.5)
636+ax.plot(x_validate.flatten(), f(θ, x_validate).flatten(),
638637 label="fitted model", color='black')
639638ax.set_xlabel('x')
640639ax.set_ylabel('y')
@@ -688,6 +687,9 @@ results = {
688687}
689688
690689df = pd.DataFrame(results)
690+ # Format MSE columns to 6 decimal places
691+ df['Training MSE'] = df['Training MSE'].apply(lambda x: f"{x:.6f}")
692+ df['Validation MSE'] = df['Validation MSE'].apply(lambda x: f"{x:.6f}")
691693print("\nSummary of Training Methods:")
692694print(df.to_string(index=False))
693695```
0 commit comments