From c8f924a9e0893a60032d53492087443824f42c30 Mon Sep 17 00:00:00 2001
From: Matt McKay
Date: Tue, 19 Nov 2024 13:38:54 +1100
Subject: [PATCH 1/2] Update lectures/keras.md

---
 lectures/keras.md | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/lectures/keras.md b/lectures/keras.md
index 5bfe433..5bbc8f1 100644
--- a/lectures/keras.md
+++ b/lectures/keras.md
@@ -289,10 +289,3 @@ plot_results(x_validate, y_validate, y_predict, ax)
 plt.show()
 ```
 
-```{code-cell} ipython3
-
-```
-
-```{code-cell} ipython3
-
-```

From b9ae7f42dfb71462667198ef0bf95887eb205b33 Mon Sep 17 00:00:00 2001
From: Matt McKay
Date: Tue, 19 Nov 2024 13:39:02 +1100
Subject: [PATCH 2/2] Update lectures/keras.md

---
 lectures/keras.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lectures/keras.md b/lectures/keras.md
index 5bbc8f1..50ef16c 100644
--- a/lectures/keras.md
+++ b/lectures/keras.md
@@ -133,7 +133,7 @@ def build_regression_model(model):
     return model
 ```
 
-In the function above you can see that we use stochatic gradient descent to
+In the function above you can see that we use stochastic gradient descent to
 train the model, and that the loss is mean squared error (MSE).
 
 MSE is the standard loss function for ordinary least squares regression.
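
For reference, the context lines in the second hunk describe the lecture's `build_regression_model` compiling a Keras model with stochastic gradient descent and a mean squared error loss. Below is a minimal sketch of such a setup; the single linear output unit, the 1-D input shape, and the use of `keras.Sequential` are illustrative assumptions, not the lecture's exact code.

```python
# Minimal sketch (assumed, not the lecture's exact code): a Keras regression
# model compiled with stochastic gradient descent and a mean squared error loss.
import keras
from keras import layers

def build_regression_model(model):
    # Single linear output unit for one-dimensional regression (assumed shape).
    model.add(layers.Dense(units=1))
    # Stochastic gradient descent optimizer with MSE as the training loss,
    # matching the description in the patched paragraph.
    model.compile(optimizer=keras.optimizers.SGD(),
                  loss='mean_squared_error')
    return model

# Usage: build on an empty Sequential model with a 1-D input.
model = build_regression_model(keras.Sequential([keras.Input(shape=(1,))]))
```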