diff --git a/11_training_deep_neural_networks.ipynb b/11_training_deep_neural_networks.ipynb
index e9fdc9f..4fb717f 100644
--- a/11_training_deep_neural_networks.ipynb
+++ b/11_training_deep_neural_networks.ipynb
@@ -970,6 +970,13 @@
     "model_B_on_A.add(keras.layers.Dense(1, activation=\"sigmoid\"))"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Note that `model_B_on_A` and `model_A` actually share layers now, so when we train one, it will update both models. If we want to avoid that, we need to build `model_B_on_A` on top of a *clone* of `model_A`:"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": 61,
@@ -977,7 +984,9 @@
    "outputs": [],
    "source": [
     "model_A_clone = keras.models.clone_model(model_A)\n",
-    "model_A_clone.set_weights(model_A.get_weights())"
+    "model_A_clone.set_weights(model_A.get_weights())\n",
+    "model_B_on_A = keras.models.Sequential(model_A_clone.layers[:-1])\n",
+    "model_B_on_A.add(keras.layers.Dense(1, activation=\"sigmoid\"))"
    ]
   },
   {
@@ -1042,7 +1051,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Great! We got quite a bit of transfer: the error rate dropped by a factor of 4.5!"
+    "Great! We got quite a bit of transfer: the error rate dropped by a factor of 4.9!"
    ]
   },
   {
@@ -1051,7 +1060,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "(100 - 97.05) / (100 - 99.35)"
+    "(100 - 97.05) / (100 - 99.40)"
    ]
   },
   {
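
Note for readers skimming the patch: the sketch below is not part of the diff; it is a minimal, self-contained restatement of the pattern the new cells introduce, namely cloning model_A before stacking model_B_on_A on it so that training task B does not silently update model_A's weights. The tiny model_A architecture and input size used here are invented for illustration only.

    # Minimal sketch of the clone-before-reuse pattern (illustrative only).
    from tensorflow import keras

    # Hypothetical stand-in for the notebook's model_A; the architecture is invented for this sketch.
    model_A = keras.models.Sequential([
        keras.layers.Dense(16, activation="relu", input_shape=[8]),
        keras.layers.Dense(8, activation="softmax"),
    ])

    # Reusing model_A.layers directly would make both models share the same layer objects,
    # so training model_B_on_A would also change model_A. Clone the architecture and copy
    # the weights explicitly (clone_model does not copy weights):
    model_A_clone = keras.models.clone_model(model_A)
    model_A_clone.set_weights(model_A.get_weights())

    # Build model_B_on_A from the clone's layers (all but the output layer),
    # then add a new binary output layer for task B.
    model_B_on_A = keras.models.Sequential(model_A_clone.layers[:-1])
    model_B_on_A.add(keras.layers.Dense(1, activation="sigmoid"))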