From 3313c217234b679aae16cb5282ad381340341725 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Aur=C3=A9lien=20Geron?=
Date: Mon, 25 Mar 2019 12:03:44 +0800
Subject: [PATCH] No bias before BN layers

---
 11_deep_learning.ipynb | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/11_deep_learning.ipynb b/11_deep_learning.ipynb
index d470157..b0c4eec 100644
--- a/11_deep_learning.ipynb
+++ b/11_deep_learning.ipynb
@@ -643,7 +643,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Sometimes applying BN before the activation function works better (there's a debate on this topic):"
+    "Sometimes applying BN before the activation function works better (there's a debate on this topic). Moreover, the layer before a `BatchNormalization` layer does not need bias terms, since the `BatchNormalization` layer has some as well; they would be a waste of parameters, so you can set `use_bias=False` when creating those layers:"
    ]
   },
   {
@@ -655,10 +655,10 @@
    "model = keras.models.Sequential([\n",
    "    keras.layers.Flatten(input_shape=[28, 28]),\n",
    "    keras.layers.BatchNormalization(),\n",
-    "    keras.layers.Dense(300),\n",
+    "    keras.layers.Dense(300, use_bias=False),\n",
    "    keras.layers.BatchNormalization(),\n",
    "    keras.layers.Activation(\"relu\"),\n",
-    "    keras.layers.Dense(100),\n",
+    "    keras.layers.Dense(100, use_bias=False),\n",
    "    keras.layers.Activation(\"relu\"),\n",
    "    keras.layers.BatchNormalization(),\n",
    "    keras.layers.Dense(10, activation=\"softmax\")\n",
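The reason the bias is redundant: a `Dense` layer computes z = Wx + b, and a `BatchNormalization` layer that directly follows it subtracts the batch mean of z before rescaling, so any constant bias b is cancelled by the centering step; BN's own trainable beta offset takes over that role. The parameter saving is easy to check empirically. Below is a minimal standalone sketch (not part of the patch) assuming a TensorFlow install where `from tensorflow import keras` exposes the same `keras.*` names used in the notebook:

```python
from tensorflow import keras

# Dense layer with a bias, followed by BatchNormalization.
with_bias = keras.models.Sequential([
    keras.layers.Flatten(input_shape=[28, 28]),
    keras.layers.Dense(300),            # 784*300 weights + 300 biases
    keras.layers.BatchNormalization(),  # gamma, beta + moving mean/variance
])

# Same architecture without the redundant bias terms.
without_bias = keras.models.Sequential([
    keras.layers.Flatten(input_shape=[28, 28]),
    keras.layers.Dense(300, use_bias=False),  # 784*300 weights only
    keras.layers.BatchNormalization(),
])

# The difference is exactly one bias per Dense unit.
print(with_bias.count_params() - without_bias.count_params())  # 300
```

Note that the cancellation argument strictly holds only when `BatchNormalization` immediately follows the `Dense` layer; with an activation function in between, the bias is no longer exactly redundant, since a nonlinearity such as ReLU does not pass a constant shift through unchanged.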