No bias before BN layers
parent 1774ede668
commit 3313c21723

@@ -643,7 +643,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Sometimes applying BN before the activation function works better (there's a debate on this topic):"
+    "Sometimes applying BN before the activation function works better (there's a debate on this topic). Moreover, the layer before a `BatchNormalization` layer does not need to have bias terms, since the `BatchNormalization` layer has some as well; it would be a waste of parameters, so you can set `use_bias=False` when creating those layers:"
    ]
   },
   {
@@ -655,10 +655,10 @@
    "model = keras.models.Sequential([\n",
    "    keras.layers.Flatten(input_shape=[28, 28]),\n",
    "    keras.layers.BatchNormalization(),\n",
-    "    keras.layers.Dense(300),\n",
+    "    keras.layers.Dense(300, use_bias=False),\n",
    "    keras.layers.BatchNormalization(),\n",
    "    keras.layers.Activation(\"relu\"),\n",
-    "    keras.layers.Dense(100),\n",
+    "    keras.layers.Dense(100, use_bias=False),\n",
    "    keras.layers.Activation(\"relu\"),\n",
    "    keras.layers.BatchNormalization(),\n",
    "    keras.layers.Dense(10, activation=\"softmax\")\n",
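
For reference, here is a minimal sketch (not part of the commit, assuming TensorFlow 2.x with its bundled `keras`) of why the bias terms are redundant: a `Dense` layer created with `use_bias=False` holds only a kernel, while every `BatchNormalization` layer already owns an offset vector (beta) alongside its scale (gamma) and moving statistics.

from tensorflow import keras  # assumption: TensorFlow 2.x, where Keras ships with TensorFlow

# Same architecture as the patched cell: the BN layers supply the offsets,
# so the Dense layers are created without bias terms.
model = keras.models.Sequential([
    keras.layers.Flatten(input_shape=[28, 28]),
    keras.layers.BatchNormalization(),
    keras.layers.Dense(300, use_bias=False),
    keras.layers.BatchNormalization(),
    keras.layers.Activation("relu"),
    keras.layers.Dense(100, use_bias=False),
    keras.layers.Activation("relu"),
    keras.layers.BatchNormalization(),
    keras.layers.Dense(10, activation="softmax")
])

# First hidden Dense layer: only a kernel, no bias vector.
print([w.name for w in model.layers[2].weights])
# -> something like ['dense/kernel:0']

# A BatchNormalization layer: gamma (scale), beta (offset) and the
# non-trainable moving mean/variance; beta plays the role of the bias.
print([w.name for w in model.layers[1].weights])
# -> something like ['batch_normalization/gamma:0', 'batch_normalization/beta:0',
#    'batch_normalization/moving_mean:0', 'batch_normalization/moving_variance:0']

Compared with the biased version of this model, the change removes 300 + 100 = 400 trainable parameters, which you can confirm with `model.summary()`.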