Update 11_training_deep_neural_networks.ipynb
Swapped the Activation and BatchNormalization lines in order to make the code consistent with the description and the book (p. 343), i.e. adding the BN layers BEFORE the activation function.

Branch: main
parent
1f542d2757
commit
de450d8077
|
@ -715,8 +715,8 @@
|
||||||
" keras.layers.BatchNormalization(),\n",
|
" keras.layers.BatchNormalization(),\n",
|
||||||
" keras.layers.Activation(\"relu\"),\n",
|
" keras.layers.Activation(\"relu\"),\n",
|
||||||
" keras.layers.Dense(100, use_bias=False),\n",
|
" keras.layers.Dense(100, use_bias=False),\n",
|
||||||
" keras.layers.Activation(\"relu\"),\n",
|
|
||||||
" keras.layers.BatchNormalization(),\n",
|
" keras.layers.BatchNormalization(),\n",
|
||||||
|
" keras.layers.Activation(\"relu\"),\n",
|
||||||
" keras.layers.Dense(10, activation=\"softmax\")\n",
|
" keras.layers.Dense(10, activation=\"softmax\")\n",
|
||||||
"])"
|
"])"
|
||||||
]
|
]
|
||||||
|
|
Loading…
Reference in New Issue