Fix error in training a stacked autoencoder one autoencoder at a time (fixes #166)

main
Aurélien Geron 2018-02-06 16:36:23 +01:00
parent a6fdeb23dd
commit 1a6094a8dc
1 changed file with 15 additions and 28 deletions

View File

@ -31,9 +31,7 @@
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"# To support both python 2 and python 3\n",
@ -80,9 +78,7 @@
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"def plot_image(image, shape=[28, 28]):\n",
@ -93,9 +89,7 @@
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"def plot_multiple_images(images, n_rows, n_cols, pad=2):\n",
@ -126,9 +120,7 @@
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"import numpy.random as rnd\n",
@ -419,9 +411,7 @@
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"reset_graph()\n",
@ -479,9 +469,7 @@
{
"cell_type": "code",
"execution_count": 16,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"saver = tf.train.Saver()"
@ -545,9 +533,7 @@
{
"cell_type": "code",
"execution_count": 19,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"reset_graph()\n",
@ -555,8 +541,9 @@
"from functools import partial\n",
"\n",
"def train_autoencoder(X_train, n_neurons, n_epochs, batch_size,\n",
" learning_rate = 0.01, l2_reg = 0.0005,\n",
" activation=tf.nn.elu, seed=42):\n",
" learning_rate = 0.01, l2_reg = 0.0005, seed=42,\n",
" hidden_activation=tf.nn.elu,\n",
" output_activation=tf.nn.elu):\n",
" graph = tf.Graph()\n",
" with graph.as_default():\n",
" tf.set_random_seed(seed)\n",
@ -567,12 +554,11 @@
" \n",
" my_dense_layer = partial(\n",
" tf.layers.dense,\n",
" activation=activation,\n",
" kernel_initializer=tf.contrib.layers.variance_scaling_initializer(),\n",
" kernel_regularizer=tf.contrib.layers.l2_regularizer(l2_reg))\n",
"\n",
" hidden = my_dense_layer(X, n_neurons, name=\"hidden\")\n",
" outputs = my_dense_layer(hidden, n_inputs, activation=None, name=\"outputs\")\n",
" hidden = my_dense_layer(X, n_neurons, activation=hidden_activation, name=\"hidden\")\n",
" outputs = my_dense_layer(hidden, n_inputs, activation=output_activation, name=\"outputs\")\n",
"\n",
" reconstruction_loss = tf.reduce_mean(tf.square(outputs - X))\n",
"\n",
@ -614,7 +600,8 @@
"metadata": {},
"outputs": [],
"source": [
"hidden_output, W1, b1, W4, b4 = train_autoencoder(mnist.train.images, n_neurons=300, n_epochs=4, batch_size=150)\n",
"hidden_output, W1, b1, W4, b4 = train_autoencoder(mnist.train.images, n_neurons=300, n_epochs=4, batch_size=150,\n",
" output_activation=None)\n",
"_, W2, b2, W3, b3 = train_autoencoder(hidden_output, n_neurons=150, n_epochs=4, batch_size=150)"
]
},
@ -1748,7 +1735,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
"version": "3.6.4"
},
"nav_menu": {
"height": "381px",