From 6ec768b1652fb2893f73a91e9d44d928471ef586 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20Geron?= Date: Tue, 6 Jun 2017 23:12:21 +0200 Subject: [PATCH] Use tf.set_random_seed(42) and more to make notebook's output constant --- ...uction_to_artificial_neural_networks.ipynb | 102 +++++++++++++----- 1 file changed, 76 insertions(+), 26 deletions(-) diff --git a/10_introduction_to_artificial_neural_networks.ipynb b/10_introduction_to_artificial_neural_networks.ipynb index 36ca5b7..2d9d9fa 100644 --- a/10_introduction_to_artificial_neural_networks.ipynb +++ b/10_introduction_to_artificial_neural_networks.ipynb @@ -55,11 +55,13 @@ "\n", "# Common imports\n", "import numpy as np\n", - "import numpy.random as rnd\n", "import os\n", "\n", "# to make this notebook's output stable across runs\n", - "rnd.seed(42)\n", + "def reset_graph(seed=42):\n", + " tf.reset_default_graph()\n", + " tf.set_random_seed(seed)\n", + " np.random.seed(seed)\n", "\n", "# To plot pretty figures\n", "%matplotlib inline\n", @@ -356,9 +358,11 @@ "source": [ "import tensorflow as tf\n", "\n", + "config = tf.contrib.learn.RunConfig(tf_random_seed=42) # not shown in the config\n", + "\n", "feature_cols = tf.contrib.learn.infer_real_valued_columns_from_input(X_train)\n", "dnn_clf = tf.contrib.learn.DNNClassifier(hidden_units=[300,100], n_classes=10,\n", - " feature_columns=feature_cols)\n", + " feature_columns=feature_cols, config=config)\n", "dnn_clf = tf.contrib.learn.SKCompat(dnn_clf) # if TensorFlow >= 1.1\n", "dnn_clf.fit(X_train, y_train, batch_size=50, steps=40000)" ] @@ -434,7 +438,7 @@ }, "outputs": [], "source": [ - "tf.reset_default_graph()\n", + "reset_graph()\n", "\n", "X = tf.placeholder(tf.float32, shape=(None, n_inputs), name=\"X\")\n", "y = tf.placeholder(tf.int64, shape=(None), name=\"y\")" @@ -723,7 +727,7 @@ }, "outputs": [], "source": [ - "tf.reset_default_graph()\n", + "reset_graph()\n", "\n", "X = tf.placeholder(tf.float32, shape=(None, n_inputs), name=\"X\")\n", "y = tf.placeholder(tf.int64, shape=(None), name=\"y\") " @@ -751,7 +755,9 @@ "cell_type": "code", "execution_count": 31, "metadata": { - "collapsed": true + "collapsed": true, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -764,7 +770,9 @@ "cell_type": "code", "execution_count": 32, "metadata": { - "collapsed": true + "collapsed": true, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -779,7 +787,9 @@ "cell_type": "code", "execution_count": 33, "metadata": { - "collapsed": true + "collapsed": true, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -792,7 +802,9 @@ "cell_type": "code", "execution_count": 34, "metadata": { - "collapsed": true + "collapsed": true, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -873,21 +885,30 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "## 9." ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "_Train a deep MLP on the MNIST dataset and see if you can get over 98% precision. Just like in the last exercise of chapter 9, try adding all the bells and whistles (i.e., save checkpoints, restore the last checkpoint in case of an interruption, add summaries, plot learning curves using TensorBoard, and so on)._" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "First let's create the deep net. 
It's exactly the same as earlier, with just one addition: we add a `tf.summary.scalar()` to track the loss and the accuracy during training, so we can view nice learning curves using TensorBoard." ] @@ -918,7 +939,7 @@ }, "outputs": [], "source": [ - "tf.reset_default_graph()\n", + "reset_graph()\n", "\n", "X = tf.placeholder(tf.float32, shape=(None, n_inputs), name=\"X\")\n", "y = tf.placeholder(tf.int64, shape=(None), name=\"y\") " @@ -1007,7 +1028,10 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "Now we need to define the directory to write the TensorBoard logs to:" ] @@ -1016,7 +1040,9 @@ "cell_type": "code", "execution_count": 44, "metadata": { - "collapsed": true + "collapsed": true, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -1035,7 +1061,9 @@ "cell_type": "code", "execution_count": 45, "metadata": { - "collapsed": true + "collapsed": true, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -1044,7 +1072,10 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "Now we can create the `FileWriter` that we will use to write the TensorBoard logs:" ] @@ -1053,7 +1084,9 @@ "cell_type": "code", "execution_count": 46, "metadata": { - "collapsed": true + "collapsed": true, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -1062,7 +1095,10 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "Hey! Why don't we implement early stopping? For this, we are going to need a validation set. Luckily, the dataset returned by TensorFlow's `input_data()` function (see above) is already split into a training set (60,000 instances, already shuffled for us), a validation set (5,000 instances) and a test set (5,000 instances). So we can easily define `X_valid` and `y_valid`:" ] @@ -1071,7 +1107,9 @@ "cell_type": "code", "execution_count": 47, "metadata": { - "collapsed": false + "collapsed": false, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -1083,7 +1121,9 @@ "cell_type": "code", "execution_count": 48, "metadata": { - "collapsed": true + "collapsed": true, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -1094,7 +1134,9 @@ "cell_type": "code", "execution_count": 49, "metadata": { - "collapsed": false + "collapsed": false, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -1149,7 +1191,9 @@ "cell_type": "code", "execution_count": 50, "metadata": { - "collapsed": false + "collapsed": false, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -1160,7 +1204,9 @@ "cell_type": "code", "execution_count": 51, "metadata": { - "collapsed": false + "collapsed": false, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -1173,7 +1219,9 @@ "cell_type": "code", "execution_count": 52, "metadata": { - "collapsed": false + "collapsed": false, + "deletable": true, + "editable": true }, "outputs": [], "source": [ @@ -1184,7 +1232,9 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true + "collapsed": true, + "deletable": true, + "editable": true }, "outputs": [], "source": []
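
For reference, the following is a minimal standalone sketch (not part of the patch itself) of the pattern these hunks introduce: a `reset_graph()` helper to make runs reproducible, `tf.summary.scalar()` ops for the loss and accuracy, a `tf.summary.FileWriter` writing to a timestamped log directory, and an early-stopping loop over the MNIST validation set. It uses the TensorFlow 1.x API shown in the diff; the layer sizes, learning rate, patience value, checkpoint path and log-directory name are illustrative assumptions, not necessarily the notebook's exact values.

    # Sketch only: reproducible graph + TensorBoard summaries + early stopping (TF 1.x)
    from datetime import datetime
    import numpy as np
    import tensorflow as tf

    def reset_graph(seed=42):
        # Clear the default graph and fix both TensorFlow's and NumPy's seeds
        tf.reset_default_graph()
        tf.set_random_seed(seed)
        np.random.seed(seed)

    n_inputs = 28 * 28          # MNIST images are 28x28 pixels
    n_hidden1, n_hidden2, n_outputs = 300, 100, 10   # assumed layer sizes

    reset_graph()

    X = tf.placeholder(tf.float32, shape=(None, n_inputs), name="X")
    y = tf.placeholder(tf.int64, shape=(None), name="y")

    hidden1 = tf.layers.dense(X, n_hidden1, activation=tf.nn.relu, name="hidden1")
    hidden2 = tf.layers.dense(hidden1, n_hidden2, activation=tf.nn.relu, name="hidden2")
    logits = tf.layers.dense(hidden2, n_outputs, name="outputs")

    xentropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=logits)
    loss = tf.reduce_mean(xentropy, name="loss")
    training_op = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(loss)

    correct = tf.nn.in_top_k(logits, y, 1)
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32), name="accuracy")

    # Scalar summaries so TensorBoard can plot learning curves
    loss_summary = tf.summary.scalar("log_loss", loss)
    accuracy_summary = tf.summary.scalar("accuracy", accuracy)

    # Timestamped log directory so successive runs do not overwrite each other
    logdir = "tf_logs/run-{}/".format(datetime.utcnow().strftime("%Y%m%d%H%M%S"))
    file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())

    init = tf.global_variables_initializer()
    saver = tf.train.Saver()

    # The dataset returned by input_data() is already split into train/validation/test
    from tensorflow.examples.tutorials.mnist import input_data
    mnist = input_data.read_data_sets("/tmp/data/")
    X_valid, y_valid = mnist.validation.images, mnist.validation.labels

    n_epochs, batch_size = 100, 50
    best_loss = np.infty
    checks_without_progress, max_checks_without_progress = 0, 20   # assumed patience

    with tf.Session() as sess:
        init.run()
        for epoch in range(n_epochs):
            for iteration in range(mnist.train.num_examples // batch_size):
                X_batch, y_batch = mnist.train.next_batch(batch_size)
                sess.run(training_op, feed_dict={X: X_batch, y: y_batch})
            # Evaluate on the validation set and log the summaries for TensorBoard
            loss_val, acc_val, loss_str, acc_str = sess.run(
                [loss, accuracy, loss_summary, accuracy_summary],
                feed_dict={X: X_valid, y: y_valid})
            file_writer.add_summary(loss_str, epoch)
            file_writer.add_summary(acc_str, epoch)
            if loss_val < best_loss:
                best_loss = loss_val
                checks_without_progress = 0
                saver.save(sess, "./my_mnist_model")   # checkpoint the best model so far
            else:
                checks_without_progress += 1
                if checks_without_progress > max_checks_without_progress:
                    print("Early stopping")
                    break
    file_writer.close()

After training, running `tensorboard --logdir tf_logs/` shows the logged loss and accuracy curves; because each run writes to its own timestamped subdirectory, curves from successive runs can be compared side by side.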