From 6f98bc5a04edef3e9b77f306aa772336e90f5a48 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Aur=C3=A9lien=20Geron?=
Date: Sun, 21 Apr 2019 15:55:39 +0800
Subject: [PATCH] Stateful RNNs now support recurrent_dropout

---
 16_nlp_with_rnns_and_attention.ipynb | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/16_nlp_with_rnns_and_attention.ipynb b/16_nlp_with_rnns_and_attention.ipynb
index 07d63e0..45e1d97 100644
--- a/16_nlp_with_rnns_and_attention.ipynb
+++ b/16_nlp_with_rnns_and_attention.ipynb
@@ -471,11 +471,10 @@
    "source": [
     "model = keras.models.Sequential([\n",
     "    keras.layers.GRU(128, return_sequences=True, stateful=True,\n",
-    "#                     dropout=0.2, recurrent_dropout=0.2, # see TF issue #27829\n",
+    "                     dropout=0.2, recurrent_dropout=0.2,\n",
     "                     batch_input_shape=[batch_size, None, max_id]),\n",
-    "    keras.layers.GRU(128, return_sequences=True, stateful=True\n",
-    "#                     dropout=0.2, recurrent_dropout=0.2 # see TF issue #27829\n",
-    "                    ),\n",
+    "    keras.layers.GRU(128, return_sequences=True, stateful=True,\n",
+    "                     dropout=0.2, recurrent_dropout=0.2),\n",
     "    keras.layers.TimeDistributed(keras.layers.Dense(max_id,\n",
     "                                                    activation=\"softmax\"))\n",
    "])"
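
For reference, below is a minimal standalone sketch of the model that the patched notebook cell builds, assuming a TensorFlow 2.x build in which stateful RNN layers accept recurrent_dropout (the previously commented-out arguments referenced TF issue #27829). The batch_size and max_id values are placeholders: in the notebook they come from earlier dataset-preparation cells that are not part of this diff, and the compile() call is only illustrative.

    from tensorflow import keras

    batch_size = 32   # placeholder: must match the batch size of the training dataset
    max_id = 39       # placeholder: number of distinct characters in the corpus

    model = keras.models.Sequential([
        # Stateful GRU: the hidden state is carried over from one batch to the
        # next, so the batch dimension must be fixed via batch_input_shape.
        keras.layers.GRU(128, return_sequences=True, stateful=True,
                         dropout=0.2, recurrent_dropout=0.2,
                         batch_input_shape=[batch_size, None, max_id]),
        keras.layers.GRU(128, return_sequences=True, stateful=True,
                         dropout=0.2, recurrent_dropout=0.2),
        # A softmax over the character vocabulary at every time step.
        keras.layers.TimeDistributed(keras.layers.Dense(max_id,
                                                        activation="softmax")),
    ])
    model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")

As with any stateful RNN, the training data must be fed as consecutive, non-shuffled sequences with a fixed batch size, and the states should be reset (for example via a callback that calls model.reset_states()) at the start of each epoch.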