diff --git a/16_nlp_with_rnns_and_attention.ipynb b/16_nlp_with_rnns_and_attention.ipynb
index 07d63e0..45e1d97 100644
--- a/16_nlp_with_rnns_and_attention.ipynb
+++ b/16_nlp_with_rnns_and_attention.ipynb
@@ -471,11 +471,10 @@
    "source": [
     "model = keras.models.Sequential([\n",
     "    keras.layers.GRU(128, return_sequences=True, stateful=True,\n",
-    "#                    dropout=0.2, recurrent_dropout=0.2, # see TF issue #27829\n",
+    "                     dropout=0.2, recurrent_dropout=0.2,\n",
     "                     batch_input_shape=[batch_size, None, max_id]),\n",
-    "    keras.layers.GRU(128, return_sequences=True, stateful=True\n",
-    "#                    dropout=0.2, recurrent_dropout=0.2 # see TF issue #27829\n",
-    "                    ),\n",
+    "    keras.layers.GRU(128, return_sequences=True, stateful=True,\n",
+    "                     dropout=0.2, recurrent_dropout=0.2),\n",
     "    keras.layers.TimeDistributed(keras.layers.Dense(max_id,\n",
     "                                                     activation=\"softmax\"))\n",
     "])"
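
For reference, a minimal sketch of the model cell as it reads after this change: it re-enables dropout and recurrent_dropout on both stateful GRU layers, which the notebook had previously commented out because of TF issue #27829. This assumes batch_size and max_id are defined earlier in the notebook; the values below are placeholders, not the notebook's actual ones. Note that a non-zero recurrent_dropout typically forces Keras to use the slower non-cuDNN GRU kernel on GPU.

    from tensorflow import keras

    batch_size = 32   # placeholder: must match the batch size used to build the dataset
    max_id = 39       # placeholder: number of distinct characters in the corpus

    # Stateful char-RNN: two GRU layers with dropout re-enabled, then a
    # time-distributed softmax over the character vocabulary.
    model = keras.models.Sequential([
        keras.layers.GRU(128, return_sequences=True, stateful=True,
                         dropout=0.2, recurrent_dropout=0.2,
                         batch_input_shape=[batch_size, None, max_id]),
        keras.layers.GRU(128, return_sequences=True, stateful=True,
                         dropout=0.2, recurrent_dropout=0.2),
        keras.layers.TimeDistributed(keras.layers.Dense(max_id,
                                                        activation="softmax"))
    ])
    model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")
    model.summary()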