# Creating the Encoder RNN
```python
import tensorflow as tf

def encoder_rnn(rnn_inputs, rnn_size, num_layers, keep_prob, sequence_length):
    # Build one LSTM layer with dropout applied to its inputs
    def make_cell():
        lstm = tf.contrib.rnn.BasicLSTMCell(rnn_size)
        return tf.contrib.rnn.DropoutWrapper(lstm, input_keep_prob=keep_prob)

    # Use separate cell stacks for the forward and backward directions so the
    # bidirectional RNN does not try to reuse the same variables in both passes
    encoder_cell_fw = tf.contrib.rnn.MultiRNNCell([make_cell() for _ in range(num_layers)])
    encoder_cell_bw = tf.contrib.rnn.MultiRNNCell([make_cell() for _ in range(num_layers)])

    encoder_output, encoder_state = tf.nn.bidirectional_dynamic_rnn(cell_fw=encoder_cell_fw,
                                                                    cell_bw=encoder_cell_bw,
                                                                    sequence_length=sequence_length,
                                                                    inputs=rnn_inputs,
                                                                    dtype=tf.float32)
    # encoder_state is a (forward, backward) tuple of final cell states
    return encoder_state
```

`tf.contrib.rnn.BasicLSTMCell(rnn_size)` creates a single LSTM layer with `rnn_size` hidden units, and `tf.contrib.rnn.MultiRNNCell` stacks `num_layers` of these cells, each wrapped in a `DropoutWrapper` that applies dropout to the layer's inputs. `tf.nn.bidirectional_dynamic_rnn` runs a forward and a backward pass over `rnn_inputs` and uses a `tf.while_loop` to dynamically construct the graph when it is executed, so it can handle batches with varying `sequence_length`.
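As a quick usage sketch, the snippet below shows one way the encoder could be wired into a TensorFlow 1.x graph. The placeholder names, `vocab_size`, `embedding_size`, and the use of `tf.contrib.layers.embed_sequence` are illustrative assumptions, not part of the original code.

```python
# Hypothetical wiring of the encoder (placeholder names and sizes are assumptions)
inputs = tf.placeholder(tf.int32, [None, None], name='inputs')              # [batch, time] word ids
sequence_length = tf.placeholder(tf.int32, [None], name='sequence_length')  # length of each sequence
keep_prob = tf.placeholder(tf.float32, name='keep_prob')                    # dropout keep probability

vocab_size = 10000      # assumed vocabulary size
embedding_size = 512    # assumed embedding dimension
rnn_size = 512          # hidden units per LSTM layer
num_layers = 3          # stacked LSTM layers per direction

# Map integer word ids to dense vectors before feeding them to the encoder
embedded_inputs = tf.contrib.layers.embed_sequence(inputs,
                                                   vocab_size=vocab_size,
                                                   embed_dim=embedding_size)

encoder_state = encoder_rnn(embedded_inputs, rnn_size, num_layers,
                            keep_prob, sequence_length)
```

Returning only `encoder_state` fits a plain seq2seq setup in which the decoder is initialized from the encoder's final state; if an attention mechanism were added later, `encoder_output` would need to be returned as well.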