像这样的东西应该有效。
import tensorflow as tf
import numpy as np
class CustomRNN(tf.contrib.rnn.LSTMCell):
    """LSTM cell whose per-step output is its full concatenated state.

    Forces ``state_is_tuple=False`` so the cell state is one concatenated
    tensor (c and h stacked on the last axis), then widens ``output_size``
    to ``state_size`` so ``tf.nn.dynamic_rnn`` collects the whole state at
    every time step instead of only the hidden output h.
    """

    def __init__(self, *args, **kwargs):
        # Force a concatenated (non-tuple) state so it can be emitted as a
        # single tensor per step.
        kwargs['state_is_tuple'] = False
        super(CustomRNN, self).__init__(*args, **kwargs)
        # Advertise the state size as the output size so dynamic_rnn
        # allocates room for the full state at each time step.
        self._output_size = self._state_size

    def __call__(self, inputs, state):
        # Run the normal LSTM step, then discard the hidden output and
        # return the next state in both the output and state slots.
        output, next_state = super(CustomRNN, self).__call__(inputs, state)
        return next_state, next_state
# Batch of 2 sequences, 10 time steps, 8 features each.
X = np.random.randn(2, 10, 8)
X[1, 6:] = 0  # zero out the tail of the second sequence (padding)
# NOTE(review): both lengths are 10, so the zeroed steps of sequence 1 are
# still fed through the cell — confirm whether [10, 6] was intended.
X_lengths = [10, 10]

cell = CustomRNN(num_units=64)

# Because CustomRNN reports state_size as its output_size, `outputs` holds
# the full concatenated LSTM state at every time step.
outputs, last_states = tf.nn.dynamic_rnn(
    cell=cell,
    inputs=X,
    sequence_length=X_lengths,
    dtype=tf.float64,
)

sess = tf.InteractiveSession()
sess.run(tf.global_variables_initializer())
states, last_state = sess.run([outputs, last_states], feed_dict=None)
这使用连接状态，因为我不知道是否可以存储任意数量的元组状态。states 变量的形状为 (batch_size, max_time_size, state_size)。