@tf.function
def train(model, dataset, optimizer):
    """Run one pass over `dataset`, applying a gradient step per batch.

    Args:
        model: a callable Keras model; `model(x)` returns predictions and
            `model.trainable_variables` lists the variables to update.
        dataset: iterable of `(x, y)` batches (e.g. a `tf.data.Dataset`).
        optimizer: a `tf.keras.optimizers.Optimizer`.

    Relies on a module-level `loss_fn(prediction, y)` defined elsewhere
    in this file.
    """
    for x, y in dataset:
        with tf.GradientTape() as tape:
            prediction = model(x)
            loss = loss_fn(prediction, y)
        # Bug fix: the method is `tape.gradient`, not `tape.gradients`.
        gradients = tape.gradient(loss, model.trainable_variables)
        # Bug fix: apply_gradients expects an iterable of (grad, var) pairs.
        optimizer.apply_gradients(zip(gradients, model.trainable_variables))
# High-level Keras alternative to the custom loop above: compile the model
# with the same optimizer/loss, then let `fit` drive the training iteration.
# NOTE(review): `model`, `optimizer`, `loss_fn`, and `dataset` are assumed to
# be defined earlier in this file — confirm against the surrounding code.
model.compile(optimizer=optimizer, loss=loss_fn)
model.fit(dataset)
class DynamicRNN(tf.keras.Model):
    """Unrolls an RNN cell over a variable number of time steps.

    Wraps a single RNN cell (one exposing `zero_state` and
    `__call__(inputs, state)`) and steps it across the time dimension of a
    batch-major input, collecting per-step outputs in a `tf.TensorArray`.
    """

    def __init__(self, rnn_cell):
        # Bug fix: the original called super().__init__(self), passing the
        # instance itself as a positional argument to tf.keras.Model.__init__.
        super().__init__()
        self.cell = rnn_cell

    def call(self, input_data):
        """Run the cell over every time step of `input_data`.

        Args:
            input_data: float tensor, batch-major [batch, time, features].

        Returns:
            A tuple `(outputs, state)` where `outputs` is
            [batch, time, out_features] and `state` is the cell's final state.
        """
        # [batch, time, features] -> [time, batch, features]
        input_data = tf.transpose(input_data, [1, 0, 2])
        # Robustness: use dynamic shapes — under tf.function with an unknown
        # input signature, static `input_data.shape[i]` can be None.
        time_steps = tf.shape(input_data)[0]
        batch_size = tf.shape(input_data)[1]
        outputs = tf.TensorArray(tf.float32, size=time_steps)
        # NOTE(review): `zero_state` is the TF1-style cell API; Keras cells
        # use `get_initial_state` — confirm which cell type is passed in.
        state = self.cell.zero_state(batch_size, dtype=tf.float32)
        for i in tf.range(time_steps):
            output, state = self.cell(input_data[i], state)
            outputs = outputs.write(i, output)
        # Stack back to batch-major [batch, time, out_features].
        return tf.transpose(outputs.stack(), [1, 0, 2]), state