from keras.models import Sequential
from keras.layers import Dense, Masking, LSTM, TimeDistributed, Activation
import numpy as np

# Masking RNN
# Keras only supports batches in which every instance has the same number of
# timesteps. A standard trick allows effectively variable-length sequences:
# 1. zero-pad every sequence to a common length first
# 2. add a Masking layer with mask_value=0. so padded timesteps are skipped

# Toy dataset: 10 identical instances, each 6 timesteps x 5 features.
# The first three timesteps belong to class 0, the last three to class 1.
_sample = np.array([[1., 1., 1., 0., 0.],
                    [1., 1., 0., 0., 0.],
                    [1., 1., 1., 0., 0.],
                    [0., 0., 0., 1., 1.],
                    [0., 0., 1., 1., 1.],
                    [0., 0., 0., 1., 1.]])
X = np.tile(_sample, (10, 1, 1))

# One-hot targets per timestep: [1,0] = class 0, [0,1] = class 1.
_labels = np.array([[1., 0.]] * 3 + [[0., 1.]] * 3)
y = np.tile(_labels, (10, 1, 1))

# First 6 instances for training, remaining 4 held out for testing.
x_train, x_test = X[:6], X[6:]
y_train, y_test = y[:6], y[6:]

model = Sequential()

timesteps = 6   # common sequence length after zero-padding
features = 5    # input dimensionality per timestep

# Masking layer: any timestep whose feature vector equals mask_value (all
# zeros here) is skipped by downstream mask-aware layers such as LSTM.
# input_shape = (timesteps, features); the batch dimension is implicit.
model.add(Masking(mask_value=0., input_shape=(timesteps, features)))

# 3 recurrent units; return_sequences=True emits one output per timestep so
# the model can classify every timestep, not just the last one.
# The unit count is passed positionally: the `output_dim=` keyword is
# Keras 1 only (renamed `units=` in Keras 2), while the positional form is
# accepted by both versions.
model.add(LSTM(3, return_sequences=True))

# Per-timestep classifier: projects the LSTM's 3-dimensional output (not 5 —
# that is the raw feature size) down to the 2 classes. TimeDistributed
# applies the same Dense weights at every timestep.
model.add(TimeDistributed(Dense(2)))
model.add(Activation('softmax'))

# Softmax + categorical cross-entropy for one-hot per-timestep targets.
model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

# Train on the 6 training instances.
# NOTE(review): `nb_epoch` is the Keras 1 spelling — it was renamed `epochs`
# in Keras 2; confirm the installed Keras version before upgrading this call.
model.fit(x_train, y_train, nb_epoch=200, batch_size=4)

# Loss and metrics (here: accuracy) on the 4 held-out instances.
score = model.evaluate(x_test, y_test, batch_size=4)

# Predict a class index for every timestep of the unseen data.
# NOTE(review): `predict_classes` is a Keras 1 Sequential method removed in
# later releases — `np.argmax(model.predict(...), axis=-1)` is the modern
# equivalent.
classes = model.predict_classes(x_test, batch_size=4)
print(classes)
# Expected (most likely) output — one row per test instance, one class index
# per timestep: first three timesteps class 0, last three class 1.
#[[0 0 0 1 1 1]
# [0 0 0 1 1 1]
# [0 0 0 1 1 1]
# [0 0 0 1 1 1]]

# Per-class softmax probability scores for each timestep.
# NOTE(review): `predict_proba` is also Keras 1 only; plain `model.predict`
# returns the same probabilities in Keras 2.
probs = model.predict_proba(x_test, batch_size=4)
print(probs)


