import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.optimizers import SGD
from sklearn.utils import shuffle

np.random.seed(0)

M = 2                # input dimension
K = 3                # number of classes
n = 100              # samples per class
N = n * K            # total number of samples
minibatch_size = 50

# Three Gaussian clusters centered at (0, 10), (5, 5) and (10, 0)
X1 = np.random.randn(n, M) + np.array([0, 10])
X2 = np.random.randn(n, M) + np.array([5, 5])
X3 = np.random.randn(n, M) + np.array([10, 0])

# One-hot labels for the three classes
Y1 = np.array([[1, 0, 0] for i in range(n)])
Y2 = np.array([[0, 1, 0] for i in range(n)])
Y3 = np.array([[0, 0, 1] for i in range(n)])

X = np.concatenate((X1, X2, X3), axis=0)
Y = np.concatenate((Y1, Y2, Y3), axis=0)

# A single Dense layer followed by softmax: multiclass logistic regression
model = Sequential()
model.add(Dense(input_dim=M, units=K))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
              optimizer=SGD(learning_rate=0.1))  # `lr=` is deprecated in recent Keras

model.fit(X, Y, epochs=20, batch_size=minibatch_size)

# Spot-check the trained model on 10 shuffled samples.
# predict_classes()/predict_proba() were removed from recent Keras;
# model.predict() already returns the softmax probabilities.
X_, Y_ = shuffle(X, Y)
prob = model.predict(X_[0:10], batch_size=minibatch_size)
classes = np.argmax(prob, axis=1)

print('classified:')
print(np.argmax(Y_[0:10], axis=1) == classes)  # predicted class vs. true label
print()
print('output probability:')
print(prob)

Output:

classified:
[ True True True True True True True True True True]
output probability:
[[9.9865520e-01 1.3448129e-03 2.1935644e-10]
[2.9307455e-03 9.7283429e-01 2.4235010e-02]
[9.6656960e-01 3.3430368e-02 4.5890197e-08]
[1.7156685e-02 9.7406363e-01 8.7796366e-03]
[3.4497321e-09 9.4414754e-03 9.9055851e-01]
[1.4953143e-02 9.7935200e-01 5.6948471e-03]
[2.0219375e-07 4.6562523e-02 9.5343727e-01]
[6.5991614e-08 9.5339743e-03 9.9046600e-01]
[7.0335742e-12 1.5032224e-04 9.9984968e-01]
[9.9576533e-01 4.2346525e-03 2.3160656e-09]]
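
Because the printed probabilities come from a softmax over a single Dense layer, the same predictions can be reproduced by hand, and the ten-sample check can be extended to the whole training set. The sketch below is illustrative rather than part of the original listing; it assumes the trained model, X, and Y from above are still in scope, and the variable names (W, b, logits, acc) are our own.

# Recover the learned weights: a Dense(units=K) layer stores a kernel of
# shape (M, K) and a bias of shape (K,).
W, b = model.layers[0].get_weights()

# Manual forward pass: affine map followed by a row-wise softmax.
logits = X[0:10] @ W + b
manual_prob = np.exp(logits) / np.exp(logits).sum(axis=1, keepdims=True)
# manual_prob should match model.predict(X[0:10]) up to floating-point error.

# Accuracy over all N samples, mirroring the 10-sample check above.
pred = np.argmax(model.predict(X, batch_size=minibatch_size), axis=1)
acc = np.mean(pred == np.argmax(Y, axis=1))
print('training accuracy: {:.3f}'.format(acc))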