keras -- SimpleRNN 用于 MNIST

 

from keras.models import Sequential
from keras.layers import Dense,SimpleRNN,Activation
from keras.datasets import mnist
from keras.utils import np_utils
from keras.optimizers import Adam
import numpy as np

# Fetch MNIST and reshape each 28x28 image into a 28-step sequence of
# 28-dim row vectors, scaled from [0, 255] down to [0, 1].
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = (imgs.reshape(-1, 28, 28) / 255 for imgs in (x_train, x_test))
# One-hot encode the digit labels (10 classes).
y_train, y_test = (np_utils.to_categorical(lbls, num_classes=10)
                   for lbls in (y_train, y_test))

# Hyper-parameters for the RNN classifier.
TIME_STEPS = 28   # sequence length: one step per image row (image height)
INPUT_SIZE = 28   # features per step: one pixel per column (image width)
BATCH_SIZE = 100  # samples per training batch
BATCH_INDEX = 0   # cursor into the training set, advanced each step
OUTPUT_SIZE = 10  # number of classes (digits 0-9)
CELL_SIZE = 50    # number of units in the SimpleRNN hidden state
LR = 0.001        # learning rate for the Adam optimizer

# Build the RNN classifier: one SimpleRNN layer feeding a softmax output.
# batch_input_shape is (batch, TIME_STEPS, INPUT_SIZE): each image is fed
# row by row as a length-28 sequence of 28-dim vectors.
model = Sequential()
# NOTE: `output_dim` was the Keras 1 argument name and was removed in
# Keras 2; the current name is `units`.
model.add(SimpleRNN(units=CELL_SIZE,
                    batch_input_shape=(None, TIME_STEPS, INPUT_SIZE),
                    activation='relu'))
model.add(Dense(OUTPUT_SIZE))
model.add(Activation('softmax'))


adam = Adam(LR)
model.compile(optimizer=adam,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# Training: manually sliced mini-batches; evaluate on the full test set
# every 500 steps.
print('training...')
for step in range(4001):
    # Take the next BATCH_SIZE samples (the final slice of an epoch may
    # be shorter than BATCH_SIZE; Keras accepts that).
    x_batch = x_train[BATCH_INDEX:BATCH_SIZE + BATCH_INDEX, :, :]
    y_batch = y_train[BATCH_INDEX:BATCH_SIZE + BATCH_INDEX, :]
    # The returned batch loss was previously assigned but never used, so
    # the assignment is dropped.
    model.train_on_batch(x_batch, y_batch)
    # Advance the cursor, wrapping to the start after a full epoch.
    BATCH_INDEX += BATCH_SIZE
    if BATCH_INDEX >= x_train.shape[0]:
        BATCH_INDEX = 0
    if step % 500 == 0:
        # Evaluate the whole test set in a single batch.
        cost, accuracy = model.evaluate(x_test, y_test,
                                        batch_size=y_test.shape[0],
                                        verbose=False)
        print('cost : ', cost, ' accuracy : ', accuracy)

 

 

 

 

 

 

 

 

 

 

 

 

 

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值