def gen_batch(dataSet, batch_size):
    """Yield (features, labels) mini-batches from a 2-D array.

    Column 0 holds the label; columns 1..end hold the features.
    Only full batches are produced: any trailing rows that do not
    fill a complete batch are dropped (a fixed-size graph, e.g. in
    TensorFlow, cannot accept a smaller final batch).

    Args:
        dataSet: 2-D array of shape (rows, cols).
        batch_size: number of rows per yielded batch.

    Yields:
        (x, y) tuples where x has shape (batch_size, cols - 1) and
        y has shape (batch_size,).
    """
    num_rows = dataSet.shape[0]
    full_batches = num_rows // batch_size  # remainder rows are discarded
    for start in range(0, full_batches * batch_size, batch_size):
        chunk = dataSet[start:start + batch_size]
        # Features are every column after the first; the label is column 0.
        yield (chunk[:, 1:], chunk[:, 0])
def gen_epochs(n, dataSet, batch_size):
    """Yield ``n`` epochs, each a fresh generator of shuffled mini-batches.

    Fix: the original shuffled ``dataSet`` in place with
    ``np.random.shuffle``, silently mutating the caller's array.
    We now shuffle a private copy, so the caller's data order is
    preserved while each epoch still sees a new random row order.

    Args:
        n: number of epochs to produce.
        dataSet: 2-D array; column 0 is the label, the rest features.
        batch_size: rows per batch (see ``gen_batch``).

    Yields:
        One ``gen_batch`` generator per epoch.
    """
    data = np.array(dataSet)  # copy once; re-shuffled before every epoch
    for _ in range(n):
        np.random.shuffle(data)  # randomize row order for this epoch
        yield gen_batch(data, batch_size)
# Demo / smoke test.
# Fixes: the original used Python 2 `print a` / `print X, Y` statements
# (a SyntaxError on Python 3) and had several statements fused onto one
# line; rewritten as valid, formatted Python 3 code with the same calls.
a = np.array([[1, 2, 4], [3, 4, 4], [5, 6, 4], [7, 8, 4], [9, 0, 4],
              [8, 7, 4], [6, 5, 4], [4, 3, 4], [2, 1, 4], [0, 1, 4],
              [12, 11, 4], [10, 21, 4]])
print(a)

# Two epochs of shuffled mini-batches.
for idx, epoch in enumerate(gen_epochs(2, a, batch_size=5)):
    for step, (X, Y) in enumerate(epoch):  # batches within this epoch
        print(X, Y)

# One unshuffled pass over the same data.
for step, (X, Y) in enumerate(gen_batch(a, batch_size=5)):
    print(X, Y)