Implementing a three-layer neural network with TensorFlow
import numpy as np
import tensorflow as tf
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
# generate the data
np.random.seed(12)
num_observations = 5000
# Gaussian-distributed data
x1 = np.random.multivariate_normal([0, 0], [[2, .75],[.75, 2]], num_observations)
x2 = np.random.multivariate_normal([1, 4], [[1, .75],[.75, 1]], num_observations)
x3 = np.random.multivariate_normal([2, 8], [[0, .75],[.75, 0]], num_observations)
simulated_separableish_features = np.vstack((x1, x2, x3)).astype(np.float32)
simulated_labels = np.hstack((np.zeros(num_observations),
                              np.ones(num_observations),
                              np.ones(num_observations) + 1))
plt.figure(figsize=(12,8))
plt.scatter(simulated_separableish_features[:, 0], simulated_separableish_features[:, 1],
            c=simulated_labels, alpha=.4)
Preparing the data
TensorFlow expects each label to be a one-hot encoded vector, so I first reformat simulated_labels accordingly.
I then create a training set and a test set so that we can evaluate how well the model generalizes to data it has not seen.
labels_onehot = np.zeros((simulated_labels.shape[0], 3)).astype(int)
labels_onehot[np.arange(len(simulated_labels)), simulated_labels.astype(int)] = 1
train_dataset, test_dataset, train_labels, test_labels = train_test_split(
    simulated_separableish_features, labels_onehot, test_size=.1, random_state=12)
Building a simple neural network with TensorFlow
The network consists of an input layer, one hidden layer (5 neurons), and an output layer.
# number of hidden units
hidden_nodes = 5
num_labels = train_labels.shape[1]
# batch size
batch_size = 100
num_features = train_dataset.shape[1]
# learning rate
learning_rate = .01
graph = tf.Graph()
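The section stops after creating the graph, so the network described above (input layer, ReLU hidden layer with 5 units, softmax output layer) still has to be defined inside it. The following is a minimal sketch of what that graph definition could look like with the TensorFlow 1.x API; the variable names (tf_train_dataset, weights_1, hidden_layer, and so on) are illustrative assumptions, not taken from the original post.
with graph.as_default():
    # placeholders for a mini-batch of training data and its one-hot labels
    tf_train_dataset = tf.placeholder(tf.float32, shape=(batch_size, num_features))
    tf_train_labels = tf.placeholder(tf.float32, shape=(batch_size, num_labels))
    tf_test_dataset = tf.constant(test_dataset)

    # input -> hidden weights and biases
    weights_1 = tf.Variable(tf.truncated_normal([num_features, hidden_nodes]))
    biases_1 = tf.Variable(tf.zeros([hidden_nodes]))
    # hidden -> output weights and biases
    weights_2 = tf.Variable(tf.truncated_normal([hidden_nodes, num_labels]))
    biases_2 = tf.Variable(tf.zeros([num_labels]))

    # forward pass: ReLU hidden layer followed by linear logits
    hidden_layer = tf.nn.relu(tf.matmul(tf_train_dataset, weights_1) + biases_1)
    logits = tf.matmul(hidden_layer, weights_2) + biases_2

    # softmax cross-entropy loss averaged over the mini-batch
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(labels=tf_train_labels, logits=logits))

    # plain gradient descent with the learning rate defined above
    optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)

    # class-probability predictions for the training batch and the test set
    train_prediction = tf.nn.softmax(logits)
    test_hidden = tf.nn.relu(tf.matmul(tf_test_dataset, weights_1) + biases_1)
    test_prediction = tf.nn.softmax(tf.matmul(test_hidden, weights_2) + biases_2)
Training would then consist of opening a tf.Session on this graph and repeatedly feeding mini-batches of train_dataset and train_labels to the optimizer through the two placeholders.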