TensorFlow 2.0 Tutorial, Part 1

This post introduces deep learning with TensorFlow 2.0, with particular attention to obtaining and using the dataset. A download link for the dataset is provided at the end of the post.


import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
import tensorflow as tf
from tensorflow import keras

'''
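Sample output of the version checks below (author's original environment):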
sys.version_info(major=3, minor=5, micro=4, releaselevel='final', serial=0)
matplotlib 3.0.3
numpy 1.16.2
pandas 0.24.2
sklearn 0.20.3
tensorflow 1.11.0
tensorflow.keras 2.1.6-tf

'''
print(tf.__version__)
print(sys.version_info)
for module in mpl, np, pd, sklearn, tf, keras:
    print(module.__name__, module.__version__)
def load_data(path,files):
    import gzip
    import numpy as np
    paths=[path+each for each in files]
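    # IDX format: label files carry an 8-byte header, image files a 16-byte header, hence the offsets below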
    with gzip.open(paths[0],'rb') as lbpath:
        train_labels = np.frombuffer(lbpath.read(),np.uint8,offset=8)
    with gzip.open(paths[1],'rb') as imgpath:
        train_images = np.frombuffer(imgpath.read(),np.uint8,offset=16).reshape(len(train_labels),28,28)
    with gzip.open(paths[2],'rb') as lbgpath:
        test_labels = np.frombuffer(lbgpath.read(),np.uint8,offset=8)
    with gzip.open(paths[3],'rb') as imgpath:
        test_images = np.frombuffer(imgpath.read(),np.uint8,offset=16).reshape(len(test_labels),28,28)
    return (train_images,train_labels),(test_images,test_labels)
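# Note: if network access is available, tf.keras also ships a built-in Fashion-MNIST loader
# that downloads the data and returns the same splits (an alternative to the manual gzip parsing above):
#   (x_train_all, y_train_all), (x_test, y_test) = keras.datasets.fashion_mnist.load_data()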
path='fashion-mnist/'
files=['train-labels-idx1-ubyte.gz','train-images-idx3-ubyte.gz','t10k-labels-idx1-ubyte.gz','t10k-images-idx3-ubyte.gz']

(x_train_all, y_train_all), (x_test, y_test) = load_data(path,files)
x_valid, x_train = x_train_all[:5000], x_train_all[5000:]
y_valid, y_train = y_train_all[:5000], y_train_all[5000:]
# Standardize the data (zero mean, unit variance): x = (x - u) / std
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
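# StandardScaler expects 2-D input, so flatten all pixels into a single column, fit/transform, then reshape back to (N, 28, 28)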
x_train_scaled = scaler.fit_transform(x_train.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)
x_valid_scaled = scaler.transform(x_valid.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)
x_test_scaled = scaler.transform(x_test.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)
# model=keras.models.Sequential([
#     keras.layers.Flatten(input_shape=[28,28]), # flatten the 28x28 image
#     keras.layers.Dense(300,activation='relu'),
#     keras.layers.Dense(100,activation='relu'),
#     keras.layers.Dense(10,activation='softmax')
# ])
model=keras.models.Sequential()
model.add(keras.layers.Flatten(input_shape=[28,28]))
for _ in range(20):
    model.add(keras.layers.Dense(100,activation='relu'))
    # model.add(keras.layers.BatchNormalization())  # (1) speeds up convergence (2) curbs overfitting, so Dropout/regularization can be reduced or omitted (3) makes the network less sensitive to weight initialization (4) allows a larger learning rate
# model.add(keras.layers.AlphaDropout(rate=0.5))  # reduces overfitting
model.add(keras.layers.Dense(10,activation='softmax'))
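model.summary()  # optional: print the layer-by-layer architecture and parameter counts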

# Loss function, optimizer, and evaluation metric
model.compile(loss="sparse_categorical_crossentropy", optimizer="sgd", metrics=["accuracy"])
logdir='./dnn-bn-callbacks'
if not os.path.exists(logdir):
    os.mkdir(logdir)
output_model_file = os.path.join(logdir,'fashion_mnist_model.h5')
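# TensorBoard writes training logs to logdir, ModelCheckpoint keeps the best model (lowest validation loss),
# and EarlyStopping stops training once val_loss has not improved by at least min_delta for `patience` epochs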
callbacks=[
    keras.callbacks.TensorBoard(logdir),
    keras.callbacks.ModelCheckpoint(output_model_file,save_best_only=True),
    keras.callbacks.EarlyStopping(patience=5,min_delta=1e-3),
]
history=model.fit(x_train_scaled,y_train,epochs=20,validation_data=(x_valid_scaled,y_valid),callbacks=callbacks)
def plot_learn_curves(history):
    pd.DataFrame(history.history).plot(figsize=(8,5))
    plt.grid(True)
    plt.ylim(0,1)
    plt.show()
plot_learn_curves(history)
model.evaluate(x_test_scaled,y_test)
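
As a minimal follow-up sketch, the checkpoint saved by ModelCheckpoint can be reloaded for inference; it reuses output_model_file and x_test_scaled defined above and assumes the standard Fashion-MNIST class order:

best_model = keras.models.load_model(output_model_file)
class_names = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
               'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']  # assumed label order
probs = best_model.predict(x_test_scaled[:5])  # class probabilities for the first 5 test images
preds = probs.argmax(axis=1)                   # predicted class indices
print([class_names[i] for i in preds])         # predicted class names
print(y_test[:5])                              # true labels for comparison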

Dataset download: https://pan.baidu.com/s/1mI-JZW8042xikQ6NRlebUg (extraction code: t8va)
 
