TensorFlow Basics
Installing TensorFlow
- pip install tensorflow
- pip install tensorflow-gpu
TensorFlow 2 requires pip version > 19.0
pip install tensorflow==1.15  # pin a 1.x release if needed
import tensorflow as tf
tf.__version__
Introduction
- TensorFlow is a symbolic math system based on dataflow programming. It is widely used to implement machine learning algorithms of all kinds; its predecessor is Google's neural-network library DistBelief.
- TensorFlow has a multi-layered architecture, can be deployed on servers, desktop machines, and the web, and supports high-performance numerical computing on GPUs and TPUs. It is widely used inside Google for product development and in scientific research across many fields.
- Since November 9, 2015, TensorFlow has been open source under the Apache 2.0 open source license.
Deep Learning
Computational graphs (graph)
- Static graphs vs. dynamic graphs
- Static graph: define first, then run; on later runs the graph does not need to be rebuilt
- Dynamic graph: a new graph is built for every computation
- TF 1.x: static graphs
- TF 2.0: eager execution (dynamic) by default; static graphs via tf.function
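A minimal sketch of the two modes in TF 2.x (square_sum is an illustrative name, not an API):
import tensorflow as tf

# Eager (dynamic) execution: ops run immediately and return concrete values
x = tf.constant(3.0)
print((x * x + 1).numpy())  # 10.0

# tf.function traces the Python function into a static graph once,
# then re-runs the cached graph on later calls with the same signature
@tf.function
def square_sum(a, b):
    return a * a + b * b

print(square_sum(tf.constant(2.0), tf.constant(3.0)).numpy())  # 13.0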
Tensors
- tf.Tensor
tf.Tensor(id, shape=(), dtype, value, graph, name)  # main attributes of a tensor
# Constant
a = tf.constant(2)
a = tf.constant(2, name='a')
# Value
a.numpy()
Shape
a.get_shape()
Tensor operations
tf.add, tf.subtract, tf.logical_and, tf.logical_or, tf.matmul
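For example (values chosen arbitrarily for illustration):
a = tf.constant([[1., 2.], [3., 4.]])
b = tf.constant([[5., 6.], [7., 8.]])
tf.add(a, b)       # element-wise sum, same as a + b
tf.subtract(a, b)  # element-wise difference, same as a - b
tf.matmul(a, b)    # matrix product, same as a @ b

p = tf.constant([True, False])
q = tf.constant([True, True])
tf.logical_and(p, q)  # [True, False]
tf.logical_or(p, q)   # [True, True]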
Variables
- tf.Variable
tf.Variable(initial_value, name=None, dtype=None, shape=None, ...)
- s = tf.Variable(2, name='scalar')
- m = tf.Variable([[0, 1], [2, 3]], name='matrix')
- W = tf.Variable(tf.zeros([784, 10]))
Variable operations
- v.assign(), v.assign_add(), v.assign_sub()  (tf.assign is TF 1.x only)
a = tf.Variable(1)
b = (a + 2) * 3            # reads the current value; a is still 1
b = (a.assign_add(2)) * 3  # updates a in place to 3, then multiplies
Variable tracking
class MyModule(tf.Module):
    def __init__(self):
        self.v0 = tf.Variable(1.0)
        self.vs = [tf.Variable(x) for x in range(10)]

m = MyModule()
m.variables  # tf.Module automatically tracks Variables assigned as attributes
Data
tf.data
Build complex input pipelines from simple building blocks: create datasets and efficient pipelines.
Creating datasets
tf.data.Dataset.from_tensors((features, labels))
tf.data.Dataset.from_tensor_slices((features, labels))
tf.data.Dataset.from_generator(gen, output_types, output_shapes)
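A small sketch of the difference between the first two constructors (the feature/label values are made up):
features = tf.constant([[1., 2.], [3., 4.], [5., 6.]])
labels = tf.constant([0, 1, 0])

# from_tensors keeps the whole input as a single element
ds_whole = tf.data.Dataset.from_tensors((features, labels))       # 1 element

# from_tensor_slices slices along the first dimension
ds_rows = tf.data.Dataset.from_tensor_slices((features, labels))  # 3 (feature, label) pairs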
Reading datasets
# Lines from one or more text files
tf.data.TextLineDataset(filenames)
# Fixed-length records from one or more binary files
tf.data.FixedLengthRecordDataset(filenames)
# Records from one or more TFRecord files
tf.data.TFRecordDataset(filenames)
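For example (the file names below are hypothetical placeholders):
lines = tf.data.TextLineDataset(['data.txt'])         # each line becomes one string element
records = tf.data.TFRecordDataset(['data.tfrecord'])  # assumes records written with tf.io.TFRecordWriter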
Iterating over data
dataset = tf.data.Dataset.from_tensor_slices([1, 2, 3])  # example values
for element in dataset:
    print(element.numpy())
it = iter(dataset)
print(next(it).numpy())
Combining datasets
dataset3 = tf.data.Dataset.zip((dataset1, dataset2))
dataset3
Taking batches
batched_dataset = dataset.batch(4)
Random shuffling
dataset = dataset.shuffle(buffer_size=100)
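Putting these together, a minimal shuffled and batched pipeline sketch:
dataset = tf.data.Dataset.from_tensor_slices(tf.range(10))
dataset = dataset.shuffle(buffer_size=100).batch(4)
for batch in dataset:
    print(batch.numpy())  # batches of 4, 4, and 2 shuffled elements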
Common datasets
tf.keras.datasets
tf.keras.datasets.xx.load_data()  # xx is one of mnist, fashion_mnist, cifar10, imdb, ...
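For example, with the bundled MNIST dataset (downloaded on first use):
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
print(x_train.shape, y_train.shape)  # (60000, 28, 28) (60000,)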
Saving and loading models
Saving checkpoints
# given a built Keras model `model`
model.save_weights('./tf_ckpts/ckpt')
# Get the latest checkpoint
ckpt = tf.train.latest_checkpoint('./tf_ckpts/')
# Restore the weights into the model
model.load_weights(ckpt)
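tf.train.latest_checkpoint pairs naturally with tf.train.Checkpoint; a hedged sketch of that route, assuming `model` is a Keras model built earlier:
ckpt = tf.train.Checkpoint(model=model)
manager = tf.train.CheckpointManager(ckpt, './tf_ckpts', max_to_keep=3)
manager.save()                                           # writes ./tf_ckpts/ckpt-1, ckpt-2, ...
ckpt.restore(tf.train.latest_checkpoint('./tf_ckpts/'))  # restore the newest one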
Saving the whole model
from tensorflow.keras.models import load_model
model.save('my_model.h5')
model = load_model('my_model.h5')
Visualization
TensorBoard
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
# On the command line: tensorboard --logdir <log_dir>
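A minimal usage sketch, assuming `model`, `x_train`, and `y_train` exist (e.g. from the Keras section below):
log_dir = './logs'
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
model.fit(x_train, y_train, epochs=5, callbacks=[tensorboard_callback])
# then inspect the logs: tensorboard --logdir ./logs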
TensorFlow high-level API: Keras
keras
tf.keras is TensorFlow's implementation of the Keras API specification. It is a high-level API for building and training models, with first-class support for TensorFlow-specific functionality such as eager execution, tf.data pipelines, and Estimators. tf.keras makes TensorFlow easier to use without sacrificing flexibility or performance.
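A minimal sketch of building and compiling a model with tf.keras (MNIST-style shapes assumed):
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(10, activation='softmax'),
])
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
# model.fit(x_train, y_train, epochs=5)  # with data from tf.keras.datasets.mnist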
Fully connected (Dense) layer
tf.keras.layers.Dense
__init__(
units,
activation=None,
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
**kwargs
)
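For example (batch size and feature size chosen arbitrarily):
layer = tf.keras.layers.Dense(units=10, activation='relu')
x = tf.ones((32, 784))  # batch of 32 flattened 28x28 inputs
y = layer(x)            # shape (32, 10)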
Convolutional layers
tf.keras.layers.Conv2D (also Conv1D, Conv3D)
__init__(
filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format=None,
activation=None,
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
**kwargs
)
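For example (NHWC input with illustrative shapes):
conv = tf.keras.layers.Conv2D(filters=16, kernel_size=3, strides=(1, 1),
                              padding='same', activation='relu')
x = tf.ones((8, 28, 28, 1))  # (batch, height, width, channels)
y = conv(x)                  # shape (8, 28, 28, 16) with 'same' padding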
Pooling layers
tf.keras.layers.AveragePooling2D
tf.keras.layers.MaxPool2D
__init__(
pool_size=(2, 2),
strides=None,
padding='valid',
data_format=None,
**kwargs
)
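For example:
pool = tf.keras.layers.MaxPool2D(pool_size=(2, 2))
x = tf.ones((8, 28, 28, 16))
y = pool(x)  # shape (8, 14, 14, 16): spatial dimensions halved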
Global pooling layers
tf.keras.layers.GlobalAveragePooling2D
tf.keras.layers.GlobalMaxPool2D
Dropout layer
tf.keras.layers.Dropout
__init__(
rate,
noise_shape=None,
seed=None,
**kwargs
)
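For example (rate=0.5 is illustrative):
drop = tf.keras.layers.Dropout(rate=0.5)
x = tf.ones((4, 10))
drop(x, training=True)   # roughly half the units zeroed, the rest scaled by 1/(1-rate)
drop(x, training=False)  # identity at inference time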
Batch normalization layer
tf.keras.layers.BatchNormalization
__init__(
axis=-1,
momentum=0.99,
epsilon=0.001,
beta_initializer='zeros',
gamma_initializer='ones',
moving_mean_initializer='zeros',
moving_variance_initializer='ones',
beta_regularizer=None,
gamma_regularizer=None,
beta_constraint=None,
gamma_constraint=None,
renorm=False,
renorm_clipping=None,
renorm_momentum=0.99,
trainable=True,
**kwargs
)
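For example:
bn = tf.keras.layers.BatchNormalization()
x = tf.random.normal((32, 10))
y = bn(x, training=True)   # normalizes with batch statistics and updates the moving averages
y = bn(x, training=False)  # uses the stored moving mean/variance instead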
RNN layer
tf.keras.layers.RNN
__init__(
cell,
return_sequences=False,
return_state=False,
go_backwards=False,
stateful=False,
unroll=False,
time_major=False,
**kwargs
)
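For example, wrapping a SimpleRNNCell (shapes are illustrative):
cell = tf.keras.layers.SimpleRNNCell(units=32)
rnn = tf.keras.layers.RNN(cell, return_sequences=True)
x = tf.ones((16, 10, 8))  # (batch, timesteps, features)
y = rnn(x)                # shape (16, 10, 32)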
LSTM layer
tf.keras.layers.LSTM
__init__(
units,
activation='tanh',
recurrent_activation='sigmoid',
use_bias=True,
kernel_initializer='glorot_uniform',
recurrent_initializer='orthogonal',
bias_initializer='zeros',
unit_forget_bias=True,
kernel_regularizer=None,
recurrent_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
recurrent_constraint=None,
bias_constraint=None,
dropout=0.0,
recurrent_dropout=0.0,
implementation=2,
return_sequences=False,
return_state=False,
go_backwards=False,
stateful=False,
time_major=False,
unroll=False,
**kwargs
)
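For example (illustrative shapes; return_state=True also returns the final hidden and cell states):
lstm = tf.keras.layers.LSTM(units=64, return_sequences=False, return_state=True)
x = tf.ones((16, 10, 8))  # (batch, timesteps, features)
output, h, c = lstm(x)    # output equals h when return_sequences=False; all have shape (16, 64)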
GRU layer
tf.keras.layers.GRU
__init__(
units,
activation='tanh',
recurrent_activation='sigmoid',
use_bias=True,
kernel_initializer='glorot_uniform',
recurrent_initializer='orthogonal',
bias_initializer='zeros',
kernel_regularizer=None,
recurrent_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
recurrent_constraint=None,
bias_constraint=None,
dropout=0.0,
recurrent_dropout=0.0,
implementation=2,
return_sequences=False,
return_state=False,
go_backwards=False,
stateful=False,
unroll=False,
time_major=False,
reset_after=True,
**kwargs
)
Optimizers
tf.keras.optimizers
Adagrad
Adam
SGD
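With Keras models the optimizer is usually passed to model.compile; a minimal sketch of the lower-level apply_gradients route:
opt = tf.keras.optimizers.SGD(learning_rate=0.1)
w = tf.Variable(2.0)
with tf.GradientTape() as tape:
    loss = w * w
grads = tape.gradient(loss, [w])
opt.apply_gradients(zip(grads, [w]))
print(w.numpy())  # 2.0 - 0.1 * 4.0 = 1.6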