# "Deep Learning", 4.1.2 Activation Functions
# ReLU function
# Import libraries
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(-8.0, 8.0, 0.1)  # range of x values for the plot

def relu(x):  # define the function
    return np.maximum(0, x)  # formula: max(0, x)

y = relu(x)  # call the function
plt.grid()   # add a grid
plt.plot(x, y)
plt.show()   # display with matplotlib
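
# Added note (mine, not from the original): ReLU has several equivalent
# formulations; all three below produce the same curve as relu(x) above.
import numpy as np

x = np.arange(-8.0, 8.0, 0.1)
a = np.maximum(0, x)     # element-wise max with 0, as in relu() above
b = np.clip(x, 0, None)  # clip negatives to 0, leave positives unchanged
c = x * (x > 0)          # boolean mask: zero out the negative entries
print(np.allclose(a, b) and np.allclose(a, c))  # True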

# Derivative of the ReLU function
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(-8.0, 8.0, 0.1)

def grad_relu(x):
    return np.where(x < 0, 0, 1)  # derivative: 0 for x < 0, 1 otherwise

y = grad_relu(x)
plt.grid()
plt.plot(x, y)
plt.show()
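
# Added cross-check (my sketch, assuming PyTorch is available): autograd
# computes the same ReLU derivative automatically, so it should match the
# piecewise np.where() result above (except exactly at x = 0, where PyTorch
# defines the subgradient to be 0).
import torch

x = torch.arange(-8.0, 8.0, 0.1, requires_grad=True)
y = torch.relu(x)
y.backward(torch.ones_like(x))  # backprop a vector of ones to get dy/dx at every point
print(x.grad)  # 0 for x < 0, 1 for x > 0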

# Sigmoid function
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(-8.0, 8.0, 0.1)

def sigmoid(x):
    return 1 / (np.exp(-x) + 1)  # formula: 1 / (1 + exp(-x))

y = sigmoid(x)
plt.grid()
plt.plot(x, y)
plt.show()
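
# Added caveat (mine): within this plotting range the formula is fine, but for
# large negative x, np.exp(-x) overflows. A common numerically stable variant
# keeps the exponent non-positive in both branches.
import numpy as np

def stable_sigmoid(x):
    e = np.exp(-np.abs(x))  # exponent is never positive, so no overflow
    # x >= 0: 1 / (1 + exp(-x));  x < 0: exp(x) / (1 + exp(x))
    return np.where(x >= 0, 1 / (1 + e), e / (1 + e))

print(stable_sigmoid(np.array([-1000.0, 0.0, 1000.0])))  # [0.  0.5 1. ]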

# Derivative of the sigmoid function
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(-8.0, 8.0, 0.1)

def sigmoid_grad(x):
    z = 1 / (np.exp(-x) + 1)  # sigmoid(x)
    return z * (1 - z)        # derivative: sigmoid(x) * (1 - sigmoid(x))

y = sigmoid_grad(x)
plt.grid()
plt.plot(x, y)
plt.show()
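
# Added note (mine): the plot peaks at 0.25 and decays quickly on both sides,
# which is one source of vanishing gradients when sigmoids are stacked in
# deep networks. A quick numeric check:
import numpy as np

def sigmoid_grad(x):  # same formula as above
    z = 1 / (np.exp(-x) + 1)
    return z * (1 - z)

print(sigmoid_grad(0.0))  # 0.25, the global maximum of the derivative
print(sigmoid_grad(8.0))  # ~0.000335, already close to vanishing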

# Tanh function
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(-8.0, 8.0, 0.1)

def tanh(x):
    z1 = 1 - np.exp(-2 * x)
    z2 = 1 + np.exp(-2 * x)
    return z1 / z2  # formula: (1 - exp(-2x)) / (1 + exp(-2x))

y = tanh(x)
plt.grid()
plt.plot(x, y)
plt.show()
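
# Added identity (mine): tanh is a rescaled sigmoid, tanh(x) = 2*sigmoid(2x) - 1,
# which is why both curves are S-shaped; tanh is just centered at 0 instead of 0.5.
import numpy as np

x = np.arange(-8.0, 8.0, 0.1)
sigmoid = lambda t: 1 / (1 + np.exp(-t))
print(np.allclose(np.tanh(x), 2 * sigmoid(2 * x) - 1))  # True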

# Derivative of the tanh function
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(-8.0, 8.0, 0.1)

def tanh_grad(x):
    z = (1 - np.exp(-2 * x)) / (1 + np.exp(-2 * x))  # tanh(x)
    return 1 - z * z  # derivative: 1 - tanh(x)^2

y = tanh_grad(x)
plt.grid()
plt.plot(x, y)
plt.show()
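
# Added sanity check (mine): the hand-rolled tanh above should agree with
# NumPy's built-in np.tanh, and the derivative with the identity 1 - tanh(x)^2.
import numpy as np

x = np.arange(-8.0, 8.0, 0.1)
z = (1 - np.exp(-2 * x)) / (1 + np.exp(-2 * x))     # hand-rolled tanh from above
print(np.allclose(z, np.tanh(x)))                   # True
print(np.allclose(1 - z * z, 1 - np.tanh(x) ** 2))  # True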