Plotting the learning trajectory of gradient descent on S = x0^2 + x1^2

The script below computes gradients numerically with central differences, runs gradient descent on S from the initial point (-3.0, 4.0), and plots the resulting trajectory as a 3D scatter.
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the 3D projection on older Matplotlib

mpl.rcParams['font.sans-serif'] = ['Microsoft YaHei']  # render CJK characters in figures
def f(x, y):
    # defined here but unused; the descent below optimizes function_2 instead
    return x**2 + (y / 1.7)**2
def numerical_gradient_no_batch(f, x):
    h = 1e-4  # 0.0001
    grad = np.zeros_like(x)  # array with the same shape as x
    for idx in range(x.size):
        tmp_val = x[idx]
        # compute f(x+h)
        x[idx] = tmp_val + h
        fxh1 = f(x)
        # compute f(x-h)
        x[idx] = tmp_val - h
        fxh2 = f(x)
        grad[idx] = (fxh1 - fxh2) / (2 * h)
        x[idx] = tmp_val  # restore the original value
    return grad
def numerical_gradient(f, X):
    if X.ndim == 1:
        return numerical_gradient_no_batch(f, X)
    else:
        grad = np.zeros_like(X)
        for idx, x in enumerate(X):
            grad[idx] = numerical_gradient_no_batch(f, x)
        return grad
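# Quick illustrative check (an assumption for this post, not part of the original
# script): the analytic gradient of function_2 below is 2*x, so
# numerical_gradient(function_2, np.array([3.0, 4.0])) should return roughly [6.0, 8.0].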
def function_2(x):
    # f(x) = x0^2 + x1^2, the surface S from the title
    return np.sum(x**2)
def gradient_descent(f, init_x, lr=0.01, step_num=100):
    x = init_x
    x_history = []
    for i in range(step_num):
        x_history.append(x.copy())
        grad = numerical_gradient(f, x)
        x -= lr * grad
    return x, np.array(x_history)
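# For f(x) = np.sum(x**2) the gradient is 2*x, so each update above computes
# x <- x - lr*2*x = (1 - 2*lr)*x; with lr = 0.1 every step shrinks x by a
# factor of 0.8, and the trajectory converges geometrically to the minimum at the origin.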
if __name__ == '__main__':
    init_x = np.array([-3.0, 4.0])
    lr = 0.1
    step_num = 10000
    x, x_history = gradient_descent(function_2, init_x, lr=lr, step_num=step_num)
    print(x_history)

    fig1 = plt.figure()
    ax = fig1.add_subplot(projection='3d')  # Axes3D(fig1) is deprecated in recent Matplotlib
    # z = x0^2 + x1^2 evaluated along the trajectory
    ax.scatter(x_history[:, 0], x_history[:, 1],
               x_history[:, 0]**2 + x_history[:, 1]**2, c='r')
    ax.set_xlabel('x0')
    ax.set_ylabel('x1')
    ax.set_title('Function graph')
    plt.xlim([-3.5, 3.5])
    plt.ylim([-4.5, 4.5])
    plt.xlabel('x0')
    plt.ylabel('x1')
    plt.show()
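Because S has the closed-form descent trajectory x_t = (1 - 2*lr)^t * x_0, the plotted path can be cross-checked without rerunning the optimizer. A minimal sketch of that check, assuming the same init_x and lr as above:

import numpy as np

init_x = np.array([-3.0, 4.0])
lr = 0.1
# x_t = (1 - 2*lr)**t * x_0, since the gradient of sum(x**2) is 2*x
for t in [0, 1, 10, 100]:
    print(t, (1 - 2 * lr) ** t * init_x)
# t=0 reproduces init_x; by t=100 both coordinates are below 1e-9 in
# magnitude, matching x_history converging to the origin.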
Run result: a 3D scatter plot of the descent trajectory.