Below are common functions used in deep learning, along with their code implementations:
1. Identity function
import numpy as np
import matplotlib.pyplot as plt
def identity_function(x):
    return x
x = np.arange(0.0, 5.0, 0.1)
y = identity_function(x)
plt.plot(x, y)
plt.xlabel('x')
plt.ylabel('y')
plt.show()
2. Step function
import numpy as np
import matplotlib.pyplot as plt
def step_function(x):
    return np.array(x > 0, dtype=int)  # np.int is deprecated; use the built-in int
x = np.arange(-5.0, 5.0, 0.1)
y = step_function(x)
plt.plot(x, y)
plt.xlabel('x')
plt.ylabel('y')
plt.show()
3. sigmoid function
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
x = np.arange(-5.0, 5.0, 0.1)
y = sigmoid(x)
plt.plot(x, y)
plt.xlabel('x')
plt.ylabel('y')
plt.show()
4. sigmoid_grad function
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
def sigmoid_grad(x):
    return (1.0 - sigmoid(x)) * sigmoid(x)
x = np.arange(-5.0, 5.0, 0.1)
y = sigmoid_grad(x)
plt.plot(x, y)
plt.xlabel('x')
plt.ylabel('y')
plt.show()
5. relu function
import numpy as np
import matplotlib.pyplot as plt
def relu(x):
    return np.maximum(0, x)
x = np.arange(-5.0, 5.0, 0.1)
y = relu(x)
plt.plot(x, y)
plt.xlabel('x')
plt.ylabel('y')
plt.show()
6. relu_grad function
import numpy as np
def relu_grad(x):
    grad = np.zeros_like(x)  # np.zeros(x) raises an error for array input
    grad[x >= 0] = 1
    return grad
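As a quick check (a minimal sketch with arbitrary sample values), relu_grad returns 1 where the input is non-negative and 0 elsewhere:
x = np.array([-2.0, -0.5, 0.0, 1.5, 3.0])  # arbitrary example inputs
print(relu_grad(x))  # [0. 0. 1. 1. 1.]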
7. softmax function
import numpy as np
def softmax(x):
    if x.ndim == 2:
        x = x.T
        x = x - np.max(x, axis=0)  # subtract the per-column max to avoid overflow
        y = np.exp(x) / np.sum(np.exp(x), axis=0)
        return y.T
    x = x - np.max(x)  # overflow countermeasure for the 1-D case
    return np.exp(x) / np.sum(np.exp(x))
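For reference, a minimal usage sketch (the scores below are arbitrary); the softmax outputs are non-negative and sum to 1:
a = np.array([0.3, 2.9, 4.0])  # arbitrary example scores
print(softmax(a))          # approximately [0.018 0.245 0.737]
print(np.sum(softmax(a)))  # 1.0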
8. mean_squared_error function
import numpy as np
def mean_squared_error(y, t):
    return 0.5 * np.sum((y - t) ** 2)
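A minimal usage sketch, assuming a one-hot target t and a predicted probability vector y (the values are arbitrary):
t = np.array([0, 0, 1, 0, 0])              # one-hot label
y = np.array([0.1, 0.05, 0.6, 0.05, 0.2])  # predicted probabilities
print(mean_squared_error(y, t))  # 0.1075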
9. cross_entropy_error function
import numpy as np
def cross_entropy_error(y, t):
    if y.ndim == 1:
        t = t.reshape(1, t.size)
        y = y.reshape(1, y.size)
    # when t is one-hot encoded, convert it to class-index labels
    if t.size == y.size:
        t = t.argmax(axis=1)
    batch_size = y.shape[0]
    return -np.sum(np.log(y[np.arange(batch_size), t] + 1e-7)) / batch_size
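A minimal usage sketch with an arbitrary one-hot label and prediction; the function also accepts batched 2-D inputs:
t = np.array([0, 0, 1, 0, 0])
y = np.array([0.1, 0.05, 0.6, 0.05, 0.2])
print(cross_entropy_error(y, t))  # approximately 0.51, i.e. -log(0.6)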
10. softmax_loss function
import numpy as np
def softmax_loss(X, t):
    # relies on the softmax and cross_entropy_error functions defined above
    y = softmax(X)
    return cross_entropy_error(y, t)
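A minimal usage sketch, assuming the softmax and cross_entropy_error functions above are in scope (the scores and label are arbitrary):
X = np.array([0.3, 2.9, 4.0])  # arbitrary raw scores
t = np.array([0, 0, 1])        # one-hot label for class 2
print(softmax_loss(X, t))  # approximately 0.306, i.e. -log(softmax(X)[2])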