Linear regression code:
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 19 23:19:49 2019
@author: 86135
"""
import numpy as np


class LinearRegression():
    def __init__(self):
        self.coef_ = None       # feature coefficients (weights)
        self.intercept_ = None  # intercept (bias) term
        self._theta = None      # full parameter vector [intercept, coefficients]

    def fit_normal(self, x_train, y_train):
        """Solve for theta in closed form with the normal equation."""
        assert x_train.shape[0] == y_train.shape[0], \
            "the size of x_train must be equal to the size of y_train"
        # Prepend a column of ones so the intercept is learned as theta[0]
        x_b = np.hstack([np.ones((len(x_train), 1)), x_train])
        # Normal equation: theta = (X^T X)^{-1} X^T y
        self._theta = np.linalg.inv(x_b.T.dot(x_b)).dot(x_b.T).dot(y_train)
        self.intercept_ = self._theta[0]
        self.coef_ = self._theta[1:]
        return self

    def predict(self, x_predict):
        assert self.intercept_ is not None and self.coef_ is not None, \
            "must fit before predict!"
        assert x_predict.shape[1] == len(self.coef_), \
            "the feature number of x_predict must be equal to x_train"
        x_b = np.hstack([np.ones((len(x_predict), 1)), x_predict])
        return x_b.dot(self._theta)

    def __repr__(self):
        return "LinearRegression()"
First, let's take a look at the sigmoid function:
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 18 22:48:04 2019
@author: 86135
"""
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(t):
    # Squash any real input into the open interval (0, 1)
    return 1 / (1 + np.exp(-t))
x=np.linspace(-10,10,500)
y=sigmoid(x)
plt.plot(x,y)
plt.show()
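A couple of quick sanity checks on the definition (nothing beyond the formula above): the output is exactly 0.5 at t = 0 and saturates toward 0 and 1 for large negative and positive inputs.

print(sigmoid(0))                        # 0.5
print(sigmoid(np.array([-10.0, 10.0])))  # roughly [4.5e-05, 0.99995]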
Logistic regression code:
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 18 22:48:35 2019
@author: 86135
"""
import numpy as np


class LogisticRegression():
    def __init__(self):
        self.coef_ = None       # feature coefficients (weights)
        self.intercept_ = None  # intercept (bias) term
        self._theta = None      # full parameter vector [intercept, coefficients]

    def _sigmoid(self, t):
        return 1. / (1. + np.exp(-t))

    def fit(self, x_train, y_train, eta=0.01, n_iters=1e4):
        """Fit the model by batch gradient descent on the log loss."""
        assert x_train.shape[0] == y_train.shape[0], \
            "the size of x_train must be equal to the size of y_train"

        def j(theta, x_b, y):
            # Cross-entropy (log) loss
            y_hat = self._sigmoid(x_b.dot(theta))
            try:
                return -np.sum(y * np.log(y_hat) + (1 - y) * np.log(1 - y_hat)) / len(y)
            except:
                return float('inf')

        def dj(theta, x_b, y):
            # Gradient of the log loss with respect to theta
            return x_b.T.dot(self._sigmoid(x_b.dot(theta)) - y) / len(x_b)

        def gradient_descent(x_b, y, initial_theta, eta, n_iters=1e4, epsilon=1e-8):
            theta = initial_theta
            cur_iter = 0
            while cur_iter < n_iters:
                gradient = dj(theta, x_b, y)
                last_theta = theta
                theta = theta - eta * gradient
                # Stop once the loss no longer decreases noticeably
                if abs(j(theta, x_b, y) - j(last_theta, x_b, y)) < epsilon:
                    break
                cur_iter += 1
            return theta

        # Prepend a column of ones for the intercept term
        x_b = np.hstack([np.ones((len(x_train), 1)), x_train])
        initial_theta = np.zeros(x_b.shape[1])
        self._theta = gradient_descent(x_b, y_train, initial_theta, eta, n_iters)
        self.intercept_ = self._theta[0]
        self.coef_ = self._theta[1:]
        return self

    def predict_proba(self, x_predict):
        """Return the predicted probability of class 1 for each sample."""
        assert self.intercept_ is not None and self.coef_ is not None, \
            "must fit before predict!"
        assert x_predict.shape[1] == len(self.coef_), \
            "the feature number of x_predict must be equal to x_train"
        x_b = np.hstack([np.ones((len(x_predict), 1)), x_predict])
        return self._sigmoid(x_b.dot(self._theta))

    def predict(self, x_predict):
        assert self.intercept_ is not None and self.coef_ is not None, \
            "must fit before predict!"
        # Threshold the probabilities at 0.5 to get hard 0/1 labels
        proba = self.predict_proba(x_predict)
        return np.array(proba >= 0.5, dtype='int')

    def __repr__(self):
        return "LogisticRegression()"