import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
from sklearn.linear_model import LinearRegression
# Build a design matrix with columns [x**2, x] over 50 evenly spaced
# points in [0, 10], then synthesize quadratic targets y = w1*x^2 + w2*x + b.
x_col = np.linspace(0, 10, 50).reshape(-1, 1)
X = np.hstack([x_col ** 2, x_col])
X.shape

# Random "true" parameters: integer weights in [1, 10), intercept in [-5, 5).
w = np.random.randint(1, 10, size=2)
b = np.random.randint(-5, 5, size=1)
y = X @ w + b
y.shape

# Quick look at the synthetic data: y plotted against the linear column x.
xs = X[:, 1]
plt.plot(xs, y, color='r')

# Same curve again, this time titled with the randomly drawn true parameters.
plt.plot(xs, y, color='r')
plt.title('w1:%d.w2:%d.b:%d' % (w[0], w[1], b[0]))

# Reference fit with scikit-learn; `fit` returns the estimator, so chain it.
# Expect coef_ ≈ [w1, w2] and intercept_ ≈ b.
lr = LinearRegression().fit(X, y)
print(lr.coef_, lr.intercept_)

plt.plot(X[:, 1], y, color='r')
plt.title('w1:%d.w2:%d.b:%d' % (w[0], w[1], b[0]))
plt.scatter(X[:, 1], y, marker='*')
x = np.linspace(-2, 12, 100)
# Bug fix: overlay the actual generating curve w[0]*x^2 + w[1]*x + b[0].
# The original hard-coded 2*x**2 + 6*x - 4, which only matches the data
# for one particular random draw of (w, b).
plt.plot(x, w[0] * x ** 2 + w[1] * x + b[0], color='green')

def gradient_descent(X, y, lr, epoch, w, b):
    """Fit y ≈ X·w + b by full-batch gradient descent on the squared error.

    Parameters
    ----------
    X : 2-D array-like, shape (n_samples, n_features)
        Feature rows.
    y : 1-D array-like, length n_samples
        Targets.
    lr : sequence of floats
        Per-weight learning rates; lr[n] steps w[n], and lr[0] also
        steps the intercept b.
    epoch : int
        Number of full passes over the data.
    w : sequence of floats, length n_features
        Initial weights. Not modified in place (a copy is updated).
    b : float
        Initial intercept.

    Returns
    -------
    (w, b) : np.ndarray of fitted weights and the fitted float intercept.
    """
    # Bug fix: the original updated the caller's array via w[n] -= ...,
    # silently clobbering the initial weights. Work on a copy instead.
    w = np.asarray(w, dtype=float).copy()
    b = float(b)
    batch = len(X)
    for _ in range(epoch):
        # Gradient accumulators, reset every epoch.
        dw = [0.0] * len(w)
        db = 0.0
        for j in range(batch):
            # Prediction for sample j: b + Σ_n X[j][n] * w[n].
            y_ = b
            for n in range(len(w)):
                y_ += X[j][n] * w[n]
            # d(½·err²)/dŷ = -(y - ŷ); averaged over the batch below.
            d_loss = -(y[j] - y_)
            for n in range(len(w)):
                dw[n] += X[j][n] * d_loss / float(batch)
            db += d_loss / float(batch)
        # One step per epoch, with a separate learning rate per coordinate.
        for n in range(len(w)):
            w[n] -= dw[n] * lr[n]
        b -= db * lr[0]
    return w, b
# Random initial parameters, then 500 epochs of gradient descent.
w = np.random.randn(2)
b = np.random.randn(1)[0]
# Per-coordinate step sizes: a gentler step for the x**2 column, whose
# values are much larger. (Note: this rebinds `lr` from the sklearn model
# above to a list of learning rates.)
lr = [0.0001, 0.001]
w_, b_ = gradient_descent(X, y, lr, 500, w, b)
print(w_, b_)

# Overlay the gradient-descent fit on the raw samples.
plt.scatter(X[:, 1], y, marker='*')
x = np.linspace(-2, 12, 100)
fitted = w_[0] * x ** 2 + w_[1] * x + b_
plt.plot(x, fitted, color='g')
