[Andrew Ng Machine Learning] ex1: Gradient Descent in Python

Linear Regression with One Variable
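The model fits a straight line $h_\theta(x) = \theta_0 + \theta_1 x$ to the training set by minimizing the squared-error cost

$$J(\theta) = \frac{1}{2m}\sum_{i=1}^{m}\bigl(h_\theta(x^{(i)}) - y^{(i)}\bigr)^2,$$

and batch gradient descent repeats the simultaneous update

$$\theta_j := \theta_j - \frac{\alpha}{m}\sum_{i=1}^{m}\bigl(h_\theta(x^{(i)}) - y^{(i)}\bigr)x_j^{(i)}$$

until convergence; the code below implements exactly this update in vectorized form.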

Python code:

import numpy as np
import matplotlib.pyplot as plt

# cost function J(theta)
def cost_function(X, Y, theta):
    residual = np.dot(np.transpose(theta), X) - Y   # h(x) - y for each example
    cost = np.dot(residual, residual) / (2 * np.size(Y))
    return cost

# batch gradient descent
def gradient_descent(X, Y, theta, alpha, iter_num):
    m = Y.shape[0]
    for num in range(iter_num):
        hyp = np.dot(np.transpose(theta), X)   # predictions, shape (m,)
        # simultaneous update of all parameters
        theta = theta - alpha * np.dot((hyp - Y), np.transpose(X)) / m
    return theta


# load the data
filepath = r'ex1data1.txt'
dataset = np.loadtxt(filepath, delimiter=',', usecols=(0, 1))
x_data = dataset[:, 0]   # feature (city population)
y_data = dataset[:, 1]   # target (food-truck profit)

# scatter plot of the training data
plt.plot(x_data, y_data, 'r+')

# design matrix: prepend a column of ones for the intercept,
# then transpose so each column of X is one training example
X = np.c_[np.ones(np.size(y_data)), x_data]
X = np.transpose(X)
Y = y_data
theta = np.zeros(2)   # initial parameters
alpha = 0.01          # learning rate
iter_num = 10000      # number of iterations

theta = gradient_descent(X, Y, theta, alpha, iter_num)

# fitted line
plt.plot(x_data, np.dot(theta, X), 'b-')
plt.show()
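Note that cost_function is defined but never called above. A quick way to check that the chosen alpha actually makes the cost decrease is to record J(theta) after every update and plot the curve; below is a minimal sketch of that check (the helper name gradient_descent_with_history is mine, not part of the exercise), reusing the variables from the script above:

# sketch: gradient descent that also records the cost history
def gradient_descent_with_history(X, Y, theta, alpha, iter_num):
    m = Y.shape[0]
    costs = np.zeros(iter_num)
    for num in range(iter_num):
        hyp = np.dot(np.transpose(theta), X)
        theta = theta - alpha * np.dot((hyp - Y), np.transpose(X)) / m
        costs[num] = cost_function(X, Y, theta)   # J(theta) after this update
    return theta, costs

theta, costs = gradient_descent_with_history(X, Y, np.zeros(2), alpha, iter_num)
plt.plot(costs)                # should decrease monotonically for a good alpha
plt.xlabel('iteration')
plt.ylabel('J(theta)')
plt.show()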

(Figure: scatter plot of the training data with the fitted regression line)

Linear Regression with Multiple Variables
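In ex1data2.txt the two features (house size in square feet and number of bedrooms) differ by orders of magnitude, so gradient descent converges much faster if each feature is mean-normalized first: $x_j := (x_j - \mu_j)/\sigma_j$, where $\mu_j$ and $\sigma_j$ are the mean and standard deviation of feature $j$ over the training set. The normalization helper below does exactly this, column by column.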

Python code:

import numpy as np

# feature normalization: subtract each column's mean, divide by its std
def normalization(data):
    mean = np.mean(data, axis=0)
    std = np.std(data, axis=0)
    return (data - mean) / std


# cost function
def cost_function(X, Y, theta):
    residual = np.dot(np.transpose(theta), X) - Y   # h(x) - y for each example
    cost = np.dot(residual, residual) / (2 * np.size(Y))
    return cost

# batch gradient descent
def gradient_descent(X, Y, theta, alpha, iter_num):
    m = Y.shape[0]
    for num in range(iter_num):
        hyp = np.dot(np.transpose(theta), X)   # predictions, shape (m,)
        theta = theta - alpha * np.dot((hyp - Y), np.transpose(X)) / m
    return theta


# load the data (ex1data2.txt has three columns: size, bedrooms, price)
filepath = r'ex1data2.txt'
dataset = np.loadtxt(filepath, delimiter=',', usecols=(0, 1, 2))
x_data = dataset[:, 0:2]   # features: house size, number of bedrooms
y_data = dataset[:, 2]     # target: price

# normalize the features (and, in this version, the target as well)
x_data = normalization(x_data)
y_data = normalization(y_data)

X = np.c_[np.ones(np.size(y_data)), x_data]
X = np.transpose(X)
Y = y_data
theta = np.zeros(3)
alpha = 0.01
iter_num = 10000

theta = gradient_descent(X, Y, theta, alpha, iter_num)
print(theta)
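Because both the features and the target were normalized, predicting the price of a new house means normalizing its features with the training statistics and then mapping the prediction back to the original units. Here is a minimal sketch for the 1650-square-foot, 3-bedroom example used in the original exercise handout (the 1650/3 figures come from that handout; dataset and theta come from the script above):

# sketch: predict the price of a 1650 sq-ft, 3-bedroom house
mu = np.mean(dataset, axis=0)      # per-column means of the raw data
sigma = np.std(dataset, axis=0)    # per-column standard deviations
x_new = (np.array([1650.0, 3.0]) - mu[0:2]) / sigma[0:2]   # normalize features
x_new = np.r_[1.0, x_new]          # prepend the intercept term
price = np.dot(theta, x_new) * sigma[2] + mu[2]   # undo target normalization
print(price)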