Add: implement gradient descent

pull/2/head
benjas 5 years ago
parent a06f650b3e
commit 21cbf93963

@@ -13,8 +13,6 @@ class LinearRegression:
        polynomial_degree: whether to apply an additional polynomial transformation
        sinusoid_degree: whether to apply an additional sinusoid transformation
        normalize_data: whether to normalize the data
        """
        (data_processed,
         features_mean,
@@ -28,3 +26,56 @@ class LinearRegression:
        self.polynomial_degree = polynomial_degree
        self.sinusoid_degree = sinusoid_degree
        self.normalize_data = normalize_data

        num_features = self.data.shape[1]
        self.theta = np.zeros((num_features, 1))  # one parameter per feature, initialized to zeros

    def train(self, alpha, num_iterations=500):
        """
        Training entry point: runs gradient descent
        alpha: learning rate
        num_iterations: number of iterations
        """
        cost_history = self.gradient_descent(alpha, num_iterations)
        return self.theta, cost_history

    def gradient_descent(self, alpha, num_iterations):
        """
        The actual iteration loop
        alpha: learning rate
        num_iterations: number of iterations
        """
        cost_history = []
        for _ in range(num_iterations):  # run the iterations
            self.gradient_step(alpha)
            # record the cost at every iteration
            cost_history.append(self.cost_function(self.data, self.labels))
        return cost_history

    def gradient_step(self, alpha):
        """
        One gradient-descent parameter update; note that this is a matrix operation
        alpha: learning rate
        """
        num_examples = self.data.shape[0]  # number of training examples
        # get predictions from the current data and theta
        prediction = LinearRegression.hypothesis(self.data, self.theta)
        delta = prediction - self.labels  # residuals: predictions minus true values
        theta = self.theta
        # batch gradient descent update (all examples at once), in matrix form:
        # theta := theta - (alpha / m) * X^T (X @ theta - y)
        theta = theta - alpha * (1 / num_examples) * (np.dot(delta.T, self.data)).T
        self.theta = theta  # store the updated theta

    @staticmethod
    def hypothesis(data, theta):
        """
        Compute the predictions
        :param data: data matrix
        :param theta: weights θ
        :return: the predictions
        """
        predictions = np.dot(data, theta)
        return predictions
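
Note: gradient_descent calls self.cost_function, which is not part of this hunk. Below is a minimal sketch of what that method plausibly looks like, assuming the standard least-squares cost J(θ) = (1/2m) Σ(Xθ − y)²; the name and signature come from the call site above, and the body is an assumption rather than the committed code:

    def cost_function(self, data, labels):
        # assumed implementation: mean squared-error cost (not part of this commit)
        num_examples = data.shape[0]
        delta = LinearRegression.hypothesis(data, self.theta) - labels
        cost = (1 / (2 * num_examples)) * np.dot(delta.T, delta)
        return cost[0][0]  # np.dot on column vectors yields a 1x1 array; unwrap to a scalar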
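For reference, a hypothetical end-to-end use of the new train method. It assumes the constructor signature implied by the assignments above (data, labels, plus optional polynomial_degree, sinusoid_degree, normalize_data) and that the preprocessing in __init__ takes care of the bias term; the dataset values are made up for illustration:

import numpy as np

# toy dataset, roughly y = 2x + 1 (illustrative values only)
data = np.array([[1.0], [2.0], [3.0], [4.0]])
labels = np.array([[3.1], [4.9], [7.2], [9.0]])

model = LinearRegression(data, labels)
theta, cost_history = model.train(alpha=0.01, num_iterations=500)

print(theta)             # learned parameters
print(cost_history[-1])  # final training cost, which should decrease over the iterations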
