Multiple linear regression (gradient descent method)
2022-07-05 08:50:00 【Python code doctor】
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
# Read the data
data = np.loadtxt('Delivery.csv', delimiter=',')
print(data)
# Construct the features x and the target y
x_data = data[:, 0:-1]  # every column except the last
y_data = data[:, -1]    # the last column
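For readers without the original file: Delivery.csv is assumed here to hold three comma-separated numeric columns, two features (delivery distance in miles and number of deliveries) and the travel time as the target, matching the axis labels used in the plot below. A hypothetical stand-in you can write to disk for a quick test (illustrative values, not the author's data):

    import numpy as np

    # Hypothetical stand-in for Delivery.csv: columns are miles, deliveries, time.
    sample = np.array([
        [100.0, 4, 9.3],
        [ 50.0, 3, 4.8],
        [100.0, 4, 8.9],
        [100.0, 2, 6.5],
        [ 50.0, 2, 4.2],
        [ 80.0, 2, 6.2],
    ])
    np.savetxt('Delivery.csv', sample, delimiter=',')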
# Learning rate (step size)
learning_rate = 0.0001
# Initialize the intercept
theta0 = 0
# Initialize the coefficients
theta1 = 0
theta2 = 0
# Maximum number of iterations
n_iterables = 100
def compute_mse(theta0, theta1, theta2, x_data, y_data):
    '''Compute the cost function (halved mean squared error)'''
    total_error = 0
    for i in range(len(x_data)):
        # Accumulate the squared error: (true value - predicted value)**2
        total_error += (y_data[i] - (theta0 + theta1*x_data[i, 0] + theta2*x_data[i, 1]))**2
    mse_ = total_error / len(x_data) / 2
    return mse_
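Written out, compute_mse implements the halved mean squared error J(theta0, theta1, theta2) = (1/(2m)) * sum_i (y_i - (theta0 + theta1*x_i0 + theta2*x_i1))^2; halving the MSE cancels the factor of 2 when differentiating. An equivalent vectorized sketch (same definition, my own naming):

    def compute_mse_vec(theta0, theta1, theta2, x_data, y_data):
        # Vectorized equivalent of compute_mse above (halved MSE)
        y_hat = theta0 + theta1 * x_data[:, 0] + theta2 * x_data[:, 1]
        return np.mean((y_data - y_hat) ** 2) / 2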
def gradient_descent(x_data, y_data, theta0, theta1, theta2, learning_rate, n_iterables):
    '''Batch gradient descent'''
    m = len(x_data)
    for i in range(n_iterables):
        # Reset the partial derivatives of theta0, theta1, theta2
        theta0_grad = 0
        theta1_grad = 0
        theta2_grad = 0
        # Sum the per-sample partial derivatives, averaging over the m samples
        for j in range(m):
            error = (theta1*x_data[j, 0] + theta2*x_data[j, 1] + theta0) - y_data[j]
            theta0_grad += (1/m) * error
            theta1_grad += (1/m) * error * x_data[j, 0]
            theta2_grad += (1/m) * error * x_data[j, 1]
        # Update theta
        theta0 = theta0 - learning_rate * theta0_grad
        theta1 = theta1 - learning_rate * theta1_grad
        theta2 = theta2 - learning_rate * theta2_grad
    return theta0, theta1, theta2
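The inner loop accumulates the batch gradient: each parameter moves as theta_k <- theta_k - alpha * dJ/dtheta_k, where dJ/dtheta0 = (1/m) * sum(y_hat - y), and the derivatives for theta1 and theta2 additionally weight each residual by its feature value. The same update can be written as one matrix product; a minimal vectorized sketch (my own naming, assuming the same two-feature setup):

    def gradient_descent_vec(x_data, y_data, theta, learning_rate, n_iterables):
        # Batch gradient descent in matrix form; theta = [theta0, theta1, theta2].
        m = len(x_data)
        X = np.column_stack([np.ones(m), x_data])  # leading column of ones for the intercept
        theta = np.asarray(theta, dtype=float)
        for _ in range(n_iterables):
            error = X @ theta - y_data             # residuals, shape (m,)
            theta -= learning_rate * (X.T @ error) / m
        return theta

    # Usage: theta0, theta1, theta2 = gradient_descent_vec(x_data, y_data, [0, 0, 0], 0.0001, 100)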
# Visualize the data distribution
fig = plt.figure()
ax = fig.add_subplot(projection='3d')  # Axes3D(fig) is deprecated in recent matplotlib
ax.scatter(x_data[:, 0], x_data[:, 1], y_data)
# plt.show() is deferred to the end so the fitted plane below is drawn on the same axes;
# showing here would close the figure before the surface is added.
print(f" Start : intercept theta0={
theta0},theta1={
theta1},theta2={
theta2}, Loss ={
compute_mse(theta0,theta1,theta2,x_data,y_data)}")
print(" Start running ~")
theta0,theta1,theta2 = gradient_descent(x_data,y_data,theta0,theta1,theta2,learning_rate,n_iterables)
print(f" iteration {
n_iterables} Next time : intercept theta0={
theta0},theta1={
theta1},theta2={
theta2}, Loss ={
compute_mse(theta0,theta1,theta2,x_data,y_data)}")
# Draw the fitted plane
x_0 = x_data[:, 0]
x_1 = x_data[:, 1]
# Generate the grid matrices
x_0, x_1 = np.meshgrid(x_0, x_1)
# Predicted values on the grid
y_hat = theta0 + theta1*x_0 + theta2*x_1
ax.plot_surface(x_0, x_1, y_hat)
# Set the axis labels
ax.set_xlabel('Miles')
ax.set_ylabel('nums')
ax.set_zlabel('Time')
plt.show()
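One caveat on the surface plot: np.meshgrid is fed the raw, unsorted sample points, which can make the rendered plane look jagged. A smoother alternative (a sketch using the same fitted parameters) builds the grid from evenly spaced values spanning the data:

    # Evaluate the fitted plane on a regular 20x20 grid over the data range
    grid_x0 = np.linspace(x_data[:, 0].min(), x_data[:, 0].max(), 20)
    grid_x1 = np.linspace(x_data[:, 1].min(), x_data[:, 1].max(), 20)
    x_0, x_1 = np.meshgrid(grid_x0, grid_x1)
    y_hat = theta0 + theta1 * x_0 + theta2 * x_1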