Multiple linear regression (gradient descent method)
2022-07-05 08:50:00 【Python code doctor】
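The script below fits a plane to delivery data with two features. The model is y-hat = theta0 + theta1*x1 + theta2*x2, the quantity being minimized is the half mean squared error, and each iteration of batch gradient descent updates all three parameters simultaneously:

$$J(\theta_0,\theta_1,\theta_2)=\frac{1}{2m}\sum_{i=1}^{m}\Bigl(y^{(i)}-(\theta_0+\theta_1 x_1^{(i)}+\theta_2 x_2^{(i)})\Bigr)^2,\qquad \theta_j \leftarrow \theta_j-\alpha\,\frac{\partial J}{\partial \theta_j}$$

where m is the number of samples and alpha is the learning rate.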
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # kept to register the 3D projection on older matplotlib

# Read the data: each row holds the feature columns followed by the target in the last column
data = np.loadtxt('Delivery.csv', delimiter=',')
print(data)

# Construct the features x and the target y
x_data = data[:, 0:-1]
y_data = data[:, -1]

# Learning rate (step size)
learning_rate = 0.0001
# Intercept
theta0 = 0
# Coefficients
theta1 = 0
theta2 = 0
# Maximum number of iterations
n_iterables = 100

def compute_mse(theta0, theta1, theta2, x_data, y_data):
    """Compute the cost function (half mean squared error)."""
    total_error = 0
    for i in range(len(x_data)):
        # Squared loss: (true value - predicted value)**2
        total_error += (y_data[i] - (theta0 + theta1*x_data[i, 0] + theta2*x_data[i, 1]))**2
    mse_ = total_error / len(x_data) / 2
    return mse_
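For reference, the same cost can be computed without the Python loop. A minimal vectorized sketch over the same arrays (an alternative form, not part of the original script):

def compute_mse_vectorized(theta0, theta1, theta2, x_data, y_data):
    # Predictions for all samples at once
    y_pred = theta0 + theta1 * x_data[:, 0] + theta2 * x_data[:, 1]
    # Half mean squared error, identical to the loop version above
    return np.mean((y_data - y_pred) ** 2) / 2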
def gradient_descent(x_data, y_data, theta0, theta1, theta2, learning_rate, n_iterables):
    """Batch gradient descent."""
    m = len(x_data)
    for i in range(n_iterables):
        # Reset the partial derivatives of theta0, theta1, theta2
        theta0_grad = 0
        theta1_grad = 0
        theta2_grad = 0
        # Accumulate the partial derivatives over all m samples (the 1/m factor averages them)
        for j in range(m):
            theta0_grad += (1/m) * ((theta1*x_data[j, 0] + theta2*x_data[j, 1] + theta0) - y_data[j])
            theta1_grad += (1/m) * ((theta1*x_data[j, 0] + theta2*x_data[j, 1] + theta0) - y_data[j]) * x_data[j, 0]
            theta2_grad += (1/m) * ((theta1*x_data[j, 0] + theta2*x_data[j, 1] + theta0) - y_data[j]) * x_data[j, 1]
        # Update the parameters simultaneously
        theta0 = theta0 - learning_rate * theta0_grad
        theta1 = theta1 - learning_rate * theta1_grad
        theta2 = theta2 - learning_rate * theta2_grad
    return theta0, theta1, theta2
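The double loop makes every partial derivative explicit. In NumPy the same batch update is usually written with a design matrix; a minimal sketch, assuming the parameters are packed into one length-3 array (an illustrative vectorized variant, not the original author's code):

def gradient_descent_vectorized(x_data, y_data, theta, learning_rate, n_iterables):
    m = len(x_data)
    # Prepend a column of ones so theta[0] plays the role of the intercept
    X = np.hstack([np.ones((m, 1)), x_data])
    for _ in range(n_iterables):
        error = X @ theta - y_data          # residuals, shape (m,)
        grad = (X.T @ error) / m            # averaged gradient, shape (3,)
        theta = theta - learning_rate * grad
    return theta

Called as theta = gradient_descent_vectorized(x_data, y_data, np.zeros(3), learning_rate, n_iterables), it yields the same three parameters as the loop version, up to floating-point order of operations.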
# Visualize the raw data distribution
fig = plt.figure()
ax = fig.add_subplot(projection='3d')  # Axes3D(fig) no longer attaches itself to the figure on matplotlib >= 3.4
ax.scatter(x_data[:, 0], x_data[:, 1], y_data)
plt.show()
print(f" Start : intercept theta0={
theta0},theta1={
theta1},theta2={
theta2}, Loss ={
compute_mse(theta0,theta1,theta2,x_data,y_data)}")
print(" Start running ~")
theta0,theta1,theta2 = gradient_descent(x_data,y_data,theta0,theta1,theta2,learning_rate,n_iterables)
print(f" iteration {
n_iterables} Next time : intercept theta0={
theta0},theta1={
theta1},theta2={
theta2}, Loss ={
compute_mse(theta0,theta1,theta2,x_data,y_data)}")
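Whether 100 iterations are enough is easiest to judge from a cost curve. One way to record it with the functions already defined (an illustrative sketch; it re-runs training one step at a time):

# Track the cost per iteration to check convergence
losses = []
t0, t1, t2 = 0, 0, 0
for _ in range(n_iterables):
    t0, t1, t2 = gradient_descent(x_data, y_data, t0, t1, t2, learning_rate, 1)
    losses.append(compute_mse(t0, t1, t2, x_data, y_data))
plt.plot(losses)
plt.xlabel('iteration')
plt.ylabel('cost')
plt.show()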
# Draw the fitted plane together with the data points
x_0 = x_data[:, 0]
x_1 = x_data[:, 1]
# Generate the grid matrices
x_0, x_1 = np.meshgrid(x_0, x_1)
# Predicted y over the grid
y_hat = theta0 + theta1*x_0 + theta2*x_1
fig = plt.figure()  # the first figure was closed by plt.show(), so draw on a fresh one
ax = fig.add_subplot(projection='3d')
ax.scatter(x_data[:, 0], x_data[:, 1], y_data)
ax.plot_surface(x_0, x_1, y_hat, alpha=0.5)  # semi-transparent so the points stay visible
# Set the axis labels
ax.set_xlabel('Miles')
ax.set_ylabel('nums')
ax.set_zlabel('Time')
plt.show()
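As a sanity check, the closed-form least-squares fit from scikit-learn can be compared with the gradient-descent estimates (scikit-learn is an extra dependency not used in the original post; with a small learning rate and only 100 iterations the two sets of numbers will agree in trend but not exactly):

from sklearn.linear_model import LinearRegression

model = LinearRegression()
model.fit(x_data, y_data)
print("sklearn intercept:", model.intercept_)
print("sklearn coefficients:", model.coef_)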