当前位置:网站首页>B站刘二大人-多元逻辑回归 Lecture 7

B站刘二大人-多元逻辑回归 Lecture 7

2022-07-06 05:33:00 宁然也


系列文章:


文章目录


import torch
import  matplotlib.pyplot as plt
import numpy as  np
class LogisticRegressionModel(torch.nn.Module):
    """Three-layer fully-connected binary classifier.

    Funnels an 8-feature input through hidden widths 6 and 4 down to a
    single output, applying a sigmoid after every linear layer so the
    final value lies in (0, 1).
    """

    def __init__(self):
        super(LogisticRegressionModel, self).__init__()
        # Layer widths: 8 inputs -> 6 -> 4 -> 1 output.
        self.lay1 = torch.nn.Linear(8, 6)
        self.lay2 = torch.nn.Linear(6, 4)
        self.lay3 = torch.nn.Linear(4, 1)
        self.sigmod = torch.nn.Sigmoid()

    def forward(self, x):
        """Forward pass: sigmoid(linear(...)) applied three times in sequence."""
        for layer in (self.lay1, self.lay2, self.lay3):
            x = self.sigmod(layer(x))
        return x

# Assemble model, binary-cross-entropy loss, and a plain SGD optimizer.
model = LogisticRegressionModel()
criterion = torch.nn.BCELoss(reduction='mean')
optimizer = torch.optim.SGD(model.parameters(), lr=0.005)

# Load the dataset: every column except the last is a feature,
# the final column holds the 0/1 label.
raw = np.loadtxt('./datasets/diabetes.csv.gz', delimiter=',', dtype=np.float32)
x_data = torch.from_numpy(raw[:, :-1])
y_data = torch.from_numpy(raw[:, [-1]])

epoch_list, loss_list = [], []
for epoch in range(1000):
    # Full-batch gradient descent: no mini-batching, the entire
    # dataset is pushed through the model on every epoch.
    prediction = model(x_data)
    current_loss = criterion(prediction, y_data)
    epoch_list.append(epoch)
    loss_list.append(current_loss.item())

    optimizer.zero_grad()
    current_loss.backward()
    optimizer.step()

# Plot the training curve (loss versus epoch).
plt.plot(epoch_list, loss_list)
plt.xlabel("epoch")
plt.ylabel("loss")
plt.show()
原网站

版权声明
本文为[宁然也]所创,转载请带上原文链接,感谢
https://blog.csdn.net/weixin_42382758/article/details/125593581