Implementing Linear Regression with PyTorch

Posted by 有何m不可 on 2024-05-31

Reposted from: https://www.cnblogs.com/miraclepbc/p/14329186.html

Import the required Python packages

import torch
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from torch import nn
%matplotlib inline

Load the data

data = pd.read_csv('E:/datasets/dataset/Income1.csv')
X = torch.from_numpy(data.Education.values.reshape(-1, 1).astype(np.float32))  # a common way to convert a DataFrame column to a tensor
Y = torch.from_numpy(data.Income.values.reshape(-1, 1).astype(np.float32))
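A quick sanity check on the shapes can catch broadcasting mistakes early (an optional sketch; it assumes Income1.csv has one row per sample with Education and Income columns):

print(X.shape, Y.shape)  # both should be (n_samples, 1) so prediction and target line up in the loss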


Define the model

model = nn.Linear(in_features=1, out_features=1)                    # model(input) is equivalent to w * input + b
loss_func = nn.MSELoss()                                            # loss function: mean squared error
optimizer = torch.optim.SGD(params=model.parameters(), lr=0.0001)
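As the comment above notes, model(input) applies the affine transform w * input + b. A minimal sketch of the equivalent manual computation, using a hypothetical sample x_demo (not part of the original post):

x_demo = torch.tensor([[10.0]])                # one Education value, shape (1, 1)
manual = x_demo @ model.weight.T + model.bias  # the same affine map nn.Linear applies
assert torch.allclose(manual, model(x_demo))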

Train the model

for epoch in range(5000):
    for x, y in zip(X, Y):
        y_pred = model(x)              # predict with the model
        loss = loss_func(y_pred, y)    # compute the loss for this prediction
        optimizer.zero_grad()          # reset accumulated gradients to zero
        loss.backward()                # back-propagate to compute gradients
        optimizer.step()               # update the model parameters
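The loop above takes one optimizer step per sample. A full-batch variant, passing all of X through the model at once, is a common alternative (a sketch, not part of the original post):

for epoch in range(5000):
    y_pred = model(X)            # predict on the whole dataset at once
    loss = loss_func(y_pred, Y)  # mean squared error over all samples
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()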

View the training results

model.weight, model.bias

[Output: the learned weight and bias tensors]
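To read the learned parameters as plain Python numbers, .item() works since both tensors hold a single element (a small convenience sketch):

w = model.weight.item()  # learned slope
b = model.bias.item()    # learned intercept
print(f'Income ≈ {w:.3f} * Education + {b:.3f}')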

plt.scatter(data.Education, data.Income)
plt.plot(X.numpy(), model(X).data.numpy(), c='r')
# model(X).data extracts the plain tensor; otherwise the output still carries grad and grad_fn (model(X).detach() is the modern equivalent)

[Figure: scatter plot of Education vs. Income with the fitted regression line in red]
