首页 > 其他 > 详细

pytorch:一个简单的线性回归的例子

时间:2020-10-15 16:04:47      阅读:61      评论:0      收藏:0      [点我收藏+]
import torch
import torch.utils.data as Data
import torch.optim as optim
import torch.nn as nn
from torch.nn import init
import numpy as np

# ---- problem setup ----
num_inputs = 2        # feature dimension of each example
num_examples = 1000   # number of synthetic samples
true_w = [2, -3.4]    # ground-truth weights used to generate labels
true_b = 4.2          # ground-truth bias
batchsize = 10
num_epochs = 100

# ---- synthetic data: y = w1*x1 + w2*x2 + b + small gaussian noise ----
features = torch.tensor(
    np.random.normal(0, 1, (num_examples, num_inputs)), dtype=torch.float
)
noise = torch.tensor(np.random.normal(0, 0.01, size=num_examples), dtype=torch.float)
labels = features[:, 0] * true_w[0] + features[:, 1] * true_w[1] + true_b + noise

# ---- mini-batch iterator over (feature, label) pairs ----
dataset = Data.TensorDataset(features, labels)
data_iter = Data.DataLoader(dataset, batch_size=batchsize, shuffle=True)

#define net
class LinearNet(nn.Module):
    """Single-layer linear-regression model: maps n_features inputs to one scalar."""

    def __init__(self, n_features):
        super().__init__()
        # One fully-connected layer, n_features -> 1.
        self.linear = nn.Linear(n_features, 1)
        # Initialize with small random weights and a zero bias.
        init.normal_(self.linear.weight, mean=0, std=0.01)
        init.constant_(self.linear.bias, val=0)

    def forward(self, x):
        """Apply the linear layer to a (batch, n_features) input."""
        return self.linear(x)

# Instantiate the model for the 2-feature regression problem.
net = LinearNet(num_inputs)

# Mean-squared-error loss, the standard choice for linear regression.
loss = nn.MSELoss()
# Plain SGD over all model parameters.
optimizer = optim.SGD(net.parameters(), lr=0.003)

# Training loop: one full pass over the DataLoader per epoch.
for epoch in range(1, num_epochs + 1):
    for X, y in data_iter:
        output = net(X)
        # y arrives from the DataLoader with shape (batch,); reshape to
        # (batch, 1) so it matches the network output for MSELoss.
        l = loss(output, y.view(-1, 1))
        optimizer.zero_grad()  # clear gradients from the previous step
        l.backward()
        optimizer.step()

    # BUG FIX: the original line used typographic quotes (U+2018) around the
    # format string, which is a SyntaxError in Python.
    print('epoch %d, loss %f ' % (epoch, l.item()))

  

pytorch:一个简单的线性回归的例子

原文:https://www.cnblogs.com/liutianrui1/p/13820293.html

(0)
(0)
   
举报
评论 一句话评论(0)
关于我们 - 联系我们 - 留言反馈 - 联系我们:wmxa8@hotmail.com
© 2014 bubuko.com 版权所有
打开技术之扣,分享程序人生!