A Practical Quick Start to PyTorch (Part 2)

Building a Neural Network Model

There are two ways to build a model in PyTorch. The first is to subclass nn.Module and define a forward method.

The second is to use nn.Sequential.

Here is a simple CNN to get a feel for both.

Building a simple CNN with nn.Module

import torch
import torch.nn as nn


class CnnNet(nn.Module):
    def __init__(self):
        super(CnnNet, self).__init__()
        self.conv1 = nn.Sequential(
            nn.Conv2d(
                in_channels = 1,
                out_channels = 16,
                kernel_size = 5,
                stride = 1,
                padding = 2,
            ),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size = 2),
        )
        self.conv2 = nn.Sequential(
            nn.Conv2d(16, 32, 5, 1, 2),
            nn.ReLU(),
            nn.MaxPool2d(2),
        )
        self.out = nn.Linear(32 * 7 * 7, 10)

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = x.view(x.size(0), -1)   # flatten conv features to (batch, 32*7*7)
        out = self.out(x)
        return out, x               # class scores and flattened features


cnn = CnnNet()
print(cnn)
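
To sanity-check the shapes, you can push a dummy batch through the model. This is a minimal sketch assuming an MNIST-style 1x28x28 grayscale input, which is what the 32*7*7 flatten size implies (28 → 14 → 7 after two 2x2 max-pools); the dummy batch is illustrative, not part of the original article.

# Sketch: verify output shapes (assumes 1x28x28 input, e.g. MNIST)
dummy = torch.randn(4, 1, 28, 28)   # a batch of 4 fake grayscale images
out, features = cnn(dummy)
print(out.shape)        # torch.Size([4, 10])   -- class scores
print(features.shape)   # torch.Size([4, 1568]) -- flattened features, 32*7*7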

Building a simple network with nn.Sequential versus nn.Module

import torch
import torch.nn.functional as F


# the class below can be replaced by an equivalent nn.Sequential network
class Net(torch.nn.Module):
    def __init__(self, n_feature, n_hidden, n_output):
        super(Net, self).__init__()
        self.hidden = torch.nn.Linear(n_feature, n_hidden)   # hidden layer
        self.predict = torch.nn.Linear(n_hidden, n_output)   # output layer

    def forward(self, x):
        x = F.relu(self.hidden(x))      # activation function for hidden layer
        x = self.predict(x)             # linear output
        return x

net1 = Net(1, 10, 1)

# easy and fast way to build your network
net2 = torch.nn.Sequential(
    torch.nn.Linear(1, 10),
    torch.nn.ReLU(),
    torch.nn.Linear(10, 1)
)
print(net1)

# Net(
#   (hidden): Linear(in_features=1, out_features=10, bias=True)
#   (predict): Linear(in_features=10, out_features=1, bias=True)
# )

print(net2)

# Sequential(
#   (0): Linear(in_features=1, out_features=10, bias=True)
#   (1): ReLU()
#   (2): Linear(in_features=10, out_features=1, bias=True)
# )
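
The two builds behave the same in training; they differ mainly in how layers are named (net1 exposes hidden and predict as attributes, net2 indexes them 0, 1, 2, which also shows up in their state_dict keys). Below is a minimal training sketch on made-up toy data, just to show that net2 plugs into the usual optimizer/loss loop; the data and hyperparameters are illustrative, not from the original article.

# Toy regression sketch (data and hyperparameters are made up for illustration)
x = torch.unsqueeze(torch.linspace(-1, 1, 100), dim=1)   # shape (100, 1)
y = x.pow(2) + 0.2 * torch.rand(x.size())                # noisy quadratic target

optimizer = torch.optim.SGD(net2.parameters(), lr=0.2)
loss_func = torch.nn.MSELoss()

for step in range(100):
    prediction = net2(x)              # forward pass
    loss = loss_func(prediction, y)   # mean squared error
    optimizer.zero_grad()             # clear old gradients
    loss.backward()                   # backpropagate
    optimizer.step()                  # update parameters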

    Original author: 小哲lxz
    Original article: https://zhuanlan.zhihu.com/p/64517856