
Deep Learning Framework PyTorch: Introduction and Practice, Study Notes (Part 3)


nn.Module

Implementing a fully connected layer with nn.Module

import torch as t
from torch import nn
from torch.autograd import Variable as V

class Linear(nn.Module):
    def __init__(self, in_features, out_features):
        super(Linear, self).__init__()
        self.w = nn.Parameter(t.randn(in_features, out_features))
        print(self.w.shape)
        self.b = nn.Parameter(t.randn(out_features))

    def forward(self, x):
        x = x.mm(self.w)
        x = x + self.b.expand_as(x)
        return x

layer = Linear(4, 3)
input = V(t.randn(2, 4))
print(input.shape)
output = layer(input)
print(output)

for name, parameter in layer.named_parameters():
    print(name, parameter)

- A custom layer must inherit from nn.Module and call nn.Module's constructor inside its own constructor, i.e. super(Linear, self).__init__().
- Layers defined earlier can be used as sub-modules of the current module, and their learnable parameters then become learnable parameters of the current module as well (see the sketch after this list).
- Learnable parameters are defined in the constructor __init__ and wrapped as nn.Parameter.
- The forward function implements the forward pass; there is no need to write a backward function.
- A layer can be treated as a function in the mathematical sense.
- The learnable parameters of a module can be obtained as iterators via named_parameters() or parameters().
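A minimal sketch (not from the original notes) of the second point: a small perceptron that reuses the Linear layer defined above as sub-modules. Assigning the sub-modules as attributes registers them automatically, so their parameters appear in the parent module's parameter iterators.

class Perceptron(nn.Module):
    def __init__(self, in_features, hidden_features, out_features):
        super(Perceptron, self).__init__()
        # sub-modules assigned as attributes are registered automatically
        self.layer1 = Linear(in_features, hidden_features)
        self.layer2 = Linear(hidden_features, out_features)

    def forward(self, x):
        x = self.layer1(x)
        x = t.sigmoid(x)
        return self.layer2(x)

perceptron = Perceptron(3, 4, 1)
# the sub-layers' parameters show up as layer1.w, layer1.b, layer2.w, layer2.b
for name, param in perceptron.named_parameters():
    print(name, param.size())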

When the network is deep and contains many layers, nn.Sequential() can be used to assemble them.

from torch import nn
from collections import OrderedDict

net1 = nn.Sequential()
net1.add_module('conv', nn.Conv2d(3, 3, 3))
net1.add_module('batchnorm', nn.BatchNorm2d(3))
net1.add_module('activation_layer', nn.ReLU())

net2 = nn.Sequential(
    nn.Conv2d(3, 3, 3),
    nn.BatchNorm2d(3),
    nn.ReLU()
)

net3 = nn.Sequential(OrderedDict([
    ('conv1', nn.Conv2d(3, 3, 3)),
    ('bn1', nn.BatchNorm2d(3)),
    ('relu1', nn.ReLU())
]))

print('net1: ', net1)
print('net2: ', net2)
print('net3: ', net3)
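The three networks behave the same; they differ only in how the sub-modules are named and accessed. A short usage sketch with the objects defined above:

# named sub-modules are accessible as attributes, anonymous ones by index
print(net1.conv)      # the Conv2d registered with add_module('conv', ...)
print(net3.relu1)     # the ReLU registered under the name 'relu1'
print(net2[0])        # net2 has no names, so index by position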

Parameter initialization

import torch
from torch import nn
from torch.nn import init

linear = nn.Linear(3, 4)
torch.manual_seed(1)
# xavier_normal_ is the in-place variant; the older xavier_normal is deprecated
init.xavier_normal_(linear.weight)
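To initialize a whole network rather than a single module, a common pattern (a sketch, not from the original notes) is to pass an initialization function to Module.apply, which visits every sub-module:

from torch import nn
from torch.nn import init

def init_weights(m):
    # initialize only the Linear layers; leave other module types untouched
    if isinstance(m, nn.Linear):
        init.xavier_normal_(m.weight)
        init.constant_(m.bias, 0)

net = nn.Sequential(nn.Linear(3, 4), nn.ReLU(), nn.Linear(4, 2))
net.apply(init_weights)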

Two methods for parallel computation on multiple GPUs

# method 1: wrap the model once, then call the wrapper like a normal module
new_net = nn.DataParallel(net, device_ids=[0, 1])
output = new_net(input)

# method 2: run a one-off parallel forward pass with the functional form
output = nn.parallel.data_parallel(net, input, device_ids=[0, 1])
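Both methods do the same work: the input batch is split along the batch dimension, scattered to the listed GPUs, processed by replicas of the module, and the outputs are gathered back. The difference is that nn.DataParallel returns a wrapper module that can be reused like any other module, while nn.parallel.data_parallel is a single function call that performs the parallel forward pass directly.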

Building ResNet

import torch as t
from torch import nn
from torch.nn import functional as F

class ResidualBlock(nn.Module):
    def __init__(self, inchannel, outchannel, stride=1, shortcut=None):
        super(ResidualBlock, self).__init__()
        self.left = nn.Sequential(
            nn.Conv2d(in_channels=inchannel, out_channels=outchannel,
                      kernel_size=3, stride=stride, padding=1, bias=False),
            nn.BatchNorm2d(outchannel),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=outchannel, out_channels=outchannel,
                      kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(outchannel)
        )
        self.right = shortcut

    def forward(self, x):
        out = self.left(x)
        residual = x if self.right is None else self.right(x)
        out += residual
        return F.relu(out)

class ResNet(nn.Module):
    def __init__(self, num_classes=1000):
        super(ResNet, self).__init__()
        self.pre = nn.Sequential(
            nn.Conv2d(3, 64, 7, 2, 3, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(3, 2, 1)
        )
        self.layer1 = self._make_layer(64, 128, 3)
        self.layer2 = self._make_layer(128, 256, 4, stride=2)
        self.layer3 = self._make_layer(256, 512, 6, stride=2)
        self.layer4 = self._make_layer(512, 512, 3, stride=2)
        self.fc = nn.Linear(512, num_classes)

    def _make_layer(self, inchannel, outchannel, block_num, stride=1):
        shortcut = nn.Sequential(
            nn.Conv2d(inchannel, outchannel, 1, stride=stride, bias=False),
            nn.BatchNorm2d(outchannel)
        )
        layers = []
        layers.append(ResidualBlock(inchannel, outchannel, stride, shortcut))
        for i in range(1, block_num):
            layers.append(ResidualBlock(outchannel, outchannel))
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.pre(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = F.avg_pool2d(x, 7)
        x = x.view(x.size(0), -1)
        return self.fc(x)

model = ResNet()
input = t.autograd.Variable(t.randn(1, 3, 224, 224))
o = model(input)
print(o.shape)
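For reference (not part of the original notes), torchvision ships ready-made ResNet implementations, so in practice the hand-written network above is mainly a learning exercise. A minimal sketch:

import torch as t
from torchvision import models

resnet34 = models.resnet34(num_classes=1000)
o = resnet34(t.randn(1, 3, 224, 224))
print(o.shape)   # expected: torch.Size([1, 1000])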
