%reset -f
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.data as data_utils
import torchvision
import torchvision.transforms as transforms
import numpy as np
import matplotlib.pyplot as plt

# XOR training data
x1 = np.array([0, 0])
x2 = np.array([0, 1])
x3 = np.array([1, 0])
x4 = np.array([1, 1])

num_epochs = 200
x = torch.tensor([x1, x2, x3, x4]).float()
y = torch.tensor([0, 1, 1, 0]).long()

train = data_utils.TensorDataset(x, y)
train_loader = data_utils.DataLoader(train, batch_size=2, shuffle=True)

device = 'cpu'
input_size = 2
hidden_size = 100
num_classes = 2
learning_rate = .0001

torch.manual_seed(24)

def weights_init(m):
    m.weight.data.normal_(0.0, 1)

class NeuralNet(nn.Module):
    def __init__(self, input_size, hidden_size, num_classes):
        super(NeuralNet, self).__init__()
        self.fc1 = nn.Linear(input_size, hidden_size)
        self.relu = nn.ReLU()
        self.fc2 = nn.Linear(hidden_size, num_classes)

    def forward(self, x):
        out = self.fc1(x)
        out = self.relu(out)
        out = self.fc2(out)
        return out

model = NeuralNet(input_size, hidden_size, num_classes).to(device)
model.apply(weights_init)

criterionCE = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)

for epoch in range(num_epochs):
    for i, (images, labels) in enumerate(train_loader):
        images = images.to(device)
        labels = labels.to(device)

        outputs = model(images)
        loss = criterionCE(outputs, labels)

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

outputs = model(x)
print(outputs.data.max(1)[1])
I use this function to initialize the weights:

def weights_init(m):
    m.weight.data.normal_(0.0, 1)

but it raises the following error:
~/anaconda3/envs/pytorch/lib/python3.7/site-packages/torch/nn/modules/module.py in __getattr__(self, name)
533 return modules[name]
534 raise AttributeError("'{}' object has no attribute '{}'".format(
--> 535 type(self).__name__, name))
536
537 def __setattr__(self, name, value):
AttributeError: 'ReLU' object has no attribute 'weight'
Is this the correct way to initialize the weights? Also, shouldn't the object passed in be of type nn.Module rather than ReLU?
Answer 0 (score: 1)
You are trying to set the weights of a layer that has no weights (ReLU). Inside weights_init, you should check the type of the layer before initializing its weights. For example:
def weights_init(m):
    if type(m) == nn.Linear:
        m.weight.data.normal_(0.0, 1)
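
As a minimal sketch (assuming the same NeuralNet, input_size, hidden_size, num_classes, and device as in the question), the check can also be written with isinstance, using the helpers from torch.nn.init, and then applied recursively to every submodule via model.apply:

# Minimal sketch: isinstance also covers subclasses of nn.Linear,
# and nn.init.normal_ / nn.init.zeros_ modify the parameters in place.
def weights_init(m):
    if isinstance(m, nn.Linear):
        nn.init.normal_(m.weight, mean=0.0, std=1.0)
        nn.init.zeros_(m.bias)  # optional: also reset the bias

model = NeuralNet(input_size, hidden_size, num_classes).to(device)
model.apply(weights_init)  # visits fc1, relu, fc2 and the model itself

Because apply() walks over every submodule (including ReLU and the NeuralNet container itself), the isinstance guard is what keeps the function from touching layers that have no weight attribute.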
Answer 1 (score: 1)
In addition to what Fabio mentioned about checking the layer type (ReLU is an activation layer, not a trainable one as far as initialization is concerned), you can also do the weight initialization inside the __init__ method itself, as torchvision's VGG implementation does:
https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py
def __init__(self, features, num_classes=1000, ...):
    ----snip---
    self._initialize_weights()

def _initialize_weights(self):
    for m in self.modules():
        if isinstance(m, nn.Linear):
            m.weight.data.normal_(0.0, 1)
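
Applied to the NeuralNet from the question, a sketch of this pattern might look like the following (the bias initialization and the use of nn.init are my additions, not taken from the VGG source):

import torch.nn as nn

class NeuralNet(nn.Module):
    def __init__(self, input_size, hidden_size, num_classes):
        super(NeuralNet, self).__init__()
        self.fc1 = nn.Linear(input_size, hidden_size)
        self.relu = nn.ReLU()
        self.fc2 = nn.Linear(hidden_size, num_classes)
        self._initialize_weights()  # initialization happens at construction time

    def _initialize_weights(self):
        # self.modules() yields the model itself plus every submodule,
        # so the isinstance check skips NeuralNet, ReLU, etc.
        for m in self.modules():
            if isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, mean=0.0, std=1.0)
                nn.init.zeros_(m.bias)

    def forward(self, x):
        return self.fc2(self.relu(self.fc1(x)))

With this approach the model initializes its own weights, so there is no need for a separate model.apply(weights_init) call after construction.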