I have the following code:
import torch
import torch.nn as nn
from torch.nn import LSTMCell

class myLSTM(nn.Module):
    def __init__(self, input_size, output_size, hidden_size, num_layers):
        super(myLSTM, self).__init__()
        self.input_size = input_size + 1
        self.output_size = output_size
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.layers = []
        new_input_size = self.input_size
        for i in range(num_layers):
            self.layers.append(LSTMCell(new_input_size, hidden_size))
            new_input_size = hidden_size
        self.linear = nn.Linear(hidden_size, output_size)
        self.softmax = nn.Softmax()

    def forwardLayers(self, input, hns, cns, layers):
        new_hns = []
        new_cns = []
        (hn, cn) = layers[0](input, (hns[0], cns[0]))
        new_hns.append(hn)
        new_cns.append(cn)
        for i in range(1, len(layers)):
            (hn, cn) = layers[i](hn, (hns[i], cns[i]))
            new_hns.append(hn)
            new_cns.append(cn)
        return hn, (new_hns, new_cns)

    def forward(self, input, hx):
        actions = []
        hns, cns = hx
        action = torch.Tensor([[0.0]])
        for i in range(len(input)):
            new_input = input[i]
            new_input = new_input.view(1, -1)
            output, (hns, cns) = self.forwardLayers(new_input, hns, cns, self.layers)
            output = self.softmax(self.linear(output))
        return output
Now, when I run the following code to inspect the network's parameters:
for name, param in myLSTM_object.named_parameters():
    if param.requires_grad:
        print(name, param.data)
I get:
linear.weight tensor([[ 0.5042, -0.6984],
[ 0.0721, -0.4060]])
linear.bias tensor([ 0.6968, -0.4649])
So it completely misses the parameters of the LSTMCell. Does this mean the LSTMCell parameters are not being trained? How can I see the LSTMCell parameters?
Answer 0 (score: 2):
This is expected - storing modules in a list, dict, set, or any other plain Python container does not register them with the module that owns said list, and so on. To make your code work, use nn.ModuleList instead. It is as simple as modifying your __init__ code to use
layers = []
new_input_size = self.input_size
for i in range(num_layers):
    layers.append(LSTMCell(new_input_size, hidden_size))
    new_input_size = hidden_size
self.layers = nn.ModuleList(layers)
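Once the cells are wrapped in nn.ModuleList they are registered as submodules, so named_parameters() should also return their weights. A minimal check, assuming the class above is otherwise unchanged and reusing the instance name myLSTM_object from the question (the constructor arguments here are made-up values purely for illustration):

# Sizes are arbitrary, for illustration only.
myLSTM_object = myLSTM(input_size=1, output_size=2, hidden_size=2, num_layers=2)

for name, param in myLSTM_object.named_parameters():
    if param.requires_grad:
        # Expected names now include layers.0.weight_ih, layers.0.weight_hh,
        # layers.0.bias_ih, layers.0.bias_hh, the same for layers.1,
        # plus linear.weight and linear.bias.
        print(name, param.shape)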