I have a basic Keras network that predicts one value, and it works fine. I tried to add support for predicting 4 values, but when I do, the output immediately collapses to 0. Right now the network is just a simple model for testing.
The input to the network is an array of shape (90,), and the output should have 4 values. I also tried a single output layer with 4 nodes, but it showed the same behavior as the version below (a minimal sketch of that variant follows the code). I have also tried several loss functions.
from keras.layers import Input, Dense, Dropout
from keras.models import Model
from keras.optimizers import Adam

def runNN(training_data, training_labels, test_data, test_labels, model=None):
    if model is None:
        inp = Input(shape=(90,), name='input')
        model = Dense(units=90, activation='relu')(inp)
        model = Dropout(0.5)(model)
        model = Dense(units=180, activation='relu')(model)
        model = Dropout(0.5)(model)
        output1 = Dense(1, activation='relu', name='preretirement')(model)
        output2 = Dense(1, activation='relu', name='cola')(model)
        output3 = Dense(1, activation='relu', name='initialNC')(model)
        output4 = Dense(1, activation='relu', name='finalNC')(model)
        complete_model = Model(inputs=[inp], outputs=[output1, output2, output3, output4])
        optimizer = Adam(lr=.0003)
        complete_model.compile(loss='mean_absolute_error',
                               optimizer=optimizer,
                               metrics=['mean_absolute_error'])
    complete_model.fit(training_data, {'preretirement': training_labels[0],
                                       'cola': training_labels[1],
                                       'initialNC': training_labels[2],
                                       'finalNC': training_labels[3]},
                       epochs=10, batch_size=128)
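For reference, the "single output layer with 4 nodes" variant mentioned at the top (which showed the same all-zero behavior) would look roughly like the sketch below. It reuses inp and model from the function above; the layer name all_outputs and the assumption that training_labels is a single (num_samples, 4) array are illustrative, not part of the original post.

# Illustrative sketch of the single 4-unit output variant mentioned in the question.
output = Dense(units=4, activation='relu', name='all_outputs')(model)
single_head_model = Model(inputs=[inp], outputs=output)
single_head_model.compile(loss='mean_absolute_error',
                          optimizer=Adam(lr=.0003),
                          metrics=['mean_absolute_error'])
# Labels are assumed to be one (num_samples, 4) array here, not a dict of four vectors.
single_head_model.fit(training_data, training_labels, epochs=10, batch_size=128)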
For every test point, the output is [0, 0, 0, 0] after 1 epoch and for every epoch after that. It should be a 4-item list with values between 0 and 1, e.g. [.34, .56, .12, .87].
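One detail worth checking: with a multi-output Keras model, each entry in the label dict must be a per-sample vector of length num_samples. If training_labels is a (num_samples, 4) array, training_labels[0] selects the first row (the 4 labels of one sample), while the working code in the answer below uses the column form training_labels[:, 0]. A minimal shape check, assuming that array layout (an editorial illustration, not from the original post):

import numpy as np

# Editorial illustration; assumes training_labels is an array of shape (num_samples, 4).
training_labels = np.asarray(training_labels)
print(training_labels.shape)        # expected: (num_samples, 4)
print(training_labels[0].shape)     # first row: the 4 labels of ONE sample -> (4,)
print(training_labels[:, 0].shape)  # first column: ONE label per sample -> (num_samples,)
# Each named output needs the column form, i.e. one value per training sample.
targets = {'preretirement': training_labels[:, 0],
           'cola': training_labels[:, 1],
           'initialNC': training_labels[:, 2],
           'finalNC': training_labels[:, 3]}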
Answer 0 (score: 1)
You are probably doing a prediction or function-fitting task. Two suggestions may help: use sigmoid instead of relu in the hidden layers, since it usually works better for this kind of task, and drop the relu activation on the output layers (see the commented-out lines below). A relu output is clamped to exactly 0 whenever its pre-activation is negative, which is consistent with the all-zero predictions you are seeing. The code below is your own code, modified, and it works correctly.
from keras.layers import Input, Dense, Dropout
from keras.models import Model
from keras.optimizers import Adam
dropout_rate = .5
activate_function = 'sigmoid'
num_iteration = 20
inp = Input(shape = (90,), name = 'input')
model = Dense(units = 90, activation=activate_function)(inp)
model = Dropout(rate=dropout_rate)(model)
model = Dense(units = 180, activation=activate_function)(model)
model = Dropout(rate=dropout_rate)(model)
output1 = Dense(units=1, name = 'preretirement')(model)
output2 = Dense(units=1, name = 'cola')(model)
output3 = Dense(units=1, name = 'initialNC')(model)
output4 = Dense(units=1, name = 'finalNC')(model)
# # Your original code
# output1 = Dense(units=1, activation = activate_function, name = 'preretirement')(model)
# output2 = Dense(units=1, activation = activate_function,name = 'cola')(model)
# output3 = Dense(units=1, activation = activate_function,name = 'initialNC')(model)
# output4 = Dense(units=1, activation = activate_function,name = 'finalNC')(model)
complete_model = Model(inputs = [inp], outputs = [output1, output2, output3, output4])
optimizer = Adam(lr = .0003)
complete_model.compile(loss='mean_absolute_error',
                       optimizer=optimizer,
                       metrics=['mean_absolute_error'])
# generate data for training the model
import numpy as np
num_train = 4000 # the number of training instances
# a normal distribution with mean=2, variance=1
training_data = np.random.normal(2, 1, (num_train, 90))
training_labels = np.zeros(shape=(num_train, 4))
for i in range(num_train):
    tmp = np.sum(training_data[i, :]) / 90.0
    training_labels[i, :] = [tmp, np.sin(tmp), np.cos(tmp), tmp*tmp]
print(training_data.shape, training_labels.shape)
# generate data for testing the model
test_data = np.random.normal(0, 1, (10, 90)) # 10 test instances
test_labels = np.zeros(shape=(10, 4))
for i in range(10):
    tmp = np.sum(test_data[i, :]) / 90.0  # use the test instance, not the training data
    test_labels[i, :] = [tmp, np.sin(tmp), np.cos(tmp), tmp*tmp]
print(test_data.shape, test_labels.shape)
complete_model.fit(training_data, {'preretirement': training_labels[:, 0],
                                   'cola': training_labels[:, 1],
                                   'initialNC': training_labels[:, 2],
                                   'finalNC': training_labels[:, 3]},
                   epochs=num_iteration,
                   batch_size=128)
results = complete_model.predict(test_data)
for i in range(10):
    print('true', test_labels[i])
    print('predicted', results[0][i, 0], results[1][i, 0], results[2][i, 0], results[3][i, 0])
    print('--------------------------')
The code produces: