(TalosReturnError) Talos: make sure that the input model returns 'out, model' from model.fit()

Date: 2019-01-26 10:50:17

Tags: keras hyperparameters talos

I am trying to tune the hyperparameters of a CNN model with the talos library, but I keep getting an error telling me to make sure the function returns 'out, model', even though my function does return both variables.

I have followed several articles, and the same kind of code works fine there. I am writing the code in a Kaggle notebook.

from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Activation, Flatten, Dense
from keras import regularizers

def Talos_Model(X_train, y_train, X_test, y_test, params):
    # unpack the hyperparameters from the params dict
    lr = params['lr']
    epochs=params['epochs']
    dropout_rate=params['dropout']
    optimizer=params['optimizer']
    loss=params['loss']
    last_activation=params['last_activation']
    activation=params['activation']
    clipnorm=params['clipnorm']
    decay=params['decay']
    momentum=params['momentum']
    l1=params['l1']
    l2=params['l2']
    No_of_CONV_and_Maxpool_layers=params['No_of_CONV_and_Maxpool_layers']
    No_of_Dense_Layers =params['No_of_Dense_Layers']
    No_of_Units_in_dense_layers=params['No_of_Units_in_dense_layers']
    Kernal_Size=params['Kernal_Size']
    Conv2d_filters=params['Conv2d_filters']
    pool_size_p=params['pool_size']
    padding_p=params['padding']

    #model sequential
    model=Sequential()

    for i in range(0,No_of_CONV_and_Maxpool_layers):
        model.add(Conv2D(Conv2d_filters, Kernal_Size ,padding=padding_p))
        model.add(Activation(activation))
        model.add(MaxPooling2D(pool_size=pool_size_p,strides=(2,2)))


    model.add(Flatten())

    for i in range (0,No_of_Dense_Layers):
        model.add(Dense(units=No_of_Units_in_dense_layers,activation=activation, kernel_regularizer=regularizers.l2(l2),
                  activity_regularizer=regularizers.l1(l1)))


    model.add(Dense(units=20,activation=activation))

    model.add(Dense(units=2,activation=activation))

    model.compile(loss=loss,optimizer=params['optimizer'](lr=lr, decay=decay, momentum=momentum),
                 metrics=['accuracy'])

    out = model.fit(X_train, y_train, epochs=params['epochs'])

    return out,model

import talos as ta

params = {'lr': (0.1, 0.01, 1),
          'epochs': [10, 5, 15],
          'dropout': (0, 0.40, 0.8),
          'optimizer': ["Adam", "Adagrad", "sgd"],
          'loss': ["binary_crossentropy", "mean_squared_error", "mean_absolute_error", "squared_hinge"],
          'last_activation': ["softmax", "sigmoid"],
          'activation': ["relu", "selu", "linear"],
          'clipnorm': (0.0, 0.5, 1),
          'decay': (1e-6, 1e-4, 1e-2),
          'momentum': (0.9, 0.5, 0.2),
          'l1': (0.01, 0.001, 0.0001),
          'l2': (0.01, 0.001, 0.0001),
          'No_of_CONV_and_Maxpool_layers': [2, 3, 4],
          'No_of_Dense_Layers': [2, 3, 4],
          'No_of_Units_in_dense_layers': [128, 64, 32, 256],
          'Kernal_Size': [(2, 2), (4, 4), (6, 6)],
          'Conv2d_filters': [60, 40, 80, 120],
          'pool_size': [(2, 2), (4, 4), (6, 6)],
          'padding': ["valid", "same"]
          }

h = ta.Scan(X_train, y_train, params=params,
            model=Talos_Model,
            dataset_name='DR',
            experiment_no='1',
            grid_downsample=.01)
Thank you for taking this into consideration.

Error traceback:

  0%|          | 0/5598 [00:00<?, ?it/s]
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
/opt/conda/lib/python3.6/site-packages/talos/scan/scan_round.py in scan_round(self)
     31     try:
---> 32         _hr_out, self.keras_model = ingest_model(self)
     33     except TypeError as err:

/opt/conda/lib/python3.6/site-packages/talos/model/ingest_model.py in ingest_model(self)
      9                       self.y_val,
---> 10                       self.round_params)

<ipython-input-93-d0c3779dc659> in Talos_Model(X_train, y_train, X_test, y_test, params)
     43 
---> 44     model.compile(loss=loss,optimizer=params['optimizer'](lr=lr, decay=decay, momentum=momentum),
     45                  metrics=['accuracy'])

TypeError: 'str' object is not callable

During handling of the above exception, another exception occurred:

TalosReturnError                          Traceback (most recent call last)
<ipython-input-95-5853eb1b121e> in <module>()
      3             dataset_name='DR',
      4             experiment_no='1',
----> 5             grid_downsample=.01)

/opt/conda/lib/python3.6/site-packages/talos/scan/Scan.py in __init__(self, x, y, params, model, dataset_name, experiment_no, x_val, y_val, val_split, shuffle, round_limit, grid_downsample, random_method, seed, search_method, reduction_method, reduction_interval, reduction_window, reduction_threshold, reduction_metric, reduce_loss, last_epoch_value, clear_tf_session, disable_progress_bar, print_params, debug)
    161         # input parameters section ends
    162 
--> 163         self._null = self.runtime()
    164 
    165     def runtime(self):

/opt/conda/lib/python3.6/site-packages/talos/scan/Scan.py in runtime(self)
    166 
    167         self = scan_prepare(self)
--> 168         self = scan_run(self)

/opt/conda/lib/python3.6/site-packages/talos/scan/scan_run.py in scan_run(self)
     18                      disable=self.disable_progress_bar)
     19     while len(self.param_log) != 0:
---> 20         self = scan_round(self)
     21         self.pbar.update(1)
     22     self.pbar.close()

/opt/conda/lib/python3.6/site-packages/talos/scan/scan_round.py in scan_round(self)
     35             raise TalosTypeError("Activation should be as object and not string in params")
     36         else:
---> 37             raise TalosReturnError("Make sure that input model returns 'out, model' where out is history object from model.fit()")
     38 
     39     # set end time and log

TalosReturnError: Make sure that input model returns 'out, model' where out is history object from model.fit()

1 answer:

Answer 0 (score: 2)

Sorry, my mistake: I was passing invalid arguments to the optimizer here:

model.compile(loss=loss, optimizer=params['optimizer'](lr=lr, decay=decay, momentum=momentum),
              metrics=['accuracy'])

Here, in optimizer=params['optimizer'](lr=lr, decay=decay, momentum=momentum), params['optimizer'] holds a string value such as "Adam", and a string cannot be called with arguments like (lr=lr, decay=decay, momentum=momentum). So the optimizer has to be prepared before the compile call, which can be done like this:

import keras

# params['optimizer'] is a string, so build the actual optimizer object from it
optimizer = params["optimizer"]

if optimizer == "Adam":
    opt = keras.optimizers.Adam(lr=lr, decay=decay, beta_1=0.9, beta_2=0.999)
elif optimizer == "Adagrad":
    opt = keras.optimizers.Adagrad(lr=lr, epsilon=None, decay=decay)
elif optimizer == "sgd":
    opt = keras.optimizers.SGD(lr=lr, momentum=momentum, decay=decay, nesterov=False)
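
With opt prepared this way, the optimizer object (rather than the string) is what gets passed to compile. A minimal sketch of the corrected tail of Talos_Model, reusing the variables already unpacked from params earlier in the function:

# compile with the optimizer object built above instead of the raw string
model.compile(loss=loss, optimizer=opt, metrics=['accuracy'])

# Talos expects the history object from model.fit() and the model itself
out = model.fit(X_train, y_train, epochs=epochs)

return out, model

A dict mapping the optimizer names in params to their keras.optimizers constructors would avoid the if/elif chain, but the idea is the same.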