I am trying to run a grid search on an ANN model. The model on its own works fine, but when I try to tune it with Grid Search I get the error:
TypeError: can't pickle _thread.RLock objects
I have seen in other threads that Keras models cannot be pickled, and that one parameter people often get wrong is "n_jobs = -1", but just to be sure I explicitly set it to 1.
import pandas as pd
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import train_test_split
# Reading data
X_dum = pd.read_csv("X_dum.csv")
Y = pd.read_csv("Y.csv")
# Splitting the dataset into the Training set and Test set
X_train, X_test, y_train, y_test = train_test_split(X_dum, Y, test_size = 0.2)
def createANN():
    # Initialising the ANN
    model = Sequential()
    model.add(Dense(units = 200, kernel_initializer = 'uniform', activation = 'relu', input_dim = 224))
    model.add(Dense(units = 150, kernel_initializer = 'uniform', activation = 'relu'))
    model.add(Dense(units = 100, kernel_initializer = 'uniform', activation = 'relu'))
    model.add(Dense(units = 50, kernel_initializer = 'uniform', activation = 'relu'))
    model.add(Dense(units = 1, kernel_initializer = 'uniform', activation = 'sigmoid'))
    # Compiling the ANN
    model.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
    return model
print("Grid Search - ANN")
# Wrap Keras model so it can be used by scikit-learn
ann_sk = KerasClassifier(build_fn = createANN(), epochs = 5, batch_size = 10)
# Applying Grid Search to find the best model and the best parameters
parameters = [{'batch_size': [10, 100]}]
grid_search = GridSearchCV(estimator = ann_sk, param_grid = parameters, cv = 3, n_jobs = 1)
grid_search = grid_search.fit(X_train, y_train)
ann_best_accuracy = grid_search.best_score_
ann_best_parameters = grid_search.best_params_
# Results
print("Accuracy: {0}".format(ann_best_accuracy))
print("Parameters: {0}".format(ann_best_parameters))
I get the following error trace:
File "C:\ProgramData\Anaconda3\lib\site-packages\spyder_kernels\customize\spydercustomize.py", line 827, in runfile
execfile(filename, namespace)
File "C:\ProgramData\Anaconda3\lib\site-packages\spyder_kernels\customize\spydercustomize.py", line 110, in execfile
exec(compile(f.read(), filename, 'exec'), namespace)
File "C:/XXXXXXXXXXXXX/GridTest.py", line 37, in <module>
grid_search = grid_search.fit(X_train, y_train)
File "C:\ProgramData\Anaconda3\lib\site-packages\sklearn\model_selection\_search.py", line 632, in fit
base_estimator = clone(self.estimator)
File "C:\ProgramData\Anaconda3\lib\site-packages\sklearn\base.py", line 64, in clone
new_object_params[name] = clone(param, safe=False)
File "C:\ProgramData\Anaconda3\lib\site-packages\sklearn\base.py", line 55, in clone
return copy.deepcopy(estimator)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 180, in deepcopy
y = _reconstruct(x, memo, *rv)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 280, in _reconstruct
state = deepcopy(state, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 150, in deepcopy
y = copier(x, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 240, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 180, in deepcopy
y = _reconstruct(x, memo, *rv)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 280, in _reconstruct
state = deepcopy(state, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 150, in deepcopy
y = copier(x, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 240, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 180, in deepcopy
y = _reconstruct(x, memo, *rv)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 280, in _reconstruct
state = deepcopy(state, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 150, in deepcopy
y = copier(x, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 240, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 180, in deepcopy
y = _reconstruct(x, memo, *rv)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 280, in _reconstruct
state = deepcopy(state, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 150, in deepcopy
y = copier(x, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 240, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 180, in deepcopy
y = _reconstruct(x, memo, *rv)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 280, in _reconstruct
state = deepcopy(state, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 150, in deepcopy
y = copier(x, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 240, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 180, in deepcopy
y = _reconstruct(x, memo, *rv)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 280, in _reconstruct
state = deepcopy(state, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 150, in deepcopy
y = copier(x, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 240, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "C:\ProgramData\Anaconda3\lib\copy.py", line 169, in deepcopy
rv = reductor(4)
TypeError: can't pickle _thread.RLock objects
I really cannot see the mistake here.
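In case it helps to reproduce this without my data, here is a stripped-down version of the same setup using random dummy arrays (the array shapes and layer sizes are just placeholders, not my real data); it raises the same TypeError for me:
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV

# Random dummy data standing in for my CSVs (placeholder shapes)
X = np.random.rand(100, 224)
y = np.random.randint(0, 2, size=(100,))

def build_model():
    # Tiny network, same pattern of calls as createANN()
    model = Sequential()
    model.add(Dense(units = 10, kernel_initializer = 'uniform', activation = 'relu', input_dim = 224))
    model.add(Dense(units = 1, kernel_initializer = 'uniform', activation = 'sigmoid'))
    model.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
    return model

# Same wrapping and grid search calls as in the full script above
ann_sk = KerasClassifier(build_fn = build_model(), epochs = 1, batch_size = 10)
grid_search = GridSearchCV(estimator = ann_sk, param_grid = [{'batch_size': [10, 100]}], cv = 3, n_jobs = 1)
grid_search.fit(X, y)  # fails here with: TypeError: can't pickle _thread.RLock objects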