TypeError:尝试保存keras模型时无法腌制_thread.RLock对象

时间:2019-06-13 02:27:33

标签: python tensorflow keras

尝试保存我的keras模型时收到错误消息:TypeError: can't pickle _thread.RLock objects。我相信这与我正在使用的lambda层有关,但是我不确定要修复哪些层。

到目前为止(在代码中看到),我已经尝试按照以下问题的答案3进行操作:Checkpointing keras model: TypeError: can't pickle _thread.lock objects

我这样做是通过从lambda中提取参数来避免它们引起问题。但是,这似乎无法解决该错误。我正在Windows 10,使用anaconda的python 3.7上运行。

这是我要保存的模型:

import tensorflow as tf
from keras.layers import Lambda, Add, Multiply, Conv2D, Input
import keras.backend as K
import functools
import keras
from keras.models import Model

def resBlock_Keras(x, channels=64, kernel_size=[3,3], scale=1):
    tmp = Conv2D(channels, kernel_size, activation='relu', padding='same')(x)
    tmp = Conv2D(channels, kernel_size, padding='same')(tmp)
    multiply_by_scale = functools.partial(
        tf.multiply,
        scale
    )
    add_input = functools.partial(
        tf.add,
        x
    )
    tmp = Lambda(lambda q: multiply_by_scale(q))(tmp)
    return Lambda(lambda q: add_input(q))(tmp)

def PS_Keras(X, r, color=False):
    if color:
        split = functools.partial(
            tf.split,
            num_or_size_splits=3,
            axis=3
        )
        concat = functools.partial(
            tf.concat,
            axis=3
        )
        Xc = Lambda(lambda x: split(x))(X)
        shifts = [_phase_shift_keras(x, r) for x in Xc]
        X = Lambda(lambda q: concat(q))(shifts)
    else:
        X = _phase_shift_keras(X, r)
    return X

def _phase_shift_keras(I, r):
    bsize, a, b, c = I.get_shape().as_list()
    bsize = K.shape(I)[0] # Handling Dimension(None) type for undefined batch dim
    X = K.reshape(I, [bsize, a, b, c//(r*r),r, r]) # bsize, a, b, c/(r*r), r, r
    X = K.permute_dimensions(X, (0, 1, 2, 5, 4, 3))  # bsize, a, b, r, r, c/(r*r)
    #Keras backend does not support tf.split, so in future versions this could be nicer
    X = [X[:,i,:,:,:,:] for i in range(a)] # a, [bsize, b, r, r, c/(r*r)
    X = K.concatenate(X, 2)  # bsize, b, a*r, r, c/(r*r)
    X = [X[:,i,:,:,:] for i in range(b)] # b, [bsize, r, r, c/(r*r)
    X = K.concatenate(X, 2)  # bsize, a*r, b*r, c/(r*r)
    return X

def upsample_keras(x,scale=2,features=64,activation='relu'):
    assert scale in [2,3,4]
    x = Conv2D(features,[3,3],activation=activation,padding='same')(x)
    if scale == 2:
        ps_features = 3*(scale**2)
        x = Conv2D(ps_features,[3,3],activation=activation,padding='same')(x)
        ps = functools.partial(
            PS_Keras,
            r=2,
            color=True
        )
        x = Lambda(lambda q: ps(q))(x)
    elif scale == 3:
        ps_features =3*(scale**2)
        x = Conv2D(ps_features,[3,3],activation=activation,padding='same')(x)
        ps = functools.partial(
            PS_Keras,
            r=3,
            color=True
        )
        x = Lambda(lambda q: ps(q))(x)
    elif scale == 4:
        ps_features = 3*(2**2)
        ps = functools.partial(
            PS_Keras,
            r=2,
            color=True
        )
        for i in range(2):
            x = Conv2D(ps_features,[3,3],activation=activation,padding='same')(x)
            x = Lambda(lambda q: ps(q))(x)
    return x

def subtract_mean(x):
    return tf.subtract(x,127.0)

def build_model(img_size=32,num_layers=32,feature_size=64,scale=2,output_channels=3):
    MEAN_PIXEL = 127.0
    MIN_PIXEL = 0.0
    MAX_PIXEL = 255.0

    scaling_factor = 0.1
    img_size = img_size
    scale=scale
    input = Input(shape=(img_size,img_size,output_channels))
    curr_layer = Lambda(lambda x: subtract_mean(x))(input)
    first_conv = Conv2D(feature_size, [3,3], padding='same')(curr_layer)
    curr_layer = first_conv
    for i in range(num_layers):
        curr_layer = resBlock_Keras(curr_layer, feature_size, scale=scaling_factor)
    curr_layer = Conv2D(feature_size, [3,3],padding='same')(curr_layer)
    curr_layer = Add()([curr_layer, first_conv])

    clip_by_value = functools.partial(
    tf.clip_by_value,
    clip_value_min=MIN_PIXEL,
    clip_value_max=MAX_PIXEL)

    upsample = functools.partial(
        upsample_keras,
        scale = scale,
        features = feature_size,
        activation=None
    )

    curr_layer = Lambda(lambda x: upsample(x))(curr_layer)
    output = Lambda(lambda x: clip_by_value(x))(curr_layer)

    model = Model(inputs=input, outputs=output)
    model.compile(loss='mae',optimizer='adam')
    model.summary()
    model.save("model.h5")

build_model()

这是我收到的完整堆栈跟踪记录:

Traceback (most recent call last):
  File "tmp.py", line 125, in <module>
    build_model()
  File "tmp.py", line 123, in build_model
    model.save("model.h5")
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\site-packages\keras\engine\network.py", line 1090, in save
    save_model(self, filepath, overwrite, include_optimizer)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\site-packages\keras\engine\saving.py", line 382, in save_model
    _serialize_model(model, f, include_optimizer)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\site-packages\keras\engine\saving.py", line 83, in _serialize_model
    model_config['config'] = model.get_config()
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\site-packages\keras\engine\network.py", line 931, in get_config
    return copy.deepcopy(config)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 240, in _deepcopy_dict
    y[deepcopy(key, memo)] = deepcopy(value, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 215, in _deepcopy_list
    append(deepcopy(a, memo))
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 240, in _deepcopy_dict
    y[deepcopy(key, memo)] = deepcopy(value, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 240, in _deepcopy_dict
    y[deepcopy(key, memo)] = deepcopy(value, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 220, in _deepcopy_tuple
    y = [deepcopy(a, memo) for a in x]
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 220, in <listcomp>
    y = [deepcopy(a, memo) for a in x]
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 220, in _deepcopy_tuple
    y = [deepcopy(a, memo) for a in x]
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 220, in <listcomp>
    y = [deepcopy(a, memo) for a in x]
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 180, in deepcopy
    y = _reconstruct(x, memo, *rv)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 280, in _reconstruct
    state = deepcopy(state, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 220, in _deepcopy_tuple
    y = [deepcopy(a, memo) for a in x]
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 220, in <listcomp>
    y = [deepcopy(a, memo) for a in x]
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 220, in _deepcopy_tuple
    y = [deepcopy(a, memo) for a in x]
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 220, in <listcomp>
    y = [deepcopy(a, memo) for a in x]
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 180, in deepcopy
    y = _reconstruct(x, memo, *rv)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 280, in _reconstruct
    state = deepcopy(state, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 240, in _deepcopy_dict
    y[deepcopy(key, memo)] = deepcopy(value, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 180, in deepcopy
    y = _reconstruct(x, memo, *rv)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 280, in _reconstruct
    state = deepcopy(state, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 240, in _deepcopy_dict
    y[deepcopy(key, memo)] = deepcopy(value, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 180, in deepcopy
    y = _reconstruct(x, memo, *rv)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 280, in _reconstruct
    state = deepcopy(state, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 240, in _deepcopy_dict
    y[deepcopy(key, memo)] = deepcopy(value, memo)
  File "C:\Users\pwatm\Anaconda3\envs\gpu\lib\copy.py", line 169, in deepcopy
    rv = reductor(4)
TypeError: can't pickle _thread.RLock objects

0 个答案:

没有答案