How to view the layers of a sub-model inside the parent model with tf.keras.Model.summary?

Asked: 2019-11-01 09:39:35

Tags: python tensorflow machine-learning keras neural-network

I have a sub-model inside a tf.keras.Model; the code is below:

import tensorflow as tf


class Mymodel(tf.keras.Model):

    def __init__(self, classes, backbone_model, *args, **kwargs):
        super(Mymodel, self).__init__(self, args, kwargs)
        self.backbone = backbone_model
        self.classify_layer = tf.keras.layers.Dense(classes,activation='sigmoid')

    def call(self, inputs):
        x = self.backbone(inputs)
        x = self.classify_layer(x)
        return x

inputs = tf.keras.Input(shape=(224, 224, 3))
model = Mymodel(inputs=inputs, classes=61, 
                backbone_model=tf.keras.applications.MobileNet())
model.build(input_shape=(20, 224, 224, 3))
model.summary()

The result is:

_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
mobilenet_1.00_224 (Model)   (None, 1000)              4253864   
_________________________________________________________________
dense (Dense)                multiple                  61061     
=================================================================
Total params: 4,314,925
Trainable params: 4,293,037
Non-trainable params: 21,888
_________________________________________________________________

But I want to see all of MobileNet's layers, so I tried to extract all of the MobileNet layers and put them into the model:

import tensorflow as tf


class Mymodel(tf.keras.Model):

    def __init__(self, classes, backbone_model, *args, **kwargs):
        super(Mymodel, self).__init__(self, args, kwargs)
        self.backbone = backbone_model
        self.classify_layer = tf.keras.layers.Dense(classes,activation='sigmoid')

    def my_process_layers(self,inputs):
        layers = self.backbone.layers
        tmp_x = inputs
        for i in range(1,len(layers)):
            tmp_x = layers[i](tmp_x)
        return tmp_x

    def call(self, inputs):
        x = self.my_process_layers(inputs)
        x = self.classify_layer(x)
        return x

inputs = tf.keras.Input(shape=(224, 224, 3))
model = Mymodel(inputs=inputs, classes=61, 
                backbone_model=tf.keras.applications.MobileNet())
model.build(input_shape=(20, 224, 224, 3))
model.summary()

Then the result was not changed:

_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
mobilenet_1.00_224 (Model)   (None, 1000)              4253864   
_________________________________________________________________
dense (Dense)                multiple                  61061     
=================================================================
Total params: 4,314,925
Trainable params: 4,293,037
Non-trainable params: 21,888
_________________________________________________________________

Then I tried to extract just one layer and insert it into the model:

import tensorflow as tf


class Mymodel(tf.keras.Model):

    def __init__(self, classes, backbone_model, *args, **kwargs):
        super(Mymodel, self).__init__(self, args, kwargs)
        self.backbone = backbone_model
        self.classify_layer = tf.keras.layers.Dense(classes,activation='sigmoid')

    def call(self, inputs):
        x = self.backbone.layers[1](inputs)
        x = self.classify_layer(x)
        return x

inputs = tf.keras.Input(shape=(224, 224, 3))
model = Mymodel(inputs=inputs, classes=61, 
                backbone_model=tf.keras.applications.MobileNet())
model.build(input_shape=(20, 224, 224, 3))
model.summary()

It also did not change, and I am confused. However, I found that the parameters of the dense layer did change, and I don't know what happened.

2 Answers:

Answer 0 (score: 2):

To be able to see the backbone's layers, you will have to build a new model using backbone.input and backbone.output:

import tensorflow as tf
from tensorflow.keras.models import Model

def Mymodel(backbone_model, classes):
    backbone = backbone_model
    x = backbone.output
    x = tf.keras.layers.Dense(classes, activation='sigmoid')(x)
    model = Model(inputs=backbone.input, outputs=x)
    return model

input_shape = (224, 224, 3)
model = Mymodel(backbone_model=tf.keras.applications.MobileNet(input_shape=input_shape, include_top=False, pooling='avg'),
                classes=61)

model.summary()
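
Note that, unlike the code in the question, the backbone here is created with include_top=False and pooling='avg', so MobileNet ends in a 1024-dimensional global-average-pooled feature vector rather than its 1000-way ImageNet classifier; the dense head therefore has 1024 * 61 + 61 = 62,525 parameters instead of the 61,061 shown in the question's summary. Built through the functional API like this, model.summary() lists every MobileNet layer individually, followed by the new dense layer.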

Answer 1 (score: 2):

@Ioannis' answer is perfectly fine, but unfortunately it drops the Keras 'Model Subclassing' structure that is present in the question. If, like me, you want to keep that model subclassing and still show all the layers in summary, you can branch down into all the individual layers of the more complex model using a for loop:

import tensorflow as tf

class MyMobileNet(tf.keras.Sequential):
    def __init__(self, input_shape=(224, 224, 3), classes=61):
        super(MyMobileNet, self).__init__()
        self.backbone_model = [layer for layer in
               tf.keras.applications.MobileNet(input_shape, include_top=False, pooling='avg').layers]
        self.classificator = tf.keras.layers.Dense(classes, activation='sigmoid', name='classificator')

    def call(self, inputs):
        x = inputs
        for layer in self.backbone_model:
            x = layer(x)
        x = self.classificator(x)
        return x

model = MyMobileNet()

After this we can directly build the model and call summary:

model.build(input_shape=(None, 224, 224, 3))
model.summary()

Model: "my_mobile_net"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv1_pad (ZeroPadding2D)    (None, 225, 225, 3)       0         
_________________________________________________________________
conv1 (Conv2D)               (None, 112, 112, 32)      864       
_________________________________________________________________
conv1_bn (BatchNormalization (None, 112, 112, 32)      128       
_________________________________________________________________
....
....
conv_pw_13 (Conv2D)          (None, 7, 7, 1024)        1048576   
_________________________________________________________________
conv_pw_13_bn (BatchNormaliz (None, 7, 7, 1024)        4096      
_________________________________________________________________
conv_pw_13_relu (ReLU)       (None, 7, 7, 1024)        0         
_________________________________________________________________
global_average_pooling2d_13  (None, 1024)              0         
_________________________________________________________________
classificator (Dense)        multiple                  62525     
=================================================================
Total params: 3,291,389
Trainable params: 3,269,501
Non-trainable params: 21,888
_________________________________________________________________
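
If you would rather leave the subclassed Mymodel from the question completely untouched, note that the backbone attribute is itself a Keras Model, so you can also print its own summary directly. A minimal sketch, assuming the Mymodel class defined in the question:

import tensorflow as tf

# Assumes the Mymodel class from the question is already defined.
model = Mymodel(classes=61, backbone_model=tf.keras.applications.MobileNet())
model.build(input_shape=(None, 224, 224, 3))

model.summary()           # the backbone still appears as a single row here
model.backbone.summary()  # the backbone's own summary lists every MobileNet layer

This does not change what the parent model's summary prints; it only gives a separate, complete listing of the nested MobileNet layers.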