import tensorflow as tf

# These helper imports assume an older DeepCTR release that still exposes them;
# adjust the module paths to your installed version if needed.
from deepctr.input_embedding import preprocess_input_embedding
from deepctr.utils import check_feature_config_dict


def NCF(feature_dim_dict, num_users, num_items, mf_dim=10, layers=[10], reg_layers=[0], reg_mf=0,
        embedding_size=8, l2_reg_linear=0.00001, l2_reg_embedding=0.00001, init_std=0.0001,
        seed=1024, task='binary'):
    assert len(layers) == len(reg_layers)
    check_feature_config_dict(feature_dim_dict)
    deep_emb_list, linear_emb_list, dense_input_dict, inputs_list = preprocess_input_embedding(
        feature_dim_dict, embedding_size, l2_reg_embedding, l2_reg_linear,
        init_std, seed, create_linear_weight=True)
    num_layer = len(layers)
    # deep_emb_list follows the order of feature_dim_dict; here index 0 is taken
    # as the item (movie) embedding and index 1 as the user embedding.
    movie_emb = deep_emb_list[0]
    user_emb = deep_emb_list[1]
    # Keep inputs_list from preprocess_input_embedding: it holds the tf.keras
    # Input layers the Model below needs (embedding outputs are not valid inputs).
    # GMF branch: element-wise product of the flattened user and item embeddings.
    mf_user_latent = tf.keras.layers.Flatten()(user_emb)
    mf_item_latent = tf.keras.layers.Flatten()(movie_emb)
    mf_vector = tf.multiply(mf_user_latent, mf_item_latent)

    # MLP branch: concatenate the flattened user and item embeddings.
    mlp_user_latent = tf.keras.layers.Flatten()(user_emb)
    mlp_item_latent = tf.keras.layers.Flatten()(movie_emb)
    mlp_vector = tf.keras.layers.Concatenate(axis=-1)([mlp_user_latent, mlp_item_latent])
    # MLP tower; layers[0] is the width of the concatenated input, so start at 1.
    for idx in range(1, num_layer):
        layer = tf.keras.layers.Dense(layers[idx],
                                      kernel_regularizer=tf.keras.regularizers.l2(reg_layers[idx]),
                                      activation='relu', name="layer%d" % idx)
        mlp_vector = layer(mlp_vector)
    # Fuse the GMF and MLP branches and predict; a sigmoid output matches the
    # binary (implicit-feedback) task indicated by the default arguments.
    predict_vector = tf.keras.layers.Concatenate(axis=-1)([mf_vector, mlp_vector])
    prediction = tf.keras.layers.Dense(1, activation='sigmoid',
                                       kernel_initializer='lecun_uniform',
                                       name='prediction')(predict_vector)

    model = tf.keras.models.Model(inputs=inputs_list, outputs=prediction)
    return model
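

# A minimal usage sketch, not from the original: it assumes an older DeepCTR
# release where the feature config is built from SingleFeat namedtuples (some
# releases used a plain {'name': vocab_size} dict instead). Feature order must
# match the indexing inside NCF: movie first, then user. All sizes and data
# below are illustrative placeholders.
import numpy as np
from deepctr import SingleFeat

num_users, num_items = 1000, 2000
feature_dim_dict = {
    "sparse": [SingleFeat('movie_id', num_items), SingleFeat('user_id', num_users)],
    "dense": [],
}

model = NCF(feature_dim_dict, num_users, num_items,
            layers=[64, 32, 16, 8], reg_layers=[0, 0, 0, 0])
model.compile(optimizer='adam', loss='binary_crossentropy',
              metrics=[tf.keras.metrics.AUC()])

# Model inputs follow feature_dim_dict['sparse'] order: [movie ids, user ids].
movie_ids = np.random.randint(0, num_items, size=1024)
user_ids = np.random.randint(0, num_users, size=1024)
labels = np.random.randint(0, 2, size=1024)  # implicit-feedback labels
model.fit([movie_ids, user_ids], labels, batch_size=256, epochs=1)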