深度推荐模型——FiBiNet[RecSys 19][Weibo]

2021-03-20 14:26:50 浏览数 (2)

微博提出的FiBiNet相当于对FNN进行了两部分的改进: 1、SENET Layer。作者认为模型需要学习不同特征的一个重要程度,对重要特征加权,对蕴含信息量不多的特征进行削弱

先对e(Embedding) 做 mean pooling,再做Excitation(类似计算出Attention):

最后元素相乘得到v

2、传统的特征交叉方式广泛采用了内积和哈达玛积,而这两种方式在稀疏数据上很难有效对特征交叉进行建模。 文章提出Bilinear-Interaction,即结合内积和哈达玛积并引入一个额外的参数矩阵W来学习特征交叉:

代码语言:python复制
import tensorflow as tf
from tensorflow import keras
from utils import *

EPOCH = 10
BATCH_SIZE = 32
VEC_DIM = 10  # embedding dimension of each feature field
DNN_LAYERS = [64, 128, 64]  # hidden-layer widths of the final DNN tower
DROPOUT_RATE = 0.5
R = 2 # SENET compression ratio (bottleneck factor of the first excitation layer)

base, test = loadData()
# Total number of distinct category values summed over all features
# (per the original comment; computed here as column count minus the label column).
FEAT_CATE_NUM = base.shape[1] - 1
K = tf.keras.backend


def _split_fields(emb, field_dim):
    """Split a [-1, field_dim, VEC_DIM] embedding tensor into a list of
    field_dim flat [-1, VEC_DIM] tensors, one per feature field."""
    return [keras.layers.Flatten()(s) for s in tf.split(emb, field_dim, 1)]


def _bilinear_interactions(field_embs):
    """Bilinear-Interaction layer of FiBiNet.

    For every field pair (i, j) with i < j, compute (v_i W) * v_j: project
    v_i with a learned VEC_DIM x VEC_DIM matrix W (realized as a Dense layer
    without activation), then take the Hadamard product with v_j.

    field_embs: list of [-1, VEC_DIM] tensors, one per field.
    Returns: list of [-1, VEC_DIM] interaction tensors.
    """
    interactions = []
    for i in range(len(field_embs)):
        # BUG FIX: the pasted source read "range(i   1, ...)" — the '+' was
        # lost in copying; restored to i + 1.
        for j in range(i + 1, len(field_embs)):
            # A fresh Dense (fresh W) per pair: the "field-interaction"
            # variant of the bilinear layer.
            vi_w = keras.layers.Dense(VEC_DIM)(field_embs[i])
            interactions.append(vi_w * field_embs[j])
    return interactions


def run():
    """Build, train, and validate the FiBiNet model.

    All feature category values share one id space; each row of x holds the
    id of every field's category value (see utils.getAllData — TODO confirm).
    """
    val_x, val_y = getAllData(test)
    train_x, train_y = getAllData(base)
    field_dim = val_x[0].shape[0]

    inputs = keras.Input((field_dim,))
    # Shared embedding table: [-1, field_dim, VEC_DIM]
    emb = keras.layers.Embedding(FEAT_CATE_NUM, VEC_DIM, input_length=field_dim)(inputs)

    # --- SENET layer ---
    # Squeeze: mean-pool each field's embedding vector to a scalar.
    z = tf.reduce_mean(emb, axis=2)  # [-1, field_dim]
    # Excitation: bottleneck (field_dim / R) then expand back to field_dim,
    # producing a per-field attention weight in (0, 1).
    a = keras.layers.Dense(int(field_dim / R), activation='sigmoid')(z)
    a = keras.layers.Dense(field_dim, activation='sigmoid')(a)
    # Re-weight: scale each field embedding by its attention weight.
    v = emb * tf.expand_dims(a, -1)  # [-1, field_dim, vec_dim]

    # Bilinear interactions on both the raw embeddings and the
    # SENET-reweighted embeddings.
    bilinear_list = _bilinear_interactions(_split_fields(emb, field_dim))
    senet_bilinear_list = _bilinear_interactions(_split_fields(v, field_dim))

    # BUG FIX: the pasted source read "bilinear_list   senet_bilinear_list";
    # restored the lost '+' (list concatenation).
    dense = keras.layers.concatenate(bilinear_list + senet_bilinear_list)

    # DNN tower over the concatenated interactions.
    for units in DNN_LAYERS:
        dense = keras.layers.Dense(units, activation='relu')(dense)
        dense = keras.layers.Dropout(DROPOUT_RATE)(dense)
    outputs = keras.layers.Dense(1, activation='sigmoid')(dense)

    model = keras.Model(inputs=inputs, outputs=outputs)
    # NOTE(review): tf.train.AdamOptimizer is TF 1.x API; on TF 2.x replace
    # with keras.optimizers.Adam(0.001).
    model.compile(loss='binary_crossentropy', optimizer=tf.train.AdamOptimizer(0.001),
                  metrics=[keras.metrics.AUC()])
    # NOTE(review): write_grads / embeddings_layer_names / embeddings_metadata
    # are removed in TF 2.x TensorBoard — confirm the TF version in use.
    tb_callback = keras.callbacks.TensorBoard(log_dir='./logs',
                                              histogram_freq=0,
                                              write_graph=True,
                                              write_grads=True,
                                              write_images=True,
                                              embeddings_freq=0,
                                              embeddings_layer_names=None,
                                              embeddings_metadata=None)

    model.fit(train_x, train_y, batch_size=BATCH_SIZE, epochs=EPOCH, verbose=2,
              validation_data=(val_x, val_y), callbacks=[tb_callback])


run()

0 人点赞