我正在努力在 tensorflow 中重现一个简单的代码。
我有自己定义的函数用作模型的度量。
这是一个简单的三元组损失函数(稍作修改),但如果我使用普通函数,问题还是一样的。
import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow.keras.activations import sigmoid
from tensorflow.keras import backend
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import Input, Embedding, Flatten, Dense, Dropout, Lambda, dot, concatenate
@tf.function
def bpr_triplet_loss(inputs):
    """Return a (slightly modified) BPR triplet loss.

    Expects `inputs` to be an iterable of exactly three equally-shaped
    tensors: (anchor, positive-item, negative-item) latent vectors.
    Produces `1 - sigmoid(pos_score - neg_score)` with shape
    (batch, 1) thanks to `keepdims=True`.
    """
    anchor, positive, negative = inputs
    # Dot products along the embedding axis, one score per example.
    pos_score = backend.sum(anchor * positive, axis=-1, keepdims=True)
    neg_score = backend.sum(anchor * negative, axis=-1, keepdims=True)
    return 1.0 - backend.sigmoid(pos_score - neg_score)
def getModel(n_users, n_items, emb_dim=20):
    """Build and compile a BPR triplet model.

    Parameters
    ----------
    n_users : int
        Number of distinct users (embedding table gets n_users + 1 rows).
    n_items : int
        Number of distinct items (embedding table gets n_items + 1 rows).
    emb_dim : int, optional
        Dimensionality of the user/item latent vectors (default 20).

    Returns
    -------
    A compiled `Model` taking [user, positive item, negative item] id
    inputs and outputting the per-example triplet loss of shape (batch, 1).
    """
    # Input layers: one integer id per example for each leg of the triplet.
    user_input = Input(shape=[1], name='user_input')
    pos_item_input = Input(shape=[1], name='pos_item_input')
    neg_item_input = Input(shape=[1], name='neg_item_input')

    # Embedding layers; the item embedding is shared between the positive
    # and negative items so they live in the same latent space.
    user_embedding = Embedding(output_dim=emb_dim, input_dim=n_users + 1,
                               input_length=1, name='user_emb')(user_input)
    item_embedding = Embedding(output_dim=emb_dim, input_dim=n_items + 1,
                               input_length=1, name='item_emb')
    pos_item_embedding = item_embedding(pos_item_input)
    neg_item_embedding = item_embedding(neg_item_input)

    # Drop the length-1 sequence axis: (batch, 1, emb_dim) -> (batch, emb_dim).
    user_vecs = Flatten()(user_embedding)
    pos_item_vecs = Flatten()(pos_item_embedding)
    neg_item_vecs = Flatten()(neg_item_embedding)

    # BUG FIX: the original concatenated the three vectors into one
    # (batch, 3*emb_dim) tensor; unpacking that inside bpr_triplet_loss
    # iterates along axis 0 and fails. Pass the list of three tensors so
    # the tuple-unpack in the loss function receives them directly.
    loss = Lambda(bpr_triplet_loss, output_shape=(1,))(
        [user_vecs, pos_item_vecs, neg_item_vecs])

    # BUG FIX: the original referenced undefined names `anchor`,
    # `positive`, `negative` — use the actual Input tensors.
    model = Model(inputs=[user_input, pos_item_input, neg_item_input],
                  outputs=loss)
    model.compile(optimizer='Adam', loss='mse', metrics=['mae'])
    # BUG FIX: the original never returned the model.
    return model
当我运行此代码时,我收到以下错误(这开始让人沮丧了)。
注意:我使用的是 Tensorflow 2.0.0,此时 tf.executing_eagerly() 的返回值为 True。
明月笑刀无情
相关分类