[英]No attribute 'compile', how can I modify the class, so that it works?
NeuMF 类不是 Keras 的类，因此它不提供任何编译（compile）方法。我最好使用 keras.Model 而不是 nn.Block。
不幸的是，我不太明白 nn.Block 是什么以及如何在类中替换它。我应该如何修改我的代码，以便它能与 keras.Model 一起工作并且可以使用 Keras 的方法？
这是我的代码:
from d2l import mxnet as d2l
from mxnet import autograd, gluon, np, npx
from mxnet.gluon import nn
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
class NeuMF(nn.Block):
    """NeuMF (Neural Matrix Factorization) recommender model (mxnet/gluon).

    Combines a generalized matrix-factorization (GMF) branch — the
    element-wise product of user/item embeddings — with an MLP branch
    over concatenated embeddings, and merges both in a final sigmoid
    prediction layer.

    NOTE(review): being an mxnet ``nn.Block``, this class has no Keras
    ``compile`` method — exactly the AttributeError discussed below.
    NOTE(review): the page's markup stripped the double underscores and
    asterisks (``init`` / bare ``kwargs``); restored here as
    ``__init__`` / ``**kwargs``, matching the intact second snippet
    further down the page.
    """

    def __init__(self, num_factors, num_users, num_items, nums_hiddens,
                 **kwargs):
        super(NeuMF, self).__init__(**kwargs)
        # GMF-branch embeddings
        self.P = nn.Embedding(num_users, num_factors)
        self.Q = nn.Embedding(num_items, num_factors)
        # MLP-branch embeddings
        self.U = nn.Embedding(num_users, num_factors)
        self.V = nn.Embedding(num_items, num_factors)
        self.mlp = nn.Sequential()
        for num_hiddens in nums_hiddens:
            self.mlp.add(nn.Dense(num_hiddens, activation='relu',
                                  use_bias=True))
        self.prediction_layer = nn.Dense(1, activation='sigmoid',
                                         use_bias=False)

    def forward(self, user_id, item_id):
        p_mf = self.P(user_id)
        q_mf = self.Q(item_id)
        gmf = p_mf * q_mf  # element-wise product (GMF branch)
        p_mlp = self.U(user_id)
        q_mlp = self.V(item_id)
        mlp = self.mlp(np.concatenate([p_mlp, q_mlp], axis=1))
        con_res = np.concatenate([gmf, mlp], axis=1)
        return self.prediction_layer(con_res)
# NOTE(review): num_users and num_items must be defined before this
# point — they are not shown anywhere in the question.
hidden = [5, 5, 5]
model = NeuMF(5, num_users, num_items, hidden)
# This is the failing call: an mxnet nn.Block has no Keras `compile`.
model.compile(
    # loss=tf.keras.losses.BinaryCrossentropy(),
    loss=tf.keras.losses.MeanSquaredError(),
    optimizer=keras.optimizers.Adam(lr=0.001)
)
我收到以下错误:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-21-5979072369bd> in <module>()
2
3 model = NeuMF(5, num_users, num_items, hidden)
----> 4 model.compile(
5 #loss=tf.keras.losses.BinaryCrossentropy(),
6 loss=tf.keras.losses.MeanSquaredError(),
AttributeError: 'NeuMF' object has no attribute 'compile'
非常感谢您提前!
编辑:
我将nn
替换为layers
class NeuMF(keras.Model):
    """Keras port of NeuMF (the questioner's first attempt).

    Kept behaviorally as posted — its remaining problems are what the
    answer below addresses:
    * Keras never invokes a method named ``forward``; subclassed models
      should override ``call``.
    * NOTE(review): ``Sequential`` is exposed as ``keras.Sequential``,
      not ``keras.layers.Sequential`` — presumably an AttributeError
      once that line is reached; TODO confirm for the TF version used.
    * ``np.concatenate`` here is mxnet's np, not a Keras op.
    """

    def __init__(self, num_factors, num_users, num_items, nums_hiddens,
                 **kwargs):
        super(NeuMF, self).__init__(**kwargs)
        self.P = layers.Embedding(num_users, num_factors)
        self.Q = layers.Embedding(num_items, num_factors)
        self.U = layers.Embedding(num_users, num_factors)
        self.V = layers.Embedding(num_items, num_factors)
        self.mlp = layers.Sequential()
        for num_hiddens in nums_hiddens:
            self.mlp.add(layers.Dense(num_hiddens, activation='relu',
                                      use_bias=True))
        self.prediction_layer = layers.Dense(1, activation='sigmoid',
                                             use_bias=False)

    def forward(self, user_id, item_id):
        p_mf = self.P(user_id)
        q_mf = self.Q(item_id)
        gmf = p_mf * q_mf
        p_mlp = self.U(user_id)
        q_mlp = self.V(item_id)
        mlp = self.mlp(np.concatenate([p_mlp, q_mlp], axis=1))
        con_res = np.concatenate([gmf, mlp], axis=1)
        return self.prediction_layer(con_res)
然后我得到了一个新错误:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-26-7e09b0f80300> in <module>()
1 hidden = [1,1,1]
2
----> 3 model = NeuMF(1, num_users, num_items, hidden)
4 model.compile(
5 #loss=tf.keras.losses.BinaryCrossentropy(),
1 frames
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/layers/embeddings.py in __init__(self, input_dim, output_dim, embeddings_initializer, embeddings_regularizer, activity_regularizer, embeddings_constraint, mask_zero, input_length, **kwargs)
102 else:
103 kwargs['input_shape'] = (None,)
--> 104 if input_dim <= 0 or output_dim <= 0:
105 raise ValueError('Both `input_dim` and `output_dim` should be positive, '
106 'found input_dim {} and output_dim {}'.format(
ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
在评论中已经进行了相当多的讨论之后,您的代码仍然存在一些问题,需要您进行澄清:
1. keras.Model 的子类应该实现 __call__ 方法，而不是 forward 方法。
2. 不要使用像 np.concatenate 这样的 numpy 操作，请始终使用像 tf.keras.layers.Concatenate 这样的 keras 层。
3. 看起来 num_factors、num_users、num_items 不是整数，但我只能在这里猜测，因为您没有向我们提供这些值。

让我们以不同的方式处理这些问题。以下代码片段运行没有错误，可能是您的一个很好的起点：
import tensorflow as tf
class NeuMF(tf.keras.Model):
    """NeuMF rebuilt on tf.keras (the answer's working snippet).

    GMF branch: element-wise product of user/item embeddings.
    MLP branch: concatenated user/item embeddings through a dense stack.
    Both branches are concatenated and fed to one sigmoid output unit.
    """

    def __init__(self, num_factors, num_users, num_items, nums_hiddens,
                 **kwargs):
        super(NeuMF, self).__init__(**kwargs)
        # GMF-branch embeddings
        self.P = tf.keras.layers.Embedding(num_users, num_factors)
        self.Q = tf.keras.layers.Embedding(num_items, num_factors)
        # MLP-branch embeddings
        self.U = tf.keras.layers.Embedding(num_users, num_factors)
        self.V = tf.keras.layers.Embedding(num_items, num_factors)
        self.mlp = tf.keras.Sequential()
        for num_hiddens in nums_hiddens:
            self.mlp.add(
                tf.keras.layers.Dense(
                    num_hiddens,
                    activation='relu',
                    use_bias=True
                )
            )
        self.prediction_layer = tf.keras.layers.Dense(
            1, activation='sigmoid', use_bias=False)

    # NOTE(review): the Keras-documented hook for subclassed models is
    # `call`, not `__call__`; overriding `__call__` directly skips
    # Keras's own bookkeeping (build, masking, the `training` flag),
    # although the snippet does run as-is — worth confirming.
    def __call__(self, inputs):
        # inputs[0] = user ids, inputs[1] = item ids; exact shapes are
        # not shown anywhere on this page — TODO confirm with the caller.
        x = self.P(inputs[0])
        x1 = self.Q(inputs[1])
        x = tf.keras.layers.Multiply()([x, x1])      # GMF branch
        y = self.U(inputs[0])
        y1 = self.V(inputs[1])
        y = tf.keras.layers.Concatenate()([y, y1])   # MLP branch
        y = self.mlp(y)
        x = tf.keras.layers.Concatenate()([x, y])
        return self.prediction_layer(x)
if __name__ == '__main__':
    # Replace these with values of your choice:
    num_factors = 2
    num_users = 3
    num_items = 4
    nums_hidden = [5, 5, 5]
    # BUG FIX(review): the original passed
    # NeuMF(num_users, num_items, num_items, nums_hidden), leaving
    # num_factors unused and shifting every argument by one position.
    # The constructor signature is
    # (num_factors, num_users, num_items, nums_hiddens).
    model = NeuMF(num_factors, num_users, num_items, nums_hidden)
    model.compile(
        loss=tf.keras.losses.MeanSquaredError(),
        optimizer=tf.keras.optimizers.Adam(lr=0.001)
    )
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.