[英]No attribute 'compile', how can I modify the class, so that it works?
NeuMF 類不是 Keras 的類,因此它不提供 compile 方法。 我最好使用 keras.Model 而不是 nn.Block。
不幸的是,我不太明白 nn.Block 是什麼以及如何在類定義中替換它。 我應該如何修改我的代碼,使其繼承 keras.Model 並且可以使用 Keras 的方法?
這是我的代碼:
from d2l import mxnet as d2l
from mxnet import autograd, gluon, np, npx
from mxnet.gluon import nn
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
class NeuMF(nn.Block):
    """Neural Matrix Factorization (NeuMF) recommender (mxnet/gluon version).

    Combines a generalized matrix-factorization (GMF) branch with an MLP
    branch, each using its own pair of user/item embeddings, and fuses
    both with a final 1-unit sigmoid prediction layer.

    Parameters
    ----------
    num_factors : int
        Embedding dimensionality for all four embedding tables.
    num_users, num_items : int
        Vocabulary sizes of the user and item embeddings.
    nums_hiddens : iterable of int
        Hidden-layer widths of the MLP branch, in order.
    """

    # BUG FIX: the pasted snippet had `init`/`kwargs` with the leading
    # underscores and asterisks stripped by the site's markup; restored
    # to the required `__init__(..., **kwargs)` / `super().__init__(**kwargs)`.
    def __init__(self, num_factors, num_users, num_items, nums_hiddens,
                 **kwargs):
        super(NeuMF, self).__init__(**kwargs)
        self.P = nn.Embedding(num_users, num_factors)  # GMF user factors
        self.Q = nn.Embedding(num_items, num_factors)  # GMF item factors
        self.U = nn.Embedding(num_users, num_factors)  # MLP user factors
        self.V = nn.Embedding(num_items, num_factors)  # MLP item factors
        self.mlp = nn.Sequential()
        for num_hiddens in nums_hiddens:
            self.mlp.add(nn.Dense(num_hiddens, activation='relu',
                                  use_bias=True))
        self.prediction_layer = nn.Dense(1, activation='sigmoid',
                                         use_bias=False)

    def forward(self, user_id, item_id):
        """Return the predicted interaction score in (0, 1)."""
        p_mf = self.P(user_id)
        q_mf = self.Q(item_id)
        gmf = p_mf * q_mf  # element-wise product: GMF branch
        p_mlp = self.U(user_id)
        q_mlp = self.V(item_id)
        mlp = self.mlp(np.concatenate([p_mlp, q_mlp], axis=1))
        # Fuse both branches along the feature axis before prediction.
        con_res = np.concatenate([gmf, mlp], axis=1)
        return self.prediction_layer(con_res)
# num_users / num_items must already be defined earlier (not shown here).
hidden = [5,5,5]
model = NeuMF(5, num_users, num_items, hidden)
# NOTE(review): NeuMF subclasses mxnet's nn.Block, which has no `compile`
# method -- `compile` is part of the Keras Model API.  This call raises the
# AttributeError shown below; the class must subclass keras.Model instead.
model.compile(
#loss=tf.keras.losses.BinaryCrossentropy(),
loss=tf.keras.losses.MeanSquaredError(),
optimizer=keras.optimizers.Adam(lr=0.001)
)
我收到以下錯誤:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-21-5979072369bd> in <module>()
2
3 model = NeuMF(5, num_users, num_items, hidden)
----> 4 model.compile(
5 #loss=tf.keras.losses.BinaryCrossentropy(),
6 loss=tf.keras.losses.MeanSquaredError(),
AttributeError: 'NeuMF' object has no attribute 'compile'
非常感謝您提前!
編輯:
我將nn
替換為layers
class NeuMF(keras.Model):
    """NeuMF recommender rewritten as a tf.keras Model subclass.

    Same architecture as the mxnet version: a GMF branch (element-wise
    product of user/item embeddings) and an MLP branch over a second pair
    of embeddings, concatenated into a 1-unit sigmoid output layer.
    """

    def __init__(self, num_factors, num_users, num_items, nums_hiddens,
                 **kwargs):
        super(NeuMF, self).__init__(**kwargs)
        self.P = layers.Embedding(num_users, num_factors)  # GMF user factors
        self.Q = layers.Embedding(num_items, num_factors)  # GMF item factors
        self.U = layers.Embedding(num_users, num_factors)  # MLP user factors
        self.V = layers.Embedding(num_items, num_factors)  # MLP item factors
        # BUG FIX: `Sequential` lives on `keras`, not `keras.layers`;
        # `layers.Sequential()` raises AttributeError.
        self.mlp = keras.Sequential()
        for num_hiddens in nums_hiddens:
            self.mlp.add(layers.Dense(num_hiddens, activation='relu',
                                      use_bias=True))
        self.prediction_layer = layers.Dense(1, activation='sigmoid',
                                             use_bias=False)

    # BUG FIX: Keras invokes `call`, not `forward` (that is the
    # PyTorch/mxnet convention); with `forward` this code would never run.
    def call(self, user_id, item_id):
        """Return the predicted interaction score in (0, 1)."""
        p_mf = self.P(user_id)
        q_mf = self.Q(item_id)
        gmf = p_mf * q_mf
        p_mlp = self.U(user_id)
        q_mlp = self.V(item_id)
        # BUG FIX: operate on TF tensors with TF ops; `np.concatenate`
        # here was mxnet's numpy shim and does not apply to TF tensors.
        mlp = self.mlp(tf.concat([p_mlp, q_mlp], axis=1))
        con_res = tf.concat([gmf, mlp], axis=1)
        return self.prediction_layer(con_res)
然后我得到了一個新錯誤:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-26-7e09b0f80300> in <module>()
1 hidden = [1,1,1]
2
----> 3 model = NeuMF(1, num_users, num_items, hidden)
4 model.compile(
5 #loss=tf.keras.losses.BinaryCrossentropy(),
1 frames
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/layers/embeddings.py in __init__(self, input_dim, output_dim, embeddings_initializer, embeddings_regularizer, activity_regularizer, embeddings_constraint, mask_zero, input_length, **kwargs)
102 else:
103 kwargs['input_shape'] = (None,)
--> 104 if input_dim <= 0 or output_dim <= 0:
105 raise ValueError('Both `input_dim` and `output_dim` should be positive, '
106 'found input_dim {} and output_dim {}'.format(
ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
在評論中已經進行了相當多的討論之後,您的代碼仍然存在一些需要澄清的問題:

1. keras.Model 的子類應該實現 `call` 方法,而不是 `forward` 方法(也不要重寫 `__call__`:Keras 會在內部通過 `__call__` 調用您的 `call`,重寫 `__call__` 會繞過 Keras 的構建和追蹤機制)。
2. 不要在 TensorFlow 張量上使用 `np.concatenate` 這樣的 numpy 操作,始終使用 `tf.keras.layers.Concatenate` 這樣的 Keras 層(或 `tf.concat`)。
3. 錯誤信息表明 num_factors、num_users、num_items 中有的不是整數,但我只能在這裡猜測,因為您沒有向我們提供這些值。

讓我們逐一處理這些問題。以下代碼片段運行沒有錯誤,可能是您的一個很好的起點:
import tensorflow as tf
class NeuMF(tf.keras.Model):
    """NeuMF: fuses a GMF branch and an MLP branch over user/item embeddings.

    Parameters
    ----------
    num_factors : int
        Embedding dimensionality for all four embedding tables.
    num_users, num_items : int
        Vocabulary sizes of the user and item embeddings.
    nums_hiddens : iterable of int
        Hidden-layer widths of the MLP branch, in order.
    """

    def __init__(self, num_factors, num_users, num_items, nums_hiddens,
                 **kwargs):
        super(NeuMF, self).__init__(**kwargs)
        self.P = tf.keras.layers.Embedding(num_users, num_factors)
        self.Q = tf.keras.layers.Embedding(num_items, num_factors)
        self.U = tf.keras.layers.Embedding(num_users, num_factors)
        self.V = tf.keras.layers.Embedding(num_items, num_factors)
        self.mlp = tf.keras.Sequential()
        for num_hiddens in nums_hiddens:
            self.mlp.add(
                tf.keras.layers.Dense(
                    num_hiddens,
                    activation='relu',
                    use_bias=True
                )
            )
        self.prediction_layer = tf.keras.layers.Dense(1, activation='sigmoid',
                                                      use_bias=False)

    # BUG FIX: override `call`, not `__call__`.  `Model.__call__` wraps
    # `call` with layer building, input conversion and training-phase
    # plumbing; overriding `__call__` silently bypasses all of that.
    def call(self, inputs):
        """inputs is a pair: (user_id tensor, item_id tensor)."""
        user_id, item_id = inputs[0], inputs[1]
        x = self.P(user_id)
        x1 = self.Q(item_id)
        x = tf.keras.layers.Multiply()([x, x1])      # GMF branch
        y = self.U(user_id)
        y1 = self.V(item_id)
        y = tf.keras.layers.Concatenate()([y, y1])   # MLP branch
        y = self.mlp(y)
        x = tf.keras.layers.Concatenate()([x, y])    # fuse both branches
        return self.prediction_layer(x)
if __name__ == '__main__':
    # Replace these with values of your choice:
    num_factors = 2
    num_users = 3
    num_items = 4
    nums_hidden = [5, 5, 5]
    # BUG FIX: the arguments were (num_users, num_items, num_items, ...),
    # which silently used num_users as the embedding size and ignored
    # num_factors entirely.  Pass them in the declared order.
    model = NeuMF(num_factors, num_users, num_items, nums_hidden)
    model.compile(
        loss=tf.keras.losses.MeanSquaredError(),
        # `lr` is a deprecated alias; `learning_rate` is the supported name.
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001)
    )
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.