I am trying to define a model and compile it, but for some reason I am not able to compile or define this model...
def fws():
filter_size = 8
pooling_size = 6
stride_step = 2
J = 80
splits = 33
total_frames_with_deltas = 45
pool_splits = ((splits - pooling_size)+1)/2
print "pool_splits" + str(pool_splits)
print "Printing shapes"
list_of_input = [Input(shape=(8,3)) for i in range(splits*total_frames_with_deltas)]
output_convolution = []
for steps in range(total_frames_with_deltas):
conv = Conv1D(filters = J, kernel_size = 8)
column = 0
skip = 45
conv_output = []
for _ in range(splits):
conv_output.append(conv(list_of_input[(column*skip)+steps]))
column = column + 1
output_convolution.append((conv_output))
print len(output_convolution)
print len(output_convolution[0])
out = 0
output_conv = []
for row in range(splits):
for column in range(total_frames_with_deltas):
#print row
#print column
out = out + output_convolution[column][row]
output_conv.append(out)
output_con = Concatenate()(output_conv)
output_con = Reshape((splits,-1))(output_con)
pooled = MaxPooling1D(pool_size = pooling_size, strides = stride_step)(output_con)
print pooled.shape
#reshape = Reshape((3,-1))(pooled)
#fc
dense1 = Dense(units = 1000, activation = 'relu', name = "dense_1")(pooled)
dense2 = Dense(units = 1000, activation = 'relu', name = "dense_2")(dense1)
dense3 = Dense(units = 50 , activation = 'softmax', name = "dense_3")(dense2)
raw_input("Model definition ok!")
model = Model(inputs = list_of_input , outputs = dense3)
raw_input("Model definition with input/output")
model.compile(loss="categorical_crossentropy", optimizer='sgd' , metrics = [metrics.categorical_accuracy])
This is the full error message:
File "keras_cnn_phoneme_original_fit_generator.py", line 231, in <module>
fws()
File "keras_cnn_phoneme_original_fit_generator.py", line 212, in fws
model = Model(inputs = list_of_input , outputs = dense3)
File "/usr/local/lib/python2.7/dist-packages/keras/legacy/interfaces.py", line 88, in wrapper
return func(*args, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/keras/engine/topology.py", line 1676, in __init__
build_map_of_graph(x, finished_nodes, nodes_in_progress)
File "/usr/local/lib/python2.7/dist-packages/keras/engine/topology.py", line 1666, in build_map_of_graph
layer, node_index, tensor_index)
File "/usr/local/lib/python2.7/dist-packages/keras/engine/topology.py", line 1666, in build_map_of_graph
layer, node_index, tensor_index)
File "/usr/local/lib/python2.7/dist-packages/keras/engine/topology.py", line 1666, in build_map_of_graph
layer, node_index, tensor_index)
File "/usr/local/lib/python2.7/dist-packages/keras/engine/topology.py", line 1666, in build_map_of_graph
layer, node_index, tensor_index)
File "/usr/local/lib/python2.7/dist-packages/keras/engine/topology.py", line 1666, in build_map_of_graph
layer, node_index, tensor_index)
File "/usr/local/lib/python2.7/dist-packages/keras/engine/topology.py", line 1664, in build_map_of_graph
next_node = layer.inbound_nodes[node_index]
AttributeError: 'NoneType' object has no attribute 'inbound_nodes'
The error seems to occur when defining the input and output of the network. I am not sure why — both the convolution and the pooling have been designed to handle the input, so the error makes no sense to me.
Kind of late, but I just had a similar problem, and I guess someone else might have a similar problem. I think the line you got wrong is:
out = out + output_convolution[column][row]
Try changing it to:
out = add([out, output_convolution[column][row]])
Where add
is in keras.layers.merge
. Unlike tensorflow, keras seems to be unable to interpret a+b
as a node in the graph, therefore it breaks.
Also for future reference, what I was trying to do was to subtract two tensors ( a - b
) as follows:
subt = add([a, -b])
Which raises the same exception. The way I did it was to define b
as -b
, not fancy but it works.
The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.