[英]InvalidArgumentError: 2 root error(s) found.(0) Invalid argument: You must feed a value for placeholder tensor 'Placeholder_22'
def CTcnn(shape=(None,SIZE,SIZE,1)):
x_input = tf.placeholder(tf.float32, shape=[None, 256,256, 1])
#s = tf.keras.layers.Lambda(lambda x: x / 255)(x_input)
encode_conv1 = Quad_conv_layer_same(x_input,shape=[3,3,1,15])
encode_conv2 = Quad_conv_layer_same(encode_conv1,shape=[3,3,15,15])
encode_conv3 = Quad_conv_layer_same(encode_conv2,shape=[3,3,15,15])
encode_conv4 = Quad_conv_layer_same(encode_conv3,shape=[3,3,15,15])
encode_conv5 = Quad_conv_layer_valid(encode_conv4,shape=[3,3,15,15])
decode_conv4 = tf.nn.relu(Quad_deconv_layer_valid_linear(encode_conv5,shape=[3, 3,15,15],outputshape=tf.shape(encode_conv4))+encode_conv4)
decode_conv3 = Quad_deconv_layer_same(decode_conv4,shape=[3, 3,15,15],outputshape=tf.shape(encode_conv3))
decode_conv2 = tf.nn.relu(Quad_deconv_layer_same_linear(decode_conv3,shape=[3, 3,15,15],outputshape=tf.shape(encode_conv2))+encode_conv2)
decode_conv1 = Quad_deconv_layer_same(decode_conv2,shape=[3, 3,15,15],outputshape=tf.shape(encode_conv1))
x_output = tf.nn.relu(Quad_deconv_layer_same_linear(decode_conv1,shape=[3, 3,1,15],outputshape=tf.shape(x_input))+x_input)
model=tf.keras.Model(inputs=x_input, outputs=x_output, name='CTcnn')
return model
cost = tf.reduce_mean(tf.square(tf.subtract(x_output, x_input)))
opt = keras.optimizers.Adam(learning_rate=0.0004).minimize(cost)
When call the function:调用 function 时:
model = CTcnn(shape=(None,SIZE,SIZE,1))
Error message:错误信息:
InvalidArgumentError Traceback (most recent call last)
<ipython-input-77-4de3fa78340e> in <module>()
----> 1 model = CTcnn(shape=(None,SIZE,SIZE,1))
2 #opt = keras.optimizers.Adam(learning_rate=0.0004)
3 #model.compile(loss='categorical_crossentropy', optimizer=opt)
4 model.compile(optimizer=opt, loss='mean_squared_error')
5 model.summary()
<ipython-input-76-01dc6c972eba> in CTcnn(shape)
17 x_output = tf.nn.relu(Quad_deconv_layer_same_linear(decode_conv1,shape=[3, 3,1,15],outputshape=tf.shape(x_input))+x_input)
18
---> 19 model=tf.keras.Model(inputs=x_input, outputs=x_output, name='CTcnn')
20 return model
21
/usr/local/lib/python3.6/dist-packages/tensorflow/python/training/tracking/base.py in _method_wrapper(self, *args, **kwargs)
515 self._self_setattr_tracking = False # pylint: disable=protected-access
516 try:
--> 517 result = method(self, *args, **kwargs)
518 finally:
519 self._self_setattr_tracking = previous_value # pylint:
disable=protected-access
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/functional.py in __init__(self, inputs, outputs, name, trainable, **kwargs)
118 generic_utils.validate_kwargs(kwargs, {})
119 super(Functional, self).__init__(name=name, trainable=trainable)
--> 120 self._init_graph_network(inputs, outputs)
121
122 @trackable.no_automatic_dependency_tracking
/usr/local/lib/python3.6/dist-packages/tensorflow/python/training/tracking/base.py in _method_wrapper(self, *args, **kwargs)
515 self._self_setattr_tracking = False # pylint: disable=protected-access
516 try:
--> 517 result = method(self, *args, **kwargs)
518 finally:
519 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/functional.py in _init_graph_network(self, inputs, outputs)
153 if not keras_tensor.keras_tensors_enabled():
154 if any(not hasattr(tensor, '_keras_history') for tensor in self.outputs):
--> 155 base_layer_utils.create_keras_history(self._nested_outputs)
156
157 self._validate_graph_inputs_and_outputs()
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer_utils.py in create_keras_history(tensors)
186 the raw Tensorflow operations.
187 """
--> 188 _, created_layers = _create_keras_history_helper(tensors, set(), [])
189 return created_layers
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer_utils.py in _create_keras_history_helper(tensors, processed_ops, created_layers)
248 constants[i] = backend.eval_in_eager_or_function(op_input)
249 else:
--> 250 constants[i] = backend.function([], op_input)([])
251 layer_inputs = unnest_if_single_tensor(layer_inputs)
252 processed_ops, created_layers = _create_keras_history_helper(
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/backend.py in __call__(self, inputs)
3955
3956 fetched = self._callable_fn(*array_vals,
-> 3957 run_metadata=self.run_metadata)
3958 self._call_fetch_callbacks(fetched[-len(self._fetches):])
3959 output_structure = nest.pack_sequence_as(
/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py in __call__(self, *args, **kwargs)
1480 ret = tf_session.TF_SessionRunCallable(self._session._session,
1481 self._handle, args,
-> 1482 run_metadata_ptr)
1483 if run_metadata:
1484 proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
InvalidArgumentError: 2 root error(s) found.
(0) Invalid argument: You must feed a value for placeholder tensor 'Placeholder_22' with dtype float and shape [?,256,256,1]
[[{{node Placeholder_22}}]]
(1) Invalid argument: You must feed a value for placeholder tensor 'Placeholder_22' with dtype float and shape [?,256,256,1]
[[{{node Placeholder_22}}]]
[[add_902/_3]]
0 successful operations.
0 derived errors ignored.
Looks like issue is with tf.placeholder
.看起来问题出在
tf.placeholder
。 The error suggests that, feed some value to Placeholder_22.该错误表明,为 Placeholder_22 提供一些值。 It is required to feed some value to use Placeholder.
需要提供一些值才能使用占位符。
Name your placeholders: Then it is clear where they are defined命名占位符:然后很清楚它们的定义位置
Example: tf.placeholder(name="some Name")示例:tf.placeholder(name="some Name")
It shows you in the 2nd traceback where the placeholder that is unspecified它在第二个回溯中向您显示未指定的占位符
ie placeholder_22即占位符_22
It shows you what sess.run() call failed to have all the placeholders in the traceback too.它向您展示了 sess.run() 调用也未能在回溯中包含所有占位符。
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.