ValueError: Layer model expects 1 input(s), but it received 2 input tensors. Inputs received: [<tf.Tensor 'IteratorGetNext:0' shape=(None, 35, 30) dtype=float32>, <tf.Tensor 'IteratorGetNext:1' shape=(None, 35) dtype=float32>] #1

1170300423 opened this issue May 29, 2022 · 0 comments

No matter which way I write it, this error pops up:
############ the first way #################
history = model.fit(
    trainX,
    trainY,
    epochs=100,
    batch_size=60,
    shuffle=True,
    verbose=0,
    validation_data=(testX, testY)
)
############ the second way #################
history = model.fit(
    trainX,
    trainY,
    epochs=100,
    batch_size=60,
    shuffle=True,
    verbose=0,
    validation_data=[testX, testY]
)
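
For context, this is roughly how I check what the model expects versus what I actually pass in (a minimal sketch; model, trainX, trainY, testX and testY come from my own script, so the shapes in the comments are just what my preprocessing produces):

import numpy as np

############ shape check #################
# Shapes of the arrays passed to fit(); in my case roughly:
#   trainX: (num_samples, 35, 30)   trainY: (num_samples, 35)
#   testX:  (num_samples, 35, 30)   testY:  (num_samples, 35)
print("trainX:", np.shape(trainX), "trainY:", np.shape(trainY))
print("testX:", np.shape(testX), "testY:", np.shape(testY))
# What the built model actually expects as input(s)
print("model.inputs:", model.inputs)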

############# error message ######################

ValueError Traceback (most recent call last)
<ipython-input> in <module>
6 shuffle=True,
7 verbose=0,
----> 8 validation_data=(testX, testY)
9 )

~/Program_file/anaconda3/lib/python3.7/site-packages/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1224 use_multiprocessing=use_multiprocessing,
1225 return_dict=True,
-> 1226 use_cached_eval_dataset=True)
1227 val_logs = {'val_' + name: val for name, val in val_logs.items()}
1228 epoch_logs.update(val_logs)

~/Program_file/anaconda3/lib/python3.7/site-packages/keras/engine/training.py in evaluate(self, x, y, batch_size, verbose, sample_weight, steps, callbacks, max_queue_size, workers, use_multiprocessing, return_dict, **kwargs)
1499 with tf.profiler.experimental.Trace('test', step_num=step, _r=1):
1500 callbacks.on_test_batch_begin(step)
-> 1501 tmp_logs = self.test_function(iterator)
1502 if data_handler.should_sync:
1503 context.async_wait()

~/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
883
884 with OptionalXlaContext(self._jit_compile):
--> 885 result = self._call(*args, **kwds)
886
887 new_tracing_count = self.experimental_get_tracing_count()

~/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
922 # In this case we have not created variables on the first call. So we can
923 # run the first trace but we should fail if variables are created.
--> 924 results = self._stateful_fn(*args, **kwds)
925 if self._created_variables and not ALLOW_DYNAMIC_VARIABLE_CREATION:
926 raise ValueError("Creating variables on a non-first call to a function"

~/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/eager/function.py in __call__(self, *args, **kwargs)
3036 with self._lock:
3037 (graph_function,
-> 3038 filtered_flat_args) = self._maybe_define_function(args, kwargs)
3039 return graph_function._call_flat(
3040 filtered_flat_args, captured_inputs=graph_function.captured_inputs) # pylint: disable=protected-access

~/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3458 call_context_key in self._function_cache.missed):
3459 return self._define_function_with_shape_relaxation(
-> 3460 args, kwargs, flat_args, filtered_flat_args, cache_key_context)
3461
3462 self._function_cache.missed.add(call_context_key)

~/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _define_function_with_shape_relaxation(self, args, kwargs, flat_args, filtered_flat_args, cache_key_context)
3380
3381 graph_function = self._create_graph_function(
-> 3382 args, kwargs, override_flat_arg_shapes=relaxed_arg_shapes)
3383 self._function_cache.arg_relaxed[rank_only_cache_key] = graph_function
3384

~/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3306 arg_names=arg_names,
3307 override_flat_arg_shapes=override_flat_arg_shapes,
-> 3308 capture_by_value=self._capture_by_value),
3309 self._function_attributes,
3310 function_spec=self.function_spec,

~/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes, acd_record_initial_resource_uses)
1005 _, original_func = tf_decorator.unwrap(python_func)
1006
-> 1007 func_outputs = python_func(*func_args, **func_kwargs)
1008
1009 # invariant: func_outputs contains only Tensors, CompositeTensors,

~/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
666 # the function a weak reference to itself to avoid a reference cycle.
667 with OptionalXlaContext(compile_with_xla):
--> 668 out = weak_wrapped_fn().wrapped(*args, **kwds)
669 return out
670

~/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
992 except Exception as e: # pylint:disable=broad-except
993 if hasattr(e, "ag_error_metadata"):
--> 994 raise e.ag_error_metadata.to_exception(e)
995 else:
996 raise

ValueError: in user code:

/home/wf/Program_file/anaconda3/lib/python3.7/site-packages/keras/engine/training.py:1330 test_function  *
    return step_function(self, iterator)
/home/wf/Program_file/anaconda3/lib/python3.7/site-packages/keras/engine/training.py:1320 step_function  **
    outputs = model.distribute_strategy.run(run_step, args=(data,))
/home/wf/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:1286 run
    return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/home/wf/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2849 call_for_each_replica
    return self._call_for_each_replica(fn, args, kwargs)
/home/wf/Program_file/anaconda3/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:3632 _call_for_each_replica
    return fn(*args, **kwargs)
/home/wf/Program_file/anaconda3/lib/python3.7/site-packages/keras/engine/training.py:1313 run_step  **
    outputs = model.test_step(data)
/home/wf/Program_file/anaconda3/lib/python3.7/site-packages/keras/engine/training.py:1267 test_step
    y_pred = self(x, training=False)
/home/wf/Program_file/anaconda3/lib/python3.7/site-packages/keras/engine/base_layer.py:1020 __call__
    input_spec.assert_input_compatibility(self.input_spec, inputs, self.name)
/home/wf/Program_file/anaconda3/lib/python3.7/site-packages/keras/engine/input_spec.py:202 assert_input_compatibility
    ' input tensors. Inputs received: ' + str(inputs))

ValueError: Layer model expects 1 input(s), but it received 2 input tensors. Inputs received: [<tf.Tensor 'IteratorGetNext:0' shape=(None, 35, 30) dtype=float32>, <tf.Tensor 'IteratorGetNext:1' shape=(None, 35) dtype=float32>]
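
For comparison, here is a minimal single-input example with the same shapes that I would expect to work (random stand-in data, and the LSTM/Dense layers are only placeholders, not my real model):

import numpy as np
from tensorflow import keras

############ minimal comparison example #################
# Random stand-in data with the same shapes as in my script
trainX = np.random.rand(200, 35, 30).astype("float32")
trainY = np.random.rand(200, 35).astype("float32")
testX = np.random.rand(40, 35, 30).astype("float32")
testY = np.random.rand(40, 35).astype("float32")

# A single-input model: one (35, 30) sequence in, a length-35 vector out
inputs = keras.Input(shape=(35, 30))
x = keras.layers.LSTM(64)(inputs)
outputs = keras.layers.Dense(35)(x)
model = keras.Model(inputs, outputs)
model.compile(optimizer="adam", loss="mse")

history = model.fit(
    trainX,
    trainY,
    epochs=2,
    batch_size=60,
    shuffle=True,
    verbose=0,
    validation_data=(testX, testY)
)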

Please, can you give me some advice?
