Keras 1.1.0 - Error with TimeDistributed wrapper
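For context, here is a minimal sketch of the call that raises the traceback below. It assumes Keras 1.1.0 on the TensorFlow backend with the default "tf" image dimension ordering; the model setup before the failing line is a reconstruction for illustration, not the gist's full script.

from keras.models import Sequential
from keras.layers import Convolution2D
from keras.layers.wrappers import TimeDistributed

model = Sequential()
# batch_input_shape is (batch, time, rows, cols, channels) = (1, 1, 16, 64, 3).
# Wrapping Convolution2D(32, 2, 2) in TimeDistributed here raises the
# ValueError shown in the traceback below.
model.add(TimeDistributed(Convolution2D(32, 2, 2),
                          batch_input_shape=(1, 1, 16, 64, 3)))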
ValueError                                Traceback (most recent call last)
/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/tensorflow/python/framework/tensor_shape.py in merge_with(self, other)
    562 try:
--> 563 self.assert_same_rank(other)
    564 new_dims = []

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/tensorflow/python/framework/tensor_shape.py in assert_same_rank(self, other)
    608 raise ValueError(
--> 609 "Shapes %s and %s must have the same rank" % (self, other))
    610

ValueError: Shapes (?, 1, 15, 63, 32) and (1, 1, 32) must have the same rank

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
<ipython-input-5-94aae201f795> in <module>()
----> 1 model.add(TimeDistributed(Convolution2D(32,2,2), batch_input_shape=(1, 1, 16, 64, 3)))

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/keras/models.py in add(self, layer)
    274 else:
    275 input_dtype = None
--> 276 layer.create_input_layer(batch_input_shape, input_dtype)
    277
    278 if len(layer.inbound_nodes) != 1:

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/keras/engine/topology.py in create_input_layer(self, batch_input_shape, input_dtype, name)
    368 # and create the node connecting the current layer
    369 # to the input layer we just created.
--> 370 self(x)
    371
    372 def assert_input_compatibility(self, input):

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/keras/engine/topology.py in __call__(self, x, mask)
    512 if inbound_layers:
    513 # this will call layer.build() if necessary
--> 514 self.add_inbound_node(inbound_layers, node_indices, tensor_indices)
    515 input_added = True
    516

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/keras/engine/topology.py in add_inbound_node(self, inbound_layers, node_indices, tensor_indices)
    570 # creating the node automatically updates self.inbound_nodes
    571 # as well as outbound_nodes on inbound layers.
--> 572 Node.create_node(self, inbound_layers, node_indices, tensor_indices)
    573
    574 def get_output_shape_for(self, input_shape):

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/keras/engine/topology.py in create_node(cls, outbound_layer, inbound_layers, node_indices, tensor_indices)
    147
    148 if len(input_tensors) == 1:
--> 149 output_tensors = to_list(outbound_layer.call(input_tensors[0], mask=input_masks[0]))
    150 output_masks = to_list(outbound_layer.compute_mask(input_tensors[0], input_masks[0]))
    151 # TODO: try to auto-infer shape if exception is raised by get_output_shape_for

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/keras/layers/wrappers.py in call(self, X, mask)
    115
    116 last_output, outputs, states = K.rnn(step, X,
--> 117 initial_states=[])
    118 y = outputs
    119 else:

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/keras/backend/tensorflow_backend.py in rnn(step_function, inputs, initial_states, go_backwards, mask, constants, unroll, input_length)
   1252 parallel_iterations=32,
   1253 swap_memory=True,
-> 1254 sequence_length=None)
   1255
   1256 if nb_states > 1:

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/tensorflow/python/ops/rnn.py in _dynamic_rnn_loop(cell, inputs, initial_state, parallel_iterations, swap_memory, sequence_length, dtype)
   1023 shape = _state_size_with_prefix(
   1024 output_size, prefix=[const_time_steps, const_batch_size])
-> 1025 output.set_shape(shape)
   1026
   1027 final_outputs = nest.pack_sequence_as(

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/tensorflow/python/framework/ops.py in set_shape(self, shape)
    406 this tensor.
    407 """
--> 408 self._shape = self._shape.merge_with(shape)
    409
    410 @property

/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/site-packages/tensorflow/python/framework/tensor_shape.py in merge_with(self, other)
    568 except ValueError:
    569 raise ValueError("Shapes %s and %s are not compatible" %
--> 570 (self, other))
    571
    572 def concatenate(self, other):

ValueError: Shapes (?, 1, 15, 63, 32) and (1, 1, 32) are not compatible
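For reference, the (?, 1, 15, 63, 32) side of the mismatch is simply the wrapped convolution's output with the batch and time axes prepended: a 2x2 valid convolution with 32 filters over a 16x64x3 frame gives 15x63x32. The (1, 1, 32) side appears to be the (time, batch, output_size) shape that TensorFlow's dynamic-RNN loop infers for the per-step output inside TimeDistributed's K.rnn call, hence the rank mismatch. A quick sketch under the same assumed Keras 1.1.0 / TensorFlow setup shows where the 15x63x32 comes from, applying the layer to a single frame without the wrapper:

from keras.models import Sequential
from keras.layers import Convolution2D

frame_model = Sequential()
# Same layer applied to one 16x64x3 frame, outside TimeDistributed.
frame_model.add(Convolution2D(32, 2, 2, input_shape=(16, 64, 3)))
print(frame_model.output_shape)  # (None, 15, 63, 32)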