---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
theano/scan_module/scan_perform.pyx in theano.scan_module.scan_perform.perform (/home/dblank/.theano/compiledir_Linux-4.10--generic-x86_64-with-Ubuntu-17.04-zesty-x86_64-3.5.3-64/scan_perform/mod.cpp:4490)()
ValueError: Shape mismatch: x has 25 rows but z has 1 rows
During handling of the above exception, another exception occurred:
ValueError Traceback (most recent call last)
/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py in __call__(self, *args, **kwargs)
883 outputs =\
--> 884 self.fn() if output_subset is None else\
885 self.fn(output_subset=output_subset)
/usr/local/lib/python3.5/dist-packages/theano/scan_module/scan_op.py in rval(p, i, o, n, allow_gc)
988 allow_gc=allow_gc):
--> 989 r = p(n, [x[0] for x in i], o)
990 for o in node.outputs:
/usr/local/lib/python3.5/dist-packages/theano/scan_module/scan_op.py in p(node, args, outs)
977 outs,
--> 978 self, node)
979 except (ImportError, theano.gof.cmodule.MissingGXX):
theano/scan_module/scan_perform.pyx in theano.scan_module.scan_perform.perform (/home/dblank/.theano/compiledir_Linux-4.10--generic-x86_64-with-Ubuntu-17.04-zesty-x86_64-3.5.3-64/scan_perform/mod.cpp:4606)()
/usr/local/lib/python3.5/dist-packages/theano/gof/link.py in raise_with_op(node, thunk, exc_info, storage_map)
324 pass
--> 325 reraise(exc_type, exc_value, exc_trace)
326
/usr/lib/python3/dist-packages/six.py in reraise(tp, value, tb)
684 if value.__traceback__ is not tb:
--> 685 raise value.with_traceback(tb)
686 raise value
theano/scan_module/scan_perform.pyx in theano.scan_module.scan_perform.perform (/home/dblank/.theano/compiledir_Linux-4.10--generic-x86_64-with-Ubuntu-17.04-zesty-x86_64-3.5.3-64/scan_perform/mod.cpp:4490)()
ValueError: Shape mismatch: x has 25 rows but z has 1 rows
Apply node that caused the error: Gemm{no_inplace}(Subtensor{::, int64::}.0, TensorConstant{0.20000000298023224}, lstm1/variable[t-1], <TensorType(float32, matrix)>, TensorConstant{0.20000000298023224})
Toposort index: 5
Inputs types: [TensorType(float32, matrix), TensorType(float32, scalar), TensorType(float32, matrix), TensorType(float32, matrix), TensorType(float32, scalar)]
Inputs shapes: [(1, 50), (), (25, 50), (50, 50), ()]
Inputs strides: [(800, 4), (), (200, 4), (800, 4), ()]
Inputs values: ['not shown', array(0.20000000298023224, dtype=float32), 'not shown', 'not shown', array(0.20000000298023224, dtype=float32)]
Outputs clients: [[Elemwise{Composite{(clip((i0 + i1), i2, i3) * tanh(i4))}}(TensorConstant{(1, 1) of 0.5}, Gemm{no_inplace}.0, TensorConstant{(1, 1) of 0}, TensorConstant{(1, 1) of 1}, Elemwise{Composite{((clip((i0 + i1), i2, i3) * i4) + (clip((i5 + i6), i2, i3) * tanh(i7)))}}.0)]]
HINT: Re-running with most Theano optimization disabled could give you a back-trace of when this node was created. This can be done with by setting the Theano flag 'optimizer=fast_compile'. If that does not work, Theano optimizations can be disabled with 'optimizer=None'.
HINT: Use the Theano flag 'exception_verbosity=high' for a debugprint and storage map footprint of this apply node.
During handling of the above exception, another exception occurred:
ValueError Traceback (most recent call last)
/usr/local/lib/python3.5/dist-packages/IPython/core/formatters.py in __call__(self, obj)
334 method = get_real_method(obj, self.print_method)
335 if method is not None:
--> 336 return method()
337 return None
338 else:
/usr/local/lib/python3.5/dist-packages/conx/network.py in _repr_svg_(self)
192 def _repr_svg_(self):
193 if all([layer.model for layer in self.layers]):
--> 194 return self.build_svg()
195 else:
196 return None
/usr/local/lib/python3.5/dist-packages/conx/network.py in build_svg(self, inputs, class_id, opts)
982 in_layer = [layer for layer in self.layers if layer.kind() == "input"][0]
983 v = in_layer.make_dummy_vector()
--> 984 image = self.propagate_to_image(layer_name, v)
985 else: # no propagate
986 # get image based on ontputs
/usr/local/lib/python3.5/dist-packages/conx/network.py in propagate_to_image(self, layer_name, input, batch_size)
735 if self.num_input_layers == 1:
736 input = input[0]
--> 737 outputs = self.propagate_to(layer_name, input, batch_size)
738 array = np.array(outputs)
739 image = self[layer_name].make_image(array, self.config)
/usr/local/lib/python3.5/dist-packages/conx/network.py in propagate_to(self, layer_name, inputs, batch_size, visualize)
707 inputs = inputs[0]
708 if self.num_input_layers == 1:
--> 709 outputs = self[layer_name].model.predict(np.array([inputs]), batch_size=batch_size)
710 else:
711 # get just inputs for this layer, in order:
/usr/local/lib/python3.5/dist-packages/keras/engine/training.py in predict(self, x, batch_size, verbose)
1515 f = self.predict_function
1516 return self._predict_loop(f, ins,
-> 1517 batch_size=batch_size, verbose=verbose)
1518
1519 def train_on_batch(self, x, y,
/usr/local/lib/python3.5/dist-packages/keras/engine/training.py in _predict_loop(self, f, ins, batch_size, verbose)
1139 ins_batch = _slice_arrays(ins, batch_ids)
1140
-> 1141 batch_outs = f(ins_batch)
1142 if not isinstance(batch_outs, list):
1143 batch_outs = [batch_outs]
/usr/local/lib/python3.5/dist-packages/keras/backend/theano_backend.py in __call__(self, inputs)
1195 def __call__(self, inputs):
1196 assert isinstance(inputs, (list, tuple))
-> 1197 return self.function(*inputs)
1198
1199
/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py in __call__(self, *args, **kwargs)
896 node=self.fn.nodes[self.fn.position_of_error],
897 thunk=thunk,
--> 898 storage_map=getattr(self.fn, 'storage_map', None))
899 else:
900 # old-style linkers raise their own exceptions
/usr/local/lib/python3.5/dist-packages/theano/gof/link.py in raise_with_op(node, thunk, exc_info, storage_map)
323 # extra long error message in that case.
324 pass
--> 325 reraise(exc_type, exc_value, exc_trace)
326
327
/usr/lib/python3/dist-packages/six.py in reraise(tp, value, tb)
683 value = tp()
684 if value.__traceback__ is not tb:
--> 685 raise value.with_traceback(tb)
686 raise value
687
/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py in __call__(self, *args, **kwargs)
882 try:
883 outputs =\
--> 884 self.fn() if output_subset is None else\
885 self.fn(output_subset=output_subset)
886 except Exception:
/usr/local/lib/python3.5/dist-packages/theano/scan_module/scan_op.py in rval(p, i, o, n, allow_gc)
987 def rval(p=p, i=node_input_storage, o=node_output_storage, n=node,
988 allow_gc=allow_gc):
--> 989 r = p(n, [x[0] for x in i], o)
990 for o in node.outputs:
991 compute_map[o][0] = True
/usr/local/lib/python3.5/dist-packages/theano/scan_module/scan_op.py in p(node, args, outs)
976 args,
977 outs,
--> 978 self, node)
979 except (ImportError, theano.gof.cmodule.MissingGXX):
980 p = self.execute
theano/scan_module/scan_perform.pyx in theano.scan_module.scan_perform.perform (/home/dblank/.theano/compiledir_Linux-4.10--generic-x86_64-with-Ubuntu-17.04-zesty-x86_64-3.5.3-64/scan_perform/mod.cpp:4606)()
/usr/local/lib/python3.5/dist-packages/theano/gof/link.py in raise_with_op(node, thunk, exc_info, storage_map)
323 # extra long error message in that case.
324 pass
--> 325 reraise(exc_type, exc_value, exc_trace)
326
327
/usr/lib/python3/dist-packages/six.py in reraise(tp, value, tb)
683 value = tp()
684 if value.__traceback__ is not tb:
--> 685 raise value.with_traceback(tb)
686 raise value
687
theano/scan_module/scan_perform.pyx in theano.scan_module.scan_perform.perform (/home/dblank/.theano/compiledir_Linux-4.10--generic-x86_64-with-Ubuntu-17.04-zesty-x86_64-3.5.3-64/scan_perform/mod.cpp:4490)()
ValueError: Shape mismatch: x has 25 rows but z has 1 rows
Apply node that caused the error: Gemm{no_inplace}(Subtensor{::, int64::}.0, TensorConstant{0.20000000298023224}, lstm1/variable[t-1], <TensorType(float32, matrix)>, TensorConstant{0.20000000298023224})
Toposort index: 5
Inputs types: [TensorType(float32, matrix), TensorType(float32, scalar), TensorType(float32, matrix), TensorType(float32, matrix), TensorType(float32, scalar)]
Inputs shapes: [(1, 50), (), (25, 50), (50, 50), ()]
Inputs strides: [(800, 4), (), (200, 4), (800, 4), ()]
Inputs values: ['not shown', array(0.20000000298023224, dtype=float32), 'not shown', 'not shown', array(0.20000000298023224, dtype=float32)]
Outputs clients: [[Elemwise{Composite{(clip((i0 + i1), i2, i3) * tanh(i4))}}(TensorConstant{(1, 1) of 0.5}, Gemm{no_inplace}.0, TensorConstant{(1, 1) of 0}, TensorConstant{(1, 1) of 1}, Elemwise{Composite{((clip((i0 + i1), i2, i3) * i4) + (clip((i5 + i6), i2, i3) * tanh(i7)))}}.0)]]
HINT: Re-running with most Theano optimization disabled could give you a back-trace of when this node was created. This can be done with by setting the Theano flag 'optimizer=fast_compile'. If that does not work, Theano optimizations can be disabled with 'optimizer=None'.
HINT: Use the Theano flag 'exception_verbosity=high' for a debugprint and storage map footprint of this apply node.
Apply node that caused the error: forall_inplace,cpu,scan_fn}(TensorConstant{1}, InplaceDimShuffle{1,0,2}.0, IncSubtensor{InplaceSet;:int64:}.0, IncSubtensor{InplaceSet;:int64:}.0, TensorConstant{1}, Subtensor{::, int64:int64:}.0, Subtensor{::, :int64:}.0, Subtensor{::, int64:int64:}.0, Subtensor{::, int64::}.0)
Toposort index: 77
Inputs types: [TensorType(int64, scalar), TensorType(float32, (True, False, False)), TensorType(float32, 3D), TensorType(float32, 3D), TensorType(int64, scalar), TensorType(float32, matrix), TensorType(float32, matrix), TensorType(float32, matrix), TensorType(float32, matrix)]
Inputs shapes: [(), (1, 1, 200), (2, 25, 50), (2, 25, 50), (), (50, 50), (50, 50), (50, 50), (50, 50)]
Inputs strides: [(), (800, 800, 4), (5000, 200, 4), (5000, 200, 4), (), (800, 4), (800, 4), (800, 4), (800, 4)]
Inputs values: [array(1), 'not shown', 'not shown', 'not shown', array(1), 'not shown', 'not shown', 'not shown', 'not shown']
Outputs clients: [[Subtensor{int64}(forall_inplace,cpu,scan_fn}.0, Constant{1})], [Subtensor{int64}(forall_inplace,cpu,scan_fn}.1, Constant{1})], [InplaceDimShuffle{1,0,2}(forall_inplace,cpu,scan_fn}.2)]]
HINT: Re-running with most Theano optimization disabled could give you a back-trace of when this node was created. This can be done with by setting the Theano flag 'optimizer=fast_compile'. If that does not work, Theano optimizations can be disabled with 'optimizer=None'.
HINT: Use the Theano flag 'exception_verbosity=high' for a debugprint and storage map footprint of this apply node.
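The root cause is the innermost ValueError: the Gemm node is asked to combine a current input slice of shape (1, 50) with the recurrent state lstm1/variable[t-1] of shape (25, 50); those shapes come straight from the "Inputs shapes" lines above. Reading the call chain, a plausible (but unconfirmed) trigger is that conx's propagate_to() wraps a single dummy input into a batch of one via np.array([inputs]) while the LSTM's recurrent state was set up for a batch of 25. The sketch below only mirrors the failing gemm contract with those shapes; the array names and values are illustrative, not taken from the notebook.

    import numpy as np

    # Shapes copied from the "Inputs shapes" lines of the inner Gemm error above;
    # the array names are illustrative.
    alpha = beta = np.float32(0.2)             # the 0.2 TensorConstants in the Gemm node
    z = np.zeros((1, 50), dtype=np.float32)    # current input slice: batch of 1 row
    x = np.zeros((25, 50), dtype=np.float32)   # lstm1/variable[t-1]: state built for 25 rows
    y = np.zeros((50, 50), dtype=np.float32)   # recurrent weight matrix

    # gemm computes z = beta*z + alpha*x.dot(y) into z's existing buffer, so
    # x.dot(y) must match z's shape exactly. Plain NumPy would broadcast the
    # single row of z and hide the problem, hence the explicit check here.
    if x.dot(y).shape != z.shape:
        raise ValueError("Shape mismatch: x has %d rows but z has %d rows"
                         % (x.shape[0], z.shape[0]))

Running the sketch raises the same "Shape mismatch: x has 25 rows but z has 1 rows" message as the traceback, which is consistent with a batch-size disagreement between the visualization's single-sample predict() call and the recurrent state, rather than a problem in the Gemm node itself.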