I am trying to classify an input image with a TensorFlow model in a multi-class classification problem. I would like to plot the probabilities of the top-10 highest-scoring predicted classes. I do that with the following steps:
1. Load the model
import tensorflow as tf
import tensorflow.compat.v1 as tfc
import PIL
import numpy as np
import cv2
from tensorflow.keras.models import model_from_json
import warnings
import matplotlib.pyplot as plt
warnings.filterwarnings('ignore')
tf.compat.v1.disable_eager_execution()
json_file = open('resnet-model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model_json = loaded_model_json.replace('"activation":"softmax"', '"activation":"linear"')
model = tf.keras.models.model_from_json(loaded_model_json)
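For reference, model_from_json restores only the architecture; if the trained weights live in a separate HDF5 file, they would typically be loaded right after this step (a sketch; the file name is an assumption, not from the original post):
#Hypothetical weights file produced when the model was trained
model.load_weights('resnet-model-weights.h5')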
2. Prepare the function to return logits and probabilities
#Sets the threshold for what messages will be logged
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
#Starts the Interactive Session
sess=tfc.InteractiveSession()
#Size of the image that will be classified
image=tf.Variable(tf.zeros((1,224,224,3)))
def resnet2(image, model):
    logits = model(image)
    probs = tf.nn.softmax(logits)
    return logits, probs
#Returns logits and probabilities from the network
logits, probs = resnet2(image, model)
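As an aside, the top-10 selection done with NumPy in the next step could also be computed inside the graph (a minimal sketch, not part of the original code):
#Graph-side alternative: top-10 probabilities and their class indices
top10_probs, top10_classes = tf.nn.top_k(probs, k=10)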
3. Function to classify and return top10 probabilities
def classify(img, correct_class=None, target_class=None):
    #Get probabilities given the input image
    p = sess.run(probs, feed_dict={image: img})[0]
    #Get the indices of the top-10 probabilities
    topk = list(p.argsort()[-10:][::-1])
    #Select the top-10 probabilities
    topprobs = p[topk]
    print(topprobs)
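Since the goal is to plot the top-10 probabilities, the classify function could end with a bar chart instead of a print. A minimal sketch using the matplotlib import above (plot_top10 is a hypothetical helper; class indices are used as labels):
def plot_top10(topk, topprobs):
    #Bar chart of the top-10 class probabilities
    plt.figure(figsize=(8, 4))
    plt.bar(range(10), topprobs)
    plt.xticks(range(10), topk, rotation=45)
    plt.xlabel('class index')
    plt.ylabel('probability')
    plt.tight_layout()
    plt.show()
Calling plot_top10(topk, topprobs) at the end of classify would then produce the desired chart.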
4. Prepare Image to be classified and classify it
#Image class is 11
img_class = 11
#Open the image
img = cv2.imread("sample-image.jpg")
#Add the batch dimension (assumes the image on disk is already 224x224x3)
img = img.reshape(1, 224, 224, 3)
img = (np.asarray(img) / 255.0).astype(np.float32)
classify(img, correct_class=img_class)
However, whenever I run it I have the following error:
---------------------------------------------------------------------------
FailedPreconditionError Traceback (most recent call last)
/usr/local/lib/python3.8/dist-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
1364 try:
-> 1365 return fn(*args)
1366 except errors.OpError as e:
/usr/local/lib/python3.8/dist-packages/tensorflow/python/client/session.py in _run_fn(feed_dict, fetch_list, target_list, options, run_metadata)
1348 self._extend_graph()
-> 1349 return self._call_tf_sessionrun(options, feed_dict, fetch_list,
1350 target_list, run_metadata)
/usr/local/lib/python3.8/dist-packages/tensorflow/python/client/session.py in _call_tf_sessionrun(self, options, feed_dict, fetch_list, target_list, run_metadata)
1440 run_metadata):
-> 1441 return tf_session.TF_SessionRun_wrapper(self._session, options, feed_dict,
1442 fetch_list, target_list,
FailedPreconditionError: 2 root error(s) found.
(0) Failed precondition: Error while reading resource variable conv4_block4_3_bn_9/beta from Container: localhost. This could mean that the variable was uninitialized. Not found: Container localhost does not exist. (Could not find resource: localhost/conv4_block4_3_bn_9/beta)
[[{{node functional_1_5/conv4_block4_3_bn/ReadVariableOp_1}}]]
[[functional_1_5/dropout/cond/then/_0/dropout/GreaterEqual/y/_41]]
(1) Failed precondition: Error while reading resource variable conv4_block4_3_bn_9/beta from Container: localhost. This could mean that the variable was uninitialized. Not found: Container localhost does not exist. (Could not find resource: localhost/conv4_block4_3_bn_9/beta)
[[{{node functional_1_5/conv4_block4_3_bn/ReadVariableOp_1}}]]
0 successful operations.
0 derived errors ignored.
During handling of the above exception, another exception occurred:
FailedPreconditionError Traceback (most recent call last)
<ipython-input-116-291671c55c1a> in <module>
----> 1 classify(img, correct_class=img_class)
<ipython-input-114-f609e06291c4> in classify(img, correct_class, target_class)
6
7 #Get probabilities given the input image
----> 8 p = sess.run(probs, feed_dict={image: img})[0]
9 #ax1.imshow(img)
10 #fig.sca(ax1)
/usr/local/lib/python3.8/dist-packages/tensorflow/python/client/session.py in run(self, fetches, feed_dict, options, run_metadata)
955
956 try:
--> 957 result = self._run(None, fetches, feed_dict, options_ptr,
958 run_metadata_ptr)
959 if run_metadata:
/usr/local/lib/python3.8/dist-packages/tensorflow/python/client/session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
1178 # or if the call is a partial run that specifies feeds.
1179 if final_fetches or final_targets or (handle and feed_dict_tensor):
-> 1180 results = self._do_run(handle, final_targets, final_fetches,
1181 feed_dict_tensor, options, run_metadata)
1182 else:
/usr/local/lib/python3.8/dist-packages/tensorflow/python/client/session.py in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
1356
1357 if handle is None:
-> 1358 return self._do_call(_run_fn, feeds, fetches, targets, options,
1359 run_metadata)
1360 else:
/usr/local/lib/python3.8/dist-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
1382 '\nsession_config.graph_options.rewrite_options.'
1383 'disable_meta_optimizer = True')
-> 1384 raise type(e)(node_def, op, message)
1385
1386 def _extend_graph(self):
FailedPreconditionError: 2 root error(s) found.
(0) Failed precondition: Error while reading resource variable conv4_block4_3_bn_9/beta from Container: localhost. This could mean that the variable was uninitialized. Not found: Container localhost does not exist. (Could not find resource: localhost/conv4_block4_3_bn_9/beta)
[[node functional_1_5/conv4_block4_3_bn/ReadVariableOp_1 (defined at <ipython-input-113-a25550204dc5>:3) ]]
[[functional_1_5/dropout/cond/then/_0/dropout/GreaterEqual/y/_41]]
(1) Failed precondition: Error while reading resource variable conv4_block4_3_bn_9/beta from Container: localhost. This could mean that the variable was uninitialized. Not found: Container localhost does not exist. (Could not find resource: localhost/conv4_block4_3_bn_9/beta)
[[node functional_1_5/conv4_block4_3_bn/ReadVariableOp_1 (defined at <ipython-input-113-a25550204dc5>:3) ]]
0 successful operations.
0 derived errors ignored.
Errors may have originated from an input operation.
Input Source operations connected to node functional_1_5/conv4_block4_3_bn/ReadVariableOp_1:
conv4_block4_3_bn_9/beta (defined at <ipython-input-111-e21a4c41d093>:12)
Input Source operations connected to node functional_1_5/conv4_block4_3_bn/ReadVariableOp_1:
conv4_block4_3_bn_9/beta (defined at <ipython-input-111-e21a4c41d093>:12)
Original stack trace for 'functional_1_5/conv4_block4_3_bn/ReadVariableOp_1':
File "/usr/lib/python3.8/runpy.py", line 194, in _run_module_as_main
return _run_code(code, main_globals, None,
File "/usr/lib/python3.8/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/usr/local/lib/python3.8/dist-packages/ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "/home/anselmo/.local/lib/python3.8/site-packages/traitlets/config/application.py", line 845, in launch_instance
app.start()
File "/usr/local/lib/python3.8/dist-packages/ipykernel/kernelapp.py", line 612, in start
self.io_loop.start()
File "/home/anselmo/.local/lib/python3.8/site-packages/tornado/platform/asyncio.py", line 149, in start
self.asyncio_loop.run_forever()
File "/usr/lib/python3.8/asyncio/base_events.py", line 570, in run_forever
self._run_once()
File "/usr/lib/python3.8/asyncio/base_events.py", line 1859, in _run_once
handle._run()
File "/usr/lib/python3.8/asyncio/events.py", line 81, in _run
self._context.run(self._callback, *self._args)
File "/home/anselmo/.local/lib/python3.8/site-packages/tornado/ioloop.py", line 690, in <lambda>
lambda f: self._run_callback(functools.partial(callback, future))
File "/home/anselmo/.local/lib/python3.8/site-packages/tornado/ioloop.py", line 743, in _run_callback
ret = callback()
File "/home/anselmo/.local/lib/python3.8/site-packages/tornado/gen.py", line 787, in inner
self.run()
File "/home/anselmo/.local/lib/python3.8/site-packages/tornado/gen.py", line 748, in run
yielded = self.gen.send(value)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/kernelbase.py", line 365, in process_one
yield gen.maybe_future(dispatch(*args))
File "/home/anselmo/.local/lib/python3.8/site-packages/tornado/gen.py", line 209, in wrapper
yielded = next(result)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/kernelbase.py", line 268, in dispatch_shell
yield gen.maybe_future(handler(stream, idents, msg))
File "/home/anselmo/.local/lib/python3.8/site-packages/tornado/gen.py", line 209, in wrapper
yielded = next(result)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/kernelbase.py", line 543, in execute_request
self.do_execute(
File "/home/anselmo/.local/lib/python3.8/site-packages/tornado/gen.py", line 209, in wrapper
yielded = next(result)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/ipkernel.py", line 306, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/zmqshell.py", line 536, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "/home/anselmo/.local/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 2876, in run_cell
result = self._run_cell(
File "/home/anselmo/.local/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 2922, in _run_cell
return runner(coro)
File "/home/anselmo/.local/lib/python3.8/site-packages/IPython/core/async_helpers.py", line 68, in _pseudo_sync_runner
coro.send(None)
File "/home/anselmo/.local/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3145, in run_cell_async
has_raised = await self.run_ast_nodes(code_ast.body, cell_name,
File "/home/anselmo/.local/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3337, in run_ast_nodes
if (await self.run_code(code, result, async_=asy)):
File "/home/anselmo/.local/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-113-a25550204dc5>", line 8, in <module>
logits, probs = resnet2(image, model)
File "<ipython-input-113-a25550204dc5>", line 3, in resnet2
logits = model(image)
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/keras/engine/base_layer_v1.py", line 776, in __call__
outputs = call_fn(cast_inputs, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/keras/engine/functional.py", line 385, in call
return self._run_internal_graph(
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/keras/engine/functional.py", line 508, in _run_internal_graph
outputs = node.layer(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/keras/engine/base_layer_v1.py", line 776, in __call__
outputs = call_fn(cast_inputs, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/keras/layers/normalization.py", line 720, in call
outputs = self._fused_batch_norm(inputs, training=training)
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/keras/layers/normalization.py", line 576, in _fused_batch_norm
output, mean, variance = tf_utils.smart_cond(training, train_op,
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/keras/utils/tf_utils.py", line 64, in smart_cond
return smart_module.smart_cond(
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/framework/smart_cond.py", line 56, in smart_cond
return false_fn()
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/keras/layers/normalization.py", line 558, in _fused_batch_norm_inference
return nn.fused_batch_norm(
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/util/dispatch.py", line 201, in wrapper
return target(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/ops/nn_impl.py", line 1626, in fused_batch_norm
offset = ops.convert_to_tensor(offset, name="offset")
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/framework/ops.py", line 1499, in convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/ops/resource_variable_ops.py", line 1909, in _dense_var_to_tensor
return var._dense_var_to_tensor(dtype=dtype, name=name, as_ref=as_ref) # pylint: disable=protected-access
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/ops/resource_variable_ops.py", line 1326, in _dense_var_to_tensor
return self.value()
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/ops/resource_variable_ops.py", line 555, in value
return self._read_variable_op()
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/ops/resource_variable_ops.py", line 657, in _read_variable_op
result = read_and_set_handle()
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/ops/resource_variable_ops.py", line 647, in read_and_set_handle
result = gen_resource_variable_ops.read_variable_op(self._handle,
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/ops/gen_resource_variable_ops.py", line 490, in read_variable_op
_, _, _op, _outputs = _op_def_library._apply_op_helper(
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/framework/op_def_library.py", line 742, in _apply_op_helper
op = g._create_op_internal(op_type_name, inputs, dtypes=None,
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/framework/ops.py", line 3477, in _create_op_internal
ret = Operation(
File "/usr/local/lib/python3.8/dist-packages/tensorflow/python/framework/ops.py", line 1949, in __init__
self._traceback = tf_stack.extract_stack()
What is wrong with my code? Is the model not initialized? How do I fix it?
P.S. 1: I can call the function resnet2 directly with an input image and it returns both logits and probs. The problem only appears when I run sess.run().
P.S. 2: The model and image used can be found HERE.
Since you are initializing a TensorFlow (version < 2) session with tfc.InteractiveSession(), you need to initialize all variables before running your session by calling:
tfc.initialize_all_variables().run()
(or its non-deprecated equivalent, tfc.global_variables_initializer().run()). This should initialize all variables. It would also explain your observation that you can run the ResNet on its own but not through sess.run().
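Concretely, the call would go after the logits/probs graph is built and before classify is called; a minimal sketch using the variable names from the question:
#Give every variable in the current graph its initial value inside the interactive session
sess.run(tfc.global_variables_initializer())
With an InteractiveSession this could equivalently be written as tfc.global_variables_initializer().run().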
Related
I am trying to write a TensorFlow function that outputs 1 if a value is between minval and maxval, and 0 otherwise.
The graph first finds all values below maxval and all values above minval, then combines the two with a logical AND.
import tensorflow as tf
def betweentf(inp, minval, maxval):
    x = tf.Variable(inp, name='x', dtype=tf.float32)
    z1 = tf.math.greater(x, minval, name='mygreater')
    z2 = tf.math.less(x, maxval, name='myless')
    z = tf.math.logical_and(z1, z2)
    out = tf.cast(z, tf.float32)
    return out
x=tf.Variable([1,2,3,4,5,6,7],name='x',dtype=tf.float32)
r=betweentf(x,1,3)
sess=tf.Session()
sess.run(x.initializer)
result=sess.run(r)
print(result)
sess.close()
I am not sure what the problem is. Here is the error:
---------------------------------------------------------------------------
FailedPreconditionError Traceback (most recent call last)
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\client\session.py in _do_call(self, fn, *args)
1355 try:
-> 1356 return fn(*args)
1357 except errors.OpError as e:
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\client\session.py in _run_fn(feed_dict, fetch_list, target_list, options, run_metadata)
1340 return self._call_tf_sessionrun(
-> 1341 options, feed_dict, fetch_list, target_list, run_metadata)
1342
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\client\session.py in _call_tf_sessionrun(self, options, feed_dict, fetch_list, target_list, run_metadata)
1428 self._session, options, feed_dict, fetch_list, target_list,
-> 1429 run_metadata)
1430
FailedPreconditionError: Attempting to use uninitialized value x_4
[[{{node x_4/read}}]]
During handling of the above exception, another exception occurred:
FailedPreconditionError Traceback (most recent call last)
<ipython-input-5-2d5235662348> in <module>
12 sess=tf.Session()
13 sess.run(x.initializer)
---> 14 result=sess.run(r)
15 print(result)
16 sess.close()
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\client\session.py in run(self, fetches, feed_dict, options, run_metadata)
948 try:
949 result = self._run(None, fetches, feed_dict, options_ptr,
--> 950 run_metadata_ptr)
951 if run_metadata:
952 proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\client\session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
1171 if final_fetches or final_targets or (handle and feed_dict_tensor):
1172 results = self._do_run(handle, final_targets, final_fetches,
-> 1173 feed_dict_tensor, options, run_metadata)
1174 else:
1175 results = []
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\client\session.py in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
1348 if handle is None:
1349 return self._do_call(_run_fn, feeds, fetches, targets, options,
-> 1350 run_metadata)
1351 else:
1352 return self._do_call(_prun_fn, handle, feeds, fetches)
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\client\session.py in _do_call(self, fn, *args)
1368 pass
1369 message = error_interpolation.interpolate(message, self._graph)
-> 1370 raise type(e)(node_def, op, message)
1371
1372 def _extend_graph(self):
FailedPreconditionError: Attempting to use uninitialized value x_4
[[node x_4/read (defined at <ipython-input-5-2d5235662348>:3) ]]
Original stack trace for 'x_4/read':
File "C:\ProgramData\Anaconda3\lib\runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "C:\ProgramData\Anaconda3\lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "C:\ProgramData\Anaconda3\lib\site-packages\ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "C:\ProgramData\Anaconda3\lib\site-packages\traitlets\config\application.py", line 658, in launch_instance
app.start()
File "C:\ProgramData\Anaconda3\lib\site-packages\ipykernel\kernelapp.py", line 505, in start
self.io_loop.start()
File "C:\ProgramData\Anaconda3\lib\site-packages\tornado\platform\asyncio.py", line 148, in start
self.asyncio_loop.run_forever()
File "C:\ProgramData\Anaconda3\lib\asyncio\base_events.py", line 539, in run_forever
self._run_once()
File "C:\ProgramData\Anaconda3\lib\asyncio\base_events.py", line 1775, in _run_once
handle._run()
File "C:\ProgramData\Anaconda3\lib\asyncio\events.py", line 88, in _run
self._context.run(self._callback, *self._args)
File "C:\ProgramData\Anaconda3\lib\site-packages\tornado\ioloop.py", line 690, in <lambda>
lambda f: self._run_callback(functools.partial(callback, future))
File "C:\ProgramData\Anaconda3\lib\site-packages\tornado\ioloop.py", line 743, in _run_callback
ret = callback()
File "C:\ProgramData\Anaconda3\lib\site-packages\tornado\gen.py", line 787, in inner
self.run()
File "C:\ProgramData\Anaconda3\lib\site-packages\tornado\gen.py", line 748, in run
yielded = self.gen.send(value)
File "C:\ProgramData\Anaconda3\lib\site-packages\ipykernel\kernelbase.py", line 365, in process_one
yield gen.maybe_future(dispatch(*args))
File "C:\ProgramData\Anaconda3\lib\site-packages\tornado\gen.py", line 209, in wrapper
yielded = next(result)
File "C:\ProgramData\Anaconda3\lib\site-packages\ipykernel\kernelbase.py", line 272, in dispatch_shell
yield gen.maybe_future(handler(stream, idents, msg))
File "C:\ProgramData\Anaconda3\lib\site-packages\tornado\gen.py", line 209, in wrapper
yielded = next(result)
File "C:\ProgramData\Anaconda3\lib\site-packages\ipykernel\kernelbase.py", line 542, in execute_request
user_expressions, allow_stdin,
File "C:\ProgramData\Anaconda3\lib\site-packages\tornado\gen.py", line 209, in wrapper
yielded = next(result)
File "C:\ProgramData\Anaconda3\lib\site-packages\ipykernel\ipkernel.py", line 294, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "C:\ProgramData\Anaconda3\lib\site-packages\ipykernel\zmqshell.py", line 536, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 2854, in run_cell
raw_cell, store_history, silent, shell_futures)
File "C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 2880, in _run_cell
return runner(coro)
File "C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\async_helpers.py", line 68, in _pseudo_sync_runner
coro.send(None)
File "C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 3057, in run_cell_async
interactivity=interactivity, compiler=compiler, result=result)
File "C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 3248, in run_ast_nodes
if (await self.run_code(code, result, async_=asy)):
File "C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 3325, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-5-2d5235662348>", line 11, in <module>
r=betweentf(x,1,3)
File "<ipython-input-5-2d5235662348>", line 3, in betweentf
x=tf.Variable(inp,name='x',dtype=tf.float32)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\variables.py", line 259, in __call__
return cls._variable_v1_call(*args, **kwargs)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\variables.py", line 220, in _variable_v1_call
shape=shape)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\variables.py", line 198, in <lambda>
previous_getter = lambda **kwargs: default_variable_creator(None, **kwargs)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\variable_scope.py", line 2511, in default_variable_creator
shape=shape)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\variables.py", line 263, in __call__
return super(VariableMetaclass, cls).__call__(*args, **kwargs)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\variables.py", line 1568, in __init__
shape=shape)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\variables.py", line 1755, in _init_from_args
self._snapshot = array_ops.identity(self._variable, name="read")
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\util\dispatch.py", line 180, in wrapper
return target(*args, **kwargs)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\array_ops.py", line 86, in identity
ret = gen_array_ops.identity(input, name=name)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\gen_array_ops.py", line 4996, in identity
"Identity", input=input, name=name)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\framework\op_def_library.py", line 788, in _apply_op_helper
op_def=op_def)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\util\deprecation.py", line 507, in new_func
return func(*args, **kwargs)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\framework\ops.py", line 3616, in create_op
op_def=op_def)
File "C:\Users\r.jack\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\framework\ops.py", line 2005, in __init__
self._traceback = tf_stack.extract_stack()
Question 1: How can I fix it?
Question 2: Is there an easier way to implement this?
Question 1
I am not sure what you expect from creating another tf.Variable (x) out of a tf.Variable (inp) inside the function. If you remove that, your code should work.
import tensorflow as tf
def betweentf(inp, minval, maxval):
    z1 = tf.math.greater(inp, minval, name='mygreater')
    z2 = tf.math.less(inp, maxval, name='myless')
    z = tf.math.logical_and(z1, z2)
    out = tf.cast(z, tf.float32)
    return out
x=tf.Variable([1,2,3,4,5,6,7],name='x',dtype=tf.float32)
r=betweentf(x,1,3)
sess=tf.Session()
sess.run(x.initializer)
result=sess.run(r)
print(result)
sess.close()
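For the example input [1, 2, 3, 4, 5, 6, 7] with minval=1 and maxval=3, this prints [0. 1. 0. 0. 0. 0. 0.], since only 2 lies strictly between 1 and 3.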
Question 2
I can't really think of another way.
I want to test a new network structure which requires changing some of the elements of a tensor in a Keras model. If I could find a way to convert/copy the tensor to a NumPy array and then later transform it back into a tensor, I should be able to make the model work.
I tried using the .eval() method to convert the tensor to a NumPy array, but it gives me errors. I am also using this model with a DQN agent from keras-rl, so it is possible the error comes from how keras-rl uses the model. Here is my code:
def Filter_Features(F):
    sess = Session()
    with sess.as_default():
        F_np = F.eval()
    min_pos = np.argmin(F_np)
    F_np[min_pos] = 0
    return convert_to_tensor(F_np)

def create_model(nb_actions, num_frames=4):
    inputs = Input(shape=(num_frames, 84, 84))
    F = Conv2D(16, (8, 8), activation='relu', strides=(4, 4), data_format="channels_first")(inputs)
    ...
    F_k = Lambda(Filter_Features)(F)
    actions = Dense(nb_actions, activation='linear')(F_k)
    nnf_model = Model(inputs=inputs, outputs=actions)
    return nnf_model
Note that the code runs if I remove the Lambda layer, so the issue must originate there. I received this error:
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1356, in _do_call
return fn(*args)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1341, in _run_fn
options, feed_dict, fetch_list, target_list, run_metadata)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1429, in _call_tf_sessionrun
run_metadata)
tensorflow.python.framework.errors_impl.FailedPreconditionError: Attempting to use uninitialized value conv2d_1/bias
[[{{node conv2d_1/bias/read}}]]
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "Atari_Test.py", line 32, in <module>
model = Model_StackExchange.create_model(nb_actions = nb_actions)
File "/Users/j/deep-rl/Model_StackExchange.py", line 26, in create_model
F_k = Lambda(Filter_Features)(F)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/keras/engine/base_layer.py", line 457, in __call__
output = self.call(inputs, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/keras/layers/core.py", line 687, in call
return self.function(inputs, **arguments)
File "/Users/j/deep-rl/Model_StackExchange.py", line 11, in Filter_Features
F_np = F.eval()
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 731, in eval
return _eval_using_default_session(self, feed_dict, self.graph, session)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 5579, in _eval_using_default_session
return session.run(tensors, feed_dict)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 950, in run
run_metadata_ptr)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1173, in _run
feed_dict_tensor, options, run_metadata)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1350, in _do_run
run_metadata)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1370, in _do_call
raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.FailedPreconditionError: Attempting to use uninitialized value conv2d_1/bias
[[node conv2d_1/bias/read (defined at /Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/keras/backend/tensorflow_backend.py:402) ]]
Original stack trace for 'conv2d_1/bias/read':
File "Atari_Test.py", line 32, in <module>
model = Model_StackExchange.create_model(nb_actions = nb_actions)
File "/Users/j/deep-rl/Model_StackExchange.py", line 21, in create_model
F = Conv2D(16,(8,8), activation='relu', strides=(4,4), data_format = "channels_first")(inputs)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/keras/engine/base_layer.py", line 431, in __call__
self.build(unpack_singleton(input_shapes))
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/keras/layers/convolutional.py", line 147, in build
constraint=self.bias_constraint)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/keras/legacy/interfaces.py", line 91, in wrapper
return func(*args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/keras/engine/base_layer.py", line 252, in add_weight
constraint=constraint)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/keras/backend/tensorflow_backend.py", line 402, in variable
v = tf.Variable(value, dtype=tf.as_dtype(dtype), name=name)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/ops/variables.py", line 259, in __call__
return cls._variable_v1_call(*args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/ops/variables.py", line 220, in _variable_v1_call
shape=shape)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/ops/variables.py", line 198, in <lambda>
previous_getter = lambda **kwargs: default_variable_creator(None, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/ops/variable_scope.py", line 2511, in default_variable_creator
shape=shape)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/ops/variables.py", line 263, in __call__
return super(VariableMetaclass, cls).__call__(*args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/ops/variables.py", line 1568, in __init__
shape=shape)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/ops/variables.py", line 1755, in _init_from_args
self._snapshot = array_ops.identity(self._variable, name="read")
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/util/dispatch.py", line 180, in wrapper
return target(*args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/ops/array_ops.py", line 86, in identity
ret = gen_array_ops.identity(input, name=name)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/ops/gen_array_ops.py", line 4253, in identity
"Identity", input=input, name=name)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/framework/op_def_library.py", line 788, in _apply_op_helper
op_def=op_def)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/util/deprecation.py", line 507, in new_func
return func(*args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 3616, in create_op
op_def=op_def)
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 2005, in __init__
self._traceback = tf_stack.extract_stack()
Please let me know if you know how to access and change the elements of a tensor within a keras model. Thank you.
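For reference, a graph-only variant of this idea would avoid evaluating a symbolic tensor during model construction. A minimal sketch, not from the original post: filter_features_symbolic is a hypothetical name, and it assumes F has already been flattened to shape (batch, features), e.g. by a Flatten layer:
import tensorflow as tf

def filter_features_symbolic(F):
    #Zero out each sample's minimum entry without leaving the graph
    min_idx = tf.argmin(F, axis=1)
    mask = 1.0 - tf.one_hot(min_idx, tf.shape(F)[1], dtype=F.dtype)
    return F * mask
It could then be passed to the Lambda layer in place of Filter_Features.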
Trying to run: https://github.com/huseinzol05/Stock-Prediction-Models/blob/master/deep-learning/17.lstm-seq2seq-bidirectional-attention.ipynb
With the data: https://gist.github.com/TensorTom/d37f4bd605d6a13dfc75015c5e150b82
But this happens during the training epochs (or just at, or right after, the last epoch):
---------------------------------------------------------------------------
UnimplementedError Traceback (most recent call last)
/usr/lib/python3.7/site-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
1333 try:
-> 1334 return fn(*args)
1335 except errors.OpError as e:
/usr/lib/python3.7/site-packages/tensorflow/python/client/session.py in _run_fn(feed_dict, fetch_list, target_list, options, run_metadata)
1318 return self._call_tf_sessionrun(
-> 1319 options, feed_dict, fetch_list, target_list, run_metadata)
1320
/usr/lib/python3.7/site-packages/tensorflow/python/client/session.py in _call_tf_sessionrun(self, options, feed_dict, fetch_list, target_list, run_metadata)
1406 self._session, options, feed_dict, fetch_list, target_list,
-> 1407 run_metadata)
1408
UnimplementedError: TensorArray has size zero, but element shape [?,128] is not fully defined. Currently only static shapes are supported when packing zero-size TensorArrays.
[[{{node decoder/bidirectional_rnn/fw/fw/TensorArrayStack/TensorArrayGatherV3}}]]
[[{{node decoder/bidirectional_rnn/bw/bw/while/Exit_3}}]]
During handling of the above exception, another exception occurred:
UnimplementedError Traceback (most recent call last)
<ipython-input-8-07a23d0b1dbd> in <module>
22 modelnn.X: np.expand_dims(df_log.iloc[upper_b:], axis = 0),
23 modelnn.backward_hidden_layer: init_value_backward,
---> 24 modelnn.forward_hidden_layer: init_value_forward,
25 },
26 )
/usr/lib/python3.7/site-packages/tensorflow/python/client/session.py in run(self, fetches, feed_dict, options, run_metadata)
927 try:
928 result = self._run(None, fetches, feed_dict, options_ptr,
--> 929 run_metadata_ptr)
930 if run_metadata:
931 proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
/usr/lib/python3.7/site-packages/tensorflow/python/client/session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
1150 if final_fetches or final_targets or (handle and feed_dict_tensor):
1151 results = self._do_run(handle, final_targets, final_fetches,
-> 1152 feed_dict_tensor, options, run_metadata)
1153 else:
1154 results = []
/usr/lib/python3.7/site-packages/tensorflow/python/client/session.py in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
1326 if handle is None:
1327 return self._do_call(_run_fn, feeds, fetches, targets, options,
-> 1328 run_metadata)
1329 else:
1330 return self._do_call(_prun_fn, handle, feeds, fetches)
/usr/lib/python3.7/site-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
1346 pass
1347 message = error_interpolation.interpolate(message, self._graph)
-> 1348 raise type(e)(node_def, op, message)
1349
1350 def _extend_graph(self):
UnimplementedError: TensorArray has size zero, but element shape [?,128] is not fully defined. Currently only static shapes are supported when packing zero-size TensorArrays.
[[node decoder/bidirectional_rnn/fw/fw/TensorArrayStack/TensorArrayGatherV3 (defined at <ipython-input-5-7fc0ccd4ec49>:97) ]]
[[node decoder/bidirectional_rnn/bw/bw/while/Exit_3 (defined at <ipython-input-5-7fc0ccd4ec49>:97) ]]
Caused by op 'decoder/bidirectional_rnn/fw/fw/TensorArrayStack/TensorArrayGatherV3', defined at:
File "/usr/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/usr/lib/python3.7/site-packages/ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "/usr/lib/python3.7/site-packages/traitlets/config/application.py", line 658, in launch_instance
app.start()
File "/usr/lib/python3.7/site-packages/ipykernel/kernelapp.py", line 505, in start
self.io_loop.start()
File "/usr/lib/python3.7/site-packages/tornado/platform/asyncio.py", line 132, in start
self.asyncio_loop.run_forever()
File "/usr/lib/python3.7/asyncio/base_events.py", line 539, in run_forever
self._run_once()
File "/usr/lib/python3.7/asyncio/base_events.py", line 1775, in _run_once
handle._run()
File "/usr/lib/python3.7/asyncio/events.py", line 88, in _run
self._context.run(self._callback, *self._args)
File "/usr/lib/python3.7/site-packages/tornado/ioloop.py", line 758, in _run_callback
ret = callback()
File "/usr/lib/python3.7/site-packages/tornado/stack_context.py", line 300, in null_wrapper
return fn(*args, **kwargs)
File "/usr/lib/python3.7/site-packages/tornado/gen.py", line 1233, in inner
self.run()
File "/usr/lib/python3.7/site-packages/tornado/gen.py", line 1147, in run
yielded = self.gen.send(value)
File "/usr/lib/python3.7/site-packages/ipykernel/kernelbase.py", line 370, in dispatch_queue
yield self.process_one()
File "/usr/lib/python3.7/site-packages/tornado/gen.py", line 346, in wrapper
runner = Runner(result, future, yielded)
File "/usr/lib/python3.7/site-packages/tornado/gen.py", line 1080, in __init__
self.run()
File "/usr/lib/python3.7/site-packages/tornado/gen.py", line 1147, in run
yielded = self.gen.send(value)
File "/usr/lib/python3.7/site-packages/ipykernel/kernelbase.py", line 357, in process_one
yield gen.maybe_future(dispatch(*args))
File "/usr/lib/python3.7/site-packages/tornado/gen.py", line 326, in wrapper
yielded = next(result)
File "/usr/lib/python3.7/site-packages/ipykernel/kernelbase.py", line 267, in dispatch_shell
yield gen.maybe_future(handler(stream, idents, msg))
File "/usr/lib/python3.7/site-packages/tornado/gen.py", line 326, in wrapper
yielded = next(result)
File "/usr/lib/python3.7/site-packages/ipykernel/kernelbase.py", line 534, in execute_request
user_expressions, allow_stdin,
File "/usr/lib/python3.7/site-packages/tornado/gen.py", line 326, in wrapper
yielded = next(result)
File "/usr/lib/python3.7/site-packages/ipykernel/ipkernel.py", line 294, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "/usr/lib/python3.7/site-packages/ipykernel/zmqshell.py", line 536, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "/usr/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 2819, in run_cell
raw_cell, store_history, silent, shell_futures)
File "/usr/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 2845, in _run_cell
return runner(coro)
File "/usr/lib/python3.7/site-packages/IPython/core/async_helpers.py", line 67, in _pseudo_sync_runner
coro.send(None)
File "/usr/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3020, in run_cell_async
interactivity=interactivity, compiler=compiler, result=result)
File "/usr/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3185, in run_ast_nodes
if (yield from self.run_code(code, result)):
File "/usr/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3267, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-6-2e75d5202b26>", line 2, in <module>
modelnn = Model(0.01, num_layers, df_log.shape[1], size_layer, df_log.shape[1], dropout_rate)
File "<ipython-input-5-7fc0ccd4ec49>", line 97, in __init__
dtype = tf.float32,
File "/usr/lib/python3.7/site-packages/tensorflow/python/util/deprecation.py", line 324, in new_func
return func(*args, **kwargs)
File "/usr/lib/python3.7/site-packages/tensorflow/python/ops/rnn.py", line 443, in bidirectional_dynamic_rnn
time_major=time_major, scope=fw_scope)
File "/usr/lib/python3.7/site-packages/tensorflow/python/util/deprecation.py", line 324, in new_func
return func(*args, **kwargs)
File "/usr/lib/python3.7/site-packages/tensorflow/python/ops/rnn.py", line 671, in dynamic_rnn
dtype=dtype)
File "/usr/lib/python3.7/site-packages/tensorflow/python/ops/rnn.py", line 883, in _dynamic_rnn_loop
final_outputs = tuple(ta.stack() for ta in output_final_ta)
File "/usr/lib/python3.7/site-packages/tensorflow/python/ops/rnn.py", line 883, in <genexpr>
final_outputs = tuple(ta.stack() for ta in output_final_ta)
File "/usr/lib/python3.7/site-packages/tensorflow/python/ops/tensor_array_ops.py", line 1128, in stack
return self._implementation.stack(name=name)
File "/usr/lib/python3.7/site-packages/tensorflow/python/ops/tensor_array_ops.py", line 292, in stack
return self.gather(math_ops.range(0, self.size()), name=name)
File "/usr/lib/python3.7/site-packages/tensorflow/python/ops/tensor_array_ops.py", line 306, in gather
element_shape=element_shape)
File "/usr/lib/python3.7/site-packages/tensorflow/python/ops/gen_data_flow_ops.py", line 6291, in tensor_array_gather_v3
element_shape=element_shape, name=name)
File "/usr/lib/python3.7/site-packages/tensorflow/python/framework/op_def_library.py", line 788, in _apply_op_helper
op_def=op_def)
File "/usr/lib/python3.7/site-packages/tensorflow/python/util/deprecation.py", line 507, in new_func
return func(*args, **kwargs)
File "/usr/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 3300, in create_op
op_def=op_def)
File "/usr/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 1801, in __init__
self._traceback = tf_stack.extract_stack()
UnimplementedError (see above for traceback): TensorArray has size zero, but element shape [?,128] is not fully defined. Currently only static shapes are supported when packing zero-size TensorArrays.
[[node decoder/bidirectional_rnn/fw/fw/TensorArrayStack/TensorArrayGatherV3 (defined at <ipython-input-5-7fc0ccd4ec49>:97) ]]
[[node decoder/bidirectional_rnn/bw/bw/while/Exit_3 (defined at <ipython-input-5-7fc0ccd4ec49>:97) ]]
I talked to someone who was using the same script, and they suggested it was because my dataset had too many rows; they were able to avoid this error by limiting the data to 400 rows. That did indeed get rid of the error, but it doesn't solve the underlying problem.
Using fewer than 400 rows is fine for daily OHLCV data, but if I want to use a lower timeframe, say 1-minute OHLCV data, 400 rows isn't going to cut it.
How can I get this model to accept an arbitrary number of rows of data?
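For what it's worth, one way to operationalize the 400-row workaround described above would be to feed the data in fixed-size windows and process it window by window. A minimal sketch (iter_windows is a hypothetical helper; whether per-window processing fits this particular notebook is untested):
def iter_windows(df, max_rows=400):
    #Yield consecutive slices of at most max_rows rows of a DataFrame
    for start in range(0, len(df), max_rows):
        chunk = df.iloc[start:start + max_rows]
        if len(chunk) > 1:  #skip a degenerate tail
            yield chunk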
So it seems I'm doing something wrong here and I would appreciate some help. When I input my validation set into the network, the dimensions are not the same as the ones used for training. I would've expected "shape[0] = [1363,300] vs. shape[1] = [128,300]" since my dimension for the word embedding is 300.
def make_cell():
    cell = tf.contrib.rnn.BasicLSTMCell(lstmUnits)
    if dropout_rate.eval(session=tf.Session()) == 1:
        cell = tf.contrib.rnn.DropoutWrapper(cell, output_keep_prob=dropout_rate)
    return cell
labels = tf.placeholder(tf.float32, [None, numClasses])
input_data = tf.placeholder(tf.int32, [None, maxSeqLength])
dropout_rate = tf.placeholder_with_default(1.0, shape=())
rnn_input = tf.Variable(tf.zeros([batchSize, maxSeqLength, numDimensions]), dtype=tf.float32)
rnn_input = tf.nn.embedding_lookup(vectors_fasttext, input_data) # fastext lookup
multiLSTMCell = tf.contrib.rnn.MultiRNNCell([make_cell() for _ in range(num_layers)], state_is_tuple=True)
init_state = state = multiLSTMCell.zero_state(batchSize, tf.float32)
rnn_output, _ = tf.nn.dynamic_rnn(multiLSTMCell, rnn_input, initial_state=init_state, dtype=tf.float32)
# training
for i in range(iterations):
    nextBatch, nextBatchLabels = getBatch(train_ids, train_labels)
    _, batch_loss = sess.run([optimizer, loss], feed_dict={input_data: nextBatch,
                                                           labels: nextBatchLabels,
                                                           dropout_rate: 0.5})
    if (i % 500 == 0 and i != 0):
        acc, prec, rec, f1 = sess.run([accuracy, pre_op, rec_op, f1_op],
                                      feed_dict={input_data: validation_data, labels: validation_data_labels})
print(validation_data_labels.shape)
print(validation_data.shape)
(1363, 2)
(1363, 20)
And I'm getting this error:
InvalidArgumentError Traceback (most recent call last)
c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\client\session.py in _do_call(self, fn, *args)
1322 try:
-> 1323 return fn(*args)
1324 except errors.OpError as e:
c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\client\session.py in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
1301 feed_dict, fetch_list, target_list,
-> 1302 status, run_metadata)
1303
c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\framework\errors_impl.py in __exit__(self, type_arg, value_arg, traceback_arg)
472 compat.as_text(c_api.TF_Message(self.status.status)),
--> 473 c_api.TF_GetCode(self.status.status))
474 # Delete the underlying status object from memory otherwise it stays alive
InvalidArgumentError: ConcatOp : Dimensions of inputs should match: shape[0] = [1363,300] vs. shape[1] = [128,128]
[[Node: rnn/while/rnn/multi_rnn_cell/cell_0/cell_0/basic_lstm_cell/concat = ConcatV2[N=2, T=DT_FLOAT, Tidx=DT_INT32, _device="/job:localhost/replica:0/task:0/device:CPU:0"](rnn/while/TensorArrayReadV3, rnn/while/Identity_3, rnn/while/rnn/multi_rnn_cell/cell_0/cell_0/basic_lstm_cell/concat/axis)]]
During handling of the above exception, another exception occurred:
InvalidArgumentError Traceback (most recent call last)
<ipython-input-24-2e91b9d38b41> in <module>()
19
20 acc, prec, rec, f1 = sess.run([accuracy, pre_op, rec_op, f1_op],
---> 21 feed_dict={input_data: validation_data, labels: validation_data_labels})
22
23 print('Validation scores at iteration', i, ': accuracy', acc,
c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\client\session.py in run(self, fetches, feed_dict, options, run_metadata)
887 try:
888 result = self._run(None, fetches, feed_dict, options_ptr,
--> 889 run_metadata_ptr)
890 if run_metadata:
891 proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\client\session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
1118 if final_fetches or final_targets or (handle and feed_dict_tensor):
1119 results = self._do_run(handle, final_targets, final_fetches,
-> 1120 feed_dict_tensor, options, run_metadata)
1121 else:
1122 results = []
c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\client\session.py in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
1315 if handle is None:
1316 return self._do_call(_run_fn, self._session, feeds, fetches, targets,
-> 1317 options, run_metadata)
1318 else:
1319 return self._do_call(_prun_fn, self._session, handle, feeds, fetches)
c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\client\session.py in _do_call(self, fn, *args)
1334 except KeyError:
1335 pass
-> 1336 raise type(e)(node_def, op, message)
1337
1338 def _extend_graph(self):
InvalidArgumentError: ConcatOp : Dimensions of inputs should match: shape[0] = [1363,300] vs. shape[1] = [128,128]
[[Node: rnn/while/rnn/multi_rnn_cell/cell_0/cell_0/basic_lstm_cell/concat = ConcatV2[N=2, T=DT_FLOAT, Tidx=DT_INT32, _device="/job:localhost/replica:0/task:0/device:CPU:0"](rnn/while/TensorArrayReadV3, rnn/while/Identity_3, rnn/while/rnn/multi_rnn_cell/cell_0/cell_0/basic_lstm_cell/concat/axis)]]
Caused by op 'rnn/while/rnn/multi_rnn_cell/cell_0/cell_0/basic_lstm_cell/concat', defined at:
File "c:\users\tony\appdata\local\programs\python\python36\lib\runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "c:\users\tony\appdata\local\programs\python\python36\lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\traitlets\config\application.py", line 658, in launch_instance
app.start()
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\ipykernel\kernelapp.py", line 477, in start
ioloop.IOLoop.instance().start()
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\zmq\eventloop\ioloop.py", line 177, in start
super(ZMQIOLoop, self).start()
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tornado\ioloop.py", line 888, in start
handler_func(fd_obj, events)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tornado\stack_context.py", line 277, in null_wrapper
return fn(*args, **kwargs)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\zmq\eventloop\zmqstream.py", line 440, in _handle_events
self._handle_recv()
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\zmq\eventloop\zmqstream.py", line 472, in _handle_recv
self._run_callback(callback, msg)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\zmq\eventloop\zmqstream.py", line 414, in _run_callback
callback(*args, **kwargs)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tornado\stack_context.py", line 277, in null_wrapper
return fn(*args, **kwargs)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\ipykernel\kernelbase.py", line 283, in dispatcher
return self.dispatch_shell(stream, msg)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\ipykernel\kernelbase.py", line 235, in dispatch_shell
handler(stream, idents, msg)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\ipykernel\kernelbase.py", line 399, in execute_request
user_expressions, allow_stdin)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\ipykernel\ipkernel.py", line 196, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\ipykernel\zmqshell.py", line 533, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\IPython\core\interactiveshell.py", line 2728, in run_cell
interactivity=interactivity, compiler=compiler, result=result)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\IPython\core\interactiveshell.py", line 2850, in run_ast_nodes
if self.run_code(code, result):
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\IPython\core\interactiveshell.py", line 2910, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-22-54e19d6ef3f0>", line 17, in <module>
rnn_output, _ = tf.nn.dynamic_rnn(multiLSTMCell, rnn_input, initial_state=init_state, dtype=tf.float32)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn.py", line 614, in dynamic_rnn
dtype=dtype)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn.py", line 777, in _dynamic_rnn_loop
swap_memory=swap_memory)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\control_flow_ops.py", line 2816, in while_loop
result = loop_context.BuildLoop(cond, body, loop_vars, shape_invariants)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\control_flow_ops.py", line 2640, in BuildLoop
pred, body, original_loop_vars, loop_vars, shape_invariants)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\control_flow_ops.py", line 2590, in _BuildLoop
body_result = body(*packed_vars_for_body)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn.py", line 762, in _time_step
(output, new_state) = call_cell()
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn.py", line 748, in <lambda>
call_cell = lambda: cell(input_t, state)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py", line 183, in __call__
return super(RNNCell, self).__call__(inputs, state)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\layers\base.py", line 575, in __call__
outputs = self.call(inputs, *args, **kwargs)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py", line 1066, in call
cur_inp, new_state = cell(cur_inp, cur_state)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py", line 891, in __call__
output, new_state = self._cell(inputs, state, scope)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py", line 183, in __call__
return super(RNNCell, self).__call__(inputs, state)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\layers\base.py", line 575, in __call__
outputs = self.call(inputs, *args, **kwargs)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py", line 441, in call
value=self._linear([inputs, h]), num_or_size_splits=4, axis=1)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py", line 1189, in __call__
res = math_ops.matmul(array_ops.concat(args, 1), self._weights)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\array_ops.py", line 1099, in concat
return gen_array_ops._concat_v2(values=values, axis=axis, name=name)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\ops\gen_array_ops.py", line 705, in _concat_v2
"ConcatV2", values=values, axis=axis, name=name)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\framework\op_def_library.py", line 787, in _apply_op_helper
op_def=op_def)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\framework\ops.py", line 2956, in create_op
op_def=op_def)
File "c:\users\tony\appdata\local\programs\python\python36\lib\site-packages\tensorflow\python\framework\ops.py", line 1470, in __init__
self._traceback = self._graph._extract_stack() # pylint: disable=protected-access
InvalidArgumentError (see above for traceback): ConcatOp : Dimensions of inputs should match: shape[0] = [1363,300] vs. shape[1] = [128,128]
[[Node: rnn/while/rnn/multi_rnn_cell/cell_0/cell_0/basic_lstm_cell/concat = ConcatV2[N=2, T=DT_FLOAT, Tidx=DT_INT32, _device="/job:localhost/replica:0/task:0/device:CPU:0"](rnn/while/TensorArrayReadV3, rnn/while/Identity_3, rnn/while/rnn/multi_rnn_cell/cell_0/cell_0/basic_lstm_cell/concat/axis)]]
Thank you in advance for any help.
From a quick look: you define
rnn_input = tf.Variable(tf.zeros([batchSize, maxSeqLength, numDimensions]), dtype=tf.float32)
which depends on the batch size (128), but you then feed validation data of length 1363.
I guess
rnn_input = tf.Variable(tf.zeros([tf.shape(input_data)[0], maxSeqLength, numDimensions]), dtype=tf.float32)
might solve your issue, but I am afraid I can't check :)
You can also split the validation set into batches of the same size as the training batches and then combine the results...
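A minimal sketch of that last suggestion (run_in_batches is a hypothetical helper; it assumes every fetched value is a scalar metric and it drops the ragged tail so each feed matches the graph's fixed batch size):
import numpy as np

def run_in_batches(sess, fetches, data, data_labels, batch_size=128):
    #Evaluate scalar metrics batch by batch and average the results
    results = []
    for start in range(0, len(data) - batch_size + 1, batch_size):
        feed = {input_data: data[start:start + batch_size],
                labels: data_labels[start:start + batch_size]}
        results.append(sess.run(fetches, feed_dict=feed))
    return np.mean(results, axis=0)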
I'm trying to read some .wav files with TensorFlow, using the following code:
import tensorflow as tf
filenames = tf.train.match_filenames_once('audio_dataset/*.wav')
count_num_files = tf.size(filenames)
filename_queue = tf.train.string_input_producer(filenames)
reader = tf.WholeFileReader()
filename, file_contents = reader.read(filename_queue)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    num_files = sess.run(count_num_files)
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coord)
    for i in range(num_files):
        audio_file = sess.run(filename)
        print(audio_file)
I'm running TensorFlow in a Python 3.5 environment, using a Jupyter notebook.
In order to read the audio files I need a dependency, the Bregman Toolkit, which requires Python 2.x.
The Bregman Toolkit documentation says you can set up a Python 2.x kernel alongside Python 3.x by running:
$ python2 -m pip install ipykernel
$ python2 -m ipykernel install --user
which I did. Jupyter now offers the option of starting either a 2.x or a 3.x kernel.
But after all that setup, when I try to run the code, I get this long traceback:
---------------------------------------------------------------------------
FailedPreconditionError Traceback (most recent call last)
/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
1038 try:
-> 1039 return fn(*args)
1040 except errors.OpError as e:
/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/client/session.py in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
1020 feed_dict, fetch_list, target_list,
-> 1021 status, run_metadata)
1022
/Users/me/anaconda/envs/py35/lib/python3.5/contextlib.py in __exit__(self, type, value, traceback)
65 try:
---> 66 next(self.gen)
67 except StopIteration:
/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/framework/errors_impl.py in raise_exception_on_not_ok_status()
465 compat.as_text(pywrap_tensorflow.TF_Message(status)),
--> 466 pywrap_tensorflow.TF_GetCode(status))
467 finally:
FailedPreconditionError: Attempting to use uninitialized value matching_filenames_7
[[Node: matching_filenames_7/read = Identity[T=DT_STRING, _class=["loc:#matching_filenames_7"], _device="/job:localhost/replica:0/task:0/cpu:0"](matching_filenames_7)]]
During handling of the above exception, another exception occurred:
FailedPreconditionError Traceback (most recent call last)
<ipython-input-12-a6f1ad8baaa4> in <module>()
9 with tf.Session() as sess:
10 sess.run(tf.global_variables_initializer())
---> 11 num_files = sess.run(count_num_files)
12
13 coord = tf.train.Coordinator()
/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/client/session.py in run(self, fetches, feed_dict, options, run_metadata)
776 try:
777 result = self._run(None, fetches, feed_dict, options_ptr,
--> 778 run_metadata_ptr)
779 if run_metadata:
780 proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/client/session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
980 if final_fetches or final_targets:
981 results = self._do_run(handle, final_targets, final_fetches,
--> 982 feed_dict_string, options, run_metadata)
983 else:
984 results = []
/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/client/session.py in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
1030 if handle is None:
1031 return self._do_call(_run_fn, self._session, feed_dict, fetch_list,
-> 1032 target_list, options, run_metadata)
1033 else:
1034 return self._do_call(_prun_fn, self._session, handle, feed_dict,
/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
1050 except KeyError:
1051 pass
-> 1052 raise type(e)(node_def, op, message)
1053
1054 def _extend_graph(self):
FailedPreconditionError: Attempting to use uninitialized value matching_filenames_7
[[Node: matching_filenames_7/read = Identity[T=DT_STRING, _class=["loc:#matching_filenames_7"], _device="/job:localhost/replica:0/task:0/cpu:0"](matching_filenames_7)]]
Caused by op 'matching_filenames_7/read', defined at:
File "/Users/me/anaconda/envs/py35/lib/python3.5/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/Users/me/anaconda/envs/py35/lib/python3.5/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/traitlets/config/application.py", line 658, in launch_instance
app.start()
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/ipykernel/kernelapp.py", line 477, in start
ioloop.IOLoop.instance().start()
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/zmq/eventloop/ioloop.py", line 177, in start
super(ZMQIOLoop, self).start()
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tornado/ioloop.py", line 888, in start
handler_func(fd_obj, events)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tornado/stack_context.py", line 277, in null_wrapper
return fn(*args, **kwargs)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/zmq/eventloop/zmqstream.py", line 440, in _handle_events
self._handle_recv()
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/zmq/eventloop/zmqstream.py", line 472, in _handle_recv
self._run_callback(callback, msg)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/zmq/eventloop/zmqstream.py", line 414, in _run_callback
callback(*args, **kwargs)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tornado/stack_context.py", line 277, in null_wrapper
return fn(*args, **kwargs)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/ipykernel/kernelbase.py", line 283, in dispatcher
return self.dispatch_shell(stream, msg)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/ipykernel/kernelbase.py", line 235, in dispatch_shell
handler(stream, idents, msg)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/ipykernel/kernelbase.py", line 399, in execute_request
user_expressions, allow_stdin)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/ipykernel/ipkernel.py", line 196, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/ipykernel/zmqshell.py", line 533, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/IPython/core/interactiveshell.py", line 2717, in run_cell
interactivity=interactivity, compiler=compiler, result=result)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/IPython/core/interactiveshell.py", line 2821, in run_ast_nodes
if self.run_code(code, result):
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/IPython/core/interactiveshell.py", line 2881, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-12-a6f1ad8baaa4>", line 3, in <module>
filenames = tf.train.match_filenames_once('audio_dataset/*.wav')
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/training/input.py", line 68, in match_filenames_once
collections=[ops.GraphKeys.LOCAL_VARIABLES])
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/ops/variables.py", line 197, in __init__
expected_shape=expected_shape)
File "/Users/meme/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/ops/variables.py", line 316, in _init_from_args
self._snapshot = array_ops.identity(self._variable, name="read")
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/ops/gen_array_ops.py", line 1338, in identity
result = _op_def_lib.apply_op("Identity", input=input, name=name)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/framework/op_def_library.py", line 768, in apply_op
op_def=op_def)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/framework/ops.py", line 2336, in create_op
original_op=self._default_original_op, op_def=op_def)
File "/Users/me/anaconda/envs/py35/lib/python3.5/site-packages/tensorflow/python/framework/ops.py", line 1228, in __init__
self._traceback = _extract_stack()
FailedPreconditionError (see above for traceback): Attempting to use uninitialized value matching_filenames_7
[[Node: matching_filenames_7/read = Identity[T=DT_STRING, _class=["loc:#matching_filenames_7"], _device="/job:localhost/replica:0/task:0/cpu:0"](matching_filenames_7)]]
Can anyone please point me in the right direction?
The immediate fix is to add the following line:
sess.run(tf.local_variables_initializer())
...before this line, which is where the uninitialized variable is first read:
num_files = sess.run(count_num_files)
The reason is that tf.train.match_filenames_once() stores its result in a local variable (note the collections=[ops.GraphKeys.LOCAL_VARIABLES] frame in your traceback), and tf.global_variables_initializer() does not initialize local variables.
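For reference, a minimal sketch of how that session block would look with the extra initializer call, assuming the rest of your script stays as posted:
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # match_filenames_once() stores its result in a *local* variable, so initialize those too.
    sess.run(tf.local_variables_initializer())
    num_files = sess.run(count_num_files)

    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coord)
    for i in range(num_files):
        audio_file = sess.run(filename)
        print(audio_file)

    # Not in your original snippet, but good hygiene: shut the queue runners down cleanly.
    coord.request_stop()
    coord.join(threads)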
However, I'd like to show you a new and slightly easier way to write your program, using the tf.contrib.data API (available in TensorFlow 1.3, and which will become tf.data in TensorFlow 1.4):
import tensorflow as tf

filenames = tf.contrib.data.Dataset.list_files('audio_dataset/*.wav')
name_and_content = filenames.map(lambda x: (x, tf.read_file(x)))
iterator = name_and_content.make_one_shot_iterator()
filename, file_contents = iterator.get_next()

with tf.Session() as sess:
    try:
        while True:
            audio_file = sess.run(filename)
            print(audio_file)
    except tf.errors.OutOfRangeError:  # Raised when there are no more files.
        pass
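Going one step further, newer TensorFlow releases can also decode the WAV data for you, which would let you drop the Python 2 Bregman dependency for simple loading. This is only a rough sketch under the assumption that you can upgrade to a release (1.14/1.15) that ships tf.audio.decode_wav; I haven't run it against your dataset:
import tensorflow as tf

# Sketch: assumes TensorFlow 1.14/1.15, where tf.data, tf.io.read_file and tf.audio.decode_wav are all available.
filenames = tf.data.Dataset.list_files('audio_dataset/*.wav')

def load_wav(path):
    contents = tf.io.read_file(path)
    # decode_wav returns (audio, sample_rate); audio is float32 in [-1.0, 1.0] with shape [samples, channels].
    audio, sample_rate = tf.audio.decode_wav(contents)
    return path, audio, sample_rate

dataset = filenames.map(load_wav)
iterator = dataset.make_one_shot_iterator()
path, audio, sample_rate = iterator.get_next()

with tf.Session() as sess:
    try:
        while True:
            p, a, sr = sess.run([path, audio, sample_rate])
            print(p, a.shape, sr)
    except tf.errors.OutOfRangeError:  # No more files.
        pass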