There is a similar post, but my error comes from a different variable.
I have object detection code using TensorFlow, as shown below.
import numpy as np
import os
import tensorflow as tf
import cv2
from object_detection.utils import ops as utils_ops
from utils import label_map_util
from utils import visualization_utils as vis_util

PATH_TO_LABELS = os.path.join('NumberPlateDetection/Numplate_ssdmobilenetv1_fpn/', 'numberplate_label_map.pbtxt')
cap = cv2.VideoCapture('/media/nyan/697eb924-2fe2-4054-b4a7-1b7d9578ab8f/nyan/Documents/data/NumberPlate_old/videos/IMG_5716.MOV')
category_index = label_map_util.create_category_index_from_labelmap(PATH_TO_LABELS, use_display_name=True)

with tf.Session() as sess:
    saver = tf.train.import_meta_graph('NumberPlateDetection/Numplate_ssdmobilenetv1_fpn/inference_graph/model.ckpt.meta')
    saver.restore(sess, 'NumberPlateDetection/Numplate_ssdmobilenetv1_fpn/inference_graph/model.ckpt')
    image_tensor = sess.graph.get_tensor_by_name('image_tensor:0')
    boxes = sess.graph.get_tensor_by_name('detection_boxes:0')
    scores = sess.graph.get_tensor_by_name('detection_scores:0')
    cls = sess.graph.get_tensor_by_name('detection_classes:0')
    num_detections = sess.graph.get_tensor_by_name('num_detections:0')
    while True:
        ret, image_np = cap.read()
        # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
        image_np_expanded = np.expand_dims(image_np, axis=0)
        (box, score, clses, num_detection) = sess.run([boxes, scores, cls, num_detections], {image_tensor: image_np_expanded})
        boxes = box[0]
        scores = score[0]
        classes = clses[0]
        classes = classes.astype(np.uint8)
        num_detections = int(num_detection[0])
        vis_util.visualize_boxes_and_labels_on_image_array(
            image_np,
            boxes,
            classes,
            scores,
            category_index,
            instance_masks=None,
            use_normalized_coordinates=True,
            line_thickness=8)
        cv2.imshow("result", image_np)
        cv2.waitKey(1)
I get the following error when I run it:
Traceback (most recent call last):
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 300, in __init__
    fetch, allow_tensor=True, allow_operation=True))
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/framework/ops.py", line 3490, in as_graph_element
    return self._as_graph_element_locked(obj, allow_tensor, allow_operation)
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/framework/ops.py", line 3579, in _as_graph_element_locked
    types_str))
TypeError: Can not convert a ndarray into a Tensor or Operation.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "/usr/lib/python3.5/pdb.py", line 1661, in main
    pdb._runscript(mainpyfile)
  File "/usr/lib/python3.5/pdb.py", line 1542, in _runscript
    self.run(statement)
  File "/usr/lib/python3.5/bdb.py", line 431, in run
    exec(cmd, globals, locals)
  File "<string>", line 1, in <module>
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/models/research/object_detection/platedetection_ckpt.py", line 28, in <module>
    (box, score, clses, num_detection) = sess.run([boxes, scores, cls, num_detections], {image_tensor:image_np_expanded})
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 929, in run
    run_metadata_ptr)
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 1137, in _run
    self._graph, fetches, feed_dict_tensor, feed_handles=feed_handles)
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 471, in __init__
    self._fetch_mapper = _FetchMapper.for_fetch(fetches)
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 261, in for_fetch
    return _ListFetchMapper(fetch)
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 370, in __init__
    self._mappers = [_FetchMapper.for_fetch(fetch) for fetch in fetches]
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 370, in <listcomp>
    self._mappers = [_FetchMapper.for_fetch(fetch) for fetch in fetches]
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 271, in for_fetch
    return _ElementFetchMapper(fetches, contraction_fn)
  File "/home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 304, in __init__
    (fetch, type(fetch), str(e)))
TypeError: Fetch argument array([[5.77544332e-01, 8.45235586e-01, 6.43900394e-01, 9.07178044e-01],
       [5.33677042e-01, 4.86815065e-01, 5.80641329e-01, 5.77449381e-01],
       [4.74086463e-01, 8.70195180e-02, 5.24333954e-01, 1.79605111e-01],
       ...,
       [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00]],
      dtype=float32) has invalid type <class 'numpy.ndarray'>, must be a string or Tensor. (Can not convert a ndarray into a Tensor or Operation.)
Uncaught exception. Entering post mortem debugging
Running 'cont' or 'step' will restart the program
> /home/nyan/venvpy3_cpu/lib/python3.5/site-packages/tensorflow/python/client/session.py(304)__init__()
-> (fetch, type(fetch), str(e)))
What is wrong?
Answer 0 (score: 0)
It looks like you are overwriting the tensor references with numpy arrays right after sess.run:
boxes = box[0]
scores = score[0]
classes = clses[0]
So on the next iteration you try to fetch numpy arrays instead of graph tensors, which makes no sense.
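A minimal sketch of the fixed loop, keeping the graph tensors and the fetched numpy results in separately named variables (the variable names here are illustrative):

while True:
    ret, image_np = cap.read()
    if not ret:
        break
    # The model expects images of shape [1, None, None, 3]
    image_np_expanded = np.expand_dims(image_np, axis=0)
    # Fetch into new names; boxes/scores/cls/num_detections must keep
    # pointing at the graph tensors for the next iteration's sess.run.
    (box, score, clses, num_detection) = sess.run(
        [boxes, scores, cls, num_detections],
        {image_tensor: image_np_expanded})
    vis_util.visualize_boxes_and_labels_on_image_array(
        image_np,
        box[0],                        # first (and only) image in the batch
        clses[0].astype(np.int32),
        score[0],
        category_index,
        use_normalized_coordinates=True,
        line_thickness=8)
    cv2.imshow("result", image_np)
    cv2.waitKey(1)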
Answer 1 (score: 0)
I needed to feed the fetched results into the visualization as follows, without overwriting the tensor references:
vis_util.visualize_boxes_and_labels_on_image_array(
    image_np,
    np.squeeze(box),
    np.squeeze(clses).astype(np.int32),
    np.squeeze(score),
    category_index,
    use_normalized_coordinates=True,
    line_thickness=8)
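Since the batch size is 1 here, np.squeeze(box) should be equivalent to box[0]. The key point in both answers is the same: sess.run results go into fresh variables, so the graph tensors (boxes, scores, cls, num_detections) remain valid fetches on every loop iteration.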