I am building an Android app that detects a custom object I have trained. Once the object is detected, the app shows a button that takes the user to another activity, for example a score or details screen.
I have been working from the sample code provided by TensorFlow (https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/android).
I am able to show the button when an object is detected, but I don't know why my app always crashes when the button is clicked. Since detection runs on a background thread, I understand that I can only trigger any click listener through a handler.
The first snippet below calls the handler when an object is detected. The second snippet calls an intent to start the other activity.
runInBackground(
    new Runnable() {
      @Override
      public void run() {
        LOGGER.i("Running detection on image " + currTimestamp);
        final long startTime = SystemClock.uptimeMillis();
        final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
        lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

        cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
        final Canvas canvas = new Canvas(cropCopyBitmap);
        final Paint paint = new Paint();
        paint.setColor(Color.RED);
        paint.setStyle(Style.STROKE);
        paint.setStrokeWidth(2.0f);

        float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
        switch (MODE) {
          case TF_OD_API:
            minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
            break;
          case MULTIBOX:
            minimumConfidence = MINIMUM_CONFIDENCE_MULTIBOX;
            break;
          case YOLO:
            minimumConfidence = MINIMUM_CONFIDENCE_YOLO;
            break;
        }

        final List<Classifier.Recognition> mappedRecognitions =
            new LinkedList<Classifier.Recognition>();

        for (final Classifier.Recognition result : results) {
          final RectF location = result.getLocation();
          if (location != null && result.getConfidence() >= minimumConfidence) {
            canvas.drawRect(location, paint);
            // Note: run() executes on the background inference thread, so this Toast
            // is also shown from that thread.
            Toast.makeText(DetectorActivity.this, result.getTitle(), Toast.LENGTH_LONG).show();
            cropToFrameTransform.mapRect(location);
            result.setLocation(location);
            mappedRecognitions.add(result);
            found = true; // set when the object is found
            waitMsgHandler.sendEmptyMessage(0);
          }
        }

        if (found) {
          // call the handler to update the UI
          waitMsgHandler.sendEmptyMessage(0);
        }

        tracker.trackResults(mappedRecognitions, luminanceCopy, currTimestamp);
        trackingOverlay.postInvalidate();

        requestRender();
        computingDetection = false;
      }
    });
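
For context, the handler and the button's click listener look roughly like the sketch below. The names scoreButton, score_button, and ScoreActivity are placeholders, not the exact code from my project.

import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.view.View;
import android.widget.Button;

// Fields and setup inside DetectorActivity (sketch).
private Button scoreButton; // hidden until an object is detected

// Handler bound to the main looper, so the empty message sent from the
// background detection thread is handled on the UI thread.
private final Handler waitMsgHandler = new Handler(Looper.getMainLooper()) {
  @Override
  public void handleMessage(final Message msg) {
    scoreButton.setVisibility(View.VISIBLE);
  }
};

@Override
protected void onCreate(final Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  scoreButton = (Button) findViewById(R.id.score_button); // placeholder id
  scoreButton.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(final View v) {
      // The click listener already runs on the UI thread, so the next
      // activity is started directly from here.
      startActivity(new Intent(DetectorActivity.this, ScoreActivity.class));
    }
  });
}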