This project is supposed to detect a point on a person's face and then start playing a video. I divided the screen into three areas and named them. When I run the project, the camera first sees the second area and there is no error, because no action is assigned to the second area. But when the camera sees the first or third area, where the project is supposed to start playing a video, the project crashes immediately.
Here is my logcat error:
08-28 11:08:09.392: E/AndroidRuntime(4635): FATAL EXCEPTION: Thread-373
08-28 11:08:09.392: E/AndroidRuntime(4635): android.view.ViewRootImpl$CalledFromWrongThreadException: Only the original thread that created a view hierarchy can touch its views.
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.view.ViewRootImpl.checkThread(ViewRootImpl.java:4078)
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.view.ViewRootImpl.requestLayout(ViewRootImpl.java:714)
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.view.View.requestLayout(View.java:12678)
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.view.View.requestLayout(View.java:12678)
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.view.View.requestLayout(View.java:12678)
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.view.View.requestLayout(View.java:12678)
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.widget.RelativeLayout.requestLayout(RelativeLayout.java:268)
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.view.View.requestLayout(View.java:12678)
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.widget.VideoView.setVideoURI(VideoView.java:213)
08-28 11:08:09.392: E/AndroidRuntime(4635): at android.widget.VideoView.setVideoURI(VideoView.java:202)
08-28 11:08:09.392: E/AndroidRuntime(4635): at org.opencv.samples.facedetect.FdActivity.startPlaying(FdActivity.java:526)
08-28 11:08:09.392: E/AndroidRuntime(4635): at org.opencv.samples.facedetect.FdActivity.onCameraFrame(FdActivity.java:372)
08-28 11:08:09.392: E/AndroidRuntime(4635): at org.opencv.android.CameraBridgeViewBase.deliverAndDrawFrame(CameraBridgeViewBase.java:387)
08-28 11:08:09.392: E/AndroidRuntime(4635): at org.opencv.android.JavaCameraView$CameraWorker.run(JavaCameraView.java:321)
08-28 11:08:09.392: E/AndroidRuntime(4635): at java.lang.Thread.run(Thread.java:856)
Here are the relevant parts of the main code.
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.face_detect_surface_view);
    .....
    mMethodSeekbar = (SeekBar) findViewById(R.id.methodSeekBar);
    mValue = (TextView) findViewById(R.id.method);
    .....
    videoView.setZOrderOnTop(true);
    // Creating MediaController
    MediaController mediaController = new MediaController(this);
    mediaController.setAnchorView(videoView);
    videoView.setMediaController(mediaController);
    videoView.setVideoURI(uri2);
    videoView.requestFocus();
    videoView.start();
    currentPosition = 0;
}
@Override
public void onPause() {
    .....
}

@Override
public void onResume() {
    ....
}

@Override
public void onDestroy() {
    ...
}
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
    }

    if (mZoomWindow == null || mZoomWindow2 == null)
        CreateAuxiliaryMats();

    MatOfRect faces = new MatOfRect();
    if (mJavaDetector != null)
        mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2,
                2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
                new Size());

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
        // Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(),
        //         FACE_RECT_COLOR, 3);
        xCenter = (facesArray[i].x + facesArray[i].width + facesArray[i].x) / 2;
        yCenter = (facesArray[i].y + facesArray[i].y + facesArray[i].height) / 2;
        Point center = new Point(xCenter, yCenter);
        // Core.circle(mRgba, center, 10, new Scalar(255, 0, 0, 255), 3);
        Log.i("Point Location", Double.toString(xCenter));
        if (xCenter > 50 && xCenter < 250) {
            Log.i("Point Location", "Area 1");
            startPlaying(false);
        } else if (xCenter > 250 && xCenter < 450) {
            Log.i("Point Location", "Area 2");
            /*
            videoView.stopPlayback();
            videoView.setVideoURI(uri2);
            videoView.requestFocus();
            videoView.start();
            */
        } else if (xCenter > 450 && xCenter < 650) {
            Log.i("Point Location", "Area 3");
            startPlaying(false);
        }
        /* Core.putText(mRgba, "[" + center.x + "," + center.y + "]",
                new Point(center.x + 20, center.y + 20),
                Core.FONT_HERSHEY_SIMPLEX, 0.7,
                new Scalar(255, 255, 255, 255));
           // function that prints the coordinates */

        Rect r = facesArray[i];
        // compute the eye area
        Rect eyearea = new Rect(r.x + r.width / 8,
                (int) (r.y + (r.height / 4.5)), r.width - 2 * r.width / 8,
                (int) (r.height / 3.0));
        // split it
        Rect eyearea_right = new Rect(r.x + r.width / 16,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2, (int) (r.height / 3.0));
        Rect eyearea_left = new Rect(r.x + r.width / 16
                + (r.width - 2 * r.width / 16) / 2,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2, (int) (r.height / 3.0));

        // draw the area - mGray is the working grayscale mat; if you want to
        // see the area in the RGB preview, change mGray to mRgba
        Core.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(),
                new Scalar(255, 0, 0, 255), 2);
        Core.rectangle(mRgba, eyearea_right.tl(), eyearea_right.br(),
                new Scalar(255, 0, 0, 255), 2);

        if (learn_frames < 5) {
            teplateR = get_template(mJavaDetectorEye, eyearea_right, 24);
            teplateL = get_template(mJavaDetectorEye, eyearea_left, 24);
            learn_frames++;
        } else {
            // Learning finished, use the new templates for template matching
            // match_eye(eyearea_right, teplateR, method);
            // match_eye(eyearea_left, teplateL, method);
        }
    }
    return mRgba;
}
.....
}
}
I think the problem is here:
private void startPlaying(boolean startFromCurrent) { //IHTIYAC 1
    videoView.stopPlayback();
    videoView.setVideoURI(uri1);
    //videoView.requestFocus();
    ...
}
});
}
Answer 0 (score: 1)

Your stack trace confirms that startPlaying() is the source of the error: it calls videoView.setVideoURI() from the camera worker thread, and only the thread that created a view hierarchy (the UI thread) may touch its views. Execute all Android view-related methods inside runOnUiThread():
private void startPlaying(boolean startFromCurrent) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            videoView.stopPlayback();
            videoView.setVideoURI(uri1);
        }
    });
    ...
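Note that runOnUiThread() only fixes the threading violation. onCameraFrame() fires for every preview frame, so while a face stays in Area 1 or Area 3, startPlaying(false) would stop and restart the video many times per second. Here is a minimal sketch of a guarded version, assuming videoView and uri1 are fields of FdActivity as in the question; the mIsPlaying flag is a hypothetical field added for illustration, not part of the original code:

private volatile boolean mIsPlaying = false; // hypothetical guard flag, not in the original code

private void startPlaying(boolean startFromCurrent) {
    // Skip if a video is already running, so repeated detections in
    // the same area don't restart playback on every camera frame.
    if (mIsPlaying) {
        return;
    }
    mIsPlaying = true;
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            videoView.stopPlayback();
            videoView.setVideoURI(uri1);
            // Allow a new start once this video finishes.
            videoView.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                @Override
                public void onCompletion(MediaPlayer mp) {
                    mIsPlaying = false;
                }
            });
            videoView.start();
        }
    });
}

As an alternative to going through the Activity, videoView.post(runnable) posts the same Runnable to the UI thread's message queue and can also be called safely from the camera thread.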