ObjectInputStream readObject call causes StackOverflowError

Date: 2014-06-27 01:06:22

Tags: android deserialization stack-overflow objectinputstream

OK, my code is fairly large, but I'll simplify it so you can grasp it quickly. In general, the algorithm is:

1. Read a bitmap (a face image) from the SD card.
2. For each photo read, deserialize one object from the SD card (the object contains the coordinates of facial regions such as the eyes, nose, and mouth); a simplified stand-in for that class is sketched right after this list.
3. Process the image and extract some data from it.
4. Write the data to a .txt file on the SD card.
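
For reference, FaceData is essentially a Serializable holder of region coordinates. A simplified stand-in (the real class has more fields, and the field names here are only illustrative):

import java.io.Serializable;

// Simplified stand-in for the serialized class; field names are illustrative.
// Plain int fields keep default Java serialization straightforward.
public class FaceData implements Serializable {
    private static final long serialVersionUID = 1L;

    public int leftEyeX, leftEyeY;
    public int rightEyeX, rightEyeY;
    public int noseX, noseY;
    public int mouthX, mouthY;
}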

There is a method that uses recursion (shown below). I pass in a list of photo paths as a parameter. For each photo, I deserialize its object from the SD card. After a successful read, I pass the deserialized object, together with other parameters, to the processImage() method. When processImage() finishes, it invokes the callback onPhotoProcessFinished(). The callback increments imageProcessed and calls processNextImageIfPossible() again. This repeats until all photos have been processed. It works, but after about 50 iterations I get the error below. Do you know how I can fix this?

private void processNextImageIfPossible(List<String> photosToProcess) {
    if (imageProcessed < imageToProcess) {

        try {
            File photoDataFile = new File(photosToProcess.get(imageProcessed).replace("jpg", "data").substring(6));
            ObjectInputStream ois = new ObjectInputStream(new FileInputStream(photoDataFile));
            FaceData faceData = (FaceData) ois.readObject(); // ERROR HAPPENS HERE
            ois.close();

            if (photosToProcess.get(imageProcessed).contains(EMOTION_NEUTRAL)) {
                neutralImages++;
                processImage(Uri.parse(photosToProcess.get(imageProcessed)), faceData, EMOTION_NEUTRAL);
            } else if (photosToProcess.get(imageProcessed).contains(EMOTION_HAPPINESS)) {
                happinessImages++;
                processImage(Uri.parse(photosToProcess.get(imageProcessed)), faceData, EMOTION_HAPPINESS);
            } else if (photosToProcess.get(imageProcessed).contains(EMOTION_SADNESS)) {
                sadnessImages++;
                processImage(Uri.parse(photosToProcess.get(imageProcessed)), faceData, EMOTION_SADNESS);
            } else if (photosToProcess.get(imageProcessed).contains(EMOTION_ANGER)) {
                angerImages++;
                processImage(Uri.parse(photosToProcess.get(imageProcessed)), faceData, EMOTION_ANGER);
            } else if (photosToProcess.get(imageProcessed).contains(EMOTION_SUPRISE)) {
                supriseImages++;
                processImage(Uri.parse(photosToProcess.get(imageProcessed)), faceData, EMOTION_SUPRISE);
            }
        } catch (Exception ex) {
            Toast.makeText(NeuralNetworkActivity.this, getString(R.string.process_file_read_error), Toast.LENGTH_LONG).show();
            ex.printStackTrace();
        }

    } else {
        //createAndTrainNeuralNetwork();
    }
}

@Override
public void onPhotoProcessFinished() {
    imageProcessed++;
    processNextImageIfPossible(photosToProcessList);    
}

Error log:

06-27 02:46:31.856: E/AndroidRuntime(11552): FATAL EXCEPTION: main
06-27 02:46:31.856: E/AndroidRuntime(11552): Process: org.opencv.samples.facedetect, PID: 11552
06-27 02:46:31.856: E/AndroidRuntime(11552): java.lang.StackOverflowError
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.lang.Thread.currentThread(Thread.java:470)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.lang.ThreadLocal.get(ThreadLocal.java:53)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at dalvik.system.BlockGuard.getThreadPolicy(BlockGuard.java:139)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at libcore.io.BlockGuardOs.read(BlockGuardOs.java:148)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at libcore.io.IoBridge.read(IoBridge.java:425)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.FileInputStream.read(FileInputStream.java:179)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at libcore.io.Streams.readFully(Streams.java:81)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.DataInputStream.readFully(DataInputStream.java:99)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.DataInputStream.decodeUTF(DataInputStream.java:178)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.DataInputStream.decodeUTF(DataInputStream.java:173)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.DataInputStream.readUTF(DataInputStream.java:169)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readFieldDescriptors(ObjectInputStream.java:895)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readClassDescriptor(ObjectInputStream.java:1719)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readNewClassDesc(ObjectInputStream.java:1631)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:658)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readNewObject(ObjectInputStream.java:1781)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readNonPrimitiveContent(ObjectInputStream.java:762)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:1981)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:1938)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:162)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:177)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralN

2 Answers:

Answer 0 (score: 0):

This looks like an unbounded recursive loop between processImage, onPhotoProcessFinished, and processNextImageIfPossible: each callback calls into the next stage before the earlier stack frames ever return. You need to stop onPhotoProcessFinished from calling processNextImageIfPossible directly. Posting the call through a Handler instead of making a direct function call should be enough; a sketch follows.
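
A minimal sketch of that suggestion, assuming a new handler field on the activity; the other names (imageProcessed, photosToProcessList, processNextImageIfPossible) come from the question:

private final Handler handler = new Handler(Looper.getMainLooper());

@Override
public void onPhotoProcessFinished() {
    imageProcessed++;
    // Post instead of calling directly: the current call stack unwinds
    // before the next image is handled, so the call depth stays flat
    // no matter how many photos are processed.
    handler.post(new Runnable() {
        @Override
        public void run() {
            processNextImageIfPossible(photosToProcessList);
        }
    });
}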

Answer 1 (score: 0):

Using a Handler is a good idea and should work, since it defers the next call instead of nesting it. Good hint. I managed to fix it with an AsyncTask: the deserialization now runs on a background thread, and onPostExecute is dispatched through the main thread's message queue, so each iteration starts from a fresh stack frame instead of deepening the recursion. I don't fully understand the memory details, but there was simply too much happening on the UI thread. The following code works:

private void processNextImageIfPossible(final List<String> photosToProcess) {
    if (imageProcessed < imageToProcess) {

        final String filePath = photosToProcess.get(imageProcessed).replace("jpg", "data").substring(6);

        new AsyncTask<Void, Void, FaceData>() {

            @Override
            protected FaceData doInBackground(Void... params) {
                // Deserialization now runs off the UI thread.
                ObjectInputStream ois = null;
                try {
                    ois = new ObjectInputStream(new FileInputStream(new File(filePath)));
                    return (FaceData) ois.readObject();
                } catch (Exception ex) {
                    // No Toast here: Toasts must be shown from the UI thread,
                    // so the error is reported in onPostExecute instead.
                    ex.printStackTrace();
                    return null;
                } finally {
                    if (ois != null) {
                        try { ois.close(); } catch (IOException ignored) {}
                    }
                }
            }

            @Override
            protected void onPostExecute(FaceData faceData) {
                if (faceData == null) {
                    // Deserialization failed; report it on the UI thread.
                    Toast.makeText(NeuralNetworkActivity.this, getString(R.string.camera_load_error), Toast.LENGTH_LONG).show();
                    return;
                }

                String photoPath = photosToProcess.get(imageProcessed);
                if (photoPath.contains(EMOTION_NEUTRAL)) {
                    neutralImages++;
                    processImage(Uri.parse(photoPath), faceData, EMOTION_NEUTRAL);
                } else if (photoPath.contains(EMOTION_HAPPINESS)) {
                    happinessImages++;
                    processImage(Uri.parse(photoPath), faceData, EMOTION_HAPPINESS);
                } else if (photoPath.contains(EMOTION_SADNESS)) {
                    sadnessImages++;
                    processImage(Uri.parse(photoPath), faceData, EMOTION_SADNESS);
                } else if (photoPath.contains(EMOTION_ANGER)) {
                    angerImages++;
                    processImage(Uri.parse(photoPath), faceData, EMOTION_ANGER);
                } else if (photoPath.contains(EMOTION_SUPRISE)) {
                    supriseImages++;
                    processImage(Uri.parse(photoPath), faceData, EMOTION_SUPRISE);
                }

                super.onPostExecute(faceData);
            }
        }.execute();

    } else {
        //createAndTrainNeuralNetwork();
    }
}