【问题标题】:ObjectInputStream readObject calls cause StackOverflowError(ObjectInputStream readObject 调用导致 StackOverflowError)
【发布时间】:2014-06-27 01:06:21
【问题描述】:

好的,我的代码很大,但我会简化它,以便您快速获得它。所以一般来说我的代码算法是:

1。从 SD 卡读取位图(人脸图像)。
2。为每张读取的照片从 SD 卡中读取一个对象并对其进行反序列化(对象包含人脸区域的坐标,例如眼睛、鼻子、嘴巴等)。
3。处理图像并从中获取一些数据。
4。将数据写入 SD 卡上的 .txt 文件。

有一种使用递归的方法(如下所示)。我输入包含照片路径作为参数的列表。然后对于每张照片,我从 SD 卡反序列化对象。成功调用后 - 我使用其他参数将反序列化对象输入到 processPhoto 方法。在 processPhoto() 完成处理后,它调用接口 onPhotoProcessFinished()。接口增加“imageProcessed”并再次调用 processNextImageIfPossible。重复过程,直到处理完所有照片。它可以工作,但是经过 50 次迭代后,我得到了错误。你知道我该如何解决这个问题吗?

/**
 * Deserializes the FaceData for the image at index {@code imageProcessed}
 * and dispatches it to {@link #processImage} with the emotion label inferred
 * from the file path. Does nothing (except a commented-out training hook)
 * once all images have been processed.
 *
 * NOTE(review): processImage() ends up invoking onPhotoProcessFinished(),
 * which calls back into this method synchronously — that mutual recursion
 * grows the call stack by several frames per photo and is what throws
 * StackOverflowError after ~50 images. Breaking the cycle by posting the
 * next step through the main-thread message queue (Handler / AsyncTask)
 * is the real fix; this version only repairs the resource leak.
 *
 * @param photosToProcess absolute photo paths; each "x.jpg" is expected to
 *                        have a sibling "x.data" file containing a
 *                        serialized FaceData object
 */
private void processNextImageIfPossible(List<String> photosToProcess) {
    if (imageProcessed < imageToProcess) {

        // Read the current path once instead of five separate list lookups.
        final String photoPath = photosToProcess.get(imageProcessed);

        ObjectInputStream ois = null;
        try {
            // ".data" file shares the photo's name; substring(6) strips the
            // leading URI scheme prefix — presumably "file:/" (TODO confirm).
            File photoDataFile = new File(photoPath.replace("jpg", "data").substring(6));
            ois = new ObjectInputStream(new FileInputStream(photoDataFile));
            FaceData faceData = (FaceData) ois.readObject();

            // The emotion label is encoded in the file path itself.
            if (photoPath.contains(EMOTION_NEUTRAL)) {
                neutralImages++;
                processImage(Uri.parse(photoPath), faceData, EMOTION_NEUTRAL);
            } else if (photoPath.contains(EMOTION_HAPPINESS)) {
                happinessImages++;
                processImage(Uri.parse(photoPath), faceData, EMOTION_HAPPINESS);
            } else if (photoPath.contains(EMOTION_SADNESS)) {
                sadnessImages++;
                processImage(Uri.parse(photoPath), faceData, EMOTION_SADNESS);
            } else if (photoPath.contains(EMOTION_ANGER)) {
                angerImages++;
                processImage(Uri.parse(photoPath), faceData, EMOTION_ANGER);
            } else if (photoPath.contains(EMOTION_SUPRISE)) {
                supriseImages++;
                processImage(Uri.parse(photoPath), faceData, EMOTION_SUPRISE);
            }
        } catch (Exception ex) {
            Toast.makeText(NeuralNetworkActivity.this, getString(R.string.process_file_read_error), Toast.LENGTH_LONG).show();
            ex.printStackTrace();
        } finally {
            // Original code closed the stream only on the success path and
            // leaked the file descriptor whenever readObject() threw.
            if (ois != null) {
                try {
                    ois.close();
                } catch (IOException ignored) {
                    // Nothing useful to do if close itself fails.
                }
            }
        }

    } else {
        //createAndTrainNeuralNetwork();
    }
}

@Override
public void onPhotoProcessFinished() {
    // Callback fired by processImage() when one photo is fully processed:
    // advance the counter, then kick off the next photo.
    // NOTE(review): this call is synchronous, so each photo adds
    // onPhotoProcessFinished -> processNextImageIfPossible -> processImage
    // frames to the stack — the source of the StackOverflowError in the
    // question. Posting the next step to a Handler (or letting AsyncTask's
    // onPostExecute dispatch it through the message queue) unwinds the
    // stack between photos.
    imageProcessed++;
    processNextImageIfPossible(photosToProcessList);    
}

错误日志:

06-27 02:46:31.856: E/AndroidRuntime(11552): FATAL EXCEPTION: main
06-27 02:46:31.856: E/AndroidRuntime(11552): Process: org.opencv.samples.facedetect, PID: 11552
06-27 02:46:31.856: E/AndroidRuntime(11552): java.lang.StackOverflowError
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.lang.Thread.currentThread(Thread.java:470)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.lang.ThreadLocal.get(ThreadLocal.java:53)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at dalvik.system.BlockGuard.getThreadPolicy(BlockGuard.java:139)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at libcore.io.BlockGuardOs.read(BlockGuardOs.java:148)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at libcore.io.IoBridge.read(IoBridge.java:425)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.FileInputStream.read(FileInputStream.java:179)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at libcore.io.Streams.readFully(Streams.java:81)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.DataInputStream.readFully(DataInputStream.java:99)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.DataInputStream.decodeUTF(DataInputStream.java:178)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.DataInputStream.decodeUTF(DataInputStream.java:173)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.DataInputStream.readUTF(DataInputStream.java:169)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readFieldDescriptors(ObjectInputStream.java:895)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readClassDescriptor(ObjectInputStream.java:1719)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readNewClassDesc(ObjectInputStream.java:1631)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:658)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readNewObject(ObjectInputStream.java:1781)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readNonPrimitiveContent(ObjectInputStream.java:762)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:1981)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:1938)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:162)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:177)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.onPhotoProcessFinished(NeuralNetworkActivity.java:195)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processImage(NeuralNetworkActivity.java:504)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralNetworkActivity.processNextImageIfPossible(NeuralNetworkActivity.java:171)
06-27 02:46:31.856: E/AndroidRuntime(11552):    at org.opencv.samples.facedetect.ui.activity.NeuralN

【问题讨论】:

    标签: android deserialization stack-overflow objectinputstream


    【解决方案1】:

    看起来像是 processImage、onPhotoProcessFinished 和 processNextImageIfPossible 之间的无限相互递归:每处理一张照片,调用栈就加深几层,最终导致 StackOverflowError。您需要防止 onPhotoProcessFinished 直接(同步)调用 processNextImageIfPossible。通过 Handler 把下一步 post 到主线程消息队列、而不是直接函数调用,就能在每张照片之间让调用栈回退,这样应该就足够了。

    【讨论】:

      【解决方案2】:

      Handler 的思路是对的。我最终用 AsyncTask 解决了问题。起作用的关键并不只是"用了另一个线程":AsyncTask 的 onPostExecute 是通过主线程的消息队列调度执行的,因此每张照片处理完后调用栈会先完全回退,下一张照片从新的栈帧开始,递归链被打断,也就不会再累积栈深度。以下代码有效:

      /**
       * Deserializes the FaceData for the image at index {@code imageProcessed}
       * on a background thread, then dispatches it to {@link #processImage}
       * from onPostExecute. Because onPostExecute is delivered through the
       * main thread's message queue, the call stack fully unwinds between
       * photos, which is what prevents the StackOverflowError.
       *
       * @param photosToProcess absolute photo paths; each "x.jpg" is expected
       *                        to have a sibling "x.data" serialized FaceData
       */
      private void processNextImageIfPossible(final List<String> photosToProcess) {
          if (imageProcessed < imageToProcess) {

              // ".data" file shares the photo's name; substring(6) strips the
              // leading URI scheme prefix — presumably "file:/" (TODO confirm).
              final String filePath = photosToProcess.get(imageProcessed).replace("jpg", "data").substring(6);

              new AsyncTask<Void, Void, FaceData>() {

                  @Override
                  protected FaceData doInBackground(Void... params) {
                      // No UI calls here: this runs on a background thread, and
                      // Toast.makeText(...).show() from a non-Looper thread
                      // throws at runtime. Errors are reported in onPostExecute.
                      ObjectInputStream ois = null;
                      try {
                          File photoDataFile = new File(filePath);
                          ois = new ObjectInputStream(new FileInputStream(photoDataFile));
                          return (FaceData) ois.readObject();
                      } catch (Exception ex) {
                          ex.printStackTrace();
                          return null; // signals failure to onPostExecute
                      } finally {
                          // Close on every path — the original leaked the
                          // descriptor whenever readObject() threw.
                          if (ois != null) {
                              try {
                                  ois.close();
                              } catch (IOException ignored) {
                                  // Nothing useful to do if close itself fails.
                              }
                          }
                      }
                  }

                  @Override
                  protected void onPostExecute(FaceData faceData) {
                      // Runs on the UI thread, dispatched via the message queue.
                      if (faceData == null) {
                          // Deserialization failed; report it here where Toast
                          // is legal, and skip processImage — the original
                          // passed null through and risked an NPE downstream.
                          Toast.makeText(NeuralNetworkActivity.this, getString(R.string.camera_load_error), Toast.LENGTH_LONG).show();
                          super.onPostExecute(faceData);
                          return;
                      }

                      // Read the current path once; the emotion label is
                      // encoded in the file path itself.
                      final String photoPath = photosToProcess.get(imageProcessed);

                      if (photoPath.contains(EMOTION_NEUTRAL)) {
                          neutralImages++;
                          processImage(Uri.parse(photoPath), faceData, EMOTION_NEUTRAL);
                      } else if (photoPath.contains(EMOTION_HAPPINESS)) {
                          happinessImages++;
                          processImage(Uri.parse(photoPath), faceData, EMOTION_HAPPINESS);
                      } else if (photoPath.contains(EMOTION_SADNESS)) {
                          sadnessImages++;
                          processImage(Uri.parse(photoPath), faceData, EMOTION_SADNESS);
                      } else if (photoPath.contains(EMOTION_ANGER)) {
                          angerImages++;
                          processImage(Uri.parse(photoPath), faceData, EMOTION_ANGER);
                      } else if (photoPath.contains(EMOTION_SUPRISE)) {
                          supriseImages++;
                          processImage(Uri.parse(photoPath), faceData, EMOTION_SUPRISE);
                      }

                      super.onPostExecute(faceData);
                  }
              }.execute();

          } else {
              //createAndTrainNeuralNetwork();
          }
      }
      

      【讨论】:

        猜你喜欢
        • 2012-07-29
        • 1970-01-01
        • 1970-01-01
        • 1970-01-01
        • 1970-01-01
        • 2017-10-14
        • 1970-01-01
        • 1970-01-01
        • 2023-04-02
        相关资源
        最近更新 更多