App with integrated Google Cloud Vision API crashes with a null object reference

Time: 2019-02-10 02:22:54

Tags: android google-vision

I am currently working on an Android app in which I want to integrate the Google Cloud Vision API and run face detection on images.

This is the code I have implemented so far to achieve this:

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v4.content.FileProvider;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;

import com.google.api.client.extensions.android.json.AndroidJsonFactory;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.services.vision.v1.Vision;
import com.google.api.services.vision.v1.VisionRequestInitializer;
import com.google.api.services.vision.v1.model.AnnotateImageRequest;
import com.google.api.services.vision.v1.model.BatchAnnotateImagesRequest;
import com.google.api.services.vision.v1.model.BatchAnnotateImagesResponse;
import com.google.api.services.vision.v1.model.FaceAnnotation;
import com.google.api.services.vision.v1.model.Feature;
import com.google.api.services.vision.v1.model.Image;

import org.apache.commons.io.IOUtils;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;

import static org.apache.commons.io.IOUtils.toByteArray;

public class HomeActivity extends AppCompatActivity {

    /* Variables */
    MarshMallowPermission mmp = new MarshMallowPermission(this);


    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.home);
        super.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);

        Vision.Builder visionBuilder = new Vision.Builder(
                new NetHttpTransport(),
                new AndroidJsonFactory(),
                null);

        visionBuilder.setVisionRequestInitializer(
                new VisionRequestInitializer("AIzaSyCnPwvnEQakkUXpkFaj2TcwJs_E3DPqjm0"));
        final Vision vision = visionBuilder.build();

        // Create new thread
        AsyncTask.execute(new Runnable() {
            @Override
            public void run() {
                // Convert photo to byte array
                InputStream inputStream =
                        getResources().openRawResource(R.raw.apollo9);
                byte[] photoData = new byte[0];
                try {
                    photoData = toByteArray(inputStream);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                try {
                    inputStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }

                Image inputImage = new Image();
                inputImage.encodeContent(photoData);
                Feature desiredFeature = new Feature();
                desiredFeature.setType("FACE_DETECTION");
                AnnotateImageRequest request = new AnnotateImageRequest();
                request.setImage(inputImage);
                request.setFeatures(Arrays.asList(desiredFeature));
                BatchAnnotateImagesRequest batchRequest =
                        new BatchAnnotateImagesRequest();

                batchRequest.setRequests(Arrays.asList(request));
                BatchAnnotateImagesResponse batchResponse =
                        null;
                try {
                    batchResponse = vision.images().annotate(batchRequest).execute();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                List<FaceAnnotation> faces = batchResponse.getResponses()
                        .get(0).getFaceAnnotations();

                // Count faces
                int numberOfFaces = faces.size();

                // Get joy likelihood for each face
                String likelihoods = "";
                for (int i = 0; i < numberOfFaces; i++) {
                    likelihoods += "\n It is " +
                            faces.get(i).getJoyLikelihood() +
                            " that face " + i + " is happy";
                }

                // Concatenate everything
                final String message =
                        "This photo has " + numberOfFaces + " faces" + likelihoods;

                // Display toast on UI thread
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Toast.makeText(getApplicationContext(),
                                message, Toast.LENGTH_LONG).show();
                    }
                });
            }
        });



        // Check for Permission for Storage
        if (!mmp.checkPermissionForReadExternalStorage()) {
            mmp.requestPermissionForReadExternalStorage();
        }

        // MARK: - CAMERA BUTTON ------------------------------------
        Button camButt = findViewById(R.id.cameraButt);
        camButt.setOnClickListener(new View.OnClickListener() {
          @Override
          public void onClick(View view) {
              if (!mmp.checkPermissionForCamera()) {
                  mmp.requestPermissionForCamera();
              } else { openCamera(); }
        }});


        // MARK: - GALLERY BUTTON ------------------------------------
        Button galleryButt = findViewById(R.id.galleryButt);
        galleryButt.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (!mmp.checkPermissionForReadExternalStorage()) {
                    mmp.requestPermissionForReadExternalStorage();
                } else { openGallery(); }
        }});





    }// end onCreate()




    // IMAGE HANDLING METHODS ------------------------------------------------------------------------
    int CAMERA = 0;
    int GALLERY = 1;
    Uri imageURI;
    File file;


    // OPEN CAMERA
    public void openCamera() {
        Intent intent= new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        file = new File(Environment.getExternalStorageDirectory(), "image.jpg");
        imageURI = FileProvider.getUriForFile(getApplicationContext(), getPackageName() + ".provider", file);
        intent.putExtra(MediaStore.EXTRA_OUTPUT, imageURI);
        startActivityForResult(intent, CAMERA);
    }


    // OPEN GALLERY
    public void openGallery() {
        Intent intent = new Intent();
        intent.setType("image/*");
        intent.setAction(Intent.ACTION_GET_CONTENT);
        startActivityForResult(Intent.createChooser(intent, "Select Image"), GALLERY);
    }



    // IMAGE PICKED DELEGATE -----------------------------------
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);

        if (resultCode == Activity.RESULT_OK) {
            Bitmap bm = null;

            // Image from Camera
            if (requestCode == CAMERA) {

                try {
                    File f = file;
                    ExifInterface exif = new ExifInterface(f.getPath());
                    int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);

                    int angle = 0;
                    if (orientation == ExifInterface.ORIENTATION_ROTATE_90) { angle = 90; }
                    else if (orientation == ExifInterface.ORIENTATION_ROTATE_180) { angle = 180; }
                    else if (orientation == ExifInterface.ORIENTATION_ROTATE_270) { angle = 270; }
                    Log.i("log-", "ORIENTATION: " + orientation);

                    Matrix mat = new Matrix();
                    mat.postRotate(angle);

                    Bitmap bmp = BitmapFactory.decodeStream(new FileInputStream(f), null, null);
                    bm = Bitmap.createBitmap(bmp, 0, 0, bmp.getWidth(), bmp.getHeight(), mat, true);

                    // Get FINAL IMAGE URI
                    Configs.finalImageUri = getImageUri(HomeActivity.this, bm);

                }
                catch (IOException | OutOfMemoryError e) { Log.i("log-", e.getMessage()); }


                // Image from Gallery
            } else if (requestCode == GALLERY) {
                try {
                    bm = MediaStore.Images.Media.getBitmap(getApplicationContext().getContentResolver(), data.getData());

                    // Get FINAL IMAGE URI
                    Configs.finalImageUri = getImageUri(HomeActivity.this, bm);

                } catch (IOException e) { e.printStackTrace(); }
            }

            Log.i("log-", "FINAL IMAGE URI: " + Configs.finalImageUri);
            startActivity(new Intent(HomeActivity.this, ImageEditor.class));
        }

    }
    //---------------------------------------------------------------------------------------------




    // Method to get URI of a stored image
    public Uri getImageUri(Context inContext, Bitmap inImage) {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        inImage.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
        String path = MediaStore.Images.Media.insertImage(inContext.getContentResolver(), inImage, "image", null);
        return Uri.parse(path);
    }



}// @end

The layout XML file:

<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".HomeActivity">


    <TextView
        android:id="@+id/textView2"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentTop="true"
        android:layout_centerHorizontal="true"
        android:layout_marginTop="50dp"
        android:text="TEXTIFEYE"
        android:textSize="30sp" />

    <ImageView
        android:id="@+id/imageView"
        android:layout_width="100dp"
        android:layout_height="100dp"
        android:layout_below="@+id/textView2"
        android:layout_centerHorizontal="true"
        android:layout_marginTop="10dp"
        android:scaleType="centerCrop"
        app:srcCompat="@drawable/logo" />

    <Button
        android:id="@+id/cameraButt"
        android:layout_width="44dp"
        android:layout_height="44dp"
        android:layout_below="@+id/imageView"
        android:layout_marginTop="50dp"
        android:layout_toStartOf="@+id/imageView"
        android:background="@drawable/camera_butt" />

    <TextView
        android:id="@+id/textView3"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignEnd="@+id/cameraButt"
        android:layout_alignStart="@+id/cameraButt"
        android:layout_below="@+id/cameraButt"
        android:text="CAMERA"
        android:textAlignment="center"
        android:textSize="10sp" />

    <Button
        android:id="@+id/galleryButt"
        android:layout_width="44dp"
        android:layout_height="44dp"
        android:layout_alignTop="@+id/cameraButt"
        android:layout_toEndOf="@+id/imageView"
        android:background="@drawable/gallery_butt" />

    <TextView
        android:id="@+id/textView4"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignEnd="@+id/galleryButt"
        android:layout_alignStart="@+id/galleryButt"
        android:layout_below="@+id/galleryButt"
        android:text="GALLERY"
        android:textSize="10sp" />
</RelativeLayout>

The log message:

java.lang.NullPointerException: Attempt to invoke virtual method 'java.util.List com.google.api.services.vision.v1.model.BatchAnnotateImagesResponse.getResponses()' on a null object reference
    at gmbh.webagenten.textifeye.HomeActivity$1.run(HomeActivity.java:113)
    at android.os.AsyncTask$SerialExecutor$1.run(AsyncTask.java:245)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
    at java.lang.Thread.run(Thread.java:764)

I basically have no idea why the app crashes with a null object reference and would appreciate any hints and feedback. Thanks!

1 Answer:

Answer 0 (score: 1):

The statement batchResponse = vision.images().annotate(batchRequest).execute(); sits inside a try-catch block, which means it may not always complete: if it throws an IOException, batchResponse is still null. You should only query batchResponse once you are sure that statement has actually run. So either:

Move the dependent code inside the try block, right after that call, like this:

try {
    batchResponse = vision.images().annotate(batchRequest).execute();
    List<FaceAnnotation> faces = batchResponse.getResponses()
                    .get(0).getFaceAnnotations();

    // Count faces
    int numberOfFaces = faces.size();
    .
    .
    .
} 
catch (IOException e) {
    e.printStackTrace();
}

Or, if possible, initialize batchResponse with an empty object instead of null.
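
To illustrate the guarding idea, here is a minimal sketch (not part of the original answer) that checks the response for null before dereferencing it; it assumes the same variable names as in the question's code:

try {
    batchResponse = vision.images().annotate(batchRequest).execute();
} catch (IOException e) {
    e.printStackTrace();
}

// Bail out if the request failed or returned nothing, instead of dereferencing null
if (batchResponse == null || batchResponse.getResponses() == null
        || batchResponse.getResponses().isEmpty()) {
    Log.e("log-", "Vision API request failed or returned no responses");
    return;
}

List<FaceAnnotation> faces = batchResponse.getResponses().get(0).getFaceAnnotations();
if (faces == null) {
    // getFaceAnnotations() can be null when no faces are detected
    faces = new java.util.ArrayList<>();
}
int numberOfFaces = faces.size();

With a guard like this, a failed network call or an empty result simply logs an error on the background thread instead of crashing the app.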