Extracting features (predictions) from a tflite model

Date: 2019-07-06 15:59:17

Tags: java android opencv tensorflow deep-learning

I am new to TensorFlow and I am working on a real-time eye-detection project using the TensorFlow library.

I trained my own model with a CNN on an image dataset, generated a frozen inference graph, and tested the model on a webcam with a Python script; it gave some decent results (the model can be optimized and improved later).

I have generated a tflite model from the pb model, and I want to use that model on an Android device to predict and get the keypoints of the eye region.
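To check the conversion, I can at least load the tflite file from Java and log its tensor shapes (a minimal check that assumes a single input and a single output tensor; loadModelFile is the helper from my Activity below):

    // Minimal check of the converted model (assumes one input, one output).
    Interpreter tflite = new Interpreter(loadModelFile(this, "iristf.tflite"));
    Log.d(TAG, "input shape:  " + java.util.Arrays.toString(tflite.getInputTensor(0).shape()));
    Log.d(TAG, "output shape: " + java.util.Arrays.toString(tflite.getOutputTensor(0).shape()));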

I tried to write some code myself and took other parts from the web and from the examples provided by TensorFlow.

I have integrated the OpenCV library and added the TensorFlow Lite library to the app. The front camera of my Android device works fine. I have followed many tutorials (Medium, GitHub, the TensorFlow documentation...), but none of them helped me.

I recently asked this question on Data Science Stack Exchange, but I did not get any answer: link

I would like to know: how do I get predictions from the video stream?

How do I get frames from the input stream?

How do I get predictions from the video stream (or from an image) with the tflite model?
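Concretely, I imagine the missing step looks roughly like the sketch below. This is not working code: INPUT_SIZE, the grayscale input, the [0, 1] normalization, and the output layout of NUM_KEYPOINTS (x, y) pairs are all assumptions about my own model, and the helper predictKeypoints is hypothetical. Beyond the imports in my Activity below, it would also need org.opencv.imgproc.Imgproc, org.opencv.core.Size, java.nio.ByteBuffer, and java.nio.ByteOrder.

    // Sketch of the inference step I am trying to write (assumptions marked).
    private static final int INPUT_SIZE = 96;    // assumed model input size
    private static final int NUM_KEYPOINTS = 15; // assumed number of keypoints

    private float[][] predictKeypoints(Mat rgbaFrame) {
        // Preprocess: RGBA camera frame -> grayscale -> model input size.
        Mat gray = new Mat();
        Imgproc.cvtColor(rgbaFrame, gray, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.resize(gray, gray, new Size(INPUT_SIZE, INPUT_SIZE));

        // Pack the pixels into a direct float buffer for the Interpreter.
        ByteBuffer input = ByteBuffer.allocateDirect(INPUT_SIZE * INPUT_SIZE * 4);
        input.order(ByteOrder.nativeOrder());
        for (int y = 0; y < INPUT_SIZE; y++) {
            for (int x = 0; x < INPUT_SIZE; x++) {
                input.putFloat((float) (gray.get(y, x)[0] / 255.0));
            }
        }

        // Run inference; the output shape [1, NUM_KEYPOINTS * 2] is an assumption.
        float[][] output = new float[1][NUM_KEYPOINTS * 2];
        tflite.run(input, output);
        return output;
    }

Is this the right way to feed an OpenCV Mat to the Interpreter?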

Thanks.

Here is my current code:

    package com.example.opencvtest;

    import org.opencv.android.BaseLoaderCallback;
    import org.opencv.android.CameraBridgeViewBase;
    import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
    import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
    import org.opencv.android.LoaderCallbackInterface;
    import org.opencv.android.OpenCVLoader;
    import org.opencv.core.Mat;

    import android.app.Activity;
    import android.content.pm.ActivityInfo;
    import android.content.res.AssetFileDescriptor;
    import android.hardware.Camera;
    import android.os.Bundle;
    import android.util.Log;
    import android.view.MenuItem;
    import android.view.SurfaceView;
    import android.view.WindowManager;

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.lang.reflect.Method;
    import java.nio.MappedByteBuffer;
    import java.nio.channels.FileChannel;

    import org.tensorflow.lite.Interpreter;
    import org.tensorflow.lite.Tensor;

    public class MainActivity extends Activity implements CvCameraViewListener2 {
        private static final String TAG = "OCVSample::Activity";

        private CameraBridgeViewBase mOpenCvCameraView;
        private boolean mIsJavaCamera = true;
        private MenuItem mItemSwitchCamera = null;

        String modelFile = "iristf.tflite";
        Interpreter tflite;
        Tensor ts;

        // Memory-map the tflite model stored in the assets folder.
        private MappedByteBuffer loadModelFile(Activity activity, String MODEL_FILE) throws IOException {
            AssetFileDescriptor fileDescriptor = activity.getAssets().openFd(MODEL_FILE);
            FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
            FileChannel fileChannel = inputStream.getChannel();
            long startOffset = fileDescriptor.getStartOffset();
            long declaredLength = fileDescriptor.getDeclaredLength();
            return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
        }

        private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
            @Override
            public void onManagerConnected(int status) {
                switch (status) {
                    case LoaderCallbackInterface.SUCCESS: {
                        Log.i(TAG, "OpenCV loaded successfully");
                        mOpenCvCameraView.setCameraIndex(1); // front camera
                        mOpenCvCameraView.enableView();
                    } break;
                    default: {
                        super.onManagerConnected(status);
                    } break;
                }
            }
        };

        public MainActivity() {
            Log.i(TAG, "Instantiated new " + this.getClass());
        }

        /** Called when the activity is first created. */
        @Override
        public void onCreate(Bundle savedInstanceState) {
            Log.i(TAG, "called onCreate");
            super.onCreate(savedInstanceState);
            setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
            setContentView(R.layout.activity_main);

            // Load the tflite model and keep a handle on its input tensor.
            try {
                tflite = new Interpreter(loadModelFile(this, modelFile));
                ts = tflite.getInputTensor(0);
            } catch (IOException e) {
                e.printStackTrace();
            }

            mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
            mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
            mOpenCvCameraView.setCvCameraViewListener(this);
        }

        @Override
        public void onPause() {
            super.onPause();
            if (mOpenCvCameraView != null)
                mOpenCvCameraView.disableView();
        }

        @Override
        public void onResume() {
            super.onResume();
            if (!OpenCVLoader.initDebug()) {
                Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
                OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
            } else {
                Log.d(TAG, "OpenCV library found inside package. Using it!");
                mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
            }
        }

        public void onDestroy() {
            super.onDestroy();
            if (mOpenCvCameraView != null)
                mOpenCvCameraView.disableView();
        }

        public void onCameraViewStarted(int width, int height) {
        }

        public void onCameraViewStopped() {
        }

        // Called for every camera frame; currently it only displays the preview.
        public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
            return inputFrame.rgba();
        }

        protected void setDisplayOrientation(Camera camera, int angle) {
            Method downPolymorphic;
            try {
                downPolymorphic = camera.getClass().getMethod("setDisplayOrientation", new Class[] { int.class });
                if (downPolymorphic != null)
                    downPolymorphic.invoke(camera, new Object[] { angle });
            } catch (Exception e1) {
                e1.printStackTrace();
            }
        }
    }
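And this is how I imagine wiring the predictKeypoints sketch from above into the frame callback: run the model on each frame, scale the predicted coordinates from the model's input space back to the preview size (another assumption about my output format), and draw them. It would additionally need org.opencv.imgproc.Imgproc, org.opencv.core.Point, and org.opencv.core.Scalar.

    // Hypothetical replacement for onCameraFrame, using the sketch above.
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        float[][] keypoints = predictKeypoints(rgba);
        for (int i = 0; i < keypoints[0].length; i += 2) {
            // Scale from model input space back to the preview frame (assumed).
            double px = keypoints[0][i] * rgba.cols() / (double) INPUT_SIZE;
            double py = keypoints[0][i + 1] * rgba.rows() / (double) INPUT_SIZE;
            Imgproc.circle(rgba, new Point(px, py), 3, new Scalar(0, 255, 0), -1);
        }
        return rgba;
    }

I also wonder whether running the interpreter on every frame on the camera thread is acceptable, or whether inference should be moved to a background thread.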

0 Answers:

No answers yet.