在Android的Cordova中使用face-api.js

时间:2019-06-11 13:16:46

标签: android cordova tensorflow face-api

在 Android 上,Cordova 不允许将本地模型文件加载到 face-api.js 所使用的 TensorFlow 中,但在 iOS 或浏览器中不会出现此问题。该怎么解决?

1 个答案:

答案 0 :(得分:0)

要在本地加载将被解析并供 TensorFlow 模型使用的文件,您必须通过在 faceapi.env.monkeyPatch 中设置相应的值,告诉 face-api.js 库用哪种方法来读取文件。

我不能说这是最好的解决方案,但它是可行的解决方案。我将Android平台与其他平台分离(没有问题),并且在Android内部,我将JSON文件与二进制文件分离了。

这是一个完整的示例,假设加载的是一张包含人脸的 512x512 图像:

PS:VueJS中的Javascript代码。

插件列表

  

cordova-plugin-device

     

cordova-plugin-file

App.vue

<script>
// face-api.js loads its TensorFlow.js model weights via a pluggable file reader.
import * as faceapi from "face-api.js";

export default {
  data: () => ({}),
  // Defer all work until Cordova fires "deviceready": window.device and
  // window.cordova (used below) are not available before that event.
  mounted() {
    let cls = this;
    document.addEventListener(
      "deviceready",
      function() {
        cls.loadFaceDetectModels();
      },
      false
    );
  },
  methods: {
    // Loads the TinyFaceDetector and FaceRecognition models, then runs a
    // smoke test. On Android the WebView cannot fetch bundled files over
    // HTTP, so face-api.js's environment is monkey-patched to read model
    // files through cordova-plugin-file; other platforms load by URL.
    async loadFaceDetectModels() {
      let MODEL_URL;
      if (window.device.platform === "Android") {
        // Models shipped inside the APK under www/static/models/.
        MODEL_URL =
          window.cordova.file.applicationDirectory + "www/static/models/";
        faceapi.env.monkeyPatch({
          // Custom readFile: resolves the local URL via cordova-plugin-file
          // and reads the entry with a FileReader.
          // NOTE(review): every error path resolves(false) instead of
          // rejecting, so a read failure only surfaces later inside
          // face-api.js as a confusing parse error — consider rejecting.
          readFile: filePath =>
            new Promise(resolve => {
              window.resolveLocalFileSystemURL(
                filePath,
                function(fileEntry) {
                  fileEntry.file(
                    function(file) {
                      var reader = new FileReader();

                      // Strip any query string, then take the extension to
                      // choose between a text read (model manifest .json)
                      // and a binary read (weight shard files).
                      let fileExtension = filePath
                        .split("?")[0]
                        .split(".")
                        .pop();
                      if (fileExtension === "json") {
                        reader.onloadend = function() {
                          resolve(this.result);
                        };
                        reader.readAsText(file);
                      } else {
                        // face-api.js expects binary weights as a Uint8Array,
                        // not a raw ArrayBuffer.
                        reader.onloadend = function() {
                          resolve(new Uint8Array(this.result));
                        };

                        reader.readAsArrayBuffer(file);
                      }
                    },
                    function() {
                      resolve(false);
                    }
                  );
                },
                function() {
                  resolve(false);
                }
              );
            }),
          // Map face-api.js's environment abstractions onto the WebView's
          // native DOM classes so canvas/image handling keeps working.
          Canvas: HTMLCanvasElement,
          Image: HTMLImageElement,
          ImageData: ImageData,
          Video: HTMLVideoElement,
          createCanvasElement: () => document.createElement("canvas"),
          createImageElement: () => document.createElement("img")
        });
        // loadFromDisk goes through the patched readFile above.
        await faceapi.nets.tinyFaceDetector.loadFromDisk(MODEL_URL);
        await faceapi.nets.faceRecognitionNet.loadFromDisk(MODEL_URL);
      } else {
        // iOS / browser: plain URL fetch works, no patching needed.
        MODEL_URL = "./static/models";
        await faceapi.loadTinyFaceDetectorModel(MODEL_URL);
        await faceapi.loadFaceRecognitionModel(MODEL_URL);
      }

      this.testFaceDetector();
    },
    // Smoke test: detect a single face in a bundled sample image and alert
    // the raw detection result. NOTE(review): no baseImage.onerror handler —
    // a missing image fails silently.
    testFaceDetector() {
      let cls = this;
      let baseImage = new Image();
      baseImage.src = "./static/img/faceWillSmith.jpg";
      baseImage.onload = function() {
        faceapi
          .detectSingleFace(baseImage, new faceapi.TinyFaceDetectorOptions())
          .run()
          .then(res => {
            alert(JSON.stringify(res));
          });
      };
    }
  }
};
</script>

config.xml

<platform name="android">
    <allow-intent href="market:*" />
    <preference name="loadUrlTimeoutValue" value="700000" />
    <preference name="android-minSdkVersion" value="21" />
    <preference name="android-targetSdkVersion" value="21" />
    <preference name="AndroidPersistentFileLocation" value="Compatibility" />
    <preference name="AndroidPersistentFileLocation" value="Internal" />
    <preference name="AndroidExtraFilesystems" value="files,files-external,documents,sdcard,cache,cache-external,assets,root,applicationDirectory" />
</platform>