How do I take the face image obtained from Firebase face detection and display it in an Image widget? This is the code that initializes the camera and passes the cropped face to the Test class:
void _initializeCamera2() async {
  loadModel();
  CameraDescription description = await getCamera(_direction);
  ImageRotation rotation = rotationIntToImageRotation(
    description.sensorOrientation,
  );
  _camera =
      CameraController(description, ResolutionPreset.low, enableAudio: false);
  await _camera.initialize();
  await Future.delayed(Duration(milliseconds: 500));
  tempDir = await getApplicationDocumentsDirectory();
  /* String _embPath = tempDir.path + '/emb.json';
  jsonFile = new File(_embPath);
  if (jsonFile.existsSync()) data = json.decode(jsonFile.readAsStringSync());
  */
  _camera.startImageStream((CameraImage image) {
    if (_camera != null) {
      if (_isDetecting) return;
      _isDetecting = true;
      String res;
      dynamic finalResult = Multimap<String, Face>();
      detect(image, _getDetectionMethod(), rotation).then(
        (dynamic result) async {
          if (result.length == 0)
            _faceFound = false;
          else
            _faceFound = true;
          Face _face;
          imglib.Image convertedImage =
              _convertCameraImage(image, _direction);
          for (_face in result) {
            double x, y, w, h;
            x = (_face.boundingBox.left - 10);
            y = (_face.boundingBox.top - 10);
            w = (_face.boundingBox.width + 10);
            h = (_face.boundingBox.height + 10);
            imglib.Image croppedImage = imglib.copyCrop(
                convertedImage, x.round(), y.round(), w.round(), h.round());
            croppedImage = imglib.copyResizeCropSquare(croppedImage, 112);
            // int startTime = new DateTime.now().millisecondsSinceEpoch;
            res = _recog(croppedImage);
            // int endTime = new DateTime.now().millisecondsSinceEpoch;
            // print("Inference took ${endTime - startTime}ms");
            Navigator.of(context).push(
              MaterialPageRoute(
                builder: (_) => Test(img: croppedImage),
              ),
            );
            finalResult.add(res, _face);
          }
          setState(() {
            _scanResults = finalResult;
          });
          _isDetecting = false;
        },
      ).catchError(
        (_) {
          _isDetecting = false;
        },
      );
    }
  });
}
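Note that croppedImage is an in-memory imglib.Image from package:image, not a dart:io File, so it cannot be handed to Image.file directly. One option is to encode the crop to JPEG bytes before navigating and pass a Uint8List to the next screen; the helper below is only a sketch (encodeFaceCrop and the imgBytes parameter are assumed names, not part of the original code):

import 'dart:typed_data';
import 'package:image/image.dart' as imglib;

// Sketch: turn the cropped face into encoded JPEG bytes that Flutter can render.
Uint8List encodeFaceCrop(imglib.Image croppedImage) {
  // encodeJpg returns the encoded bytes; wrap them in a Uint8List for Image.memory.
  return Uint8List.fromList(imglib.encodeJpg(croppedImage));
}

// The push inside the face loop could then pass bytes instead of the raw image:
// Navigator.of(context).push(
//   MaterialPageRoute(builder: (_) => Test(imgBytes: encodeFaceCrop(croppedImage))),
// );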
This is the Test class that is supposed to display the image:
class Test extends StatelessWidget {
  final img;
  Test({this.img});

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: Center(
        child: Image.file(img),
      ),
    );
  }
}
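Image.file expects a File, so handing it the imglib.Image crop will not work. If the JPEG bytes are passed instead, as sketched above, a Test widget along these lines (a sketch using the assumed imgBytes parameter, not the original class) can render them with Image.memory:

import 'dart:typed_data';
import 'package:flutter/material.dart';

class Test extends StatelessWidget {
  // Assumed field: JPEG bytes produced from the cropped imglib.Image.
  final Uint8List imgBytes;
  Test({this.imgBytes});

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: Center(
        // Image.memory renders encoded bytes without touching the file system.
        child: Image.memory(imgBytes),
      ),
    );
  }
}

Alternatively, the bytes could be written to a file under the tempDir obtained from getApplicationDocumentsDirectory() and shown with Image.file, but Image.memory avoids the extra disk write.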