我有一个使用 flutter 和机器学习的工作代码,它使用语音识别来触发应用程序中的基本功能,例如:货币识别、颜色识别、文字朗读和有效期检测,
然后将所有这些翻译成语音以帮助视障人士。但是,我遇到了这个问题,例如,如果我说“检测货币”,它会触发正确的功能并加载相机以拍摄照片,但是,它会在一个不会停止的循环中执行此操作。我只是对如何让它工作感到困惑,一切似乎都很好,不应该进入循环,如果有人能提供帮助,我们将不胜感激。如果有人有兴趣查看,这是 Github 的链接:https://github.com/Tamer7/minor
这是 Main.dart 文件的代码
import 'package:flutter/material.dart';
import 'package:minor/color.dart';
import 'package:minor/currency.dart';
import 'package:minor/text/expiry.dart';
import 'package:speech_to_text/speech_to_text.dart' as stt;
import 'tts.dart';
import 'text/recognise.dart';
/// App entry point: hands the root widget straight to the framework.
void main() => runApp(MyApp());
/// Root widget: configures the [MaterialApp] shell and opens the
/// speech-command screen as the home route.
class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    final app = MaterialApp(
      title: 'Minor',
      debugShowCheckedModeBanner: false,
      home: SpeechScreen(),
    );
    return app;
  }
}
/// Screen that listens for voice commands and routes to a feature page.
class SpeechScreen extends StatefulWidget {
  @override
  State<SpeechScreen> createState() {
    return _SpeechScreenState();
  }
}
class _SpeechScreenState extends State<SpeechScreen> {
  stt.SpeechToText _speech;          // recognizer, created in initState
  bool _isListening = false;         // toggled by taps; true while mic is open
  String _text = 'Touch anywhere and start speaking';
  double _confidence = 1.0;          // last confidence rating shown in the AppBar

  @override
  void initState() {
    super.initState();
    speak("What do you want to do?");
    _speech = stt.SpeechToText();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Center(
          child: Text(
            'Accuracy: ${(_confidence * 100.0).toStringAsFixed(1)}%',
            style: TextStyle(
              fontFamily: 'nerko',
              color: Colors.red,
              fontSize: 30
            ),),
        ),
        backgroundColor: Colors.black,
      ),
      body: GestureDetector(
        // _listen is async; fire-and-forget is intended for a tap callback.
        onTap: () {
          _listen();
        },
        child: Container(
          color: Colors.black,
          width: double.infinity,
          height: double.infinity,
          child: Text(
            _text,
            style: const TextStyle(
              fontFamily: 'nerko',
              fontSize: 32.0,
              color: Colors.white,
              fontWeight: FontWeight.w400,
            ),
          ),
        ),
      ),
    );
  }

  /// Starts listening on first tap, stops on the next.
  ///
  /// BUG FIX: `onResult` fires many times with PARTIAL transcripts while the
  /// user is still speaking. The old code called `Navigator.push` inside that
  /// callback on every partial result, so one utterance like "detect currency"
  /// pushed the currency page repeatedly — the endless camera "loop" reported
  /// by the author. We now only dispatch the command once, when the recognizer
  /// reports the FINAL result, and we stop listening before navigating.
  void _listen() async {
    if (!_isListening) {
      bool available = await _speech.initialize(
        onStatus: (val) => print('onStatus: $val'),
        onError: (val) => print('onError: $val'),
      );
      if (available) {
        setState(() => _isListening = true);
        _speech.listen(
          onResult: (val) {
            setState(() {
              _text = val.recognizedWords;
              if (val.hasConfidenceRating && val.confidence > 0) {
                _confidence = val.confidence;
              }
            });
            // Act exactly once per utterance, on the final transcript.
            if (val.finalResult) {
              _handleCommand(val.recognizedWords.toLowerCase());
            }
          },
        );
      }
    } else {
      setState(() => _isListening = false);
      _speech.stop();
    }
  }

  /// Maps a finished transcript to a feature page and navigates to it.
  /// Unknown commands are ignored so the user can simply speak again.
  void _handleCommand(String command) {
    Widget target;
    if (command.contains('currency')) {
      target = currency();
    } else if (command.contains('color') || command.contains('colour')) {
      target = color();
    } else if (command.contains('text') || command.contains('read')) {
      target = TextRecognitionWidget();
    } else if (command.contains('expiry')) {
      target = Expiry();
    }
    if (target == null) return;
    // Shut the microphone before leaving this screen so recognition does not
    // keep running (and re-triggering) underneath the pushed page.
    _speech.stop();
    setState(() => _isListening = false);
    Navigator.push(
        context, MaterialPageRoute(builder: (context) => target));
  }
}
这是currency.dart文件的代码
import 'package:flutter/material.dart';
import "package:tflite/tflite.dart";
import 'package:image_picker/image_picker.dart';
import 'dart:io';
import 'tts.dart';
/// Currency-recognition page.
///
/// NOTE(review): Dart convention is UpperCamelCase for type names; the name
/// `currency` is kept because other files construct it by this name.
class currency extends StatefulWidget {
  @override
  _currencyState createState() {
    return _currencyState();
  }
}
class _currencyState extends State<currency> {
  File _image;      // photo taken by the user; null until the camera returns
  List _outputs;    // TFLite classification results; null/empty on failure
  bool _loading = false;

  @override
  void initState() {
    super.initState();
    _loading = true;
    speak('Try to capture the notes one by one. Click anywhere to open the camera.');
    // Model loading is async; keep the spinner up until it completes.
    loadModel().then((value) {
      setState(() {
        _loading = false;
      });
    });
  }

  /// Human-readable classification result for display and speech.
  ///
  /// BUG FIX: the old build method dereferenced `_outputs[0]` with no guard,
  /// so a failed classification (Tflite returns null) crashed the UI.
  String _resultText() {
    if (_outputs == null || _outputs.isEmpty) {
      return "Sorry, I couldn't recognise that note. Tap to go back and try again.";
    }
    // Labels are assumed to look like "0 500": substring(2) strips the leading
    // class index. TODO(review): confirm against assets/labels.txt.
    return "Yippie! you got ${_outputs[0]["label"].toString().substring(2)} rupees, now you will be rich.";
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text(
          'Currency Recognition',
          style: TextStyle(
            fontFamily: 'nerko',
            fontSize: 30,
            color: Colors.red,
          ),
        ),
        backgroundColor: Colors.black,
      ),
      body: _loading
          ? Container(
        alignment: Alignment.center,
        child: CircularProgressIndicator(),
      )
          : Container(
        color: Colors.black,
        width: MediaQuery.of(context).size.width,
        child: Column(
          crossAxisAlignment: CrossAxisAlignment.center,
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            _image == null ? Expanded(
              child: GestureDetector(
                onTap: pickImage,
                child: Container(
                  height: double.infinity,
                  width: double.infinity,
                  color: Colors.black,
                  child: Column(
                    mainAxisAlignment: MainAxisAlignment.center,
                    children: [
                      Text(
                        'Click anywhere to open the Camera',
                        style: TextStyle(
                          color: Colors.white,
                          fontSize: 30,
                          fontFamily: 'nerko'
                        ),
                      ),
                    ],
                  ),
                ),
              ),
            ) : GestureDetector(
              // Tapping the result clears the whole stack back to the
              // speech screen so listening can start fresh.
              onTap: ()=> Navigator.of(context).pushNamedAndRemoveUntil('/', (Route<dynamic> route) => false),
              child: Column(
                children: [
                  Image.file(_image),
                  SizedBox(
                    height: 10,
                  ),
                  Text(
                    _resultText(),
                    style: TextStyle(
                      color: Colors.red,
                      fontFamily: 'nerko',
                      fontSize: 30,
                    ),
                  ),
                ],
              ),
            ),
          ],
        ),
      ),
    );
  }

  /// Opens the camera; stores the captured photo and kicks off classification.
  pickImage() async {
    var image = await ImagePicker.pickImage(source: ImageSource.camera);
    if (image == null) return null; // user cancelled the camera
    setState(() {
      _loading = true;
      _image = image;
    });
    classifyImage(image);
  }

  /// Runs the TFLite model on [image] and speaks the outcome.
  ///
  /// Speaks an explicit failure message when the model returns nothing,
  /// instead of silently crashing the blind user's flow.
  classifyImage(File image) async {
    var output = await Tflite.runModelOnImage(
      path: image.path,
      numResults: 2,
      threshold: 0.5,
      imageMean: 127.5,
      imageStd: 127.5,
    );
    setState(() {
      _loading = false;
      _outputs = output;
    });
    if (_outputs != null && _outputs.isNotEmpty) {
      speak("Yippie! you got ${_outputs[0]["label"].toString().substring(2)} rupees, now you will be rich. Click anywhere to start again.");
    } else {
      speak("Sorry, I couldn't recognise that note. Click anywhere to try again.");
    }
  }

  /// Loads the bundled model and label file into the TFLite interpreter.
  loadModel() async {
    await Tflite.loadModel(
      model: "assets/model_unquant.tflite",
      labels: "assets/labels.txt",
    );
  }

  @override
  void dispose() {
    Tflite.close(); // release native interpreter resources
    super.dispose();
  }
}