summary refs log tree commit diff
diff options
context:
space:
mode:
author	xAlpharax <42233094+xAlpharax@users.noreply.github.com>	2023-10-15 17:06:36 +0300
committer	xAlpharax <42233094+xAlpharax@users.noreply.github.com>	2023-10-15 17:06:36 +0300
commit	244f367d7b47a301fe5eeba63e899c570122d4c2 (patch)
tree	1d6b92e43ba8a2d93aec94b0af545fd2cd0090f6
parent	2d9d9e3cf6e5dbb0b162cc3d085771e55a6084ed (diff)
YOLO view as main.
Changes to be committed: modified: lib/main.dart
-rw-r--r--	lib/main.dart	241
1 file changed, 23 insertions(+), 218 deletions(-)
diff --git a/lib/main.dart b/lib/main.dart
index a94f581..1cb08b6 100644
--- a/lib/main.dart
+++ b/lib/main.dart
@@ -1,5 +1,5 @@
import 'package:camera/camera.dart';
-import 'package:permission_handler/permission_handler.dart';
+// import 'package:permission_handler/permission_handler.dart';
// import 'package:google_mlkit_text_recognition/google_mlkit_text_recognition.dart';
// import 'package:google_mlkit_object_detection/google_mlkit_object_detection.dart';
// import 'package:flutter_tflite/flutter_tflite.dart';
@@ -32,28 +32,28 @@ class App extends StatelessWidget {
),
debugShowCheckedModeBanner: false,
// home: const MainScreen(),
- home: const MainScreen()
+ home: const YoloVideo()
);
}
}
-class MainScreen extends StatefulWidget {
- const MainScreen({super.key});
+// YOLO V5 REAL-TIME OBJECT DETECTION
+
+class YoloVideo extends StatefulWidget {
+ const YoloVideo({super.key});
@override
- State<MainScreen> createState() => _MainScreenState();
+ State<YoloVideo> createState() => _YoloVideoState();
}
-class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
- bool _isPermissionGranted = false;
-
- late final Future<void> _future;
- CameraController? _cameraController;
+class _YoloVideoState extends State<YoloVideo> {
+ late CameraController controller;
+ late List<Map<String, dynamic>> yoloResults;
+ CameraImage? cameraImage;
+ bool isLoaded = false;
+ bool isDetecting = false;
late FlutterVision vision; // YOLO
-
- // final textRecognizer = TextRecognizer(); // OCR
-
FlutterTts flutterTts = FlutterTts(); // TTS
@override
@@ -63,221 +63,23 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
vision = FlutterVision(); // YOLO
initTTS(); // TTS
- WidgetsBinding.instance.addObserver(this);
-
- _future = _requestCameraPermission();
+ init();
}
Future<void> initTTS() async { // TTS
await flutterTts.setLanguage("en-US"); // Set the language you want
- await flutterTts.setSpeechRate(0.5); // Adjust speech rate (1.0 is normal but too fast for my liking)
+ await flutterTts.setSpeechRate(1.0); // Adjust speech rate (1.0 is normal but too fast for my liking)
await flutterTts.setVolume(1.0); // Adjust volume (0.0 to 1.0)
await flutterTts.setPitch(1.0); // Adjust pitch (1.0 is normal)
-
- // You can set other configurations as well
-
- // Check if TTS is available
- // bool isAvailable = await flutterTts.isLanguageAvailable("en-US");
- // print("TTS is available: $isAvailable");
}
Future<void> speak(String text) async {
await flutterTts.speak(text); // TTS
}
- @override
- void dispose() {
- WidgetsBinding.instance.removeObserver(this);
- _stopCamera();
- // textRecognizer.close(); // OCR Stop
- flutterTts.stop(); // TTS Stop
- vision.closeYoloModel(); // YOLO Stop
- super.dispose();
- }
-
- @override
- void didChangeAppLifecycleState(AppLifecycleState state) {
- if (_cameraController == null || !_cameraController!.value.isInitialized) {
- return;
- }
-
- if (state == AppLifecycleState.inactive) {
- _stopCamera();
- } else if (state == AppLifecycleState.resumed &&
- _cameraController != null &&
- _cameraController!.value.isInitialized) {
- _startCamera();
- }
- }
-
- @override
- Widget build(BuildContext context) {
- return FutureBuilder(
- future: _future,
- builder: (context, snapshot) {
- return Stack(
- children: [
- if (_isPermissionGranted)
- FutureBuilder<List<CameraDescription>>(
- future: availableCameras(),
- builder: (context, snapshot) {
- if (snapshot.hasData) {
- _initCameraController(snapshot.data!);
-
- return Center(child: CameraPreview(_cameraController!));
- } else {
- return const LinearProgressIndicator();
- }
- },
- ),
- Scaffold(
- appBar: AppBar(
- backgroundColor: Theme.of(context).colorScheme.inversePrimary,
- title: const Text('MegaView'),
- centerTitle: true,
- ),
- backgroundColor: _isPermissionGranted ? Colors.transparent : null,
- body: _isPermissionGranted
- ? Column(
- mainAxisAlignment: MainAxisAlignment.end,
- children: [
- Container(),
- Container(
- color: Theme.of(context).colorScheme.inversePrimary,
- alignment: Alignment.center,
- child: Padding(
- padding: const EdgeInsets.symmetric(vertical: 8.0),
- child: ElevatedButton(
- onPressed: _scanImage,
- style: ButtonStyle(
- minimumSize: MaterialStateProperty.all<Size>(
- const Size(256, 64), // Set the desired width and height
- ),
- ),
- child: const Text('Scan'),
- ),
- ),
- ),
- ],
- )
- : Center(
- child: Container(
- padding: const EdgeInsets.only(left: 24.0, right: 24.0),
- child: const Text(
- 'Camera permission denied',
- textAlign: TextAlign.center,
- ),
- ),
- ),
- ),
- ],
- );
- },
- );
- }
-
- Future<void> _requestCameraPermission() async {
- final status = await Permission.camera.request();
- _isPermissionGranted = status == PermissionStatus.granted;
- }
-
- void _startCamera() {
- if (_cameraController != null) {
- _cameraSelected(_cameraController!.description);
- }
- }
-
- void _stopCamera() {
- if (_cameraController != null) {
- _cameraController?.dispose();
- }
- }
-
- void _initCameraController(List<CameraDescription> cameras) {
- if (_cameraController != null) {
- return;
- }
-
- // Select the first rear camera.
- CameraDescription? camera;
- for (var i = 0; i < cameras.length; i++) {
- final CameraDescription current = cameras[i];
- if (current.lensDirection == CameraLensDirection.back) {
- camera = current;
- break;
- }
- }
-
- if (camera != null) {
- _cameraSelected(camera);
- }
- }
-
- Future<void> _cameraSelected(CameraDescription camera) async {
- _cameraController = CameraController(
- camera,
- ResolutionPreset.high,
- enableAudio: false,
- );
-
- await _cameraController!.initialize();
- await _cameraController!.setFlashMode(FlashMode.off);
-
- if (!mounted) {
- return;
- }
- setState(() {});
- }
-
- Future<void> _scanImage() async {
- if (_cameraController == null) return;
-
- final navigator = Navigator.of(context);
-
- // final pictureFile = await _cameraController!.takePicture();
-
- // final file = File(pictureFile.path);
-
- // final inputImage = InputImage.fromFile(file);
- // final recognizedText = await textRecognizer.processImage(inputImage);
-
- // speak(recognizedText.text);
-
- await navigator.push(
- MaterialPageRoute(
- builder: (BuildContext context) =>
- YoloVideo(vision: vision)
- ),
- );
- }
-}
-
-// YOLO V5 REAL-TIME OBJECT DETECTION
-
-class YoloVideo extends StatefulWidget {
- final FlutterVision vision;
- const YoloVideo({Key? key, required this.vision}) : super(key: key);
-
- @override
- State<YoloVideo> createState() => _YoloVideoState();
-}
-
-class _YoloVideoState extends State<YoloVideo> {
- late CameraController controller;
- late List<Map<String, dynamic>> yoloResults;
- CameraImage? cameraImage;
- bool isLoaded = false;
- bool isDetecting = false;
-
- @override
- void initState() {
- super.initState();
- init();
- }
-
init() async {
cameras = await availableCameras();
- controller = CameraController(cameras[0], ResolutionPreset.high);
+ controller = CameraController(cameras[0], ResolutionPreset.high, enableAudio: false);
controller.initialize().then((value) {
loadYoloModel().then((value) {
setState(() {
@@ -291,6 +93,8 @@ class _YoloVideoState extends State<YoloVideo> {
@override
void dispose() async {
+ flutterTts.stop(); // TTS Stop
+ vision.closeYoloModel(); // YOLO Stop
super.dispose();
controller.dispose();
}
@@ -300,8 +104,9 @@ class _YoloVideoState extends State<YoloVideo> {
final Size size = MediaQuery.of(context).size;
if (!isLoaded) {
return const Scaffold(
+ backgroundColor: Colors.transparent,
body: Center(
- child: Text("Model not loaded, waiting for it"),
+ child: Text("Model not loaded. Waiting for it."),
),
);
}
@@ -354,7 +159,7 @@ class _YoloVideoState extends State<YoloVideo> {
}
Future<void> loadYoloModel() async {
- await widget.vision.loadYoloModel(
+ await vision.loadYoloModel(
labels: 'assets/labels.txt',
modelPath: 'assets/yolov5n.tflite',
modelVersion: "yolov5",
@@ -366,7 +171,7 @@ class _YoloVideoState extends State<YoloVideo> {
}
Future<void> yoloOnFrame(CameraImage cameraImage) async {
- final result = await widget.vision.yoloOnFrame(
+ final result = await vision.yoloOnFrame(
bytesList: cameraImage.planes.map((plane) => plane.bytes).toList(),
imageHeight: cameraImage.height,
imageWidth: cameraImage.width,
@@ -411,7 +216,7 @@ class _YoloVideoState extends State<YoloVideo> {
return yoloResults.map((result) {
- // speak("${result['tag']}.toStringAsFixed(0)}")
+ speak("${result['tag']}");
return Positioned(
left: result["box"][0] * factorX,