// gascom/lib/screens/face_detection_screen.dart

import 'dart:io';
import 'package:camerawesome/camerawesome_plugin.dart';
import 'package:flutter/material.dart';
import 'package:gascom/constants/app_theme.dart';
import 'package:gascom/extensions/face_detection_extension.dart';
import 'package:gascom/screens/order_details_screen.dart';
import 'package:gascom/widgets/custom_app_bar.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import 'package:path_provider/path_provider.dart';
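
/// Face-verification screen.
///
/// Streams front-camera frames through ML Kit's [FaceDetector] and, once a
/// single uncovered face has been held steady inside the cut-out for the
/// required stable time, navigates on to [OrderDetailsScreen].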
class FaceDetectionScreen extends StatefulWidget {
  const FaceDetectionScreen({super.key});

  @override
  State<FaceDetectionScreen> createState() => _FaceDetectionScreenState();
}

class _FaceDetectionScreenState extends State<FaceDetectionScreen> {
  // "Place your face in the marked circle and make sure the lighting is good."
  String message = "ضع وجهك في الدائرة المخصصة وتأكد من وجود إضاءة مناسبة";

  final options = FaceDetectorOptions(
    enableContours: true,
    enableClassification: true,
    enableLandmarks: true,
    performanceMode: FaceDetectorMode.accurate,
    minFaceSize: 0.5,
  );
  late final faceDetector = FaceDetector(options: options);

  // Face stability tracking.
  DateTime? _stableStartTime;
  Rect? _lastFacePosition;

  // Constants for stability detection.
  final double _movementThreshold = 40;
  final Duration _requiredStableTime = const Duration(seconds: 2);

  @override
  void dispose() {
    faceDetector.close();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: const CustomAppBar(
        title: "تحقق الوجه", // "Face verification"
      ),
      body: SafeArea(
        child: Stack(
          children: [
            // Preview-only camera: frames are forwarded to ML Kit through
            // handleImageAnalysis; the builder adds no overlay of its own.
            CameraAwesomeBuilder.previewOnly(
              builder: (state, preview) => const SizedBox(),
              imageAnalysisConfig: AnalysisConfig(
                maxFramesPerSecond: 10,
              ),
              onImageForAnalysis: handleImageAnalysis,
              sensorConfig: SensorConfig.single(
                aspectRatio: CameraAspectRatios.ratio_4_3,
                flashMode: FlashMode.none,
                sensor: Sensor.position(SensorPosition.front),
                zoom: 0.0,
              ),
            ),
            // Solid overlay with a rounded cut-out that frames the face.
            Center(
              child: ClipPath(
                clipper: HoleClipper(),
                child: Container(
                  color: AppTheme.primaryColor,
                ),
              ),
            ),
            Positioned(
              bottom: 50,
              left: 20,
              right: 20,
              child: Text(
                message,
                style: Theme.of(context).textTheme.bodyMedium,
                textAlign: TextAlign.center,
              ),
            ),
          ],
        ),
      ),
    );
  }
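
  /// Returns true when the face's bounding-box centre has moved less than
  /// [_movementThreshold] pixels (Manhattan distance) since the previous
  /// analysed frame; the first call only records a baseline and returns false.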
  bool _isFaceStable(Rect currentFace) {
    if (_lastFacePosition == null) {
      _lastFacePosition = currentFace;
      return false;
    }
    final double movement =
        (currentFace.center.dx - _lastFacePosition!.center.dx).abs() +
            (currentFace.center.dy - _lastFacePosition!.center.dy).abs();
    _lastFacePosition = currentFace;
    return movement < _movementThreshold;
  }
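
  /// Heuristic check for an obstructed face: treats the face as covered
  /// (returns true) unless all required contours were detected, all essential
  /// landmarks are present, and both eyes are open with probability above 0.7.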
  bool _isFaceCovered(Face face) {
    // Essential face contours that must be present and visible.
    final requiredContours = [
      FaceContourType.face,
      FaceContourType.leftEye,
      FaceContourType.rightEye,
      FaceContourType.noseBridge,
      FaceContourType.noseBottom,
      FaceContourType.leftCheek,
      FaceContourType.rightCheek,
      FaceContourType.upperLipTop,
      FaceContourType.upperLipBottom,
      FaceContourType.lowerLipTop,
      FaceContourType.lowerLipBottom,
    ];

    // Count the visible contours.
    int visibleContours = 0;
    for (var contourType in requiredContours) {
      if (face.contours[contourType]?.points.isNotEmpty ?? false) {
        visibleContours++;
      }
    }

    // Check landmark visibility.
    final hasEssentialLandmarks =
        face.landmarks[FaceLandmarkType.leftEye] != null &&
            face.landmarks[FaceLandmarkType.rightEye] != null &&
            face.landmarks[FaceLandmarkType.noseBase] != null &&
            face.landmarks[FaceLandmarkType.bottomMouth] != null &&
            face.landmarks[FaceLandmarkType.leftMouth] != null &&
            face.landmarks[FaceLandmarkType.rightMouth] != null &&
            face.landmarks[FaceLandmarkType.leftEar] != null &&
            face.landmarks[FaceLandmarkType.rightEar] != null;

    // Check that both eyes are open.
    final eyesOpen = (face.rightEyeOpenProbability ?? 0) > 0.7 &&
        (face.leftEyeOpenProbability ?? 0) > 0.7;

    debugPrint('face check: contours=$visibleContours/'
        '${requiredContours.length}, landmarks=$hasEssentialLandmarks, '
        'eyesOpen=$eyesOpen');

    return !hasEssentialLandmarks ||
        visibleContours != requiredContours.length ||
        !eyesOpen;
  }
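
  /// Called for every analysed camera frame.
  ///
  /// Runs ML Kit face detection on the frame and updates [message] with
  /// guidance until exactly one uncovered face is held steady and well inside
  /// the frame for [_requiredStableTime]; it then closes the detector and
  /// navigates to [OrderDetailsScreen].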
  Future<void> handleImageAnalysis(AnalysisImage img) async {
    final inputImage = img.toInputImage();
    try {
      final faces = await faceDetector.processImage(inputImage);
      if (!mounted) return;

      if (faces.length == 1) {
        var face = faces.first;
        var rect = face.boundingBox;
        final bool isStable = _isFaceStable(rect);

        // Reject frames where the face appears covered.
        if (_isFaceCovered(face)) {
          setState(() {
            // "Please remove any covering from your face."
            message = "الرجاء إزالة أي غطاء عن الوجه";
          });
          _stableStartTime = null;
          return;
        }

        if (!isStable) {
          setState(() {
            // "Hold your face still in the marked area and make sure it is a real face."
            message = "ثبت وجهك في المكان المخصص وتأكد من أنه وجه حقيقي";
          });
          _stableStartTime = null;
          return;
        }

        // The face must sit well inside the frame with both eyes open.
        if (!(rect.left > (inputImage.metadata?.size.width ?? 0) * 0.1 &&
            rect.right < (inputImage.metadata?.size.width ?? 0) * 0.9 &&
            rect.top > (inputImage.metadata?.size.height ?? 0) * 0.1 &&
            rect.bottom < (inputImage.metadata?.size.height ?? 0) * 0.9 &&
            (face.rightEyeOpenProbability ?? 0) > 0.3 &&
            (face.leftEyeOpenProbability ?? 0) > 0.3)) {
          setState(() {
            // "Hold your face still in the marked area."
            message = "ثبت وجهك في المكان المخصص";
          });
          return;
        }

        _stableStartTime ??= DateTime.now();
        final stableDuration = DateTime.now().difference(_stableStartTime!);

        if (stableDuration >= _requiredStableTime) {
          // Face held steady long enough: stop detection and move on.
          img.when(
            nv21: (image) {
              faceDetector.close();
              Navigator.pop(context);
              Navigator.pushReplacement(
                context,
                MaterialPageRoute(
                    builder: (context) => const OrderDetailsScreen()),
              );
            },
            bgra8888: (image) {
              faceDetector.close();
              Navigator.pop(context);
              Navigator.pushReplacement(
                context,
                MaterialPageRoute(
                    builder: (context) => const OrderDetailsScreen()),
              );
            },
          );
          return;
        } else {
          setState(() {
            // "Hold your face still in the marked area."
            message = "ثبت وجهك في المكان المخصص";
          });
        }
      }
    } catch (error) {
      debugPrint("Face detection failed on analysed image: $error");
    }
  }
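
  /// Builds capture file paths under a camerawesome/ folder in the temporary
  /// directory, one JPEG per sensor. Not referenced by the preview-only
  /// builder above; kept for a capture flow that saves photos to disk.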
  Future<CaptureRequest> handleBuildPath(List<Sensor> sensors) async {
    final Directory extDir = await getTemporaryDirectory();
    final Directory testDir = await Directory('${extDir.path}/camerawesome')
        .create(recursive: true);
    if (sensors.length == 1) {
      final String filePath =
          '${testDir.path}/${DateTime.now().millisecondsSinceEpoch}.jpg';
      return SingleCaptureRequest(filePath, sensors.first);
    } else {
      return MultipleCaptureRequest(
        {
          for (final sensor in sensors)
            sensor:
                '${testDir.path}/${sensor.position == SensorPosition.front ? 'front_' : 'back_'}${DateTime.now().millisecondsSinceEpoch}.jpg',
        },
      );
    }
  }
}
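
/// Clips the overlay to a full-screen rectangle with a rounded-rectangle hole
/// cut out near the screen centre (even-odd fill), creating the face window.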
class HoleClipper extends CustomClipper<Path> {
  @override
  Path getClip(Size size) {
    final path = Path()
      ..addRect(Rect.fromLTWH(0, 0, size.width, size.height))
      ..addRRect(RRect.fromRectAndRadius(
        Rect.fromCenter(
          center: Offset(size.width / 2, size.height * 0.45),
          width: size.width - (size.width * 0.2), // 80% of the screen width
          height: size.height - (size.height * 0.3), // 70% of the screen height
        ),
        const Radius.circular(20), // corner radius of the cut-out
      ))
      ..fillType = PathFillType.evenOdd;
    return path;
  }

  @override
  bool shouldReclip(covariant CustomClipper<Path> oldClipper) {
    return false;
  }
}
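
// Usage sketch (illustrative, not part of the original file): the screen is a
// plain widget, so a caller can simply push it and let it navigate on to
// OrderDetailsScreen once verification succeeds.
//
//   Navigator.push(
//     context,
//     MaterialPageRoute(builder: (_) => const FaceDetectionScreen()),
//   );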