Remove unused Google ML Kit dependencies and streamline face detection logic

This commit is contained in:
Mohammed Al-Samarraie
2026-03-14 15:03:40 +03:00
parent 0606e26ef7
commit eebec9b7a2
5 changed files with 103 additions and 652 deletions

View File

@@ -1,17 +1,11 @@
import 'dart:async';
import 'dart:io';
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:flutter/foundation.dart';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import '../../core/error/exceptions.dart';
import '../face/face_feedback.dart';
import 'package:local_auth/local_auth.dart';
class OvalCameraCapturePage extends StatefulWidget {
@@ -36,61 +30,27 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
bool _isSuccess = false;
bool _isSubmitting = false;
bool _isStreaming = false;
// Handles the auth state when the server returns a 422 status code
// Timer countdown
int _countdown = 3;
Timer? _countdownTimer;
bool _countdownStarted = false;
// Local auth for 422
final LocalAuthentication _localAuth = LocalAuthentication();
bool _handlingAuth422 = false; // prevents multiple dialogs/auth prompts
File? _lastCapturedFile; // keep the same image for retry
// Smart feedback
FaceFeedback _feedback = FaceFeedback(
type: FaceHintType.noFace,
message: "ضع وجهك داخل الإطار",
quality: 0,
borderColor: Colors.white70,
);
double _progress = 0;
bool _isDetecting = false;
int _frameCount = 0;
// Stability tracking
Rect? _lastFaceRect;
int _stableFrames = 0;
bool _showManualCapture = false;
Timer? _manualCaptureTimer;
String _debugInfo = "Initializing...";
late final FaceDetector _faceDetector = FaceDetector(
options: FaceDetectorOptions(
performanceMode: FaceDetectorMode.fast,
enableTracking: true,
enableClassification: true,
enableLandmarks: false,
enableContours: false,
),
);
static const int _throttleEveryNFrames = 5;
static const int _stableFramesNeeded = 3;
bool _handlingAuth422 = false;
File? _lastCapturedFile;
@override
void initState() {
  super.initState();
  // Kick off camera setup immediately — no network calls happen here;
  // submission to the server only occurs later in _captureAndSubmit.
  _initializeCamera();
}
@override
void dispose() {
  // Cancel pending timers before tearing down the camera.
  _manualCaptureTimer?.cancel();
  // Fire-and-forget: dispose() cannot await, so the stream stop is not awaited.
  _stopImageStream();
  _countdownTimer?.cancel();
  _cameraController?.dispose();
  // Release the ML Kit detector's native resources.
  _faceDetector.close();
  super.dispose();
}
@@ -101,9 +61,8 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
_isCameraInitialized = false;
_isSuccess = false;
_isSubmitting = false;
_progress = 0;
_stableFrames = 0;
_lastFaceRect = null;
_countdown = 3;
_countdownStarted = false;
});
await _cameraController?.dispose();
@@ -128,10 +87,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
selectedCamera,
ResolutionPreset.medium,
enableAudio: false,
imageFormatGroup:
Platform.isAndroid
? ImageFormatGroup.yuv420
: ImageFormatGroup.bgra8888,
);
await _cameraController!.initialize();
@@ -140,21 +95,9 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
setState(() {
_isCameraInitialized = true;
_isStreaming = false;
_showManualCapture = false;
_debugInfo = "Ready. Cam: ${selectedCamera.lensDirection}";
});
_manualCaptureTimer?.cancel();
_manualCaptureTimer = Timer(const Duration(seconds: 10), () {
if (mounted && _isCameraInitialized && !_isSuccess && !_isSubmitting) {
setState(() {
_showManualCapture = true;
});
}
});
_startSmartStream();
_startCountdown();
} catch (e) {
if (!mounted) return;
setState(() {
@@ -164,124 +107,49 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
}
void _startSmartStream() {
if (_cameraController == null || !_cameraController!.value.isInitialized) {
return;
}
if (_isStreaming) return;
void _startCountdown() {
if (_countdownStarted) return;
_countdownStarted = true;
_countdown = 3;
_isStreaming = true;
_countdownTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
if (!mounted) {
timer.cancel();
return;
}
try {
_cameraController!.startImageStream((CameraImage image) async {
if (!mounted) return;
if (_isSubmitting || _isSuccess) return;
_frameCount++;
if (_frameCount % _throttleEveryNFrames != 0) return;
if (_isDetecting) return;
_isDetecting = true;
try {
final inputImage = _toInputImage(
image,
_cameraController!.description,
);
if (inputImage == null) {
_isDetecting = false;
return;
}
final faces = await _faceDetector.processImage(inputImage);
if (faces.isEmpty) {
_stableFrames = 0;
_applyFeedback(
FaceFeedback(
type: FaceHintType.noFace,
message: "ضع وجهك داخل الإطار",
quality: 0,
borderColor: Colors.white70,
),
);
_isDetecting = false;
return;
}
final face = faces.first;
final brightness = _estimateBrightness(image);
final rotation =
inputImage.metadata?.rotation ?? InputImageRotation.rotation0deg;
final feedback = _evaluate(
face: face,
brightness: brightness,
image: image,
rotation: rotation,
);
_applyFeedback(feedback);
if (feedback.isGood) {
_stableFrames++;
_progress = (_stableFrames / _stableFramesNeeded).clamp(0.0, 1.0);
if (_stableFrames >= _stableFramesNeeded) {
if (mounted) {
setState(() {
_debugInfo = "جاري التحقق من الصورة...";
});
}
_isDetecting = false;
await _captureAndSubmit();
return;
}
} else {
if (_stableFrames > 0) _stableFrames--;
_progress = (_stableFrames / _stableFramesNeeded).clamp(0.0, 1.0);
}
if (mounted && !_isSubmitting && !_isSuccess) {
setState(() {
_debugInfo =
"Faces: ${faces.length} | Bright: ${brightness.toStringAsFixed(1)}\n"
"Msg: ${feedback.message} | Stable: $_stableFrames";
});
}
} catch (e) {
debugPrint("Face detection error: $e");
} finally {
_isDetecting = false;
}
setState(() {
_countdown--;
});
} catch (e) {
debugPrint("Error starting image stream: $e");
_isStreaming = false;
}
if (_countdown <= 0) {
timer.cancel();
_captureAndSubmit();
}
});
}
Future<bool?> _showLocalAuthDialog() {
return showDialog<bool>(
context: context,
barrierDismissible: false,
builder:
(_) => AlertDialog(
title: const Text('فشل التحقق بالوجه', textAlign: TextAlign.center),
content: const Text(
'لم يتم التعرف على الوجه.\n\nيرجى استخدام بصمة الإصبع أو رمز القفل (PIN/النمط) للمتابعة.',
textAlign: TextAlign.center,
),
actions: [
TextButton(
onPressed: () => Navigator.pop(context, false),
child: const Text('إلغاء'),
),
ElevatedButton(
onPressed: () => Navigator.pop(context, true),
child: const Text('استخدام البصمة / رمز القفل'),
),
],
builder: (_) => AlertDialog(
title: const Text('فشل التحقق بالوجه', textAlign: TextAlign.center),
content: const Text(
'لم يتم التعرف على الوجه.\n\nيرجى استخدام بصمة الإصبع أو رمز القفل (PIN/النمط) للمتابعة.',
textAlign: TextAlign.center,
),
actions: [
TextButton(
onPressed: () => Navigator.pop(context, false),
child: const Text('إلغاء'),
),
ElevatedButton(
onPressed: () => Navigator.pop(context, true),
child: const Text('استخدام البصمة / رمز القفل'),
),
],
),
);
}
@@ -293,7 +161,7 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
return await _localAuth.authenticate(
localizedReason: 'تأكيد هويتك لإكمال تسجيل الحضور.',
options: const AuthenticationOptions(
biometricOnly: false, // ✅ allows PIN/pattern fallback
biometricOnly: false,
stickyAuth: true,
useErrorDialogs: true,
),
@@ -303,23 +171,9 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
}
/// Stops the active camera image stream, if one is running.
///
/// No-op when the stream was never started or the controller is gone.
/// Failures are logged rather than rethrown so teardown paths stay safe.
Future<void> _stopImageStream() async {
  if (_cameraController == null || !_isStreaming) return;
  try {
    await _cameraController!.stopImageStream();
    _isStreaming = false;
  } catch (e) {
    debugPrint("Error stopping image stream: $e");
  }
}
void _stopCameraCompletely() {
_manualCaptureTimer?.cancel();
_countdownTimer?.cancel();
try {
if (_isStreaming && _cameraController != null) {
_cameraController!.stopImageStream();
_isStreaming = false;
}
_cameraController?.dispose();
_cameraController = null;
} catch (e) {
@@ -327,114 +181,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
}
/// Grades a detected [face] against capture-quality gates, in order:
/// lighting, head pose, distance, centering, then frame-to-frame stability.
///
/// Returns the first failing hint, or a "good" feedback when all gates pass.
/// Side effect: updates [_lastFaceRect] when the stability gate is reached.
FaceFeedback _evaluate({
  required Face face,
  required double brightness,
  required CameraImage image,
  required InputImageRotation rotation,
}) {
  // Shared shape for every "not good yet" hint (all use the orange border).
  FaceFeedback warn(FaceHintType type, String message, double quality) {
    return FaceFeedback(
      type: type,
      message: message,
      quality: quality,
      borderColor: Colors.orangeAccent,
    );
  }

  // 1) Lighting: the scene must be bright enough for reliable detection.
  if (brightness < 40) {
    return warn(
      FaceHintType.tooDark,
      "المكان مظلم — انتقل لمكان أكثر إضاءة",
      0.1,
    );
  }

  // 2) Head pose: reject faces turned (yaw) or tilted (pitch) too far.
  final yawAbs = (face.headEulerAngleY ?? 0).abs();
  final pitchAbs = (face.headEulerAngleX ?? 0).abs();
  if (yawAbs > 20 || pitchAbs > 20) {
    return warn(FaceHintType.lookStraight, "انظر مباشرةً للكاميرا", 0.2);
  }

  // 3) Distance: compare face-box area to the rotation-corrected frame area.
  var frameW = image.width.toDouble();
  var frameH = image.height.toDouble();
  final quarterTurn = rotation == InputImageRotation.rotation90deg ||
      rotation == InputImageRotation.rotation270deg;
  if (quarterTurn) {
    final swap = frameW;
    frameW = frameH;
    frameH = swap;
  }
  final box = face.boundingBox;
  final coverage = (box.width * box.height) / (frameW * frameH);
  if (coverage < 0.05) {
    return warn(FaceHintType.tooFar, "اقترب قليلاً", 0.3);
  }
  if (coverage > 0.8) {
    return warn(FaceHintType.tooClose, "ابتعد قليلاً", 0.3);
  }

  // 4) Centering: the face center must sit near the frame center.
  final offCenter =
      (Offset(box.center.dx, box.center.dy) -
              Offset(frameW / 2, frameH / 2))
          .distance;
  if (offCenter > math.min(frameW, frameH) * 0.4) {
    return warn(FaceHintType.notCentered, "وسط وجهك داخل الإطار", 0.4);
  }

  // 5) Stability: a large jump since the previous frame means the phone
  //    (or face) is moving; remember the new box either way.
  final previous = _lastFaceRect;
  if (previous != null && (box.center - previous.center).distance > 40) {
    _lastFaceRect = box;
    return warn(FaceHintType.holdStill, "ثبت الهاتف وابقَ ثابتاً", 0.5);
  }

  _lastFaceRect = box;
  return FaceFeedback(
    type: FaceHintType.good,
    message: "ممتاز — ثبت قليلاً",
    quality: 1.0,
    borderColor: Colors.greenAccent,
  );
}
/// Publishes new face feedback to the UI.
///
/// Only calls setState when the hint type or message actually changed,
/// avoiding redundant rebuilds while the same hint repeats every frame.
void _applyFeedback(FaceFeedback f) {
  if (!mounted) return;
  final changed =
      _feedback.type != f.type || _feedback.message != f.message;
  if (changed) {
    setState(() => _feedback = f);
  }
}
Future<void> _captureAndSubmit() async {
if (_cameraController == null) return;
if (_isSubmitting || _isSuccess) return;
@@ -445,11 +191,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
});
try {
await _stopImageStream();
// Small delay to let camera settle after stopping stream
await Future.delayed(const Duration(milliseconds: 200));
if (_cameraController == null ||
!_cameraController!.value.isInitialized) {
_handleScanError("الكاميرا غير جاهزة، حاول مرة أخرى");
@@ -477,7 +218,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
} on ServerException catch (e) {
final msg = e.message.toLowerCase();
// If your ServerException has statusCode, prefer that:
if (e.statusCode == 422 || msg.contains('face verification failed')) {
await _handleFaceVerificationFailed422(e);
return;
@@ -485,29 +225,27 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
if (msg.contains('already logged in') ||
msg.contains('مسجل دخول بالفعل')) {
// Stop camera and go back with a dialog
_stopCameraCompletely();
if (mounted) {
showDialog(
context: context,
barrierDismissible: false,
builder:
(_) => AlertDialog(
title: const Text('تنبيه', textAlign: TextAlign.center),
content: const Text(
'أنت مسجل دخول بالفعل، لا يمكنك تسجيل الدخول مرة أخرى.',
textAlign: TextAlign.center,
),
actions: [
TextButton(
onPressed: () {
Navigator.of(context).pop(); // Close dialog
Navigator.of(context).pop(); // Go back from camera
},
child: const Text('حسناً'),
),
],
builder: (_) => AlertDialog(
title: const Text('تنبيه', textAlign: TextAlign.center),
content: const Text(
'أنت مسجل دخول بالفعل، لا يمكنك تسجيل الدخول مرة أخرى.',
textAlign: TextAlign.center,
),
actions: [
TextButton(
onPressed: () {
Navigator.of(context).pop();
Navigator.of(context).pop();
},
child: const Text('حسناً'),
),
],
),
);
}
return;
@@ -527,80 +265,12 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
setState(() {
_isSubmitting = false;
_errorMessage = msg;
_progress = 0;
_stableFrames = 0;
_countdown = 3;
_countdownStarted = false;
});
if (_cameraController != null &&
_cameraController!.value.isInitialized &&
!_isStreaming) {
_startSmartStream();
}
}
/// Estimates frame brightness by sampling every 100th byte of plane 0.
///
/// On YUV frames plane 0 is the luminance (Y) channel, so the average of
/// sampled bytes approximates overall scene brightness (0–255 range).
/// Returns 0 for frames with no planes or empty plane data.
double _estimateBrightness(CameraImage image) {
  if (image.planes.isEmpty) return 0;
  final luma = image.planes[0].bytes;
  if (luma.isEmpty) return 0;
  const samplingStride = 100; // Coarse sampling keeps this cheap per frame.
  var total = 0;
  var samples = 0;
  for (var i = 0; i < luma.length; i += samplingStride) {
    total += luma[i];
    samples++;
  }
  return samples == 0 ? 0 : total / samples;
}
InputImage? _toInputImage(CameraImage image, CameraDescription camera) {
final sensorOrientation = camera.sensorOrientation;
InputImageRotation? rotation;
if (Platform.isIOS) {
rotation = _rotationIntToImageRotation(sensorOrientation);
} else if (Platform.isAndroid) {
var rotationCompensation =
_orientations[_cameraController!.value.deviceOrientation];
if (rotationCompensation == null) return null;
if (camera.lensDirection == CameraLensDirection.front) {
rotationCompensation = (sensorOrientation + rotationCompensation) % 360;
} else {
rotationCompensation =
(sensorOrientation - rotationCompensation + 360) % 360;
}
rotation = _rotationIntToImageRotation(rotationCompensation);
}
if (rotation == null) return null;
if (Platform.isAndroid) {
final nv21 = _convertYUV420ToNV21(image);
return InputImage.fromBytes(
bytes: nv21,
metadata: InputImageMetadata(
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation,
format: InputImageFormat.nv21,
bytesPerRow: image.width,
),
);
}
// iOS BGRA8888
if (image.planes.length == 1) {
return InputImage.fromBytes(
bytes: image.planes.first.bytes,
metadata: InputImageMetadata(
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation,
format: InputImageFormat.bgra8888,
bytesPerRow: image.planes.first.bytesPerRow,
),
);
}
return null;
// Restart countdown after error
_startCountdown();
}
Future<void> _handleFaceVerificationFailed422(ServerException e) async {
@@ -609,13 +279,9 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
_handlingAuth422 = true;
// Stop everything so the camera doesn't keep scanning
await _stopImageStream();
setState(() {
_isSubmitting = false;
_errorMessage = null;
_debugInfo = "Face verification failed (422) → Local Auth...";
});
final proceed = await _showLocalAuthDialog();
@@ -623,11 +289,7 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
_handlingAuth422 = false;
_stopCameraCompletely();
if (!mounted) return;
// Go back to attendance + show message there
Navigator.of(context).pop(false);
// If you prefer to show inside this screen before pop:
// ScaffoldMessenger.of(context).showSnackBar(const SnackBar(content: Text('تم الإلغاء.')));
return;
}
@@ -636,13 +298,10 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
_handlingAuth422 = false;
_stopCameraCompletely();
if (!mounted) return;
// Return to attendance; attendance screen should show snack "failed fingerprint/pattern"
Navigator.of(context).pop("local_auth_failed");
return;
}
// Local auth success → retry SAME image with localAuth=true
final file = _lastCapturedFile;
if (file == null) {
_handlingAuth422 = false;
@@ -654,7 +313,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
setState(() {
_isSubmitting = true;
_debugInfo = "Local auth success → retrying with localAuth=true...";
});
try {
@@ -669,11 +327,10 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
Future.delayed(const Duration(seconds: 1), () {
if (mounted) Navigator.of(context).pop(true);
});
} on ServerException catch (e2) {
} on ServerException catch (_) {
_handlingAuth422 = false;
_stopCameraCompletely();
if (!mounted) return;
// Retry failed → go back to attendance
Navigator.of(context).pop("retry_failed");
} catch (_) {
_handlingAuth422 = false;
@@ -683,74 +340,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
}
/// Converts a three-plane YUV_420_888 [CameraImage] into a single NV21
/// buffer (full-resolution Y followed by interleaved VU at half resolution),
/// the format ML Kit expects on Android.
Uint8List _convertYUV420ToNV21(CameraImage image) {
  final int width = image.width;
  final int height = image.height;
  final yPlane = image.planes[0];
  final uPlane = image.planes[1];
  final vPlane = image.planes[2];
  // NV21 layout: width*height luma bytes, then width*height/2 chroma bytes.
  final int ySize = width * height;
  final int uvSize = ySize ~/ 2;
  final Uint8List nv21 = Uint8List(ySize + uvSize);
  // Y Channel: fast path when rows are tightly packed; otherwise copy
  // row-by-row to strip the per-row padding implied by bytesPerRow.
  if (yPlane.bytesPerRow == width) {
    nv21.setAll(0, yPlane.bytes);
  } else {
    int offset = 0;
    for (int i = 0; i < height; i++) {
      nv21.setRange(
        offset,
        offset + width,
        yPlane.bytes,
        i * yPlane.bytesPerRow,
      );
      offset += width;
    }
  }
  // UV Channel (NV21 is VU interleaved): chroma is subsampled 2x in both
  // dimensions, so iterate a half-resolution grid.
  final int uvWidth = width ~/ 2;
  final int uvHeight = height ~/ 2;
  final int uvPixelStride = uPlane.bytesPerPixel ?? 1;
  int uvIndex = ySize;
  for (int row = 0; row < uvHeight; row++) {
    // NOTE(review): srcIndex/pixelIndex are computed from the U plane's
    // bytesPerRow/bytesPerPixel but also used to index the V plane —
    // assumes both chroma planes share identical strides; TODO confirm
    // this holds on all target devices.
    final int srcIndex = row * uPlane.bytesPerRow;
    for (int col = 0; col < uvWidth; col++) {
      final int pixelIndex = srcIndex + (col * uvPixelStride);
      nv21[uvIndex++] = vPlane.bytes[pixelIndex];
      nv21[uvIndex++] = uPlane.bytes[pixelIndex];
    }
  }
  return nv21;
}
/// Maps a rotation in degrees onto ML Kit's [InputImageRotation] enum.
///
/// Any value other than the three quarter-turns falls back to 0°.
InputImageRotation _rotationIntToImageRotation(int rotation) {
  const mapping = <int, InputImageRotation>{
    90: InputImageRotation.rotation90deg,
    180: InputImageRotation.rotation180deg,
    270: InputImageRotation.rotation270deg,
  };
  return mapping[rotation] ?? InputImageRotation.rotation0deg;
}
// Device UI orientation → clockwise rotation compensation in degrees,
// consumed by _toInputImage when computing Android frame rotation.
static final _orientations = {
  DeviceOrientation.portraitUp: 0,
  DeviceOrientation.landscapeLeft: 90,
  DeviceOrientation.portraitDown: 180,
  DeviceOrientation.landscapeRight: 270,
};
@override
Widget build(BuildContext context) {
if (_errorMessage != null && !_isCameraInitialized) {
@@ -789,8 +378,12 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
Center(child: CameraPreview(_cameraController!)),
CustomPaint(
painter: _OvalOverlayPainter(
borderColor: _feedback.borderColor,
progress: _progress,
borderColor: _isSuccess
? Colors.greenAccent
: (_countdown <= 1 ? Colors.orangeAccent : Colors.white70),
progress: _countdownStarted
? ((3 - _countdown) / 3).clamp(0.0, 1.0)
: 0,
),
),
Positioned(
@@ -819,9 +412,13 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
borderRadius: BorderRadius.circular(20),
),
child: Text(
_feedback.message,
_isSubmitting
? "جاري التحقق..."
: _isSuccess
? "تم بنجاح"
: "التقاط الصورة خلال $_countdown ثانية",
style: TextStyle(
color: _feedback.borderColor,
color: _isSuccess ? Colors.greenAccent : Colors.white,
fontSize: 16,
fontWeight: FontWeight.w600,
),
@@ -830,6 +427,18 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
],
),
),
// Countdown number in center
if (!_isSubmitting && !_isSuccess && _countdown > 0)
Center(
child: Text(
'$_countdown',
style: TextStyle(
color: Colors.white.withOpacity(0.7),
fontSize: 80,
fontWeight: FontWeight.bold,
),
),
),
if (_isSubmitting)
const Center(child: CircularProgressIndicator(color: Colors.white)),
if (_isSuccess)
@@ -851,41 +460,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
),
),
),
Positioned(
bottom: 50,
left: 10,
right: 10,
child: Text(
_debugInfo,
style: const TextStyle(
color: Colors.yellow,
fontSize: 12,
backgroundColor: Colors.black54,
),
textAlign: TextAlign.center,
),
),
if (_showManualCapture && !_isSubmitting && !_isSuccess)
Positioned(
bottom: 110,
left: 0,
right: 0,
child: Center(
child: ElevatedButton.icon(
onPressed: _captureAndSubmit,
icon: const Icon(Icons.camera_alt),
label: const Text("التقاط يدوياً"),
style: ElevatedButton.styleFrom(
backgroundColor: Colors.redAccent,
foregroundColor: Colors.white,
padding: const EdgeInsets.symmetric(
horizontal: 24,
vertical: 12,
),
),
),
),
),
],
),
);
@@ -918,28 +492,25 @@ class _OvalOverlayPainter extends CustomPainter {
ovalPath,
);
final bgPaint =
Paint()
..color = Colors.black.withOpacity(0.6)
..style = PaintingStyle.fill;
final bgPaint = Paint()
..color = Colors.black.withOpacity(0.6)
..style = PaintingStyle.fill;
canvas.drawPath(overlayPath, bgPaint);
final borderPaint =
Paint()
..color = borderColor
..style = PaintingStyle.stroke
..strokeWidth = 4.0;
final borderPaint = Paint()
..color = borderColor
..style = PaintingStyle.stroke
..strokeWidth = 4.0;
canvas.drawOval(ovalRect, borderPaint);
if (progress > 0) {
final progressPaint =
Paint()
..color = Colors.greenAccent
..style = PaintingStyle.stroke
..strokeWidth = 6.0
..strokeCap = StrokeCap.round;
final progressPaint = Paint()
..color = Colors.greenAccent
..style = PaintingStyle.stroke
..strokeWidth = 6.0
..strokeCap = StrokeCap.round;
final startAngle = -math.pi / 2;
final sweepAngle = 2 * math.pi * progress;