Files
finger_print_app/lib/presentation/screens/face_screen2.dart
Daniah Ayad Al-sultani f616a2c104 changes have been made
2026-02-22 11:18:10 +03:00

963 lines
27 KiB
Dart
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
import 'dart:async';
import 'dart:io';
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:flutter/foundation.dart';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import '../../core/error/exceptions.dart';
import '../face/face_feedback.dart';
import 'package:local_auth/local_auth.dart';
/// Full-screen camera page that guides the user to position their face inside
/// an oval overlay, auto-captures a photo once the face is stable, and hands
/// the image to [onCapture] for server-side verification.
class OvalCameraCapturePage extends StatefulWidget {
// Whether this capture is for login (true) or logout (false).
// Only affects the title text shown at the top of the screen.
final bool isLogin;
// Invoked with the captured image file. `localAuth` is true when the
// submission follows a successful local (biometric/PIN) authentication
// fallback after the server rejected the face with a 422.
final Future<void> Function(File image, {required bool localAuth}) onCapture;
const OvalCameraCapturePage({
super.key,
this.isLogin = true,
required this.onCapture,
});
@override
State<OvalCameraCapturePage> createState() => _OvalCameraCapturePageState();
}
/// State machine for [OvalCameraCapturePage]:
/// camera init → throttled ML Kit face stream → quality feedback →
/// auto-capture after N stable frames → submit via
/// [OvalCameraCapturePage.onCapture], with a local-auth (biometric/PIN)
/// fallback when the server rejects the face with HTTP 422.
class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
  CameraController? _cameraController;
  bool _isCameraInitialized = false;
  String? _errorMessage;
  bool _isSuccess = false;
  bool _isSubmitting = false;
  bool _isStreaming = false;

  // Handles the fallback auth flow when the server returns a 422 status.
  final LocalAuthentication _localAuth = LocalAuthentication();
  bool _handlingAuth422 = false; // prevents multiple dialogs/auth prompts
  File? _lastCapturedFile; // keep the same image for retry

  // Smart feedback currently shown to the user (message + border colour).
  FaceFeedback _feedback = FaceFeedback(
    type: FaceHintType.noFace,
    message: "ضع وجهك داخل الإطار",
    quality: 0,
    borderColor: Colors.white70,
  );
  // 0..1 fill of the progress arc around the oval.
  double _progress = 0;
  // Re-entrancy guard: only one ML Kit detection in flight at a time.
  bool _isDetecting = false;
  int _frameCount = 0;

  // Stability tracking across frames.
  Rect? _lastFaceRect;
  int _stableFrames = 0;
  bool _showManualCapture = false;
  Timer? _manualCaptureTimer;
  String _debugInfo = "Initializing...";

  late final FaceDetector _faceDetector = FaceDetector(
    options: FaceDetectorOptions(
      performanceMode: FaceDetectorMode.fast,
      enableTracking: true,
      enableClassification: true,
      enableLandmarks: false,
      enableContours: false,
    ),
  );

  // Run detection only on every Nth camera frame to keep the UI responsive.
  static const int _throttleEveryNFrames = 5;
  // Consecutive "good" frames required before auto-capturing.
  static const int _stableFramesNeeded = 3;

  @override
  void initState() {
    super.initState();
    // Go straight to camera — no network calls here
    _initializeCamera();
  }

  @override
  void dispose() {
    _manualCaptureTimer?.cancel();
    _stopImageStream();
    _cameraController?.dispose();
    _faceDetector.close();
    super.dispose();
  }

  /// Resets all capture state, selects the front camera (falling back to the
  /// first available one), initializes the preview, arms a 10-second timer
  /// that reveals a manual-capture button, and starts the detection stream.
  Future<void> _initializeCamera() async {
    try {
      setState(() {
        _errorMessage = null;
        _isCameraInitialized = false;
        _isSuccess = false;
        _isSubmitting = false;
        _progress = 0;
        _stableFrames = 0;
        _lastFaceRect = null;
      });
      // Dispose any previous controller before re-initializing (retry path).
      await _cameraController?.dispose();
      _cameraController = null;
      final cameras = await availableCameras();
      if (cameras.isEmpty) {
        if (!mounted) return;
        setState(() {
          _errorMessage = "لا توجد كاميرات متاحة على هذا الجهاز";
          _isCameraInitialized = false;
        });
        return;
      }
      final front = cameras.where(
        (c) => c.lensDirection == CameraLensDirection.front,
      );
      final selectedCamera = front.isNotEmpty ? front.first : cameras.first;
      _cameraController = CameraController(
        selectedCamera,
        ResolutionPreset.medium,
        enableAudio: false,
        // ML Kit needs YUV420 on Android and BGRA8888 on iOS.
        imageFormatGroup:
            Platform.isAndroid
                ? ImageFormatGroup.yuv420
                : ImageFormatGroup.bgra8888,
      );
      await _cameraController!.initialize();
      if (!mounted) return;
      setState(() {
        _isCameraInitialized = true;
        _isStreaming = false;
        _showManualCapture = false;
        _debugInfo = "Ready. Cam: ${selectedCamera.lensDirection}";
      });
      // If auto-capture hasn't fired within 10s, offer a manual button.
      _manualCaptureTimer?.cancel();
      _manualCaptureTimer = Timer(const Duration(seconds: 10), () {
        if (mounted && _isCameraInitialized && !_isSuccess && !_isSubmitting) {
          setState(() {
            _showManualCapture = true;
          });
        }
      });
      _startSmartStream();
    } catch (e) {
      if (!mounted) return;
      setState(() {
        _errorMessage = "خطأ في تهيئة الكاميرا: $e";
        _isCameraInitialized = false;
      });
    }
  }

  /// Starts the camera image stream and runs throttled face detection on it.
  ///
  /// Each processed frame updates [_feedback]; once [_stableFramesNeeded]
  /// consecutive good frames are seen, [_captureAndSubmit] is triggered.
  void _startSmartStream() {
    if (_cameraController == null || !_cameraController!.value.isInitialized) {
      return;
    }
    if (_isStreaming) return;
    _isStreaming = true;
    try {
      _cameraController!.startImageStream((CameraImage image) async {
        if (!mounted) return;
        if (_isSubmitting || _isSuccess) return;
        _frameCount++;
        // Throttle: only every Nth frame, and never overlap detections.
        if (_frameCount % _throttleEveryNFrames != 0) return;
        if (_isDetecting) return;
        _isDetecting = true;
        try {
          final inputImage = _toInputImage(
            image,
            _cameraController!.description,
          );
          if (inputImage == null) {
            _isDetecting = false;
            return;
          }
          final faces = await _faceDetector.processImage(inputImage);
          if (faces.isEmpty) {
            _stableFrames = 0;
            _applyFeedback(
              FaceFeedback(
                type: FaceHintType.noFace,
                message: "ضع وجهك داخل الإطار",
                quality: 0,
                borderColor: Colors.white70,
              ),
            );
            _isDetecting = false;
            return;
          }
          final face = faces.first;
          final brightness = _estimateBrightness(image);
          final rotation =
              inputImage.metadata?.rotation ?? InputImageRotation.rotation0deg;
          final feedback = _evaluate(
            face: face,
            brightness: brightness,
            image: image,
            rotation: rotation,
          );
          _applyFeedback(feedback);
          if (feedback.isGood) {
            _stableFrames++;
            _progress = (_stableFrames / _stableFramesNeeded).clamp(0.0, 1.0);
            if (_stableFrames >= _stableFramesNeeded) {
              if (mounted) {
                setState(() {
                  _debugInfo = "جاري التحقق من الصورة...";
                });
              }
              _isDetecting = false;
              await _captureAndSubmit();
              return;
            }
          } else {
            // Decay (rather than reset) stability so brief glitches
            // don't restart the whole countdown.
            if (_stableFrames > 0) _stableFrames--;
            _progress = (_stableFrames / _stableFramesNeeded).clamp(0.0, 1.0);
          }
          if (mounted && !_isSubmitting && !_isSuccess) {
            setState(() {
              _debugInfo =
                  "Faces: ${faces.length} | Bright: ${brightness.toStringAsFixed(1)}\n"
                  "Msg: ${feedback.message} | Stable: $_stableFrames";
            });
          }
        } catch (e) {
          debugPrint("Face detection error: $e");
        } finally {
          _isDetecting = false;
        }
      });
    } catch (e) {
      debugPrint("Error starting image stream: $e");
      _isStreaming = false;
    }
  }

  /// Asks the user whether to fall back to fingerprint/PIN after the server
  /// rejected the face. Returns true to proceed, false/null to cancel.
  Future<bool?> _showLocalAuthDialog() {
    return showDialog<bool>(
      context: context,
      barrierDismissible: false,
      builder:
          (_) => AlertDialog(
            title: const Text('فشل التحقق بالوجه', textAlign: TextAlign.center),
            content: const Text(
              'لم يتم التعرف على الوجه.\n\nيرجى استخدام بصمة الإصبع أو رمز القفل (PIN/النمط) للمتابعة.',
              textAlign: TextAlign.center,
            ),
            actions: [
              TextButton(
                onPressed: () => Navigator.pop(context, false),
                child: const Text('إلغاء'),
              ),
              ElevatedButton(
                onPressed: () => Navigator.pop(context, true),
                child: const Text('استخدام البصمة / رمز القفل'),
              ),
            ],
          ),
    );
  }

  /// Runs device-local authentication (biometrics with PIN/pattern fallback).
  /// Returns false on unsupported devices or any auth error.
  Future<bool> _authenticateLocally() async {
    try {
      final isSupported = await _localAuth.isDeviceSupported();
      if (!isSupported) return false;
      return await _localAuth.authenticate(
        localizedReason: 'تأكيد هويتك لإكمال تسجيل الحضور.',
        options: const AuthenticationOptions(
          biometricOnly: false, // allows PIN/pattern fallback
          stickyAuth: true,
          useErrorDialogs: true,
        ),
      );
    } catch (_) {
      return false;
    }
  }

  /// Stops the preview image stream if it is running.
  ///
  /// FIX: `_isStreaming` is now reset in `finally`. Previously it was only
  /// cleared on success, so a failing `stopImageStream()` left the flag stuck
  /// at `true` and [_startSmartStream] could never restart the stream.
  Future<void> _stopImageStream() async {
    if (!_isStreaming || _cameraController == null) return;
    try {
      await _cameraController!.stopImageStream();
    } catch (e) {
      debugPrint("Error stopping image stream: $e");
    } finally {
      _isStreaming = false;
    }
  }

  /// Tears the camera down entirely (stream + controller). Used on terminal
  /// paths (pop back, fatal dialog) where the preview is no longer needed.
  void _stopCameraCompletely() {
    _manualCaptureTimer?.cancel();
    try {
      if (_isStreaming && _cameraController != null) {
        // NOTE(review): stopImageStream() is async and is not awaited here
        // before dispose(); any resulting error is caught below.
        _cameraController!.stopImageStream();
        _isStreaming = false;
      }
      _cameraController?.dispose();
      _cameraController = null;
    } catch (e) {
      debugPrint("Error stopping camera: $e");
    }
  }

  /// Scores the current frame and returns user-facing feedback.
  ///
  /// Checks, in order: lighting, head pose, face-to-frame size ratio,
  /// centering, and inter-frame movement. The first failing check wins;
  /// a fully passing frame returns feedback with `isGood == true`.
  FaceFeedback _evaluate({
    required Face face,
    required double brightness,
    required CameraImage image,
    required InputImageRotation rotation,
  }) {
    // 1) lighting
    if (brightness < 40) {
      return FaceFeedback(
        type: FaceHintType.tooDark,
        message: "المكان مظلم — انتقل لمكان أكثر إضاءة",
        quality: 0.1,
        borderColor: Colors.orangeAccent,
      );
    }
    // 2) head pose (yaw = left/right, pitch = up/down, both in degrees)
    final yaw = (face.headEulerAngleY ?? 0).abs();
    final pitch = (face.headEulerAngleX ?? 0).abs();
    if (yaw > 20 || pitch > 20) {
      return FaceFeedback(
        type: FaceHintType.lookStraight,
        message: "انظر مباشرةً للكاميرا",
        quality: 0.2,
        borderColor: Colors.orangeAccent,
      );
    }
    // 3) distance estimate — swap dimensions for 90°/270° rotations so the
    // frame matches the orientation of the ML Kit bounding box.
    double frameWidth = image.width.toDouble();
    double frameHeight = image.height.toDouble();
    if (rotation == InputImageRotation.rotation90deg ||
        rotation == InputImageRotation.rotation270deg) {
      final temp = frameWidth;
      frameWidth = frameHeight;
      frameHeight = temp;
    }
    final box = face.boundingBox;
    final frameArea = frameWidth * frameHeight;
    final faceArea = box.width * box.height;
    final ratio = faceArea / frameArea;
    if (ratio < 0.05) {
      return FaceFeedback(
        type: FaceHintType.tooFar,
        message: "اقترب قليلاً",
        quality: 0.3,
        borderColor: Colors.orangeAccent,
      );
    }
    if (ratio > 0.8) {
      return FaceFeedback(
        type: FaceHintType.tooClose,
        message: "ابتعد قليلاً",
        quality: 0.3,
        borderColor: Colors.orangeAccent,
      );
    }
    // 4) centered — face center must lie within 40% of the smaller dimension.
    final faceCenter = Offset(box.center.dx, box.center.dy);
    final frameCenter = Offset(frameWidth / 2, frameHeight / 2);
    final dist = (faceCenter - frameCenter).distance;
    final maxAllowed = math.min(frameWidth, frameHeight) * 0.4;
    if (dist > maxAllowed) {
      return FaceFeedback(
        type: FaceHintType.notCentered,
        message: "وسط وجهك داخل الإطار",
        quality: 0.4,
        borderColor: Colors.orangeAccent,
      );
    }
    // 5) stability — more than 40px of movement between frames counts as
    // "not holding still".
    if (_lastFaceRect != null) {
      final moved = (box.center - _lastFaceRect!.center).distance;
      if (moved > 40) {
        _lastFaceRect = box;
        return FaceFeedback(
          type: FaceHintType.holdStill,
          message: "ثبت الهاتف وابقَ ثابتاً",
          quality: 0.5,
          borderColor: Colors.orangeAccent,
        );
      }
    }
    _lastFaceRect = box;
    return FaceFeedback(
      type: FaceHintType.good,
      message: "ممتاز — ثبت قليلاً",
      quality: 1.0,
      borderColor: Colors.greenAccent,
    );
  }

  /// Updates [_feedback] and repaints only when type or message changed,
  /// avoiding a setState per processed frame.
  void _applyFeedback(FaceFeedback f) {
    if (!mounted) return;
    if (_feedback.type != f.type || _feedback.message != f.message) {
      setState(() {
        _feedback = f;
      });
    }
  }

  /// Stops the stream, takes a still picture, and submits it via
  /// [OvalCameraCapturePage.onCapture] with `localAuth: false`.
  ///
  /// Error handling:
  /// - 422 / "face verification failed" → local-auth fallback flow.
  /// - "already logged in" → blocking dialog, then pop back.
  /// - other server/network/camera errors → inline message + restart stream.
  Future<void> _captureAndSubmit() async {
    if (_cameraController == null) return;
    if (_isSubmitting || _isSuccess) return;
    setState(() {
      _isSubmitting = true;
      _errorMessage = null;
    });
    try {
      await _stopImageStream();
      // Small delay to let camera settle after stopping stream
      await Future.delayed(const Duration(milliseconds: 200));
      if (_cameraController == null ||
          !_cameraController!.value.isInitialized) {
        _handleScanError("الكاميرا غير جاهزة، حاول مرة أخرى");
        return;
      }
      final xFile = await _cameraController!.takePicture();
      final file = File(xFile.path);
      // Keep a reference so the 422 fallback can retry with the SAME image.
      _lastCapturedFile = file;
      await widget.onCapture(file, localAuth: false);
      if (mounted) {
        setState(() {
          _isSuccess = true;
          _isSubmitting = false;
        });
        // Show the success check briefly before popping.
        Future.delayed(const Duration(seconds: 1), () {
          if (mounted) {
            Navigator.of(context).pop(true);
          }
        });
      }
    } on ServerException catch (e) {
      final msg = e.message.toLowerCase();
      // Prefer the status code when available; fall back to message matching.
      if (e.statusCode == 422 || msg.contains('face verification failed')) {
        await _handleFaceVerificationFailed422(e);
        return;
      }
      if (msg.contains('already logged in') ||
          msg.contains('مسجل دخول بالفعل')) {
        // Stop camera and go back with a dialog
        _stopCameraCompletely();
        if (mounted) {
          showDialog(
            context: context,
            barrierDismissible: false,
            builder:
                (_) => AlertDialog(
                  title: const Text('تنبيه', textAlign: TextAlign.center),
                  content: const Text(
                    'أنت مسجل دخول بالفعل، لا يمكنك تسجيل الدخول مرة أخرى.',
                    textAlign: TextAlign.center,
                  ),
                  actions: [
                    TextButton(
                      onPressed: () {
                        Navigator.of(context).pop(); // Close dialog
                        Navigator.of(context).pop(); // Go back from camera
                      },
                      child: const Text('حسناً'),
                    ),
                  ],
                ),
          );
        }
        return;
      }
      _handleScanError(e.message);
    } on NetworkException catch (e) {
      _handleScanError(e.message);
    } on CameraException catch (e) {
      _handleScanError("فشل التقاط الصورة: ${e.description ?? e.code}");
    } catch (e) {
      _handleScanError("حدث خطأ غير متوقع: $e");
    }
  }

  /// Shows an inline error, resets capture progress, and restarts the
  /// detection stream so the user can try again.
  void _handleScanError(String msg) {
    if (!mounted) return;
    setState(() {
      _isSubmitting = false;
      _errorMessage = msg;
      _progress = 0;
      _stableFrames = 0;
    });
    if (_cameraController != null &&
        _cameraController!.value.isInitialized &&
        !_isStreaming) {
      _startSmartStream();
    }
  }

  /// Cheap average-luma estimate: samples every 100th byte of the first
  /// plane (the Y/luminance plane for YUV420). Returns 0 for empty frames.
  double _estimateBrightness(CameraImage image) {
    if (image.planes.isEmpty) return 0;
    final bytes = image.planes[0].bytes;
    if (bytes.isEmpty) return 0;
    const step = 100;
    int sum = 0;
    int count = 0;
    for (int i = 0; i < bytes.length; i += step) {
      sum += bytes[i];
      count++;
    }
    return count == 0 ? 0 : (sum / count);
  }

  /// Converts a camera frame into an ML Kit [InputImage], computing the
  /// rotation from sensor orientation + device orientation (Android) or
  /// sensor orientation alone (iOS). Returns null when the rotation or
  /// pixel format cannot be handled.
  InputImage? _toInputImage(CameraImage image, CameraDescription camera) {
    final sensorOrientation = camera.sensorOrientation;
    InputImageRotation? rotation;
    if (Platform.isIOS) {
      rotation = _rotationIntToImageRotation(sensorOrientation);
    } else if (Platform.isAndroid) {
      var rotationCompensation =
          _orientations[_cameraController!.value.deviceOrientation];
      if (rotationCompensation == null) return null;
      if (camera.lensDirection == CameraLensDirection.front) {
        // Front camera images are mirrored, so the compensation adds.
        rotationCompensation = (sensorOrientation + rotationCompensation) % 360;
      } else {
        rotationCompensation =
            (sensorOrientation - rotationCompensation + 360) % 360;
      }
      rotation = _rotationIntToImageRotation(rotationCompensation);
    }
    if (rotation == null) return null;
    if (Platform.isAndroid) {
      // ML Kit on Android expects NV21; repack the YUV420 planes.
      final nv21 = _convertYUV420ToNV21(image);
      return InputImage.fromBytes(
        bytes: nv21,
        metadata: InputImageMetadata(
          size: Size(image.width.toDouble(), image.height.toDouble()),
          rotation: rotation,
          format: InputImageFormat.nv21,
          bytesPerRow: image.width,
        ),
      );
    }
    // iOS BGRA8888
    if (image.planes.length == 1) {
      return InputImage.fromBytes(
        bytes: image.planes.first.bytes,
        metadata: InputImageMetadata(
          size: Size(image.width.toDouble(), image.height.toDouble()),
          rotation: rotation,
          format: InputImageFormat.bgra8888,
          bytesPerRow: image.planes.first.bytesPerRow,
        ),
      );
    }
    return null;
  }

  /// Fallback flow after the server rejects the face (422):
  /// dialog → device-local auth → retry the SAME captured image with
  /// `localAuth: true`. Pops with a sentinel result string on failure paths
  /// so the caller can show the appropriate message.
  Future<void> _handleFaceVerificationFailed422(ServerException e) async {
    if (!mounted) return;
    if (_handlingAuth422) return;
    _handlingAuth422 = true;
    // Stop everything so the camera doesn't keep scanning under the dialog.
    await _stopImageStream();
    setState(() {
      _isSubmitting = false;
      _errorMessage = null;
      _debugInfo = "Face verification failed (422) → Local Auth...";
    });
    final proceed = await _showLocalAuthDialog();
    if (proceed != true) {
      _handlingAuth422 = false;
      _stopCameraCompletely();
      if (!mounted) return;
      // Go back to attendance + show message there
      Navigator.of(context).pop(false);
      return;
    }
    final ok = await _authenticateLocally();
    if (!ok) {
      _handlingAuth422 = false;
      _stopCameraCompletely();
      if (!mounted) return;
      // Caller is expected to show a "fingerprint/pattern failed" message.
      Navigator.of(context).pop("local_auth_failed");
      return;
    }
    // Local auth success → retry SAME image with localAuth=true
    final file = _lastCapturedFile;
    if (file == null) {
      _handlingAuth422 = false;
      _stopCameraCompletely();
      if (!mounted) return;
      Navigator.of(context).pop("retry_missing_file");
      return;
    }
    setState(() {
      _isSubmitting = true;
      _debugInfo = "Local auth success → retrying with localAuth=true...";
    });
    try {
      await widget.onCapture(file, localAuth: true);
      if (!mounted) return;
      setState(() {
        _isSuccess = true;
        _isSubmitting = false;
      });
      Future.delayed(const Duration(seconds: 1), () {
        if (mounted) Navigator.of(context).pop(true);
      });
    } on ServerException {
      // FIX: previously bound an unused `e2` (analyzer warning).
      _handlingAuth422 = false;
      _stopCameraCompletely();
      if (!mounted) return;
      // Retry failed → go back to attendance
      Navigator.of(context).pop("retry_failed");
    } catch (_) {
      _handlingAuth422 = false;
      _stopCameraCompletely();
      if (!mounted) return;
      Navigator.of(context).pop("retry_failed");
    }
  }

  /// Repacks Android's 3-plane YUV420 frame into a single NV21 buffer
  /// (full-resolution Y followed by interleaved VU at half resolution).
  ///
  /// NOTE(review): assumes the U and V planes share the same bytesPerRow and
  /// pixel stride — true on common Android devices, but not guaranteed by
  /// the camera API; confirm on target hardware.
  Uint8List _convertYUV420ToNV21(CameraImage image) {
    final int width = image.width;
    final int height = image.height;
    final yPlane = image.planes[0];
    final uPlane = image.planes[1];
    final vPlane = image.planes[2];
    final int ySize = width * height;
    final int uvSize = ySize ~/ 2;
    final Uint8List nv21 = Uint8List(ySize + uvSize);
    // Y Channel — copy row by row when the plane has row padding.
    if (yPlane.bytesPerRow == width) {
      nv21.setAll(0, yPlane.bytes);
    } else {
      int offset = 0;
      for (int i = 0; i < height; i++) {
        nv21.setRange(
          offset,
          offset + width,
          yPlane.bytes,
          i * yPlane.bytesPerRow,
        );
        offset += width;
      }
    }
    // UV Channel (NV21 is VU interleaved)
    final int uvWidth = width ~/ 2;
    final int uvHeight = height ~/ 2;
    final int uvPixelStride = uPlane.bytesPerPixel ?? 1;
    int uvIndex = ySize;
    for (int row = 0; row < uvHeight; row++) {
      final int srcIndex = row * uPlane.bytesPerRow;
      for (int col = 0; col < uvWidth; col++) {
        final int pixelIndex = srcIndex + (col * uvPixelStride);
        nv21[uvIndex++] = vPlane.bytes[pixelIndex];
        nv21[uvIndex++] = uPlane.bytes[pixelIndex];
      }
    }
    return nv21;
  }

  /// Maps a rotation in degrees (0/90/180/270) to the ML Kit enum;
  /// any other value falls back to 0°.
  InputImageRotation _rotationIntToImageRotation(int rotation) {
    switch (rotation) {
      case 90:
        return InputImageRotation.rotation90deg;
      case 180:
        return InputImageRotation.rotation180deg;
      case 270:
        return InputImageRotation.rotation270deg;
      default:
        return InputImageRotation.rotation0deg;
    }
  }

  // Device orientation → clockwise degrees, used for Android rotation
  // compensation in [_toInputImage].
  static final _orientations = {
    DeviceOrientation.portraitUp: 0,
    DeviceOrientation.landscapeLeft: 90,
    DeviceOrientation.portraitDown: 180,
    DeviceOrientation.landscapeRight: 270,
  };

  @override
  Widget build(BuildContext context) {
    // Fatal init error: full-screen error with a retry button.
    if (_errorMessage != null && !_isCameraInitialized) {
      return Scaffold(
        backgroundColor: Colors.black,
        body: Center(
          child: Column(
            mainAxisSize: MainAxisSize.min,
            children: [
              const Icon(Icons.error, color: Colors.red, size: 48),
              const SizedBox(height: 16),
              Text(_errorMessage!, style: const TextStyle(color: Colors.white)),
              const SizedBox(height: 16),
              ElevatedButton(
                onPressed: _initializeCamera,
                child: const Text("إعادة المحاولة"),
              ),
            ],
          ),
        ),
      );
    }
    // Still initializing: spinner only.
    if (!_isCameraInitialized || _cameraController == null) {
      return const Scaffold(
        backgroundColor: Colors.black,
        body: Center(child: CircularProgressIndicator(color: Colors.red)),
      );
    }
    return Scaffold(
      backgroundColor: Colors.black,
      body: Stack(
        fit: StackFit.expand,
        children: [
          Center(child: CameraPreview(_cameraController!)),
          // Oval cut-out overlay + progress arc.
          CustomPaint(
            painter: _OvalOverlayPainter(
              borderColor: _feedback.borderColor,
              progress: _progress,
            ),
          ),
          // Title + live feedback pill.
          Positioned(
            top: 60,
            left: 0,
            right: 0,
            child: Column(
              children: [
                Text(
                  widget.isLogin ? "تسجيل الدخول" : "تسجيل خروج",
                  style: const TextStyle(
                    color: Colors.white,
                    fontSize: 24,
                    fontWeight: FontWeight.bold,
                    fontFamily: 'Cairo',
                  ),
                ),
                const SizedBox(height: 10),
                Container(
                  padding: const EdgeInsets.symmetric(
                    horizontal: 16,
                    vertical: 8,
                  ),
                  decoration: BoxDecoration(
                    color: Colors.black54,
                    borderRadius: BorderRadius.circular(20),
                  ),
                  child: Text(
                    _feedback.message,
                    style: TextStyle(
                      color: _feedback.borderColor,
                      fontSize: 16,
                      fontWeight: FontWeight.w600,
                    ),
                  ),
                ),
              ],
            ),
          ),
          if (_isSubmitting)
            const Center(child: CircularProgressIndicator(color: Colors.white)),
          if (_isSuccess)
            const Center(
              child: Icon(Icons.check_circle, color: Colors.green, size: 80),
            ),
          // Non-fatal error banner (camera still running).
          if (_errorMessage != null && _isCameraInitialized)
            Positioned(
              bottom: 20,
              left: 0,
              right: 0,
              child: Container(
                color: Colors.black54,
                padding: const EdgeInsets.all(8),
                child: Text(
                  _errorMessage!,
                  style: const TextStyle(color: Colors.red),
                  textAlign: TextAlign.center,
                ),
              ),
            ),
          // Debug overlay (frame/brightness/stability info).
          Positioned(
            bottom: 50,
            left: 10,
            right: 10,
            child: Text(
              _debugInfo,
              style: const TextStyle(
                color: Colors.yellow,
                fontSize: 12,
                backgroundColor: Colors.black54,
              ),
              textAlign: TextAlign.center,
            ),
          ),
          // Manual capture button, revealed after the 10s timeout.
          if (_showManualCapture && !_isSubmitting && !_isSuccess)
            Positioned(
              bottom: 110,
              left: 0,
              right: 0,
              child: Center(
                child: ElevatedButton.icon(
                  onPressed: _captureAndSubmit,
                  icon: const Icon(Icons.camera_alt),
                  label: const Text("التقاط يدوياً"),
                  style: ElevatedButton.styleFrom(
                    backgroundColor: Colors.redAccent,
                    foregroundColor: Colors.white,
                    padding: const EdgeInsets.symmetric(
                      horizontal: 24,
                      vertical: 12,
                    ),
                  ),
                ),
              ),
            ),
        ],
      ),
    );
  }
}
/// Paints the dimmed overlay with an oval cut-out, the feedback-coloured
/// oval border, and (when [progress] > 0) a green progress arc around it.
class _OvalOverlayPainter extends CustomPainter {
  final Color borderColor;
  final double progress;

  _OvalOverlayPainter({required this.borderColor, required this.progress});

  @override
  void paint(Canvas canvas, Size size) {
    final ovalRect = Rect.fromCenter(
      center: Offset(size.width / 2, size.height / 2),
      width: size.width * 0.75,
      height: size.height * 0.55,
    );

    // Dim everything outside the oval.
    final cutout = Path.combine(
      PathOperation.difference,
      Path()..addRect(Offset.zero & size),
      Path()..addOval(ovalRect),
    );
    canvas.drawPath(
      cutout,
      Paint()
        ..color = Colors.black.withOpacity(0.6)
        ..style = PaintingStyle.fill,
    );

    // Oval outline in the current feedback colour.
    canvas.drawOval(
      ovalRect,
      Paint()
        ..color = borderColor
        ..style = PaintingStyle.stroke
        ..strokeWidth = 4.0,
    );

    // Progress arc, starting at 12 o'clock and sweeping clockwise.
    if (progress > 0) {
      canvas.drawArc(
        ovalRect.inflate(10),
        -math.pi / 2,
        2 * math.pi * progress,
        false,
        Paint()
          ..color = Colors.greenAccent
          ..style = PaintingStyle.stroke
          ..strokeWidth = 6.0
          ..strokeCap = StrokeCap.round,
      );
    }
  }

  @override
  bool shouldRepaint(covariant _OvalOverlayPainter oldDelegate) =>
      oldDelegate.borderColor != borderColor ||
      oldDelegate.progress != progress;
}