Remove unused Google ML Kit dependencies and streamline face detection logic

This commit is contained in:
Mohammed Al-Samarraie
2026-03-14 15:03:40 +03:00
parent 0606e26ef7
commit eebec9b7a2
5 changed files with 103 additions and 652 deletions

View File

@@ -7,62 +7,9 @@ PODS:
- geolocator_apple (1.2.0):
- Flutter
- FlutterMacOS
- google_mlkit_commons (0.9.0):
- Flutter
- MLKitVision
- google_mlkit_face_detection (0.12.0):
- Flutter
- google_mlkit_commons
- GoogleMLKit/FaceDetection (~> 7.0.0)
- GoogleDataTransport (10.1.0):
- nanopb (~> 3.30910.0)
- PromisesObjC (~> 2.4)
- GoogleMLKit/FaceDetection (7.0.0):
- GoogleMLKit/MLKitCore
- MLKitFaceDetection (~> 6.0.0)
- GoogleMLKit/MLKitCore (7.0.0):
- MLKitCommon (~> 12.0.0)
- GoogleToolboxForMac/Defines (4.2.1)
- GoogleToolboxForMac/Logger (4.2.1):
- GoogleToolboxForMac/Defines (= 4.2.1)
- "GoogleToolboxForMac/NSData+zlib (4.2.1)":
- GoogleToolboxForMac/Defines (= 4.2.1)
- GoogleUtilities/Environment (8.1.0):
- GoogleUtilities/Privacy
- GoogleUtilities/Logger (8.1.0):
- GoogleUtilities/Environment
- GoogleUtilities/Privacy
- GoogleUtilities/Privacy (8.1.0)
- GoogleUtilities/UserDefaults (8.1.0):
- GoogleUtilities/Logger
- GoogleUtilities/Privacy
- GTMSessionFetcher/Core (3.5.0)
- local_auth_darwin (0.0.1):
- Flutter
- FlutterMacOS
- MLImage (1.0.0-beta6)
- MLKitCommon (12.0.0):
- GoogleDataTransport (~> 10.0)
- GoogleToolboxForMac/Logger (< 5.0, >= 4.2.1)
- "GoogleToolboxForMac/NSData+zlib (< 5.0, >= 4.2.1)"
- GoogleUtilities/Logger (~> 8.0)
- GoogleUtilities/UserDefaults (~> 8.0)
- GTMSessionFetcher/Core (< 4.0, >= 3.3.2)
- MLKitFaceDetection (6.0.0):
- MLKitCommon (~> 12.0)
- MLKitVision (~> 8.0)
- MLKitVision (8.0.0):
- GoogleToolboxForMac/Logger (< 5.0, >= 4.2.1)
- "GoogleToolboxForMac/NSData+zlib (< 5.0, >= 4.2.1)"
- GTMSessionFetcher/Core (< 4.0, >= 3.3.2)
- MLImage (= 1.0.0-beta6)
- MLKitCommon (~> 12.0)
- nanopb (3.30910.0):
- nanopb/decode (= 3.30910.0)
- nanopb/encode (= 3.30910.0)
- nanopb/decode (3.30910.0)
- nanopb/encode (3.30910.0)
- PromisesObjC (2.4.0)
- shared_preferences_foundation (0.0.1):
- Flutter
- FlutterMacOS
@@ -72,25 +19,9 @@ DEPENDENCIES:
- Flutter (from `Flutter`)
- flutter_native_splash (from `.symlinks/plugins/flutter_native_splash/ios`)
- geolocator_apple (from `.symlinks/plugins/geolocator_apple/darwin`)
- google_mlkit_commons (from `.symlinks/plugins/google_mlkit_commons/ios`)
- google_mlkit_face_detection (from `.symlinks/plugins/google_mlkit_face_detection/ios`)
- local_auth_darwin (from `.symlinks/plugins/local_auth_darwin/darwin`)
- shared_preferences_foundation (from `.symlinks/plugins/shared_preferences_foundation/darwin`)
SPEC REPOS:
trunk:
- GoogleDataTransport
- GoogleMLKit
- GoogleToolboxForMac
- GoogleUtilities
- GTMSessionFetcher
- MLImage
- MLKitCommon
- MLKitFaceDetection
- MLKitVision
- nanopb
- PromisesObjC
EXTERNAL SOURCES:
camera_avfoundation:
:path: ".symlinks/plugins/camera_avfoundation/ios"
@@ -100,10 +31,6 @@ EXTERNAL SOURCES:
:path: ".symlinks/plugins/flutter_native_splash/ios"
geolocator_apple:
:path: ".symlinks/plugins/geolocator_apple/darwin"
google_mlkit_commons:
:path: ".symlinks/plugins/google_mlkit_commons/ios"
google_mlkit_face_detection:
:path: ".symlinks/plugins/google_mlkit_face_detection/ios"
local_auth_darwin:
:path: ".symlinks/plugins/local_auth_darwin/darwin"
shared_preferences_foundation:
@@ -114,20 +41,7 @@ SPEC CHECKSUMS:
Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7
flutter_native_splash: c32d145d68aeda5502d5f543ee38c192065986cf
geolocator_apple: ab36aa0e8b7d7a2d7639b3b4e48308394e8cef5e
google_mlkit_commons: 92c769cc2e0a2bfdeb3c38091a36e8a9cc8c63d4
google_mlkit_face_detection: 76f493a9ffcb1aeb0ad4abd13ea58403e092ab84
GoogleDataTransport: aae35b7ea0c09004c3797d53c8c41f66f219d6a7
GoogleMLKit: eff9e23ec1d90ea4157a1ee2e32a4f610c5b3318
GoogleToolboxForMac: d1a2cbf009c453f4d6ded37c105e2f67a32206d8
GoogleUtilities: 00c88b9a86066ef77f0da2fab05f65d7768ed8e1
GTMSessionFetcher: 5aea5ba6bd522a239e236100971f10cb71b96ab6
local_auth_darwin: d2e8c53ef0c4f43c646462e3415432c4dab3ae19
MLImage: 0ad1c5f50edd027672d8b26b0fee78a8b4a0fc56
MLKitCommon: 07c2c33ae5640e5380beaaa6e4b9c249a205542d
MLKitFaceDetection: 2a593db4837db503ad3426b565e7aab045cefea5
MLKitVision: 45e79d68845a2de77e2dd4d7f07947f0ed157b0e
nanopb: fad817b59e0457d11a5dfbde799381cd727c1275
PromisesObjC: f5707f49cb48b9636751c5b2e7d227e43fba9f47
shared_preferences_foundation: 9e1978ff2562383bd5676f64ec4e9aa8fa06a6f7
PODFILE CHECKSUM: 9580a49fa2c095d51100c20f258df867100b916b

View File

@@ -198,7 +198,6 @@
9705A1C41CF9048500538489 /* Embed Frameworks */,
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
4D4FA041F50DD7060861DFEF /* [CP] Embed Pods Frameworks */,
9729C98D3A3358FF8E0EC9D3 /* [CP] Copy Pods Resources */,
);
buildRules = (
);
@@ -325,23 +324,6 @@
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
9729C98D3A3358FF8E0EC9D3 /* [CP] Copy Pods Resources */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist",
);
name = "[CP] Copy Pods Resources";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";
showEnvVarsInLog = 0;
};
9740EEB61CF901F6004384FC /* Run Script */ = {
isa = PBXShellScriptBuildPhase;
alwaysOutOfDate = 1;

View File

@@ -1,17 +1,11 @@
import 'dart:async';
import 'dart:io';
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:flutter/foundation.dart';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import '../../core/error/exceptions.dart';
import '../face/face_feedback.dart';
import 'package:local_auth/local_auth.dart';
class OvalCameraCapturePage extends StatefulWidget {
@@ -36,61 +30,27 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
bool _isSuccess = false;
bool _isSubmitting = false;
bool _isStreaming = false;
//to handle the state of auth when it gives 422 status code
// Timer countdown
int _countdown = 3;
Timer? _countdownTimer;
bool _countdownStarted = false;
// Local auth for 422
final LocalAuthentication _localAuth = LocalAuthentication();
bool _handlingAuth422 = false; // prevents multiple dialogs/auth prompts
File? _lastCapturedFile; // keep the same image for retry
// Smart feedback
FaceFeedback _feedback = FaceFeedback(
type: FaceHintType.noFace,
message: "ضع وجهك داخل الإطار",
quality: 0,
borderColor: Colors.white70,
);
double _progress = 0;
bool _isDetecting = false;
int _frameCount = 0;
// Stability tracking
Rect? _lastFaceRect;
int _stableFrames = 0;
bool _showManualCapture = false;
Timer? _manualCaptureTimer;
String _debugInfo = "Initializing...";
late final FaceDetector _faceDetector = FaceDetector(
options: FaceDetectorOptions(
performanceMode: FaceDetectorMode.fast,
enableTracking: true,
enableClassification: true,
enableLandmarks: false,
enableContours: false,
),
);
static const int _throttleEveryNFrames = 5;
static const int _stableFramesNeeded = 3;
bool _handlingAuth422 = false;
File? _lastCapturedFile;
@override
void initState() {
super.initState();
// Go straight to camera — no network calls here
_initializeCamera();
}
@override
void dispose() {
_manualCaptureTimer?.cancel();
_stopImageStream();
_countdownTimer?.cancel();
_cameraController?.dispose();
_faceDetector.close();
super.dispose();
}
@@ -101,9 +61,8 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
_isCameraInitialized = false;
_isSuccess = false;
_isSubmitting = false;
_progress = 0;
_stableFrames = 0;
_lastFaceRect = null;
_countdown = 3;
_countdownStarted = false;
});
await _cameraController?.dispose();
@@ -128,10 +87,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
selectedCamera,
ResolutionPreset.medium,
enableAudio: false,
imageFormatGroup:
Platform.isAndroid
? ImageFormatGroup.yuv420
: ImageFormatGroup.bgra8888,
);
await _cameraController!.initialize();
@@ -140,21 +95,9 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
setState(() {
_isCameraInitialized = true;
_isStreaming = false;
_showManualCapture = false;
_debugInfo = "Ready. Cam: ${selectedCamera.lensDirection}";
});
_manualCaptureTimer?.cancel();
_manualCaptureTimer = Timer(const Duration(seconds: 10), () {
if (mounted && _isCameraInitialized && !_isSuccess && !_isSubmitting) {
setState(() {
_showManualCapture = true;
});
}
});
_startSmartStream();
_startCountdown();
} catch (e) {
if (!mounted) return;
setState(() {
@@ -164,124 +107,49 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
}
void _startSmartStream() {
if (_cameraController == null || !_cameraController!.value.isInitialized) {
return;
}
if (_isStreaming) return;
void _startCountdown() {
if (_countdownStarted) return;
_countdownStarted = true;
_countdown = 3;
_isStreaming = true;
_countdownTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
if (!mounted) {
timer.cancel();
return;
}
try {
_cameraController!.startImageStream((CameraImage image) async {
if (!mounted) return;
if (_isSubmitting || _isSuccess) return;
_frameCount++;
if (_frameCount % _throttleEveryNFrames != 0) return;
if (_isDetecting) return;
_isDetecting = true;
try {
final inputImage = _toInputImage(
image,
_cameraController!.description,
);
if (inputImage == null) {
_isDetecting = false;
return;
}
final faces = await _faceDetector.processImage(inputImage);
if (faces.isEmpty) {
_stableFrames = 0;
_applyFeedback(
FaceFeedback(
type: FaceHintType.noFace,
message: "ضع وجهك داخل الإطار",
quality: 0,
borderColor: Colors.white70,
),
);
_isDetecting = false;
return;
}
final face = faces.first;
final brightness = _estimateBrightness(image);
final rotation =
inputImage.metadata?.rotation ?? InputImageRotation.rotation0deg;
final feedback = _evaluate(
face: face,
brightness: brightness,
image: image,
rotation: rotation,
);
_applyFeedback(feedback);
if (feedback.isGood) {
_stableFrames++;
_progress = (_stableFrames / _stableFramesNeeded).clamp(0.0, 1.0);
if (_stableFrames >= _stableFramesNeeded) {
if (mounted) {
setState(() {
_debugInfo = "جاري التحقق من الصورة...";
});
}
_isDetecting = false;
await _captureAndSubmit();
return;
}
} else {
if (_stableFrames > 0) _stableFrames--;
_progress = (_stableFrames / _stableFramesNeeded).clamp(0.0, 1.0);
}
if (mounted && !_isSubmitting && !_isSuccess) {
setState(() {
_debugInfo =
"Faces: ${faces.length} | Bright: ${brightness.toStringAsFixed(1)}\n"
"Msg: ${feedback.message} | Stable: $_stableFrames";
});
}
} catch (e) {
debugPrint("Face detection error: $e");
} finally {
_isDetecting = false;
}
setState(() {
_countdown--;
});
} catch (e) {
debugPrint("Error starting image stream: $e");
_isStreaming = false;
}
if (_countdown <= 0) {
timer.cancel();
_captureAndSubmit();
}
});
}
Future<bool?> _showLocalAuthDialog() {
return showDialog<bool>(
context: context,
barrierDismissible: false,
builder:
(_) => AlertDialog(
title: const Text('فشل التحقق بالوجه', textAlign: TextAlign.center),
content: const Text(
'لم يتم التعرف على الوجه.\n\nيرجى استخدام بصمة الإصبع أو رمز القفل (PIN/النمط) للمتابعة.',
textAlign: TextAlign.center,
),
actions: [
TextButton(
onPressed: () => Navigator.pop(context, false),
child: const Text('إلغاء'),
),
ElevatedButton(
onPressed: () => Navigator.pop(context, true),
child: const Text('استخدام البصمة / رمز القفل'),
),
],
builder: (_) => AlertDialog(
title: const Text('فشل التحقق بالوجه', textAlign: TextAlign.center),
content: const Text(
'لم يتم التعرف على الوجه.\n\nيرجى استخدام بصمة الإصبع أو رمز القفل (PIN/النمط) للمتابعة.',
textAlign: TextAlign.center,
),
actions: [
TextButton(
onPressed: () => Navigator.pop(context, false),
child: const Text('إلغاء'),
),
ElevatedButton(
onPressed: () => Navigator.pop(context, true),
child: const Text('استخدام البصمة / رمز القفل'),
),
],
),
);
}
@@ -293,7 +161,7 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
return await _localAuth.authenticate(
localizedReason: 'تأكيد هويتك لإكمال تسجيل الحضور.',
options: const AuthenticationOptions(
biometricOnly: false, // ✅ allows PIN/pattern fallback
biometricOnly: false,
stickyAuth: true,
useErrorDialogs: true,
),
@@ -303,23 +171,9 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
}
Future<void> _stopImageStream() async {
if (!_isStreaming || _cameraController == null) return;
try {
await _cameraController!.stopImageStream();
_isStreaming = false;
} catch (e) {
debugPrint("Error stopping image stream: $e");
}
}
void _stopCameraCompletely() {
_manualCaptureTimer?.cancel();
_countdownTimer?.cancel();
try {
if (_isStreaming && _cameraController != null) {
_cameraController!.stopImageStream();
_isStreaming = false;
}
_cameraController?.dispose();
_cameraController = null;
} catch (e) {
@@ -327,114 +181,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
}
FaceFeedback _evaluate({
required Face face,
required double brightness,
required CameraImage image,
required InputImageRotation rotation,
}) {
// 1) lighting
if (brightness < 40) {
return FaceFeedback(
type: FaceHintType.tooDark,
message: "المكان مظلم — انتقل لمكان أكثر إضاءة",
quality: 0.1,
borderColor: Colors.orangeAccent,
);
}
// 2) head pose
final yaw = (face.headEulerAngleY ?? 0).abs();
final pitch = (face.headEulerAngleX ?? 0).abs();
if (yaw > 20 || pitch > 20) {
return FaceFeedback(
type: FaceHintType.lookStraight,
message: "انظر مباشرةً للكاميرا",
quality: 0.2,
borderColor: Colors.orangeAccent,
);
}
// 3) distance estimate
double frameWidth = image.width.toDouble();
double frameHeight = image.height.toDouble();
if (rotation == InputImageRotation.rotation90deg ||
rotation == InputImageRotation.rotation270deg) {
final temp = frameWidth;
frameWidth = frameHeight;
frameHeight = temp;
}
final box = face.boundingBox;
final frameArea = frameWidth * frameHeight;
final faceArea = box.width * box.height;
final ratio = faceArea / frameArea;
if (ratio < 0.05) {
return FaceFeedback(
type: FaceHintType.tooFar,
message: "اقترب قليلاً",
quality: 0.3,
borderColor: Colors.orangeAccent,
);
}
if (ratio > 0.8) {
return FaceFeedback(
type: FaceHintType.tooClose,
message: "ابتعد قليلاً",
quality: 0.3,
borderColor: Colors.orangeAccent,
);
}
// 4) centered
final faceCenter = Offset(box.center.dx, box.center.dy);
final frameCenter = Offset(frameWidth / 2, frameHeight / 2);
final dist = (faceCenter - frameCenter).distance;
final maxAllowed = math.min(frameWidth, frameHeight) * 0.4;
if (dist > maxAllowed) {
return FaceFeedback(
type: FaceHintType.notCentered,
message: "وسط وجهك داخل الإطار",
quality: 0.4,
borderColor: Colors.orangeAccent,
);
}
// 5) stability
if (_lastFaceRect != null) {
final moved = (box.center - _lastFaceRect!.center).distance;
if (moved > 40) {
_lastFaceRect = box;
return FaceFeedback(
type: FaceHintType.holdStill,
message: "ثبت الهاتف وابقَ ثابتاً",
quality: 0.5,
borderColor: Colors.orangeAccent,
);
}
}
_lastFaceRect = box;
return FaceFeedback(
type: FaceHintType.good,
message: "ممتاز — ثبت قليلاً",
quality: 1.0,
borderColor: Colors.greenAccent,
);
}
void _applyFeedback(FaceFeedback f) {
if (!mounted) return;
if (_feedback.type != f.type || _feedback.message != f.message) {
setState(() {
_feedback = f;
});
}
}
Future<void> _captureAndSubmit() async {
if (_cameraController == null) return;
if (_isSubmitting || _isSuccess) return;
@@ -445,11 +191,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
});
try {
await _stopImageStream();
// Small delay to let camera settle after stopping stream
await Future.delayed(const Duration(milliseconds: 200));
if (_cameraController == null ||
!_cameraController!.value.isInitialized) {
_handleScanError("الكاميرا غير جاهزة، حاول مرة أخرى");
@@ -477,7 +218,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
} on ServerException catch (e) {
final msg = e.message.toLowerCase();
// If your ServerException has statusCode, prefer that:
if (e.statusCode == 422 || msg.contains('face verification failed')) {
await _handleFaceVerificationFailed422(e);
return;
@@ -485,29 +225,27 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
if (msg.contains('already logged in') ||
msg.contains('مسجل دخول بالفعل')) {
// Stop camera and go back with a dialog
_stopCameraCompletely();
if (mounted) {
showDialog(
context: context,
barrierDismissible: false,
builder:
(_) => AlertDialog(
title: const Text('تنبيه', textAlign: TextAlign.center),
content: const Text(
'أنت مسجل دخول بالفعل، لا يمكنك تسجيل الدخول مرة أخرى.',
textAlign: TextAlign.center,
),
actions: [
TextButton(
onPressed: () {
Navigator.of(context).pop(); // Close dialog
Navigator.of(context).pop(); // Go back from camera
},
child: const Text('حسناً'),
),
],
builder: (_) => AlertDialog(
title: const Text('تنبيه', textAlign: TextAlign.center),
content: const Text(
'أنت مسجل دخول بالفعل، لا يمكنك تسجيل الدخول مرة أخرى.',
textAlign: TextAlign.center,
),
actions: [
TextButton(
onPressed: () {
Navigator.of(context).pop();
Navigator.of(context).pop();
},
child: const Text('حسناً'),
),
],
),
);
}
return;
@@ -527,80 +265,12 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
setState(() {
_isSubmitting = false;
_errorMessage = msg;
_progress = 0;
_stableFrames = 0;
_countdown = 3;
_countdownStarted = false;
});
if (_cameraController != null &&
_cameraController!.value.isInitialized &&
!_isStreaming) {
_startSmartStream();
}
}
double _estimateBrightness(CameraImage image) {
if (image.planes.isEmpty) return 0;
final bytes = image.planes[0].bytes;
if (bytes.isEmpty) return 0;
const step = 100;
int sum = 0;
int count = 0;
for (int i = 0; i < bytes.length; i += step) {
sum += bytes[i];
count++;
}
return count == 0 ? 0 : (sum / count);
}
InputImage? _toInputImage(CameraImage image, CameraDescription camera) {
final sensorOrientation = camera.sensorOrientation;
InputImageRotation? rotation;
if (Platform.isIOS) {
rotation = _rotationIntToImageRotation(sensorOrientation);
} else if (Platform.isAndroid) {
var rotationCompensation =
_orientations[_cameraController!.value.deviceOrientation];
if (rotationCompensation == null) return null;
if (camera.lensDirection == CameraLensDirection.front) {
rotationCompensation = (sensorOrientation + rotationCompensation) % 360;
} else {
rotationCompensation =
(sensorOrientation - rotationCompensation + 360) % 360;
}
rotation = _rotationIntToImageRotation(rotationCompensation);
}
if (rotation == null) return null;
if (Platform.isAndroid) {
final nv21 = _convertYUV420ToNV21(image);
return InputImage.fromBytes(
bytes: nv21,
metadata: InputImageMetadata(
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation,
format: InputImageFormat.nv21,
bytesPerRow: image.width,
),
);
}
// iOS BGRA8888
if (image.planes.length == 1) {
return InputImage.fromBytes(
bytes: image.planes.first.bytes,
metadata: InputImageMetadata(
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation,
format: InputImageFormat.bgra8888,
bytesPerRow: image.planes.first.bytesPerRow,
),
);
}
return null;
// Restart countdown after error
_startCountdown();
}
Future<void> _handleFaceVerificationFailed422(ServerException e) async {
@@ -609,13 +279,9 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
_handlingAuth422 = true;
// stop everything so the camera doesn't keep scanning
await _stopImageStream();
setState(() {
_isSubmitting = false;
_errorMessage = null;
_debugInfo = "Face verification failed (422) → Local Auth...";
});
final proceed = await _showLocalAuthDialog();
@@ -623,11 +289,7 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
_handlingAuth422 = false;
_stopCameraCompletely();
if (!mounted) return;
// Go back to attendance + show message there
Navigator.of(context).pop(false);
// If you prefer to show inside this screen before pop:
// ScaffoldMessenger.of(context).showSnackBar(const SnackBar(content: Text('تم الإلغاء.')));
return;
}
@@ -636,13 +298,10 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
_handlingAuth422 = false;
_stopCameraCompletely();
if (!mounted) return;
// Return to attendance; attendance screen should show snack "failed fingerprint/pattern"
Navigator.of(context).pop("local_auth_failed");
return;
}
// Local auth success → retry SAME image with localAuth=true
final file = _lastCapturedFile;
if (file == null) {
_handlingAuth422 = false;
@@ -654,7 +313,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
setState(() {
_isSubmitting = true;
_debugInfo = "Local auth success → retrying with localAuth=true...";
});
try {
@@ -669,11 +327,10 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
Future.delayed(const Duration(seconds: 1), () {
if (mounted) Navigator.of(context).pop(true);
});
} on ServerException catch (e2) {
} on ServerException catch (_) {
_handlingAuth422 = false;
_stopCameraCompletely();
if (!mounted) return;
// Retry failed → go back to attendance
Navigator.of(context).pop("retry_failed");
} catch (_) {
_handlingAuth422 = false;
@@ -683,74 +340,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
}
}
Uint8List _convertYUV420ToNV21(CameraImage image) {
final int width = image.width;
final int height = image.height;
final yPlane = image.planes[0];
final uPlane = image.planes[1];
final vPlane = image.planes[2];
final int ySize = width * height;
final int uvSize = ySize ~/ 2;
final Uint8List nv21 = Uint8List(ySize + uvSize);
// Y Channel
if (yPlane.bytesPerRow == width) {
nv21.setAll(0, yPlane.bytes);
} else {
int offset = 0;
for (int i = 0; i < height; i++) {
nv21.setRange(
offset,
offset + width,
yPlane.bytes,
i * yPlane.bytesPerRow,
);
offset += width;
}
}
// UV Channel (NV21 is VU interleaved)
final int uvWidth = width ~/ 2;
final int uvHeight = height ~/ 2;
final int uvPixelStride = uPlane.bytesPerPixel ?? 1;
int uvIndex = ySize;
for (int row = 0; row < uvHeight; row++) {
final int srcIndex = row * uPlane.bytesPerRow;
for (int col = 0; col < uvWidth; col++) {
final int pixelIndex = srcIndex + (col * uvPixelStride);
nv21[uvIndex++] = vPlane.bytes[pixelIndex];
nv21[uvIndex++] = uPlane.bytes[pixelIndex];
}
}
return nv21;
}
InputImageRotation _rotationIntToImageRotation(int rotation) {
switch (rotation) {
case 90:
return InputImageRotation.rotation90deg;
case 180:
return InputImageRotation.rotation180deg;
case 270:
return InputImageRotation.rotation270deg;
default:
return InputImageRotation.rotation0deg;
}
}
static final _orientations = {
DeviceOrientation.portraitUp: 0,
DeviceOrientation.landscapeLeft: 90,
DeviceOrientation.portraitDown: 180,
DeviceOrientation.landscapeRight: 270,
};
@override
Widget build(BuildContext context) {
if (_errorMessage != null && !_isCameraInitialized) {
@@ -789,8 +378,12 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
Center(child: CameraPreview(_cameraController!)),
CustomPaint(
painter: _OvalOverlayPainter(
borderColor: _feedback.borderColor,
progress: _progress,
borderColor: _isSuccess
? Colors.greenAccent
: (_countdown <= 1 ? Colors.orangeAccent : Colors.white70),
progress: _countdownStarted
? ((3 - _countdown) / 3).clamp(0.0, 1.0)
: 0,
),
),
Positioned(
@@ -819,9 +412,13 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
borderRadius: BorderRadius.circular(20),
),
child: Text(
_feedback.message,
_isSubmitting
? "جاري التحقق..."
: _isSuccess
? "تم بنجاح"
: "التقاط الصورة خلال $_countdown ثانية",
style: TextStyle(
color: _feedback.borderColor,
color: _isSuccess ? Colors.greenAccent : Colors.white,
fontSize: 16,
fontWeight: FontWeight.w600,
),
@@ -830,6 +427,18 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
],
),
),
// Countdown number in center
if (!_isSubmitting && !_isSuccess && _countdown > 0)
Center(
child: Text(
'$_countdown',
style: TextStyle(
color: Colors.white.withOpacity(0.7),
fontSize: 80,
fontWeight: FontWeight.bold,
),
),
),
if (_isSubmitting)
const Center(child: CircularProgressIndicator(color: Colors.white)),
if (_isSuccess)
@@ -851,41 +460,6 @@ class _OvalCameraCapturePageState extends State<OvalCameraCapturePage> {
),
),
),
Positioned(
bottom: 50,
left: 10,
right: 10,
child: Text(
_debugInfo,
style: const TextStyle(
color: Colors.yellow,
fontSize: 12,
backgroundColor: Colors.black54,
),
textAlign: TextAlign.center,
),
),
if (_showManualCapture && !_isSubmitting && !_isSuccess)
Positioned(
bottom: 110,
left: 0,
right: 0,
child: Center(
child: ElevatedButton.icon(
onPressed: _captureAndSubmit,
icon: const Icon(Icons.camera_alt),
label: const Text("التقاط يدوياً"),
style: ElevatedButton.styleFrom(
backgroundColor: Colors.redAccent,
foregroundColor: Colors.white,
padding: const EdgeInsets.symmetric(
horizontal: 24,
vertical: 12,
),
),
),
),
),
],
),
);
@@ -918,28 +492,25 @@ class _OvalOverlayPainter extends CustomPainter {
ovalPath,
);
final bgPaint =
Paint()
..color = Colors.black.withOpacity(0.6)
..style = PaintingStyle.fill;
final bgPaint = Paint()
..color = Colors.black.withOpacity(0.6)
..style = PaintingStyle.fill;
canvas.drawPath(overlayPath, bgPaint);
final borderPaint =
Paint()
..color = borderColor
..style = PaintingStyle.stroke
..strokeWidth = 4.0;
final borderPaint = Paint()
..color = borderColor
..style = PaintingStyle.stroke
..strokeWidth = 4.0;
canvas.drawOval(ovalRect, borderPaint);
if (progress > 0) {
final progressPaint =
Paint()
..color = Colors.greenAccent
..style = PaintingStyle.stroke
..strokeWidth = 6.0
..strokeCap = StrokeCap.round;
final progressPaint = Paint()
..color = Colors.greenAccent
..style = PaintingStyle.stroke
..strokeWidth = 6.0
..strokeCap = StrokeCap.round;
final startAngle = -math.pi / 2;
final sweepAngle = 2 * math.pi * progress;

View File

@@ -336,22 +336,6 @@ packages:
url: "https://pub.dev"
source: hosted
version: "7.7.0"
google_mlkit_commons:
dependency: transitive
description:
name: google_mlkit_commons
sha256: "7e9a6d6e66b44aa8cfe944bda9bc3346c52486dd890ca49e5bc98845cda40d7f"
url: "https://pub.dev"
source: hosted
version: "0.9.0"
google_mlkit_face_detection:
dependency: "direct main"
description:
name: google_mlkit_face_detection
sha256: "65988405c884fd84a4ccc8bded7b5e3e4c33362f6f4eaaa94818bdaaba7bab7d"
url: "https://pub.dev"
source: hosted
version: "0.12.0"
html:
dependency: transitive
description:

View File

@@ -19,7 +19,7 @@ dependencies:
shared_preferences: ^2.2.2
flutter_bloc: ^8.1.6
intl: ^0.19.0
google_mlkit_face_detection: ^0.12.0
local_auth: ^2.1.8
geolocator: ^13.0.1