feat: Enhance face detection and image processing in liveness detection

This commit is contained in:
vergiLgood1 2025-05-24 17:51:32 +07:00
parent 0eec492698
commit bd99a3dd40
1 changed files with 177 additions and 24 deletions

View File

@ -7,6 +7,8 @@ import 'package:flutter/services.dart';
import 'package:get/get.dart'; import 'package:get/get.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart'; import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
// import 'package:google_mlkit_face_mesh_detection/google_mlkit_face_mesh_detection.dart'; // import 'package:google_mlkit_face_mesh_detection/google_mlkit_face_mesh_detection.dart';
import 'package:image/image.dart' as imglib;
import 'package:path_provider/path_provider.dart';
import 'package:sigap/src/features/auth/data/models/face_model.dart'; import 'package:sigap/src/features/auth/data/models/face_model.dart';
// Enum for liveness status // Enum for liveness status
@ -106,7 +108,6 @@ class FaceLivenessController extends GetxController {
enableTracking: true, enableTracking: true,
minFaceSize: 0.1, minFaceSize: 0.1,
performanceMode: FaceDetectorMode.accurate, performanceMode: FaceDetectorMode.accurate,
), ),
); );
@ -147,12 +148,23 @@ class FaceLivenessController extends GetxController {
cameraController = CameraController( cameraController = CameraController(
frontCamera, frontCamera,
ResolutionPreset.medium, ResolutionPreset
.high, // Changed from medium to high for better detection
enableAudio: false, enableAudio: false,
imageFormatGroup: ImageFormatGroup.nv21, imageFormatGroup:
Platform.isIOS
? ImageFormatGroup.bgra8888
: ImageFormatGroup.yuv420,
); );
await cameraController!.initialize(); await cameraController!.initialize();
// Set flash off to improve face detection
try {
await cameraController!.setFlashMode(FlashMode.off);
} catch (e) {
dev.log('Error setting flash mode: $e', name: 'LIVENESS_CONTROLLER');
}
dev.log('Camera initialized successfully', name: 'LIVENESS_CONTROLLER'); dev.log('Camera initialized successfully', name: 'LIVENESS_CONTROLLER');
@ -209,10 +221,23 @@ class FaceLivenessController extends GetxController {
// Detect faces // Detect faces
final faces = await faceDetector.processImage(inputImage); final faces = await faceDetector.processImage(inputImage);
// Log the face detection attempt
if (faces.isEmpty) {
dev.log(
'No faces detected in this frame, checking for processing issues',
name: 'LIVENESS_CONTROLLER',
);
} else {
dev.log(
'Successfully detected ${faces.length} face(s)',
name: 'LIVENESS_CONTROLLER',
);
}
// Process face detection results // Process face detection results
await _processFaceDetection(faces); await _processFaceDetection(faces);
dev.log('Detected ${faces.length} faces', name: 'LIVENESS_CONTROLLER');
} catch (e) { } catch (e) {
dev.log('Error processing image: $e', name: 'LIVENESS_CONTROLLER'); dev.log('Error processing image: $e', name: 'LIVENESS_CONTROLLER');
} finally { } finally {
@ -220,7 +245,7 @@ class FaceLivenessController extends GetxController {
} }
} }
// Convert CameraImage to InputImage // Convert CameraImage to InputImage with improved handling
InputImage? _convertCameraImage(CameraImage image) { InputImage? _convertCameraImage(CameraImage image) {
try { try {
if (cameras == null || cameras!.isEmpty) { if (cameras == null || cameras!.isEmpty) {
@ -231,6 +256,7 @@ class FaceLivenessController extends GetxController {
return null; return null;
} }
// Get current camera
final camera = cameras!.firstWhere( final camera = cameras!.firstWhere(
(camera) => camera.lensDirection == CameraLensDirection.front, (camera) => camera.lensDirection == CameraLensDirection.front,
orElse: () => cameras!.first, orElse: () => cameras!.first,
@ -240,21 +266,30 @@ class FaceLivenessController extends GetxController {
InputImageRotation? rotation; InputImageRotation? rotation;
if (Platform.isIOS) { if (Platform.isIOS) {
// For iOS, we need specific rotation handling
rotation = InputImageRotationValue.fromRawValue(sensorOrientation); rotation = InputImageRotationValue.fromRawValue(sensorOrientation);
dev.log(
'iOS camera rotation set to: $sensorOrientation',
name: 'LIVENESS_CONTROLLER',
);
} else if (Platform.isAndroid) { } else if (Platform.isAndroid) {
var rotationCompensation = var rotationCompensation =
orientations[cameraController!.value.deviceOrientation]; orientations[cameraController!.value.deviceOrientation];
if (rotationCompensation == null) return null; if (rotationCompensation == null) return null;
if (camera.lensDirection == CameraLensDirection.front) { if (camera.lensDirection == CameraLensDirection.front) {
// front-facing
rotationCompensation = rotationCompensation =
(sensorOrientation + rotationCompensation) % 360; (sensorOrientation + rotationCompensation) % 360;
} else { } else {
// back-facing
rotationCompensation = rotationCompensation =
(sensorOrientation - rotationCompensation + 360) % 360; (sensorOrientation - rotationCompensation + 360) % 360;
} }
rotation = InputImageRotationValue.fromRawValue(rotationCompensation); rotation = InputImageRotationValue.fromRawValue(rotationCompensation);
dev.log(
'Android camera rotation set to: $rotationCompensation',
name: 'LIVENESS_CONTROLLER',
);
} }
if (rotation == null) { if (rotation == null) {
@ -262,28 +297,50 @@ class FaceLivenessController extends GetxController {
return null; return null;
} }
// Set appropriate format based on platform
final format = final format =
Platform.isAndroid Platform.isAndroid
? InputImageFormat.nv21 ? InputImageFormat.yuv420
: InputImageFormat.bgra8888; : InputImageFormat.bgra8888;
// Handle different plane configurations
if (image.planes.isEmpty) { if (image.planes.isEmpty) {
dev.log('No image planes available', name: 'LIVENESS_CONTROLLER'); dev.log('No image planes available', name: 'LIVENESS_CONTROLLER');
return null; return null;
} }
final plane = image.planes.first; // Properly handle image planes based on format
if (Platform.isAndroid) {
// For Android, we need to handle YUV format
final plane1 = image.planes[0];
final plane2 = image.planes[1];
final plane3 = image.planes[2];
return InputImage.fromBytes( return InputImage.fromBytes(
bytes: plane.bytes, bytes: Uint8List.fromList([
metadata: InputImageMetadata( ...plane1.bytes,
size: Size(image.width.toDouble(), image.height.toDouble()), ...plane2.bytes,
rotation: rotation, ...plane3.bytes,
format: format, ]),
bytesPerRow: plane.bytesPerRow, metadata: InputImageMetadata(
), size: Size(image.width.toDouble(), image.height.toDouble()),
); rotation: rotation,
format: format,
bytesPerRow: plane1.bytesPerRow,
),
);
} else {
// For iOS, we handle BGRA format
final plane = image.planes.first;
return InputImage.fromBytes(
bytes: plane.bytes,
metadata: InputImageMetadata(
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation,
format: format,
bytesPerRow: plane.bytesPerRow,
),
);
}
} catch (e) { } catch (e) {
dev.log('Error converting camera image: $e', name: 'LIVENESS_CONTROLLER'); dev.log('Error converting camera image: $e', name: 'LIVENESS_CONTROLLER');
return null; return null;
@ -462,7 +519,7 @@ class FaceLivenessController extends GetxController {
}); });
} }
// Capture image // Capture image with improved processing
Future<void> captureImage() async { Future<void> captureImage() async {
try { try {
if (cameraController == null || !cameraController!.value.isInitialized) { if (cameraController == null || !cameraController!.value.isInitialized) {
@ -477,7 +534,6 @@ class FaceLivenessController extends GetxController {
await cameraController?.stopImageStream(); await cameraController?.stopImageStream();
} catch (e) { } catch (e) {
dev.log('Error stopping image stream: $e', name: 'LIVENESS_CONTROLLER'); dev.log('Error stopping image stream: $e', name: 'LIVENESS_CONTROLLER');
// Continue with capture anyway
} }
status.value = LivenessStatus.photoTaken; status.value = LivenessStatus.photoTaken;
@ -488,7 +544,34 @@ class FaceLivenessController extends GetxController {
while (retryCount < maxRetries) { while (retryCount < maxRetries) {
try { try {
capturedImage = await cameraController!.takePicture(); XFile originalImage = await cameraController!.takePicture();
dev.log(
'Original image captured at: ${originalImage.path}',
name: 'LIVENESS_CONTROLLER',
);
// Process captured image to ensure proper orientation
capturedImage = await _processAndFixImageOrientation(originalImage);
// Verify the captured image contains a face
if (!await _verifyFaceInImage(capturedImage!)) {
retryCount++;
dev.log(
'No face detected in captured image, retrying ($retryCount/$maxRetries)',
name: 'LIVENESS_CONTROLLER',
);
if (retryCount >= maxRetries) {
throw Exception(
'Failed to capture image with face after $maxRetries attempts',
);
}
// Wait before retry
await Future.delayed(Duration(milliseconds: 500));
continue;
}
break; break;
} catch (e) { } catch (e) {
retryCount++; retryCount++;
@ -501,13 +584,12 @@ class FaceLivenessController extends GetxController {
rethrow; rethrow;
} }
// Wait before retry
await Future.delayed(Duration(milliseconds: 500)); await Future.delayed(Duration(milliseconds: 500));
} }
} }
dev.log( dev.log(
'Image captured: ${capturedImage?.path}', 'Image captured and processed: ${capturedImage?.path}',
name: 'LIVENESS_CONTROLLER', name: 'LIVENESS_CONTROLLER',
); );
@ -519,6 +601,77 @@ class FaceLivenessController extends GetxController {
status.value = LivenessStatus.failed; status.value = LivenessStatus.failed;
} }
} }
// Verify that the captured image contains a face.
//
// Runs the still image back through the same [faceDetector] used for the
// live preview; returns false on any detection error so callers treat a
// failed check the same as "no face".
Future<bool> _verifyFaceInImage(XFile image) async {
  try {
    final detected = await faceDetector.processImage(
      InputImage.fromFilePath(image.path),
    );
    dev.log(
      'Verification found ${detected.length} faces in captured image',
      name: 'LIVENESS_CONTROLLER',
    );
    return detected.isNotEmpty;
  } catch (e) {
    dev.log('Error verifying face in image: $e', name: 'LIVENESS_CONTROLLER');
    return false;
  }
}
// Process and fix image orientation.
//
// On iOS the captured JPEG can carry an EXIF orientation tag that downstream
// face detection does not honor, so the rotation is baked into the pixel data
// and the file re-encoded. Android images are returned unchanged, and any
// decode/IO failure falls back to [originalImage] rather than throwing.
Future<XFile> _processAndFixImageOrientation(XFile originalImage) async {
  // For Android, we typically don't need this fix.
  if (!Platform.isIOS) return originalImage;

  try {
    dev.log(
      'Processing iOS image to fix orientation',
      name: 'LIVENESS_CONTROLLER',
    );

    // Get temp directory for processed image. Using the temporary directory
    // (not the documents directory) keeps these transient frames out of
    // user-visible storage and lets the OS reclaim them.
    final directory = await getTemporaryDirectory();
    final filename = 'processed_${DateTime.now().millisecondsSinceEpoch}.jpg';
    final outputPath = '${directory.path}/$filename';

    // Read the image bytes and decode.
    final imageBytes = await originalImage.readAsBytes();
    final originalDecodedImage = imglib.decodeImage(imageBytes);
    if (originalDecodedImage == null) {
      dev.log('Failed to decode image', name: 'LIVENESS_CONTROLLER');
      return originalImage;
    }

    // Fix orientation by applying the EXIF rotation to the pixels.
    final orientedImage = imglib.bakeOrientation(originalDecodedImage);

    // Save the processed image.
    final processedImageFile = File(outputPath);
    await processedImageFile.writeAsBytes(imglib.encodeJpg(orientedImage));

    dev.log(
      'Successfully processed iOS image to: $outputPath',
      name: 'LIVENESS_CONTROLLER',
    );
    return XFile(processedImageFile.path);
  } catch (e) {
    dev.log(
      'Error processing image orientation: $e',
      name: 'LIVENESS_CONTROLLER',
    );
    // Fall back to original image if processing fails.
    return originalImage;
  }
}
// Force capture (for debugging) // Force capture (for debugging)
Future<void> forceCaptureImage() async { Future<void> forceCaptureImage() async {