feat: Enhance face detection and image processing in liveness detection

parent 0eec492698
commit bd99a3dd40

@@ -7,6 +7,8 @@ import 'package:flutter/services.dart';
 import 'package:get/get.dart';
 import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
 // import 'package:google_mlkit_face_mesh_detection/google_mlkit_face_mesh_detection.dart';
+import 'package:image/image.dart' as imglib;
+import 'package:path_provider/path_provider.dart';
 import 'package:sigap/src/features/auth/data/models/face_model.dart';

 // Enum for liveness status
@@ -106,7 +108,6 @@ class FaceLivenessController extends GetxController {
       enableTracking: true,
       minFaceSize: 0.1,
       performanceMode: FaceDetectorMode.accurate,

     ),
   );

@@ -147,13 +148,24 @@ class FaceLivenessController extends GetxController {

       cameraController = CameraController(
         frontCamera,
-        ResolutionPreset.medium,
+        ResolutionPreset
+            .high, // Changed from medium to high for better detection
         enableAudio: false,
-        imageFormatGroup: ImageFormatGroup.nv21,
+        imageFormatGroup:
+            Platform.isIOS
+                ? ImageFormatGroup.bgra8888
+                : ImageFormatGroup.yuv420,
       );

       await cameraController!.initialize();

+      // Set flash off to improve face detection
+      try {
+        await cameraController!.setFlashMode(FlashMode.off);
+      } catch (e) {
+        dev.log('Error setting flash mode: $e', name: 'LIVENESS_CONTROLLER');
+      }
+
       dev.log('Camera initialized successfully', name: 'LIVENESS_CONTROLLER');

       // Start image stream for face detection
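
For readers outside the diff context: a minimal standalone sketch of the camera setup this hunk converges on. It is an illustration, not the controller's actual method; `frontCamera` is assumed to be a front-facing CameraDescription obtained from availableCameras().

import 'dart:io' show Platform;

import 'package:camera/camera.dart';

// Hypothetical helper mirroring the configuration above: high resolution for
// better detection, platform-appropriate pixel format, audio and flash off.
Future<CameraController> createLivenessCamera(CameraDescription frontCamera) async {
  final controller = CameraController(
    frontCamera,
    ResolutionPreset.high,
    enableAudio: false,
    imageFormatGroup:
        Platform.isIOS ? ImageFormatGroup.bgra8888 : ImageFormatGroup.yuv420,
  );
  await controller.initialize();
  try {
    await controller.setFlashMode(FlashMode.off);
  } catch (_) {
    // Some front cameras have no flash; failing to set the mode is non-fatal.
  }
  return controller;
}
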
@@ -209,10 +221,23 @@ class FaceLivenessController extends GetxController {

       // Detect faces
       final faces = await faceDetector.processImage(inputImage);

+      // Log the face detection attempt
+      if (faces.isEmpty) {
+        dev.log(
+          'No faces detected in this frame, checking for processing issues',
+          name: 'LIVENESS_CONTROLLER',
+        );
+      } else {
+        dev.log(
+          'Successfully detected ${faces.length} face(s)',
+          name: 'LIVENESS_CONTROLLER',
+        );
+      }
+
       // Process face detection results
       await _processFaceDetection(faces);

       dev.log('Detected ${faces.length} faces', name: 'LIVENESS_CONTROLLER');
     } catch (e) {
       dev.log('Error processing image: $e', name: 'LIVENESS_CONTROLLER');
     } finally {
@@ -220,7 +245,7 @@ class FaceLivenessController extends GetxController {
     }
   }

-  // Convert CameraImage to InputImage
+  // Convert CameraImage to InputImage with improved handling
   InputImage? _convertCameraImage(CameraImage image) {
     try {
       if (cameras == null || cameras!.isEmpty) {
@@ -231,6 +256,7 @@ class FaceLivenessController extends GetxController {
         return null;
       }

+      // Get current camera
       final camera = cameras!.firstWhere(
         (camera) => camera.lensDirection == CameraLensDirection.front,
         orElse: () => cameras!.first,
@@ -240,21 +266,30 @@ class FaceLivenessController extends GetxController {
       InputImageRotation? rotation;

       if (Platform.isIOS) {
+        // For iOS, we need specific rotation handling
         rotation = InputImageRotationValue.fromRawValue(sensorOrientation);
+        dev.log(
+          'iOS camera rotation set to: $sensorOrientation',
+          name: 'LIVENESS_CONTROLLER',
+        );
       } else if (Platform.isAndroid) {
         var rotationCompensation =
             orientations[cameraController!.value.deviceOrientation];
         if (rotationCompensation == null) return null;

         if (camera.lensDirection == CameraLensDirection.front) {
           // front-facing
           rotationCompensation =
               (sensorOrientation + rotationCompensation) % 360;
         } else {
           // back-facing
           rotationCompensation =
               (sensorOrientation - rotationCompensation + 360) % 360;
         }

         rotation = InputImageRotationValue.fromRawValue(rotationCompensation);
+        dev.log(
+          'Android camera rotation set to: $rotationCompensation',
+          name: 'LIVENESS_CONTROLLER',
+        );
       }

       if (rotation == null) {
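
As a worked example of the compensation above, assuming the usual orientation map where DeviceOrientation.portraitUp is 0 and landscapeLeft is 90: a front camera with sensorOrientation 270 held upright gives (270 + 0) % 360 = 270, so InputImageRotationValue.fromRawValue(270) yields InputImageRotation.rotation270deg; the same device rotated to landscapeLeft gives (270 + 90) % 360 = 0, i.e. rotation0deg.
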
@@ -262,28 +297,50 @@ class FaceLivenessController extends GetxController {
         return null;
       }

       // Set appropriate format based on platform
       final format =
           Platform.isAndroid
-              ? InputImageFormat.nv21
+              ? InputImageFormat.yuv420
               : InputImageFormat.bgra8888;

       // Handle different plane configurations
       if (image.planes.isEmpty) {
         dev.log('No image planes available', name: 'LIVENESS_CONTROLLER');
         return null;
       }

-      final plane = image.planes.first;
+      // Properly handle image planes based on format
+      if (Platform.isAndroid) {
+        // For Android, we need to handle YUV format
+        final plane1 = image.planes[0];
+        final plane2 = image.planes[1];
+        final plane3 = image.planes[2];

-      return InputImage.fromBytes(
-        bytes: plane.bytes,
-        metadata: InputImageMetadata(
-          size: Size(image.width.toDouble(), image.height.toDouble()),
-          rotation: rotation,
-          format: format,
-          bytesPerRow: plane.bytesPerRow,
-        ),
-      );
+        return InputImage.fromBytes(
+          bytes: Uint8List.fromList([
+            ...plane1.bytes,
+            ...plane2.bytes,
+            ...plane3.bytes,
+          ]),
+          metadata: InputImageMetadata(
+            size: Size(image.width.toDouble(), image.height.toDouble()),
+            rotation: rotation,
+            format: format,
+            bytesPerRow: plane1.bytesPerRow,
+          ),
+        );
+      } else {
+        // For iOS, we handle BGRA format
+        final plane = image.planes.first;
+        return InputImage.fromBytes(
+          bytes: plane.bytes,
+          metadata: InputImageMetadata(
+            size: Size(image.width.toDouble(), image.height.toDouble()),
+            rotation: rotation,
+            format: format,
+            bytesPerRow: plane.bytesPerRow,
+          ),
+        );
+      }
     } catch (e) {
       dev.log('Error converting camera image: $e', name: 'LIVENESS_CONTROLLER');
       return null;
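
A note on the Android branch above: concatenating the three YUV_420_888 planes assumes each plane is tightly packed. On devices where the chroma planes carry row or pixel padding, a common alternative (not part of this commit) is to repack the frame into NV21, the byte layout ML Kit's byte-buffer path on Android generally expects, and pass InputImageFormat.nv21 instead. A stride-aware sketch of that repacking, assuming the usual semi-planar layout with matching U/V strides:

import 'dart:typed_data';

import 'package:camera/camera.dart';

// Repacks an Android YUV_420_888 CameraImage into NV21 (Y plane followed by
// interleaved V/U samples), honoring bytesPerRow / bytesPerPixel padding.
Uint8List yuv420ToNv21(CameraImage image) {
  final width = image.width;
  final height = image.height;
  final nv21 = Uint8List(width * height + 2 * (width ~/ 2) * (height ~/ 2));

  // Copy the luma plane row by row; bytesPerRow may be wider than the image.
  final yPlane = image.planes[0];
  var offset = 0;
  for (var row = 0; row < height; row++) {
    nv21.setRange(offset, offset + width, yPlane.bytes, row * yPlane.bytesPerRow);
    offset += width;
  }

  // Interleave chroma as V then U, as NV21 requires.
  final uPlane = image.planes[1];
  final vPlane = image.planes[2];
  final rowStride = uPlane.bytesPerRow;
  final pixelStride = uPlane.bytesPerPixel ?? 1;
  for (var row = 0; row < height ~/ 2; row++) {
    for (var col = 0; col < width ~/ 2; col++) {
      final i = row * rowStride + col * pixelStride;
      nv21[offset++] = vPlane.bytes[i];
      nv21[offset++] = uPlane.bytes[i];
    }
  }
  return nv21;
}

The repacked buffer would then typically be passed to InputImage.fromBytes with format InputImageFormat.nv21 and bytesPerRow set to image.width.
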
@@ -462,7 +519,7 @@ class FaceLivenessController extends GetxController {
     });
   }

-  // Capture image
+  // Capture image with improved processing
   Future<void> captureImage() async {
     try {
       if (cameraController == null || !cameraController!.value.isInitialized) {
@@ -477,7 +534,6 @@ class FaceLivenessController extends GetxController {
         await cameraController?.stopImageStream();
       } catch (e) {
         dev.log('Error stopping image stream: $e', name: 'LIVENESS_CONTROLLER');
         // Continue with capture anyway
       }

       status.value = LivenessStatus.photoTaken;
@@ -488,7 +544,34 @@ class FaceLivenessController extends GetxController {

       while (retryCount < maxRetries) {
         try {
-          capturedImage = await cameraController!.takePicture();
+          XFile originalImage = await cameraController!.takePicture();
+          dev.log(
+            'Original image captured at: ${originalImage.path}',
+            name: 'LIVENESS_CONTROLLER',
+          );
+
+          // Process captured image to ensure proper orientation
+          capturedImage = await _processAndFixImageOrientation(originalImage);
+
+          // Verify the captured image contains a face
+          if (!await _verifyFaceInImage(capturedImage!)) {
+            retryCount++;
+            dev.log(
+              'No face detected in captured image, retrying ($retryCount/$maxRetries)',
+              name: 'LIVENESS_CONTROLLER',
+            );
+
+            if (retryCount >= maxRetries) {
+              throw Exception(
+                'Failed to capture image with face after $maxRetries attempts',
+              );
+            }
+
+            // Wait before retry
+            await Future.delayed(Duration(milliseconds: 500));
+            continue;
+          }
+
           break;
         } catch (e) {
           retryCount++;
@@ -501,13 +584,12 @@ class FaceLivenessController extends GetxController {
             rethrow;
           }

           // Wait before retry
           await Future.delayed(Duration(milliseconds: 500));
         }
       }

       dev.log(
-        'Image captured: ${capturedImage?.path}',
+        'Image captured and processed: ${capturedImage?.path}',
         name: 'LIVENESS_CONTROLLER',
       );

@@ -520,6 +602,77 @@ class FaceLivenessController extends GetxController {
     }
   }

+  // Verify that the captured image contains a face
+  Future<bool> _verifyFaceInImage(XFile image) async {
+    try {
+      final inputImage = InputImage.fromFilePath(image.path);
+      final faces = await faceDetector.processImage(inputImage);
+
+      dev.log(
+        'Verification found ${faces.length} faces in captured image',
+        name: 'LIVENESS_CONTROLLER',
+      );
+
+      return faces.isNotEmpty;
+    } catch (e) {
+      dev.log('Error verifying face in image: $e', name: 'LIVENESS_CONTROLLER');
+      return false;
+    }
+  }
+
+  // Process and fix image orientation
+  Future<XFile> _processAndFixImageOrientation(XFile originalImage) async {
+    // For iOS, we need to fix the orientation
+    if (Platform.isIOS) {
+      try {
+        dev.log(
+          'Processing iOS image to fix orientation',
+          name: 'LIVENESS_CONTROLLER',
+        );
+
+        // Get temp directory for processed image
+        final directory = await getApplicationDocumentsDirectory();
+        final path = directory.path;
+        final filename =
+            'processed_${DateTime.now().millisecondsSinceEpoch}.jpg';
+        final outputPath = '$path/$filename';
+
+        // Read the image bytes and decode
+        final imageBytes = await originalImage.readAsBytes();
+        final originalDecodedImage = imglib.decodeImage(imageBytes);
+
+        if (originalDecodedImage == null) {
+          dev.log('Failed to decode image', name: 'LIVENESS_CONTROLLER');
+          return originalImage;
+        }
+
+        // Fix orientation
+        final orientedImage = imglib.bakeOrientation(originalDecodedImage);
+
+        // Save the processed image
+        final processedImageFile = File(outputPath);
+        await processedImageFile.writeAsBytes(imglib.encodeJpg(orientedImage));
+
+        dev.log(
+          'Successfully processed iOS image to: $outputPath',
+          name: 'LIVENESS_CONTROLLER',
+        );
+
+        return XFile(processedImageFile.path);
+      } catch (e) {
+        dev.log(
+          'Error processing image orientation: $e',
+          name: 'LIVENESS_CONTROLLER',
+        );
+        // Fall back to original image if processing fails
+        return originalImage;
+      }
+    }
+
+    // For Android, we typically don't need this fix
+    return originalImage;
+  }
+
   // Force capture (for debugging)
   Future<void> forceCaptureImage() async {
     dev.log('Force capturing image...', name: 'LIVENESS_CONTROLLER');
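
For context, a condensed sketch (not the controller's actual code) of the capture, verify, and retry pattern that the new captureImage flow and _verifyFaceInImage implement together; `controller` and `detector` stand in for the class's cameraController and faceDetector fields:

import 'package:camera/camera.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';

// Takes up to maxRetries pictures and returns the first one in which ML Kit
// finds at least one face, or null if every attempt comes back empty.
Future<XFile?> captureVerifiedSelfie(
  CameraController controller,
  FaceDetector detector, {
  int maxRetries = 3,
}) async {
  for (var attempt = 1; attempt <= maxRetries; attempt++) {
    final shot = await controller.takePicture();
    final faces =
        await detector.processImage(InputImage.fromFilePath(shot.path));
    if (faces.isNotEmpty) return shot;
    await Future.delayed(const Duration(milliseconds: 500));
  }
  return null;
}
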