feat: Add KTP model for Indonesian ID card representation

feat: Implement PermissionModel for role-based access control

feat: Create ResourceModel to manage resources with associated permissions

feat: Develop UserMetadataModel for user-related data management

feat: Implement AWS Signature helper for secure API requests

feat: Create detect-face function for AWS Rekognition integration

feat: Implement send-email function using Resend API

feat: Develop verify-face function for comparing KTP and selfie images
vergiLgood1 2025-05-23 08:33:52 +07:00
parent d9fffff68d
commit c26d749026
34 changed files with 1358 additions and 229 deletions

View File

@ -44,3 +44,8 @@ AZURE_RESOURCE_NAME="sigap"
AZURE_FACE_RESOURCE_NAME="verify-face"
AZURE_SUBSCRIPTION_KEY="ANeYAEr78MF7HzCEDg53DEHfKZJg19raPeJCubNEZP2tXGD6xREgJQQJ99BEAC3pKaRXJ3w3AAAFACOGAwA9"
AZURE_FACE_SUBSCRIPTION_KEY="6pBJKuYEFWHkrCBaZh8hErDci6ZwYnG0tEaE3VA34P8XPAYj4ZvOJQQJ99BEACqBBLyXJ3w3AAAKACOGYqeW"
# AWS Rekognition
AWS_REGION="ap-southeast-1"
AWS_ACCESS_KEY="AKIAW3MD7UU5G2XTA44C"
AWS_SECRET_KEY="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"

View File

@ -1,10 +1,8 @@
import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:get_storage/get_storage.dart';
import 'package:logger/logger.dart';
import 'package:lottie/lottie.dart';
import 'package:sigap/src/features/auth/data/repositories/authentication_repository.dart';
import 'package:sigap/src/features/onboarding/presentasion/pages/onboarding/onboarding_screen.dart';
import 'package:sigap/src/utils/constants/colors.dart';
import 'package:sigap/src/utils/constants/image_strings.dart';
import 'package:sigap/src/utils/helpers/helper_functions.dart';
@ -52,17 +50,8 @@ class _AnimatedSplashScreenWidgetState extends State<AnimatedSplashScreenWidget>
}
Future<void> _handleNavigation() async {
// Check if onboarding is completed
final isFirstTime = storage.read('isFirstTime') ?? false;
if (isFirstTime) {
// Navigate to onboarding if it's the first time
Get.offAll(() => const OnboardingScreen());
} else {
// Use the authentication repository to determine where to navigate
AuthenticationRepository.instance.screenRedirect();
}
}
@override
Widget build(BuildContext context) {

View File

@ -0,0 +1,214 @@
import 'dart:convert';
import 'dart:io';
import 'package:dio/dio.dart';
import 'package:image_picker/image_picker.dart';
import 'package:sigap/src/features/auth/data/models/face_model.dart';
import 'package:sigap/src/utils/constants/api_urls.dart';
import 'package:sigap/src/utils/dio.client/dio_client.dart';
import 'package:sigap/src/utils/helpers/aws_signature.dart';
class AwsRecognitionService {
// Singleton instance
static final AwsRecognitionService instance = AwsRecognitionService._();
AwsRecognitionService._();
// AWS Rekognition API configuration
final String region = Endpoints.awsRegion;
final String accessKey = Endpoints.awsAccessKey;
final String secretKey = Endpoints.awsSecretKey;
final String serviceEndpoint = Endpoints.awsRekognitionEndpoint;
final String serviceName = 'rekognition';
// Face detection threshold values
final double faceMatchThreshold =
80.0; // Minimum confidence for face match (0-100)
// Detect faces in an image and return face details
Future<List<FaceModel>> detectFaces(XFile imageFile) async {
try {
final bytes = await File(imageFile.path).readAsBytes();
final base64Image = base64Encode(bytes);
// Create AWS Signature
final awsSignature = AwsSignature(
accessKey: accessKey,
secretKey: secretKey,
region: region,
serviceName: serviceName,
);
// Prepare request payload
final payload = {
'Image': {'Bytes': base64Image},
'Attributes': ['DEFAULT'],
};
// Get signed headers and URL
final dateTime = DateTime.now().toUtc();
final uri = Uri.parse('$serviceEndpoint/DetectFaces');
final headers = awsSignature.buildRequestHeaders(
method: 'POST',
uri: uri,
payload: payload,
dateTime: dateTime,
);
// Make API request
final response = await DioClient().post(
uri.toString(),
data: payload,
options: Options(headers: headers, responseType: ResponseType.json),
);
if (response.statusCode == 200) {
final faceDetails = response.data['FaceDetails'];
// Convert AWS response to FaceModel objects
List<FaceModel> faces = [];
for (var i = 0; i < faceDetails.length; i++) {
String faceId = 'face_${dateTime.millisecondsSinceEpoch}_$i';
faces.add(FaceModel.fromDetection(faceId, imageFile, faceDetails[i]));
}
return faces;
} else {
throw Exception(
'Failed to detect faces: ${response.statusCode} - ${response.data}',
);
}
} catch (e) {
print('Face detection error: $e');
return [];
}
}
// Compare two face images and return comparison result
Future<FaceComparisonResult> compareFaces(
XFile sourceImage,
XFile targetImage,
) async {
try {
// First detect faces in both images
List<FaceModel> sourceFaces = await detectFaces(sourceImage);
List<FaceModel> targetFaces = await detectFaces(targetImage);
if (sourceFaces.isEmpty || targetFaces.isEmpty) {
return FaceComparisonResult.noMatch(
sourceFaces.isEmpty ? FaceModel.empty() : sourceFaces.first,
targetFaces.isEmpty ? FaceModel.empty() : targetFaces.first,
message:
sourceFaces.isEmpty && targetFaces.isEmpty
? 'No faces detected in either image'
: sourceFaces.isEmpty
? 'No face detected in ID card image'
: 'No face detected in selfie image',
);
}
// Get the primary faces from each image
FaceModel sourceFace = sourceFaces.first;
FaceModel targetFace = targetFaces.first;
final sourceBytes = await File(sourceImage.path).readAsBytes();
final targetBytes = await File(targetImage.path).readAsBytes();
// Create AWS Signature
final awsSignature = AwsSignature(
accessKey: accessKey,
secretKey: secretKey,
region: region,
serviceName: serviceName,
);
// Prepare request payload
final payload = {
'SourceImage': {'Bytes': base64Encode(sourceBytes)},
'TargetImage': {'Bytes': base64Encode(targetBytes)},
'SimilarityThreshold': faceMatchThreshold,
};
// Get signed headers and URL
final dateTime = DateTime.now().toUtc();
final uri = Uri.parse('$serviceEndpoint/CompareFaces');
final headers = awsSignature.buildRequestHeaders(
method: 'POST',
uri: uri,
payload: payload,
dateTime: dateTime,
);
// Make API request
final response = await DioClient().post(
uri.toString(),
data: payload,
options: Options(headers: headers, responseType: ResponseType.json),
);
if (response.statusCode == 200) {
return FaceComparisonResult.fromAwsResponse(
sourceFace,
targetFace,
response.data,
);
} else {
throw Exception(
'Failed to compare faces: ${response.statusCode} - ${response.data}',
);
}
} catch (e) {
print('Face comparison error: $e');
return FaceComparisonResult.error(
FaceModel.empty().withMessage('Source face processing error'),
FaceModel.empty().withMessage('Target face processing error'),
e.toString(),
);
}
}
// Perform liveness detection (anti-spoofing check)
Future<FaceModel> performLivenessCheck(XFile selfieImage) async {
try {
// AWS Rekognition's DetectFaces API doesn't perform liveness detection;
// a dedicated flow (e.g. Rekognition Face Liveness) or a third-party
// solution would be needed for real anti-spoofing.
// For now, we simulate a successful check by detecting a face.
final faces = await detectFaces(selfieImage);
if (faces.isEmpty) {
return FaceModel.empty().withLiveness(
isLive: false,
confidence: 0.0,
message: 'No face detected in the selfie.',
);
}
// Get the primary face
FaceModel face = faces.first;
// Check confidence of face detection as a basic indicator
if (face.detectionConfidence < 0.7) {
return face.withLiveness(
isLive: false,
confidence: face.detectionConfidence,
message:
'Low confidence face detection. Please take a clearer selfie.',
);
}
// For a full implementation, you might want to:
// 1. Check eye blink detection
// 2. Analyze multiple facial movements
// 3. Use depth information if available
return face.withLiveness(
isLive: true,
confidence: face.detectionConfidence,
message: 'Liveness check passed successfully.',
);
} catch (e) {
return FaceModel.empty().withLiveness(
isLive: false,
confidence: 0.0,
message: 'Liveness check error: ${e.toString()}',
);
}
}
}
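A minimal usage sketch for the service above (hypothetical wiring, not part of the commit: verifyIdentity, ktpImage, and selfieImage are illustrative names, while the calls match the class as defined):

import 'package:image_picker/image_picker.dart';

Future<void> verifyIdentity(XFile ktpImage, XFile selfieImage) async {
  final service = AwsRecognitionService.instance;
  // Run the liveness check first so a spoofed selfie is rejected cheaply.
  final liveness = await service.performLivenessCheck(selfieImage);
  if (!liveness.isLive) {
    print('Liveness failed: ${liveness.message}');
    return;
  }
  // Then compare the KTP portrait against the live selfie.
  final result = await service.compareFaces(ktpImage, selfieImage);
  print(result.isMatch
      ? 'Match: ${(result.confidence * 100).toStringAsFixed(1)}% similarity'
      : 'No match: ${result.message}');
}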

View File

@ -3,8 +3,8 @@ import 'dart:io';
import 'package:dio/dio.dart';
import 'package:image_picker/image_picker.dart';
import 'package:sigap/src/features/daily-ops/data/models/models/kta_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/ktp_model.dart';
import 'package:sigap/src/features/auth/data/models/kta_model.dart';
import 'package:sigap/src/features/auth/data/models/ktp_model.dart';
import 'package:sigap/src/utils/constants/api_urls.dart';
import 'package:sigap/src/utils/dio.client/dio_client.dart';

View File

@ -3,7 +3,7 @@ import 'package:flutter_secure_storage/flutter_secure_storage.dart';
import 'package:get/get.dart';
import 'package:local_auth/local_auth.dart';
import 'package:sigap/src/cores/services/supabase_service.dart';
import 'package:sigap/src/features/auth/data/models/user_metadata_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/user_metadata_model.dart';
class BiometricService extends GetxService {
static BiometricService get instance => Get.find<BiometricService>();

View File

@ -1,5 +1,5 @@
import 'package:get/get.dart';
import 'package:sigap/src/features/auth/data/models/user_metadata_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/user_metadata_model.dart';
import 'package:supabase_flutter/supabase_flutter.dart';
class SupabaseService extends GetxService {

View File

@ -0,0 +1,248 @@
import 'package:image_picker/image_picker.dart';
/// A model class that represents facial recognition data and metrics
class FaceModel {
/// Unique identifier for the face
final String faceId;
/// Source image that contains the face
final XFile? sourceImage;
/// Raw face details as returned from AWS Rekognition
final Map<String, dynamic> faceDetails;
/// Confidence score for face detection (0.0-1.0)
final double detectionConfidence;
/// Whether the face passed liveness detection
final bool isLive;
/// Liveness detection confidence (0.0-1.0)
final double livenessConfidence;
/// Whether this face matches another face (after comparison)
final bool isMatch;
/// Match confidence with comparison face (0.0-1.0)
final double matchConfidence;
/// Message providing details about the face status
final String message;
/// Creates a FaceModel with the given parameters
const FaceModel({
required this.faceId,
this.sourceImage,
this.faceDetails = const {},
this.detectionConfidence = 0.0,
this.isLive = false,
this.livenessConfidence = 0.0,
this.isMatch = false,
this.matchConfidence = 0.0,
this.message = '',
});
/// Creates a FaceModel from AWS Rekognition detection response
factory FaceModel.fromDetection(
String faceId,
XFile sourceImage,
Map<String, dynamic> detectionData,
) {
final double confidence = (detectionData['Confidence'] ?? 0.0) / 100.0;
return FaceModel(
faceId: faceId,
sourceImage: sourceImage,
faceDetails: detectionData,
detectionConfidence: confidence,
message:
'Face detected with ${(confidence * 100).toStringAsFixed(1)}% confidence',
);
}
/// Creates a FaceModel with liveness details
FaceModel withLiveness({
required bool isLive,
required double confidence,
String? message,
}) {
return FaceModel(
faceId: faceId,
sourceImage: sourceImage,
faceDetails: faceDetails,
detectionConfidence: detectionConfidence,
isLive: isLive,
livenessConfidence: confidence,
isMatch: isMatch,
matchConfidence: matchConfidence,
message: message ?? this.message,
);
}
/// Creates a FaceModel with match details
FaceModel withMatch({
required bool isMatch,
required double confidence,
String? message,
}) {
return FaceModel(
faceId: faceId,
sourceImage: sourceImage,
faceDetails: faceDetails,
detectionConfidence: detectionConfidence,
isLive: isLive,
livenessConfidence: livenessConfidence,
isMatch: isMatch,
matchConfidence: confidence,
message: message ?? this.message,
);
}
/// Updates the message for this FaceModel
FaceModel withMessage(String newMessage) {
return FaceModel(
faceId: faceId,
sourceImage: sourceImage,
faceDetails: faceDetails,
detectionConfidence: detectionConfidence,
isLive: isLive,
livenessConfidence: livenessConfidence,
isMatch: isMatch,
matchConfidence: matchConfidence,
message: newMessage,
);
}
/// Creates an empty FaceModel with no data
factory FaceModel.empty() {
return const FaceModel(faceId: '', message: 'No face data available');
}
/// Checks if this FaceModel instance has valid face data
bool get hasValidFace => faceId.isNotEmpty && detectionConfidence > 0.5;
/// Returns age range if available in faceDetails
Map<String, int>? get ageRange {
if (faceDetails.containsKey('AgeRange')) {
return {
'low': faceDetails['AgeRange']['Low'] ?? 0,
'high': faceDetails['AgeRange']['High'] ?? 0,
};
}
return null;
}
/// Returns gender information if available
String? get gender => faceDetails['Gender']?['Value'];
/// Returns whether the person is smiling if available
bool? get isSmiling => faceDetails['Smile']?['Value'];
/// Returns a map representation of this model
Map<String, dynamic> toMap() {
return {
'faceId': faceId,
'detectionConfidence': detectionConfidence,
'isLive': isLive,
'livenessConfidence': livenessConfidence,
'isMatch': isMatch,
'matchConfidence': matchConfidence,
'message': message,
'hasValidFace': hasValidFace,
};
}
}
/// Result of a face comparison between two faces
class FaceComparisonResult {
/// The source face in the comparison
final FaceModel sourceFace;
/// The target face in the comparison
final FaceModel targetFace;
/// Whether the faces match
final bool isMatch;
/// Confidence level of the match (0.0-1.0)
final double confidence;
/// Message describing the comparison result
final String message;
/// Creates a FaceComparisonResult with the given parameters
const FaceComparisonResult({
required this.sourceFace,
required this.targetFace,
required this.isMatch,
required this.confidence,
required this.message,
});
/// Creates a FaceComparisonResult from AWS comparison response
factory FaceComparisonResult.fromAwsResponse(
FaceModel sourceFace,
FaceModel targetFace,
Map<String, dynamic> response,
) {
bool isMatch = false;
double confidence = 0.0;
String message = 'Face comparison failed';
if (response['FaceMatches'] != null && response['FaceMatches'].isNotEmpty) {
final match = response['FaceMatches'][0];
confidence = (match['Similarity'] ?? 0.0) / 100.0;
isMatch = confidence >= 0.8; // 80% threshold
message =
isMatch
? 'Face verification successful! Confidence: ${(confidence * 100).toStringAsFixed(1)}%'
: 'Face similarity too low: ${(confidence * 100).toStringAsFixed(1)}%';
} else {
message = 'No matching faces found';
}
return FaceComparisonResult(
sourceFace: sourceFace.withMatch(
isMatch: isMatch,
confidence: confidence,
),
targetFace: targetFace.withMatch(
isMatch: isMatch,
confidence: confidence,
),
isMatch: isMatch,
confidence: confidence,
message: message,
);
}
/// Creates an empty result with no match
factory FaceComparisonResult.noMatch(
FaceModel sourceFace,
FaceModel targetFace, {
String? message,
}) {
return FaceComparisonResult(
sourceFace: sourceFace,
targetFace: targetFace,
isMatch: false,
confidence: 0.0,
message: message ?? 'No matching faces',
);
}
/// Creates an error result
factory FaceComparisonResult.error(
FaceModel sourceFace,
FaceModel targetFace,
String errorMessage,
) {
return FaceComparisonResult(
sourceFace: sourceFace,
targetFace: targetFace,
isMatch: false,
confidence: 0.0,
message: 'Error: $errorMessage',
);
}
}
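Because FaceModel is immutable, the withLiveness/withMatch/withMessage helpers return modified copies, so verification state can be threaded through a pipeline without mutation. An illustrative sketch (image and detectionJson are placeholder inputs, not values from the commit):

final face = FaceModel.fromDetection('face_1', image, detectionJson)
    .withLiveness(isLive: true, confidence: 0.95)
    .withMatch(isMatch: true, confidence: 0.88, message: 'Verified');
assert(face.hasValidFace); // true once detection confidence exceeds 0.5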

View File

@ -1 +0,0 @@
export 'user_metadata_model.dart';

View File

@ -1,4 +1,4 @@
import 'package:sigap/src/features/personalization/data/models/models/resources_model.dart';
import 'package:sigap/src/features/auth/data/models/resources_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/roles_model.dart';
class PermissionModel {

View File

@ -1,4 +1,4 @@
import 'package:sigap/src/features/personalization/data/models/models/permissions_model.dart';
import 'package:sigap/src/features/auth/data/models/permissions_model.dart';
class ResourceModel {
final String id;

View File

@ -5,7 +5,7 @@ import 'package:logger/logger.dart';
import 'package:sigap/src/cores/services/biometric_service.dart';
import 'package:sigap/src/cores/services/location_service.dart';
import 'package:sigap/src/cores/services/supabase_service.dart';
import 'package:sigap/src/features/auth/data/models/user_metadata_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/user_metadata_model.dart';
import 'package:sigap/src/features/auth/presentasion/pages/signin/signin_screen.dart';
import 'package:sigap/src/utils/constants/app_routes.dart';
import 'package:sigap/src/utils/exceptions/exceptions.dart';
@ -65,16 +65,30 @@ class AuthenticationRepository extends GetxController {
}
}
static bool _isRedirecting = false;
/// Updated screenRedirect method to handle onboarding preferences
void screenRedirect({UserMetadataModel? arguments}) async {
// Prevent recursive calls with a static guard
if (_isRedirecting) {
Logger().w('Screen redirect already in progress, ignoring this call');
return;
}
try {
_isRedirecting = true;
Logger().d('Starting screen redirect');
final session = _supabase.auth.currentSession;
final bool isFirstTime = storage.read('isFirstTime') ?? false;
final bool isFirstTime = storage.read('isFirstTime') ?? true;
final isEmailVerified = session?.user.emailConfirmedAt != null;
final isProfileComplete =
session?.user.userMetadata?['profile_status'] == 'complete';
// Logger().i('isFirstTime screen redirect: $isFirstTime');
// Log the current state for debugging
Logger().d(
'Screen redirect state - Session: ${session != null}, Email verified: $isEmailVerified, Profile complete: $isProfileComplete, First time: $isFirstTime, Current route: ${Get.currentRoute}',
);
// Check the location first
if (await _locationService.isLocationValidForFeature() == false) {
@ -86,6 +100,11 @@ class AuthenticationRepository extends GetxController {
if (!isEmailVerified) {
_navigateToRoute(AppRoutes.emailVerification);
} else if (!isProfileComplete && isEmailVerified) {
// If already on registration form, don't redirect again
if (Get.currentRoute == AppRoutes.registrationForm) {
Logger().d('Already on registration form, not redirecting');
return;
}
_navigateToRoute(AppRoutes.registrationForm);
} else {
_navigateToRoute(AppRoutes.panicButton);
@ -96,13 +115,20 @@ class AuthenticationRepository extends GetxController {
} catch (e) {
Logger().e('Error in screenRedirect: $e');
_navigateToRoute(AppRoutes.signIn);
} finally {
_isRedirecting = false;
Logger().d('Screen redirect completed');
}
}
void _navigateToRoute(String routeName) {
if (Get.currentRoute != routeName) {
Get.offAllNamed(routeName);
// Check if already on this route to avoid navigation loops
if (Get.currentRoute == routeName) {
Logger().d('Already on $routeName, skipping navigation');
return;
}
Logger().d('Navigating to: $routeName');
Get.offAllNamed(routeName);
}
// Separate logic for users who are not yet signed in
@ -582,6 +608,11 @@ class AuthenticationRepository extends GetxController {
/// Updates user profile after registration form completion
Future<void> completeUserProfile(UserMetadataModel completeData) async {
if (_isRedirecting) {
// Don't attempt profile completion while already redirecting
throw 'Cannot complete profile during redirection. Please try again.';
}
try {
// Convert to UserModel
final userMetadataModel = UserMetadataModel.fromInitUserMetadata(
@ -604,6 +635,9 @@ class AuthenticationRepository extends GetxController {
.from('profiles')
.insert(completeData.viewerData!.toJson());
}
// Set redirection flag to ensure we don't navigate before setup is complete
_isRedirecting = true;
} catch (e) {
throw 'Failed to update profile: ${e.toString()}';
}

View File

@ -2,7 +2,7 @@ import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:get_storage/get_storage.dart';
import 'package:logger/logger.dart';
import 'package:sigap/src/features/auth/data/models/user_metadata_model.dart';
import 'package:sigap/src/cores/services/supabase_service.dart';
import 'package:sigap/src/features/auth/data/repositories/authentication_repository.dart';
import 'package:sigap/src/features/auth/presentasion/controllers/steps/id_card_verification_controller.dart';
import 'package:sigap/src/features/auth/presentasion/controllers/steps/identity_verification_controller.dart';
@ -12,6 +12,8 @@ import 'package:sigap/src/features/auth/presentasion/controllers/steps/selfie_ve
import 'package:sigap/src/features/auth/presentasion/controllers/steps/unit_info_controller.dart';
import 'package:sigap/src/features/daily-ops/data/models/index.dart';
import 'package:sigap/src/features/personalization/data/models/index.dart';
import 'package:sigap/src/features/personalization/data/models/models/user_metadata_model.dart';
import 'package:sigap/src/features/personalization/data/repositories/roles_repository.dart';
import 'package:sigap/src/utils/constants/app_routes.dart';
import 'package:sigap/src/utils/constants/num_int.dart';
import 'package:sigap/src/utils/popups/loaders.dart';
@ -50,7 +52,6 @@ class FormRegistrationController extends GetxController {
// Officer data
final Rx<OfficerModel?> officerModel = Rx<OfficerModel?>(null);
// Loading state
final RxBool isLoading = false.obs;
@ -60,8 +61,127 @@ class FormRegistrationController extends GetxController {
@override
void onInit() {
super.onInit();
// Initialize user data directly from current session
_initializeFromCurrentUser();
// Initialize user data directly from current session without triggering redirects
_initializeSafely();
}
/// Initialize safely without triggering redirects
void _initializeSafely() {
// First initialize form controllers to prevent null errors
_initializeControllers();
// Then fetch user data in the background
Future.microtask(() => _fetchUserDataOnly());
}
/// Fetch user data without triggering screen redirects
Future<void> _fetchUserDataOnly() async {
try {
Logger().d('Fetching user data safely without redirects');
// Get user session directly without going through AuthRepository methods that might trigger redirects
final session = SupabaseService.instance.client.auth.currentSession;
if (session?.user != null) {
final user = session!.user;
Logger().d('Found user session: ${user.id} - ${user.email}');
// Extract basic metadata
UserMetadataModel metadata = UserMetadataModel(
userId: user.id,
email: user.email,
roleId: user.userMetadata?['role_id'] as String?,
isOfficer: user.userMetadata?['is_officer'] as bool? ?? false,
profileStatus:
user.userMetadata?['profile_status'] as String? ?? 'incomplete',
);
// Try to parse complete metadata if available
if (user.userMetadata != null) {
try {
final fullMetadata = UserMetadataModel.fromJson(user.userMetadata);
metadata = fullMetadata;
Logger().d('Successfully parsed complete user metadata');
} catch (e) {
Logger().w('Could not parse full metadata: $e');
}
}
// Update the metadata value
userMetadata.value = metadata;
Logger().d('User metadata set: ${userMetadata.value.toString()}');
// Complete initialization without triggering redirects
await _completeInitialization();
} else {
// Check arguments and temp storage if no active session
_handleNoActiveSession();
}
} catch (e) {
Logger().e('Error fetching user data: $e');
// Set default values
userMetadata.value = const UserMetadataModel(
profileStatus: 'incomplete',
isOfficer: false,
);
}
}
/// Handle case when there's no active session
void _handleNoActiveSession() {
try {
final arguments = Get.arguments;
if (arguments is Map<String, dynamic> &&
arguments.containsKey('userId')) {
userMetadata.value = UserMetadataModel(
userId: arguments['userId'] as String?,
email: arguments['email'] as String?,
roleId: arguments['roleId'] as String?,
isOfficer: arguments['isOfficer'] as bool? ?? false,
profileStatus: 'incomplete',
);
} else {
// Check temporary storage
final tempUserId = storage.read('TEMP_USER_ID') as String?;
final tempEmail = storage.read('CURRENT_USER_EMAIL') as String?;
if (tempUserId != null || tempEmail != null) {
userMetadata.value = UserMetadataModel(
userId: tempUserId,
email: tempEmail,
roleId: storage.read('TEMP_ROLE_ID') as String?,
isOfficer: storage.read('IS_OFFICER') as bool? ?? false,
profileStatus: 'incomplete',
);
}
}
// Complete initialization without triggering redirects
_completeInitialization();
} catch (e) {
Logger().e('Error handling no active session: $e');
}
}
/// Complete initialization without triggering redirects
Future<void> _completeInitialization() async {
try {
// Set role if available
if (userMetadata.value.roleId?.isNotEmpty == true) {
await _setRoleFromMetadata();
}
// Fetch units if user is an officer
if (userMetadata.value.isOfficer ||
(selectedRole.value?.isOfficer == true)) {
await _fetchAvailableUnits();
}
Logger().d('Initialization completed successfully');
} catch (e) {
Logger().e('Error completing initialization: $e');
}
}
/// Initialize the controller directly from current user session
@ -105,53 +225,19 @@ class FormRegistrationController extends GetxController {
// Continue with the basic metadata already created
}
}
} else {
// No active session, check if any arguments were passed
final arguments = Get.arguments;
// If arguments contain a user ID, use it as fallback
if (arguments is Map<String, dynamic> &&
arguments.containsKey('userId')) {
metadata = metadata.copyWith(
userId: arguments['userId'] as String?,
email: arguments['email'] as String?,
roleId: arguments['roleId'] as String?,
isOfficer: arguments['isOfficer'] as bool? ?? false,
);
Logger().d('Using arguments as fallback: ${metadata.userId}');
} else {
// No user data available, check temporary storage
final tempUserId = storage.read('TEMP_USER_ID') as String?;
final tempEmail = storage.read('CURRENT_USER_EMAIL') as String?;
if (tempUserId != null || tempEmail != null) {
metadata = metadata.copyWith(
userId: tempUserId,
email: tempEmail,
roleId: storage.read('TEMP_ROLE_ID') as String?,
isOfficer: storage.read('TEMP_IS_OFFICER') as bool? ?? false,
);
Logger().d(
'Using temporary storage as fallback: ${metadata.userId}',
);
} else {
Logger().w('No user data available, using default empty metadata');
}
}
}
// Set the user metadata
userMetadata.value = metadata;
Logger().d('Final user metadata: ${userMetadata.value.toString()}');
// Complete initialization
await _finalizeInitialization();
} catch (e) {
Logger().e('Error initializing from current user: $e');
userMetadata.value = const UserMetadataModel(
profileStatus: 'incomplete',
isOfficer: false,
);
} finally {
// Complete initialization
await _finalizeInitialization();
}
}
@ -163,10 +249,18 @@ class FormRegistrationController extends GetxController {
_initializeControllers();
// Set role information if available
if (userMetadata.value.roleId?.isNotEmpty == true) {
await _setRoleFromMetadata();
if (userMetadata.value.roleId == null ||
userMetadata.value.roleId!.isEmpty) {
// If no role ID is found, show an error message
TLoaders.errorSnackBar(
title: 'Error',
message: 'Role ID not found. Please contact support.',
);
}
// Set role based on metadata
await _setRoleFromMetadata();
// Fetch units if user is an officer
if (userMetadata.value.isOfficer ||
(selectedRole.value?.isOfficer == true)) {
@ -183,34 +277,49 @@ class FormRegistrationController extends GetxController {
Future<void> _setRoleFromMetadata() async {
try {
final roleId = userMetadata.value.roleId;
if (roleId?.isNotEmpty == true) {
if (roleId == null) {
TLoaders.errorSnackBar(
title: 'Error',
message: 'Role ID not found. Please contact support.',
);
return;
}
// Try to find the role in available roles
final role = await _findRoleById(roleId!);
if (role != null) {
final role = await RolesRepository.instance.getRoleById(roleId);
selectedRole.value = role;
Logger().d('Role set from metadata: ${role.name}');
}
}
} catch (e) {
Logger().e('Error setting role from metadata: $e');
}
}
/// Find role by ID (implement based on your role management system)
Future<RoleModel?> _findRoleById(String roleId) async {
try {
// Implement based on your role fetching logic
// This is a placeholder - replace with your actual implementation
return null;
} catch (e) {
Logger().e('Error finding role by ID: $e');
return null;
}
}
void _initializeControllers() {
final isOfficer = userMetadata.value.isOfficer;
// Clear existing controllers first to prevent duplicates
if (Get.isRegistered<PersonalInfoController>()) {
Get.delete<PersonalInfoController>(force: true);
}
if (Get.isRegistered<IdCardVerificationController>()) {
Get.delete<IdCardVerificationController>(force: true);
}
if (Get.isRegistered<SelfieVerificationController>()) {
Get.delete<SelfieVerificationController>(force: true);
}
if (Get.isRegistered<IdentityVerificationController>()) {
Get.delete<IdentityVerificationController>(force: true);
}
if (Get.isRegistered<OfficerInfoController>()) {
Get.delete<OfficerInfoController>(force: true);
}
if (Get.isRegistered<UnitInfoController>()) {
Get.delete<UnitInfoController>(force: true);
}
// Initialize form key if not already initialized
formKey = GlobalKey<FormState>();
// Initialize controllers with built-in static form keys
Get.put<PersonalInfoController>(PersonalInfoController(), permanent: false);
@ -232,7 +341,6 @@ class FormRegistrationController extends GetxController {
// Initialize officer-specific controllers only if user is an officer
if (isOfficer) {
Get.put<OfficerInfoController>(OfficerInfoController(), permanent: false);
Get.put<UnitInfoController>(UnitInfoController(), permanent: false);
totalSteps =
@ -254,7 +362,7 @@ class FormRegistrationController extends GetxController {
selfieVerificationController = Get.find<SelfieVerificationController>();
identityController = Get.find<IdentityVerificationController>();
// Initialize selectedRole based on isOfficer
// Initialize selectedRole based on isOfficer if not already set
if (selectedRole.value == null &&
userMetadata.value.additionalData != null) {
final roleData = userMetadata.value.additionalData?['role'];
@ -395,6 +503,7 @@ class FormRegistrationController extends GetxController {
submitForm();
}
}
void clearPreviousStepErrors() {
switch (currentStep.value) {
case 0:

View File

@ -2,7 +2,7 @@ import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:get_storage/get_storage.dart';
import 'package:logger/logger.dart';
import 'package:sigap/src/features/auth/data/models/user_metadata_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/user_metadata_model.dart';
import 'package:sigap/src/features/auth/data/repositories/authentication_repository.dart';
import 'package:sigap/src/utils/constants/app_routes.dart';
import 'package:sigap/src/utils/helpers/network_manager.dart';

View File

@ -4,7 +4,7 @@ import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:get_storage/get_storage.dart';
import 'package:logger/logger.dart';
import 'package:sigap/src/features/auth/data/models/user_metadata_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/user_metadata_model.dart';
import 'package:sigap/src/features/auth/data/repositories/authentication_repository.dart';
import 'package:sigap/src/features/personalization/data/models/index.dart';
import 'package:sigap/src/features/personalization/data/repositories/roles_repository.dart';
@ -243,7 +243,7 @@ class SignupWithRoleController extends GetxController {
// Navigate with arguments
Logger().i('Navigating to registration form');
// AuthenticationRepository.instance.screenRedirect();
AuthenticationRepository.instance.screenRedirect();
} catch (e) {
Logger().e('Error during signup: $e');
String errorMessage = _getReadableErrorMessage(e.toString());

View File

@ -2,17 +2,21 @@ import 'dart:io';
import 'package:get/get.dart';
import 'package:image_picker/image_picker.dart';
import 'package:sigap/src/cores/services/aws_rekognition_service.dart';
import 'package:sigap/src/cores/services/azure_ocr_service.dart';
import 'package:sigap/src/features/daily-ops/data/models/models/kta_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/ktp_model.dart';
import 'package:sigap/src/features/auth/data/models/face_model.dart';
import 'package:sigap/src/features/auth/data/models/kta_model.dart';
import 'package:sigap/src/features/auth/data/models/ktp_model.dart';
class IdCardVerificationController extends GetxController {
// Singleton instance
static IdCardVerificationController get instance => Get.find();
// Static form key
// final GlobalKey<FormState> formKey = TGlobalFormKey.idCardVerification();
// Services
final AzureOCRService _ocrService = AzureOCRService();
// Using AWS for face recognition
final AwsRecognitionService _faceService = AwsRecognitionService.instance;
final bool isOfficer;
// Maximum allowed file size in bytes (4MB)
@ -42,7 +46,10 @@ class IdCardVerificationController extends GetxController {
final Rx<KtpModel?> ktpModel = Rx<KtpModel?>(null);
final Rx<KtaModel?> ktaModel = Rx<KtaModel?>(null);
// Store face ID from the ID card for later comparison
// Use FaceModel to store face details from ID card
final Rx<FaceModel> idCardFace = Rx<FaceModel>(FaceModel.empty());
// Store face ID from the ID card for later comparison (for backward compatibility)
final RxString idCardFaceId = RxString('');
final RxBool hasFaceDetected = RxBool(false);
@ -123,7 +130,8 @@ class IdCardVerificationController extends GetxController {
ktpModel.value = null;
ktaModel.value = null;
// Reset face detection flags
// Reset face detection data
idCardFace.value = FaceModel.empty();
idCardFaceId.value = '';
hasFaceDetected.value = false;
@ -141,7 +149,7 @@ class IdCardVerificationController extends GetxController {
bool isImageValid = false;
try {
// Try to process the ID card to check if it can be processed properly
// Process the ID card with OCR (still using Azure OCR)
final result = await _ocrService.processIdCard(
idCardImage.value!,
isOfficer,
@ -165,16 +173,19 @@ class IdCardVerificationController extends GetxController {
ktpModel.value = _ocrService.createKtpModel(result);
}
// Try to detect faces in the ID card image
// Try to detect faces in the ID card image using AWS Rekognition
if (isImageValid) {
try {
final faces = await _ocrService.detectFacesInImage(
idCardImage.value!,
);
// Use AWS Rekognition to detect faces
final faces = await _faceService.detectFaces(idCardImage.value!);
if (faces.isNotEmpty) {
idCardFaceId.value = faces[0]['faceId'] ?? '';
hasFaceDetected.value = idCardFaceId.value.isNotEmpty;
print('Face detected in ID card: ${idCardFaceId.value}');
// Store the face model
idCardFace.value = faces.first;
// For backward compatibility
idCardFaceId.value = faces.first.faceId;
hasFaceDetected.value = idCardFace.value.hasValidFace;
print('Face detected in ID card: ${idCardFace.value.faceId}');
}
} catch (faceError) {
print('Face detection failed: $faceError');
@ -231,8 +242,7 @@ class IdCardVerificationController extends GetxController {
String? get idCardImagePath => idCardImage.value?.path;
// Check if the ID card has a detected face
bool get hasDetectedFace =>
hasFaceDetected.value && idCardFaceId.value.isNotEmpty;
bool get hasDetectedFace => idCardFace.value.hasValidFace;
// Clear ID Card Image
void clearIdCardImage() {
@ -245,6 +255,9 @@ class IdCardVerificationController extends GetxController {
hasExtractedInfo.value = false;
ktpModel.value = null;
ktaModel.value = null;
idCardFace.value = FaceModel.empty();
idCardFaceId.value = '';
hasFaceDetected.value = false;
}
// Confirm ID Card Image

View File

@ -1,10 +1,13 @@
import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:sigap/src/cores/services/aws_rekognition_service.dart';
import 'package:sigap/src/cores/services/azure_ocr_service.dart';
import 'package:sigap/src/features/auth/data/models/face_model.dart';
import 'package:sigap/src/features/auth/data/models/kta_model.dart';
import 'package:sigap/src/features/auth/data/models/ktp_model.dart';
import 'package:sigap/src/features/auth/presentasion/controllers/registration_form_controller.dart';
import 'package:sigap/src/features/daily-ops/data/models/models/kta_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/ktp_model.dart';
// ... other imports
import 'package:sigap/src/features/auth/presentasion/controllers/steps/id_card_verification_controller.dart';
import 'package:sigap/src/features/auth/presentasion/controllers/steps/selfie_verification_controller.dart';
class IdentityVerificationController extends GetxController {
// Singleton instance
@ -13,6 +16,8 @@ class IdentityVerificationController extends GetxController {
// Dependencies
final bool isOfficer;
final AzureOCRService _ocrService = AzureOCRService();
// Use AWS Rekognition for face detection instead of Azure Face API
final AwsRecognitionService _faceService = AwsRecognitionService.instance;
// Controllers
final TextEditingController nikController = TextEditingController();
@ -39,27 +44,45 @@ class IdentityVerificationController extends GetxController {
final RxBool isFaceVerified = RxBool(false);
final RxString faceVerificationMessage = RxString('');
// Use FaceComparisonResult for face verification
final Rx<FaceComparisonResult?> faceComparisonResult =
Rx<FaceComparisonResult?>(null);
// Gender selection
final Rx<String?> selectedGender = Rx<String?>(null);
// Form validation
final RxBool isFormValid = RxBool(true);
IdentityVerificationController({required this.isOfficer}) {
// Apply data from previous step if available
_applyIdCardData();
}
// Flag to prevent infinite loop
bool _isApplyingData = false;
IdentityVerificationController({required this.isOfficer});
@override
void onInit() {
super.onInit();
_applyIdCardData();
// Delay data application to avoid initialization issues
Future.microtask(() => _safeApplyIdCardData());
}
// Apply ID card data from the previous step
void _applyIdCardData() {
// Safely apply ID card data without risking stack overflow
void _safeApplyIdCardData() {
if (_isApplyingData) return; // Guard against recursive calls
try {
_isApplyingData = true;
// Check if FormRegistrationController is ready
if (!Get.isRegistered<FormRegistrationController>()) {
return;
}
final formController = Get.find<FormRegistrationController>();
if (formController.idCardData.value == null) {
return;
}
final idCardData = formController.idCardData.value;
if (idCardData != null) {
@ -104,8 +127,6 @@ class IdentityVerificationController extends GetxController {
KtaModel ktaModel = idCardData;
// For officer, we'd fill in different fields as needed
// Since we don't require NIK for officers, no need to set nikController
if (ktaModel.name.isNotEmpty) {
fullNameController.text = ktaModel.name;
}
@ -124,6 +145,8 @@ class IdentityVerificationController extends GetxController {
}
} catch (e) {
print('Error applying ID card data: $e');
} finally {
_isApplyingData = false;
}
}
@ -272,33 +295,46 @@ class IdentityVerificationController extends GetxController {
return matches >= (parts1.length / 2).floor();
}
// Simple face verification function simulation
// Face verification function using AWS Rekognition instead of Azure
void verifyFaceMatch() {
isVerifyingFace.value = true;
// Simulate verification process with a delay
Future.delayed(const Duration(seconds: 2), () {
try {
// In a real implementation, this would call the proper face verification API
// Get ID card and selfie images
final formController = Get.find<FormRegistrationController>();
final idCardData = formController.idCardData.value;
final idCardController = Get.find<IdCardVerificationController>();
final selfieController = Get.find<SelfieVerificationController>();
if (idCardData != null) {
// Simulate successful match for demonstration
isFaceVerified.value = true;
faceVerificationMessage.value = 'Face verification successful!';
} else {
// Check if we have both images
if (idCardController.idCardImage.value == null ||
selfieController.selfieImage.value == null) {
isFaceVerified.value = false;
faceVerificationMessage.value =
'No ID card data available to verify face.';
}
} catch (e) {
isFaceVerified.value = false;
faceVerificationMessage.value = 'Error during face verification.';
print('Face verification error: $e');
} finally {
'Both ID card and selfie are required for face verification.';
isVerifyingFace.value = false;
return;
}
// Use AWS Rekognition to compare faces
_faceService
.compareFaces(
idCardController.idCardImage.value!,
selfieController.selfieImage.value!,
)
.then((result) {
// Store the comparison result
faceComparisonResult.value = result;
// Update verification status
isFaceVerified.value = result.isMatch;
faceVerificationMessage.value = result.message;
})
.catchError((e) {
isFaceVerified.value = false;
faceVerificationMessage.value = 'Error during face verification: $e';
print('Face verification error: $e');
})
.whenComplete(() {
isVerifyingFace.value = false;
});
}
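The then/catchError/whenComplete chain above could equivalently be written with async/await and try/finally, which keeps the state updates easier to scan. A sketch of that form, using only members already defined on this controller (not part of the commit):

Future<void> verifyFaceMatchAsync() async {
  isVerifyingFace.value = true;
  try {
    final result = await _faceService.compareFaces(
      Get.find<IdCardVerificationController>().idCardImage.value!,
      Get.find<SelfieVerificationController>().selfieImage.value!,
    );
    faceComparisonResult.value = result;
    isFaceVerified.value = result.isMatch;
    faceVerificationMessage.value = result.message;
  } catch (e) {
    isFaceVerified.value = false;
    faceVerificationMessage.value = 'Error during face verification: $e';
  } finally {
    isVerifyingFace.value = false;
  }
}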

View File

@ -2,16 +2,16 @@ import 'dart:io';
import 'package:get/get.dart';
import 'package:image_picker/image_picker.dart';
import 'package:sigap/src/cores/services/azure_ocr_service.dart';
import 'package:sigap/src/cores/services/aws_rekognition_service.dart';
import 'package:sigap/src/features/auth/data/models/face_model.dart';
import 'package:sigap/src/features/auth/presentasion/controllers/steps/id_card_verification_controller.dart';
class SelfieVerificationController extends GetxController {
// Singleton instance
static SelfieVerificationController get instance => Get.find();
// Static form key
// final GlobalKey<FormState> formKey = TGlobalFormKey.selfieVerification();
final AzureOCRService _ocrService = AzureOCRService();
// Services - Use AWS Rekognition
final AwsRecognitionService _faceService = AwsRecognitionService.instance;
// Maximum allowed file size in bytes (4MB)
final int maxFileSizeBytes = 4 * 1024 * 1024; // 4MB in bytes
@ -36,6 +36,15 @@ class SelfieVerificationController extends GetxController {
// Face comparison with ID card photo
final RxBool isComparingWithIDCard = RxBool(false);
// Use FaceModel to store selfie face details
final Rx<FaceModel> selfieFace = Rx<FaceModel>(FaceModel.empty());
// Use FaceComparisonResult to store comparison results
final Rx<FaceComparisonResult?> faceComparisonResult =
Rx<FaceComparisonResult?>(null);
// For backward compatibility
final RxBool isMatchWithIDCard = RxBool(false);
final RxDouble matchConfidence = RxDouble(0.0);
final RxString selfieImageFaceId = RxString('');
@ -71,9 +80,13 @@ class SelfieVerificationController extends GetxController {
try {
isUploadingSelfie.value = true;
hasConfirmedSelfie.value = false; // Reset confirmation when image changes
isMatchWithIDCard.value = false; // Reset face match status
matchConfidence.value = 0.0; // Reset confidence score
selfieImageFaceId.value = ''; // Reset selfie face ID
// Reset face data
selfieFace.value = FaceModel.empty();
faceComparisonResult.value = null;
isMatchWithIDCard.value = false;
matchConfidence.value = 0.0;
selfieImageFaceId.value = '';
final ImagePicker picker = ImagePicker();
final XFile? image = await picker.pickImage(
@ -110,7 +123,7 @@ class SelfieVerificationController extends GetxController {
}
}
// Initial validation of selfie image
// Initial validation of selfie image using AWS Rekognition
Future<void> validateSelfieImage() async {
// Clear previous validation messages
clearErrors();
@ -124,36 +137,29 @@ class SelfieVerificationController extends GetxController {
try {
isVerifyingFace.value = true;
// Use liveness check to validate if the selfie has a valid face
final livenessResult = await _ocrService.performLivenessCheck(
// Use AWS Rekognition for liveness check
final FaceModel livenessFace = await _faceService.performLivenessCheck(
selfieImage.value!,
);
isLivenessCheckPassed.value = livenessResult['isLive'] ?? false;
if (livenessResult['isLive'] == true) {
// Update the face model
selfieFace.value = livenessFace;
// Update liveness status
isLivenessCheckPassed.value = livenessFace.isLive;
// For backward compatibility
selfieImageFaceId.value = livenessFace.faceId;
if (livenessFace.isLive) {
isSelfieValid.value = true;
selfieValidationMessage.value =
'Face detected. Please confirm this is you.';
// Try to detect face and get face ID for later comparison
try {
final faces = await _ocrService.detectFacesInImage(
selfieImage.value!,
);
if (faces.isNotEmpty) {
selfieImageFaceId.value = faces[0]['faceId'] ?? '';
selfieValidationMessage.value = livenessFace.message;
// Compare with ID card photo if available
await compareWithIDCardPhoto();
}
} catch (faceError) {
print('Selfie face detection failed: $faceError');
}
} else {
isSelfieValid.value = false;
selfieValidationMessage.value =
livenessResult['message'] ??
'No face detected or liveness check failed. Please take a clearer selfie.';
selfieValidationMessage.value = livenessFace.message;
}
} catch (e) {
isSelfieValid.value = false;
@ -163,45 +169,44 @@ class SelfieVerificationController extends GetxController {
}
}
// Compare selfie with ID card photo
// Compare selfie with ID card photo using AWS Rekognition
Future<void> compareWithIDCardPhoto() async {
try {
final idCardController = Get.find<IdCardVerificationController>();
// Check if both face IDs are available
if (selfieImageFaceId.value.isEmpty ||
!idCardController.hasDetectedFace) {
print('Cannot compare faces: Missing face ID');
// Check if both images are available
if (selfieImage.value == null ||
idCardController.idCardImage.value == null) {
print('Cannot compare faces: Missing images');
return;
}
isComparingWithIDCard.value = true;
// Compare the two faces
final result = await _ocrService.compareFaceIds(
idCardController.idCardFaceId.value,
selfieImageFaceId.value,
// Use AWS Rekognition to compare the faces
final comparisonResult = await _faceService.compareFaces(
idCardController.idCardImage.value!,
selfieImage.value!
);
isMatchWithIDCard.value = result['isMatch'] ?? false;
matchConfidence.value = result['confidence'] ?? 0.0;
// Store the comparison result
faceComparisonResult.value = comparisonResult;
// For backward compatibility
isMatchWithIDCard.value = comparisonResult.isMatch;
matchConfidence.value = comparisonResult.confidence;
// Update validation message to include face comparison result
if (isMatchWithIDCard.value) {
selfieValidationMessage.value =
'Face verified! Your selfie matches your ID photo with ${(matchConfidence.value * 100).toStringAsFixed(1)}% confidence.';
} else if (matchConfidence.value > 0) {
selfieValidationMessage.value =
'Face verification failed. Your selfie does not match your ID photo (${(matchConfidence.value * 100).toStringAsFixed(1)}% similarity).';
}
selfieValidationMessage.value = comparisonResult.message;
} catch (e) {
print('Face comparison error: $e');
selfieValidationMessage.value = 'Face comparison error: $e';
} finally {
isComparingWithIDCard.value = false;
}
}
// Manually trigger face comparison with ID card
// Manually trigger face comparison with ID card using AWS Rekognition
Future<void> verifyFaceMatchWithIDCard() async {
if (selfieImage.value == null) {
selfieError.value = 'Please take a selfie first';
@ -214,24 +219,13 @@ class SelfieVerificationController extends GetxController {
// Get the ID card controller
final idCardController = Get.find<IdCardVerificationController>();
if (!idCardController.hasDetectedFace) {
if (idCardController.idCardImage.value == null) {
selfieValidationMessage.value =
'No face detected in ID card for comparison';
'ID card image is required for comparison';
return;
}
// If we don't have a selfie face ID yet, detect it now
if (selfieImageFaceId.value.isEmpty) {
final faces = await _ocrService.detectFacesInImage(selfieImage.value!);
if (faces.isNotEmpty) {
selfieImageFaceId.value = faces[0]['faceId'] ?? '';
} else {
selfieValidationMessage.value = 'No face detected in your selfie';
return;
}
}
// Compare faces
// Compare faces directly using AWS Rekognition
await compareWithIDCardPhoto();
} catch (e) {
selfieValidationMessage.value = 'Face verification failed: $e';
@ -248,6 +242,8 @@ class SelfieVerificationController extends GetxController {
selfieValidationMessage.value = '';
isLivenessCheckPassed.value = false;
hasConfirmedSelfie.value = false;
selfieFace.value = FaceModel.empty();
faceComparisonResult.value = null;
isMatchWithIDCard.value = false;
matchConfidence.value = 0.0;
selfieImageFaceId.value = '';

View File

@ -2,8 +2,8 @@ import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:sigap/src/features/auth/presentasion/controllers/registration_form_controller.dart';
import 'package:sigap/src/features/auth/presentasion/controllers/steps/id_card_verification_controller.dart';
import 'package:sigap/src/features/daily-ops/data/models/models/kta_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/ktp_model.dart';
import 'package:sigap/src/features/auth/data/models/kta_model.dart';
import 'package:sigap/src/features/auth/data/models/ktp_model.dart';
import 'package:sigap/src/shared/widgets/image_upload/image_source_dialog.dart';
import 'package:sigap/src/shared/widgets/image_upload/image_uploader.dart';
import 'package:sigap/src/shared/widgets/info/tips_container.dart';

View File

@ -1,7 +1,7 @@
import 'package:get/get.dart';
import 'package:sigap/src/cores/services/location_service.dart';
import 'package:sigap/src/cores/services/supabase_service.dart';
import 'package:sigap/src/features/auth/data/models/user_metadata_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/user_metadata_model.dart';
import 'package:sigap/src/utils/exceptions/exceptions.dart';
class PanicButtonRepository extends GetxController {

View File

@ -1,6 +1,6 @@
export 'models/permissions_model.dart';
export '../../../auth/data/models/permissions_model.dart';
export 'models/profile_model.dart';
export 'models/resources_model.dart';
export '../../../auth/data/models/resources_model.dart';
export 'models/roles_model.dart';
export 'models/users_model.dart';
export 'supadart-models/permissions_model_supadart.dart';

View File

@ -1,7 +1,7 @@
import 'package:get/get.dart';
import 'package:logger/logger.dart';
import 'package:sigap/src/cores/services/supabase_service.dart';
import 'package:sigap/src/features/personalization/data/models/models/permissions_model.dart';
import 'package:sigap/src/features/auth/data/models/permissions_model.dart';
import 'package:sigap/src/utils/exceptions/exceptions.dart';
import 'package:supabase_flutter/supabase_flutter.dart';

View File

@ -1,7 +1,7 @@
import 'package:get/get.dart';
import 'package:logger/logger.dart';
import 'package:sigap/src/cores/services/supabase_service.dart';
import 'package:sigap/src/features/personalization/data/models/models/resources_model.dart';
import 'package:sigap/src/features/auth/data/models/resources_model.dart';
import 'package:sigap/src/utils/exceptions/exceptions.dart';
import 'package:supabase_flutter/supabase_flutter.dart';

View File

@ -1,7 +1,7 @@
import 'package:get/get.dart';
import 'package:logger/logger.dart';
import 'package:sigap/src/cores/services/supabase_service.dart';
import 'package:sigap/src/features/personalization/data/models/models/permissions_model.dart';
import 'package:sigap/src/features/auth/data/models/permissions_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/roles_model.dart';
import 'package:sigap/src/utils/exceptions/exceptions.dart';
import 'package:supabase_flutter/supabase_flutter.dart';

View File

@ -1,6 +1,6 @@
import 'package:flutter/material.dart';
import 'package:sigap/src/features/daily-ops/data/models/models/kta_model.dart';
import 'package:sigap/src/features/personalization/data/models/models/ktp_model.dart';
import 'package:sigap/src/features/auth/data/models/kta_model.dart';
import 'package:sigap/src/features/auth/data/models/ktp_model.dart';
import 'package:sigap/src/utils/constants/colors.dart';
import 'package:sigap/src/utils/constants/sizes.dart';

View File

@ -1,12 +1,18 @@
import 'package:flutter_dotenv/flutter_dotenv.dart';
class Endpoints {
// Environment variables
static String get supabaseUrl => dotenv.env['SUPABASE_URL'] ?? '';
static String get supabaseAnonKey => dotenv.env['SUPABASE_ANON_KEY'] ?? '';
static String get supabaseServiceRoleKey =>
dotenv.env['SERVICE_ROLE_SECRET'] ?? '';
// Base URL
static const devUrl = "";
static const String prodUrl = '';
static const String baseUrl = '$devUrl/api';
static String get azureResource => dotenv.env['AZURE_RESOURCE_NAME'] ?? '';
static String get azureFaceResource =>
dotenv.env['AZURE_FACE_RESOURCE_NAME'] ?? '';
@ -26,4 +32,16 @@ class Endpoints {
static String get faceApiPath => 'face/v1.2/detect';
static String get faceVerifyPath => 'face/v1.2/verify';
// AWS Rekognition configuration (getters, consistent with the Azure values
// above, so they always reflect the loaded .env)
static String get awsRegion => dotenv.env['AWS_REGION'] ?? '';
static String get awsAccessKey => dotenv.env['AWS_ACCESS_KEY'] ?? '';
static String get awsSecretKey => dotenv.env['AWS_SECRET_KEY'] ?? '';
static String get awsRekognitionEndpoint =>
'https://rekognition.$awsRegion.amazonaws.com';
// Supabase Edge Functions
static String get detectFace => '$supabaseUrl/functions/v1/detect-face';
static String get verifyFace => '$supabaseUrl/functions/v1/verify-face';
}
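Every Endpoints value reads from dotenv, so the .env file must be loaded before any getter is accessed. A minimal bootstrap sketch, assuming the standard flutter_dotenv setup (not taken from the commit):

import 'package:flutter_dotenv/flutter_dotenv.dart';

Future<void> main() async {
  await dotenv.load(fileName: '.env');
  assert(Endpoints.awsRegion.isNotEmpty, 'AWS_REGION missing from .env');
  // runApp(...) would follow here.
}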

View File

@ -0,0 +1,152 @@
import 'dart:convert';
import 'package:crypto/crypto.dart';
import 'package:intl/intl.dart';
class AwsSignature {
final String accessKey;
final String secretKey;
final String region;
final String serviceName;
AwsSignature({
required this.accessKey,
required this.secretKey,
required this.region,
required this.serviceName,
});
Map<String, String> buildRequestHeaders({
required String method,
required Uri uri,
required Map<String, dynamic> payload,
required DateTime dateTime,
}) {
// Format date strings for AWS signature
final amzDate = _formatAmzDate(dateTime);
final dateStamp = _formatDateStamp(dateTime);
// Create canonical request components
final canonicalUri = uri.path.isEmpty ? '/' : uri.path;
final canonicalQueryString = _buildCanonicalQueryString(uri);
final payloadJson = jsonEncode(payload);
final payloadHash = _hashString(payloadJson);
// Default headers for AWS Rekognition. The x-amz-target header names the
// API operation (e.g. RekognitionService.DetectFaces), which Rekognition
// uses to route the request, so derive it from the URI's last path segment
// rather than from the HTTP method.
final operation =
uri.pathSegments.isNotEmpty && uri.pathSegments.last.isNotEmpty
? uri.pathSegments.last
: method;
final headers = {
'content-type': 'application/x-amz-json-1.1',
'host': uri.host,
'x-amz-date': amzDate,
'x-amz-target': 'RekognitionService.$operation',
};
// Create canonical headers string
final canonicalHeaders = _buildCanonicalHeaders(headers);
final signedHeaders = _buildSignedHeaders(headers);
// Build canonical request
final canonicalRequest = [
method,
canonicalUri,
canonicalQueryString,
canonicalHeaders,
signedHeaders,
payloadHash,
].join('\n');
// Create string to sign
final algorithm = 'AWS4-HMAC-SHA256';
final credentialScope = '$dateStamp/$region/$serviceName/aws4_request';
final stringToSign = [
algorithm,
amzDate,
credentialScope,
_hashString(canonicalRequest),
].join('\n');
// Calculate signature
final signature = _calculateSignature(
secretKey: secretKey,
dateStamp: dateStamp,
region: region,
serviceName: serviceName,
stringToSign: stringToSign,
);
// Create Authorization header
final authorizationHeader = [
'$algorithm Credential=$accessKey/$credentialScope',
'SignedHeaders=$signedHeaders',
'Signature=$signature',
].join(', ');
// Return final headers
return {...headers, 'Authorization': authorizationHeader};
}
String _formatAmzDate(DateTime dateTime) {
return DateFormat("yyyyMMdd'T'HHmmss'Z'").format(dateTime);
}
String _formatDateStamp(DateTime dateTime) {
return DateFormat('yyyyMMdd').format(dateTime);
}
String _buildCanonicalQueryString(Uri uri) {
if (uri.queryParameters.isEmpty) {
return '';
}
final sortedParameters = uri.queryParameters.keys.toList()..sort();
return sortedParameters
.map((key) => '$key=${Uri.encodeComponent(uri.queryParameters[key]!)}')
.join('&');
}
String _buildCanonicalHeaders(Map<String, String> headers) {
final sortedKeys =
headers.keys.map((k) => k.toLowerCase()).toList()..sort();
return sortedKeys.map((key) => '$key:${headers[key]?.trim()}\n').join();
}
String _buildSignedHeaders(Map<String, String> headers) {
final sortedKeys =
headers.keys.map((k) => k.toLowerCase()).toList()..sort();
return sortedKeys.join(';');
}
String _hashString(String input) {
final bytes = utf8.encode(input);
final digest = sha256.convert(bytes);
return digest.toString();
}
String _calculateSignature({
required String secretKey,
required String dateStamp,
required String region,
required String serviceName,
required String stringToSign,
}) {
// Create signing key
final kDate = _hmacSha256('AWS4$secretKey', dateStamp);
final kRegion = _hmacSha256(kDate, region);
final kService = _hmacSha256(kRegion, serviceName);
final kSigning = _hmacSha256(kService, 'aws4_request');
// Calculate signature
return _hmacSha256Hex(kSigning, stringToSign);
}
List<int> _hmacSha256(dynamic key, String message) {
var hmac = Hmac(sha256, key is String ? utf8.encode(key) : key);
var digest = hmac.convert(utf8.encode(message));
return digest.bytes;
}
String _hmacSha256Hex(List<int> key, String message) {
var hmac = Hmac(sha256, key);
var digest = hmac.convert(utf8.encode(message));
return digest.toString();
}
}
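
For orientation, a minimal sketch of how this helper might be wired into a Dio call against Rekognition. The region, credentials, payload, and the detectFacesSketch wrapper are illustrative assumptions rather than code from this repository, and the example relies on the method/operation split shown above:

import 'dart:convert';

import 'package:dio/dio.dart';

Future<Map<String, dynamic>> detectFacesSketch(String base64Image) async {
  const region = 'ap-southeast-1'; // placeholder: take from app config
  final uri = Uri.parse('https://rekognition.$region.amazonaws.com/');
  final payload = {
    'Image': {'Bytes': base64Image},
    'Attributes': ['ALL'],
  };

  final signer = AwsSignature(
    accessKey: '<access-key>', // placeholders: inject from secure config
    secretKey: '<secret-key>',
    region: region,
    serviceName: 'rekognition',
  );

  // HTTP verb and Rekognition operation are passed separately.
  final headers = signer.buildRequestHeaders(
    method: 'POST',
    operation: 'DetectFaces',
    uri: uri,
    payload: payload,
    dateTime: DateTime.now().toUtc(),
  );

  // Send the same JSON string that was hashed during signing.
  final response = await Dio().postUri(
    uri,
    data: jsonEncode(payload),
    options: Options(headers: headers),
  );
  return response.data as Map<String, dynamic>;
}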

View File

@ -77,7 +77,8 @@
"uuid": "^11.1.0",
"vaul": "^1.1.2",
"zod": "^3.24.2",
"zustand": "^5.0.3"
"zustand": "^5.0.3",
"@aws-sdk/client-rekognition": "^3.449.0"
},
"devDependencies": {
"@snaplet/copycat": "^6.0.0",

View File

@ -0,0 +1,139 @@
// Follow this setup guide to integrate the Deno language server with your editor:
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
// Setup type definitions for built-in Supabase Runtime APIs
import "jsr:@supabase/functions-js/edge-runtime.d.ts";
import { serve } from "https://deno.land/std@0.177.0/http/server.ts";
const AWS_REGION = Deno.env.get('AWS_REGION');
const AWS_ACCESS_KEY = Deno.env.get('AWS_ACCESS_KEY');
const AWS_SECRET_KEY = Deno.env.get('AWS_SECRET_KEY');
serve(async (req)=>{
try {
// Check if we have AWS credentials
if (!AWS_REGION || !AWS_ACCESS_KEY || !AWS_SECRET_KEY) {
return new Response(JSON.stringify({
error: 'AWS credentials are not configured'
}), {
status: 500,
headers: {
'Content-Type': 'application/json'
}
});
}
// Parse the multipart form data to get the image
const formData = await req.formData();
const image = formData.get('image');
if (!image || !(image instanceof File)) {
return new Response(JSON.stringify({
error: 'Image file is required'
}), {
status: 400,
headers: {
'Content-Type': 'application/json'
}
});
}
    // Convert image to base64 in chunks; spreading the whole buffer into
    // String.fromCharCode overflows the call stack on typical photo sizes.
    const imageBuffer = new Uint8Array(await image.arrayBuffer());
    let binary = '';
    for (let i = 0; i < imageBuffer.length; i += 0x8000) {
      binary += String.fromCharCode(...imageBuffer.subarray(i, i + 0x8000));
    }
    const base64Image = btoa(binary);
// Create AWS signature for authorization
const date = new Date();
const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '');
const dateStamp = amzDate.substring(0, 8);
const host = `rekognition.${AWS_REGION}.amazonaws.com`;
const endpoint = `https://${host}/`;
const request = {
"Image": {
"Bytes": base64Image
},
"Attributes": [
"ALL"
]
};
// AWS Signature V4 calculation
const method = 'POST';
const service = 'rekognition';
const contentType = 'application/x-amz-json-1.1';
const amzTarget = 'RekognitionService.DetectFaces';
const canonicalUri = '/';
const canonicalQueryString = '';
const payloadHash = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(JSON.stringify(request))).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''));
const canonicalHeaders = `content-type:${contentType}\n` + `host:${host}\n` + `x-amz-date:${amzDate}\n` + `x-amz-target:${amzTarget}\n`;
const signedHeaders = 'content-type;host;x-amz-date;x-amz-target';
const canonicalRequest = `${method}\n${canonicalUri}\n${canonicalQueryString}\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`;
const algorithm = 'AWS4-HMAC-SHA256';
const credentialScope = `${dateStamp}/${AWS_REGION}/${service}/aws4_request`;
const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${await crypto.subtle.digest("SHA-256", new TextEncoder().encode(canonicalRequest)).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''))}`;
    const getSignatureKey = async (key, dateStamp, regionName, serviceName)=>{
      // HMAC-SHA256(keyBytes, message) -> ArrayBuffer via WebCrypto
      const hmac = async (keyBytes, message)=>crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", keyBytes, {
        name: "HMAC",
        hash: "SHA-256"
      }, false, [
        "sign"
      ]), new TextEncoder().encode(message));
      // SigV4 key derivation. The previous version skipped the dateStamp
      // step, which produces an invalid signing key.
      const kDate = await hmac(new TextEncoder().encode(`AWS4${key}`), dateStamp);
      const kRegion = await hmac(kDate, regionName);
      const kService = await hmac(kRegion, serviceName);
      return hmac(kService, "aws4_request");
    };
const signingKey = await getSignatureKey(AWS_SECRET_KEY, dateStamp, AWS_REGION, service);
const signature = await crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", signingKey, {
name: "HMAC",
hash: "SHA-256"
}, false, [
"sign"
]), new TextEncoder().encode(stringToSign)).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''));
const authHeader = `${algorithm} ` + `Credential=${AWS_ACCESS_KEY}/${credentialScope}, ` + `SignedHeaders=${signedHeaders}, ` + `Signature=${signature}`;
// Make request to AWS Rekognition
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': contentType,
'X-Amz-Date': amzDate,
'X-Amz-Target': amzTarget,
'Authorization': authHeader
},
body: JSON.stringify(request)
});
    const data = await response.json();
    // Surface AWS errors instead of reporting an empty result as success
    if (!response.ok) {
      return new Response(JSON.stringify({
        error: 'AWS Rekognition request failed',
        details: data
      }), {
        status: response.status,
        headers: {
          'Content-Type': 'application/json'
        }
      });
    }
    const faceDetails = data.FaceDetails || [];
    return new Response(JSON.stringify({
      success: true,
      faceDetails: faceDetails,
      count: faceDetails.length
    }), {
headers: {
'Content-Type': 'application/json'
}
});
} catch (error) {
console.error("Error in detect-face function:", error);
return new Response(JSON.stringify({
error: "Failed to process the image",
details: error instanceof Error ? error.message : String(error)
}), {
status: 500,
headers: {
'Content-Type': 'application/json'
}
});
}
}); /* To invoke locally:
1. Run `supabase start` (see: https://supabase.com/docs/reference/cli/supabase-start)
  2. Make an HTTP request (the function expects multipart form data with an image file):

  curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/detect-face' \
    --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0' \
    --form 'image=@/path/to/face.jpg'
*/
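
On the app side, a hedged sketch of invoking this function with Dio; functionUrl, anonKey, and the countFaces wrapper are placeholders to fill from the app's own configuration, while the 'image' field name matches what the function reads from the form data:

import 'package:dio/dio.dart';

/// Calls the detect-face edge function with a local image file and returns
/// the number of faces Rekognition found.
Future<int> countFaces(String imagePath) async {
  // Placeholders: take these from the app's own config, not hard-coded values.
  const functionUrl =
      'https://<project-ref>.supabase.co/functions/v1/detect-face';
  const anonKey = '<supabase-anon-key>';

  final form = FormData.fromMap({
    // Field name must match what the function reads: formData.get('image')
    'image': await MultipartFile.fromFile(imagePath, filename: 'face.jpg'),
  });

  final response = await Dio().post(
    functionUrl,
    data: form,
    options: Options(headers: {'Authorization': 'Bearer $anonKey'}),
  );
  return (response.data['count'] as num?)?.toInt() ?? 0;
}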

View File

@ -0,0 +1,25 @@
// Setup type definitions for built-in Supabase Runtime APIs
import "jsr:@supabase/functions-js/edge-runtime.d.ts";
const RESEND_API_KEY = Deno.env.get('RESEND_API_KEY');
Deno.serve(async (req)=>{
  // Fail fast when the API key is missing instead of sending an
  // unauthenticated request to Resend
  if (!RESEND_API_KEY) {
    return new Response(JSON.stringify({
      error: 'RESEND_API_KEY is not configured'
    }), {
      status: 500,
      headers: {
        'Content-Type': 'application/json'
      }
    });
  }
  const { to, subject, html } = await req.json();
  const res = await fetch('https://api.resend.com/emails', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${RESEND_API_KEY}`
    },
    body: JSON.stringify({
      // NOTE: Resend only delivers from addresses on a verified domain;
      // a gmail.com sender will be rejected outside of testing.
      from: 'sigap-support@gmail.com',
      to,
      subject,
      html
    })
  });
  const data = await res.json();
  // Propagate Resend's status code so callers can detect failures
  return new Response(JSON.stringify(data), {
    status: res.status,
    headers: {
      'Content-Type': 'application/json'
    }
  });
});
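
A hedged invocation sketch in Dart; the URL, key, subject, and body are placeholders, while the to/subject/html keys mirror the function's req.json() destructuring:

import 'package:dio/dio.dart';

/// Sends an email through the send-email edge function.
Future<void> sendWelcomeEmail(String recipient) async {
  // Placeholders: supply from the app's own config.
  const functionUrl =
      'https://<project-ref>.supabase.co/functions/v1/send-email';
  const anonKey = '<supabase-anon-key>';

  await Dio().post(
    functionUrl,
    data: {
      'to': recipient,
      'subject': 'Welcome to SIGAP',
      'html': '<p>Your account has been created.</p>',
    },
    options: Options(headers: {'Authorization': 'Bearer $anonKey'}),
  );
}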

View File

@ -0,0 +1,151 @@
// Follow this setup guide to integrate the Deno language server with your editor:
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
// Setup type definitions for built-in Supabase Runtime APIs
import "jsr:@supabase/functions-js/edge-runtime.d.ts";
import { serve } from "https://deno.land/std@0.177.0/http/server.ts";
const AWS_REGION = Deno.env.get('AWS_REGION');
const AWS_ACCESS_KEY = Deno.env.get('AWS_ACCESS_KEY');
const AWS_SECRET_KEY = Deno.env.get('AWS_SECRET_KEY');
serve(async (req)=>{
try {
// Check if we have AWS credentials
if (!AWS_REGION || !AWS_ACCESS_KEY || !AWS_SECRET_KEY) {
return new Response(JSON.stringify({
error: 'AWS credentials are not configured'
}), {
status: 500,
headers: {
'Content-Type': 'application/json'
}
});
}
// Parse the multipart form data to get the images
const formData = await req.formData();
const ktpImage = formData.get('ktp');
const selfieImage = formData.get('selfie');
if (!ktpImage || !(ktpImage instanceof File) || !selfieImage || !(selfieImage instanceof File)) {
return new Response(JSON.stringify({
error: 'Both KTP and selfie images are required'
}), {
status: 400,
headers: {
'Content-Type': 'application/json'
}
});
}
    // Convert images to base64 in chunks; spreading a whole buffer into
    // String.fromCharCode overflows the call stack on typical photo sizes.
    const toBase64 = (buffer)=>{
      const bytes = new Uint8Array(buffer);
      let binary = '';
      for (let i = 0; i < bytes.length; i += 0x8000) {
        binary += String.fromCharCode(...bytes.subarray(i, i + 0x8000));
      }
      return btoa(binary);
    };
    const ktpBase64 = toBase64(await ktpImage.arrayBuffer());
    const selfieBase64 = toBase64(await selfieImage.arrayBuffer());
// Create AWS signature for authorization
const date = new Date();
const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '');
const dateStamp = amzDate.substring(0, 8);
const host = `rekognition.${AWS_REGION}.amazonaws.com`;
const endpoint = `https://${host}/`;
const request = {
"SourceImage": {
"Bytes": ktpBase64
},
"TargetImage": {
"Bytes": selfieBase64
},
"SimilarityThreshold": 70
};
// AWS Signature V4 calculation
const method = 'POST';
const service = 'rekognition';
const contentType = 'application/x-amz-json-1.1';
const amzTarget = 'RekognitionService.CompareFaces';
const canonicalUri = '/';
const canonicalQueryString = '';
const payloadHash = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(JSON.stringify(request))).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''));
const canonicalHeaders = `content-type:${contentType}\n` + `host:${host}\n` + `x-amz-date:${amzDate}\n` + `x-amz-target:${amzTarget}\n`;
const signedHeaders = 'content-type;host;x-amz-date;x-amz-target';
const canonicalRequest = `${method}\n${canonicalUri}\n${canonicalQueryString}\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`;
const algorithm = 'AWS4-HMAC-SHA256';
const credentialScope = `${dateStamp}/${AWS_REGION}/${service}/aws4_request`;
const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${await crypto.subtle.digest("SHA-256", new TextEncoder().encode(canonicalRequest)).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''))}`;
    const getSignatureKey = async (key, dateStamp, regionName, serviceName)=>{
      // HMAC-SHA256(keyBytes, message) -> ArrayBuffer via WebCrypto
      const hmac = async (keyBytes, message)=>crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", keyBytes, {
        name: "HMAC",
        hash: "SHA-256"
      }, false, [
        "sign"
      ]), new TextEncoder().encode(message));
      // SigV4 key derivation. The previous version skipped the dateStamp
      // step, which produces an invalid signing key.
      const kDate = await hmac(new TextEncoder().encode(`AWS4${key}`), dateStamp);
      const kRegion = await hmac(kDate, regionName);
      const kService = await hmac(kRegion, serviceName);
      return hmac(kService, "aws4_request");
    };
const signingKey = await getSignatureKey(AWS_SECRET_KEY, dateStamp, AWS_REGION, service);
const signature = await crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", signingKey, {
name: "HMAC",
hash: "SHA-256"
}, false, [
"sign"
]), new TextEncoder().encode(stringToSign)).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''));
const authHeader = `${algorithm} ` + `Credential=${AWS_ACCESS_KEY}/${credentialScope}, ` + `SignedHeaders=${signedHeaders}, ` + `Signature=${signature}`;
// Make request to AWS Rekognition
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': contentType,
'X-Amz-Date': amzDate,
'X-Amz-Target': amzTarget,
'Authorization': authHeader
},
body: JSON.stringify(request)
});
    const data = await response.json();
    // Surface AWS errors instead of reporting "not matched" on a failed call
    if (!response.ok) {
      return new Response(JSON.stringify({
        error: 'AWS Rekognition request failed',
        details: data
      }), {
        status: response.status,
        headers: {
          'Content-Type': 'application/json'
        }
      });
    }
    // Determine if verification passed
    const faceMatches = data.FaceMatches || [];
    const matched = faceMatches.length > 0;
    const highestSimilarity = matched ? Math.max(...faceMatches.map((match)=>match.Similarity || 0)) : 0;
    return new Response(JSON.stringify({
      success: true,
      matched: matched,
      similarity: highestSimilarity,
      faceMatches: faceMatches,
      unmatchedFaces: data.UnmatchedFaces || []
}), {
headers: {
'Content-Type': 'application/json'
}
});
} catch (error) {
console.error("Error in verify-face function:", error);
return new Response(JSON.stringify({
error: "Failed to verify faces",
details: error instanceof Error ? error.message : String(error)
}), {
status: 500,
headers: {
'Content-Type': 'application/json'
}
});
}
}); /* To invoke locally:
1. Run `supabase start` (see: https://supabase.com/docs/reference/cli/supabase-start)
  2. Make an HTTP request (the function expects multipart form data with both images):

  curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/verify-face' \
    --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0' \
    --form 'ktp=@/path/to/ktp.jpg' \
    --form 'selfie=@/path/to/selfie.jpg'
*/
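
And a matching client-side sketch for the verification flow; functionUrl, anonKey, and the extra client-side threshold check are assumptions, while the ktp/selfie field names and the matched/similarity response keys come from the function above:

import 'package:dio/dio.dart';

/// Uploads the KTP photo and a selfie, then reports whether they match.
Future<bool> verifyIdentity(String ktpPath, String selfiePath) async {
  // Placeholders: supply from the app's own config.
  const functionUrl =
      'https://<project-ref>.supabase.co/functions/v1/verify-face';
  const anonKey = '<supabase-anon-key>';

  final form = FormData.fromMap({
    // Field names must match the function: formData.get('ktp') / get('selfie')
    'ktp': await MultipartFile.fromFile(ktpPath, filename: 'ktp.jpg'),
    'selfie': await MultipartFile.fromFile(selfiePath, filename: 'selfie.jpg'),
  });

  final response = await Dio().post(
    functionUrl,
    data: form,
    options: Options(headers: {'Authorization': 'Bearer $anonKey'}),
  );

  final matched = response.data['matched'] == true;
  final similarity = (response.data['similarity'] as num?)?.toDouble() ?? 0;
  // The function already applies SimilarityThreshold: 70 server-side;
  // repeating the check here guards against response-shape surprises.
  return matched && similarity >= 70;
}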