add edge functions to detect and verify faces with AWS Rekognition
This commit is contained in:
parent
d216088b32
commit
512b29c54d
File diff suppressed because it is too large
@@ -11,6 +11,7 @@
     "seed:reset": "ts-node --compiler-options {\"module\":\"CommonJS\"} prisma/seed.ts"
   },
   "dependencies": {
+    "@aws-sdk/client-rekognition": "^3.449.0",
     "@evyweb/ioctopus": "^1.2.0",
     "@faker-js/faker": "^9.7.0",
     "@hookform/resolvers": "^4.1.2",
@@ -47,6 +48,7 @@
     "@turf/turf": "^7.2.0",
     "@types/mapbox-gl": "^3.4.1",
     "autoprefixer": "10.4.20",
+    "aws-sdk": "^2.1692.0",
     "class-variance-authority": "^0.7.0",
     "clsx": "^2.1.1",
     "cobe": "^0.6.3",
@@ -77,8 +79,7 @@
     "uuid": "^11.1.0",
     "vaul": "^1.1.2",
     "zod": "^3.24.2",
-    "zustand": "^5.0.3",
-    "@aws-sdk/client-rekognition": "^3.449.0"
+    "zustand": "^5.0.3"
   },
   "devDependencies": {
     "@snaplet/copycat": "^6.0.0",
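The @aws-sdk/client-rekognition dependency added above is the same package the edge functions below pull in via the npm: specifier. On the app side the new functions are typically called through supabase-js; a minimal sketch, assuming supabase-js v2 and the usual NEXT_PUBLIC_* environment variables (names assumed, not part of this diff):

import { createClient } from '@supabase/supabase-js';

const supabase = createClient(
  process.env.NEXT_PUBLIC_SUPABASE_URL!,
  process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!
);

// file: a File picked from an <input type="file"> element
export async function detectFaces(file: File) {
  const form = new FormData();
  form.append('image', file);
  // functions.invoke sends multipart/form-data when given a FormData body
  const { data, error } = await supabase.functions.invoke('detect-face', { body: form });
  if (error) throw error;
  return data; // { success, faceDetails, count, ... }
}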
@@ -1,142 +1,295 @@
// detect-face/index.ts
// Follow this setup guide to integrate the Deno language server with your editor:
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.

// Setup type definitions for built-in Supabase Runtime APIs
import "jsr:@supabase/functions-js/edge-runtime.d.ts";
import { serve } from "https://deno.land/std@0.177.0/http/server.ts";
-const AWS_REGION = Deno.env.get('AWS_REGION');
-const AWS_ACCESS_KEY = Deno.env.get('AWS_ACCESS_KEY');
-const AWS_SECRET_KEY = Deno.env.get('AWS_SECRET_KEY');
-serve(async (req)=>{
-  console.log('AWS_REGION:', AWS_REGION);
-  console.log('AWS_ACCESS_KEY:', AWS_ACCESS_KEY?.slice(0, 5)); // for security, partial only
-  console.log('AWS_SECRET_KEY:', AWS_SECRET_KEY?.slice(0, 5)); // for security, partial only
+import { RekognitionClient, DetectFacesCommand } from "npm:@aws-sdk/client-rekognition@^3.0.0";
+import {
+  validateAWSCredentials,
+  validateFile,
+  createErrorResponse,
+  createSuccessResponse,
+  type AWSCredentials
+} from "../shared/aws-utils.ts";

interface DetectFaceResponse {
|
||||
success: true;
|
||||
faceDetails: any[];
|
||||
count: number;
|
||||
imageSize?: number;
|
||||
processingTime?: number;
|
||||
}
|
||||
|
||||
// Configuration constants
|
||||
const MAX_FILE_SIZE = 15 * 1024 * 1024; // 15MB
|
||||
const ALLOWED_MIME_TYPES = ['image/jpeg', 'image/jpg', 'image/png'];
|
||||
|
||||
// Logger utility for structured logs
|
||||
const logger = {
|
||||
info: (message: string, data?: any) => {
|
||||
console.log(`[INFO] [${new Date().toISOString()}] [detect-face] ${message}`, data ? data : '');
|
||||
},
|
||||
warn: (message: string, data?: any) => {
|
||||
console.warn(`[WARN] [${new Date().toISOString()}] [detect-face] ${message}`, data ? data : '');
|
||||
},
|
||||
error: (message: string, error?: any) => {
|
||||
console.error(`[ERROR] [${new Date().toISOString()}] [detect-face] ${message}`, error ? error : '');
|
||||
},
|
||||
debug: (message: string, data?: any) => {
|
||||
console.debug(`[DEBUG] [${new Date().toISOString()}] [detect-face] ${message}`, data ? data : '');
|
||||
},
|
||||
success: (message: string, data?: any) => {
|
||||
console.log(`[SUCCESS] [${new Date().toISOString()}] [detect-face] ${message}`, data ? data : '');
|
||||
}
|
||||
};
|
||||
|
||||
async function fileToUint8Array(file: File): Promise<Uint8Array> {
|
||||
const buffer = await file.arrayBuffer();
|
||||
return new Uint8Array(buffer);
|
||||
}
|
||||
|
||||
serve(async (req: Request): Promise<Response> => {
|
||||
const startTime = Date.now();
|
||||
const requestId = crypto.randomUUID();
|
||||
|
||||
logger.info(`Starting face detection request [ID: ${requestId}]`);
|
||||
|
||||
try {
|
||||
// Check if we have AWS credentials
|
||||
if (!AWS_REGION || !AWS_ACCESS_KEY || !AWS_SECRET_KEY) {
|
||||
return new Response(JSON.stringify({
|
||||
error: 'AWS credentials are not configured'
|
||||
}), {
|
||||
status: 500,
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
});
|
||||
}
|
||||
// Parse the multipart form data to get the image
|
||||
const formData = await req.formData();
|
||||
const image = formData.get('image');
|
||||
if (!image || !(image instanceof File)) {
|
||||
return new Response(JSON.stringify({
|
||||
error: 'Image file is required'
|
||||
}), {
|
||||
status: 400,
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
});
|
||||
}
|
||||
// Convert image to base64
|
||||
const imageBuffer = await image.arrayBuffer();
|
||||
const base64Image = btoa(String.fromCharCode(...new Uint8Array(imageBuffer)));
|
||||
// Create AWS signature for authorization
|
||||
const date = new Date();
|
||||
const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '');
|
||||
const dateStamp = amzDate.substring(0, 8);
|
||||
const host = `rekognition.${AWS_REGION}.amazonaws.com`;
|
||||
const endpoint = `https://${host}/`;
|
||||
const request = {
|
||||
"Image": {
|
||||
"Bytes": base64Image
|
||||
},
|
||||
"Attributes": [
|
||||
"ALL"
|
||||
]
|
||||
};
|
||||
// AWS Signature V4 calculation
|
||||
const method = 'POST';
|
||||
const service = 'rekognition';
|
||||
const contentType = 'application/x-amz-json-1.1';
|
||||
const amzTarget = 'RekognitionService.DetectFaces';
|
||||
const canonicalUri = '/';
|
||||
const canonicalQueryString = '';
|
||||
const payloadHash = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(JSON.stringify(request))).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''));
|
||||
const canonicalHeaders = `content-type:${contentType}\n` + `host:${host}\n` + `x-amz-date:${amzDate}\n` + `x-amz-target:${amzTarget}\n`;
|
||||
const signedHeaders = 'content-type;host;x-amz-date;x-amz-target';
|
||||
const canonicalRequest = `${method}\n${canonicalUri}\n${canonicalQueryString}\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`;
|
||||
const algorithm = 'AWS4-HMAC-SHA256';
|
||||
const credentialScope = `${dateStamp}/${AWS_REGION}/${service}/aws4_request`;
|
||||
const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${await crypto.subtle.digest("SHA-256", new TextEncoder().encode(canonicalRequest)).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''))}`;
|
||||
const getSignatureKey = async (key, dateStamp, regionName, serviceName)=>{
|
||||
const kDate = await crypto.subtle.importKey("raw", new TextEncoder().encode(`AWS4${key}`), {
|
||||
name: "HMAC",
|
||||
hash: "SHA-256"
|
||||
}, false, [
|
||||
"sign"
|
||||
]);
|
||||
const kRegion = await crypto.subtle.sign("HMAC", kDate, new TextEncoder().encode(regionName));
|
||||
const kService = await crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", kRegion, {
|
||||
name: "HMAC",
|
||||
hash: "SHA-256"
|
||||
}, false, [
|
||||
"sign"
|
||||
]), new TextEncoder().encode(serviceName));
|
||||
return crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", kService, {
|
||||
name: "HMAC",
|
||||
hash: "SHA-256"
|
||||
}, false, [
|
||||
"sign"
|
||||
]), new TextEncoder().encode("aws4_request"));
|
||||
};
|
||||
const signingKey = await getSignatureKey(AWS_SECRET_KEY, dateStamp, AWS_REGION, service);
|
||||
const signature = await crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", signingKey, {
|
||||
name: "HMAC",
|
||||
hash: "SHA-256"
|
||||
}, false, [
|
||||
"sign"
|
||||
]), new TextEncoder().encode(stringToSign)).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''));
|
||||
const authHeader = `${algorithm} ` + `Credential=${AWS_ACCESS_KEY}/${credentialScope}, ` + `SignedHeaders=${signedHeaders}, ` + `Signature=${signature}`;
|
||||
// Make request to AWS Rekognition
|
||||
const response = await fetch(endpoint, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': contentType,
|
||||
'X-Amz-Date': amzDate,
|
||||
'X-Amz-Target': amzTarget,
|
||||
'Authorization': authHeader
|
||||
},
|
||||
body: JSON.stringify(request)
|
||||
});
|
||||
const data = await response.json();
|
||||
return new Response(JSON.stringify({
|
||||
success: true,
|
||||
faceDetails: data.FaceDetails || [],
|
||||
count: (data.FaceDetails || []).length
|
||||
}), {
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
// Validate HTTP method
|
||||
if (req.method !== 'POST') {
|
||||
logger.warn(`Invalid HTTP method ${req.method} [ID: ${requestId}]`);
|
||||
return createErrorResponse('Method not allowed. Use POST', 405);
|
||||
}
|
||||
|
||||
// Validate AWS credentials
|
||||
logger.debug(`Validating AWS credentials [ID: ${requestId}]`);
|
||||
const { credentials, error: credError } = validateAWSCredentials();
|
||||
if (credError || !credentials) {
|
||||
logger.error(`AWS credentials validation failed [ID: ${requestId}]`, credError);
|
||||
return createErrorResponse(credError || 'AWS credentials not configured');
|
||||
}
|
||||
|
||||
logger.debug(`AWS Region: ${credentials.region} [ID: ${requestId}]`);
|
||||
|
||||
// Initialize Rekognition client
|
||||
logger.debug(`Initializing Rekognition client [ID: ${requestId}]`);
|
||||
const rekognitionClient = new RekognitionClient({
|
||||
region: credentials.region,
|
||||
credentials: {
|
||||
accessKeyId: credentials.accessKey,
|
||||
secretAccessKey: credentials.secretKey
|
||||
}
|
||||
});
|
||||
|
||||
// Parse multipart form data
|
||||
logger.debug(`Parsing form data [ID: ${requestId}]`);
|
||||
let formData: FormData;
|
||||
try {
|
||||
formData = await req.formData();
|
||||
} catch (error) {
|
||||
console.error("Error in detect-face function:", error);
|
||||
return new Response(JSON.stringify({
|
||||
error: "Failed to process the image",
|
||||
details: error instanceof Error ? error.message : String(error)
|
||||
}), {
|
||||
status: 500,
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
logger.error(`Failed to parse form data [ID: ${requestId}]`, error);
|
||||
return createErrorResponse('Invalid form data. Expected multipart/form-data', 400);
|
||||
}
|
||||
|
||||
// Get and validate image file
|
||||
const image = formData.get('image') as File | null;
|
||||
logger.debug(`Validating image [ID: ${requestId}]`);
|
||||
const validation = validateFile(image, 'image');
|
||||
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`Image validation failed [ID: ${requestId}]`, validation.error);
|
||||
return createErrorResponse(validation.error || 'Invalid image file', 400);
|
||||
}
|
||||
|
||||
logger.info(`Processing image [ID: ${requestId}]`, {
|
||||
name: image!.name,
|
||||
size: `${(image!.size / 1024).toFixed(2)} KB`,
|
||||
type: image!.type
|
||||
});
|
||||
|
||||
// Convert image to Uint8Array
|
||||
logger.debug(`Converting image to binary format [ID: ${requestId}]`);
|
||||
let imageBytes: Uint8Array;
|
||||
try {
|
||||
imageBytes = await fileToUint8Array(image!);
|
||||
} catch (error) {
|
||||
logger.error(`Failed to convert image to bytes [ID: ${requestId}]`, error);
|
||||
return createErrorResponse('Failed to process image data', 500);
|
||||
}
|
||||
|
||||
// Create DetectFaces command
|
||||
logger.info(`Sending request to AWS Rekognition DetectFaces [ID: ${requestId}]`);
|
||||
const detectFacesCommand = new DetectFacesCommand({
|
||||
Image: {
|
||||
Bytes: imageBytes
|
||||
},
|
||||
Attributes: ["ALL"]
|
||||
});
|
||||
|
||||
// Execute AWS Rekognition request
|
||||
let rekognitionResponse: any;
|
||||
try {
|
||||
rekognitionResponse = await rekognitionClient.send(detectFacesCommand);
|
||||
} catch (error) {
|
||||
logger.error(`AWS Rekognition request failed [ID: ${requestId}]`, error);
|
||||
|
||||
// Handle specific AWS errors
|
||||
if (error.name === 'InvalidImageFormatException') {
|
||||
return createErrorResponse('Invalid image format. Please use JPEG or PNG format', 400);
|
||||
} else if (error.name === 'ImageTooLargeException') {
|
||||
return createErrorResponse('Image too large. Please reduce image size', 400);
|
||||
} else if (error.name === 'InvalidParameterException') {
|
||||
return createErrorResponse('Invalid parameters provided', 400);
|
||||
} else if (error.name === 'InvalidS3ObjectException') {
|
||||
return createErrorResponse('Invalid image data', 400);
|
||||
}
|
||||
|
||||
return createErrorResponse('Failed to analyze image with AWS Rekognition', 500);
|
||||
}
|
||||
|
||||
const processingTime = Date.now() - startTime;
|
||||
const faceCount = (rekognitionResponse.FaceDetails || []).length;
|
||||
|
||||
logger.success(`Face detection completed [ID: ${requestId}]`, {
|
||||
facesDetected: faceCount,
|
||||
processingTime: `${processingTime}ms`,
|
||||
imageSize: `${(image!.size / 1024).toFixed(2)} KB`
|
||||
});
|
||||
|
||||
// Log detailed face information if faces are detected
|
||||
if (faceCount > 0) {
|
||||
const faceDetails = rekognitionResponse.FaceDetails.map((face: any, index: number) => {
|
||||
return {
|
||||
faceIndex: index + 1,
|
||||
ageRange: face.AgeRange ? `${face.AgeRange.Low}-${face.AgeRange.High}` : 'Unknown',
|
||||
gender: face.Gender ? `${face.Gender.Value} (${face.Gender.Confidence.toFixed(2)}%)` : 'Unknown',
|
||||
emotions: face.Emotions ?
|
||||
face.Emotions.map((e: any) => `${e.Type}(${e.Confidence.toFixed(2)}%)`).join(', ') :
|
||||
'None detected',
|
||||
quality: face.Quality ?
|
||||
`Brightness=${face.Quality.Brightness.toFixed(2)}, Sharpness=${face.Quality.Sharpness.toFixed(2)}` :
|
||||
'Unknown'
|
||||
};
|
||||
});
|
||||
|
||||
logger.info(`Face details [ID: ${requestId}]`, { faces: faceDetails });
|
||||
} else {
|
||||
logger.info(`No faces detected in image [ID: ${requestId}]`);
|
||||
}
|
||||
|
||||
// Prepare response
|
||||
const response: DetectFaceResponse = {
|
||||
success: true,
|
||||
faceDetails: rekognitionResponse.FaceDetails || [],
|
||||
count: faceCount,
|
||||
imageSize: image!.size,
|
||||
processingTime
|
||||
};
|
||||
|
||||
return createSuccessResponse(response);
|
||||
|
||||
} catch (error) {
|
||||
const processingTime = Date.now() - startTime;
|
||||
logger.error(`Unexpected error in detect-face function [ID: ${requestId}]`, error);
|
||||
logger.error(`Processing time before error: ${processingTime}ms [ID: ${requestId}]`);
|
||||
|
||||
return createErrorResponse(
|
||||
"An unexpected error occurred while processing the image",
|
||||
500
|
||||
);
|
||||
  }
});

/*
To invoke locally:
|
||||
|
||||
1. Run `supabase start` (see: https://supabase.com/docs/reference/cli/supabase-start)
|
||||
2. Make an HTTP request:
|
||||
|
||||
curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/detect-face' \
  --header 'Authorization: Bearer [YOUR_ANON_KEY]' \
  --form 'image=@"path/to/your/image.jpg"'
|
||||
|
||||
Example response:
|
||||
{
|
||||
"success": true,
|
||||
"faceDetails": [
|
||||
{
|
||||
"BoundingBox": {
|
||||
"Width": 0.23,
|
||||
"Height": 0.34,
|
||||
"Left": 0.35,
|
||||
"Top": 0.25
|
||||
},
|
||||
"AgeRange": {
|
||||
"Low": 25,
|
||||
"High": 35
|
||||
},
|
||||
"Smile": {
|
||||
"Value": true,
|
||||
"Confidence": 95.5
|
||||
},
|
||||
"Eyeglasses": {
|
||||
"Value": false,
|
||||
"Confidence": 99.2
|
||||
},
|
||||
"Sunglasses": {
|
||||
"Value": false,
|
||||
"Confidence": 99.8
|
||||
},
|
||||
"Gender": {
|
||||
"Value": "Male",
|
||||
"Confidence": 96.8
|
||||
},
|
||||
"Beard": {
|
||||
"Value": false,
|
||||
"Confidence": 85.6
|
||||
},
|
||||
"Mustache": {
|
||||
"Value": false,
|
||||
"Confidence": 90.3
|
||||
},
|
||||
"EyesOpen": {
|
||||
"Value": true,
|
||||
"Confidence": 98.7
|
||||
},
|
||||
"MouthOpen": {
|
||||
"Value": false,
|
||||
"Confidence": 89.4
|
||||
},
|
||||
"Emotions": [
|
||||
{
|
||||
"Type": "HAPPY",
|
||||
"Confidence": 92.5
|
||||
},
|
||||
{
|
||||
"Type": "CALM",
|
||||
"Confidence": 5.2
|
||||
}
|
||||
],
|
||||
"Landmarks": [...],
|
||||
"Pose": {
|
||||
"Roll": -2.1,
|
||||
"Yaw": 1.8,
|
||||
"Pitch": -3.5
|
||||
},
|
||||
"Quality": {
|
||||
"Brightness": 78.5,
|
||||
"Sharpness": 95.2
|
||||
},
|
||||
"Confidence": 99.8
|
||||
}
|
||||
],
|
||||
"count": 1,
|
||||
"imageSize": 1048576,
|
||||
"processingTime": 1250
|
||||
}
|
||||
|
||||
Environment Variables Required:
|
||||
- AWS_ACCESS_KEY_ID
|
||||
- AWS_SECRET_ACCESS_KEY
|
||||
- AWS_REGION
|
||||
*/
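/*
The same request from a TypeScript/Deno script (a sketch; the URL, file path and key are placeholders, exactly as in the curl example above):

  const bytes = await Deno.readFile('path/to/your/image.jpg');
  const form = new FormData();
  form.append('image', new Blob([bytes], { type: 'image/jpeg' }), 'image.jpg');

  const res = await fetch('http://127.0.0.1:54321/functions/v1/detect-face', {
    method: 'POST',
    headers: { Authorization: `Bearer ${Deno.env.get('SUPABASE_ANON_KEY')}` },
    body: form, // fetch sets the multipart boundary automatically
  });
  const result = await res.json();
  console.log(result.count, 'face(s) detected');
*/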
|
|
@@ -0,0 +1,258 @@
|
|||
// utils/aws-utils.ts
|
||||
// Shared utilities for AWS Rekognition functions
|
||||
|
||||
export interface AWSCredentials {
|
||||
region: string;
|
||||
accessKey: string;
|
||||
secretKey: string;
|
||||
}
|
||||
|
||||
export interface ValidationResult {
|
||||
isValid: boolean;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate AWS credentials from environment variables
|
||||
*/
|
||||
export function validateAWSCredentials(): { credentials?: AWSCredentials; error?: string } {
|
||||
const region = Deno.env.get('AWS_REGION');
|
||||
const accessKey = Deno.env.get('AWS_ACCESS_KEY_ID');
|
||||
const secretKey = Deno.env.get('AWS_SECRET_ACCESS_KEY');
|
||||
|
||||
const requiredEnvVars = [
|
||||
{ name: 'AWS_REGION', value: region },
|
||||
{ name: 'AWS_ACCESS_KEY_ID', value: accessKey },
|
||||
{ name: 'AWS_SECRET_ACCESS_KEY', value: secretKey }
|
||||
];
|
||||
|
||||
const missingVars = requiredEnvVars
|
||||
.filter(envVar => !envVar.value)
|
||||
.map(envVar => envVar.name);
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
return {
|
||||
error: `Missing required environment variables: ${missingVars.join(', ')}`
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
credentials: {
|
||||
region: region!,
|
||||
accessKey: accessKey!,
|
||||
secretKey: secretKey!
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate uploaded file
|
||||
*/
|
||||
export function validateFile(file: File | null, fieldName: string): ValidationResult {
|
||||
if (!file || !(file instanceof File)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: `${fieldName} file is required`
|
||||
};
|
||||
}
|
||||
|
||||
// Check file size (max 5MB)
|
||||
const MAX_FILE_SIZE = 5 * 1024 * 1024;
|
||||
if (file.size > MAX_FILE_SIZE) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: `${fieldName} file size too large. Maximum 5MB allowed`
|
||||
};
|
||||
}
|
||||
|
||||
// Check file type
|
||||
const ALLOWED_TYPES = ['image/jpeg', 'image/jpg', 'image/png'];
|
||||
if (!ALLOWED_TYPES.includes(file.type)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: `Invalid ${fieldName} file type. Only JPEG and PNG allowed`
|
||||
};
|
||||
}
|
||||
|
||||
return { isValid: true };
|
||||
}
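/*
  Example (sketch only) of how a handler is expected to combine these helpers;
  the 'image' field name matches detect-face, everything else is illustrative:

  const { credentials, error } = validateAWSCredentials();
  if (error || !credentials) {
    return createErrorResponse(error ?? 'AWS credentials not configured');
  }
  const image = formData.get('image') as File | null;
  const check = validateFile(image, 'image');
  if (!check.isValid) {
    return createErrorResponse(check.error ?? 'Invalid image file', 400);
  }
*/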
|
||||
|
||||
/**
|
||||
* Convert file to base64
|
||||
*/
|
||||
export async function fileToBase64(file: File): Promise<string> {
|
||||
const buffer = await file.arrayBuffer();
|
||||
return btoa(String.fromCharCode(...new Uint8Array(buffer)));
|
||||
}
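// Note: spreading a large Uint8Array into String.fromCharCode can exceed the
// engine's argument limit for multi-megabyte images. If that becomes an issue,
// a chunked variant avoids it (sketch; 32 KB chunk size is an assumption):
//
//   let binary = '';
//   const CHUNK = 0x8000;
//   for (let i = 0; i < bytes.length; i += CHUNK) {
//     binary += String.fromCharCode(...bytes.subarray(i, i + CHUNK));
//   }
//   return btoa(binary);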
|
||||
|
||||
/**
|
||||
* Create AWS Signature V4 for Rekognition requests
|
||||
*/
|
||||
export async function createAWSSignature(
|
||||
request: any,
|
||||
service: string,
|
||||
target: string,
|
||||
credentials: AWSCredentials
|
||||
): Promise<{ authHeader: string; amzDate: string }> {
|
||||
const date = new Date();
|
||||
const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '');
|
||||
const dateStamp = amzDate.substring(0, 8);
|
||||
const host = `${service}.${credentials.region}.amazonaws.com`;
|
||||
|
||||
// Request details
|
||||
const method = 'POST';
|
||||
const contentType = 'application/x-amz-json-1.1';
|
||||
const canonicalUri = '/';
|
||||
const canonicalQueryString = '';
|
||||
|
||||
// Create payload hash
|
||||
const payloadHash = await crypto.subtle.digest(
|
||||
"SHA-256",
|
||||
new TextEncoder().encode(JSON.stringify(request))
|
||||
).then(hash =>
|
||||
Array.from(new Uint8Array(hash))
|
||||
.map(b => b.toString(16).padStart(2, '0'))
|
||||
.join('')
|
||||
);
|
||||
|
||||
// Create canonical headers
|
||||
const canonicalHeaders =
|
||||
`content-type:${contentType}\n` +
|
||||
`host:${host}\n` +
|
||||
`x-amz-date:${amzDate}\n` +
|
||||
`x-amz-target:${target}\n`;
|
||||
|
||||
const signedHeaders = 'content-type;host;x-amz-date;x-amz-target';
|
||||
|
||||
// Create canonical request
|
||||
const canonicalRequest =
|
||||
`${method}\n${canonicalUri}\n${canonicalQueryString}\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`;
|
||||
|
||||
// Create string to sign
|
||||
const algorithm = 'AWS4-HMAC-SHA256';
|
||||
const credentialScope = `${dateStamp}/${credentials.region}/${service}/aws4_request`;
|
||||
|
||||
const canonicalRequestHash = await crypto.subtle.digest(
|
||||
"SHA-256",
|
||||
new TextEncoder().encode(canonicalRequest)
|
||||
).then(hash =>
|
||||
Array.from(new Uint8Array(hash))
|
||||
.map(b => b.toString(16).padStart(2, '0'))
|
||||
.join('')
|
||||
);
|
||||
|
||||
const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${canonicalRequestHash}`;
|
||||
|
||||
// Create signing key
|
||||
const signingKey = await getSignatureKey(
|
||||
credentials.secretKey,
|
||||
dateStamp,
|
||||
credentials.region,
|
||||
service
|
||||
);
|
||||
|
||||
// Create signature
|
||||
const signature = await crypto.subtle.sign(
|
||||
"HMAC",
|
||||
await crypto.subtle.importKey("raw", signingKey, { name: "HMAC", hash: "SHA-256" }, false, ["sign"]),
|
||||
new TextEncoder().encode(stringToSign)
|
||||
).then(hash =>
|
||||
Array.from(new Uint8Array(hash))
|
||||
.map(b => b.toString(16).padStart(2, '0'))
|
||||
.join('')
|
||||
);
|
||||
|
||||
// Create authorization header
|
||||
const authHeader =
|
||||
`${algorithm} ` +
|
||||
`Credential=${credentials.accessKey}/${credentialScope}, ` +
|
||||
`SignedHeaders=${signedHeaders}, ` +
|
||||
`Signature=${signature}`;
|
||||
|
||||
return { authHeader, amzDate };
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to create AWS signing key
|
||||
*/
|
||||
async function getSignatureKey(
|
||||
key: string,
|
||||
dateStamp: string,
|
||||
regionName: string,
|
||||
serviceName: string
|
||||
): Promise<ArrayBuffer> {
|
||||
const kDate = await crypto.subtle.importKey(
|
||||
"raw",
|
||||
new TextEncoder().encode(`AWS4${key}`),
|
||||
{ name: "HMAC", hash: "SHA-256" },
|
||||
false,
|
||||
["sign"]
|
||||
);
|
||||
|
||||
const kRegion = await crypto.subtle.sign(
|
||||
"HMAC",
|
||||
kDate,
|
||||
new TextEncoder().encode(regionName)
|
||||
);
|
||||
|
||||
const kService = await crypto.subtle.sign(
|
||||
"HMAC",
|
||||
await crypto.subtle.importKey("raw", kRegion, { name: "HMAC", hash: "SHA-256" }, false, ["sign"]),
|
||||
new TextEncoder().encode(serviceName)
|
||||
);
|
||||
|
||||
return crypto.subtle.sign(
|
||||
"HMAC",
|
||||
await crypto.subtle.importKey("raw", kService, { name: "HMAC", hash: "SHA-256" }, false, ["sign"]),
|
||||
new TextEncoder().encode("aws4_request")
|
||||
);
|
||||
}
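// The derivation above follows the standard SigV4 chain:
//   kSigning = HMAC(HMAC(HMAC(HMAC("AWS4" + secretKey, dateStamp), region), service), "aws4_request")
// and the final request signature is HMAC(kSigning, stringToSign), hex-encoded.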
|
||||
|
||||
/**
|
||||
* Make request to AWS Rekognition
|
||||
*/
|
||||
export async function makeRekognitionRequest(
|
||||
request: any,
|
||||
target: string,
|
||||
credentials: AWSCredentials
|
||||
): Promise<any> {
|
||||
const { authHeader, amzDate } = await createAWSSignature(request, 'rekognition', target, credentials);
|
||||
const endpoint = `https://rekognition.${credentials.region}.amazonaws.com/`;
|
||||
|
||||
const response = await fetch(endpoint, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/x-amz-json-1.1',
|
||||
'X-Amz-Date': amzDate,
|
||||
'X-Amz-Target': target,
|
||||
'Authorization': authHeader
|
||||
},
|
||||
body: JSON.stringify(request)
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(`AWS Rekognition request failed: ${response.status} ${errorText}`);
|
||||
}
|
||||
|
||||
return response.json();
|
||||
}
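/*
  Example (sketch) of the raw-HTTP path these helpers support, here for DetectFaces;
  `image` is assumed to be a validated File, and the X-Amz-Target string is the one
  used elsewhere in this commit:

  const { credentials } = validateAWSCredentials();
  const payload = { Image: { Bytes: await fileToBase64(image) }, Attributes: ["ALL"] };
  const result = await makeRekognitionRequest(payload, 'RekognitionService.DetectFaces', credentials!);
  console.log((result.FaceDetails ?? []).length, 'face(s) detected');
*/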
|
||||
|
||||
/**
|
||||
* Create error response
|
||||
*/
|
||||
export function createErrorResponse(error: string, status: number = 500): Response {
|
||||
return new Response(JSON.stringify({ error }), {
|
||||
status,
|
||||
headers: { 'Content-Type': 'application/json' }
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create success response
|
||||
*/
|
||||
export function createSuccessResponse(data: any): Response {
|
||||
return new Response(JSON.stringify(data), {
|
||||
headers: { 'Content-Type': 'application/json' }
|
||||
});
|
||||
}
|
|
@@ -1,151 +1,283 @@
// verify-face/index.ts
// Follow this setup guide to integrate the Deno language server with your editor:
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.

// Setup type definitions for built-in Supabase Runtime APIs
import "jsr:@supabase/functions-js/edge-runtime.d.ts";
import { serve } from "https://deno.land/std@0.177.0/http/server.ts";
-const AWS_REGION = Deno.env.get('AWS_REGION');
-const AWS_ACCESS_KEY = Deno.env.get('AWS_ACCESS_KEY');
-const AWS_SECRET_KEY = Deno.env.get('AWS_SECRET_KEY');
-serve(async (req)=>{
+import { RekognitionClient, CompareFacesCommand } from "npm:@aws-sdk/client-rekognition@^3.0.0";
+import {
+  validateAWSCredentials,
+  validateFile,
+  createErrorResponse,
+  createSuccessResponse
+} from "../shared/aws-utils.ts";

interface VerifyFaceResponse {
|
||||
success: true;
|
||||
matched: boolean;
|
||||
similarity: number;
|
||||
similarityThreshold: number;
|
||||
faceMatches: any[];
|
||||
unmatchedFaces: any[];
|
||||
idCardImageSize?: number;
|
||||
selfieImageSize?: number;
|
||||
processingTime?: number;
|
||||
confidence?: 'HIGH' | 'MEDIUM' | 'LOW';
|
||||
}
|
||||
|
||||
// Configuration constants
|
||||
const DEFAULT_SIMILARITY_THRESHOLD = 70;
|
||||
const HIGH_CONFIDENCE_THRESHOLD = 85;
|
||||
const MEDIUM_CONFIDENCE_THRESHOLD = 75;
|
||||
const MAX_FILE_SIZE = 15 * 1024 * 1024; // 15MB
|
||||
const ALLOWED_MIME_TYPES = ['image/jpeg', 'image/jpg', 'image/png'];
|
||||
|
||||
// Logger utility for structured logs
|
||||
const logger = {
|
||||
info: (message: string, data?: any) => {
|
||||
console.log(`[INFO] [${new Date().toISOString()}] [verify-face] ${message}`, data ? data : '');
|
||||
},
|
||||
warn: (message: string, data?: any) => {
|
||||
console.warn(`[WARN] [${new Date().toISOString()}] [verify-face] ${message}`, data ? data : '');
|
||||
},
|
||||
error: (message: string, error?: any) => {
|
||||
console.error(`[ERROR] [${new Date().toISOString()}] [verify-face] ${message}`, error ? error : '');
|
||||
},
|
||||
debug: (message: string, data?: any) => {
|
||||
console.debug(`[DEBUG] [${new Date().toISOString()}] [verify-face] ${message}`, data ? data : '');
|
||||
},
|
||||
success: (message: string, data?: any) => {
|
||||
console.log(`[SUCCESS] [${new Date().toISOString()}] [verify-face] ${message}`, data ? data : '');
|
||||
}
|
||||
};
|
||||
|
||||
async function fileToUint8Array(file: File): Promise<Uint8Array> {
|
||||
const buffer = await file.arrayBuffer();
|
||||
return new Uint8Array(buffer);
|
||||
}
|
||||
|
||||
serve(async (req: Request): Promise<Response> => {
|
||||
const startTime = Date.now();
|
||||
const requestId = crypto.randomUUID();
|
||||
|
||||
logger.info(`Starting face verification request [ID: ${requestId}]`);
|
||||
|
||||
try {
|
||||
// Check if we have AWS credentials
|
||||
if (!AWS_REGION || !AWS_ACCESS_KEY || !AWS_SECRET_KEY) {
|
||||
return new Response(JSON.stringify({
|
||||
error: 'AWS credentials are not configured'
|
||||
}), {
|
||||
status: 500,
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
// Validate HTTP method
|
||||
if (req.method !== 'POST') {
|
||||
logger.warn(`Invalid HTTP method ${req.method} [ID: ${requestId}]`);
|
||||
return createErrorResponse('Method not allowed. Use POST', 405);
|
||||
}
|
||||
|
||||
// Validate AWS credentials
|
||||
logger.debug(`Validating AWS credentials [ID: ${requestId}]`);
|
||||
const { credentials, error: credError } = validateAWSCredentials();
|
||||
if (credError || !credentials) {
|
||||
logger.error(`AWS credentials validation failed [ID: ${requestId}]`, credError);
|
||||
return createErrorResponse(credError || 'AWS credentials not configured');
|
||||
}
|
||||
|
||||
logger.debug(`AWS Region: ${credentials.region} [ID: ${requestId}]`);
|
||||
|
||||
// Initialize Rekognition client
|
||||
logger.debug(`Initializing Rekognition client [ID: ${requestId}]`);
|
||||
const rekognitionClient = new RekognitionClient({
|
||||
region: credentials.region,
|
||||
credentials: {
|
||||
accessKeyId: credentials.accessKey,
|
||||
secretAccessKey: credentials.secretKey
|
||||
}
|
||||
});
|
||||
|
||||
// Parse multipart form data
|
||||
logger.debug(`Parsing form data [ID: ${requestId}]`);
|
||||
let formData: FormData;
|
||||
try {
|
||||
formData = await req.formData();
|
||||
} catch (error) {
|
||||
logger.error(`Failed to parse form data [ID: ${requestId}]`, error);
|
||||
return createErrorResponse('Invalid form data. Expected multipart/form-data', 400);
|
||||
}
|
||||
// Parse the multipart form data to get the images
|
||||
const formData = await req.formData();
|
||||
const ktpImage = formData.get('ktp');
|
||||
const selfieImage = formData.get('selfie');
|
||||
if (!ktpImage || !(ktpImage instanceof File) || !selfieImage || !(selfieImage instanceof File)) {
|
||||
return new Response(JSON.stringify({
|
||||
error: 'Both KTP and selfie images are required'
|
||||
}), {
|
||||
status: 400,
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
|
||||
// Get and validate both images
|
||||
const idCardImage = formData.get('idCard') as File | null;
|
||||
const selfieImage = formData.get('selfie') as File | null;
|
||||
|
||||
// Validate IDCARD image
|
||||
logger.debug(`Validating IDCARD image [ID: ${requestId}]`);
|
||||
const idCardValidation = validateFile(idCardImage, 'IDCARD');
|
||||
if (!idCardValidation.isValid) {
|
||||
logger.warn(`IDCARD image validation failed [ID: ${requestId}]`, idCardValidation.error);
|
||||
return createErrorResponse(idCardValidation.error || 'Invalid IDCARD image', 400);
|
||||
}
|
||||
|
||||
// Validate selfie image
|
||||
logger.debug(`Validating selfie image [ID: ${requestId}]`);
|
||||
const selfieValidation = validateFile(selfieImage, 'selfie');
|
||||
if (!selfieValidation.isValid) {
|
||||
logger.warn(`Selfie image validation failed [ID: ${requestId}]`, selfieValidation.error);
|
||||
return createErrorResponse(selfieValidation.error || 'Invalid selfie image', 400);
|
||||
}
|
||||
|
||||
// Log image details
|
||||
logger.info(`Processing images [ID: ${requestId}]`, {
|
||||
idCard: {
|
||||
name: idCardImage!.name,
|
||||
size: `${(idCardImage!.size / 1024).toFixed(2)} KB`,
|
||||
type: idCardImage!.type
|
||||
},
|
||||
selfie: {
|
||||
name: selfieImage!.name,
|
||||
size: `${(selfieImage!.size / 1024).toFixed(2)} KB`,
|
||||
type: selfieImage!.type
|
||||
}
|
||||
});
|
||||
|
||||
// Get similarity threshold from form data or use default
|
||||
const thresholdParam = formData.get('similarity_threshold');
|
||||
let similarityThreshold = DEFAULT_SIMILARITY_THRESHOLD;
|
||||
|
||||
if (thresholdParam) {
|
||||
const parsedThreshold = parseFloat(thresholdParam.toString());
|
||||
if (!isNaN(parsedThreshold) && parsedThreshold >= 0 && parsedThreshold <= 100) {
|
||||
similarityThreshold = parsedThreshold;
|
||||
logger.debug(`Using custom similarity threshold: ${similarityThreshold}% [ID: ${requestId}]`);
|
||||
} else {
|
||||
logger.warn(`Invalid similarity threshold provided, using default: ${DEFAULT_SIMILARITY_THRESHOLD}% [ID: ${requestId}]`);
|
||||
}
|
||||
// Convert images to base64
|
||||
const ktpBuffer = await ktpImage.arrayBuffer();
|
||||
const selfieBuffer = await selfieImage.arrayBuffer();
|
||||
const ktpBase64 = btoa(String.fromCharCode(...new Uint8Array(ktpBuffer)));
|
||||
const selfieBase64 = btoa(String.fromCharCode(...new Uint8Array(selfieBuffer)));
|
||||
// Create AWS signature for authorization
|
||||
const date = new Date();
|
||||
const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '');
|
||||
const dateStamp = amzDate.substring(0, 8);
|
||||
const host = `rekognition.${AWS_REGION}.amazonaws.com`;
|
||||
const endpoint = `https://${host}/`;
|
||||
const request = {
|
||||
"SourceImage": {
|
||||
"Bytes": ktpBase64
|
||||
},
|
||||
"TargetImage": {
|
||||
"Bytes": selfieBase64
|
||||
},
|
||||
"SimilarityThreshold": 70
|
||||
};
|
||||
// AWS Signature V4 calculation
|
||||
const method = 'POST';
|
||||
const service = 'rekognition';
|
||||
const contentType = 'application/x-amz-json-1.1';
|
||||
const amzTarget = 'RekognitionService.CompareFaces';
|
||||
const canonicalUri = '/';
|
||||
const canonicalQueryString = '';
|
||||
const payloadHash = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(JSON.stringify(request))).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''));
|
||||
const canonicalHeaders = `content-type:${contentType}\n` + `host:${host}\n` + `x-amz-date:${amzDate}\n` + `x-amz-target:${amzTarget}\n`;
|
||||
const signedHeaders = 'content-type;host;x-amz-date;x-amz-target';
|
||||
const canonicalRequest = `${method}\n${canonicalUri}\n${canonicalQueryString}\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`;
|
||||
const algorithm = 'AWS4-HMAC-SHA256';
|
||||
const credentialScope = `${dateStamp}/${AWS_REGION}/${service}/aws4_request`;
|
||||
const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${await crypto.subtle.digest("SHA-256", new TextEncoder().encode(canonicalRequest)).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''))}`;
|
||||
const getSignatureKey = async (key, dateStamp, regionName, serviceName)=>{
|
||||
const kDate = await crypto.subtle.importKey("raw", new TextEncoder().encode(`AWS4${key}`), {
|
||||
name: "HMAC",
|
||||
hash: "SHA-256"
|
||||
}, false, [
|
||||
"sign"
|
||||
}
|
||||
|
||||
// Convert images to Uint8Array
|
||||
logger.debug(`Converting images to binary format [ID: ${requestId}]`);
|
||||
let idCardBytes: Uint8Array, selfieBytes: Uint8Array;
|
||||
try {
|
||||
[idCardBytes, selfieBytes] = await Promise.all([
|
||||
fileToUint8Array(idCardImage!),
|
||||
fileToUint8Array(selfieImage!)
|
||||
]);
|
||||
const kRegion = await crypto.subtle.sign("HMAC", kDate, new TextEncoder().encode(regionName));
|
||||
const kService = await crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", kRegion, {
|
||||
name: "HMAC",
|
||||
hash: "SHA-256"
|
||||
}, false, [
|
||||
"sign"
|
||||
]), new TextEncoder().encode(serviceName));
|
||||
return crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", kService, {
|
||||
name: "HMAC",
|
||||
hash: "SHA-256"
|
||||
}, false, [
|
||||
"sign"
|
||||
]), new TextEncoder().encode("aws4_request"));
|
||||
};
|
||||
const signingKey = await getSignatureKey(AWS_SECRET_KEY, dateStamp, AWS_REGION, service);
|
||||
const signature = await crypto.subtle.sign("HMAC", await crypto.subtle.importKey("raw", signingKey, {
|
||||
name: "HMAC",
|
||||
hash: "SHA-256"
|
||||
}, false, [
|
||||
"sign"
|
||||
]), new TextEncoder().encode(stringToSign)).then((hash)=>Array.from(new Uint8Array(hash)).map((b)=>b.toString(16).padStart(2, '0')).join(''));
|
||||
const authHeader = `${algorithm} ` + `Credential=${AWS_ACCESS_KEY}/${credentialScope}, ` + `SignedHeaders=${signedHeaders}, ` + `Signature=${signature}`;
|
||||
// Make request to AWS Rekognition
|
||||
const response = await fetch(endpoint, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': contentType,
|
||||
'X-Amz-Date': amzDate,
|
||||
'X-Amz-Target': amzTarget,
|
||||
'Authorization': authHeader
|
||||
},
|
||||
body: JSON.stringify(request)
|
||||
});
|
||||
const data = await response.json();
|
||||
// Determine if verification passed
|
||||
const matched = !!(data.FaceMatches && data.FaceMatches.length > 0);
|
||||
let highestSimilarity = 0;
|
||||
if (matched && data.FaceMatches && data.FaceMatches.length > 0) {
|
||||
highestSimilarity = Math.max(...data.FaceMatches.map((match)=>match.Similarity || 0));
|
||||
} catch (error) {
|
||||
logger.error(`Failed to convert images to bytes [ID: ${requestId}]`, error);
|
||||
return createErrorResponse('Failed to process image data', 500);
|
||||
}
|
||||
return new Response(JSON.stringify({
|
||||
|
||||
// Create CompareFaces command
|
||||
logger.info(`Sending request to AWS Rekognition CompareFaces with threshold: ${similarityThreshold}% [ID: ${requestId}]`);
|
||||
const compareFacesCommand = new CompareFacesCommand({
|
||||
SourceImage: {
|
||||
Bytes: idCardBytes
|
||||
},
|
||||
TargetImage: {
|
||||
Bytes: selfieBytes
|
||||
},
|
||||
SimilarityThreshold: similarityThreshold
|
||||
});
|
||||
|
||||
// Execute AWS Rekognition request
|
||||
let rekognitionResponse: any;
|
||||
try {
|
||||
rekognitionResponse = await rekognitionClient.send(compareFacesCommand);
|
||||
} catch (error) {
|
||||
logger.error(`AWS Rekognition request failed [ID: ${requestId}]`, error);
|
||||
|
||||
// Handle specific AWS errors
|
||||
if (error.name === 'InvalidImageFormatException') {
|
||||
return createErrorResponse('Invalid image format. Please use JPEG or PNG format', 400);
|
||||
} else if (error.name === 'ImageTooLargeException') {
|
||||
return createErrorResponse('Image too large. Please reduce image size', 400);
|
||||
} else if (error.name === 'InvalidParameterException') {
|
||||
return createErrorResponse('Invalid parameters provided', 400);
|
||||
}
|
||||
|
||||
return createErrorResponse('Failed to compare faces with AWS Rekognition', 500);
|
||||
}
|
||||
|
||||
// Process results
|
||||
const processingTime = Date.now() - startTime;
|
||||
const matched = !!(rekognitionResponse.FaceMatches && rekognitionResponse.FaceMatches.length > 0);
|
||||
|
||||
let highestSimilarity = 0;
|
||||
if (matched && rekognitionResponse.FaceMatches && rekognitionResponse.FaceMatches.length > 0) {
|
||||
highestSimilarity = Math.max(...rekognitionResponse.FaceMatches.map((match: any) => match.Similarity || 0));
|
||||
}
|
||||
|
||||
// Determine confidence level
|
||||
let confidence: 'HIGH' | 'MEDIUM' | 'LOW';
|
||||
if (highestSimilarity >= HIGH_CONFIDENCE_THRESHOLD) {
|
||||
confidence = 'HIGH';
|
||||
} else if (highestSimilarity >= MEDIUM_CONFIDENCE_THRESHOLD) {
|
||||
confidence = 'MEDIUM';
|
||||
} else {
|
||||
confidence = 'LOW';
|
||||
}
|
||||
|
||||
// Log results in a structured format
|
||||
logger.success(`Face verification completed [ID: ${requestId}]`, {
|
||||
matched: matched,
|
||||
similarity: `${highestSimilarity.toFixed(2)}%`,
|
||||
confidence: confidence,
|
||||
processingTime: `${processingTime}ms`,
|
||||
faceMatchesCount: rekognitionResponse.FaceMatches?.length || 0,
|
||||
unmatchedFacesCount: rekognitionResponse.UnmatchedFaces?.length || 0
|
||||
});
|
||||
|
||||
// Prepare response
|
||||
const response: VerifyFaceResponse = {
|
||||
success: true,
|
||||
matched: matched,
|
||||
similarity: highestSimilarity,
|
||||
faceMatches: data.FaceMatches || [],
|
||||
unmatchedFaces: data.UnmatchedFaces || []
|
||||
}), {
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
});
|
||||
similarity: parseFloat(highestSimilarity.toFixed(2)),
|
||||
similarityThreshold: similarityThreshold,
|
||||
faceMatches: rekognitionResponse.FaceMatches || [],
|
||||
unmatchedFaces: rekognitionResponse.UnmatchedFaces || [],
|
||||
idCardImageSize: idCardImage!.size,
|
||||
selfieImageSize: selfieImage!.size,
|
||||
processingTime,
|
||||
confidence
|
||||
};
|
||||
|
||||
return createSuccessResponse(response);
|
||||
|
||||
} catch (error) {
|
||||
console.error("Error in verify-face function:", error);
|
||||
return new Response(JSON.stringify({
|
||||
error: "Failed to verify faces",
|
||||
details: error instanceof Error ? error.message : String(error)
|
||||
}), {
|
||||
status: 500,
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
const processingTime = Date.now() - startTime;
|
||||
logger.error(`Unexpected error in verify-face function [ID: ${requestId}]`, error);
|
||||
logger.error(`Processing time before error: ${processingTime}ms [ID: ${requestId}]`);
|
||||
|
||||
return createErrorResponse(
|
||||
"An unexpected error occurred while verifying faces",
|
||||
500
|
||||
);
|
||||
  }
});

/*
To invoke locally:
|
||||
|
||||
1. Run `supabase start` (see: https://supabase.com/docs/reference/cli/supabase-start)
|
||||
2. Make an HTTP request:
|
||||
|
||||
curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/verify-face' \
  --header 'Authorization: Bearer [YOUR_ANON_KEY]' \
  --form 'idCard=@"path/to/idCard.jpg"' \
  --form 'selfie=@"path/to/selfie.jpg"' \
  --form 'similarity_threshold=75'
|
||||
|
||||
Example response:
|
||||
{
|
||||
"success": true,
|
||||
"matched": true,
|
||||
"similarity": 87.45,
|
||||
"similarityThreshold": 75,
|
||||
"faceMatches": [...],
|
||||
"unmatchedFaces": [],
|
||||
"idCardImageSize": 1048576,
|
||||
"selfieImageSize": 2097152,
|
||||
"processingTime": 1450,
|
||||
"confidence": "HIGH"
|
||||
}
|
||||
|
||||
Environment Variables Required:
|
||||
- AWS_ACCESS_KEY_ID
|
||||
- AWS_SECRET_ACCESS_KEY
|
||||
- AWS_REGION
|
||||
*/
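/*
A matching TypeScript sketch of the same request (the URL and key are placeholders,
and the File/key variables are assumed to come from the caller):

  declare const idCardFile: File;
  declare const selfieFile: File;
  declare const anonKey: string;

  const form = new FormData();
  form.append('idCard', idCardFile);
  form.append('selfie', selfieFile);
  form.append('similarity_threshold', '75');

  const res = await fetch('http://127.0.0.1:54321/functions/v1/verify-face', {
    method: 'POST',
    headers: { Authorization: `Bearer ${anonKey}` },
    body: form,
  });
  const { matched, similarity, confidence } = await res.json();
  // e.g. matched === true with confidence 'HIGH' once similarity >= 85 (HIGH_CONFIDENCE_THRESHOLD above)
*/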
|