// script.js
// Browser-side face registration with a liveness (head-nod) challenge and an
// anti-spoofing check, built on face-api.js plus a custom TF.js graph model.

document.addEventListener("DOMContentLoaded", () => {
  startVideo();
});

/**
 * Requests the user's camera and wires the stream into the page's
 * <video>/<canvas> pair, sizing both to a fixed 225x225 square.
 * Errors (e.g. permission denied) are logged, not rethrown.
 */
async function startVideo() {
  const desiredWidth = 225;
  const desiredHeight = 225;

  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      video: { width: desiredWidth, height: desiredHeight },
    });

    const video = document.querySelector('video');   // requires a <video> element in the HTML
    const canvas = document.querySelector('canvas'); // requires a <canvas> element in the HTML

    video.srcObject = stream;
    video.width = desiredWidth;
    video.height = desiredHeight;
    canvas.width = desiredWidth;
    canvas.height = desiredHeight;
  } catch (err) {
    console.error('Error accessing media devices:', err);
  }
}

// Start face recognition shortly after the host page signals readiness.
window.addEventListener('fetchDescriptor', () => {
  setTimeout(() => {
    // FIX: was an implicit global (`referenceDescriptors = {}`); declare it.
    const referenceDescriptors = {};
    initializeFaceRecognition(referenceDescriptors);
  }, 2000); // give the camera stream time to come up before detection starts
});

/**
 * Runs the full registration pipeline: validates the DOM elements, loads the
 * face-api.js models and the anti-spoofing model, then polls the video every
 * 100 ms. Once the user passes the liveness challenge with a real, confident
 * face, 10 descriptors are collected, averaged, and dispatched to the host
 * page via a synthetic "click" event on the check button.
 *
 * @param {Object} referenceDescriptors - currently unused placeholder passed
 *   by the caller; kept for interface compatibility.
 */
async function initializeFaceRecognition(referenceDescriptors) {
  const video = document.getElementById("video");
  const canvas = document.getElementById("canvas");
  const checkButton = window.parent.document.getElementById('check-btn');

  const confidenceThreshold = 0.70;   // minimum detector score to accept a face
  const numDescriptorsToCollect = 10; // descriptors averaged into one template
  let collectedDescriptors = [];
  let isCollecting = false;
  let intervalId;

  // Liveness-challenge state.
  let challenge = null;
  let challengeStartTime = 0;
  const CHALLENGE_TIMEOUT = 30000; // ms allowed to complete the challenge
  let challengeSuccess = false;

  // Head-nod detection state.
  const HEAD_NOD_HISTORY_LENGTH = 5; // frames of nose positions to track
  const NOD_THRESHOLD = 3;           // min vertical movement (px); tune by testing
  let headPositionHistory = [];
  let nodDetected = false;

  // Guide-oval radii as fractions of the canvas size, shared by the drawing
  // routine and the alignment test.
  // FIX: the alignment check previously used 0.4/0.5 while the drawn guide
  // used 0.35/0.45 (despite a comment claiming they matched), so "aligned"
  // did not correspond to the visible oval.
  const GUIDE_RADIUS_X = 0.35;
  const GUIDE_RADIUS_Y = 0.45;

  // Pick a random liveness challenge (currently only "nod" exists).
  function generateChallenge() {
    const challenges = ["nod"];
    const randomIndex = Math.floor(Math.random() * challenges.length);
    return challenges[randomIndex];
  }

  // Stroke the face-positioning oval, centered in the canvas.
  function drawFaceGuide(ctx, width, height, color) {
    ctx.save();
    ctx.strokeStyle = color;
    ctx.lineWidth = 3;
    ctx.beginPath();
    ctx.ellipse(
      width / 2, height / 2,
      width * GUIDE_RADIUS_X, height * GUIDE_RADIUS_Y,
      0, 0, 2 * Math.PI
    );
    ctx.stroke();
    ctx.restore();
  }

  // --- Pre-flight validation ---
  // FIX: existence is now checked before `video` is dereferenced.
  if (!video || !canvas || !checkButton) {
    console.error('Video, canvas, or button element not found!');
    return;
  }

  if (!(video instanceof HTMLVideoElement)) {
    console.error("Error: 'video' is NOT an HTMLVideoElement.", video);
    location.reload(); // reload and hope the page wires up correctly next time
    return;
  }

  if (!video.srcObject) {
    console.error("Error: 'video' element has no srcObject (no stream).", video.srcObject);
    location.reload();
    return;
  }

  const ctx = canvas.getContext('2d');
  if (!ctx) {
    console.error('Could not get 2D context!');
    return;
  }

  // Load all face-api.js models in parallel.
  await Promise.all([
    faceapi.nets.tinyFaceDetector.loadFromUri('/web/models'),
    faceapi.nets.faceLandmark68Net.loadFromUri('/web/models'),
    faceapi.nets.faceRecognitionNet.loadFromUri('/web/models'),
    faceapi.nets.faceExpressionNet.loadFromUri('/web/models'),
  ]);

  // Use the TF.js instance bundled with face-api.js instead of a global `tf`.
  const tf = faceapi.tf;
  const spoofModel = await tf.loadGraphModel('/web/models/anti-spoofing.json');

  // NOTE(review): videoWidth/videoHeight are 0 until "loadedmetadata" fires;
  // the 2 s delay before this function runs is assumed to cover that — confirm.
  const displaySize = { width: video.videoWidth, height: video.videoHeight };
  faceapi.matchDimensions(canvas, displaySize);

  intervalId = setInterval(async () => {
    const detections = await faceapi
      .detectAllFaces(video, new faceapi.TinyFaceDetectorOptions())
      .withFaceLandmarks()
      .withFaceDescriptors()
      .withFaceExpressions();
    const resizedDetections = faceapi.resizeResults(detections, displaySize);

    ctx.clearRect(0, 0, canvas.width, canvas.height);
    drawFaceGuide(ctx, canvas.width, canvas.height, "red");

    // Exactly one face is required to proceed.
    if (resizedDetections.length > 1) {
      console.warn("Multiple faces detected. Waiting for a single face.");
      ctx.font = "20px Arial";
      ctx.fillStyle = "red";
      return;
    }

    if (resizedDetections.length === 0) {
      console.warn("No faces detected.");
      ctx.font = "20px Arial";
      ctx.fillStyle = "red";
      return;
    }

    const detection = resizedDetections[0];
    const box = detection.detection.box;

    // --- Alignment check: is the face center inside the guide ellipse? ---
    const guideX = canvas.width / 2;
    const guideY = canvas.height / 2;
    const radiusX = canvas.width * GUIDE_RADIUS_X;
    const radiusY = canvas.height * GUIDE_RADIUS_Y;
    const faceCenterX = box.x + box.width / 2;
    const faceCenterY = box.y + box.height / 2;

    // Standard ellipse containment: (dx/rx)^2 + (dy/ry)^2 <= 1.
    const isAligned =
      Math.pow((faceCenterX - guideX) / radiusX, 2) +
        Math.pow((faceCenterY - guideY) / radiusY, 2) <= 1;
    if (isAligned) {
      // Redraw the guide green; red was already drawn above.
      drawFaceGuide(ctx, canvas.width, canvas.height, "green");
    }

    const SPOOF_INPUT_SIZE = 128; // model expects 128x128 input
    const SPOOF_THRESHOLD = 0.8;

    // --- Anti-spoofing: classify the cropped face region ---
    let isRealFace = false;
    try {
      const regions = await faceapi.extractFaces(video, [detection.detection.box]);
      if (regions && regions.length > 0) {
        const tensor = faceapi.tf.browser.fromPixels(regions[0])
          .resizeBilinear([SPOOF_INPUT_SIZE, SPOOF_INPUT_SIZE])
          .toFloat()
          .div(255.0) // normalize to [0,1]
          .expandDims(0);

        const predictions = await spoofModel.predict(tensor);
        const score = predictions.dataSync()[0]; // single scalar output assumed
        tensor.dispose();
        predictions.dispose(); // FIX: output tensor was leaked every frame

        // NOTE(review): treated as a spoof score — lower means "real".
        // Confirm the model's output polarity.
        isRealFace = score < SPOOF_THRESHOLD;
      }
    } catch (error) {
      console.error('Anti-spoofing error:', error);
    }

    // Show the active challenge prompt.
    if (challenge) {
      // FIX: font family list needs a comma; the old shorthand was invalid
      // CSS and the assignment silently failed.
      ctx.font = "24px Montserrat, sans-serif";
      ctx.fillStyle = "cyan";
      ctx.fillText(`Please ${challenge}`, 10, 80);
    }

    // Fail the challenge on timeout.
    if (challenge && (Date.now() - challengeStartTime > CHALLENGE_TIMEOUT)) {
      console.warn("Challenge failed: timeout. Potential spoof!");
      ctx.font = "20px Montserrat, sans-serif";
      ctx.fillStyle = "red";
      ctx.fillText("Liveness check failed: timeout!", 10, 110);
      challenge = null;
      challengeSuccess = false;
    }

    // Issue a fresh challenge when none is active.
    if (!challenge) {
      challenge = generateChallenge();
      challengeStartTime = Date.now();
      challengeSuccess = false;
      nodDetected = false;
    }

    // --- Challenge success detection: head nod ---
    if (challenge === "nod") {
      // First landmark of the nose group (NOTE(review): this is the top of
      // the nose bridge in the 68-point model, not the tip — confirm).
      const nosePoint = detection.landmarks.getNose()[0];
      headPositionHistory.push({ x: nosePoint.x, y: nosePoint.y });

      if (headPositionHistory.length > HEAD_NOD_HISTORY_LENGTH) {
        headPositionHistory.shift(); // keep a fixed-size sliding window
      }

      if (headPositionHistory.length === HEAD_NOD_HISTORY_LENGTH) {
        // Net signed vertical displacement across the window.
        // NOTE(review): a complete nod (down then back up) can cancel to ~0
        // here; as written this detects sustained vertical motion within the
        // window — confirm that is the intended behavior.
        let yDiffSum = 0;
        for (let i = 1; i < HEAD_NOD_HISTORY_LENGTH; i++) {
          yDiffSum += headPositionHistory[i].y - headPositionHistory[i - 1].y;
        }

        if (Math.abs(yDiffSum) > NOD_THRESHOLD) {
          console.log("Head nod detected!");
          nodDetected = true;
          challengeSuccess = true;
        }
      }

      if (nodDetected) {
        console.log("Nod Liveness challenge passed!");
        challengeSuccess = true;
      }
    }

    // --- Descriptor collection: confident, real face + passed challenge ---
    if (detection.detection.score > confidenceThreshold && isRealFace) {
      if (!isCollecting && challengeSuccess) {
        isCollecting = true;
        collectedDescriptors = [];
        console.log("Collecting new descriptors...");
      }

      if (isCollecting &&
          collectedDescriptors.length < numDescriptorsToCollect &&
          challengeSuccess) {
        collectedDescriptors.push(detection.descriptor);

        if (collectedDescriptors.length === numDescriptorsToCollect) {
          console.log("Descriptor collection complete.");

          const averageDescriptor = calculateAverageDescriptor(collectedDescriptors);
          if (averageDescriptor) {
            // The host page listens for a synthetic "click" on the check
            // button and reads the descriptor from event.detail.
            const checkEvent = new CustomEvent('click', {
              detail: {
                descriptor: JSON.stringify(Array.from(averageDescriptor)),
              },
            });
            checkButton.dispatchEvent(checkEvent);
            console.log("Check data dispatched!", checkEvent.detail);

            clearInterval(intervalId); // done — stop polling
          } else {
            console.warn("Could not calculate average descriptor. Registration aborted.");
            isCollecting = false;
          }
        }
      }
    } else {
      if (!isRealFace) {
        ctx.fillStyle = "red";
        ctx.font = "20px Montserrat, sans-serif";
        ctx.fillText("Potential spoof detected!", 10, 110);
      }
      // Prevent login/registration on a spoofed or low-confidence face.
      return;
    }
  }, 100);

  /**
   * Element-wise mean of a list of equal-length numeric descriptors.
   * Entries with the wrong length are skipped with a warning.
   *
   * FIX: the average is now divided by the number of descriptors actually
   * summed (skipped entries previously still counted in the divisor, biasing
   * the result toward zero). Also removed the parseFloat(x.toFixed(7))
   * round-tripping, which only discarded precision.
   *
   * @param {Array<Float32Array|number[]>} descriptors
   * @returns {Float64Array|null} the mean descriptor, or null for empty input
   */
  function calculateAverageDescriptor(descriptors) {
    if (!descriptors || descriptors.length === 0) {
      return null;
    }

    const descriptorSize = descriptors[0].length; // 128 for face-api.js
    const sumDescriptor = new Float64Array(descriptorSize);
    let usedCount = 0;

    for (const descriptor of descriptors) {
      if (!descriptor || descriptor.length !== descriptorSize) {
        console.warn("Invalid descriptor length in calculateAverageDescriptor. Skipping.");
        continue;
      }
      for (let j = 0; j < descriptorSize; j++) {
        sumDescriptor[j] += descriptor[j];
      }
      usedCount++;
    }

    if (usedCount === 0) {
      return null;
    }

    const averageDescriptor = new Float64Array(descriptorSize);
    for (let j = 0; j < descriptorSize; j++) {
      averageDescriptor[j] = sumDescriptor[j] / usedCount;
    }
    return averageDescriptor;
  }
}