"use strict";

/*
 * Pairwise face-similarity scanner.
 *
 * Usage: node <script> <image> [<image> ...]
 *
 * Loads the face-api.js SSD MobileNet v1 detector plus the 68-point landmark
 * and face-recognition nets from ./models, detects every face (min confidence
 * 0.8) in each image named on the command line, then prints the euclidean
 * distance between each unique pair of face descriptors (lower = more similar).
 */

process.env.TZ = "Etc/GMT";

// Registers the native TensorFlow backend so face-api.js inference runs natively.
require("@tensorflow/tfjs-node");

const config = require("config"); // NOTE(review): unused below — kept for possible load-time side effects; confirm and remove
const Promise = require("bluebird"); // NOTE(review): shadows native Promise; bluebird's .all/.then are API-compatible here
const { exists, mkdir, unlink } = require("./lib/util"); // NOTE(review): unused in this file — confirm and remove
const faceapi = require("face-api.js");
const fs = require("fs"); // NOTE(review): unused — confirm and remove
const canvas = require("canvas");

// face-api.js targets the browser DOM; give it the node-canvas equivalents
// of the classes it expects.
const { Canvas, Image, ImageData } = canvas;
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });

const maxConcurrency = require("os").cpus().length; // NOTE(review): unused — confirm and remove

require("./console-line.js"); /* Monkey-patch console.log with line numbers */

/**
 * Detect all faces in every image named on the command line (argv[2..]).
 * Images are processed sequentially so progress output stays readable.
 *
 * @returns {Promise<Array<{file: string, index: number, descriptor: Float32Array}>>}
 *   One entry per detected face, tagged with its source file and its index
 *   within that file's detections.
 */
async function collectFaces() {
  const faces = [];
  for (let a = 2; a < process.argv.length; a++) {
    const file = process.argv[a];
    process.stdout.write(`Loading ${file}...`);
    const image = await canvas.loadImage(file);
    const detections = await faceapi
      .detectAllFaces(image, new faceapi.SsdMobilenetv1Options({ minConfidence: 0.8 }))
      .withFaceLandmarks()
      .withFaceDescriptors();
    process.stdout.write(`${detections.length} faces.\n`);
    detections.forEach((face, index) => {
      faces.push({ file, index, descriptor: face.descriptor });
    });
  }
  return faces;
}

/**
 * Print the euclidean distance between every unique pair of detected faces.
 *
 * Fix: the original iterated every ordered pair (i, j), which printed each
 * distance twice AND compared every face against itself (a trivial distance
 * of 0). The inner loop now starts at i + 1 so each unordered pair is
 * reported exactly once.
 *
 * @param {Array<{file: string, index: number, descriptor: Float32Array}>} faces
 */
function reportDistances(faces) {
  for (let i = 0; i < faces.length; i++) {
    for (let j = i + 1; j < faces.length; j++) {
      const a = faces[i];
      const b = faces[j];
      const distance = faceapi.euclideanDistance(a.descriptor, b.descriptor);
      console.log(`${a.file}.${a.index} to ${b.file}.${b.index} = ${distance}`);
    }
  }
}

// The three model loads are independent of one another, so load them in
// parallel instead of chaining them sequentially as the original did.
Promise.all([
  faceapi.nets.ssdMobilenetv1
    .loadFromDisk("./models")
    .then(() => console.log("ssdMobileNetv1 loaded.")),
  faceapi.nets.faceLandmark68Net
    .loadFromDisk("./models")
    .then(() => console.log("landmark68 loaded.")),
  faceapi.nets.faceRecognitionNet
    .loadFromDisk("./models")
    .then(() => console.log("faceRecognitionNet loaded.")),
])
  .then(collectFaces)
  .then(reportDistances)
  .then(() => {
    console.log("Face detection scanning completed.");
  })
  .catch((error) => {
    console.error(error);
    // Fix: the original used -1, which wraps to exit code 255; 1 is the
    // conventional generic-failure code.
    process.exit(1);
  });