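/*
 * Face scanning utility: arguments are face ids (looked up in the photo DB)
 * or image paths relative to picturesPath; with no arguments, every row in
 * the faces table is processed. Faces are detected and aligned with
 * face-api.js, 128-D descriptors are computed, and per-face crops are
 * written out (rotation-*.png for image paths, <id>-original.png for ids).
 */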
"use strict";

process.env.TZ = "Etc/GMT";

require('@tensorflow/tfjs-node');

let photoDB = null;

const config = require("config"),
  Promise = require("bluebird"),
  { exists, mkdir, unlink } = require("./lib/util"),
  faceapi = require("face-api.js"),
  fs = require("fs"),
  canvas = require("canvas");

const { createCanvas, Canvas, Image, ImageData } = canvas;

faceapi.env.monkeyPatch({ Canvas, Image, ImageData });

const maxConcurrency = require("os").cpus().length;

require("./console-line.js"); /* Monkey-patch console.log with line numbers */

const picturesPath = config.get("picturesPath").replace(/\/$/, "") + "/",
  facesPath = config.get("facesPath").replace(/\/$/, "") + "/";

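/*
 * alignFromLandmarks: draw `image` onto a 512x512 canvas, rotated and scaled
 * so that the eyes sit level, centered horizontally, with the eye line placed
 * `faceMargin` of the way down the canvas. Landmark indices 36 and 45 are the
 * outer eye corners in the 68-point landmark model used by face-api.js: the
 * eye-to-eye vector gives the rotation angle, and its length sets the scale
 * so that the eye distance spans the non-margin portion of the output width.
 * When `drawLandmarks` is set, the landmark points are traced in red for
 * debugging.
 */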
function alignFromLandmarks(image, landmarks, drawLandmarks) {
  const faceMargin = 0.45,
    width = 512, height = 512,
    dY = landmarks._positions[45]._y - landmarks._positions[36]._y,
    dX = landmarks._positions[45]._x - landmarks._positions[36]._x,
    mid = { /* midpoint between the outer eye corners */
      x: landmarks._positions[36]._x + 0.5 * dX,
      y: landmarks._positions[36]._y + 0.5 * dY
    },
    rotation = -Math.atan2(dY, dX),
    cosRotation = Math.cos(rotation),
    sinRotation = Math.sin(rotation),
    eyeDistance = Math.sqrt(dY * dY + dX * dX),
    scale = width * (1.0 - 2. * faceMargin) / eyeDistance,
    canvas = createCanvas(width, height),
    ctx = canvas.getContext("2d");

  /* Rotate the eye midpoint into the rotated coordinate frame so the
   * translate() below places it at the desired spot on the output canvas */
  const prime = {
    x: mid.x * cosRotation - mid.y * sinRotation,
    y: mid.y * cosRotation + mid.x * sinRotation
  };

  mid.x = prime.x;
  mid.y = prime.y;

  ctx.translate(
    0.5 * width - mid.x * scale,
    0.5 * height - (height * (0.5 - faceMargin)) - mid.y * scale);
  ctx.rotate(rotation);
  ctx.scale(scale, scale);
  ctx.drawImage(image, 0, 0);

  if (drawLandmarks) {
    ctx.strokeStyle = "red";
    ctx.lineWidth = 1;
    ctx.beginPath();
    landmarks._positions.forEach((point, index) => {
      if (index == 0) {
        ctx.moveTo(point._x, point._y);
      } else {
        ctx.lineTo(point._x, point._y);
      }
    });
    ctx.stroke();
  }

  return canvas;
}

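/*
 * Usage sketch (illustrative only; mirrors how the function is called further
 * down in this file): align a face-api.js detection and save the result.
 *
 *   const result = await faceapi.detectSingleFace(image).withFaceLandmarks();
 *   const aligned = alignFromLandmarks(image, result.landmarks, false);
 *   fs.writeFileSync("aligned.png", aligned.toBuffer("image/png"));
 */
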
process.stdout.write("Loading DB.");
require("./db/photos").then(function(db) {
  process.stdout.write("done\n");
  photoDB = db;
}).then(() => {
  console.log("DB connected.");
  process.stdout.write("Loading models.");
  return faceapi.nets.ssdMobilenetv1.loadFromDisk('./models');
}).then(() => {
  process.stdout.write(".");
  return faceapi.nets.faceLandmark68Net.loadFromDisk('./models');
}).then(() => {
  process.stdout.write(".");
  return faceapi.nets.faceRecognitionNet.loadFromDisk('./models');
}).then(async () => {
  process.stdout.write(".done\n");

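  /* Command line (after the node binary and script name are shifted off):
   * each remaining argument is either a numeric face id from the faces table
   * or an image path relative to picturesPath; with no arguments, every face
   * id in the database is processed. */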
  if (process.argv[0].match(/node/)) {
    process.argv.shift(); /* node */
  }
  process.argv.shift(); /* script name */

  return Promise.resolve().then(() => {
    if (process.argv.length != 0) {
      return process.argv;
    }

    /* If no parameters provided, scan all faces to create image crops */
    return photoDB.sequelize.query("SELECT id FROM faces ORDER BY id ASC", {
      type: photoDB.sequelize.QueryTypes.SELECT,
      raw: true
    }).then((results) => {
      return results.map(result => result.id);
    });
  });
}).then((args) => {
  const faces = [];

  console.log(`Scanning ${args.length} faces.`);
  return Promise.map(args, (arg) => {
    const file = arg;
    let id = parseInt(arg, 10);

    let loader;

    if (id == file) {
      /* The argument is numeric, so treat it as a face id: look the face up
       * in the database and reuse its stored bounds and descriptor rather
       * than re-running detection. */
      console.log(`Looking up face-id ${id}...`);
      loader = photoDB.sequelize.query(
        "SELECT albums.path,photos.filename,photos.width,photos.height,faces.* " +
        "FROM faces,photos,albums " +
        "WHERE photos.id=faces.photoId " +
        "AND albums.id=photos.albumId " +
        "AND faces.id=:id", {
        replacements: {
          id: id
        },
        type: photoDB.sequelize.QueryTypes.SELECT,
        raw: true
      }).then((results) => {
        if (results.length != 1) {
          console.error(`...error. No face-id found: ${id}.\n`);
          process.exit(-1);
        }
        const photo = results[0];
        console.log(`...loading ${photo.filename}`);

        const file = photo.path + photo.filename;
        return canvas.loadImage(picturesPath + file).then(async (image) => {
          /* Synthesize a detection-shaped entry from the stored face bounds
           * (fractions of the photo dimensions) and the saved descriptor */
          const detectors = [ {
            detection: {
              _box: {
                _x: photo.left * photo.width,
                _y: photo.top * photo.height,
                _width: (photo.right - photo.left) * photo.width,
                _height: (photo.bottom - photo.top) * photo.height,
              }
            },
            descriptor: JSON.parse(fs.readFileSync(facesPath + (id % 100) + "/" + id + "-data.json"))
          } ];
          return [ file, image, detectors ];
        });
      });
    } else {
      /* This is a file path: run detection on the image itself */
      console.log(`Loading ${file}...`);
      id = undefined;
      loader = canvas.loadImage(picturesPath + file).then(async (image) => {
        const detectors = await faceapi.detectAllFaces(image,
          new faceapi.SsdMobilenetv1Options({
            minConfidence: 0.9
          })
        ).withFaceLandmarks();

        /* Process the faces sequentially so every descriptor is filled in
         * before the detectors array is returned below */
        for (const [index, detector] of detectors.entries()) {
          const canvas = alignFromLandmarks(image, detector.landmarks, false);
          fs.writeFileSync(`rotation-pre-${index}.png`, canvas.toBuffer("image/png", {
            quality: 0.95,
            chromaSubsampling: false
          }));
          /* Re-detect on the aligned crop to refine the landmarks */
          const detected = await faceapi.detectSingleFace(canvas,
            new faceapi.SsdMobilenetv1Options({
              minConfidence: 0.1
            })
          ).withFaceLandmarks();
          const descriptor = await faceapi.computeFaceDescriptor(canvas);
          console.log(`Processing face ${index}...`);
          console.log(`...pre aligned score: ${detector.detection._score}`);
          if (!detected) {
            console.log("No face found in re-scaled and aligned image");
            continue;
          }
          console.log(`...post-aligned score: ${detected.detection._score}`);
          const newCanvas = alignFromLandmarks(canvas, detected.landmarks, true);

          fs.writeFileSync(`rotation-post-${index}.png`, newCanvas.toBuffer("image/png", {
            quality: 0.95,
            chromaSubsampling: false
          }));

          console.log(`Wrote rotation-pre-${index}.png and rotation-post-${index}.png`);

          const data = [];
          /* Convert from typed array to a plain dense array */
          for (let i = 0; i < 128; i++) {
            data.push(descriptor[i]);
          }
          detector.descriptor = data;
        }

        return [ file, image, detectors ];
      });
    }

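    /* Both branches resolve `loader` to [ filepath, image, detectors ] */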
    return loader.then((results) => {
      const filepath = results[0],
        image = results[1],
        detectors = results[2];

      process.stdout.write(`${detectors.length} faces.\n`);

      return Promise.map(detectors, (face, index) => {
        faces.push({
          filepath: filepath,
          index: index,
          descriptor: face.descriptor
        });

        /* If this is a face-id, output the -original.png meta-data file */
        if (!id) {
          return;
        }

        /* Letterbox the face bounds into a 200x200 crop, preserving the
         * aspect ratio of the original box */
        const path = "face-data/" + (id % 100),
          target = `${path}/${id}-original.png`,
          box = face.detection._box,
          aspect = box._width / box._height,
          dx = (aspect > 1.0) ? 200 : (200 * aspect),
          dy = (aspect < 1.0) ? 200 : (200 / aspect);

        return exists(target).then((doesExist) => {
          if (doesExist) {
            console.log(`...${target} already exists.`);
            return;
          }
          const canvas = createCanvas(200, 200),
            ctx = canvas.getContext('2d');

          /* Transparent background behind the letterboxed crop */
          ctx.fillStyle = "rgba(0, 0, 0, 0)";
          ctx.fillRect(0, 0, 200, 200);
          ctx.drawImage(image, box._x, box._y, box._width, box._height,
            Math.floor((200 - dx) * 0.5),
            Math.floor((200 - dy) * 0.5), dx, dy);

          console.log(`...writing to ${target}.`);

          return mkdir(path).then(() => {
            fs.writeFileSync(picturesPath + target, canvas.toBuffer("image/png", {
              quality: 0.95,
              chromaSubsampling: false
            }));
          });
        });
      });
    });
  }, {
    concurrency: maxConcurrency
  }).then(() => {
    console.log("Face detection scanning completed.");
    /* Disabled debug pass: pairwise-compare every collected descriptor and
     * report pairs closer than 0.4 (a fairly strict match threshold for
     * these 128-D descriptors) as likely the same person */
    if (0) faces.forEach((a, i) => {
      faces.forEach((b, j) => {
        if (i == j) {
          return;
        }
        const distance = faceapi.euclideanDistance(a.descriptor, b.descriptor);
        if (distance < 0.4) {
          console.log(`${a.filepath}.${a.index} is similar to ${b.filepath}.${b.index}: ${distance}`);
        }
      });
    });
  });
}).catch((error) => {
  console.error(error);
  process.exit(-1);
});