James Ketrenos 553a80fce1 Add face-crop creation
Signed-off-by: James Ketrenos <james_gitlab@ketrenos.com>
2020-01-05 15:25:41 -08:00

"use strict";
process.env.TZ = "Etc/GMT";
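/* Load the native TensorFlow backend so face-api.js runs its models through tfjs-node rather than the slower pure-JS implementation */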
require('@tensorflow/tfjs-node');
let photoDB = null;
const config = require("config"),
Promise = require("bluebird"),
{ exists, mkdir, unlink } = require("./lib/util"),
faceapi = require("face-api.js"),
fs = require("fs"),
canvas = require("canvas");
const { createCanvas, Canvas, Image, ImageData } = canvas;
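/* face-api.js expects the browser Canvas, Image and ImageData globals; supply the node-canvas implementations instead */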
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
const maxConcurrency = require("os").cpus().length;
require("./console-line.js"); /* Monkey-patch console.log with line numbers */
const picturesPath = config.get("picturesPath").replace(/\/$/, "") + "/",
faceData = picturesPath + "face-data/";
process.stdout.write("Loading DB.");
require("./db/photos").then(function(db) {
process.stdout.write("done\n");
photoDB = db;
}).then(() => {
console.log("DB connected.");
process.stdout.write("Loading models.");
return faceapi.nets.ssdMobilenetv1.loadFromDisk('./models');
}).then(() => {
process.stdout.write(".");
return faceapi.nets.faceLandmark68Net.loadFromDisk('./models');
}).then(() => {
process.stdout.write(".");
return faceapi.nets.faceRecognitionNet.loadFromDisk('./models');
}).then(async () => {
process.stdout.write(".done\n");
if (process.argv[0].match(/node/)) {
process.argv.shift(); /* node */
}
process.argv.shift(); /* script name */
return Promise.resolve().then(() => {
console.log(`${process.argv.length} argument(s) supplied.`);
if (process.argv.length != 0) {
return process.argv;
}
/* If no parameters provided, scan all faces to create image crops */
return photoDB.sequelize.query("SELECT id FROM faces ORDER BY id ASC", {
type: photoDB.sequelize.QueryTypes.SELECT,
raw: true
}).then((results) => {
return results.map(result => result.id);
});
});
}).then((args) => {
const faces = [];
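/* Handle each argument via bluebird's Promise.map, bounded by the CPU-count concurrency set below */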
return Promise.map(args, (arg) => {
const file = arg;
let id = parseInt(arg);
let loader;
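/* Purely numeric arguments compare loosely equal to their parseInt value and are treated as face ids; anything else is treated as an image path relative to picturesPath */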
if (id == file) {
/* This is a face id */
console.log(`Looking up face-id ${id}...`);
loader = photoDB.sequelize.query(
"SELECT albums.path,photos.filename,photos.width,photos.height,faces.* " +
"FROM faces,photos,albums " +
"WHERE photos.id=faces.photoId " +
"AND albums.id=photos.albumId " +
"AND faces.id=:id", {
replacements: {
id: id
},
type: photoDB.sequelize.QueryTypes.SELECT,
raw: true
}).then((results) => {
if (results.length != 1) {
console.error(`...error. No face-id found: ${id}.\n`);
process.exit(-1);
}
const photo = results[0];
console.log(`...loading ${photo.filename}`);
const file = photo.path + photo.filename;
return canvas.loadImage(picturesPath + file).then(async (image) => {
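/* The faces row already stores the bounding box as fractions of the photo dimensions and the descriptor lives in <id>-data.json, so build a detector-shaped object instead of re-running detection */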
const detectors = [ {
detection: {
_box: {
_x: photo.left * photo.width,
_y: photo.top * photo.height,
_width: (photo.right - photo.left) * photo.width,
_height: (photo.bottom - photo.top) * photo.height,
}
},
descriptor: JSON.parse(fs.readFileSync(faceData + (id % 100) + "/" + id + "-data.json"))
} ];
return [ file, image, detectors ];
});
});
} else {
/* This is a file */
console.log(`Loading ${file}...`);
id = undefined;
loader = canvas.loadImage(picturesPath + file).then(async (image) => {
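/* Run the full face-api.js pipeline: SSD MobileNet V1 detection (min confidence 0.8), 68-point landmarks, then 128-D descriptors */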
const detectors = await faceapi.detectAllFaces(image,
new faceapi.SsdMobilenetv1Options({
minConfidence: 0.8
})
).withFaceLandmarks().withFaceDescriptors();
detectors.forEach((detector) => {
const data = [];
/* Convert the descriptor object into a dense 128-element array */
for (let i = 0; i < 128; i++) {
data.push(detector.descriptor[i]);
}
detector.descriptor = data;
});
return [ file, image, detectors ];
});
}
/* Wrap in bluebird so .spread() is available even when loader is a native promise */
return Promise.resolve(loader).spread((filepath, image, detectors) => {
process.stdout.write(`${detectors.length} faces.\n`);
return Promise.map(detectors, (face, index) => {
faces.push({
filepath: filepath,
index: index,
descriptor: face.descriptor
});
/* If this is a face-id, write the cropped
 * -original.png image for the face */
if (!id) {
return;
}
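/* Scale the detection box to fit within 200x200 while preserving its aspect ratio; the crop is drawn centered on a transparent canvas */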
const path = "face-data/" + (id % 100),
target = `${path}/${id}-original.png`,
box = face.detection._box,
aspect = box._width / box._height,
dx = (aspect > 1.0) ? 200 : (200 * aspect),
dy = (aspect < 1.0) ? 200 : (200 / aspect);
return exists(target).then((doesExist) => {
if (doesExist) {
console.log(`...${target} already exists.`);
return;
}
const canvas = createCanvas(200, 200),
ctx = canvas.getContext('2d');
ctx.fillStyle = "rgba(0, 0, 0, 0)";
ctx.fillRect(0, 0, 200, 200);
ctx.drawImage(image, box._x, box._y, box._width, box._height,
Math.floor((200 - dx) * 0.5),
Math.floor((200 - dy) * 0.5), dx, dy);
console.log(`...writing to ${target}.`);
return mkdir(path).then(() => {
fs.writeFileSync(picturesPath + target, canvas.toBuffer("image/png"));
});
});
});
});
}, {
concurrency: maxConcurrency
}).then(() => {
console.log("Face detection scanning completed.");
faces.forEach((a, i) => {
faces.forEach((b, j) => {
if (i == j) {
return;
}
const distance = faceapi.euclideanDistance(a.descriptor, b.descriptor);
if (distance < 0.4) {
console.log(`${a.filepath}.${a.index} is similar to ${b.filepath}.${b.index}: ${distance}`);
}
})
});
});
}).catch((error) => {
console.error(error);
process.exit(-1);
});