diff --git a/.gitignore b/.gitignore
index 6ed48a9..81ac384 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
 .env
 node_modules
+frontend/auto-clusters.html
diff --git a/Dockerfile b/Dockerfile
index 7c25e58..ce0b84f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -50,5 +50,4 @@
 COPY /frontend /website/frontend
 COPY /db /website/db
 COPY /config /website/config
-
 CMD [ "/entrypoint.sh" ]
diff --git a/config/default.json b/config/default.json
index 32788b1..7784b81 100644
--- a/config/default.json
+++ b/config/default.json
@@ -25,7 +25,7 @@
   },
   "picturesPath": "./pictures",
   "basePath": "/photos",
-  "faceData": "./pictures/face-data",
+  "facesPath": "./pictures/face-data",
   "sessions": {
     "db": "sessions.db",
     "store-secret": "234j23jffj23f!41$@#!1113j3"
diff --git a/docker-compose.yml b/docker-compose.yml
index 017885e..751ca26 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -11,7 +11,7 @@ services:
 #      - db
     restart: always
     ports:
-      - ${PORT}:${PORT}
+      - ${PORT}:8123
     volumes:
       - ${PICTURES}:/pictures
       - ${PWD}/db:/db
diff --git a/ketrface/cluster.py b/ketrface/cluster.py
index 3897431..57926c0 100644
--- a/ketrface/cluster.py
+++ b/ketrface/cluster.py
@@ -14,13 +14,13 @@ from ketrface.dbscan import *
 from ketrface.db import *
 from ketrface.config import *
 
-html_base = '../'
-db_path = '../db/photos.db'
-
 config = read_config()
-json_str = json.dumps(config, indent = 2)
-print(json_str)
-exit(0)
+
+html_path = merge_config_path(config['path'], 'frontend')
+pictures_path = merge_config_path(config['path'], config['picturesPath'])
+faces_path = merge_config_path(config['path'], config['facesPath'])
+db_path = merge_config_path(config['path'], config["db"]["photos"]["host"])
+html_base = config['basePath']
 
 # TODO
 # Switch to using DBSCAN
@@ -52,7 +52,7 @@ def gen_html(identities):
             label = f'Cluster ({face["cluster"]["id"]})'
         print('<div>')
-        path = f'{html_base}/faces/{"{:02d}".format(faceId % 10)}'
+        path = f'{html_base}/faces/{"{:02d}".format(faceId % 100)}'
         print(f'<img src="{path}/{faceId}.jpg">')
         print(f'<div>{label}: {distance}</div>')
         print(f'<div>{faceId} {photoId} {confidence} {focus}</div>')
@@ -140,7 +140,17 @@
     elif diff < 0:
         return -1
     return 0
-
+
+def build_straglers(faces):
+    noise = []
+    undefined = []
+    for face in faces:
+        if face['cluster'] == Noise:
+            noise.append(face)
+        elif face['cluster'] == Undefined:
+            undefined.append(face)
+    return noise + undefined
+
 print('Loading faces from database')
 faces = load_faces()
 print(f'{len(faces)} faces loaded')
@@ -165,8 +175,6 @@ while removed != 0:
     if removed > 0:
         print(f'Excluded {removed} faces this epoch')
 
-
-
 print(f'{len(identities)} identities seeded.')
 
 # Cluster the clusters...
@@ -185,19 +193,22 @@ for cluster in reduced:
 
 # Creating a set containing those faces which have not been bound
 # to an identity to recluster them in isolation from the rest of
 # the faces
-noise = []
-undefined = []
-clustered = []
-for face in faces:
-    if face['cluster'] == Noise:
-        noise.append(face)
-    elif face['cluster'] == Undefined:
-        undefined.append(face)
+straglers = build_straglers(faces)
+reduced = reduced + DBSCAN(straglers)
-print(f'Stats: Noise = {len(noise)}, Undefined = {len(undefined)}')
-
-straglers = DBSCAN(noise + undefined)
-reduced = update_cluster_averages(reduced + straglers)
+# Build a final cluster with all remaining uncategorized faces
+remaining_cluster = {
+    'id': len(reduced) + 1,
+    'distance': 0,
+    'descriptors': [],
+    'cluster': Undefined,
+    'faces': []
+}
+straglers = build_straglers(faces)
+for face in straglers:
+    face['cluster'] = remaining_cluster
+    remaining_cluster['faces'].append(face)
+reduced.append(remaining_cluster)
 
 # Give all merged identity lists a unique ID
 for id, identity in enumerate(reduced):
@@ -205,6 +216,8 @@ for id, identity in enumerate(reduced):
     for face in identity['faces']:
         face['cluster'] = identity
 
+reduced = update_cluster_averages(reduced)
+
 update_distances(reduced)
 sort_identities(reduced)
 
@@ -224,7 +237,7 @@ for i, A in enumerate(reduced):
            distance = "{:0.4f}".format(distance)
            print(f'{A["id"]} to {B["id"]} = {distance}: MERGE')
 
-print('Writing to "identities.html"')
-redirect_on('identities.html')
+print('Writing to "auto-clusters.html"')
+redirect_on(os.path.join(html_path, 'auto-clusters.html'))
 gen_html(reduced)
 redirect_off()
diff --git a/ketrface/detect.py b/ketrface/detect.py
index f3613de..b696ec5 100644
--- a/ketrface/detect.py
+++ b/ketrface/detect.py
@@ -11,8 +11,16 @@ import numpy as np
 import cv2
 from ketrface.util import *
 from ketrface.db import *
+from ketrface.config import *
+
+config = read_config()
+
+html_path = merge_config_path(config['path'], 'frontend')
+pictures_path = merge_config_path(config['path'], config['picturesPath'])
+faces_path = merge_config_path(config['path'], config['facesPath'])
+db_path = merge_config_path(config['path'], config["db"]["photos"]["host"])
+html_base = config['basePath']
 
-face_base = '/pictures/'
 model_name = 'VGG-Face' # 'ArcFace'
 detector_backend = 'mtcnn' # 'retinaface'
 model = DeepFace.build_model(model_name)
@@ -237,7 +245,7 @@ with conn:
                 'descriptorId': faceDescriptorId,
             })
 
-            path = f'{face_base}faces/{"{:02d}".format(faceId % 10)}'
+            path = f'{faces_path}/{"{:02d}".format(faceId % 100)}'
             try:
                 os.makedirs(path)
             except FileExistsError:
@@ -252,6 +260,9 @@ with conn:
             exif_ifd = {piexif.ExifIFD.UserComment: compressed_str}
             exif_dict = {"0th": {}, "Exif": exif_ifd, "1st": {}, "thumbnail": None, "GPS": {}}
 
-            image.save(f'{path}/{faceId}.jpg', exif = piexif.dump(exif_dict))
+            image.save(
+                f'{path}/{faceId}.jpg',
+                quality = 'maximum',
+                exif = piexif.dump(exif_dict))
 
             update_face_count(conn, photoId, len(faces))
diff --git a/ketrface/identities.html b/ketrface/identities.html
deleted file mode 100644
index 1aa55f0..0000000
--- a/ketrface/identities.html
+++ /dev/null
@@ -1,155 +0,0 @@
[155 deleted lines omitted: the previously checked-in, generated identities.html listing (Identity 1: 13 faces, Identity 0: 9 faces, Identity 2: 6 faces); this output is now written at runtime as frontend/auto-clusters.html]
diff --git a/ketrface/ketrface/config.py b/ketrface/ketrface/config.py
index fb844a7..0d6a805 100644
--- a/ketrface/ketrface/config.py
+++ b/ketrface/ketrface/config.py
@@ -1,6 +1,6 @@
 import os
 import json
-import collections
+import re
 
 def dict_merge(dct, merge_dct):
     """ Recursive dict merge. Inspired by :meth:``dict.update()``, instead of
@@ -25,7 +25,6 @@ def read_config():
     while file == None:
         try:
             config_path = os.path.join(path, 'config', 'default.json')
-            print(f'Trying {config_path}')
             file = open(config_path, 'r')
             break
         except:
@@ -37,6 +36,8 @@ def read_config():
     if file is None:
         return res
 
+    res['path'] = path
+
     data = json.load(file)
     file.close()
     dict_merge(res, data)
@@ -55,3 +56,9 @@ def read_config():
         dict_merge(res, data)
 
     return res
+
+def merge_config_path(config_path, path):
+    if path[0] == '/':
+        return os.path.normpath(path)
+    return os.path.normpath(os.path.join(config_path, path))
+
\ No newline at end of file
diff --git a/server/app.js b/server/app.js
index c339c21..2e6b388 100755
--- a/server/app.js
+++ b/server/app.js
@@ -16,6 +16,7 @@ const express = require("express"),
 require("./console-line.js"); /* Monkey-patch console.log with line numbers */
 
 const picturesPath = config.get("picturesPath").replace(/\/$/, "") + "/",
+    facesPath = config.get("facesPath").replace(/\/$/, "") + "/",
     serverConfig = config.get("server");
 
 let basePath = config.get("basePath");
@@ -25,8 +26,9 @@ if (basePath == "//") {
 }
 
 let photoDB = null,
     userDB = null;
-console.log("Loading pictures out of: " + picturesPath);
-console.log("Hosting server from: " + basePath);
+console.log(`Loading pictures out of: ${picturesPath}`);
+console.log(`Loading faces out of: ${facesPath} (mapped to ${basePath}faces)`);
+console.log(`Hosting server from: ${basePath}`);
 
 const app = express();
@@ -238,7 +240,7 @@ app.use(basePath, index);
 const users = require("./routes/users");
 app.use(basePath + "api/v1/users", users.router);
 
-app.use(function(err, req, res, next) {
+app.use((err, req, res, next) => {
     res.status(err.status || 500).json({
         message: err.message,
         error: {}
@@ -246,7 +248,7 @@
 });
 
 /* Check authentication */
-app.use(basePath, function(req, res, next) {
+app.use(basePath, (req, res, next) => {
     return users.getSessionUser(req).then(function(user) {
         if (user.restriction) {
             return res.status(401).send(user.restriction);
@@ -258,6 +260,11 @@
     });
 });
 
+app.use(`${basePath}faces/`, express.static(facesPath, {
+    maxAge: '14d',
+    index: false
+}));
+
 /* Everything below here requires a successful authentication */
 app.use(basePath, express.static(picturesPath, {
     maxAge: '14d',
diff --git a/server/face-recognizer.js b/server/face-recognizer.js
index da8fcc3..aed1ec4 100644
--- a/server/face-recognizer.js
+++ b/server/face-recognizer.js
@@ -37,7 +37,7 @@ const maxConcurrency = require("os").cpus().length;
 require("./console-line.js"); /* Monkey-patch console.log with line numbers */
 
 const picturesPath = config.get("picturesPath").replace(/\/$/, "") + "/",
-    faceData = picturesPath + "face-data/";
+    facesPath = config.get("facesPath").replace(/\/$/, "") + "/";
 
 let photoDB = null;
 
@@ -137,7 +137,7 @@ require("./db/photos").then(function(db) {
         return Promise.map(faces, (face) => {
             return Promise.map([ "-data.json", "-original.png" ], (suffix) => {
                 const id = face.id,
-                    dataPath = faceData + (id % 100) + "/" + id + suffix;
+                    dataPath = facesPath + (id % 100) + "/" + id + suffix;
                 return exists(dataPath).then((result) => {
                     if (result) {
                         console.log(`...removing ${dataPath}`);
@@ -185,7 +185,7 @@ require("./db/photos").then(function(db) {
         }).spread((results, metadata) => {
             return metadata.lastID;
         }).then((id) => {
-            const path = faceData + (id % 100);
+            const path = facesPath + (id % 100);
             return mkdir(path).then(() => {
                 const dataPath = `${path}/${id}-data.json`, data = [];
                 console.log(`...writing descriptor data to ${dataPath}...`);
@@ -280,7 +280,7 @@ require("./db/photos").then(function(db) {
         console.log(`...reading ${allFaces.length} descriptors...`);
         return Promise.map(allFaces, (face) => {
             const id = face.id,
-                dataPath = faceData + "/" + (id % 100) + "/" + id + "-data.json";
+                dataPath = facesPath + "/" + (id % 100) + "/" + id + "-data.json";
 
             if (id in descriptors) {
                 return;
diff --git a/server/face.js b/server/face.js
index eccce41..80e6104 100644
--- a/server/face.js
+++ b/server/face.js
@@ -22,7 +22,7 @@ const maxConcurrency = require("os").cpus().length;
 require("./console-line.js"); /* Monkey-patch console.log with line numbers */
 
 const picturesPath = config.get("picturesPath").replace(/\/$/, "") + "/",
-    faceData = picturesPath + "face-data/";
+    facesPath = config.get("facesPath").replace(/\/$/, "") + "/";
 
 function alignFromLandmarks(image, landmarks, drawLandmarks) {
     const faceMargin = 0.45,
@@ -151,7 +151,7 @@ require("./db/photos").then(function(db) {
                         _height: (photo.bottom - photo.top) * photo.height,
                     }
                 },
-                descriptor: JSON.parse(fs.readFileSync(faceData + (id % 100) + "/" + id + "-data.json"))
+                descriptor: JSON.parse(fs.readFileSync(facesPath + (id % 100) + "/" + id + "-data.json"))
             } ];
             return [ file, image, detectors ];
         });