Seeding server code from ketr.photos
Signed-off-by: James Ketrenos <james.p.ketrenos@intel.com>
This commit is contained in:
parent
a22c360d90
commit
dca37b1723
6
ketr.ketran/.gitignore
vendored
Normal file
6
ketr.ketran/.gitignore
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
!.gitignore
|
||||||
|
!package.json
|
||||||
|
node_modules
|
||||||
|
config/local.json
|
||||||
|
package-lock.json
|
||||||
|
*.log
|
74
ketr.ketran/README.md
Normal file
74
ketr.ketran/README.md
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
# Ketr.Ketran REST API
|
||||||
|
|
||||||
|
## POST /api/v1/game
|
||||||
|
|
||||||
|
### Request
|
||||||
|
|
||||||
|
```json
|
||||||
|
{}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Response
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
gameId: id
|
||||||
|
gameState: {
|
||||||
|
tiles: []
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
# Configuring / installing
|
||||||
|
|
||||||
|
|
||||||
|
## Build
|
||||||
|
```bash
|
||||||
|
git clone ...
|
||||||
|
cd server
|
||||||
|
npm install
|
||||||
|
```
|
||||||
|
|
||||||
|
## Install
|
||||||
|
```bash
|
||||||
|
sudo cp ketr.ketran /etc/logrotate.d/
|
||||||
|
sudo cp ketr.ketran.service /etc/systemd/system/
|
||||||
|
sudo systemctl daemon-reload
|
||||||
|
```
|
||||||
|
|
||||||
|
Install the following into your nginx server configuration:
|
||||||
|
|
||||||
|
```nginx
|
||||||
|
location ~ /ketr.ketran/api/.* {
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_set_header Host $http_host;
|
||||||
|
proxy_set_header X-NginX-Proxy true;
|
||||||
|
proxy_pass_header Set-Cookie;
|
||||||
|
proxy_pass_header P3P;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection "upgrade";
|
||||||
|
proxy_pass http://localhost:8930;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Add security tokens in ketr.ketran/config/local.json:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cat << EOF > config/local.json
|
||||||
|
{
|
||||||
|
"tokens": [ {
|
||||||
|
"$(whoami)": "$(< /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c${1:-32};echo;)"
|
||||||
|
} ]
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
```
|
||||||
|
|
||||||
|
## Launch
|
||||||
|
```bash
|
||||||
|
sudo systemctl start ketr.ketran
|
||||||
|
```
|
||||||
|
|
6
ketr.ketran/config/default.json
Executable file
6
ketr.ketran/config/default.json
Executable file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"http": {
|
||||||
|
"base": "ketr.ketran",
|
||||||
|
"port": 8930
|
||||||
|
}
|
||||||
|
}
|
1
ketr.ketran/config/production.json
Normal file
1
ketr.ketran/config/production.json
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{}
|
11
ketr.ketran/ketr.ketran
Normal file
11
ketr.ketran/ketr.ketran
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
/var/log/osgc-aptly-rest.log
|
||||||
|
{
|
||||||
|
rotate 4
|
||||||
|
weekly
|
||||||
|
missingok
|
||||||
|
notifempty
|
||||||
|
compress
|
||||||
|
postrotate
|
||||||
|
/usr/lib/rsyslog/rsyslog-rotate
|
||||||
|
endscript
|
||||||
|
}
|
19
ketr.ketran/ketr.ketran.service
Executable file
19
ketr.ketran/ketr.ketran.service
Executable file
@@ -0,0 +1,19 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=OSGC-Aptly REST API
|
||||||
|
Requires=
|
||||||
|
After=networking.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Environment="HTTP_PROXY=http://proxy-chain.intel.com:911/" "NO_PROXY=localhost,127.0.0.1,.intel.com"
|
||||||
|
ExecStart=/usr/bin/npm run backend
|
||||||
|
WorkingDirectory=/home/jketreno/docker/osgc-aptly/osgc-aptly-rest
|
||||||
|
Restart=always
|
||||||
|
# Restart service after 10 seconds if node service crashes
|
||||||
|
RestartSec=10
|
||||||
|
StandardOutput=syslog
|
||||||
|
StandardError=syslog
|
||||||
|
SyslogIdentifier=osgc-aptly-rest
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
|
569
ketr.ketran/manifest.json
Normal file
569
ketr.ketran/manifest.json
Normal file
@@ -0,0 +1,569 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"directory": true,
|
||||||
|
"date_modified": "2020-03-30T18:29:16+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux",
|
||||||
|
"size": 4096,
|
||||||
|
"path": "linux"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": true,
|
||||||
|
"date_modified": "2020-03-30T18:29:16+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu",
|
||||||
|
"size": 4096,
|
||||||
|
"path": "linux/ubuntu"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": true,
|
||||||
|
"date_modified": "2020-03-30T18:29:23+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10",
|
||||||
|
"size": 8192,
|
||||||
|
"path": "linux/ubuntu/19.10"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:15:08+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/intel-cmc-2.0+embargo-169.u19.10-release.x86_64.deb",
|
||||||
|
"size": 31039028,
|
||||||
|
"path": "linux/ubuntu/19.10/intel-cmc-2.0+embargo-169.u19.10-release.x86_64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:22:21+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/intel-gpu-tools-dbg_1.24+embargo169_amd64.deb",
|
||||||
|
"size": 6465620,
|
||||||
|
"path": "linux/ubuntu/19.10/intel-gpu-tools-dbg_1.24+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:22:21+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/intel-gpu-tools_1.24+embargo169_amd64.deb",
|
||||||
|
"size": 1955740,
|
||||||
|
"path": "linux/ubuntu/19.10/intel-gpu-tools_1.24+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:13:12+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/intel-level-zero-gpu_0.8.016262+embargo169_amd64.deb",
|
||||||
|
"size": 1107520,
|
||||||
|
"path": "linux/ubuntu/19.10/intel-level-zero-gpu_0.8.016262+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:35:29+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/intel-media-va-driver-non-free_20.1~pre+embargo169_amd64.deb",
|
||||||
|
"size": 6244784,
|
||||||
|
"path": "linux/ubuntu/19.10/intel-media-va-driver-non-free_20.1~pre+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:18:25+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/intel-metrics-discovery_1.5.114+embargo169_amd64.deb",
|
||||||
|
"size": 715152,
|
||||||
|
"path": "linux/ubuntu/19.10/intel-metrics-discovery_1.5.114+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:19:21+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/intel-metrics-library_1.0.1+embargo169_amd64.deb",
|
||||||
|
"size": 157448,
|
||||||
|
"path": "linux/ubuntu/19.10/intel-metrics-library_1.0.1+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:57:35+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/intel-opencl-icd_016320+embargo169_amd64.deb",
|
||||||
|
"size": 1223400,
|
||||||
|
"path": "linux/ubuntu/19.10/intel-opencl-icd_016320+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:57:52+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/level-zero-dev_0.91.7+embargo169_amd64.deb",
|
||||||
|
"size": 90364,
|
||||||
|
"path": "linux/ubuntu/19.10/level-zero-dev_0.91.7+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:57:52+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/level-zero_0.91.7+embargo169_amd64.deb",
|
||||||
|
"size": 49700,
|
||||||
|
"path": "linux/ubuntu/19.10/level-zero_0.91.7+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:36+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libdrm-amdgpu1_2.4.100+embargo169_amd64.deb",
|
||||||
|
"size": 28272,
|
||||||
|
"path": "linux/ubuntu/19.10/libdrm-amdgpu1_2.4.100+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:36+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libdrm-common_2.4.100+embargo169_all.deb",
|
||||||
|
"size": 14208,
|
||||||
|
"path": "linux/ubuntu/19.10/libdrm-common_2.4.100+embargo169_all.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:36+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libdrm-dev_2.4.100+embargo169_amd64.deb",
|
||||||
|
"size": 120324,
|
||||||
|
"path": "linux/ubuntu/19.10/libdrm-dev_2.4.100+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:36+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libdrm-intel1_2.4.100+embargo169_amd64.deb",
|
||||||
|
"size": 70724,
|
||||||
|
"path": "linux/ubuntu/19.10/libdrm-intel1_2.4.100+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:36+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libdrm-nouveau2_2.4.100+embargo169_amd64.deb",
|
||||||
|
"size": 26316,
|
||||||
|
"path": "linux/ubuntu/19.10/libdrm-nouveau2_2.4.100+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:36+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libdrm-radeon1_2.4.100+embargo169_amd64.deb",
|
||||||
|
"size": 29580,
|
||||||
|
"path": "linux/ubuntu/19.10/libdrm-radeon1_2.4.100+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:36+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libdrm2_2.4.100+embargo169_amd64.deb",
|
||||||
|
"size": 41172,
|
||||||
|
"path": "linux/ubuntu/19.10/libdrm2_2.4.100+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libegl-mesa0_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 139724,
|
||||||
|
"path": "linux/ubuntu/19.10/libegl-mesa0_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libegl1-mesa-dev_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 51120,
|
||||||
|
"path": "linux/ubuntu/19.10/libegl1-mesa-dev_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libegl1-mesa_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 49392,
|
||||||
|
"path": "linux/ubuntu/19.10/libegl1-mesa_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libgbm-dev_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 52940,
|
||||||
|
"path": "linux/ubuntu/19.10/libgbm-dev_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libgbm1_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 71004,
|
||||||
|
"path": "linux/ubuntu/19.10/libgbm1_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libgl1-mesa-dev_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 49408,
|
||||||
|
"path": "linux/ubuntu/19.10/libgl1-mesa-dev_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libgl1-mesa-dri_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 9929788,
|
||||||
|
"path": "linux/ubuntu/19.10/libgl1-mesa-dri_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libgl1-mesa-glx_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 49412,
|
||||||
|
"path": "linux/ubuntu/19.10/libgl1-mesa-glx_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libglapi-mesa_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 69888,
|
||||||
|
"path": "linux/ubuntu/19.10/libglapi-mesa_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libgles2-mesa-dev_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 49416,
|
||||||
|
"path": "linux/ubuntu/19.10/libgles2-mesa-dev_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libgles2-mesa_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 49400,
|
||||||
|
"path": "linux/ubuntu/19.10/libgles2-mesa_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libglx-mesa0_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 182580,
|
||||||
|
"path": "linux/ubuntu/19.10/libglx-mesa0_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:48:07+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libigc-dev_8020561+embargo169_amd64.deb",
|
||||||
|
"size": 2376,
|
||||||
|
"path": "linux/ubuntu/19.10/libigc-dev_8020561+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:48:07+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libigc-tools_8020561+embargo169_amd64.deb",
|
||||||
|
"size": 1830108,
|
||||||
|
"path": "linux/ubuntu/19.10/libigc-tools_8020561+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:48:07+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libigc1_8020561+embargo169_amd64.deb",
|
||||||
|
"size": 11786756,
|
||||||
|
"path": "linux/ubuntu/19.10/libigc1_8020561+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:48:07+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libigdfcl-dev_8020561+embargo169_amd64.deb",
|
||||||
|
"size": 125484,
|
||||||
|
"path": "linux/ubuntu/19.10/libigdfcl-dev_8020561+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:48:07+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libigdfcl1_8020561+embargo169_amd64.deb",
|
||||||
|
"size": 18982892,
|
||||||
|
"path": "linux/ubuntu/19.10/libigdfcl1_8020561+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:45+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libigdgmm-dev_8020561+embargo169_amd64.deb",
|
||||||
|
"size": 1126848,
|
||||||
|
"path": "linux/ubuntu/19.10/libigdgmm-dev_8020561+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:45+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libigdgmm6_8020561+embargo169_amd64.deb",
|
||||||
|
"size": 127364,
|
||||||
|
"path": "linux/ubuntu/19.10/libigdgmm6_8020561+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:35:29+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libigfxcmrt-dev_20.1~pre+embargo169_amd64.deb",
|
||||||
|
"size": 69096,
|
||||||
|
"path": "linux/ubuntu/19.10/libigfxcmrt-dev_20.1~pre+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:35:29+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libigfxcmrt7_20.1~pre+embargo169_amd64.deb",
|
||||||
|
"size": 30668,
|
||||||
|
"path": "linux/ubuntu/19.10/libigfxcmrt7_20.1~pre+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:17:58+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libmfx-dev_20.1~pre+embargo169_amd64.deb",
|
||||||
|
"size": 39952,
|
||||||
|
"path": "linux/ubuntu/19.10/libmfx-dev_20.1~pre+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:17:58+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libmfx-tools_20.1~pre+embargo169_amd64.deb",
|
||||||
|
"size": 1473148,
|
||||||
|
"path": "linux/ubuntu/19.10/libmfx-tools_20.1~pre+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:17:58+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libmfx1_20.1~pre+embargo169_amd64.deb",
|
||||||
|
"size": 2833028,
|
||||||
|
"path": "linux/ubuntu/19.10/libmfx1_20.1~pre+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libosmesa6-dev_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 52776,
|
||||||
|
"path": "linux/ubuntu/19.10/libosmesa6-dev_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libosmesa6_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 2741820,
|
||||||
|
"path": "linux/ubuntu/19.10/libosmesa6_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:23:06+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libva-dev_2.7.0.1+embargo169_amd64.deb",
|
||||||
|
"size": 104500,
|
||||||
|
"path": "linux/ubuntu/19.10/libva-dev_2.7.0.1+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:23:06+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libva-drm2_2.7.0.1+embargo169_amd64.deb",
|
||||||
|
"size": 17760,
|
||||||
|
"path": "linux/ubuntu/19.10/libva-drm2_2.7.0.1+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:23:06+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libva-glx2_2.7.0.1+embargo169_amd64.deb",
|
||||||
|
"size": 21344,
|
||||||
|
"path": "linux/ubuntu/19.10/libva-glx2_2.7.0.1+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:23:06+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libva-wayland2_2.7.0.1+embargo169_amd64.deb",
|
||||||
|
"size": 19664,
|
||||||
|
"path": "linux/ubuntu/19.10/libva-wayland2_2.7.0.1+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:23:06+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libva-x11-2_2.7.0.1+embargo169_amd64.deb",
|
||||||
|
"size": 22392,
|
||||||
|
"path": "linux/ubuntu/19.10/libva-x11-2_2.7.0.1+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:23:06+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libva2_2.7.0.1+embargo169_amd64.deb",
|
||||||
|
"size": 60736,
|
||||||
|
"path": "linux/ubuntu/19.10/libva2_2.7.0.1+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libwayland-egl1-mesa_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 49416,
|
||||||
|
"path": "linux/ubuntu/19.10/libwayland-egl1-mesa_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libxatracker-dev_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 52896,
|
||||||
|
"path": "linux/ubuntu/19.10/libxatracker-dev_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/libxatracker2_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 1644200,
|
||||||
|
"path": "linux/ubuntu/19.10/libxatracker2_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:20+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/linux-headers-5.4.17-169+_5.4.17-169+-1_amd64.deb",
|
||||||
|
"size": 11420780,
|
||||||
|
"path": "linux/ubuntu/19.10/linux-headers-5.4.17-169+_5.4.17-169+-1_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T16:55:17+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/linux-i915-firmware_2020.12+embargo169.deb",
|
||||||
|
"size": 4758832,
|
||||||
|
"path": "linux/ubuntu/19.10/linux-i915-firmware_2020.12+embargo169.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:21+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/linux-image-5.4.17-169+-dbg_5.4.17-169+-1_amd64.deb",
|
||||||
|
"size": 903956216,
|
||||||
|
"path": "linux/ubuntu/19.10/linux-image-5.4.17-169+-dbg_5.4.17-169+-1_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:21+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/linux-image-5.4.17-169+_5.4.17-169+-1_amd64.deb",
|
||||||
|
"size": 60596400,
|
||||||
|
"path": "linux/ubuntu/19.10/linux-image-5.4.17-169+_5.4.17-169+-1_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:20:21+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/linux-libc-dev_5.4.17-169+-1_amd64.deb",
|
||||||
|
"size": 1069796,
|
||||||
|
"path": "linux/ubuntu/19.10/linux-libc-dev_5.4.17-169+-1_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T16:55:07+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/manifest.yml",
|
||||||
|
"size": 5595,
|
||||||
|
"path": "linux/ubuntu/19.10/manifest.yml"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/mesa-common-dev_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 677236,
|
||||||
|
"path": "linux/ubuntu/19.10/mesa-common-dev_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/mesa-opencl-icd_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 10299076,
|
||||||
|
"path": "linux/ubuntu/19.10/mesa-opencl-icd_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/mesa-va-drivers_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 2684040,
|
||||||
|
"path": "linux/ubuntu/19.10/mesa-va-drivers_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/mesa-vdpau-drivers_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 2810196,
|
||||||
|
"path": "linux/ubuntu/19.10/mesa-vdpau-drivers_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:27:11+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/mesa-vulkan-drivers_20.1.0-devel+embargo169_amd64.deb",
|
||||||
|
"size": 3587736,
|
||||||
|
"path": "linux/ubuntu/19.10/mesa-vulkan-drivers_20.1.0-devel+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:23:06+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/va-driver-all_2.7.0.1+embargo169_amd64.deb",
|
||||||
|
"size": 13948,
|
||||||
|
"path": "linux/ubuntu/19.10/va-driver-all_2.7.0.1+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T17:23:06+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/linux/ubuntu/19.10/vainfo_2.6.0.0+embargo169_amd64.deb",
|
||||||
|
"size": 18908,
|
||||||
|
"path": "linux/ubuntu/19.10/vainfo_2.6.0.0+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": true,
|
||||||
|
"date_modified": "2020-03-30T18:29:26+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils",
|
||||||
|
"size": 4096,
|
||||||
|
"path": "utils"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": true,
|
||||||
|
"date_modified": "2020-03-30T18:29:26+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils/ubuntu",
|
||||||
|
"size": 4096,
|
||||||
|
"path": "utils/ubuntu"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": true,
|
||||||
|
"date_modified": "2020-03-30T18:29:27+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils/ubuntu/19.10",
|
||||||
|
"size": 4096,
|
||||||
|
"path": "utils/ubuntu/19.10"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:29:15+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils/ubuntu/19.10/crucible_1.0.20200316+i169_amd64.deb",
|
||||||
|
"size": 14641300,
|
||||||
|
"path": "utils/ubuntu/19.10/crucible_1.0.20200316+i169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:20:14+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils/ubuntu/19.10/libwaffle-1-0_1.6.90+embargo169_amd64.deb",
|
||||||
|
"size": 28100,
|
||||||
|
"path": "utils/ubuntu/19.10/libwaffle-1-0_1.6.90+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:20:14+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils/ubuntu/19.10/libwaffle-dev_1.6.90+embargo169_amd64.deb",
|
||||||
|
"size": 7444,
|
||||||
|
"path": "utils/ubuntu/19.10/libwaffle-dev_1.6.90+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:20:14+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils/ubuntu/19.10/libwaffle-doc_1.6.90+embargo169_all.deb",
|
||||||
|
"size": 4504,
|
||||||
|
"path": "utils/ubuntu/19.10/libwaffle-doc_1.6.90+embargo169_all.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:29:15+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils/ubuntu/19.10/piglit-shards_20200330+embargo169.deb",
|
||||||
|
"size": 15768,
|
||||||
|
"path": "utils/ubuntu/19.10/piglit-shards_20200330+embargo169.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:28:06+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils/ubuntu/19.10/piglit_1.0.20200212+embargo169_amd64.deb",
|
||||||
|
"size": 33672144,
|
||||||
|
"path": "utils/ubuntu/19.10/piglit_1.0.20200212+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:20:14+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/utils/ubuntu/19.10/waffle-utils_1.6.90+embargo169_amd64.deb",
|
||||||
|
"size": 11236,
|
||||||
|
"path": "utils/ubuntu/19.10/waffle-utils_1.6.90+embargo169_amd64.deb"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": true,
|
||||||
|
"date_modified": "2020-03-30T18:29:27+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/TestTools",
|
||||||
|
"size": 4096,
|
||||||
|
"path": "TestTools"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": true,
|
||||||
|
"date_modified": "2020-03-30T18:29:27+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/TestTools/Linux",
|
||||||
|
"size": 4096,
|
||||||
|
"path": "TestTools/Linux"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"directory": false,
|
||||||
|
"date_modified": "2020-03-30T18:29:16+0000",
|
||||||
|
"url": "https://ubitstore.intel.com/webstores/fm/sfa/Artifacts/Graphics/Builds/cogd/dynamic/run/builds/b561/8020561/artifacts/TestTools/Linux/TestTools-Release-Internal-x64.tar.xz",
|
||||||
|
"size": 368,
|
||||||
|
"path": "TestTools/Linux/TestTools-Release-Internal-x64.tar.xz"
|
||||||
|
}
|
||||||
|
]
|
31
ketr.ketran/package.json
Normal file
31
ketr.ketran/package.json
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
{
|
||||||
|
"name": "ketr.ketran",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Peddlers of Ketran: REST API",
|
||||||
|
"main": "index.html",
|
||||||
|
"scripts": {
|
||||||
|
"start": "NODE_CONFIG_ENV='devel' node server/app.js",
|
||||||
|
"backend": "NODE_CONFIG_ENV='production' node server/app.js"
|
||||||
|
},
|
||||||
|
"repository": "ssh://git@gitlab.ketrenos.com:/jketreno/ketr.ketran",
|
||||||
|
"author": "James Ketrenos <james_ketran@ketrenos.com>",
|
||||||
|
"license": "MIT",
|
||||||
|
"private": true,
|
||||||
|
"dependencies": {
|
||||||
|
"bluebird": "^3.5.5",
|
||||||
|
"config": "^3.1.0",
|
||||||
|
"connect-sqlite3": "^0.9.11",
|
||||||
|
"cookie-parser": "^1.4.4",
|
||||||
|
"express": "^4.17.1",
|
||||||
|
"express-session": "^1.17.1",
|
||||||
|
"handlebars": "^4.7.6",
|
||||||
|
"moment": "^2.24.0",
|
||||||
|
"morgan": "^1.9.1",
|
||||||
|
"node-fetch": "^2.6.0",
|
||||||
|
"node-gzip": "^1.1.2",
|
||||||
|
"nodemailer": "^6.3.0",
|
||||||
|
"sequelize": "^5.21.6",
|
||||||
|
"sqlite3": "^4.1.1"
|
||||||
|
},
|
||||||
|
"devDependencies": {}
|
||||||
|
}
|
308
ketr.ketran/server/app.js
Executable file
308
ketr.ketran/server/app.js
Executable file
@ -0,0 +1,308 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
process.env.TZ = "Etc/GMT";
|
||||||
|
|
||||||
|
console.log("Loading ketr.ketran");
|
||||||
|
|
||||||
|
const express = require("express"),
|
||||||
|
morgan = require("morgan"),
|
||||||
|
bodyParser = require("body-parser"),
|
||||||
|
config = require("config"),
|
||||||
|
session = require('express-session'),
|
||||||
|
hb = require("handlebars"),
|
||||||
|
SQLiteStore = require('connect-sqlite3')(session),
|
||||||
|
scanner = require("./scanner");
|
||||||
|
|
||||||
|
require("./console-line.js"); /* Monkey-patch console.log with line numbers */
|
||||||
|
|
||||||
|
const serverConfig = config.get("server");
|
||||||
|
|
||||||
|
let basePath = config.get("basePath");
|
||||||
|
basePath = "/" + basePath.replace(/^\/+/, "").replace(/\/+$/, "") + "/";
|
||||||
|
if (basePath == "//") {
|
||||||
|
basePath = "/";
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("Hosting server from: " + basePath);
|
||||||
|
|
||||||
|
const app = express();
|
||||||
|
|
||||||
|
app.set("basePath", basePath);
|
||||||
|
|
||||||
|
/* App is behind an nginx proxy which we trust, so use the remote address
|
||||||
|
* set in the headers */
|
||||||
|
app.set("trust proxy", true);
|
||||||
|
|
||||||
|
app.use(basePath, require("./routes/basepath.js"));
|
||||||
|
|
||||||
|
/* Handle static files first so excessive logging doesn't occur */
|
||||||
|
app.use(basePath, express.static("frontend", { index: false }));
|
||||||
|
|
||||||
|
app.use(bodyParser.json());
|
||||||
|
app.use(bodyParser.urlencoded({
|
||||||
|
extended: false
|
||||||
|
}));
|
||||||
|
|
||||||
|
/* *******************************************************************************
|
||||||
|
* Logging - begin
|
||||||
|
*
|
||||||
|
* This runs before after cookie parsing, but before routes. If we set
|
||||||
|
* immediate: true on the morgan options, it happens before cookie parsing
|
||||||
|
* */
|
||||||
|
|
||||||
|
morgan.token('remote-user', function (req) {
|
||||||
|
return req.user ? req.user.username : "N/A";
|
||||||
|
});
|
||||||
|
|
||||||
|
/* Any path starting with the following won't be logged via morgan */
|
||||||
|
const logSkipPaths = new RegExp("^" + basePath + "(" + [
|
||||||
|
".*thumbs\\/",
|
||||||
|
"bower_components",
|
||||||
|
].join(")|(") + ")");
|
||||||
|
app.use(morgan('common', {
|
||||||
|
skip: function (req) {
|
||||||
|
return logSkipPaths.exec(req.originalUrl);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Logging - end
|
||||||
|
* ******************************************************************************* */
|
||||||
|
|
||||||
|
/* body-parser does not support text/*, so add support for that here */
|
||||||
|
app.use(function(req, res, next){
|
||||||
|
if (!req.is('text/*')) {
|
||||||
|
return next();
|
||||||
|
}
|
||||||
|
req.setEncoding('utf8');
|
||||||
|
let text = '';
|
||||||
|
req.on('data', function(chunk) {
|
||||||
|
text += chunk;
|
||||||
|
});
|
||||||
|
req.on('end', function() {
|
||||||
|
req.text = text;
|
||||||
|
next();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
app.use(session({
|
||||||
|
store: new SQLiteStore({ db: config.get("sessions.db") }),
|
||||||
|
secret: config.get("sessions.store-secret"),
|
||||||
|
cookie: { maxAge: 7 * 24 * 60 * 60 * 1000 }, // 1 week
|
||||||
|
saveUninitialized: false,
|
||||||
|
resave: true
|
||||||
|
}));
|
||||||
|
|
||||||
|
const index = require("./routes/index");
|
||||||
|
|
||||||
|
if (config.has("admin.mail") &&
|
||||||
|
config.has("smtp.host") &&
|
||||||
|
config.has("smtp.sender")) {
|
||||||
|
app.set("transporter", require("nodemailer").createTransport({
|
||||||
|
host: config.get("smtp.host"),
|
||||||
|
pool: true,
|
||||||
|
port: config.has("smtp.port") ? config.get("smtp.port") : 25
|
||||||
|
}));
|
||||||
|
} else {
|
||||||
|
console.log("SMTP disabled. To enable SMTP, configure admin.mail, smtp.host, and smtp.sender");
|
||||||
|
}
|
||||||
|
|
||||||
|
const templates = {
|
||||||
|
"html": [
|
||||||
|
"<p>The user {{displayName}} has verified their email address ({{mail}}).</p>",
|
||||||
|
"",
|
||||||
|
"<p>They indicated they know:</p>",
|
||||||
|
"<pre>{{notes}}</pre>",
|
||||||
|
"",
|
||||||
|
"<p>To authenticate:</p>",
|
||||||
|
"<p>echo 'UPDATE users SET authenticated=1 WHERE id={{id}};' | sqlite3 users.db</p>",
|
||||||
|
"",
|
||||||
|
"<p>Sincerely,<br>",
|
||||||
|
"James</p>"
|
||||||
|
].join("\n"),
|
||||||
|
"text": [
|
||||||
|
"The user {{displayName}} has verified their email address ({{mail}}).",
|
||||||
|
"",
|
||||||
|
"They indicated they know:",
|
||||||
|
"{{notes}}",
|
||||||
|
"",
|
||||||
|
"To authenticate:",
|
||||||
|
"echo 'UPDATE users SET authenticated=1 WHERE id={{id}};' | sqlite3 users.db",
|
||||||
|
"",
|
||||||
|
"Sincerely,",
|
||||||
|
"James"
|
||||||
|
].join("\n")
|
||||||
|
};
|
||||||
|
|
||||||
|
/* Look for action-token URLs and process; this does not require a user to be logged
|
||||||
|
* in */
|
||||||
|
app.use(basePath, function(req, res, next) {
|
||||||
|
let match = req.url.match(/^\/([0-9a-f]+)$/);
|
||||||
|
if (!match) {
|
||||||
|
return next();
|
||||||
|
}
|
||||||
|
|
||||||
|
let key = match[1];
|
||||||
|
return userDB.sequelize.query("SELECT * FROM authentications WHERE key=:key", {
|
||||||
|
replacements: {
|
||||||
|
key: key
|
||||||
|
},
|
||||||
|
type: userDB.sequelize.QueryTypes.SELECT
|
||||||
|
}).then(function(results) {
|
||||||
|
let token;
|
||||||
|
if (results.length == 0) {
|
||||||
|
console.log("Invalid key. Ignoring.");
|
||||||
|
return next();
|
||||||
|
}
|
||||||
|
|
||||||
|
token = results[0];
|
||||||
|
|
||||||
|
console.log("Matched token: " + JSON.stringify(token, null, 2));
|
||||||
|
switch (token.type) {
|
||||||
|
case "account-setup":
|
||||||
|
return userDB.sequelize.query("UPDATE users SET mailVerified=1 WHERE id=:userId", {
|
||||||
|
replacements: token
|
||||||
|
}).then(function() {
|
||||||
|
return userDB.sequelize.query("DELETE FROM authentications WHERE key=:key", {
|
||||||
|
replacements: token
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
return userDB.sequelize.query("SELECT * FROM users WHERE id=:userId", {
|
||||||
|
replacements: token,
|
||||||
|
type: userDB.sequelize.QueryTypes.SELECT
|
||||||
|
}).then(function(results) {
|
||||||
|
if (results.length == 0) {
|
||||||
|
throw "DB mis-match between authentications and users table";
|
||||||
|
}
|
||||||
|
const transporter = app.get("transporter");
|
||||||
|
if (!transporter) {
|
||||||
|
console.log("Not sending VERIFIED email; SMTP not configured.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let user = results[0],
|
||||||
|
envelope = {
|
||||||
|
to: config.get("admin.mail"),
|
||||||
|
from: config.get("smtp.sender"),
|
||||||
|
subject: "VERIFIED: Account'" + user.displayName + "'",
|
||||||
|
cc: "",
|
||||||
|
bcc: "",
|
||||||
|
text: hb.compile(templates.text)(user),
|
||||||
|
html: hb.compile(templates.html)(user)
|
||||||
|
};
|
||||||
|
|
||||||
|
req.session.userId = user.id;
|
||||||
|
|
||||||
|
return new Promise(function (resolve, reject) {
|
||||||
|
let attempts = 10;
|
||||||
|
|
||||||
|
function send(envelope) {
|
||||||
|
/* Rate limit to ten per second */
|
||||||
|
transporter.sendMail(envelope, function (error, info) {
|
||||||
|
if (!error) {
|
||||||
|
console.log('Message sent: ' + info.response);
|
||||||
|
return resolve();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (attempts == 0) {
|
||||||
|
console.log("Error sending email: ", error)
|
||||||
|
return reject(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
attempts--;
|
||||||
|
console.log("Unable to send mail. Trying again in 100ms (" + attempts + " attempts remain): ", error);
|
||||||
|
setTimeout(send.bind(undefined, envelope), 100);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
send(envelope);
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
return res.redirect(308, basePath);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return next();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
/* Allow loading of the app w/out being logged in */
|
||||||
|
app.use(basePath, index);
|
||||||
|
|
||||||
|
/* Allow access to the 'users' API w/out being logged in */
|
||||||
|
const users = require("./routes/users");
|
||||||
|
app.use(basePath + "api/v1/users", users.router);
|
||||||
|
|
||||||
|
app.use(function(err, req, res, next) {
|
||||||
|
res.status(err.status || 500).json({
|
||||||
|
message: err.message,
|
||||||
|
error: {}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
/* Check authentication */
|
||||||
|
app.use(basePath, function(req, res, next) {
|
||||||
|
return users.getSessionUser(req).then(function(user) {
|
||||||
|
if (user.restriction) {
|
||||||
|
return res.status(401).send(user.restriction);
|
||||||
|
}
|
||||||
|
req.user = user;
|
||||||
|
return next();
|
||||||
|
}).catch(function(error) {
|
||||||
|
return res.status(403).send(error);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
/* Everything below here requires a successful authentication */
|
||||||
|
app.use(basePath, express.static(picturesPath, { index: false }));
|
||||||
|
|
||||||
|
app.use(basePath + "api/v1/games", require("./routes/games"));
|
||||||
|
|
||||||
|
/* Declare the "catch all" index route last; the final route is a 404 dynamic router */
|
||||||
|
app.use(basePath, index);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create HTTP server and listen for new connections
|
||||||
|
*/
|
||||||
|
app.set("port", serverConfig.port);
|
||||||
|
|
||||||
|
const server = require("http").createServer(app);
|
||||||
|
|
||||||
|
require("./db/games").then(function(db) {
|
||||||
|
gamesDB = db;
|
||||||
|
}).then(function() {
|
||||||
|
return require("./db/users").then(function(db) {
|
||||||
|
userDB = db;
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
console.log("DB connected. Opening server.");
|
||||||
|
server.listen(serverConfig.port);
|
||||||
|
}).catch(function(error) {
|
||||||
|
console.error(error);
|
||||||
|
process.exit(-1);
|
||||||
|
});
|
||||||
|
|
||||||
|
server.on("error", function(error) {
|
||||||
|
if (error.syscall !== "listen") {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
// handle specific listen errors with friendly messages
|
||||||
|
switch (error.code) {
|
||||||
|
case "EACCES":
|
||||||
|
console.error(serverConfig.port + " requires elevated privileges");
|
||||||
|
process.exit(1);
|
||||||
|
break;
|
||||||
|
case "EADDRINUSE":
|
||||||
|
console.error(serverConfig.port + " is already in use");
|
||||||
|
process.exit(1);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
server.on("listening", function() {
|
||||||
|
console.log("Listening on " + serverConfig.port);
|
||||||
|
});
|
30
ketr.ketran/server/console-line.js
Normal file
30
ketr.ketran/server/console-line.js
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
/* Monkey-patch console.log/warn/error to prefix each message with the
 * "file:line" of the caller, relative to the current working directory.
 * Enabled only when the LOG_LINE environment variable is set. */
if (process.env.LOG_LINE) {
	const cwd = process.cwd(),
		/* Escape ALL regex metacharacters in cwd.  Previously only the
		 * first "/" was replaced (String.replace with a string argument
		 * substitutes one occurrence), and characters such as "." or "+"
		 * in the path were left to act as regex wildcards. */
		cwdRe = new RegExp("^[^/]*" + cwd.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") + "/([^:]*:[0-9]*).*$");
	[ "log", "warn", "error" ].forEach(function(method) {
		console[method] = (function () {
			const orig = console[method];
			return function () {
				/* Capture a stack trace via a thrown/caught Error */
				function getErrorObject() {
					try {
						throw Error('');
					} catch (err) {
						return err;
					}
				}

				/* NOTE(review): stack frame index 4 is sensitive to the
				 * wrapper/call depth -- re-verify if this file is refactored */
				const err = getErrorObject(),
					caller_line = err.stack.split("\n")[4],
					args = [caller_line.replace(cwdRe, "$1 -")];

				/* arguments.unshift() doesn't exist... */
				for (let i = 0; i < arguments.length; i++) {
					args.push(arguments[i]);
				}

				orig.apply(this, args);
			};
		})();
	});
}
|
BIN
ketr.ketran/server/db/games.db
Normal file
BIN
ketr.ketran/server/db/games.db
Normal file
Binary file not shown.
BIN
ketr.ketran/server/db/users.db
Normal file
BIN
ketr.ketran/server/db/users.db
Normal file
Binary file not shown.
404
ketr.ketran/server/face-recognizer.js
Normal file
404
ketr.ketran/server/face-recognizer.js
Normal file
@ -0,0 +1,404 @@
|
|||||||
|
/*
|
||||||
|
* Face recognition:
|
||||||
|
* 1. For each photo, extract all faces. Store face rectangles.
|
||||||
|
* face_id unique
|
||||||
|
* photo_id foreign key
|
||||||
|
* top left bottom right
|
||||||
|
* identity_id
|
||||||
|
* distance (0 == truth; manually assigned identity)
|
||||||
|
* 2. For each face_id, create:
|
||||||
|
* /${picturesPath}face-data/${face_id % 100}/
|
||||||
|
* ${face_id}-normalized
|
||||||
|
* ${face_id}-original
|
||||||
|
* ${face_id}-data
|
||||||
|
*/
|
||||||
|
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
process.env.TZ = "Etc/GMT";
|
||||||
|
|
||||||
|
console.log("Loading face-recognizer");
|
||||||
|
|
||||||
|
require('@tensorflow/tfjs-node');
|
||||||
|
|
||||||
|
const config = require("config"),
|
||||||
|
Promise = require("bluebird"),
|
||||||
|
{ exists, mkdir, unlink } = require("./lib/util"),
|
||||||
|
faceapi = require("face-api.js"),
|
||||||
|
fs = require("fs"),
|
||||||
|
canvas = require("canvas");
|
||||||
|
|
||||||
|
const { createCanvas, Canvas, Image, ImageData } = canvas;
|
||||||
|
|
||||||
|
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
|
||||||
|
|
||||||
|
const maxConcurrency = require("os").cpus().length;
|
||||||
|
|
||||||
|
require("./console-line.js"); /* Monkey-patch console.log with line numbers */
|
||||||
|
|
||||||
|
const picturesPath = config.get("picturesPath").replace(/\/$/, "") + "/",
|
||||||
|
faceData = picturesPath + "face-data/";
|
||||||
|
|
||||||
|
let photoDB = null;
|
||||||
|
|
||||||
|
console.log("Loading pictures out of: " + picturesPath);
|
||||||
|
|
||||||
|
/* Render the face from `image` into a fixed 256x256 canvas, rotated so the
 * eye line is horizontal and scaled so the eye-to-eye span fills the frame
 * minus a margin on each side.  Landmark points 36 and 45 appear to be the
 * outer eye corners (face-api.js 68-point model) -- NOTE(review): confirm. */
function alignFromLandmarks(image, landmarks) {
	const faceMargin = 0.3;
	const width = 256;
	const height = 256;

	const leftEye = landmarks._positions[36];
	const rightEye = landmarks._positions[45];
	const dX = rightEye._x - leftEye._x;
	const dY = rightEye._y - leftEye._y;

	/* Midpoint between the two eye corners, in source-image coordinates */
	const mid = {
		x: leftEye._x + 0.5 * dX,
		y: leftEye._y + 0.5 * dY
	};

	const rotation = -Math.atan2(dY, dX);
	const cosRotation = Math.cos(rotation);
	const sinRotation = Math.sin(rotation);
	const eyeDistance = Math.sqrt(dY * dY + dX * dX);

	/* Scale so the eye span occupies the frame width minus both margins */
	const scale = width * (1.0 - 2. * faceMargin) / eyeDistance;

	/* Rotate the midpoint into the aligned coordinate system */
	const rotatedX = mid.x * cosRotation - mid.y * sinRotation;
	const rotatedY = mid.y * cosRotation + mid.x * sinRotation;
	mid.x = rotatedX;
	mid.y = rotatedY;

	const canvas = createCanvas(width, height);
	const ctx = canvas.getContext("2d");

	/* Center horizontally; place the eye line `faceMargin` below the top */
	ctx.translate(
		0.5 * width - mid.x * scale,
		0.5 * height - (height * (0.5 - faceMargin)) - mid.y * scale);
	ctx.rotate(rotation);
	ctx.scale(scale, scale);
	ctx.drawImage(image, 0, 0);

	return canvas;
}
|
||||||
|
|
||||||
|
process.stdout.write("Loading DB.");
|
||||||
|
require("./db/photos").then(function(db) {
|
||||||
|
process.stdout.write("done\n");
|
||||||
|
photoDB = db;
|
||||||
|
}).then(() => {
|
||||||
|
console.log("DB connected.");
|
||||||
|
process.stdout.write("Loading models.");
|
||||||
|
return faceapi.nets.ssdMobilenetv1.loadFromDisk('./models');
|
||||||
|
}).then(() => {
|
||||||
|
process.stdout.write(".");
|
||||||
|
return faceapi.nets.faceLandmark68Net.loadFromDisk('./models');
|
||||||
|
}).then(() => {
|
||||||
|
process.stdout.write(".");
|
||||||
|
return faceapi.nets.faceRecognitionNet.loadFromDisk('./models');
|
||||||
|
}).then(() => {
|
||||||
|
console.log("Beginning face detection scanning.");
|
||||||
|
return photoDB.sequelize.query("SELECT photos.id,photos.filename,photos.width,photos.height,albums.path " +
|
||||||
|
"FROM photos " +
|
||||||
|
"LEFT JOIN albums ON (albums.id=photos.albumId) " +
|
||||||
|
"WHERE faces=-1 AND photos.duplicate=0 AND photos.deleted=0 ORDER BY albums.path,photos.filename", {
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT,
|
||||||
|
raw: true
|
||||||
|
});
|
||||||
|
}).then((needToScan) => {
|
||||||
|
const total = needToScan.length;
|
||||||
|
let remaining = total,
|
||||||
|
processed = 0,
|
||||||
|
lastStatus = Date.now();
|
||||||
|
|
||||||
|
console.log(`${needToScan.length} photos have not had faces scanned.`);
|
||||||
|
|
||||||
|
return Promise.map(needToScan, (photo) => {
|
||||||
|
const photoPath = photo.path + photo.filename;
|
||||||
|
|
||||||
|
console.log(`Processing ${photoPath}...`);
|
||||||
|
|
||||||
|
/* Remove any existing face data for this photo */
|
||||||
|
return photoDB.sequelize.query("SELECT id FROM faces WHERE photoId=:id", {
|
||||||
|
replacements: photo,
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT,
|
||||||
|
raw: true
|
||||||
|
}).then((faces) => {
|
||||||
|
/* For each face-id, remove any face-data files, and then remove all the entries
|
||||||
|
* from the DB */
|
||||||
|
return Promise.map(faces, (face) => {
|
||||||
|
return Promise.map([ "-data.json", "-original.png" ], (suffix) => {
|
||||||
|
const id = face.id,
|
||||||
|
dataPath = faceData + (id % 100) + "/" + id + suffix;
|
||||||
|
return exists(dataPath).then((result) => {
|
||||||
|
if (result) {
|
||||||
|
console.log(`...removing ${dataPath}`);
|
||||||
|
return unlink(dataPath);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(() => {
|
||||||
|
return photoDB.sequelize.query("DELETE FROM faces WHERE photoId=:id", {
|
||||||
|
replacements: photo,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(async () => {
|
||||||
|
/* Process image for faces data */
|
||||||
|
const image = await canvas.loadImage(picturesPath + photoPath);
|
||||||
|
const detections = await faceapi.detectAllFaces(image,
|
||||||
|
new faceapi.SsdMobilenetv1Options({
|
||||||
|
minConfidence: 0.9
|
||||||
|
})
|
||||||
|
).withFaceLandmarks();
|
||||||
|
|
||||||
|
if (detections.length > 0) {
|
||||||
|
console.log(`...${detections.length} faces identified in ${photoPath}.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Promise.map(detections, async (face) => {
|
||||||
|
const detection = face.detection,
|
||||||
|
canvas = alignFromLandmarks(image, face.landmarks);
|
||||||
|
face.descriptor = await faceapi.computeFaceDescriptor(canvas);
|
||||||
|
|
||||||
|
const width = detection._box._width,
|
||||||
|
height = detection._box._height,
|
||||||
|
replacements = {
|
||||||
|
id: photo.id,
|
||||||
|
top: detection._box._y / detection._imageDims.height,
|
||||||
|
left: detection._box._x / detection._imageDims.width,
|
||||||
|
bottom: (detection._box._y + height) / detection._imageDims.height,
|
||||||
|
right: (detection._box._x + width) / detection._imageDims.width,
|
||||||
|
faceConfidence: detection._score
|
||||||
|
};
|
||||||
|
|
||||||
|
return photoDB.sequelize.query("INSERT INTO faces (photoId,top,left,bottom,right,faceConfidence) " +
|
||||||
|
"VALUES (:id,:top,:left,:bottom,:right,:faceConfidence)", {
|
||||||
|
replacements: replacements
|
||||||
|
}).spread((results, metadata) => {
|
||||||
|
return metadata.lastID;
|
||||||
|
}).then((id) => {
|
||||||
|
const path = faceData + (id % 100);
|
||||||
|
return mkdir(path).then(() => {
|
||||||
|
const dataPath = `${path}/${id}-data.json`, data = [];
|
||||||
|
console.log(`...writing descriptor data to ${dataPath}...`);
|
||||||
|
/* Confert from sparse object to dense array */
|
||||||
|
for (let i = 0; i < 128; i++) {
|
||||||
|
data.push(face.descriptor[i]);
|
||||||
|
}
|
||||||
|
fs.writeFileSync(dataPath, JSON.stringify(data));
|
||||||
|
}).then(() => {
|
||||||
|
const target = `${path}/${id}-original.png`;
|
||||||
|
console.log(`...writing aligned face crop to ${target}.`);
|
||||||
|
fs.writeFileSync(target, canvas.toBuffer("image/png", {
|
||||||
|
quality: 0.95,
|
||||||
|
chromaSubsampling: false
|
||||||
|
}));
|
||||||
|
}).catch((error) => {
|
||||||
|
console.error(error);
|
||||||
|
process.exit(-1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(() => {
|
||||||
|
return photoDB.sequelize.query("UPDATE photos SET faces=:faces WHERE id=:id", {
|
||||||
|
replacements: {
|
||||||
|
id: photo.id,
|
||||||
|
faces: detections.length
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).catch((error) => {
|
||||||
|
console.log(error);
|
||||||
|
console.warn("Skipping out on image " + photoPath + " and marking to 0 faces to prevent future scanning.");
|
||||||
|
return photoDB.sequelize.query("UPDATE photos SET faces=:faces WHERE id=:id", {
|
||||||
|
replacements: {
|
||||||
|
id: photo.id,
|
||||||
|
faces: 0
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}).then(() => {
|
||||||
|
processed++;
|
||||||
|
const now = Date.now();
|
||||||
|
if (now - lastStatus > 5000) {
|
||||||
|
const rate = Math.round(10000 * (remaining - (total - processed)) / (now - lastStatus)) / 10,
|
||||||
|
eta = Math.round((total - processed) / rate);
|
||||||
|
lastStatus = now;
|
||||||
|
remaining = total - processed;
|
||||||
|
console.log(`Processing ${rate} images per second. ${remaining} images to be processed. ETA: ${eta}s`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}, {
|
||||||
|
concurrency: maxConcurrency
|
||||||
|
});
|
||||||
|
}).then(() => {
|
||||||
|
console.log("Looking for face distances that need to be updated...");
|
||||||
|
let maxId;
|
||||||
|
|
||||||
|
return photoDB.sequelize.query("SELECT faces.id FROM faces ORDER BY faces.id DESC LIMIT 1", {
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT,
|
||||||
|
raw: true
|
||||||
|
}).then((results) => {
|
||||||
|
if (!results.length) {
|
||||||
|
console.log("...no faces exist yet to generate distances.");
|
||||||
|
maxId = 0;
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
maxId = results[0].id;
|
||||||
|
return photoDB.sequelize.query(
|
||||||
|
"SELECT faces.id,faces.lastComparedId " +
|
||||||
|
"FROM faces INNER JOIN photos ON photos.duplicate=0 AND photos.deleted=0 AND photos.id=faces.photoId " +
|
||||||
|
"WHERE faces.lastComparedId<:maxId OR faces.lastComparedId IS NULL " +
|
||||||
|
"ORDER BY faces.id ASC", {
|
||||||
|
replacements: {
|
||||||
|
maxId: maxId
|
||||||
|
},
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT,
|
||||||
|
raw: true
|
||||||
|
});
|
||||||
|
}).then((facesToUpdate) => {
|
||||||
|
console.log(`...${facesToUpdate.length} faces need distances updated.`);
|
||||||
|
console.log("---- run scanner/scanner !! ---");
|
||||||
|
return [];
|
||||||
|
if (facesToUpdate.length == 0) {
|
||||||
|
return facesToUpdate;
|
||||||
|
}
|
||||||
|
|
||||||
|
const descriptors = {};
|
||||||
|
|
||||||
|
return photoDB.sequelize.query(
|
||||||
|
"SELECT id FROM faces ORDER BY id ASC", {
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT,
|
||||||
|
raw: true
|
||||||
|
}).then((allFaces) => {
|
||||||
|
console.log(`...reading ${allFaces.length} descriptors...`);
|
||||||
|
return Promise.map(allFaces, (face) => {
|
||||||
|
const id = face.id,
|
||||||
|
dataPath = faceData + "/" + (id % 100) + "/" + id + "-data.json";
|
||||||
|
|
||||||
|
if (id in descriptors) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
return exists(dataPath).then((doesExist) => {
|
||||||
|
if (!doesExist) {
|
||||||
|
console.warn(`${dataPath} is missing!`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
descriptors[id] = JSON.parse(fs.readFileSync(dataPath));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(() => {
|
||||||
|
const total = facesToUpdate.length;
|
||||||
|
let remaining = total,
|
||||||
|
processed = 0,
|
||||||
|
lastStatus = Date.now(),
|
||||||
|
targets = [];
|
||||||
|
|
||||||
|
for (let target in descriptors) {
|
||||||
|
targets.push({ id: target, descriptor: descriptors[target] });
|
||||||
|
}
|
||||||
|
|
||||||
|
return Promise.mapSeries(facesToUpdate, (face) => {
|
||||||
|
if (!(face.id in descriptors)) {
|
||||||
|
console.warn(`...attempt to compare distance with no descriptor for ${face.id}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const faceDescriptor = descriptors[face.id];
|
||||||
|
|
||||||
|
return photoDB.sequelize.transaction((transaction) => {
|
||||||
|
return photoDB.sequelize.query(
|
||||||
|
"SELECT distance,face1Id,face2Id " +
|
||||||
|
"FROM facedistances " +
|
||||||
|
"WHERE face1Id=:id OR face2Id=:id " +
|
||||||
|
"ORDER BY face1Id ASC", {
|
||||||
|
replacements: {
|
||||||
|
id: face.id
|
||||||
|
},
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT,
|
||||||
|
raw: true,
|
||||||
|
transaction: transaction
|
||||||
|
}).then((distances) => {
|
||||||
|
return Promise.map(targets, (target) => {
|
||||||
|
/* Skip comparing to self */
|
||||||
|
if (target.id == face.id) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Only compare against newer faces */
|
||||||
|
if (face.lastComparedId && target.id <= face.lastComparedId) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const index = distances.findIndex((distance) => {
|
||||||
|
return distance.face1Id == target.id || distance.face2Id == target.id
|
||||||
|
});
|
||||||
|
|
||||||
|
if (index != -1) {
|
||||||
|
/* A distance has already been calculated between face and target */
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const distance = faceapi.euclideanDistance(faceDescriptor, target.descriptor);
|
||||||
|
|
||||||
|
/* If the distance > 0.6, we don't want to store this in the DB */
|
||||||
|
if (distance > 0.6) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (distance < 0.4) {
|
||||||
|
process.stdout.write(".");
|
||||||
|
// console.log(`Face ${face.id} and ${target.id} have a distance of: ${distance}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return photoDB.sequelize.query(
|
||||||
|
"INSERT INTO facedistances (face1Id,face2Id,distance) " +
|
||||||
|
"VALUES (:first,:second,:distance)", {
|
||||||
|
replacements: {
|
||||||
|
first: Math.min(face.id, target.id),
|
||||||
|
second: Math.max(face.id, target.id),
|
||||||
|
distance: distance
|
||||||
|
},
|
||||||
|
transaction: transaction
|
||||||
|
});
|
||||||
|
}, {
|
||||||
|
concurrency: maxConcurrency
|
||||||
|
});
|
||||||
|
}).then(() => {
|
||||||
|
return photoDB.sequelize.query(
|
||||||
|
"UPDATE faces SET lastComparedId=:lastId WHERE id=:id", {
|
||||||
|
replacements: {
|
||||||
|
lastId: maxId,
|
||||||
|
id: face.id
|
||||||
|
},
|
||||||
|
transaction: transaction
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(() => {
|
||||||
|
processed++;
|
||||||
|
const now = Date.now();
|
||||||
|
if (now - lastStatus > 5000) {
|
||||||
|
const rate = Math.round(10000 * (remaining - (total - processed)) / (now - lastStatus)) / 10,
|
||||||
|
eta = Math.round((total - processed) / rate);
|
||||||
|
lastStatus = now;
|
||||||
|
remaining = total - processed;
|
||||||
|
console.log(`\nProcessing ${rate} faces per second. ${remaining} faces to be processed. ETA: ${eta}s`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(() => {
|
||||||
|
console.log("Face detection scanning completed.");
|
||||||
|
}).catch((error) => {
|
||||||
|
console.error(error);
|
||||||
|
process.exit(-1);
|
||||||
|
});
|
281
ketr.ketran/server/face.js
Normal file
281
ketr.ketran/server/face.js
Normal file
@ -0,0 +1,281 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
process.env.TZ = "Etc/GMT";
|
||||||
|
|
||||||
|
require('@tensorflow/tfjs-node');
|
||||||
|
|
||||||
|
let photoDB = null;
|
||||||
|
|
||||||
|
const config = require("config"),
|
||||||
|
Promise = require("bluebird"),
|
||||||
|
{ exists, mkdir, unlink } = require("./lib/util"),
|
||||||
|
faceapi = require("face-api.js"),
|
||||||
|
fs = require("fs"),
|
||||||
|
canvas = require("canvas");
|
||||||
|
|
||||||
|
const { createCanvas, Canvas, Image, ImageData } = canvas;
|
||||||
|
|
||||||
|
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
|
||||||
|
|
||||||
|
const maxConcurrency = require("os").cpus().length;
|
||||||
|
|
||||||
|
require("./console-line.js"); /* Monkey-patch console.log with line numbers */
|
||||||
|
|
||||||
|
const picturesPath = config.get("picturesPath").replace(/\/$/, "") + "/",
|
||||||
|
faceData = picturesPath + "face-data/";
|
||||||
|
|
||||||
|
/* Render a 512x512 canvas containing the face from `image`, rotated so the
 * eye line is horizontal and scaled so the eyes span the central band of
 * the output (faceMargin on each side).
 *
 * image         -- a canvas-compatible image to sample from
 * landmarks     -- face-api.js 68-point landmark result; positions 36 and 45
 *                  are used as the two outer eye corners (presumably per the
 *                  68-point model -- TODO confirm against face-api.js docs)
 * drawLandmarks -- when true, stroke the (untransformed) landmark polyline
 *                  in red over the result for debugging
 *
 * Returns the new canvas. */
function alignFromLandmarks(image, landmarks, drawLandmarks) {
    const faceMargin = 0.45,
        width = 512, height = 512,
        /* Vector from one eye corner to the other. */
        dY = landmarks._positions[45]._y - landmarks._positions[36]._y,
        dX = landmarks._positions[45]._x - landmarks._positions[36]._x,
        /* Midpoint between the eyes, in source-image coordinates. */
        mid = {
            x: landmarks._positions[36]._x + 0.5 * dX,
            y: landmarks._positions[36]._y + 0.5 * dY
        },
        /* Negative angle of the eye line: rotating by this levels the eyes. */
        rotation = -Math.atan2(dY, dX),
        cosRotation = Math.cos(rotation),
        sinRotation = Math.sin(rotation),
        eyeDistance = Math.sqrt(dY * dY + dX * dX),
        /* Scale so the eye span fills width * (1 - 2*faceMargin) pixels. */
        scale = width * (1.0 - 2. * faceMargin) / eyeDistance,
        canvas = createCanvas(width, height),
        ctx = canvas.getContext("2d");

    /* Where the eye midpoint lands after the rotation is applied. */
    const prime = {
        x: mid.x * cosRotation - mid.y * sinRotation,
        y: mid.y * cosRotation + mid.x * sinRotation
    };

    mid.x = prime.x;
    mid.y = prime.y;

    /* Translate so the (rotated, scaled) eye midpoint sits centered
     * horizontally and faceMargin down from the top vertically, then
     * rotate/scale and draw the source image through that transform. */
    ctx.translate(
        0.5 * width - mid.x * scale,
        0.5 * height - (height * (0.5 - faceMargin)) - mid.y * scale);
    ctx.rotate(rotation);
    ctx.scale(scale, scale);
    ctx.drawImage(image, 0, 0);

    if (drawLandmarks) {
        /* Debug overlay: connect all 68 landmark points in order. Points are
         * in source coordinates, so they are drawn through the same
         * transform as the image. */
        ctx.strokeStyle = "red";
        ctx.strokeWidth = "1";
        ctx.beginPath();
        landmarks._positions.forEach((point, index) => {
            if (index == 0) {
                ctx.moveTo(point._x, point._y);
            } else {
                ctx.lineTo(point._x, point._y);
            }
        });
        ctx.stroke();
    }

    return canvas;
}
|
||||||
|
|
||||||
|
process.stdout.write("Loading DB.");
|
||||||
|
require("./db/photos").then(function(db) {
|
||||||
|
process.stdout.write("done\n");
|
||||||
|
photoDB = db;
|
||||||
|
}).then(() => {
|
||||||
|
console.log("DB connected.");
|
||||||
|
process.stdout.write("Loading models.");
|
||||||
|
return faceapi.nets.ssdMobilenetv1.loadFromDisk('./models');
|
||||||
|
}).then(() => {
|
||||||
|
process.stdout.write(".");
|
||||||
|
return faceapi.nets.faceLandmark68Net.loadFromDisk('./models');
|
||||||
|
}).then(() => {
|
||||||
|
process.stdout.write(".");
|
||||||
|
return faceapi.nets.faceRecognitionNet.loadFromDisk('./models');
|
||||||
|
}).then(async () => {
|
||||||
|
process.stdout.write(".done\n");
|
||||||
|
|
||||||
|
if (process.argv[0].match(/node/)) {
|
||||||
|
process.argv.shift(); /* node */
|
||||||
|
}
|
||||||
|
process.argv.shift(); /* script name */
|
||||||
|
|
||||||
|
return Promise.resolve().then(() => {
|
||||||
|
if (process.argv.length != 0) {
|
||||||
|
return process.argv;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* If no parameters provided, scan all faces to create image crops */
|
||||||
|
return photoDB.sequelize.query("SELECT id FROM faces ORDER BY id ASC", {
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT,
|
||||||
|
raw: true
|
||||||
|
}).then((results) => {
|
||||||
|
return results.map(result => result.id);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then((args) => {
|
||||||
|
const faces = [];
|
||||||
|
|
||||||
|
console.log(`Scanning ${args.length} faces.`);
|
||||||
|
return Promise.map(args, (arg) => {
|
||||||
|
const file = arg;
|
||||||
|
let id = parseInt(arg);
|
||||||
|
|
||||||
|
let loader;
|
||||||
|
|
||||||
|
if (id == file) {
|
||||||
|
/* This is a face id */
|
||||||
|
console.log(`Looking up face-id ${id}...`);
|
||||||
|
loader = photoDB.sequelize.query(
|
||||||
|
"SELECT albums.path,photos.filename,photos.width,photos.height,faces.* " +
|
||||||
|
"FROM faces,photos,albums " +
|
||||||
|
"WHERE photos.id=faces.photoId " +
|
||||||
|
"AND albums.id=photos.albumId " +
|
||||||
|
"AND faces.id=:id", {
|
||||||
|
replacements: {
|
||||||
|
id: id
|
||||||
|
},
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT,
|
||||||
|
raw: true
|
||||||
|
}).then((results) => {
|
||||||
|
if (results.length != 1) {
|
||||||
|
console.error(`...error. No face-id found: ${id}.\n`);
|
||||||
|
process.exit(-1);
|
||||||
|
}
|
||||||
|
const photo = results[0];
|
||||||
|
console.log(`...loading ${photo.filename}`);
|
||||||
|
|
||||||
|
const file = photo.path + photo.filename;
|
||||||
|
return canvas.loadImage(picturesPath + file).then(async (image) => {
|
||||||
|
const detectors = [ {
|
||||||
|
detection: {
|
||||||
|
_box: {
|
||||||
|
_x: photo.left * photo.width,
|
||||||
|
_y: photo.top * photo.height,
|
||||||
|
_width: (photo.right - photo.left) * photo.width,
|
||||||
|
_height: (photo.bottom - photo.top) * photo.height,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
descriptor: JSON.parse(fs.readFileSync(faceData + (id % 100) + "/" + id + "-data.json"))
|
||||||
|
} ];
|
||||||
|
return [ file, image, detectors ];
|
||||||
|
});
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
/* This is a file */
|
||||||
|
console.log(`Loading ${file}...`);
|
||||||
|
id = undefined;
|
||||||
|
loader = canvas.loadImage(picturesPath + file).then(async (image) => {
|
||||||
|
const detectors = await faceapi.detectAllFaces(image,
|
||||||
|
new faceapi.SsdMobilenetv1Options({
|
||||||
|
minConfidence: 0.9
|
||||||
|
})
|
||||||
|
).withFaceLandmarks();
|
||||||
|
|
||||||
|
await detectors.forEach(async (detector, index) => {
|
||||||
|
const canvas = alignFromLandmarks(image, detector.landmarks, false);
|
||||||
|
fs.writeFileSync(`rotation-pre-${index}.png`, canvas.toBuffer("image/png", {
|
||||||
|
quality: 0.95,
|
||||||
|
chromaSubsampling: false
|
||||||
|
}));
|
||||||
|
const detected = await faceapi.detectSingleFace(canvas,
|
||||||
|
new faceapi.SsdMobilenetv1Options({
|
||||||
|
minConfidence: 0.1
|
||||||
|
})
|
||||||
|
).withFaceLandmarks();
|
||||||
|
const descriptor = await faceapi.computeFaceDescriptor(canvas);
|
||||||
|
console.log(`Processing face ${index}...`);
|
||||||
|
console.log(`...pre aligned score: ${detector.detection._score}`);
|
||||||
|
if (!detected) {
|
||||||
|
console.log("No face found in re-scaled and aligned image");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
console.log(`...post-aligned score: ${detected.detection._score}`);
|
||||||
|
const newCanvas = alignFromLandmarks(canvas, detected.landmarks, true);
|
||||||
|
|
||||||
|
fs.writeFileSync(`rotation-post-${index}.png`, newCanvas.toBuffer("image/png", {
|
||||||
|
quality: 0.95,
|
||||||
|
chromaSubsampling: false
|
||||||
|
}));
|
||||||
|
|
||||||
|
console.log(`Wrote rotation-${index}.png`);
|
||||||
|
|
||||||
|
const data = [];
|
||||||
|
/* Confert from sparse object to dense array */
|
||||||
|
for (let i = 0; i < 128; i++) {
|
||||||
|
data.push(descriptor[i]);
|
||||||
|
}
|
||||||
|
detector.descriptor = data;
|
||||||
|
});
|
||||||
|
|
||||||
|
return [ file, image, detectors ];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return loader.then((results) => {
|
||||||
|
const filepath = results[0],
|
||||||
|
image = results[1],
|
||||||
|
detectors = results[2];
|
||||||
|
|
||||||
|
process.stdout.write(`${detectors.length} faces.\n`);
|
||||||
|
|
||||||
|
return Promise.map(detectors, (face, index) => {
|
||||||
|
faces.push({
|
||||||
|
filepath: filepath,
|
||||||
|
index: index,
|
||||||
|
descriptor: face.descriptor
|
||||||
|
})
|
||||||
|
|
||||||
|
/* If this is a face-id, output the -original.png
|
||||||
|
* meta-data file */
|
||||||
|
if (!id) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const path = "face-data/" + (id % 100),
|
||||||
|
target = `${path}/${id}-original.png`,
|
||||||
|
box = face.detection._box,
|
||||||
|
aspect = box._width / box._height,
|
||||||
|
dx = (aspect > 1.0) ? 200 : (200 * aspect),
|
||||||
|
dy = (aspect < 1.0) ? 200 : (200 / aspect);
|
||||||
|
|
||||||
|
return exists(target).then((doesExist) => {
|
||||||
|
if (doesExist) {
|
||||||
|
console.log(`...${target} already exists.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const canvas = createCanvas(200, 200),
|
||||||
|
ctx = canvas.getContext('2d');
|
||||||
|
|
||||||
|
ctx.fillStyle = "rgba(0, 0, 0, 0)";
|
||||||
|
ctx.fillRect(0, 0, 200, 200);
|
||||||
|
ctx.drawImage(image, box._x, box._y, box._width, box._height,
|
||||||
|
Math.floor((200 - dx) * 0.5),
|
||||||
|
Math.floor((200 - dy) * 0.5), dx, dy);
|
||||||
|
|
||||||
|
console.log(`...writing to ${target}.`);
|
||||||
|
|
||||||
|
return mkdir(path).then(() => {
|
||||||
|
fs.writeFileSync(picturesPath + target, canvas.toBuffer("image/png", {
|
||||||
|
quality: 0.95,
|
||||||
|
chromaSubsampling: false
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}, {
|
||||||
|
concurrency: maxConcurrency
|
||||||
|
}).then(() => {
|
||||||
|
console.log("Face detection scanning completed.");
|
||||||
|
if (0) faces.forEach((a, i) => {
|
||||||
|
faces.forEach((b, j) => {
|
||||||
|
if (i == j) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const distance = faceapi.euclideanDistance(a.descriptor, b.descriptor);
|
||||||
|
if (distance < 0.4) {
|
||||||
|
console.log(`${a.filepath}.${a.index} is similar to ${b.filepath}.${b.index}: ${distance}`);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).catch((error) => {
|
||||||
|
console.error(error);
|
||||||
|
process.exit(-1);
|
||||||
|
});
|
137
ketr.ketran/server/http-server.js
Normal file
137
ketr.ketran/server/http-server.js
Normal file
@ -0,0 +1,137 @@
|
|||||||
|
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
const express = require('express'),
|
||||||
|
morgan = require('morgan'),
|
||||||
|
cookieParser = require('cookie-parser'),
|
||||||
|
bodyParser = require('body-parser'),
|
||||||
|
http = require('http'),
|
||||||
|
config = require('config'),
|
||||||
|
app = express(),
|
||||||
|
{ timestamp } = require('./timestamp'),
|
||||||
|
fs = require('fs'),
|
||||||
|
util = require('util'),
|
||||||
|
mkdir = util.promisify(fs.mkdir),
|
||||||
|
unlink = util.promisify(fs.unlink),
|
||||||
|
path = require('path'),
|
||||||
|
fetch = require('node-fetch'),
|
||||||
|
Promise = require('bluebird'),
|
||||||
|
url = require('url'),
|
||||||
|
{ exec } = require('child_process');
|
||||||
|
|
||||||
|
/* Make node-fetch produce bluebird promises, matching the rest of the app. */
fetch.Promise = Promise;

/* Normalize configured paths to exactly one leading and one trailing "/". */
const basePath = "/" + config.get("http.base").replace(/^\/*/, "").replace(/\/*$/, "") + "/",
    dataPath = "/" + config.get("dataPath").replace(/^\/*/, "").replace(/\/*$/, "") + "/";

/* Upstream credentials are mandatory; refuse to start without them rather
 * than fail on the first download. */
if (!config.has("auth.idsid") || !config.has("auth.password")) {
    console.error("You need to provide credentials to connect to ubit-gfx in config/local.json");
    console.error(' "auth": { "idsid": "USERNAME", "password": "PASSWORD" }');
    process.exit(-1);
}

/* Request logging in Apache "common" format. */
app.use(morgan('common'));

/* JSON body parsing; the raw bytes are kept on req.rawBody -- presumably
 * for signature verification elsewhere (nothing in this file reads it --
 * TODO confirm against callers). */
app.use(bodyParser.json({
    verify: function(req,res,buf) {
        req.rawBody = buf;
    }
}));

app.use(bodyParser.urlencoded({
    extended: false
}));
app.use(cookieParser());
|
||||||
|
|
||||||
|
/* Routes:
 * /api/v1/publish    Publish content to repository
 */

/* Catch-all GET: this service only accepts POSTs, so answer with usage. */
app.get("/*", (req, res, next) => {
    return res.status(400).send({ usage: `POST ${basePath}api/v1/publish/:distro/:releaseStream/:url` });
});
|
||||||
|
|
||||||
|
/* HTTP Basic auth value for fetching artifacts from the upstream server.
 * Buffer.from() replaces the deprecated/unsafe `new Buffer()` constructor. */
const auth = Buffer.from(config.get("auth.idsid") + ":" + config.get("auth.password"), 'ascii').toString('base64');

/* POST /api/v1/publish/:distro/:releaseStream/:url
 *
 * Download the package at :url (authenticated with the configured
 * credentials) into <dataPath>/<distro>-<releaseStream>/, then run
 * update-repository.sh to fold it into the aptly repository. Responds:
 *   400 -- unparsable URL, or '/' in distro/releaseStream
 *   409 -- file already present
 *   500 -- download, mkdir, or repository-update failure
 *   200 -- success, with the script's stdout/stderr */
app.post(basePath + 'api/v1/publish/:distro/:releaseStream/:url', function (req, res, next) {
    const distro = req.params.distro,
        releaseStream = req.params.releaseStream,
        remoteUrl = req.params.url;
    let filename;

    try {
        filename = path.basename(url.parse(remoteUrl).pathname);
    } catch (error) {
        return res.status(400).send({ error: `Unparsable URL: ${remoteUrl}` });
    }

    /* Path components must not escape the target directory. */
    if (distro.match(/\//) || releaseStream.match(/\//)) {
        return res.status(400).send({ error: "Neither distro nor releaseStream may contain '/'" });
    }

    console.log(`POST publish/${distro}-${releaseStream}/${filename}`);

    const filepath = `${dataPath}${distro}-${releaseStream}`;

    /* mkdir is promisified -- the original passed the work as a third
     * (callback) argument, which the promisified wrapper never invokes;
     * chain it with .then() so the download actually runs. */
    return mkdir(filepath, { recursive: true }).then(() => {
        const pathname = `${filepath}/${filename}`;
        if (fs.existsSync(pathname)) {
            return res.status(409).send({ message: `'${distro}-${releaseStream}/${filename}' already exists.` });
        }

        return fetch(remoteUrl, {
            method: "GET",
            headers: {
                'Authorization': `Basic ${auth}`
            }
        }).then(result => {
            const dest = fs.createWriteStream(pathname);
            dest.on('finish', () => {
                /* Once the file is fully written, fold it into the repo;
                 * on failure remove the partial artifact so a retry can
                 * succeed. */
                exec(`./update-repository.sh ${distro}-${releaseStream}`, {
                    cwd: "..",
                    shell: "/bin/bash"
                }, (error, stdout, stderr) => {
                    if (error) {
                        return unlink(pathname).catch(() => {
                            console.error(`Unable to remove ${pathname} after update-repository.sh failed.`);
                        }).then(() => {
                            return res.status(500).send({ message: "Error while updating aptly database.", error: error, stderr: stderr, stdout: stdout });
                        });
                    }
                    return res.status(200).send({ message: "OK", stdout: stdout || "", stderr: stderr || "" });
                });
            });
            result.body.pipe(dest);
        }).catch((error) => {
            const message = `Unable to download ${remoteUrl}: ${error}`;
            console.error(message);
            return res.status(500).send({ message: message });
        });
    }).catch((error) => {
        const message = `Unable to mkdir ${filepath}: ${error}`;
        console.error(message);
        return res.status(500).send({ message: message });
    });
});
|
||||||
|
|
||||||
|
app.post("/*", (req, res, next) => {
|
||||||
|
/* */
|
||||||
|
return res.status(400).send({ usage: `POST /${basePath}/api/v1/publish/:distro/:releaseStream/:url` });
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
/* Create and start the HTTP server on the configured port (default 6543),
 * and export it for use by callers/tests. */
const server = http.createServer(app),
    port = config.has("port") ? config.get("port") : 6543;

server.listen(port);
server.on('listening', function() {
    let addr = server.address();
    /* address() returns a string for UNIX-domain pipes, an object for TCP. */
    let bind = typeof addr === 'string'
        ? 'pipe ' + addr
        : 'port ' + addr.port;
    console.log(timestamp() + ` Now serving ${basePath} on ${bind}`);
});

module.exports = server;
|
183
ketr.ketran/server/lib/mail.js
Normal file
183
ketr.ketran/server/lib/mail.js
Normal file
@ -0,0 +1,183 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const config = require("config"),
|
||||||
|
crypto = require("crypto"),
|
||||||
|
hb = require("handlebars");
|
||||||
|
|
||||||
|
/* Handlebars e-mail templates, each with an "html" and a plain "text"
 * variant. Placeholders: {{username}}, {{mail}}, {{url}}, {{secret}}.
 * Fix: the password text template contained a stray "</p>" HTML tag
 * ("Sincerely,</p>") inside the plain-text body; removed. */
const templates = {
    "verify": {
        "html": [
            "<p>Hello {{username}},</p>",
            "",
            "<p>Welcome to <b>ketrenos.com</b>. You are almost done creating your account. ",
            "Before you can access the system, you must verify your email address.</p>",
            "",
            "<p>To do so, simply access this link:</p>",
            "<p><a href=\"{{url}}{{secret}}\">VERIFY {{mail}} ADDRESS</a></p>",
            "",
            "<p>Sincerely,</p>",
            "<p>James</p>"
        ].join("\n"),
        "text": [
            "Hello {{username}},",
            "",
            "Welcome to ketrenos.com. You are almost done creating your account. ",
            "Before you can access the system, you must verify your email address.",
            "",
            "To do so, simply access this link:",
            "",
            "{{url}}{{secret}}",
            "",
            "Sincerely,",
            "James"
        ].join("\n")
    },
    "password": {
        "html": [
            "<p>Hello {{username}},</p>",
            "",
            "<p>You changed your password on <b>ketrenos.com</b>.</p>",
            "",
            "<p>Sincerely,</p>",
            "<p>James</p>"
        ].join("\n"),
        "text": [
            "Hello {{username}},",
            "",
            "You changed your password on ketrenos.com.",
            "",
            "Sincerely,",
            "James"
        ].join("\n")
    }
};
|
||||||
|
|
||||||
|
/* Send the account-verification e-mail to a freshly created user.
 *
 * Flow: delete any prior 'account-setup' authentication rows for the user,
 * generate a fresh 16-byte hex secret, record it in the authentications
 * table, then mail the verification link ({{url}}{{secret}}) using the
 * transporter configured on the Express app. Delivery is retried up to 10
 * times at 100ms intervals. All failures are logged and swallowed (the
 * returned promise always resolves).
 *
 * userDB -- sequelize handle for the user database
 * req    -- Express request; supplies the transporter and base URL
 * user   -- row with id, displayName, and mail */
const sendVerifyMail = function(userDB, req, user) {
    return userDB.sequelize.query("DELETE FROM authentications WHERE userId=:id AND type='account-setup'", {
        replacements: {
            id: user.id
        }
    }).then(function() {
        /* Promisify crypto.randomBytes to obtain a 32-char hex secret. */
        return new Promise(function(resolve, reject) {
            crypto.randomBytes(16, function(error, buffer) {
                if (error) {
                    return reject(error);
                }
                return resolve(buffer.toString('hex'));
            });
        });
    }).then(function(secret) {
        /* Persist the secret so the verification link can be validated. */
        return userDB.sequelize.query(
            "INSERT INTO authentications " +
            "(userId,issued,key,type) " +
            "VALUES (:userId,CURRENT_TIMESTAMP,:key,'account-setup')", {
            replacements: {
                key: secret,
                userId: user.id
            }
        }).then(function() {
            return secret;
        }).catch(function(error) {
            console.log(error);
            throw error;
        });
    }).then(function(secret) {
        const transporter = req.app.get("transporter");
        if (!transporter) {
            console.log("Not sending VERIFY email; SMTP not configured.");
            return;
        }

        /* Template data and SMTP envelope; admin is bcc'd on every mail. */
        let data = {
            username: user.displayName,
            mail: user.mail,
            secret: secret,
            url: req.protocol + "://" + req.hostname + req.app.get("basePath")
        }, envelope = {
            to: data.mail,
            from: config.get("smtp.sender"),
            subject: "Request to ketrenos.com create account for '" + data.username + "'",
            cc: "",
            bcc: config.get("admin.mail"),
            text: hb.compile(templates.verify.text)(data),
            html: hb.compile(templates.verify.html)(data)
        };
        return new Promise(function (resolve, reject) {
            let attempts = 10;

            /* Attempt delivery; on failure retry every 100ms until the
             * attempt budget is exhausted. */
            function send(envelope) {
                /* Rate limit to ten per second */
                transporter.sendMail(envelope, function (error, info) {
                    if (!error) {
                        console.log('Message sent: ' + info.response);
                        return resolve();
                    }

                    if (attempts == 0) {
                        console.log("Error sending email: ", error);
                        return reject(error);
                    }

                    attempts--;
                    console.log("Unable to send mail. Trying again in 100ms (" + attempts + " attempts remain): ", error);
                    setTimeout(send.bind(undefined, envelope), 100);
                });
            }

            send(envelope);
        });
    }).catch(function(error) {
        /* Best-effort: log and swallow so account creation is not aborted. */
        console.log("Error creating account: ", error);
    });
};
|
||||||
|
|
||||||
|
/* Notify a user (admin bcc'd) that their password was changed.
 *
 * userDB -- unused here; kept for signature parity with sendVerifyMail
 * req    -- Express request; supplies the transporter and base URL
 * user   -- row with displayName and mail
 *
 * Returns a promise that resolves when the mail is delivered (retrying up
 * to 10 times at 100ms intervals) or rejects when all attempts fail;
 * returns undefined when SMTP is not configured. */
const sendPasswordChangedMail = function(userDB, req, user) {
    const transporter = req.app.get("transporter");
    if (!transporter) {
        /* Fixed copy/paste from sendVerifyMail: this is the password-changed
         * notification, not the VERIFY mail. */
        console.log("Not sending PASSWORD CHANGED email; SMTP not configured.");
        return;
    }

    /* Template data and SMTP envelope. */
    let data = {
        username: user.displayName,
        mail: user.mail,
        url: req.protocol + "://" + req.hostname + req.app.get("basePath")
    }, envelope = {
        to: data.mail,
        from: config.get("smtp.sender"),
        subject: "Password changed on ketrenos.com for '" + data.username + "'",
        cc: "",
        bcc: config.get("admin.mail"),
        text: hb.compile(templates.password.text)(data),
        html: hb.compile(templates.password.html)(data)
    };
    return new Promise(function (resolve, reject) {
        let attempts = 10;

        /* Attempt delivery; on failure retry every 100ms until the attempt
         * budget is exhausted. */
        function send(envelope) {
            transporter.sendMail(envelope, function (error, info) {
                if (!error) {
                    console.log('Message sent: ' + info.response);
                    return resolve();
                }

                if (attempts == 0) {
                    console.log("Error sending email: ", error);
                    return reject(error);
                }

                attempts--;
                console.log("Unable to send mail. Trying again in 100ms (" + attempts + " attempts remain): ", error);
                setTimeout(send.bind(undefined, envelope), 100);
            });
        }

        send(envelope);
    });
};
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
sendVerifyMail,
|
||||||
|
sendPasswordChangedMail
|
||||||
|
}
|
512
ketr.ketran/server/lib/pascha-dates.js
Normal file
512
ketr.ketran/server/lib/pascha-dates.js
Normal file
@ -0,0 +1,512 @@
|
|||||||
|
/*
|
||||||
|
EasterB - What date does Easter Sunday come on in a given year?
|
||||||
|
Version 1.23, last revised: 2007/07/22
|
||||||
|
Copyright (c) 1981-2007 by author: Harry J. Smith,
|
||||||
|
19628 Via Monte Dr., Saratoga CA 95070. All rights reserved.
|
||||||
|
|
||||||
|
Will write file: EasterB.Out for years 1875 - 2124
|
||||||
|
|
||||||
|
Gregorian Calendar
|
||||||
|
Easter Sunday: 1875 3/28 Western 4/25 Orthodox (April 13 Julian date)
|
||||||
|
Easter Sunday: 1876 4/16 Western Same Orthodox (April 4 Julian date)
|
||||||
|
Easter Sunday: 1877 4/ 1 Western 4/ 8 Orthodox (March 27 Julian date)
|
||||||
|
Easter Sunday: 1878 4/21 Western 4/28 Orthodox (April 16 Julian date)
|
||||||
|
Easter Sunday: 1879 4/13 Western Same Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 1880 3/28 Western 5/ 2 Orthodox (April 20 Julian date)
|
||||||
|
Easter Sunday: 1881 4/17 Western 4/24 Orthodox (April 12 Julian date)
|
||||||
|
Easter Sunday: 1882 4/ 9 Western Same Orthodox (March 28 Julian date)
|
||||||
|
Easter Sunday: 1883 3/25 Western 4/29 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 1884 4/13 Western 4/20 Orthodox (April 8 Julian date)
|
||||||
|
Easter Sunday: 1885 4/ 5 Western Same Orthodox (March 24 Julian date)
|
||||||
|
Easter Sunday: 1886 4/25 Western Same Orthodox (April 13 Julian date)
|
||||||
|
Easter Sunday: 1887 4/10 Western 4/17 Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 1888 4/ 1 Western 5/ 6 Orthodox (April 24 Julian date)
|
||||||
|
Easter Sunday: 1889 4/21 Western Same Orthodox (April 9 Julian date)
|
||||||
|
Easter Sunday: 1890 4/ 6 Western 4/13 Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 1891 3/29 Western 5/ 3 Orthodox (April 21 Julian date)
|
||||||
|
Easter Sunday: 1892 4/17 Western Same Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 1893 4/ 2 Western 4/ 9 Orthodox (March 28 Julian date)
|
||||||
|
Easter Sunday: 1894 3/25 Western 4/29 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 1895 4/14 Western Same Orthodox (April 2 Julian date)
|
||||||
|
Easter Sunday: 1896 4/ 5 Western Same Orthodox (March 24 Julian date)
|
||||||
|
Easter Sunday: 1897 4/18 Western 4/25 Orthodox (April 13 Julian date)
|
||||||
|
Easter Sunday: 1898 4/10 Western 4/17 Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 1899 4/ 2 Western 4/30 Orthodox (April 18 Julian date)
|
||||||
|
Easter Sunday: 1900 4/15 Western 4/22 Orthodox (April 9 Julian date)
|
||||||
|
Easter Sunday: 1901 4/ 7 Western 4/14 Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 1902 3/30 Western 4/27 Orthodox (April 14 Julian date)
|
||||||
|
Easter Sunday: 1903 4/12 Western 4/19 Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 1904 4/ 3 Western 4/10 Orthodox (March 28 Julian date)
|
||||||
|
Easter Sunday: 1905 4/23 Western 4/30 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 1906 4/15 Western Same Orthodox (April 2 Julian date)
|
||||||
|
Easter Sunday: 1907 3/31 Western 5/ 5 Orthodox (April 22 Julian date)
|
||||||
|
Easter Sunday: 1908 4/19 Western 4/26 Orthodox (April 13 Julian date)
|
||||||
|
Easter Sunday: 1909 4/11 Western Same Orthodox (March 29 Julian date)
|
||||||
|
Easter Sunday: 1910 3/27 Western 5/ 1 Orthodox (April 18 Julian date)
|
||||||
|
Easter Sunday: 1911 4/16 Western 4/23 Orthodox (April 10 Julian date)
|
||||||
|
Easter Sunday: 1912 4/ 7 Western Same Orthodox (March 25 Julian date)
|
||||||
|
Easter Sunday: 1913 3/23 Western 4/27 Orthodox (April 14 Julian date)
|
||||||
|
Easter Sunday: 1914 4/12 Western 4/19 Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 1915 4/ 4 Western Same Orthodox (March 22 Julian date)
|
||||||
|
Easter Sunday: 1916 4/23 Western Same Orthodox (April 10 Julian date)
|
||||||
|
Easter Sunday: 1917 4/ 8 Western 4/15 Orthodox (April 2 Julian date)
|
||||||
|
Easter Sunday: 1918 3/31 Western 5/ 5 Orthodox (April 22 Julian date)
|
||||||
|
Easter Sunday: 1919 4/20 Western Same Orthodox (April 7 Julian date)
|
||||||
|
Easter Sunday: 1920 4/ 4 Western 4/11 Orthodox (March 29 Julian date)
|
||||||
|
Easter Sunday: 1921 3/27 Western 5/ 1 Orthodox (April 18 Julian date)
|
||||||
|
Easter Sunday: 1922 4/16 Western Same Orthodox (April 3 Julian date)
|
||||||
|
Easter Sunday: 1923 4/ 1 Western 4/ 8 Orthodox (March 26 Julian date)
|
||||||
|
Easter Sunday: 1924 4/20 Western 4/27 Orthodox (April 14 Julian date)
|
||||||
|
Easter Sunday: 1925 4/12 Western 4/19 Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 1926 4/ 4 Western 5/ 2 Orthodox (April 19 Julian date)
|
||||||
|
Easter Sunday: 1927 4/17 Western 4/24 Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 1928 4/ 8 Western 4/15 Orthodox (April 2 Julian date)
|
||||||
|
Easter Sunday: 1929 3/31 Western 5/ 5 Orthodox (April 22 Julian date)
|
||||||
|
Easter Sunday: 1930 4/20 Western Same Orthodox (April 7 Julian date)
|
||||||
|
Easter Sunday: 1931 4/ 5 Western 4/12 Orthodox (March 30 Julian date)
|
||||||
|
Easter Sunday: 1932 3/27 Western 5/ 1 Orthodox (April 18 Julian date)
|
||||||
|
Easter Sunday: 1933 4/16 Western Same Orthodox (April 3 Julian date)
|
||||||
|
Easter Sunday: 1934 4/ 1 Western 4/ 8 Orthodox (March 26 Julian date)
|
||||||
|
Easter Sunday: 1935 4/21 Western 4/28 Orthodox (April 15 Julian date)
|
||||||
|
Easter Sunday: 1936 4/12 Western Same Orthodox (March 30 Julian date)
|
||||||
|
Easter Sunday: 1937 3/28 Western 5/ 2 Orthodox (April 19 Julian date)
|
||||||
|
Easter Sunday: 1938 4/17 Western 4/24 Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 1939 4/ 9 Western Same Orthodox (March 27 Julian date)
|
||||||
|
Easter Sunday: 1940 3/24 Western 4/28 Orthodox (April 15 Julian date)
|
||||||
|
Easter Sunday: 1941 4/13 Western 4/20 Orthodox (April 7 Julian date)
|
||||||
|
Easter Sunday: 1942 4/ 5 Western Same Orthodox (March 23 Julian date)
|
||||||
|
Easter Sunday: 1943 4/25 Western Same Orthodox (April 12 Julian date)
|
||||||
|
Easter Sunday: 1944 4/ 9 Western 4/16 Orthodox (April 3 Julian date)
|
||||||
|
Easter Sunday: 1945 4/ 1 Western 5/ 6 Orthodox (April 23 Julian date)
|
||||||
|
Easter Sunday: 1946 4/21 Western Same Orthodox (April 8 Julian date)
|
||||||
|
Easter Sunday: 1947 4/ 6 Western 4/13 Orthodox (March 31 Julian date)
|
||||||
|
Easter Sunday: 1948 3/28 Western 5/ 2 Orthodox (April 19 Julian date)
|
||||||
|
Easter Sunday: 1949 4/17 Western 4/24 Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 1950 4/ 9 Western Same Orthodox (March 27 Julian date)
|
||||||
|
Easter Sunday: 1951 3/25 Western 4/29 Orthodox (April 16 Julian date)
|
||||||
|
Easter Sunday: 1952 4/13 Western 4/20 Orthodox (April 7 Julian date)
|
||||||
|
Easter Sunday: 1953 4/ 5 Western Same Orthodox (March 23 Julian date)
|
||||||
|
Easter Sunday: 1954 4/18 Western 4/25 Orthodox (April 12 Julian date)
|
||||||
|
Easter Sunday: 1955 4/10 Western 4/17 Orthodox (April 4 Julian date)
|
||||||
|
Easter Sunday: 1956 4/ 1 Western 5/ 6 Orthodox (April 23 Julian date)
|
||||||
|
Easter Sunday: 1957 4/21 Western Same Orthodox (April 8 Julian date)
|
||||||
|
Easter Sunday: 1958 4/ 6 Western 4/13 Orthodox (March 31 Julian date)
|
||||||
|
Easter Sunday: 1959 3/29 Western 5/ 3 Orthodox (April 20 Julian date)
|
||||||
|
Easter Sunday: 1960 4/17 Western Same Orthodox (April 4 Julian date)
|
||||||
|
Easter Sunday: 1961 4/ 2 Western 4/ 9 Orthodox (March 27 Julian date)
|
||||||
|
Easter Sunday: 1962 4/22 Western 4/29 Orthodox (April 16 Julian date)
|
||||||
|
Easter Sunday: 1963 4/14 Western Same Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 1964 3/29 Western 5/ 3 Orthodox (April 20 Julian date)
|
||||||
|
Easter Sunday: 1965 4/18 Western 4/25 Orthodox (April 12 Julian date)
|
||||||
|
Easter Sunday: 1966 4/10 Western Same Orthodox (March 28 Julian date)
|
||||||
|
Easter Sunday: 1967 3/26 Western 4/30 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 1968 4/14 Western 4/21 Orthodox (April 8 Julian date)
|
||||||
|
Easter Sunday: 1969 4/ 6 Western 4/13 Orthodox (March 31 Julian date)
|
||||||
|
Easter Sunday: 1970 3/29 Western 4/26 Orthodox (April 13 Julian date)
|
||||||
|
Easter Sunday: 1971 4/11 Western 4/18 Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 1972 4/ 2 Western 4/ 9 Orthodox (March 27 Julian date)
|
||||||
|
Easter Sunday: 1973 4/22 Western 4/29 Orthodox (April 16 Julian date)
|
||||||
|
Easter Sunday: 1974 4/14 Western Same Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 1975 3/30 Western 5/ 4 Orthodox (April 21 Julian date)
|
||||||
|
Easter Sunday: 1976 4/18 Western 4/25 Orthodox (April 12 Julian date)
|
||||||
|
Easter Sunday: 1977 4/10 Western Same Orthodox (March 28 Julian date)
|
||||||
|
Easter Sunday: 1978 3/26 Western 4/30 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 1979 4/15 Western 4/22 Orthodox (April 9 Julian date)
|
||||||
|
Easter Sunday: 1980 4/ 6 Western Same Orthodox (March 24 Julian date)
|
||||||
|
Easter Sunday: 1981 4/19 Western 4/26 Orthodox (April 13 Julian date)
|
||||||
|
Easter Sunday: 1982 4/11 Western 4/18 Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 1983 4/ 3 Western 5/ 8 Orthodox (April 25 Julian date)
|
||||||
|
Easter Sunday: 1984 4/22 Western Same Orthodox (April 9 Julian date)
|
||||||
|
Easter Sunday: 1985 4/ 7 Western 4/14 Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 1986 3/30 Western 5/ 4 Orthodox (April 21 Julian date)
|
||||||
|
Easter Sunday: 1987 4/19 Western Same Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 1988 4/ 3 Western 4/10 Orthodox (March 28 Julian date)
|
||||||
|
Easter Sunday: 1989 3/26 Western 4/30 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 1990 4/15 Western Same Orthodox (April 2 Julian date)
|
||||||
|
Easter Sunday: 1991 3/31 Western 4/ 7 Orthodox (March 25 Julian date)
|
||||||
|
Easter Sunday: 1992 4/19 Western 4/26 Orthodox (April 13 Julian date)
|
||||||
|
Easter Sunday: 1993 4/11 Western 4/18 Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 1994 4/ 3 Western 5/ 1 Orthodox (April 18 Julian date)
|
||||||
|
Easter Sunday: 1995 4/16 Western 4/23 Orthodox (April 10 Julian date)
|
||||||
|
Easter Sunday: 1996 4/ 7 Western 4/14 Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 1997 3/30 Western 4/27 Orthodox (April 14 Julian date)
|
||||||
|
Easter Sunday: 1998 4/12 Western 4/19 Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 1999 4/ 4 Western 4/11 Orthodox (March 29 Julian date)
|
||||||
|
Easter Sunday: 2000 4/23 Western 4/30 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 2001 4/15 Western Same Orthodox (April 2 Julian date)
|
||||||
|
Easter Sunday: 2002 3/31 Western 5/ 5 Orthodox (April 22 Julian date)
|
||||||
|
Easter Sunday: 2003 4/20 Western 4/27 Orthodox (April 14 Julian date)
|
||||||
|
Easter Sunday: 2004 4/11 Western Same Orthodox (March 29 Julian date)
|
||||||
|
Easter Sunday: 2005 3/27 Western 5/ 1 Orthodox (April 18 Julian date)
|
||||||
|
Easter Sunday: 2006 4/16 Western 4/23 Orthodox (April 10 Julian date)
|
||||||
|
Easter Sunday: 2007 4/ 8 Western Same Orthodox (March 26 Julian date)
|
||||||
|
Easter Sunday: 2008 3/23 Western 4/27 Orthodox (April 14 Julian date)
|
||||||
|
Easter Sunday: 2009 4/12 Western 4/19 Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 2010 4/ 4 Western Same Orthodox (March 22 Julian date)
|
||||||
|
Easter Sunday: 2011 4/24 Western Same Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 2012 4/ 8 Western 4/15 Orthodox (April 2 Julian date)
|
||||||
|
Easter Sunday: 2013 3/31 Western 5/ 5 Orthodox (April 22 Julian date)
|
||||||
|
Easter Sunday: 2014 4/20 Western Same Orthodox (April 7 Julian date)
|
||||||
|
Easter Sunday: 2015 4/ 5 Western 4/12 Orthodox (March 30 Julian date)
|
||||||
|
Easter Sunday: 2016 3/27 Western 5/ 1 Orthodox (April 18 Julian date)
|
||||||
|
Easter Sunday: 2017 4/16 Western Same Orthodox (April 3 Julian date)
|
||||||
|
Easter Sunday: 2018 4/ 1 Western 4/ 8 Orthodox (March 26 Julian date)
|
||||||
|
Easter Sunday: 2019 4/21 Western 4/28 Orthodox (April 15 Julian date)
|
||||||
|
Easter Sunday: 2020 4/12 Western 4/19 Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 2021 4/ 4 Western 5/ 2 Orthodox (April 19 Julian date)
|
||||||
|
Easter Sunday: 2022 4/17 Western 4/24 Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 2023 4/ 9 Western 4/16 Orthodox (April 3 Julian date)
|
||||||
|
Easter Sunday: 2024 3/31 Western 5/ 5 Orthodox (April 22 Julian date)
|
||||||
|
Easter Sunday: 2025 4/20 Western Same Orthodox (April 7 Julian date)
|
||||||
|
Easter Sunday: 2026 4/ 5 Western 4/12 Orthodox (March 30 Julian date)
|
||||||
|
Easter Sunday: 2027 3/28 Western 5/ 2 Orthodox (April 19 Julian date)
|
||||||
|
Easter Sunday: 2028 4/16 Western Same Orthodox (April 3 Julian date)
|
||||||
|
Easter Sunday: 2029 4/ 1 Western 4/ 8 Orthodox (March 26 Julian date)
|
||||||
|
Easter Sunday: 2030 4/21 Western 4/28 Orthodox (April 15 Julian date)
|
||||||
|
Easter Sunday: 2031 4/13 Western Same Orthodox (March 31 Julian date)
|
||||||
|
Easter Sunday: 2032 3/28 Western 5/ 2 Orthodox (April 19 Julian date)
|
||||||
|
Easter Sunday: 2033 4/17 Western 4/24 Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 2034 4/ 9 Western Same Orthodox (March 27 Julian date)
|
||||||
|
Easter Sunday: 2035 3/25 Western 4/29 Orthodox (April 16 Julian date)
|
||||||
|
Easter Sunday: 2036 4/13 Western 4/20 Orthodox (April 7 Julian date)
|
||||||
|
Easter Sunday: 2037 4/ 5 Western Same Orthodox (March 23 Julian date)
|
||||||
|
Easter Sunday: 2038 4/25 Western Same Orthodox (April 12 Julian date)
|
||||||
|
Easter Sunday: 2039 4/10 Western 4/17 Orthodox (April 4 Julian date)
|
||||||
|
Easter Sunday: 2040 4/ 1 Western 5/ 6 Orthodox (April 23 Julian date)
|
||||||
|
Easter Sunday: 2041 4/21 Western Same Orthodox (April 8 Julian date)
|
||||||
|
Easter Sunday: 2042 4/ 6 Western 4/13 Orthodox (March 31 Julian date)
|
||||||
|
Easter Sunday: 2043 3/29 Western 5/ 3 Orthodox (April 20 Julian date)
|
||||||
|
Easter Sunday: 2044 4/17 Western 4/24 Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 2045 4/ 9 Western Same Orthodox (March 27 Julian date)
|
||||||
|
Easter Sunday: 2046 3/25 Western 4/29 Orthodox (April 16 Julian date)
|
||||||
|
Easter Sunday: 2047 4/14 Western 4/21 Orthodox (April 8 Julian date)
|
||||||
|
Easter Sunday: 2048 4/ 5 Western Same Orthodox (March 23 Julian date)
|
||||||
|
Easter Sunday: 2049 4/18 Western 4/25 Orthodox (April 12 Julian date)
|
||||||
|
Easter Sunday: 2050 4/10 Western 4/17 Orthodox (April 4 Julian date)
|
||||||
|
Easter Sunday: 2051 4/ 2 Western 5/ 7 Orthodox (April 24 Julian date)
|
||||||
|
Easter Sunday: 2052 4/21 Western Same Orthodox (April 8 Julian date)
|
||||||
|
Easter Sunday: 2053 4/ 6 Western 4/13 Orthodox (March 31 Julian date)
|
||||||
|
Easter Sunday: 2054 3/29 Western 5/ 3 Orthodox (April 20 Julian date)
|
||||||
|
Easter Sunday: 2055 4/18 Western Same Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 2056 4/ 2 Western 4/ 9 Orthodox (March 27 Julian date)
|
||||||
|
Easter Sunday: 2057 4/22 Western 4/29 Orthodox (April 16 Julian date)
|
||||||
|
Easter Sunday: 2058 4/14 Western Same Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 2059 3/30 Western 5/ 4 Orthodox (April 21 Julian date)
|
||||||
|
Easter Sunday: 2060 4/18 Western 4/25 Orthodox (April 12 Julian date)
|
||||||
|
Easter Sunday: 2061 4/10 Western Same Orthodox (March 28 Julian date)
|
||||||
|
Easter Sunday: 2062 3/26 Western 4/30 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 2063 4/15 Western 4/22 Orthodox (April 9 Julian date)
|
||||||
|
Easter Sunday: 2064 4/ 6 Western 4/13 Orthodox (March 31 Julian date)
|
||||||
|
Easter Sunday: 2065 3/29 Western 4/26 Orthodox (April 13 Julian date)
|
||||||
|
Easter Sunday: 2066 4/11 Western 4/18 Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 2067 4/ 3 Western 4/10 Orthodox (March 28 Julian date)
|
||||||
|
Easter Sunday: 2068 4/22 Western 4/29 Orthodox (April 16 Julian date)
|
||||||
|
Easter Sunday: 2069 4/14 Western Same Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 2070 3/30 Western 5/ 4 Orthodox (April 21 Julian date)
|
||||||
|
Easter Sunday: 2071 4/19 Western Same Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 2072 4/10 Western Same Orthodox (March 28 Julian date)
|
||||||
|
Easter Sunday: 2073 3/26 Western 4/30 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 2074 4/15 Western 4/22 Orthodox (April 9 Julian date)
|
||||||
|
Easter Sunday: 2075 4/ 7 Western Same Orthodox (March 25 Julian date)
|
||||||
|
Easter Sunday: 2076 4/19 Western 4/26 Orthodox (April 13 Julian date)
|
||||||
|
Easter Sunday: 2077 4/11 Western 4/18 Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 2078 4/ 3 Western 5/ 8 Orthodox (April 25 Julian date)
|
||||||
|
Easter Sunday: 2079 4/23 Western Same Orthodox (April 10 Julian date)
|
||||||
|
Easter Sunday: 2080 4/ 7 Western 4/14 Orthodox (April 1 Julian date)
|
||||||
|
Easter Sunday: 2081 3/30 Western 5/ 4 Orthodox (April 21 Julian date)
|
||||||
|
Easter Sunday: 2082 4/19 Western Same Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 2083 4/ 4 Western 4/11 Orthodox (March 29 Julian date)
|
||||||
|
Easter Sunday: 2084 3/26 Western 4/30 Orthodox (April 17 Julian date)
|
||||||
|
Easter Sunday: 2085 4/15 Western Same Orthodox (April 2 Julian date)
|
||||||
|
Easter Sunday: 2086 3/31 Western 4/ 7 Orthodox (March 25 Julian date)
|
||||||
|
Easter Sunday: 2087 4/20 Western 4/27 Orthodox (April 14 Julian date)
|
||||||
|
Easter Sunday: 2088 4/11 Western 4/18 Orthodox (April 5 Julian date)
|
||||||
|
Easter Sunday: 2089 4/ 3 Western 5/ 1 Orthodox (April 18 Julian date)
|
||||||
|
Easter Sunday: 2090 4/16 Western 4/23 Orthodox (April 10 Julian date)
|
||||||
|
Easter Sunday: 2091 4/ 8 Western Same Orthodox (March 26 Julian date)
|
||||||
|
Easter Sunday: 2092 3/30 Western 4/27 Orthodox (April 14 Julian date)
|
||||||
|
Easter Sunday: 2093 4/12 Western 4/19 Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 2094 4/ 4 Western 4/11 Orthodox (March 29 Julian date)
|
||||||
|
Easter Sunday: 2095 4/24 Western Same Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 2096 4/15 Western Same Orthodox (April 2 Julian date)
|
||||||
|
Easter Sunday: 2097 3/31 Western 5/ 5 Orthodox (April 22 Julian date)
|
||||||
|
Easter Sunday: 2098 4/20 Western 4/27 Orthodox (April 14 Julian date)
|
||||||
|
Easter Sunday: 2099 4/12 Western Same Orthodox (March 30 Julian date)
|
||||||
|
Easter Sunday: 2100 3/28 Western 5/ 2 Orthodox (April 18 Julian date)
|
||||||
|
Easter Sunday: 2101 4/17 Western 4/24 Orthodox (April 10 Julian date)
|
||||||
|
Easter Sunday: 2102 4/ 9 Western Same Orthodox (March 26 Julian date)
|
||||||
|
Easter Sunday: 2103 3/25 Western 4/29 Orthodox (April 15 Julian date)
|
||||||
|
Easter Sunday: 2104 4/13 Western 4/20 Orthodox (April 6 Julian date)
|
||||||
|
Easter Sunday: 2105 4/ 5 Western Same Orthodox (March 22 Julian date)
|
||||||
|
Easter Sunday: 2106 4/18 Western 4/25 Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 2107 4/10 Western 4/17 Orthodox (April 3 Julian date)
|
||||||
|
Easter Sunday: 2108 4/ 1 Western 5/ 6 Orthodox (April 22 Julian date)
|
||||||
|
Easter Sunday: 2109 4/21 Western Same Orthodox (April 7 Julian date)
|
||||||
|
Easter Sunday: 2110 4/ 6 Western 4/13 Orthodox (March 30 Julian date)
|
||||||
|
Easter Sunday: 2111 3/29 Western 5/ 3 Orthodox (April 19 Julian date)
|
||||||
|
Easter Sunday: 2112 4/17 Western Same Orthodox (April 3 Julian date)
|
||||||
|
Easter Sunday: 2113 4/ 2 Western 4/ 9 Orthodox (March 26 Julian date)
|
||||||
|
Easter Sunday: 2114 4/22 Western 4/29 Orthodox (April 15 Julian date)
|
||||||
|
Easter Sunday: 2115 4/14 Western Same Orthodox (March 31 Julian date)
|
||||||
|
Easter Sunday: 2116 3/29 Western 5/ 3 Orthodox (April 19 Julian date)
|
||||||
|
Easter Sunday: 2117 4/18 Western 4/25 Orthodox (April 11 Julian date)
|
||||||
|
Easter Sunday: 2118 4/10 Western 4/17 Orthodox (April 3 Julian date)
|
||||||
|
Easter Sunday: 2119 3/26 Western 4/30 Orthodox (April 16 Julian date)
|
||||||
|
Easter Sunday: 2120 4/14 Western 4/21 Orthodox (April 7 Julian date)
|
||||||
|
Easter Sunday: 2121 4/ 6 Western 4/13 Orthodox (March 30 Julian date)
|
||||||
|
Easter Sunday: 2122 3/29 Western 5/ 3 Orthodox (April 19 Julian date)
|
||||||
|
Easter Sunday: 2123 4/11 Western 4/18 Orthodox (April 4 Julian date)
|
||||||
|
Easter Sunday: 2124 4/ 2 Western 4/ 9 Orthodox (March 26 Julian date)
|
||||||
|
*/
|
||||||
|
/*
 * Orthodox (Eastern) Pascha dates on the Gregorian calendar, keyed by the
 * four-digit year as a string. Values are ISO "YYYY-MM-DD" date strings
 * covering 1875-2124, transcribed from the EasterB reference table above
 * (Harry J. Smith, 1981-2007). Used as a lookup because the algorithmic
 * pascha() helper computes the Western date, not the Orthodox one.
 */
module.exports = {
  "1875": "1875-04-25", "1876": "1876-04-16", "1877": "1877-04-08", "1878": "1878-04-28", "1879": "1879-04-13",
  "1880": "1880-05-02", "1881": "1881-04-24", "1882": "1882-04-09", "1883": "1883-04-29", "1884": "1884-04-20",
  "1885": "1885-04-05", "1886": "1886-04-25", "1887": "1887-04-17", "1888": "1888-05-06", "1889": "1889-04-21",
  "1890": "1890-04-13", "1891": "1891-05-03", "1892": "1892-04-17", "1893": "1893-04-09", "1894": "1894-04-29",
  "1895": "1895-04-14", "1896": "1896-04-05", "1897": "1897-04-25", "1898": "1898-04-17", "1899": "1899-04-30",
  "1900": "1900-04-22", "1901": "1901-04-14", "1902": "1902-04-27", "1903": "1903-04-19", "1904": "1904-04-10",
  "1905": "1905-04-30", "1906": "1906-04-15", "1907": "1907-05-05", "1908": "1908-04-26", "1909": "1909-04-11",
  "1910": "1910-05-01", "1911": "1911-04-23", "1912": "1912-04-07", "1913": "1913-04-27", "1914": "1914-04-19",
  "1915": "1915-04-04", "1916": "1916-04-23", "1917": "1917-04-15", "1918": "1918-05-05", "1919": "1919-04-20",
  "1920": "1920-04-11", "1921": "1921-05-01", "1922": "1922-04-16", "1923": "1923-04-08", "1924": "1924-04-27",
  "1925": "1925-04-19", "1926": "1926-05-02", "1927": "1927-04-24", "1928": "1928-04-15", "1929": "1929-05-05",
  "1930": "1930-04-20", "1931": "1931-04-12", "1932": "1932-05-01", "1933": "1933-04-16", "1934": "1934-04-08",
  "1935": "1935-04-28", "1936": "1936-04-12", "1937": "1937-05-02", "1938": "1938-04-24", "1939": "1939-04-09",
  "1940": "1940-04-28", "1941": "1941-04-20", "1942": "1942-04-05", "1943": "1943-04-25", "1944": "1944-04-16",
  "1945": "1945-05-06", "1946": "1946-04-21", "1947": "1947-04-13", "1948": "1948-05-02", "1949": "1949-04-24",
  "1950": "1950-04-09", "1951": "1951-04-29", "1952": "1952-04-20", "1953": "1953-04-05", "1954": "1954-04-25",
  "1955": "1955-04-17", "1956": "1956-05-06", "1957": "1957-04-21", "1958": "1958-04-13", "1959": "1959-05-03",
  "1960": "1960-04-17", "1961": "1961-04-09", "1962": "1962-04-29", "1963": "1963-04-14", "1964": "1964-05-03",
  "1965": "1965-04-25", "1966": "1966-04-10", "1967": "1967-04-30", "1968": "1968-04-21", "1969": "1969-04-13",
  "1970": "1970-04-26", "1971": "1971-04-18", "1972": "1972-04-09", "1973": "1973-04-29", "1974": "1974-04-14",
  "1975": "1975-05-04", "1976": "1976-04-25", "1977": "1977-04-10", "1978": "1978-04-30", "1979": "1979-04-22",
  "1980": "1980-04-06", "1981": "1981-04-26", "1982": "1982-04-18", "1983": "1983-05-08", "1984": "1984-04-22",
  "1985": "1985-04-14", "1986": "1986-05-04", "1987": "1987-04-19", "1988": "1988-04-10", "1989": "1989-04-30",
  "1990": "1990-04-15", "1991": "1991-04-07", "1992": "1992-04-26", "1993": "1993-04-18", "1994": "1994-05-01",
  "1995": "1995-04-23", "1996": "1996-04-14", "1997": "1997-04-27", "1998": "1998-04-19", "1999": "1999-04-11",
  "2000": "2000-04-30", "2001": "2001-04-15", "2002": "2002-05-05", "2003": "2003-04-27", "2004": "2004-04-11",
  "2005": "2005-05-01", "2006": "2006-04-23", "2007": "2007-04-08", "2008": "2008-04-27", "2009": "2009-04-19",
  "2010": "2010-04-04", "2011": "2011-04-24", "2012": "2012-04-15", "2013": "2013-05-05", "2014": "2014-04-20",
  "2015": "2015-04-12", "2016": "2016-05-01", "2017": "2017-04-16", "2018": "2018-04-08", "2019": "2019-04-28",
  "2020": "2020-04-19", "2021": "2021-05-02", "2022": "2022-04-24", "2023": "2023-04-16", "2024": "2024-05-05",
  "2025": "2025-04-20", "2026": "2026-04-12", "2027": "2027-05-02", "2028": "2028-04-16", "2029": "2029-04-08",
  "2030": "2030-04-28", "2031": "2031-04-13", "2032": "2032-05-02", "2033": "2033-04-24", "2034": "2034-04-09",
  "2035": "2035-04-29", "2036": "2036-04-20", "2037": "2037-04-05", "2038": "2038-04-25", "2039": "2039-04-17",
  "2040": "2040-05-06", "2041": "2041-04-21", "2042": "2042-04-13", "2043": "2043-05-03", "2044": "2044-04-24",
  "2045": "2045-04-09", "2046": "2046-04-29", "2047": "2047-04-21", "2048": "2048-04-05", "2049": "2049-04-25",
  "2050": "2050-04-17", "2051": "2051-05-07", "2052": "2052-04-21", "2053": "2053-04-13", "2054": "2054-05-03",
  "2055": "2055-04-18", "2056": "2056-04-09", "2057": "2057-04-29", "2058": "2058-04-14", "2059": "2059-05-04",
  "2060": "2060-04-25", "2061": "2061-04-10", "2062": "2062-04-30", "2063": "2063-04-22", "2064": "2064-04-13",
  "2065": "2065-04-26", "2066": "2066-04-18", "2067": "2067-04-10", "2068": "2068-04-29", "2069": "2069-04-14",
  "2070": "2070-05-04", "2071": "2071-04-19", "2072": "2072-04-10", "2073": "2073-04-30", "2074": "2074-04-22",
  "2075": "2075-04-07", "2076": "2076-04-26", "2077": "2077-04-18", "2078": "2078-05-08", "2079": "2079-04-23",
  "2080": "2080-04-14", "2081": "2081-05-04", "2082": "2082-04-19", "2083": "2083-04-11", "2084": "2084-04-30",
  "2085": "2085-04-15", "2086": "2086-04-07", "2087": "2087-04-27", "2088": "2088-04-18", "2089": "2089-05-01",
  "2090": "2090-04-23", "2091": "2091-04-08", "2092": "2092-04-27", "2093": "2093-04-19", "2094": "2094-04-11",
  "2095": "2095-04-24", "2096": "2096-04-15", "2097": "2097-05-05", "2098": "2098-04-27", "2099": "2099-04-12",
  "2100": "2100-05-02", "2101": "2101-04-24", "2102": "2102-04-09", "2103": "2103-04-29", "2104": "2104-04-20",
  "2105": "2105-04-05", "2106": "2106-04-25", "2107": "2107-04-17", "2108": "2108-05-06", "2109": "2109-04-21",
  "2110": "2110-04-13", "2111": "2111-05-03", "2112": "2112-04-17", "2113": "2113-04-09", "2114": "2114-04-29",
  "2115": "2115-04-14", "2116": "2116-05-03", "2117": "2117-04-25", "2118": "2118-04-17", "2119": "2119-04-30",
  "2120": "2120-04-21", "2121": "2121-04-13", "2122": "2122-05-03", "2123": "2123-04-18", "2124": "2124-04-09",
};
|
116
ketr.ketran/server/lib/pascha.js
Normal file
116
ketr.ketran/server/lib/pascha.js
Normal file
@ -0,0 +1,116 @@
|
|||||||
|
//! moment-holiday.js locale configuration
|
||||||
|
//! locale : pascha Related Holidays
|
||||||
|
//! author : Kodie Grantham : https://github.com/kodie
|
||||||
|
|
||||||
|
//(function() {
|
||||||
|
// var moment = (typeof require !== 'undefined' && require !== null) && !require.amd ? require('moment') : this.moment;
|
||||||
|
function init(moment) {
  // Register the Pascha-relative observances with the moment-holiday plugin.
  // Each date string uses the custom "pascha" anchor resolved by the parser
  // extension registered further down ("a|b" denotes a date range).
  moment.modifyHolidays.add({
    "Lent": {
      date: 'pascha-46|pascha-3'
    },
    /* Individual Holy Week entries are currently disabled:
       "Holy Monday":    pascha-6  (keywords_y: great, monday)
       "Holy Tuesday":   pascha-5  (keywords_y: great, tuesday)
       "Holy Wednesday": pascha-4  (keywords_y: great, wednesday)
       "Holy Thursday":  pascha-3  (keywords_y: great, thursday)
       "Holy Friday":    pascha-2  (keywords_y: great, friday)
       "Holy Saturday":  pascha-1  (keywords_y: holy, saturday) */
    "Pascha Sunday": {
      date: 'pascha',
      keywords_y: ['pascha'],
      keywords: ['sunday']
    },
    "Bright Week": {
      date: 'pascha+1|pascha+6'
    },
    "Pentecost Sunday": {
      date: 'pascha+49',
      keywords_y: ['pentecost'],
      keywords: ['sunday']
    },
  });

  /**
   * Calculates Easter Sunday in the Gregorian/Western (Catholic and
   * Protestant) calendar using Oudin's (1940) algorithm from
   * http://www.tondering.dk/claus/cal/easter.php
   * NOTE(review): this is the WESTERN date; the Orthodox dates live in the
   * pascha-dates.js lookup table.
   * @param {number} year - four-digit Gregorian year
   * @returns {Object} moment instance for Easter Sunday of that year
   */
  var pascha = function(year) {
    var f = Math.floor;
    var G = year % 19;            // Golden Number - 1
    var C = f(year / 100);
    // Related to the epact.
    var H = (C - f(C / 4) - f((8 * C + 13) / 25) + 19 * G + 15) % 30;
    // Number of days from 21 March to the Paschal full moon.
    var I = H - f(H / 28) * (1 - f(29 / (H + 1)) * f((21 - G) / 11));
    // Weekday of the Paschal full moon.
    var J = (year + f(year / 4) + I + 2 - C + f(C / 4)) % 7;
    // Days from 21 March to the Sunday on or before the full moon.
    var L = I - J;
    var month = 3 + f((L + 40) / 44);
    var day = L + 28 - 31 * f(month / 4);

    return moment([year, (month - 1), day]); // moment months are 0-indexed
  }

  // Teach the holiday parser the "pascha", "pascha+N" and "pascha-N" tokens.
  moment.modifyHolidays.extendParser(function(m, date) {
    if (date.indexOf('pascha') === -1) {
      return; // not pascha-relative; let other parsers handle it
    }

    var parts = date.split('|');
    var resolved = [];

    for (var i = 0; i < parts.length; i++) {
      var part = parts[i];

      if (part.substring(0, 6) !== 'pascha') {
        // Plain date fragment mixed into a range; pass through untouched.
        resolved.push(part);
        continue;
      }

      var easter = pascha(m.year());
      var sign = part.charAt(6);
      if (sign === '-') { easter.subtract(part.substring(7), 'days'); }
      if (sign === '+') { easter.add(part.substring(7), 'days'); }

      // A lone pascha token resolves directly to a moment; range parts
      // fall through to the formatted-string join below.
      if (parts.length === 1) { return easter; }
      resolved.push(easter.format('M/D'));
    }

    if (resolved.length) { return resolved.join('|'); }
  });
}

module.exports = init;
88
ketr.ketran/server/lib/util.js
Normal file
88
ketr.ketran/server/lib/util.js
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const config = require("config"),
|
||||||
|
fs = require("fs"),
|
||||||
|
Promise = require("bluebird"),
|
||||||
|
picturesPath = config.get("picturesPath").replace(/\/$/, "") + "/";
|
||||||
|
|
||||||
|
/* Promisified fs.stat for a path under picturesPath. Accepts either an
 * absolute path already prefixed with picturesPath or a path relative to it. */
const stat = function (_path) {
    const prefix = picturesPath.replace(/\/$/, "");
    if (_path.indexOf(prefix) == 0) {
        _path = _path.substring(picturesPath.length);
    }

    const target = picturesPath + _path;

    return new Promise(function (resolve, reject) {
        fs.stat(target, function (error, stats) {
            if (error) {
                reject(error);
            } else {
                resolve(stats);
            }
        });
    });
}
|
||||||
|
|
||||||
|
/* Promisified fs.unlink for a path under picturesPath. Accepts either an
 * absolute path already prefixed with picturesPath or a path relative to it. */
const unlink = function (_path) {
    const prefix = picturesPath.replace(/\/$/, "");
    if (_path.indexOf(prefix) == 0) {
        _path = _path.substring(picturesPath.length);
    }

    const target = picturesPath + _path;

    return new Promise(function (resolve, reject) {
        fs.unlink(target, function (error) {
            if (error) {
                reject(error);
            } else {
                resolve();
            }
        });
    });
}
|
||||||
|
|
||||||
|
/* Create each directory component of _path (relative to picturesPath),
 * like `mkdir -p`. Existing components are left alone.
 *
 * Fixes vs. the previous version:
 * - The picturesPath-prefix check now strips the trailing slash, consistent
 *   with stat()/unlink() (previously an absolute path without a trailing
 *   slash would not be recognized as already prefixed).
 * - Empty segments from leading/trailing/double slashes are skipped so the
 *   same directory is never processed twice.
 * - A concurrent creation of the same directory (EEXIST) is tolerated
 *   instead of failing the whole chain.
 *
 * @param {string} _path - directory path, relative to picturesPath
 * @returns {Promise} resolves once every component exists
 */
const mkdir = function (_path) {
    const prefix = picturesPath.replace(/\/$/, "");
    if (_path.indexOf(prefix) == 0) {
        _path = _path.substring(picturesPath.length);
    }

    /* Drop empty segments caused by stray slashes */
    let parts = _path.split("/").filter(function (part) {
        return part != "";
    });

    let path = prefix;

    /* Seed the series with the pictures root itself (verified, not created) */
    parts.unshift("");

    return Promise.mapSeries(parts, function (part) {
        if (part) {
            path += "/" + part;
        }

        return stat(path).catch(function (error) {
            if (error.code != "ENOENT") {
                throw error;
            }

            return new Promise(function (resolve, reject) {
                fs.mkdir(path, function (error) {
                    /* Tolerate a concurrent creation of this directory */
                    if (error && error.code != "EEXIST") {
                        return reject(error);
                    }
                    return resolve();
                });
            });
        });
    });
}
|
||||||
|
|
||||||
|
/* Resolve true when stat() succeeds for the path, false on any stat failure. */
const exists = function(path) {
    return stat(path).then(function() {
        return true;
    }, function() {
        return false;
    });
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
stat,
|
||||||
|
exists,
|
||||||
|
mkdir,
|
||||||
|
unlink
|
||||||
|
};
|
61
ketr.ketran/server/mail.js
Normal file
61
ketr.ketran/server/mail.js
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const createTransport = require('nodemailer').createTransport,
|
||||||
|
{ timestamp } = require("./timestamp");
|
||||||
|
|
||||||
|
const transporter = createTransport({
|
||||||
|
host: 'email.ketrenos.com',
|
||||||
|
pool: true,
|
||||||
|
port: 25
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
 * Send a plain-text email (an HTML variant is derived by converting newlines
 * to <br>). Delivery is retried up to 10 times at 100ms intervals before the
 * returned promise rejects.
 *
 * Fixes vs. the previous version:
 * - The retry path called `send`, which is undefined (should be
 *   `attemptSend`), so any transient failure crashed instead of retrying.
 * - After scheduling a retry, execution fell through to the success log and
 *   `resolve(true)`, settling the promise before the mail was actually sent.
 *
 * @param {string} to - To: recipients (may be empty when cc is provided)
 * @param {string} subject - Subject: line
 * @param {string} message - plain-text body
 * @param {string} [cc] - Cc: recipients
 * @returns {Promise} settles once the mail is accepted (or retries exhausted)
 */
function sendMail(to, subject, message, cc) {
    let envelope = {
        subject: subject,
        from: 'Ketr.Ketran <james_ketran@ketrenos.com>',
        to: to || '',
        cc: cc || ''
    };

    /* If there isn't a To: but there is a Cc:, promote Cc: to To: */
    if (!envelope.to && envelope.cc) {
        envelope.to = envelope.cc;
        delete envelope.cc;
    }

    envelope.text = message;
    envelope.html = message.replace(/\n/g, "<br>\n");

    return new Promise(function (resolve, reject) {
        let attempts = 10;

        function attemptSend(envelope) {
            /* Rate limit to ten per second */
            transporter.sendMail(envelope, function (error) {
                if (error) {
                    if (attempts) {
                        attempts--;
                        console.warn(timestamp() + " Unable to send mail. Trying again in 100ms (" + attempts + " attempts remain): ", error);
                        setTimeout(attemptSend.bind(undefined, envelope), 100);
                    } else {
                        console.error(timestamp() + " Error sending email: ", error);
                        reject(error);
                    }
                    /* Do not fall through to the success path */
                    return;
                }

                console.log(timestamp() + " Mail sent to: " + envelope.to);
                return resolve(true);
            });
        }

        attemptSend(envelope);
    }).then(function(success) {
        if (!success) {
            console.error(timestamp() + " Mail not sent to: " + envelope.to);
        }
    });
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
sendMail: sendMail
|
||||||
|
};
|
BIN
ketr.ketran/server/routes/.identities.js.swp
Normal file
BIN
ketr.ketran/server/routes/.identities.js.swp
Normal file
Binary file not shown.
33
ketr.ketran/server/routes/basepath.js
Normal file
33
ketr.ketran/server/routes/basepath.js
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const express = require("express"),
|
||||||
|
fs = require("fs"),
|
||||||
|
url = require("url");
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
/* This router serves only top-level *.html files, rewriting the
 * BASEPATH placeholder with the app's configured base path. */
router.get("/*", function(req, res, next) {
    const parts = url.parse(req.url);
    const basePath = req.app.get("basePath");

    /* Anything that isn't a single-component .html request falls through */
    if (!/^\/[^/]+\.html$/.exec(parts.pathname)) {
        return next();
    }

    console.log("Attempting to parse 'frontend" + parts.pathname + "'");

    /* Replace <script>'<base href="/BASEPATH/">';</script> in the HTML with
     * a real <base> tag carrying the configured base path */
    fs.readFile("frontend" + parts.pathname, "utf8", function(error, content) {
        if (error) {
            return next();
        }
        const patched = content.replace(
            /<script>'<base href="BASEPATH">';<\/script>/,
            "<base href='" + basePath + "'>");
        res.send(patched);
    });
});
|
||||||
|
|
||||||
|
module.exports = router;
|
1038
ketr.ketran/server/routes/games.js
Normal file
1038
ketr.ketran/server/routes/games.js
Normal file
File diff suppressed because it is too large
Load Diff
63
ketr.ketran/server/routes/index.js
Normal file
63
ketr.ketran/server/routes/index.js
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const express = require("express"),
|
||||||
|
fs = require("fs"),
|
||||||
|
url = require("url");
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
/* List of filename extensions we know are "potential" file extensions for
|
||||||
|
* assets we don"t want to return "index.html" for */
|
||||||
|
const extensions = [
|
||||||
|
"html", "js", "css", "eot", "gif", "ico", "jpeg", "jpg", "mp4",
|
||||||
|
"md", "ttf", "txt", "woff", "woff2", "yml", "svg"
|
||||||
|
];
|
||||||
|
|
||||||
|
/* Build the extension match RegExp from the list of extensions */
|
||||||
|
const extensionMatch = new RegExp("^.*?(" + extensions.join("|") + ")$", "i");
|
||||||
|
|
||||||
|
/* To support client-side dynamic routes, this (last) route returns
 * index.html for anything that reached it -- except requests whose path
 * carries a recognized asset extension, which get a 404 so missing assets
 * don't come back as HTML during development. */
router.get("/*", function(req, res, next) {
    const parts = url.parse(req.url);
    const basePath = req.app.get("basePath");

    /* Until authentication has populated req.user, only "/" is served here;
     * everything else chains to the next handler. */
    if (!req.user && req.url != "/") {
        return next();
    }

    const looksLikeAsset = extensionMatch.exec(parts.pathname);

    if (req.url == "/" || !looksLikeAsset) {
        console.log("Returning index for " + req.url);

        /* Replace <script>'<base href="/BASEPATH/">';</script> in index.html
         * with a real <base> tag carrying the configured base path */
        const index = fs.readFileSync("frontend/index.html", "utf8");

        return res.send(index.replace(
            /<script>'<base href="BASEPATH">';<\/script>/,
            "<base href='" + basePath + "'>"));
    }

    console.log("Page not found: " + req.url);
    return res.status(404).json({
        message: "Page not found",
        status: 404
    });
});
|
||||||
|
|
||||||
|
module.exports = router;
|
387
ketr.ketran/server/routes/users.js
Normal file
387
ketr.ketran/server/routes/users.js
Normal file
@ -0,0 +1,387 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const express = require("express"),
|
||||||
|
config = require("config"),
|
||||||
|
LdapAuth = require("ldapauth-fork"),
|
||||||
|
{ sendVerifyMail, sendPasswordChangedMail } = require("../lib/mail"),
|
||||||
|
crypto = require("crypto");
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
let userDB;
|
||||||
|
|
||||||
|
let ldap;
|
||||||
|
if (config.has("ldap.url")) {
|
||||||
|
ldap = new LdapAuth(config.get("ldap"));
|
||||||
|
} else {
|
||||||
|
ldap = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
require("../db/users").then(function(db) {
|
||||||
|
userDB = db;
|
||||||
|
});
|
||||||
|
|
||||||
|
/* Return the current session's user, or an empty object when not logged in. */
router.get("/", function(req, res/*, next*/) {
    console.log("/users/");

    const ok = function(payload) {
        return res.status(200).send(payload);
    };

    return getSessionUser(req).then(ok).catch(function(error) {
        console.log("User not logged in: " + error);
        return ok({});
    });
});
|
||||||
|
|
||||||
|
/* Promisified LDAP authentication. Any domain suffix on the username is
 * dropped before the bind. Rejects immediately when LDAP is not configured. */
function ldapPromise(username, password) {
    if (!ldap) {
        return Promise.reject("LDAP not being used");
    }

    const uid = username.replace(/@.*$/, "");

    return new Promise(function(resolve, reject) {
        ldap.authenticate(uid, password, function(error, user) {
            if (error) {
                reject(error);
            } else {
                resolve(user);
            }
        });
    });
}
|
||||||
|
|
||||||
|
const ldapJS = require("ldapjs"),
|
||||||
|
ldapConfig = config.get("ldap");
|
||||||
|
|
||||||
|
|
||||||
|
/* Replace a user's LDAP userPassword attribute via an admin bind.
 *
 * NOTE(review): the .catch() below logs bind/modify failures and then the
 * chain continues to unbind, so the returned promise RESOLVES even when the
 * password change failed -- callers cannot observe the error. Confirm this
 * fire-and-forget behavior is intentional. */
const ldapSetPassword = function(username, password) {
    const client = ldapJS.createClient({
        url: ldapConfig.url
    });

    return new Promise(function(resolve, reject) {
        /* Bind with admin credentials so another user's entry may be modified */
        client.bind(ldapConfig.bindDn, ldapConfig.bindCredentials, function(err) {
            if (err) {
                return reject("Error binding to LDAP: " + err);
            }

            var change = new ldapJS.Change({
                operation: "replace",
                modification: {
                    userPassword : password,
                }
            });

            /* DN layout: uid=<user>,ou=people,<searchBase> */
            client.modify("uid=" + username + ",ou=people," + ldapConfig.searchBase, change, function(err) {
                if (err) {
                    return reject("Error changing password: " + err);
                }
                return resolve();
            });
        });
    }).catch(function(error) {
        /* Errors are logged and swallowed here (see NOTE above) */
        console.error(error);
    }).then(function() {
        /* Always release the connection, success or failure */
        client.unbind(function(err) {
            if (err) {
                console.error("Error unbinding: " + err);
            }
        });
    });
};
|
||||||
|
|
||||||
|
/* Change the logged-in user's password. Expects the current password (c) and
 * the new password (n) in the query string or body. The current password is
 * re-verified (against LDAP or the local DB) before the change is applied.
 *
 * Fixes vs. the previous version:
 * - SECURITY: the new plaintext password was written to the server log.
 * - The update promise was left floating with no rejection handler; a failed
 *   update produced an unhandled rejection and no HTTP response.
 */
router.put("/password", function(req, res) {
    console.log("/users/password");

    const changes = {
        currentPassword: req.query.c || req.body.c,
        newPassword: req.query.n || req.body.n
    };

    if (!changes.currentPassword || !changes.newPassword) {
        return res.status(400).send("Missing current password and/or new password.");
    }

    if (changes.currentPassword == changes.newPassword) {
        return res.status(400).send("Attempt to set new password to current password.");
    }

    return getSessionUser(req).then(function(user) {
        if (req.session.userId == "LDAP") {
            /* Re-verify the current password against LDAP */
            return ldapPromise(user.username, changes.currentPassword).then(function() {
                return user;
            }).catch(function() {
                return null;
            });
        }

        /* Not an LDAP user, so verify the current password in the DB */
        return userDB.sequelize.query("SELECT id FROM users " +
            "WHERE uid=:username AND password=:password", {
            replacements: {
                username: user.username,
                password: crypto.createHash('sha256').update(changes.currentPassword).digest('base64')
            },
            type: userDB.Sequelize.QueryTypes.SELECT,
            raw: true
        }).then(function(users) {
            if (users.length != 1) {
                return null;
            }
            return user;
        });
    }).then(function(user) {
        if (!user) {
            console.log("Invalid password");
            /* Invalid password */
            res.status(401).send("Invalid password");
            return null;
        }

        let updatePromise;
        if (req.session.userId == "LDAP") {
            updatePromise = ldapSetPassword(user.username, changes.newPassword);
        } else {
            updatePromise = userDB.sequelize.query("UPDATE users SET password=:password WHERE uid=:username", {
                replacements: {
                    username: user.username,
                    password: crypto.createHash('sha256').update(changes.newPassword).digest('base64')
                }
            });
        }

        return updatePromise.then(function() {
            /* Never log the plaintext password */
            console.log("Password changed for user " + user.username + ".");

            res.status(200).send(user);
            user.id = req.session.userId;
            return sendPasswordChangedMail(userDB, req, user);
        });
    }).catch(function(error) {
        console.log("Error changing password: ", error);
        if (!res.headersSent) {
            res.status(500).send("Error changing password");
        }
    });
});
|
||||||
|
|
||||||
|
/* Create a new (unauthenticated) account and send a verification mail.
 * Expects m (email, also used as the uid), n (display name), p (password)
 * and w (who-you-know notes) in the query string or body.
 *
 * BUGFIX: previously a duplicate email address sent a 400 response but the
 * promise chain fell through and INSERTed the account anyway. The insert is
 * now only reached when the address is unused and well-formed. */
router.post("/create", function(req, res) {
    console.log("/users/create");

    const user = {
        uid: req.query.m || req.body.m,
        displayName: req.query.n || req.body.n || "",
        password: req.query.p || req.body.p || "",
        mail: req.query.m || req.body.m,
        notes: req.query.w || req.body.w || ""
    };

    if (!user.uid || !user.password || !user.displayName || !user.notes) {
        return res.status(400).send("Missing email address, password, name, and/or who you know.");
    }

    user.password = crypto.createHash('sha256').update(user.password).digest('base64');

    return userDB.sequelize.query("SELECT * FROM users WHERE uid=:uid", {
        replacements: user,
        type: userDB.Sequelize.QueryTypes.SELECT,
        raw: true
    }).then(function(results) {
        if (results.length != 0) {
            /* Duplicate: respond and stop -- do NOT continue to the insert */
            res.status(400).send("Email address already used.");
            return;
        }

        let re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
        if (!re.exec(user.mail)) {
            console.log("Invalid email address: " + user.mail);
            throw "Invalid email address.";
        }

        return userDB.sequelize.query("INSERT INTO users " +
            "(uid,displayName,password,mail,memberSince,authenticated,notes) " +
            "VALUES(:uid,:displayName,:password,:mail,CURRENT_TIMESTAMP,0,:notes)", {
            replacements: user
        }).spread(function(results, metadata) {
            req.session.userId = metadata.lastID;
        }).then(function() {
            return getSessionUser(req).then(function(user) {
                res.status(200).send(user);
                user.id = req.session.userId;
                return sendVerifyMail(userDB, req, user);
            });
        });
    }).catch(function(error) {
        console.log("Error creating account: ", error);
        if (!res.headersSent) {
            return res.status(401).send(error);
        }
    });
});
|
||||||
|
|
||||||
|
/* Resolve the user record for the current session, annotate it with any
 * access restriction / maintainer flag, and strip it down to the allowed
 * fields. Sets req.user as a side effect. Rejects (with a string) when there
 * is no valid session.
 *
 * Fix vs. the previous version: user-facing typo "Accout not authorized."
 * corrected to "Account not authorized."
 *
 * @param {Request} req - Express request carrying the session
 * @returns {Promise<Object>} sanitized user record */
const getSessionUser = function(req) {
    return Promise.resolve().then(function() {
        if (!req.session || !req.session.userId) {
            throw "Unauthorized. You must be logged in.";
        }

        /* LDAP sessions keep the user cached on the session itself */
        if (req.session.userId == "LDAP") {
            if (req.session.ldapUser) {
                return req.session.ldapUser;
            }
            req.session.userId = null;
            req.session.ldapUser = null;
            throw "Invalid LDAP session";
        }

        let query = "SELECT " +
            "uid AS username,displayName,mailVerified,authenticated,memberSince AS name,mail " +
            "FROM users WHERE id=:id";
        return userDB.sequelize.query(query, {
            replacements: {
                id: req.session.userId
            },
            type: userDB.Sequelize.QueryTypes.SELECT,
            raw: true
        }).then(function(results) {
            if (results.length != 1) {
                throw "Invalid account.";
            }

            let user = results[0];

            if (!user.mailVerified) {
                user.restriction = user.restriction || "Email address not verified.";
                return user;
            }

            if (!user.authenticated) {
                user.restriction = user.restriction || "Account not authorized.";
                return user;
            }

            return user;
        });
    }).then(function(user) {
        req.user = user;

        /* If the user already has a restriction, or there are no album user restrictions,
         * return the user to the next promise */
        if (user.restriction || !config.has("restrictions")) {
            return user;
        }

        let allowed = config.get("restrictions");
        if (!Array.isArray(allowed)) {
            allowed = [ allowed ];
        }
        for (let i = 0; i < allowed.length; i++) {
            if (allowed[i] == user.username) {
                return user;
            }
        }
        console.log("Unauthorized (logged in) access by user: " + user.username);
        user.restriction = "Unauthorized access attempt to restricted album.";

        return user;
    }).then(function(user) {
        /* If there are maintainers on this album, check if this user is a maintainer */
        if (config.has("maintainers")) {
            let maintainers = config.get("maintainers");
            if (maintainers.indexOf(user.username) != -1) {
                user.maintainer = true;
                if (user.restriction) {
                    console.warn("User " + user.username + " is a maintainer AND has a restriction which will be ignored: " + user.restriction);
                    delete user.restriction;
                }
            }
        }

        return user;
    }).then(function(user) {
        /* Strip out any fields that shouldn't be there. The allowed fields are: */
        let allowed = [
            "maintainer", "username", "displayName", "mailVerified", "authenticated", "name", "mail", "restriction"
        ];
        for (let field in user) {
            if (allowed.indexOf(field) == -1) {
                delete user[field];
            }
        }
        return user;
    });
}
|
||||||
|
|
||||||
|
/* Log a user in. Expects u (username) and p (password) in the query string
 * or body. LDAP is the primary authenticator; if the LDAP bind fails, the
 * site-specific user database is consulted with a sha256 password hash.
 * On success the session's userId is set and the sanitized user is returned. */
router.post("/login", function(req, res) {
    console.log("/users/login");

    let username = req.query.u || req.body.u || "",
        password = req.query.p || req.body.p || "";

    console.log("Login attempt");

    if (!username || !password) {
        return res.status(400).send("Missing username and/or password");
    }

    /* We use LDAP as the primary authenticator; if the user is not
     * found there, we look the user up in the site-specific user database */

    return ldapPromise(username, password).then(function(ldap) {
        /* LDAP users are marked verified/authenticated and cached on the
         * session under the sentinel userId "LDAP" */
        let user = {};
        user.id = "LDAP";
        user.displayName = ldap.displayName;
        user.username = ldap.uid;
        user.mail = ldap.mail;
        user.authenticated = 1;
        user.mailVerified = 1;
        req.session.userId = "LDAP";
        req.session.ldapUser = user;
        return user;
    }).catch(function() {
        /* LDAP rejected (or is not configured): fall back to the local DB */
        console.log("User not found in LDAP. Looking up in DB.");
        let query = "SELECT " +
            "id,mailVerified,authenticated,uid AS username,displayName AS name,mail " +
            "FROM users WHERE uid=:username AND password=:password";
        return userDB.sequelize.query(query, {
            replacements: {
                username: username,
                password: crypto.createHash('sha256').update(password).digest('base64')
            },
            type: userDB.Sequelize.QueryTypes.SELECT
        }).then(function(users) {
            if (users.length != 1) {
                return null;
            }
            let user = users[0];
            req.session.userId = user.id;
            return user;
        });
    }).then(function(user) {
        if (!user) {
            console.log(username + " not found (or invalid password.)");
            req.session.userId = null;
            return res.status(401).send("Invalid login credentials");
        }

        let message = "Logged in as " + user.username + " (" + user.id + ")";
        if (!user.mailVerified) {
            console.log(message + ", who is not verified email.");
        } else if (!user.authenticated) {
            console.log(message + ", who is not authenticated.");
        } else {
            console.log(message);
        }

        /* Re-read through getSessionUser so the response carries the
         * sanitized/restriction-annotated record */
        return getSessionUser(req).then(function(user) {
            return res.status(200).send(user);
        });
    }).catch(function(error) {
        console.log(error);
        return res.status(403).send(error);
    });
});
|
||||||
|
|
||||||
|
/* Log the current user out by clearing the session's user identification,
 * then respond with an empty object. */
router.get("/logout", function(req, res) {
    console.log("/users/logout");

    const session = req.session;

    if (session && session.userId) {
        if (session.userId == "LDAP") {
            session.ldapUser = null;
        }
        session.userId = null;
    }

    res.status(200).send({});
});
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
router,
|
||||||
|
getSessionUser
|
||||||
|
};
|
748
ketr.ketran/server/scanner.js
Normal file
748
ketr.ketran/server/scanner.js
Normal file
@ -0,0 +1,748 @@
|
|||||||
|
/**
|
||||||
|
* scanner
|
||||||
|
*
|
||||||
|
* Face recognition:
|
||||||
|
* 1. For each photo, extract all faces. Store face rectangles.
|
||||||
|
* face_id unique
|
||||||
|
* photo_id foreign key
|
||||||
|
* top left bottom right
|
||||||
|
* identity_id
|
||||||
|
* distance (0 == truth; manually assigned identity)
|
||||||
|
* 2. For each face_id, create:
|
||||||
|
* normalized_file
|
||||||
|
* original_file
|
||||||
|
* 128 float
|
||||||
|
*/
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
/* meta directories are not scanned for photos */
|
||||||
|
const metaDirectories = [ "thumbs", "raw", "face-data", ".git", "corrupt" ];
|
||||||
|
|
||||||
|
const Promise = require("bluebird"),
|
||||||
|
fs = require("fs"),
|
||||||
|
config = require("config"),
|
||||||
|
moment = require("moment"),
|
||||||
|
crypto = require("crypto"),
|
||||||
|
{ stat, mkdir, exists } = require("./lib/util");
|
||||||
|
|
||||||
|
let photoDB = null;
|
||||||
|
|
||||||
|
const picturesPath = config.get("picturesPath").replace(/\/$/, "") + "/";
|
||||||
|
|
||||||
|
let processQueue = [], triedClean = [], lastScan = new Date("1800-01-01");
|
||||||
|
|
||||||
|
//const rawExtension = /\.(nef|orf)$/i, extensions = [ "jpg", "jpeg", "png", "gif", "nef", "orf" ];
|
||||||
|
|
||||||
|
const rawExtension = /\.nef$/i, extensions = [ "jpg", "jpeg", "png", "gif", "nef" ];
|
||||||
|
|
||||||
|
/* Of two sibling files, move whichever was modified more recently into the
 * 'corrupt' subdirectory. Stat failures silently abort (best effort). */
function removeNewerFile(path, fileA, fileB) {
    fs.stat(path + fileA, function(err, statsA) {
        if (err) {
            return;
        }
        fs.stat(path + fileB, function(err, statsB) {
            if (err) {
                return;
            }
            const newer = (statsA.mtime > statsB.mtime) ? fileA : fileB;
            setStatus("Removing file by moving to 'corrupt':" + newer);
            moveCorrupt(path, newer);
        });
    });
}
|
||||||
|
|
||||||
|
let processRunning = false;
|
||||||
|
|
||||||
|
const { spawn } = require('child_process');
|
||||||
|
|
||||||
|
const sharp = require("sharp"), exif = require("exif-reader");
|
||||||
|
|
||||||
|
/**
 * Convert a RAW file to JPEG with darktable-cli, then move the RAW original
 * into the album's raw/ subdirectory. Skipped when the target JPEG already
 * exists. On conversion failure the RAW file is moved to corrupt/.
 *
 * BUGFIX: the previous version wrapped this chain in an outer `new Promise`
 * whose resolve/reject were shadowed by an inner promise and never called,
 * so the returned promise never settled and callers awaiting it would hang
 * (e.g. on the skip path). The chain is now returned directly.
 *
 * @param {string} path - album path relative to picturesPath
 * @param {string} raw - RAW filename within the album
 * @param {string} file - target JPEG filename
 * @returns {Promise} settles when conversion + rename complete (or skip)
 */
function convertRawToJpg(path, raw, file) {
    setStatus(`Converting ${path}${raw} to ${file}.`);

    path = picturesPath + path;

    return exists(path + file.replace(rawExtension, ".jpg")).then(function(exist) {
        if (exist) {
            setStatus("Skipping already converted file: " + file);
            return;
        }

        const darktable = spawn("darktable-cli", [
            path + raw,
            path + file
        ]);

        const stderr = [];
        darktable.stderr.on('data', function(data) {
            stderr.push(data);
        });

        return new Promise((resolve, reject) => {
            darktable.on('exit', (code, signal) => {
                if (signal || code != 0) {
                    let error = "darktable for " + path + file + " returned an error: " + code + "\n" + signal + "\n" + stderr.join("\n") + "\n";
                    setStatus(error, "error");
                    return moveCorrupt(path, file).then(function() {
                        setStatus("darktable failed", "warn");
                        return reject(error);
                    }).catch(function(error) {
                        setStatus("moveCorrupt failed", "warn");
                        return reject(error);
                    });
                }
                /* Conversion succeeded: archive the RAW original */
                return mkdir(path + "raw").then(function() {
                    fs.rename(path + raw, path + "raw/" + raw, function(err) {
                        if (err) {
                            setStatus("Unable to move RAW file: " + path + raw, "error");
                            return reject(err);
                        }
                        return resolve();
                    });
                }).catch(function(error) {
                    setStatus("mkdir failed", "warn");
                    return reject(error);
                });
            });
        });
    });
}
|
||||||
|
|
||||||
|
/* Relocate a corrupt file into the 'corrupt' subdirectory of its album,
 * creating that subdirectory if needed. */
function moveCorrupt(path, file) {
    if (path.indexOf(picturesPath) != 0) {
        path = picturesPath + path;
    }

    setStatus("Moving corrupt file '" + file + "' to " + path + "corrupt", "warn");

    return mkdir(path + "corrupt").then(function() {
        return new Promise(function(resolve, reject) {
            const src = path + file;
            const dest = path + "corrupt/" + file;
            fs.rename(src, dest, function(err) {
                if (err) {
                    setStatus("Unable to move corrupt file: " + src, "error");
                    reject(err);
                } else {
                    resolve();
                }
            });
        });
    });
}
|
||||||
|
|
||||||
|
function processBlock(items) {
|
||||||
|
|
||||||
|
if (items) {
|
||||||
|
processQueue = processQueue.concat(items);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (processRunning) {
|
||||||
|
/* Invoke once per second to check if there are items to process */
|
||||||
|
setTimeout(processBlock, 1000);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let processing = processQueue.splice(0), needsProcessing = [], duplicates = [];
|
||||||
|
|
||||||
|
processRunning = true;
|
||||||
|
|
||||||
|
/* Sort to newest files to be processed first */
|
||||||
|
processing.sort(function(a, b) {
|
||||||
|
return b.stats.mtime - a.stats.mtime;
|
||||||
|
});
|
||||||
|
|
||||||
|
let toProcess = processing.length, lastMessage = moment();
|
||||||
|
setStatus("Items to be processed: " + toProcess);
|
||||||
|
return Promise.mapSeries(processing, (asset) => {
|
||||||
|
if (!asset.raw) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const path = asset.album.path;
|
||||||
|
|
||||||
|
return exists(picturesPath + path + asset.filename).then(function(exist) {
|
||||||
|
if (exist) {
|
||||||
|
return asset;
|
||||||
|
}
|
||||||
|
|
||||||
|
return mkdir(picturesPath + path + "raw").then(function() {
|
||||||
|
return convertRawToJpg(path, asset.raw, asset.filename);
|
||||||
|
}).then(function() {
|
||||||
|
console.log("Done converting...");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(() => {
|
||||||
|
return Promise.mapSeries(processing, (asset) => {
|
||||||
|
return computeHash(picturesPath + asset.album.path + asset.filename).then(function(hash) {
|
||||||
|
asset.hash = hash;
|
||||||
|
return asset;
|
||||||
|
}).then(function(asset) {
|
||||||
|
return photoDB.sequelize.query("SELECT photohashes.*,photos.filename,albums.path FROM photohashes " +
|
||||||
|
"LEFT JOIN photos ON (photos.id=photohashes.photoId) " +
|
||||||
|
"LEFT JOIN albums ON (albums.id=photos.albumId) " +
|
||||||
|
"WHERE hash=:hash OR photoId=:id", {
|
||||||
|
replacements: asset,
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT
|
||||||
|
}).then(function(results) {
|
||||||
|
let query;
|
||||||
|
|
||||||
|
if (results.length == 0) {
|
||||||
|
query = "INSERT INTO photohashes (hash,photoId) VALUES(:hash,:id)";
|
||||||
|
} else if (results[0].hash != asset.hash) {
|
||||||
|
query = "UPDATE photohashes SET hash=:hash WHERE photoId=:id";
|
||||||
|
} else if (results[0].photoId != asset.id) {
|
||||||
|
setStatus("Duplicate asset: " +
|
||||||
|
"'" + asset.album.path + asset.filename + "' is a copy of " +
|
||||||
|
"'" + results[0].path + results[0].filename + "'");
|
||||||
|
if (asset.duplicate != results[0].photoId) {
|
||||||
|
asset.duplicate = results[0].photoId;
|
||||||
|
duplicates.push(asset);
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Even if the hash doesn't need to be updated, the entry needs to be scanned */
|
||||||
|
// console.log("process needed because of " + query);
|
||||||
|
needsProcessing.push(asset);
|
||||||
|
|
||||||
|
if (!query) {
|
||||||
|
return asset;
|
||||||
|
}
|
||||||
|
|
||||||
|
return photoDB.sequelize.query(query, {
|
||||||
|
replacements: asset,
|
||||||
|
}).then(function() {
|
||||||
|
return asset;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(function(asset) {
|
||||||
|
if (!asset) { /* The processed entry is a DUPLICATE. Skip it. */
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var path = asset.album.path,
|
||||||
|
file = asset.filename,
|
||||||
|
created = asset.stats.mtime,
|
||||||
|
albumId = asset.album.id;
|
||||||
|
|
||||||
|
var src = picturesPath + path + file,
|
||||||
|
image = sharp(src);
|
||||||
|
|
||||||
|
return image.limitInputPixels(1073741824).metadata().then(function(metadata) {
|
||||||
|
if (metadata.exif) {
|
||||||
|
metadata.exif = exif(metadata.exif);
|
||||||
|
delete metadata.exif.thumbnail;
|
||||||
|
delete metadata.exif.image;
|
||||||
|
for (var key in metadata.exif.exif) {
|
||||||
|
if (Buffer.isBuffer(metadata.exif.exif[key])) {
|
||||||
|
metadata.exif.exif[key] = "Buffer[" + metadata.exif.exif[key].length + "]";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
asset.width = metadata.width;
|
||||||
|
asset.height = metadata.height;
|
||||||
|
asset.added = moment().format();
|
||||||
|
|
||||||
|
if (metadata.exif && metadata.exif.exif && metadata.exif.exif.DateTimeOriginal && !isNaN(metadata.exif.exif.DateTimeOriginal.valueOf())) {
|
||||||
|
asset.taken = moment(metadata.exif.exif.DateTimeOriginal).format();
|
||||||
|
asset.modified = moment(metadata.exif.exif.DateTimeOriginal).format();
|
||||||
|
|
||||||
|
if (asset.taken == "Invalid date" || asset.taken.replace(/T.*/, "") == "1899-11-30") {
|
||||||
|
setStatus("Invalid EXIF date information for " + asset.album.path + asset.filename);
|
||||||
|
asset.taken = asset.modified = moment(created).format();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
/* Attempt to infer the datestamp from the filename */
|
||||||
|
let date = moment(created).format();
|
||||||
|
|
||||||
|
let match = file.match(/WhatsApp Image (20[0-9][0-9]-[0-9][0-9]-[0-9][0-9]) at (.*).(jpeg|jpg)/);
|
||||||
|
if (match) {
|
||||||
|
date = moment((match[1]+" "+match[2]), "YYYY-MM-DD h.mm.ss a").format();
|
||||||
|
if (date == "Invalid date") {
|
||||||
|
date = moment(created).format();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
match = file.match(/(20[0-9][0-9]-?[0-9][0-9]-?[0-9][0-9])[_\-]?([0-9]{6})?/);
|
||||||
|
if (match) {
|
||||||
|
if (match[2]) { /* Stamp had time in it */
|
||||||
|
date = moment((match[1]+""+match[2]).replace(/-/g, ""), "YYYYMMDDHHmmss").format();
|
||||||
|
} else {
|
||||||
|
date = moment(match[1].replace(/-/g, ""), "YYYYMMDD").format();
|
||||||
|
}
|
||||||
|
if (date == "Invalid date") {
|
||||||
|
date = moment(created).format();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
date = moment(created).format();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
asset.taken = asset.modified = date;
|
||||||
|
}
|
||||||
|
|
||||||
|
let dst = picturesPath + path + "thumbs/" + file;
|
||||||
|
|
||||||
|
return exists(dst).then(function(exist) {
|
||||||
|
if (exist) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
return image.resize(256, 256).withMetadata().toFile(dst).catch(function(error) {
|
||||||
|
setStatus("Error resizing image: " + dst + "\n" + error, "error");
|
||||||
|
throw error;
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
let dst = picturesPath + path + "thumbs/scaled/" + file;
|
||||||
|
return exists(dst).then(function(exist) {
|
||||||
|
if (exist) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
return image.resize(Math.min(1024, metadata.width)).withMetadata().toFile(dst).catch(function(error) {
|
||||||
|
setStatus("Error resizing image: " + dst + "\n" + error, "error");
|
||||||
|
throw error;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
return photoDB.sequelize.query("UPDATE photos SET " +
|
||||||
|
"added=:added,modified=:modified,taken=:taken,width=:width,height=:height,size=:size,scanned=CURRENT_TIMESTAMP " +
|
||||||
|
"WHERE id=:id", {
|
||||||
|
replacements: asset,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).catch(function(error) {
|
||||||
|
setStatus("Error reading image " + src + ":\n" + error, "error");
|
||||||
|
return moveCorrupt(path, file);
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
toProcess--;
|
||||||
|
if (moment().add(-5, 'seconds') > lastMessage) {
|
||||||
|
setStatus("Items to be processed: " + toProcess);
|
||||||
|
lastMessage = moment();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).catch(function(error) {
|
||||||
|
setStatus("Error processing file. Continuing.", "error");
|
||||||
|
throw error;
|
||||||
|
}).then(function() {
|
||||||
|
setStatus("Completed processing queue. Marking " + duplicates.length + " duplicates.");
|
||||||
|
return photoDB.sequelize.transaction(function(transaction) {
|
||||||
|
return Promise.mapSeries(duplicates, function(asset) {
|
||||||
|
return photoDB.sequelize.query("UPDATE photos " +
|
||||||
|
"SET duplicate=:duplicate,modified=CURRENT_TIMESTAMP,scanned=CURRENT_TIMESTAMP WHERE id=:id", {
|
||||||
|
replacements: asset,
|
||||||
|
transaction: transaction
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
setStatus("Looking for removed assets");
|
||||||
|
return photoDB.sequelize.query("SELECT photos.scanned,photos.id,photos.filename,albums.path FROM photos " +
|
||||||
|
"LEFT JOIN albums ON (albums.id=photos.albumId) " +
|
||||||
|
"WHERE photos.deleted=0 AND (DATETIME(photos.scanned)<DATETIME(:lastScan) OR photos.scanned IS NULL)", {
|
||||||
|
replacements: {
|
||||||
|
lastScan: lastScan
|
||||||
|
},
|
||||||
|
type: photoDB.sequelize.QueryTypes.SELECT
|
||||||
|
}).then(function(results) {
|
||||||
|
let deleted = [];
|
||||||
|
setStatus("Checking " + results.length + " assets to see if they are on disk.");
|
||||||
|
return Promise.map(results, function(asset) {
|
||||||
|
return exists(asset.path + asset.filename).then(function(exist) {
|
||||||
|
if (!exist) {
|
||||||
|
setStatus(asset.path + asset.filename + " no longer exists on disk. Marking as deleted.");
|
||||||
|
deleted.push(asset.id);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
return photoDB.sequelize.query("UPDATE photos SET deleted=1,scanned=CURRENT_TIMESTAMP WHERE id IN (:deleted)", {
|
||||||
|
replacements: {
|
||||||
|
deleted: deleted
|
||||||
|
}
|
||||||
|
}).then(function() {
|
||||||
|
return photoDB.sequelize.query("DELETE FROM photohashes WHERE photoId IN (:deleted)", {
|
||||||
|
replacements: {
|
||||||
|
deleted: deleted
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
setStatus(deleted.length + " assets deleted.");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}).then(function() {
|
||||||
|
processRunning = false;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Recursively walk `path` (which must end in '/'), building an album record
 * for this directory and collecting asset records for every matching image
 * file beneath it.
 *
 * @param parent  The parent album record, or null for the top-level album.
 * @param path    Absolute directory path, ending in '/'.
 * @returns Promise resolving to [ albums, assets ] where albums[0] is this
 *          directory's album (ancestors always precede descendants in the
 *          array) and assets is the flat list of discovered image files.
 */
function scanDir(parent, path) {
    /* NOTE: the extension pattern previously used "\." inside a string
     * literal, which collapses to a bare "." and matched ANY character
     * before the extension; "\\." anchors on a literal dot. */
    let re = new RegExp("\\.((" + extensions.join(")|(") + "))$", "i"),
        album = {
            path: path.slice(picturesPath.length), /* path already ends in '/' */
            name: path.replace(/\/$/, "").replace(/.*\//, "").replace(/_/g, " "),
            parent: parent,
            allAssetCount: 0,
            allAlbumCount: 0
        }, albums = [ album ], assets = [];

    return new Promise(function(resolve, reject) {
        fs.readdir(path, function(error, files) {
            if (error) {
                /* Unreadable directories are skipped with a warning, not fatal. */
                setStatus("Could not readdir: " + path, "warn");
                return resolve([]);
            }

            /* Remove meta-data directories from being processed */
            files = files.filter((file) => {
                for (let i = 0; i < files.length; i++) {
                    /* If this file has an original NEF/ORF on the system, don't add the JPG to the DB */
                    if (rawExtension.exec(files[i]) && file == files[i].replace(rawExtension, ".jpg")) {
                        return false;
                    }

                    /* If there is a different CASE (eg. JPG vs jpg) don't add it, and remove the 'lower case'
                     * version from disk. */
                    if (file != files[i] && file.toUpperCase() == files[i]) {
                        removeNewerFile(path, file, files[i]);
                        /* setStatus() takes (status, level); the filenames must be
                         * concatenated into the message or they are silently dropped
                         * (previously `file` was passed as the level argument). */
                        setStatus("Duplicate file in " + path + ": " + file + " " + files[i]);
                        return false;
                    }
                }
                return metaDirectories.indexOf(file) == -1;
            });

            return resolve(files);
        });
    }).then(function(files) {
        return Promise.map(files, function(file) {
            let filepath = path + file;
            return stat(filepath).then(function(stats) {
                if (stats.isDirectory()) {
                    filepath += "/";
                    /* Recurse; aggregate descendant album/asset counts onto this album. */
                    return scanDir(album, filepath).spread(function(_albums, _assets) {
                        album.allAssetCount += _assets.length;
                        album.allAlbumCount += _albums.length + 1;
                        albums = albums.concat(_albums);
                        assets = assets.concat(_assets);
                    }).catch(function(error) {
                        setStatus("Could not scanDir " + filepath + ": " + error, "error");
                    });
                }

                /* Check file extensions */
                if (!re.exec(file)) {
                    return;
                }

                album.hasAssets = true;

                const asset = {
                    /* RAW files are tracked under the name of the JPG generated for them. */
                    filename: file.replace(rawExtension, ".jpg"),
                    name: file.replace(/\.[^.]*$/, ""),
                    stats: {
                        mtime: stats.mtime,
                        ctime: stats.ctime
                    },
                    size: stats.size,
                    album: album
                };
                if (file != asset.filename) {
                    asset.raw = file; /* remember the original RAW filename */
                }
                assets.push(asset);
            });
        });
    }).then(function() {
        return Promise.map(albums, function(album) {
            if (album.hasAssets) {
                /* NOTE(review): album.path is relative to picturesPath, yet other
                 * call sites prefix picturesPath before mkdir — confirm whether
                 * mkdir() here should also be rooted at picturesPath. */
                return mkdir(album.path + "thumbs/scaled");
            }
        });
    }).then(function() {
        return [ albums, assets ];
    });
}
|
||||||
|
|
||||||
|
/**
 * Find the DB row for `album` (matched by path + parentId) or create it.
 * Sets album.parentId as a side effect and, on success, stores the row id
 * in album.id and resolves with it.
 *
 * Albums must be processed ancestors-first so that album.parent.id is
 * already populated when a child is looked up.
 *
 * @param transaction  Sequelize transaction used for the INSERT.
 * @param album        Album record produced by scanDir().
 * @returns Promise resolving to the album's DB id.
 * @throws Error if album.parent exists but has no id yet (ordering bug).
 */
function findOrCreateDBAlbum(transaction, album) {
    let query = "SELECT id FROM albums WHERE path=:path AND ";
    if (!album.parent) {
        /* Top-level album: parentId is stored as NULL. */
        query += "parentId IS NULL";
        album.parentId = null;
    } else {
        if (!album.parent.id) {
            /* Throw a real Error (previously a bare string) so stack traces survive. */
            let error = new Error("Albums in array in non ancestral order!");
            setStatus(error.message, "error");
            throw error;
        }
        album.parentId = album.parent.id;
        query += "parentId=:parentId";
    }

    return photoDB.sequelize.query(query, {
        replacements: album,
        type: photoDB.sequelize.QueryTypes.SELECT
    }).then(function(results) {
        if (results.length == 0) {
            if (!album.parent) {
                setStatus("Creating top level album: " + picturesPath, "warn");
            }
            return photoDB.sequelize.query("INSERT INTO albums (path,parentId,name) VALUES(:path,:parentId,:name)", {
                replacements: album,
                transaction: transaction
            }).spread(function(results, metadata) {
                /* SQLite: metadata.lastID is the rowid assigned by the INSERT. */
                return metadata.lastID;
            });
        } else {
            return results[0].id;
        }
    }).then(function(id) {
        album.id = id;
        return id;
    });
}
|
||||||
|
|
||||||
|
/**
 * Find the DB row for `asset` (matched by albumId + filename) or insert it.
 * For an existing row, copies id/scanned/modified onto the asset; if the
 * size on disk differs from the DB, the scanned/modified stamps are removed
 * so the asset is re-queued for hashing.
 *
 * @param transaction  Sequelize transaction used for the INSERT.
 * @param asset        Asset record from scanDir(); must have album.id set.
 * @returns Promise resolving to the (mutated) asset.
 * @throws Error if the asset has no album or the album has no DB id yet.
 */
function findOrUpdateDBAsset(transaction, asset) {
    if (!asset.album || !asset.album.id) {
        /* Throw a real Error (previously a bare string) so stack traces survive. */
        let error = new Error("Asset being processed without an album");
        setStatus(error.message, "warn");
        throw error;
    }

    asset.albumId = asset.album.id;

    return photoDB.sequelize.query(
        "SELECT id,DATETIME(scanned) AS scanned,size,DATETIME(modified) AS modified " +
        "FROM photos " +
        "WHERE albumId=:albumId AND filename=:filename", {
        replacements: asset,
        type: photoDB.sequelize.QueryTypes.SELECT
    }).then(function(results) {
        if (results.length == 0) {
            /* New on disk: create the row. Callers detect a new asset because
             * asset.scanned remains unset on this path. */
            return photoDB.sequelize.query("INSERT INTO photos " +
                "(albumId,filename,name,size) VALUES(:albumId,:filename,:name,:size)", {
                replacements: asset,
                transaction: transaction
            }).spread(function(results, metadata) {
                asset.id = metadata.lastID; /* SQLite rowid of the INSERT */
            });
        }

        asset.id = results[0].id;
        /* NOTE(review): if the column is NULL, new Date(null) yields the epoch
         * rather than an invalid date; doScan() then treats the asset as stale
         * (epoch < mtime) and re-queues it — confirm this is intended. */
        asset.scanned = new Date(results[0].scanned);
        asset.modified = new Date(results[0].modified);

        /* If the size on disk changed, update the size entry in the DB. This shouldn't happen in
         * production unless someone modifies the file, then re-stamps the modified time */
        if (asset.size != results[0].size) {
            setStatus("File was modified with time-restamp (HASH regeneration will be queued): " + asset.filename);
            delete asset.scanned;
            delete asset.modified;
        }
    }).then(function() {
        return asset;
    });
}
|
||||||
|
|
||||||
|
/**
 * Stream the file at `filepath` through SHA-256.
 *
 * @param filepath  Path of the file to hash.
 * @returns Promise resolving to the lowercase hex digest; rejects on a read
 *          error (after logging a warning).
 */
function computeHash(filepath) {
    return new Promise(function(resolve, reject) {
        let hash = crypto.createHash("sha256");
        let input = fs.createReadStream(filepath);

        /* Defensive check retained from the original implementation. */
        if (!input) {
            console.warn("Unable to open " + filepath);
            return reject();
        }

        input.on("error", function(error) {
            console.warn("Error reading " + filepath);
            reject(error);
        });

        input.on("readable", function() {
            const chunk = input.read();
            if (!chunk) {
                /* End of stream: finish the digest and release references. */
                input.close();
                resolve(hash.digest("hex"));
                hash = null;
                input = null;
                return;
            }
            hash.update(chunk);
        });
    });
}
|
||||||
|
|
||||||
|
/* In-progress scan log: entries of {level, time, log} pushed by setStatus().
 * Non-empty while a scan is running (doScan() checks length) and cleared by
 * setStatus("idle"). */
let scanningStatus = [];
|
||||||
|
|
||||||
|
/**
 * Record one scan progress/log entry and echo it to the console.
 *
 * The sentinel status "idle" clears the accumulated log (which also marks
 * the scan as no longer running) and records nothing.
 *
 * @param status  Message to log (or the "idle" sentinel).
 * @param level   "info" (default), "warn", or "error" — selects the console
 *                channel and is stored on the log entry.
 */
function setStatus(status, level) {
    if (status == "idle") {
        scanningStatus = [];
        return;
    }

    const severity = level || "info";
    scanningStatus.push({
        level: severity,
        time: moment().format(),
        log: status
    });

    if (severity === "error") {
        console.error(status);
    } else if (severity === "warn") {
        console.warn(status);
    } else {
        console.log(status);
    }
}
|
||||||
|
|
||||||
|
/**
 * Top-level scan entry point (exported as `scan`).
 *
 * Walks picturesPath via scanDir(), creates/updates album rows in series
 * (ancestors first), then photo rows with concurrency 10, queues new or
 * modified assets into processBlock() for hashing/thumbnailing, and finally
 * refreshes the module-level lastScan from MAX(scanned) in the DB.
 *
 * If a scan is already in progress (scanningStatus is non-empty), resolves
 * immediately with the current status log instead of starting another scan.
 *
 * @returns Promise resolving to "scan complete" (or the in-progress status
 *          array); rejects after logging on any pipeline error.
 */
function doScan() {
    /* 1. Scan for all assets which will be managed by the system. readdir
     * 2. Check if entry in DB. Check mod-time in DB vs. stats from #1
     *    - For albums
     *    - For assets
     * 3. If not in DB, or mod-time changed, queue for HASH CHECK
     *
     * HASH CHECK
     * 1. Compute HASH
     * 2. Check for HASH in photohash -- skip?
     * 3. Check for and create thumbs/FILE thumbs/scaled/FILE
     * 4. If necessary, create JPG from RAW
     * 5. Update last-scanned date in DB for entry
     * 6. Look up all DB entries with last-scanned date < NOW -- purge from DB (they were
     *    removed on disk)? Also purge from the HASH table.
     */
    let initialized = Date.now();
    let now = Date.now();
    let needsProcessing = [];

    /* A non-empty status log means a scan is already running: return it. */
    if (scanningStatus.length != 0) {
        return Promise.resolve(scanningStatus);
    }

    return scanDir(null, picturesPath).spread(function(albums, assets) {
        setStatus("Found " + assets.length + " assets in " + albums.length + " albums after " +
            ((Date.now() - now) / 1000) + "s");
        /* One at a time, in series, as the album[] array has parents first, then descendants.
         * Operating in parallel could result in a child being searched for prior to the parent */
        now = Date.now();

        let toProcess = albums.length, lastMessage = moment();
        return photoDB.sequelize.transaction(function(transaction) {
            return Promise.mapSeries(albums, function(album) {
                return findOrCreateDBAlbum(transaction, album).then(function() {
                    toProcess--;
                    /* Throttle progress messages to one per 5 seconds. */
                    if (moment().add(-5, 'seconds') > lastMessage) {
                        setStatus("Albums to be created in DB: " + toProcess);
                        lastMessage = moment();
                    }
                });
            });
        }).then(function() {
            setStatus("Processed " + albums.length + " album DB entries in " +
                ((Date.now() - now) / 1000) + "s");
            now = Date.now();

            setStatus(assets.length + " assets remaining to be verified/updated. ETA N/A");

            let processed = 0, start = Date.now(), last = 0, updateScanned = [], newEntries = 0;
            return photoDB.sequelize.transaction(function(transaction) {
                return Promise.map(assets, function(asset) {
                    return Promise.resolve(asset).then(function(asset) {
                        /* If both mtime and ctime of the asset are older than the lastScan, skip it
                         *
                         * Can only do this after a full scan has occurred */
                        if (lastScan != null && asset.stats.mtime < lastScan && asset.stats.ctime < lastScan) {
                            return asset;
                        }

                        return findOrUpdateDBAsset(transaction, asset).then(function(asset) {
                            /* findOrUpdateDBAsset leaves asset.scanned unset for
                             * freshly-inserted rows. */
                            if (!asset.scanned) {
                                newEntries++;
                            }
                            if (!asset.scanned || asset.scanned < asset.stats.mtime || !asset.modified) {
                                // if (!asset.scanned) { console.log("no scan date on asset"); }
                                // if (asset.scanned < asset.stats.mtime) { console.log("scan date older than mtime"); }
                                // if (!asset.modified) { console.log("no mtime."); }
                                needsProcessing.push(asset);
                            } else {
                                updateScanned.push(asset.id);
                            }
                            return asset;
                        }).then(function(asset) {
                            return asset;
                        });
                    }).then(function(asset) {
                        processed++;

                        /* Emit an ETA update at most every 5 seconds. */
                        let elapsed = Date.now() - start;
                        if (elapsed < 5000) {
                            return asset;
                        }

                        /* NOTE(review): message text reads "remaining be verified" (sic). */
                        let remaining = assets.length - processed, eta = Math.ceil((elapsed / 1000) * remaining / (processed - last));
                        setStatus(remaining + " assets remaining be verified/updated " +
                            "(" + newEntries + " new entries, " + needsProcessing.length + " need processing," + (processed - newEntries) + " up-to-date so far). ETA " + eta + "s");
                        last = processed;
                        start = Date.now();
                    });
                }, {
                    concurrency: 10
                });
            }).then(function() {
                /* Assets that required no re-hash just get their scan stamp bumped. */
                if (updateScanned.length) {
                    return photoDB.sequelize.query("UPDATE photos SET scanned=CURRENT_TIMESTAMP WHERE id IN (:ids)", {
                        replacements: {
                            ids: updateScanned
                        }
                    }).then(function() {
                        setStatus("Updated scan date of " + updateScanned.length + " assets");
                        updateScanned = [];
                    });
                }
            }).then(function() {
                setStatus(newEntries + " assets are new. " + (needsProcessing.length - newEntries) + " assets have been modified.\n" +
                    needsProcessing.length + " assets need HASH computed. " + (assets.length - needsProcessing.length) + " need no update.");;
                /* NOTE(review): processBlock() is kicked off without awaiting its
                 * promise — hashing/thumbnailing continues after doScan resolves. */
                processBlock(needsProcessing);
                needsProcessing = [];
            }).then(function() {
                setStatus("Scanned " + assets.length + " asset DB entries in " +
                    ((Date.now() - now) / 1000) + "s");
                assets = [];
            });
        });
    }).then(function() {
        setStatus("Total time to initialize DB and all scans: " + ((Date.now() - initialized) / 1000) + "s");
        /* Recompute lastScan so the next doScan() can skip untouched files. */
        return photoDB.sequelize.query("SELECT max(scanned) AS scanned FROM photos", {
            type: photoDB.sequelize.QueryTypes.SELECT
        }).then(function(results) {
            if (results[0].scanned == null) {
                /* Empty DB: pick a date older than any possible file mtime. */
                lastScan = new Date("1800-01-01");
            } else {
                lastScan = new Date(results[0].scanned);
            }
            setStatus("Updating any asset newer than " + moment(lastScan).format());
        });
    }).then(function() {
        /* Clears scanningStatus, allowing the next scan to start. */
        setStatus("idle");
        return "scan complete";
    }).catch(function(error) {
        setStatus(error);
        throw error;
    });
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
init: function(db) {
|
||||||
|
photoDB = db;
|
||||||
|
},
|
||||||
|
scan: doScan
|
||||||
|
};
|
||||||
|
|
16
ketr.ketran/server/timestamp.js
Normal file
16
ketr.ketran/server/timestamp.js
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
/**
 * Zero-pad a number to at least two digits ("7" -> "07", "12" -> "12").
 *
 * Generalized from ("0" + n).slice(-2), which truncated values of three or
 * more digits to their last two ("100" -> "00"); padStart returns them
 * intact. Behavior is identical for the 0-59 date/time components this
 * module passes in.
 *
 * @param {number} number - date/time component to format.
 * @returns {string} the number, left-padded with "0" to width 2.
 */
function twoDigit(number) {
    return String(number).padStart(2, "0");
}
|
||||||
|
|
||||||
|
/**
 * Format a Date as "YYYY-MM-DD HH:MM:SS" in local time.
 *
 * @param {Date} [date] - moment to format; defaults to the current time.
 * @returns {string} the formatted timestamp.
 */
function timestamp(date) {
    const when = date || new Date();
    const datePart = when.getFullYear() + "-" + twoDigit(when.getMonth() + 1) + "-" + twoDigit(when.getDate());
    const timePart = twoDigit(when.getHours()) + ":" + twoDigit(when.getMinutes()) + ":" + twoDigit(when.getSeconds());
    return datePart + " " + timePart;
}
|
||||||
|
|
||||||
|
/* Expose only the formatter; twoDigit stays module-private. */
module.exports = {
    timestamp
};
|
@ -6,10 +6,8 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"start": "webpack-dev-server --mode development --host 0.0.0.0 --config webpack.dev.js",
|
"start": "webpack-dev-server --mode development --host 0.0.0.0 --config webpack.dev.js",
|
||||||
"build": "webpack --config webpack.prod.js",
|
"build": "webpack --config webpack.prod.js",
|
||||||
"commit-build": "./commit-build.sh",
|
|
||||||
"watch": "webpack --config webpack.prod.js --watch",
|
"watch": "webpack --config webpack.prod.js --watch",
|
||||||
"update": "./update.sh",
|
"backend": "NODE_CONFIG_ENV='production' node ketr.ketran/server/app.js"
|
||||||
"backend": "NODE_CONFIG_ENV='production' node server/app.js"
|
|
||||||
},
|
},
|
||||||
"repository": "ssh://git@gitlab.ketrenos.com/jketreno/ketr.settlers.git",
|
"repository": "ssh://git@gitlab.ketrenos.com/jketreno/ketr.settlers.git",
|
||||||
"author": "James Ketrenos <james_settlers@ketrenos.com>",
|
"author": "James Ketrenos <james_settlers@ketrenos.com>",
|
||||||
|
Loading…
x
Reference in New Issue
Block a user