Prevent duplicate hash entries in hash transaction
commit 7077862382
parent 8b89ce7507
@@ -212,13 +212,20 @@ function processBlock(items) {
                 lastMessage = moment();
             }
         });
     }, {
-        concurrency: 5
+        concurrency: 10
     }).then(function() {
         let toProcess = processing.length, lastMessage = moment();
-        /* Needs to be one at a time in case there are multiple HASH collisions */
+        /* Needs to be one at a time in case there are multiple HASH collisions. To speed
+         * up commits to the DB we will batch these into 100 record transactions where HASH
+         * collions are done via DB query *AND* in-memory table lookup in the current batch */
+        let batchSize = 100, batches = [];
+        while (processing.length) {
+            batches.push(processing.splice(0, batchSize));
+        }
+        return Promise.mapSeries(batches, function(batch) {
         return photoDB.sequelize.transaction(function(transaction) {
-            return Promise.mapSeries(processing, function(asset) {
+            return Promise.mapSeries(batch, function(asset, index) {
                 return photoDB.sequelize.query("SELECT photohashes.*,photos.filename,albums.path FROM photohashes " +
                     "LEFT JOIN photos ON (photos.id=photohashes.photoId) " +
                     "LEFT JOIN albums ON (albums.id=photos.albumId) " +
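The comment introduced in this hunk explains the strategy: rather than committing one hash row at a time, the assets are drained into batches of 100 and each batch is written inside its own Sequelize transaction, with batches processed strictly in sequence. The sketch below restates that pattern on its own; it assumes a Bluebird Promise and a Sequelize instance comparable to photoDB.sequelize, and insertHash is a hypothetical stand-in for the per-asset query/insert work shown in the diff.

const Promise = require('bluebird');

/* Minimal sketch of the batching pattern, not the project's actual code.
 * `insertHash(asset, index, batch, transaction)` is hypothetical. */
function commitHashesInBatches(processing, sequelize, insertHash) {
    let batchSize = 100, batches = [];

    // Drain `processing` into fixed-size batches, mirroring the splice loop above.
    while (processing.length) {
        batches.push(processing.splice(0, batchSize));
    }

    // One DB transaction per batch; batches run one at a time.
    return Promise.mapSeries(batches, function(batch) {
        return sequelize.transaction(function(transaction) {
            // Assets within a batch are also serialized, so earlier items are
            // available to the in-memory collision check for later ones (next hunk).
            return Promise.mapSeries(batch, function(asset, index) {
                return insertHash(asset, index, batch, transaction);
            });
        });
    });
}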
@@ -227,6 +234,14 @@ function processBlock(items) {
                     type: photoDB.sequelize.QueryTypes.SELECT
                 }).then(function(results) {
                     let query;
+
+                    /* If this asset exists in this transaction block, push it into the results */
+                    for (let i = 0; i < index; i++) {
+                        if (batch[i].hash == asset.hash) {
+                            results.push(batch[i]);
+                        }
+                    }
+
                     if (results.length == 0) {
                         query = "INSERT INTO photohashes (hash,photoId) VALUES(:hash,:id)";
                     } else if (results[0].hash != asset.hash) {
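Rows for earlier assets in the same batch have not been committed yet, so the SELECT above can miss a hash that is already queued for insertion; this hunk therefore also checks the earlier entries of the current batch in memory before deciding whether to INSERT. The fragment below restates that check in isolation; mergeInBatchCollisions is a hypothetical wrapper, while the loop body matches the diff.

/* Sketch only: fold earlier in-batch assets with the same hash into `results`
 * so the caller's `results.length == 0` test means "hash is genuinely new". */
function mergeInBatchCollisions(results, batch, asset, index) {
    for (let i = 0; i < index; i++) {
        if (batch[i].hash == asset.hash) {
            // Treat the earlier asset in this batch as if the DB query had returned it.
            results.push(batch[i]);
        }
    }
    return results;
}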
@@ -259,8 +274,12 @@ function processBlock(items) {
                 });
             });
         });
+        }).catch(function(error) {
+            console.log("Error commiting HASH transactions");
+            throw error;
+        });
     }).then(function() {
-        let toProcess = processing.length, lastMessage = moment();
+        let toProcess = needsProcessing.length, lastMessage = moment();
         console.log(needsProcessing.length + " assets need to have metadata extracted");
         return Promise.map(needsProcessing, function(asset) {
             var path = asset.album.path,
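The final hunk also attaches a .catch to the batched chain that logs and re-throws, so the failure is reported once but the outer promise chain still rejects instead of continuing into the metadata-extraction step. A minimal sketch of that pattern, where runHashTransactions and work are hypothetical:

/* Sketch of the log-and-rethrow pattern; `work` stands in for the batched
 * hash transactions returned by Promise.mapSeries above. */
function runHashTransactions(work) {
    return work().catch(function(error) {
        console.log("Error commiting HASH transactions");
        throw error;  // re-throw so downstream .then() handlers are skipped on failure
    });
}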