|
|
@@ -4,6 +4,7 @@ const fs = require("fs");
|
|
|
const path = require("path");
|
|
|
const minimist = require("minimist");
|
|
|
const low = require("lowdb");
|
|
|
+const Queue = require('better-queue');
|
|
|
const FileSync = require("lowdb/adapters/FileSync");
|
|
|
|
|
|
const chokidar = require("chokidar");
|
|
|
@@ -103,7 +104,7 @@ const processWithHandbrake = (input, output, preset) => {
|
|
|
processOutput(` --> processing ${output} error: ${err.message || err}.\n`);
|
|
|
reject(err);
|
|
|
}).on('progress', (progress) => {
|
|
|
- processOutput(` --> processing ${output} - ${progress.percentComplete}, ETA: ${progress.eta}`);
|
|
|
+ processOutput(` --> processing ${output} - ${progress.percentComplete}%, ETA: ${progress.eta}`);
|
|
|
}).on('cancelled', () => {
|
|
|
processOutput(` --> processing ${output} cancelled\n`);
|
|
|
reject(new Error(`Processing ${output} cancelled`));
|
|
|
@@ -114,62 +115,7 @@ const processWithHandbrake = (input, output, preset) => {
|
|
|
});
|
|
|
};
|
|
|
|
|
|
-if (args.version) return version();
|
|
|
-if (args.help || !args.config) return help();
|
|
|
-
|
|
|
-const ignoreInitial = (args.hasOwnProperty('ignoreInitial')) ? args.ignoreInitial : false; // ignore initial files
|
|
|
-const test = (args.hasOwnProperty('test')) ? args.test : false; // do a dry run ... just log
|
|
|
-
|
|
|
-// get our paths
|
|
|
-const paths = require(args.config);
|
|
|
-
|
|
|
-// init our defaults
|
|
|
-const defaults = {
|
|
|
- "preset": "Fast 1080p30",
|
|
|
- "clean": {},
|
|
|
- "titlecase": false,
|
|
|
- "folder": false,
|
|
|
- "database": "data/db.json"
|
|
|
-}
|
|
|
-
|
|
|
-// setup watcher options
|
|
|
-const opts = {
|
|
|
- ignored: /(^|[\/\\])\..|([s|S]ample\.*)/,
|
|
|
- ignoreInitial: ignoreInitial,
|
|
|
- persistent: true,
|
|
|
- usePolling: true,
|
|
|
- interval: 10000,
|
|
|
- depth: 1,
|
|
|
- awaitWriteFinish: {
|
|
|
- stabilityThreshold: 3000,
|
|
|
- pollInterval: 1000
|
|
|
- },
|
|
|
- ignorePermissionErrors: false
|
|
|
-};
|
|
|
-
|
|
|
-// parse the paths to dirs
|
|
|
-const dirs = Object.keys(paths);
|
|
|
-
|
|
|
-// initialize watches and db
|
|
|
-let watches = []; // array of things to watch
|
|
|
-
|
|
|
-for (let d in dirs) { // loop the dirs
|
|
|
- let dir = dirs[d]; // pointer
|
|
|
- let options = Object.assign({}, defaults, paths[dir]); // baseline the options
|
|
|
- let db = getDbForDir(dir);
|
|
|
- db.defaults({ files: [] }).write(); // init the database
|
|
|
- for (let e in options.exts) { // loop the exts to watch
|
|
|
- let ext = options.exts[e]; // alias the ext
|
|
|
- watches.push(`${dir}/**/*.${ext}`); // push the watch
|
|
|
- }
|
|
|
-}
|
|
|
-
|
|
|
-const watcher = chokidar.watch(watches, opts); // init our watcher
|
|
|
-console.log('Watching', watches);
|
|
|
-
|
|
|
-// when a new file is added
|
|
|
-watcher.on('add', async (file) => {
|
|
|
-
|
|
|
+const processFile = async (file) => {
|
|
|
let adapter,
|
|
|
db,
|
|
|
options,
|
|
|
@@ -194,7 +140,7 @@ watcher.on('add', async (file) => {
|
|
|
let found = findFile(db, file); // does it already exist?
|
|
|
if (found && found.status && found.status === 'success') { // was it already processed?
|
|
|
//console.log(`File ${file} has already been successfully processed.`);
|
|
|
- return; // break this loop
|
|
|
+ return false; // break this loop
|
|
|
} else if (!found) { // was it found?
|
|
|
processOutput(`-> ${path.basename(file)} [processing]`);
|
|
|
setFile(db, { input:file, output:'', status:'', date:new Date() }); // push onto the list an entry
|
|
|
@@ -211,7 +157,6 @@ watcher.on('add', async (file) => {
|
|
|
break; // break the loop
|
|
|
}
|
|
|
}
|
|
|
-
|
|
|
// clean the output name
|
|
|
let cleanKeys = Object.keys(clean);
|
|
|
for (let c in cleanKeys) {
|
|
|
@@ -220,38 +165,36 @@ watcher.on('add', async (file) => {
|
|
|
let re = new RegExp(key, 'gi');
|
|
|
output = output.replace(re, val);
|
|
|
}
|
|
|
-
|
|
|
+ // baseline the output name
|
|
|
output = output.trim(); // trim the whitespace
|
|
|
output = output.replace(/(\d{4})$/, `($1)`); // if there is a date at the end wrap it
|
|
|
-
|
|
|
- if (folder) { // do we have a sub folder option?
|
|
|
+ // do we have a sub folder option?
|
|
|
+ if (folder) {
|
|
|
let match = output.match(/^(.*?)?\./gm); // get the name for the file before the first .
|
|
|
folder = (match && match.length > 0) ? match[0].slice(0, -1) : false; // get just the stuff before the dot ... or false
|
|
|
}
|
|
|
-
|
|
|
// do we have title case enabled?
|
|
|
if (options.titlecase) output = output.toTitleCase(); // titlecase that string
|
|
|
-
|
|
|
+ // baseline the target
|
|
|
let target = destination + ((folder) ? '/' + folder : ''); // setup the target location
|
|
|
output = target + '/' + output + '.' + ext; // update the new name
|
|
|
-
|
|
|
+ // do we already have an existing output that matches?
|
|
|
if (fs.existsSync(output)) {
|
|
|
processOutput(`-> ${path.basename(file)} [skipping ... already processed]\n`);
|
|
|
setFile(db, file, { output: output, status: 'success', date:new Date() }); // update database with status
|
|
|
- return;
|
|
|
+ return false;
|
|
|
+ } else {
|
|
|
+ processOutput('\n'); // send a new line
|
|
|
}
|
|
|
- processOutput('\n');
|
|
|
-
|
|
|
- setFile(db, file, { output: output }); // update database with output
|
|
|
-
|
|
|
+ // update database with output name
|
|
|
+ setFile(db, file, { output: output });
|
|
|
+ // create parent if required
|
|
|
if (target && !fs.existsSync(target)) {
|
|
|
console.log(' --> creating parent directory:', target);
|
|
|
fs.mkdirSync(target, {
|
|
|
recursive: true
|
|
|
});
|
|
|
}
|
|
|
-
|
|
|
- //console.log(` --> processing "${path.basename(input)}" to "${output}" with "${preset}"`);
|
|
|
// spawn handbrake
|
|
|
if (!test) {
|
|
|
try {
|
|
|
@@ -261,10 +204,11 @@ watcher.on('add', async (file) => {
|
|
|
setFile(db, file, { status: 'failure', date:new Date() }); // update database with status
|
|
|
}
|
|
|
}
|
|
|
-}).on('change', (file) => {
|
|
|
- console.log(` -> ${file} has been changed`);
|
|
|
-}).on('unlink', async (file) => {
|
|
|
- console.log(` -> ${file} has been removed`);
|
|
|
+ return true; // when complete return true
|
|
|
+};
|
|
|
+
|
|
|
+// cleanup removes from the db
|
|
|
+const cleanup = (file) => {
|
|
|
let adapter, db, options;
|
|
|
for (let i = 0, l = dirs.length; i < l; i++) {
|
|
|
let dir = dirs[i]; // pointer to the dir
|
|
|
@@ -273,6 +217,74 @@ watcher.on('add', async (file) => {
|
|
|
removeFile(db, file); // remove file form database
|
|
|
}
|
|
|
}
|
|
|
-}).on('error', (error) => {
|
|
|
- console.error(` -> Error: ${error.message || error}`);
|
|
|
-});
|
|
|
+};
|
|
|
+
|
|
|
+// handle args
|
|
|
+if (args.version) return version(); // show version
|
|
|
+if (args.help || !args.config) return help(); // show help
|
|
|
+const ignoreInitial = (args.hasOwnProperty('ignoreInitial')) ? args.ignoreInitial : false; // ignore initial files
|
|
|
+const test = (args.hasOwnProperty('test')) ? args.test : false; // do a dry run ... just log
|
|
|
+
|
|
|
+// get our paths
|
|
|
+const paths = require(args.config);
|
|
|
+
|
|
|
+// init our defaults
|
|
|
+const defaults = {
|
|
|
+ "preset": "Fast 1080p30",
|
|
|
+ "clean": {},
|
|
|
+ "titlecase": false,
|
|
|
+ "folder": false,
|
|
|
+ "database": "data/db.json"
|
|
|
+}
|
|
|
+
|
|
|
+// setup watcher options
|
|
|
+const opts = {
|
|
|
+ ignored: /(^|[\/\\])\..|([sS]ample\.*)/,
|
|
|
+ ignoreInitial: ignoreInitial,
|
|
|
+ persistent: true,
|
|
|
+ usePolling: true,
|
|
|
+ interval: 10000,
|
|
|
+ depth: 1,
|
|
|
+ awaitWriteFinish: {
|
|
|
+ stabilityThreshold: 3000,
|
|
|
+ pollInterval: 1000
|
|
|
+ },
|
|
|
+ ignorePermissionErrors: false,
|
|
|
+ atomic: true
|
|
|
+};
|
|
|
+
|
|
|
+// parse the paths to dirs
|
|
|
+const dirs = Object.keys(paths);
|
|
|
+
|
|
|
+// initialize watches and db then start the watcher
|
|
|
+const main = () => {
|
|
|
+ var queue = new Queue((input,cb) => { // init the queue
|
|
|
+ processFile(input) // process the queue entry; processFile is async, so wait for it to settle
|
|
|
+ .then((result) => cb(null,result), (err) => cb(err)); // complete the task only when done, surface failures
|
|
|
+ });
|
|
|
+ var watches = []; // array of things to watch
|
|
|
+ for (let d in dirs) { // loop the dirs
|
|
|
+ let dir = dirs[d]; // pointer
|
|
|
+ let options = Object.assign({}, defaults, paths[dir]); // baseline the options
|
|
|
+ let db = getDbForDir(dir);
|
|
|
+ db.defaults({ files: [] }).write(); // init the database
|
|
|
+ for (let e in options.exts) { // loop the exts to watch
|
|
|
+ let ext = options.exts[e]; // alias the ext
|
|
|
+ watches.push(`${dir}/**/*.${ext}`); // push the watch
|
|
|
+ }
|
|
|
+ }
|
|
|
+ const watcher = chokidar.watch(watches, opts); // init our watcher
|
|
|
+ console.log('Watching', watches);
|
|
|
+ watcher.on('add', async (file) => { // when a new file is added ...
|
|
|
+ queue.push(file); // push the file onto the queue to be processed
|
|
|
+ }).on('change', (file) => { // when a file changes ...
|
|
|
+ console.log(` -> ${file} has been changed`);
|
|
|
+ }).on('unlink', async (file) => { // when a file is removed ...
|
|
|
+ console.log(` -> ${file} has been removed`);
|
|
|
+ cleanup(file);
|
|
|
+ }).on('error', (error) => { // on errors ..
|
|
|
+ console.error(` -> Error: ${error.message || error}`);
|
|
|
+ });
|
|
|
+}
|
|
|
+
|
|
|
+main();
|