Began work on tasks

tasks-and-maintenence-page
Isaac Abadi 3 years ago
parent d6ed82134b
commit 2b1771d30d

@@ -300,6 +300,7 @@ exports.getFileDirectoriesAndDBs = async () => {
}
exports.importUnregisteredFiles = async () => {
const imported_files = [];
const dirs_to_check = await exports.getFileDirectoriesAndDBs();
// run through check list and check each file to see if it's missing from the db
@@ -316,12 +317,17 @@ exports.importUnregisteredFiles = async () => {
const file_is_registered = !!(files_with_same_url.find(file_with_same_url => path.resolve(file_with_same_url.path) === path.resolve(file.path)));
if (!file_is_registered) {
// add additional info
await exports.registerFileDB(file['path'], dir_to_check.type, dir_to_check.user_uid, null, dir_to_check.sub_id, null);
const file_obj = await exports.registerFileDB(file['path'], dir_to_check.type, dir_to_check.user_uid, null, dir_to_check.sub_id, null);
if (file_obj) {
imported_files.push(file_obj['uid']);
logger.verbose(`Added discovered file to the database: ${file.id}`);
} else {
logger.error(`Failed to import ${file['path']} automatically.`);
}
}
}
}
return imported_files;
}
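With the return value added above, callers can now tell how many files were picked up. A minimal usage sketch, assuming db.js is required as db_api, the DB connection is already open, and the wrapper name importMissing is purely illustrative:

const db_api = require('./db');

async function importMissing() {
    // scans the configured file directories and registers anything missing from the DB
    const imported_uids = await db_api.importUnregisteredFiles();
    console.log(`Imported ${imported_uids.length} unregistered file(s)`);
    return imported_uids;
}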
exports.addMetadataPropertyToDB = async (property_key) => {
@@ -744,6 +750,66 @@ exports.removeRecord = async (table, filter_obj) => {
return !!(output['result']['ok']);
}
// exports.removeRecordsByUIDBulk = async (table, uids) => {
// // local db override
// if (using_local_db) {
// for (let i = 0; i < uids.length; i++) {
// applyFilterLocalDB(local_db.get(table), {uid: uids[i]}, 'remove').write();
// }
// return true;
// }
// const table_collection = database.collection(table);
// let bulk = table_collection.initializeOrderedBulkOp(); // Initialize the Ordered Batch
// for (let i = 0; i < uids.length; i++) {
// bulk.find({uid: uids[i]}).removeOne();
// }
// const output = await bulk.execute();
// return !!(output['result']['ok']);
// }
exports.findDuplicatesByKey = async (table, key) => {
let duplicates = [];
if (using_local_db) {
// this can probably be optimized
const all_records = await exports.getRecords(table);
const existing_records = {};
for (let i = 0; i < all_records.length; i++) {
const record = all_records[i];
const value = record[key];
if (existing_records[value]) {
duplicates.push(record);
}
existing_records[value] = true;
}
return duplicates;
}
const duplicated_values = await database.collection(table).aggregate([
{"$group" : { "_id": `$${key}`, "count": { "$sum": 1 } } },
{"$match": {"_id" :{ "$ne" : null } , "count" : {"$gt": 1} } },
{"$project": {[key] : "$_id", "_id" : 0} }
]).toArray();
for (let i = 0; i < duplicated_values.length; i++) {
const duplicated_value = duplicated_values[i];
const duplicated_records = await exports.getRecords(table, duplicated_value, false);
if (duplicated_records.length > 1) {
duplicates = duplicates.concat(duplicated_records.slice(1, duplicated_records.length));
}
}
return duplicates;
}
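findDuplicatesByKey returns every record after the first occurrence of a repeated value: the MongoDB branch groups on the key and keeps only groups with count > 1, while the local-db branch tracks seen values in a single pass. A hedged sketch of how the tasks code can consume it, assuming db.js is required as db_api and the helper name listDuplicateFileUIDs is illustrative:

const db_api = require('./db');

async function listDuplicateFileUIDs() {
    // e.g. if uids 'test1', 'test2', 'test3' all share the same path, only 'test2' and 'test3' are returned
    const duplicates = await db_api.findDuplicatesByKey('files', 'path');
    return duplicates.map(duplicate_file => duplicate_file['uid']);
}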
exports.removeAllRecords = async (table = null, filter_obj = null) => {
// local db override
const tables_to_remove = table ? [table] : tables_list;

@@ -0,0 +1,112 @@
const utils = require('./utils');
const db_api = require('./db');
const fs = require('fs-extra');
const logger = require('./logger');
const TASKS = {
backup_local_db: {
run: utils.backupLocalDB,
title: 'Backup Local DB',
},
missing_files_check: {
run: checkForMissingFiles,
confirm: deleteMissingFiles,
title: 'Missing files check'
},
missing_db_records: {
run: db_api.importUnregisteredFiles,
title: 'Import missing DB records'
},
duplicate_files_check: {
run: checkForDuplicateFiles,
confirm: removeDuplicates,
title: 'Find duplicate files in DB'
}
}
exports.initialize = async () => {
const tasks_keys = Object.keys(TASKS);
for (let i = 0; i < tasks_keys.length; i++) {
const task_key = tasks_keys[i];
const task_in_db = await db_api.getRecord('tasks', {key: task_key});
if (!task_in_db) {
await db_api.insertRecordIntoTable('tasks', {
key: task_key,
last_ran: null,
last_confirmed: null,
running: false,
confirming: false,
data: null,
error: null
});
}
}
}
exports.executeTask = async (task_key) => {
if (!TASKS[task_key]) {
logger.error(`Task ${task_key} does not exist!`);
return;
}
logger.verbose(`Executing task ${task_key}`);
await exports.executeRun(task_key);
if (!TASKS[task_key]['confirm']) return;
await exports.executeConfirm(task_key);
logger.verbose(`Finished executing ${task_key}`);
}
exports.executeRun = async (task_key) => {
await db_api.updateRecord('tasks', {key: task_key}, {running: true});
const data = await TASKS[task_key].run();
await db_api.updateRecord('tasks', {key: task_key}, {data: data, last_ran: Date.now()/1000, running: false});
}
exports.executeConfirm = async (task_key) => {
if (!TASKS[task_key]['confirm']) {
return null;
}
await db_api.updateRecord('tasks', {key: task_key}, {confirming: true});
const task_obj = await db_api.getRecord('tasks', {key: task_key});
const data = task_obj['data'];
await TASKS[task_key].confirm(data);
await db_api.updateRecord('tasks', {key: task_key}, {confirming: false, last_confirmed: Date.now()/1000});
}
// missing files check
async function checkForMissingFiles() {
const missing_files = [];
const all_files = await db_api.getRecords('files');
for (let i = 0; i < all_files.length; i++) {
const file_to_check = all_files[i];
const file_exists = fs.existsSync(file_to_check['path']);
if (!file_exists) missing_files.push(file_to_check['uid']);
}
return {uids: missing_files};
}
async function deleteMissingFiles(data) {
const uids = data['uids'];
for (let i = 0; i < uids.length; i++) {
const uid = uids[i];
await db_api.removeRecord('files', {uid: uid});
}
}
// duplicate files check
async function checkForDuplicateFiles() {
const duplicate_files = await db_api.findDuplicatesByKey('files', 'path');
const duplicate_uids = duplicate_files.map(duplicate_file => duplicate_file['uid']);
if (duplicate_uids && duplicate_uids.length > 0) {
return {uids: duplicate_uids};
}
return {uids: []};
}
async function removeDuplicates(data) {
for (let i = 0; i < data['uids'].length; i++) {
await db_api.removeRecord('files', {uid: data['uids'][i]});
}
}
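Taken together, the module works in two phases: executeRun() stores a task's result in data, and executeConfirm() (when the task defines confirm) acts on that stored data. A minimal driver sketch, assuming tasks.js and db.js are required locally, the DB connection is already open, and runMissingFilesCheck is an illustrative wrapper:

const tasks_api = require('./tasks');
const db_api = require('./db');

async function runMissingFilesCheck() {
    await tasks_api.initialize();                        // seeds a 'tasks' record per TASKS entry if absent
    await tasks_api.executeTask('missing_files_check');  // run() collects missing uids, confirm() removes their records
    const task_obj = await db_api.getRecord('tasks', {key: 'missing_files_check'});
    return task_obj['data'];                             // {uids: [...]} as produced by checkForMissingFiles
}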

@@ -83,12 +83,37 @@ describe('Database', async function() {
await db_api.removeAllRecords('test');
});
it('Add and read record', async function() {
this.timeout(120000);
await db_api.insertRecordIntoTable('test', {test_add: 'test', test_undefined: undefined, test_null: null});
const added_record = await db_api.getRecord('test', {test_add: 'test', test_undefined: undefined, test_null: null});
assert(added_record['test_add'] === 'test');
await db_api.removeRecord('test', {test_add: 'test'});
});
it('Find duplicates by key', async function() {
const test_duplicates = [
{
test: 'testing',
key: '1'
},
{
test: 'testing',
key: '2'
},
{
test: 'testing_missing',
key: '3'
},
{
test: 'testing',
key: '4'
}
];
await db_api.insertRecordsIntoTable('test', test_duplicates);
const duplicates = await db_api.findDuplicatesByKey('test', 'test');
console.log(duplicates);
});
it('Update record', async function() {
await db_api.insertRecordIntoTable('test', {test_update: 'test'});
await db_api.updateRecord('test', {test_update: 'test'}, {added_field: true});
@@ -122,6 +147,7 @@ describe('Database', async function() {
});
it('Bulk add', async function() {
this.timeout(120000);
const NUM_RECORDS_TO_ADD = 2002; // max batch ops is 1000
const test_records = [];
for (let i = 0; i < NUM_RECORDS_TO_ADD; i++) {
@@ -291,7 +317,6 @@ describe('Multi User', async function() {
describe('Downloader', function() {
const downloader_api = require('../downloader');
downloader_api.initialize(db_api);
const url = 'https://www.youtube.com/watch?v=dQw4w9WgXcQ';
const sub_id = 'dc834388-3454-41bf-a618-e11cb8c7de1c';
const options = {
@@ -348,5 +373,69 @@ describe('Downloader', function() {
const sample_json = fs.readJSONSync('./test/sample.info.json');
downloader_api.generateNFOFile(sample_json, nfo_file_path);
assert(fs.existsSync(nfo_file_path));
fs.unlinkSync(nfo_file_path);
});
});
describe('Tasks', function() {
const tasks_api = require('../tasks');
beforeEach(async function() {
await db_api.connectToDB();
await db_api.removeAllRecords('tasks');
await tasks_api.initialize();
});
it('Backup local db', async function() {
const backups_original = await utils.recFindByExt('appdata', 'bak');
const original_length = backups_original.length;
await tasks_api.executeTask('backup_local_db');
const backups_new = await utils.recFindByExt('appdata', 'bak');
const new_length = backups_new.length;
assert(original_length === new_length - 1);
});
it('Check for missing files', async function() {
await db_api.removeAllRecords('files', {uid: 'test'});
const test_missing_file = {uid: 'test', path: 'test/missing_file.mp4'};
await db_api.insertRecordIntoTable('files', test_missing_file);
await tasks_api.executeTask('missing_files_check');
const task_obj = await db_api.getRecord('tasks', {key: 'missing_files_check'});
assert(task_obj['data'] && task_obj['data']['uids'] && task_obj['data']['uids'].length >= 1);
});
it('Check for duplicate files', async function() {
this.timeout(300000);
await db_api.removeAllRecords('files', {uid: 'test1'});
await db_api.removeAllRecords('files', {uid: 'test2'});
const test_duplicate_file1 = {uid: 'test1', path: 'test/missing_file.mp4'};
const test_duplicate_file2 = {uid: 'test2', path: 'test/missing_file.mp4'};
const test_duplicate_file3 = {uid: 'test3', path: 'test/missing_file.mp4'};
await db_api.insertRecordIntoTable('files', test_duplicate_file1);
await db_api.insertRecordIntoTable('files', test_duplicate_file2);
await db_api.insertRecordIntoTable('files', test_duplicate_file3);
await tasks_api.executeTask('duplicate_files_check');
const task_obj = await db_api.getRecord('tasks', {key: 'duplicate_files_check'});
const duplicated_record_count = await db_api.getRecords('files', {path: 'test/missing_file.mp4'}, true);
assert(task_obj['data'] && task_obj['data']['uids'] && task_obj['data']['uids'].length >= 1);
assert(duplicated_record_count == 1);
});
it('Import unregistered files', async function() {
this.timeout(300000);
// pre-test cleanup
await db_api.removeAllRecords('files', {title: 'Sample File'});
if (fs.existsSync('video/sample.info.json')) fs.unlinkSync('video/sample.info.json');
if (fs.existsSync('video/sample.mp4')) fs.unlinkSync('video/sample.mp4');
// copies in files
fs.copyFileSync('test/sample.info.json', 'video/sample.info.json');
fs.copyFileSync('test/sample.mp4', 'video/sample.mp4');
await tasks_api.executeTask('missing_db_records');
const imported_file = await db_api.getRecord('files', {title: 'Sample File'});
assert(!!imported_file);
// post-test cleanup
if (fs.existsSync('video/sample.info.json')) fs.unlinkSync('video/sample.info.json');
if (fs.existsSync('video/sample.mp4')) fs.unlinkSync('video/sample.mp4');
});
});

@@ -266,6 +266,12 @@ function getCurrentDownloader() {
return details_json['downloader'];
}
async function backupLocalDB() {
const path_to_backups = path.join('appdata', 'db_backup');
await fs.ensureDir(path_to_backups);
await fs.copyFile('appdata/local_db.json', path.join(path_to_backups, `local_db.json.${Date.now()/1000}.bak`));
}
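backupLocalDB simply copies appdata/local_db.json into appdata/db_backup with a timestamped .bak suffix. A small usage sketch, assuming utils.js is required locally and that appdata/local_db.json exists (i.e. the local DB is in use); backupNow is an illustrative wrapper:

const utils = require('./utils');

async function backupNow() {
    // creates appdata/db_backup/local_db.json.<unix timestamp>.bak
    await utils.backupLocalDB();
    const backups = await utils.recFindByExt('appdata', 'bak');
    console.log(`Backups on disk: ${backups.length}`);
}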
async function recFindByExt(base,ext,files,result)
{
files = files || (await fs.readdir(base))
@@ -390,6 +396,7 @@ module.exports = {
getMatchingCategoryFiles: getMatchingCategoryFiles,
addUIDsToCategory: addUIDsToCategory,
getCurrentDownloader: getCurrentDownloader,
backupLocalDB: backupLocalDB,
recFindByExt: recFindByExt,
removeFileExtension: removeFileExtension,
formatDateString: formatDateString,
