From 54208ce6ce7e65338059bcf1ef738431cb9712e4 Mon Sep 17 00:00:00 2001 From: Tzahi12345 Date: Tue, 17 Jan 2023 23:35:53 -0500 Subject: [PATCH] Added preliminary backend support for custom archives --- backend/app.js | 13 ++++++- backend/archive.js | 82 +++++++++++++++++++++++++++++++++++++++++++ backend/db.js | 21 ++++++++--- backend/tasks.js | 7 +++- backend/test/tests.js | 65 ++++++++++++++++++++++++---------- 5 files changed, 162 insertions(+), 26 deletions(-) create mode 100644 backend/archive.js diff --git a/backend/app.js b/backend/app.js index 1ff1c20..66d896f 100644 --- a/backend/app.js +++ b/backend/app.js @@ -33,6 +33,7 @@ const subscriptions_api = require('./subscriptions'); const categories_api = require('./categories'); const twitch_api = require('./twitch'); const youtubedl_api = require('./youtube-dl'); +const archive_api = require('./archive'); var app = express(); @@ -70,7 +71,8 @@ db.defaults( downloads: {}, subscriptions: [], files_to_db_migration_complete: false, - tasks_manager_role_migration_complete: false + tasks_manager_role_migration_complete: false, + archives_migration_complete: false }).write(); users_db.defaults( @@ -200,6 +202,15 @@ async function checkMigrations() { db.set('tasks_manager_role_migration_complete', true).write(); } + const archives_migration_complete = db.get('archives_migration_complete').value(); + if (!archives_migration_complete) { + logger.info('Checking if archives have been migrated...'); + const imported_archives = await archive_api.importArchives(); + if (imported_archives) logger.info('Archives migration complete!'); + else logger.error('Failed to migrate archives!'); + db.set('archives_migration_complete', true).write(); + } + return true; } diff --git a/backend/archive.js b/backend/archive.js new file mode 100644 index 0000000..545c8cc --- /dev/null +++ b/backend/archive.js @@ -0,0 +1,82 @@ +const path = require('path'); +const fs = require('fs-extra'); + +const db_api = require('./db'); + 
+exports.generateArchive = async (user_uid = null, sub_id = null) => { + const archive_items = await db_api.getRecords('archives', {user_uid: user_uid, sub_id: sub_id}); + const archive_item_lines = archive_items.map(archive_item => `${archive_item['key']['extractor']} ${archive_item['key']['id']}`); + return archive_item_lines.join('\n'); + } + +exports.addToArchive = async (extractor, id, type, user_uid = null, sub_id = null) => { + const archive_item = createArchiveItem(extractor, id, type, user_uid, sub_id); + const success = await db_api.insertRecordIntoTable('archives', archive_item, {key: {extractor: extractor, id: id}, type: type}); + return success; + } + +exports.existsInArchive = async (extractor, id, type, user_uid, sub_id) => { + const archive_item = await db_api.getRecord('archives', {'key.extractor': extractor, 'key.id': id, type: type, user_uid: user_uid, sub_id: sub_id}); + return !!archive_item; + } + +exports.importArchiveFile = async (archive_text, type, user_uid = null, sub_id = null) => { + let archive_import_count = 0; + const lines = archive_text.split('\n'); + for (let line of lines) { + const archive_line_parts = line.trim().split(' '); + // should just be the extractor and the video ID + if (archive_line_parts.length !== 2) { + continue; + } + + const extractor = archive_line_parts[0]; + const id = archive_line_parts[1]; + if (!extractor || !id) continue; + + // we can't do a bulk write because we need to avoid duplicate archive items existing in db + + const archive_item = createArchiveItem(extractor, id, type, user_uid, sub_id); + await db_api.insertRecordIntoTable('archives', archive_item, {key: {extractor: extractor, id: id}, type: type}); + archive_import_count++; + } + return archive_import_count; + } + +exports.importArchives = async () => { + const imported_archives = []; + const dirs_to_check = await db_api.getFileDirectoriesAndDBs(); + + // run through check list and check each file to see if it's missing from the db + for (let i = 0; i < 
dirs_to_check.length; i++) { + const dir_to_check = dirs_to_check[i]; + if (!dir_to_check['archive_path']) continue; + + const files_to_import = [ + path.join(dir_to_check['archive_path'], `archive_${dir_to_check['type']}.txt`), + path.join(dir_to_check['archive_path'], `blacklist_${dir_to_check['type']}.txt`) + ] + + for (const file_to_import of files_to_import) { + const file_exists = await fs.pathExists(file_to_import); + if (!file_exists) continue; + + const archive_text = await fs.readFile(file_to_import, 'utf8'); + await exports.importArchiveFile(archive_text, dir_to_check.type, dir_to_check.user_uid, dir_to_check.sub_id); + imported_archives.push(file_to_import); + } + } + return imported_archives; +} + +const createArchiveItem = (extractor, id, type, user_uid = null, sub_id = null) => { + return { + key: { + extractor: extractor, + id: id + }, + type: type, + user_uid: user_uid ? user_uid : null, + sub_id: sub_id ? sub_id : null + } +} \ No newline at end of file diff --git a/backend/db.js b/backend/db.js index 39301ca..9811979 100644 --- a/backend/db.js +++ b/backend/db.js @@ -63,6 +63,10 @@ const tables = { name: 'notifications', primary_key: 'uid' }, + archives: { + name: 'archives', + primary_key: 'key' + }, test: { name: 'test' } @@ -258,13 +262,16 @@ exports.getFileDirectoriesAndDBs = async () => { dirs_to_check.push({ basePath: path.join(usersFileFolder, user.uid, 'audio'), user_uid: user.uid, - type: 'audio' + type: 'audio', + archive_path: utils.getArchiveFolder('audio', user.uid) }); // add user's video dir to check list dirs_to_check.push({ basePath: path.join(usersFileFolder, user.uid, 'video'), - type: 'video' + user_uid: user.uid, + type: 'video', + archive_path: utils.getArchiveFolder('video', user.uid) }); } } else { @@ -274,13 +281,15 @@ exports.getFileDirectoriesAndDBs = async () => { // add audio dir to check list dirs_to_check.push({ basePath: audioFolderPath, - type: 'audio' + type: 'audio', + archive_path: 
utils.getArchiveFolder('audio') }); // add video dir to check list dirs_to_check.push({ basePath: videoFolderPath, - type: 'video' + type: 'video', + archive_path: utils.getArchiveFolder('video') }); } @@ -301,7 +310,8 @@ exports.getFileDirectoriesAndDBs = async () => { : path.join(subscriptions_base_path, subscription_to_check.isPlaylist ? 'playlists/' : 'channels/', subscription_to_check.name), user_uid: subscription_to_check.user_uid, type: subscription_to_check.type, - sub_id: subscription_to_check['id'] + sub_id: subscription_to_check['id'], + archive_path: utils.getArchiveFolder(subscription_to_check.type, subscription_to_check.user_uid, subscription_to_check) }); } @@ -580,6 +590,7 @@ exports.setVideoProperty = async (file_uid, assignment_obj) => { exports.insertRecordIntoTable = async (table, doc, replaceFilter = null) => { // local db override if (using_local_db) { + if (replaceFilter) local_db.get(table).remove((doc) => _.isMatch(doc, replaceFilter)).write(); local_db.get(table).push(doc).write(); return true; } diff --git a/backend/tasks.js b/backend/tasks.js index eeb83ae..1c7315c 100644 --- a/backend/tasks.js +++ b/backend/tasks.js @@ -1,7 +1,7 @@ const db_api = require('./db'); const notifications_api = require('./notifications'); const youtubedl_api = require('./youtube-dl'); -const subscriptions_api = require('./subscriptions'); +const archive_api = require('./archive'); const fs = require('fs-extra'); const logger = require('./logger'); @@ -41,6 +41,11 @@ const TASKS = { confirm: autoDeleteFiles, title: 'Delete old files', job: null + }, + import_legacy_archives: { + run: archive_api.importArchives, + title: 'Import legacy archives', + job: null } } diff --git a/backend/test/tests.js b/backend/test/tests.js index bb3e2c3..1080ff3 100644 --- a/backend/test/tests.js +++ b/backend/test/tests.js @@ -37,6 +37,7 @@ var auth_api = require('../authentication/auth'); var db_api = require('../db'); const utils = require('../utils'); const subscriptions_api = 
require('../subscriptions'); +const archive_api = require('../archive'); const fs = require('fs-extra'); const { uuid } = require('uuidv4'); const NodeID3 = require('node-id3'); @@ -182,6 +183,16 @@ describe('Database', async function() { assert(!deleted_record); }); + it('Remove records', async function() { + await db_api.insertRecordIntoTable('test', {test_remove: 'test', test_property: 'test'}); + await db_api.insertRecordIntoTable('test', {test_remove: 'test', test_property: 'test2'}); + await db_api.insertRecordIntoTable('test', {test_remove: 'test'}); + const delete_succeeded = await db_api.removeAllRecords('test', {test_remove: 'test'}); + assert(delete_succeeded); + const count = await db_api.getRecords('test', {test_remove: 'test'}, true); + assert(count === 0); + }); + it('Push to record array', async function() { await db_api.insertRecordIntoTable('test', {test: 'test', test_array: []}); await db_api.pushToRecordsArray('test', {test: 'test'}, 'test_array', 'test_item'); @@ -613,30 +624,46 @@ describe('Tasks', function() { }); describe('Archive', async function() { - const archive_path = path.join('test', 'archives'); - fs.ensureDirSync(archive_path); - const archive_file_path = path.join(archive_path, 'archive_video.txt'); - const blacklist_file_path = path.join(archive_path, 'blacklist_video.txt'); beforeEach(async function() { - if (fs.existsSync(archive_file_path)) fs.unlinkSync(archive_file_path); - fs.writeFileSync(archive_file_path, 'youtube testing1\nyoutube testing2\nyoutube testing3\n'); + await db_api.removeAllRecords('archives', {user_uid: 'test_user'}); + }); - if (fs.existsSync(blacklist_file_path)) fs.unlinkSync(blacklist_file_path); - fs.writeFileSync(blacklist_file_path, ''); + afterEach(async function() { + await db_api.removeAllRecords('archives', {user_uid: 'test_user'}); }); - - it('Delete from archive', async function() { - await utils.deleteFileFromArchive('N/A', 'video', archive_path, 'testing2', false); - const new_archive = 
fs.readFileSync(archive_file_path); - assert(!new_archive.includes('testing2')); + + it('Import archive', async function() { + const archive_text = ` + testextractor1 testing1 + testextractor1 testing2 + testextractor2 testing1 + testextractor1 testing3 + + `; + const count = await archive_api.importArchiveFile(archive_text, 'video', 'test_user', 'test_sub'); + assert(count === 4); + const archive_items = await db_api.getRecords('archives', {user_uid: 'test_user', sub_id: 'test_sub'}); + console.log(archive_items); + assert(archive_items.length === 4); + assert(archive_items.filter(archive_item => archive_item.key.extractor === 'testextractor2').length === 1); + assert(archive_items.filter(archive_item => archive_item.key.extractor === 'testextractor1').length === 3); + + const success = await db_api.removeAllRecords('archives', {user_uid: 'test_user', sub_id: 'test_sub'}); + assert(success); }); - it('Delete from archive - blacklist', async function() { - await utils.deleteFileFromArchive('N/A', 'video', archive_path, 'testing2', true); - const new_archive = fs.readFileSync(archive_file_path); - const new_blacklist = fs.readFileSync(blacklist_file_path); - assert(!new_archive.includes('testing2')); - assert(new_blacklist.includes('testing2')); + it('Get archive', async function() { + await archive_api.addToArchive('testextractor1', 'testing1', 'video', 'test_user'); + await archive_api.addToArchive('testextractor2', 'testing1', 'video', 'test_user'); + await archive_api.addToArchive('testextractor2', 'testing1', 'video', 'test_user'); + + const archive_item1 = await db_api.getRecord('archives', {key: {extractor: 'testextractor1', id: 'testing1'}}); + const archive_item2 = await db_api.getRecord('archives', {key: {extractor: 'testextractor2', id: 'testing1'}}); + + assert(archive_item1 && archive_item2); + + const count = await db_api.getRecords('archives', {key: {id: 'testing1'}}, true); + assert(count === 2); }); });