const fs = require('fs-extra');
const path = require('path');
const { MongoClient } = require('mongodb');
const { uuid } = require('uuidv4');
const _ = require('lodash');

const config_api = require('./config');
const utils = require('./utils');
const logger = require('./logger');

const low = require('lowdb');
const FileSync = require('lowdb/adapters/FileSync');
const { BehaviorSubject } = require('rxjs');

const local_adapter = new FileSync('./appdata/local_db.json');
const local_db = low(local_adapter);

let database = null;
exports.database_initialized = false;
exports.database_initialized_bs = new BehaviorSubject(false);
const tables = {
    files: {
        name: 'files',
        primary_key: 'uid',
        text_search: {
            title: 'text',
            uploader: 'text',
            uid: 'text'
        }
    },
    playlists: {
        name: 'playlists',
        primary_key: 'id'
    },
    categories: {
        name: 'categories',
        primary_key: 'uid'
    },
    subscriptions: {
        name: 'subscriptions',
        primary_key: 'id'
    },
    downloads: {
        name: 'downloads'
    },
    users: {
        name: 'users',
        primary_key: 'uid'
    },
    roles: {
        name: 'roles',
        primary_key: 'key'
    },
    download_queue: {
        name: 'download_queue',
        primary_key: 'uid'
    },
    tasks: {
        name: 'tasks',
        primary_key: 'key'
    },
    notifications: {
        name: 'notifications',
        primary_key: 'uid'
    },
    archives: {
        name: 'archives'
    },
    test: {
        name: 'test'
    }
}
const tables_list = Object.keys(tables);

const local_db_defaults = {};
tables_list.forEach(table => { local_db_defaults[table] = []; });
local_db.defaults(local_db_defaults).write();

let using_local_db = null;

// declared at module scope so setDB doesn't create implicit globals
let db = null;
let users_db = null;

function setDB(input_db, input_users_db) {
    db = input_db;
    users_db = input_users_db;
    exports.db = input_db;
    exports.users_db = input_users_db;
}

exports.initialize = (input_db, input_users_db) => {
    setDB(input_db, input_users_db);

    // must be done here to prevent getConfigItem from being called before init
    using_local_db = config_api.getConfigItem('ytdl_use_local_db');
}
exports.connectToDB = async (retries = 5, no_fallback = false, custom_connection_string = null) => {
    const success = await exports._connectToDB(custom_connection_string);
    if (success) return true;

    if (retries) {
        logger.warn(`MongoDB connection failed! Retrying ${retries} times...`);
        const retry_delay_ms = 2000;
        for (let i = 0; i < retries; i++) {
            const retry_succeeded = await exports._connectToDB();
            if (retry_succeeded) {
                logger.info(`Successfully connected to DB after ${i + 1} attempt(s)`);
                return true;
            }

            if (i !== retries - 1) {
                logger.warn(`Retry ${i + 1} failed, waiting ${retry_delay_ms}ms before trying again.`);
                await utils.wait(retry_delay_ms);
            } else {
                logger.warn(`Retry ${i + 1} failed.`);
            }
        }
    }

    if (no_fallback) {
        logger.error('Failed to connect to MongoDB. Verify your connection string is valid.');
        return false;
    }

    using_local_db = true;
    config_api.setConfigItem('ytdl_use_local_db', true);
    logger.error('Failed to connect to MongoDB, using local DB as a fallback. Make sure your MongoDB instance is accessible, or set local DB as the default through the config.');
    return true;
}
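
// Illustrative usage sketch (not executed here; assumes this module is required as
// db_api, which is hypothetical): connect at startup with retries, falling back to the
// local DB on failure. connectToDB resolves true if either backend is usable:
//
//   const db_api = require('./db');
//   const connected = await db_api.connectToDB(5);
//   if (!connected) logger.error('No usable database!');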
exports._connectToDB = async (custom_connection_string = null) => {
    const uri = !custom_connection_string ? config_api.getConfigItem('ytdl_mongodb_connection_string') : custom_connection_string; // e.g. "mongodb://127.0.0.1:27017/?compressors=zlib&gssapiServiceName=mongodb"
    const client = new MongoClient(uri, {
        useNewUrlParser: true,
        useUnifiedTopology: true,
    });
    try {
        await client.connect();
        database = client.db('ytdl_material');

        // avoid doing anything else if it's just a test
        if (custom_connection_string) return true;

        const existing_collections = (await database.listCollections({}, { nameOnly: true }).toArray()).map(collection => collection.name);
        const missing_tables = tables_list.filter(table => !(existing_collections.includes(table)));

        // use for..of rather than forEach(async ...) so collection/index creation is actually awaited
        for (const table of missing_tables) {
            await database.createCollection(table);
        }

        for (const table of tables_list) {
            const primary_key = tables[table]['primary_key'];
            if (primary_key) {
                await database.collection(table).createIndex({ [primary_key]: 1 }, { unique: true });
            }
            const text_search = tables[table]['text_search'];
            if (text_search) {
                await database.collection(table).createIndex(text_search);
            }
        }

        using_local_db = false; // needs to happen for tests (in normal operation using_local_db is guaranteed false)
        return true;
    } catch (err) {
        logger.error(err);
        return false;
    } finally {
        // Ensures that the client will close when you finish/error
        // await client.close();
    }
}
exports.setVideoProperty = async (file_uid, assignment_obj) => {
    // TODO: check if video exists, throw error if not
    await exports.updateRecord('files', { uid: file_uid }, assignment_obj);
}
exports.getFileDirectoriesAndDBs = async () => {
    let dirs_to_check = [];
    let subscriptions_to_check = [];
    const subscriptions_base_path = config_api.getConfigItem('ytdl_subscriptions_base_path'); // only for single-user mode
    const multi_user_mode = config_api.getConfigItem('ytdl_multi_user_mode');
    const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
    const subscriptions_enabled = config_api.getConfigItem('ytdl_allow_subscriptions');
    if (multi_user_mode) {
        const users = await exports.getRecords('users');
        for (let i = 0; i < users.length; i++) {
            const user = users[i];

            // add user's audio dir to check list
            dirs_to_check.push({
                basePath: path.join(usersFileFolder, user.uid, 'audio'),
                user_uid: user.uid,
                type: 'audio',
                archive_path: utils.getArchiveFolder('audio', user.uid)
            });

            // add user's video dir to check list
            dirs_to_check.push({
                basePath: path.join(usersFileFolder, user.uid, 'video'),
                user_uid: user.uid,
                type: 'video',
                archive_path: utils.getArchiveFolder('video', user.uid)
            });
        }
    } else {
        const audioFolderPath = config_api.getConfigItem('ytdl_audio_folder_path');
        const videoFolderPath = config_api.getConfigItem('ytdl_video_folder_path');

        // add audio dir to check list
        dirs_to_check.push({
            basePath: audioFolderPath,
            type: 'audio',
            archive_path: utils.getArchiveFolder('audio')
        });

        // add video dir to check list
        dirs_to_check.push({
            basePath: videoFolderPath,
            type: 'video',
            archive_path: utils.getArchiveFolder('video')
        });
    }

    if (subscriptions_enabled) {
        const subscriptions = await exports.getRecords('subscriptions');
        subscriptions_to_check = subscriptions_to_check.concat(subscriptions);
    }

    // add subscriptions to check list
    for (let i = 0; i < subscriptions_to_check.length; i++) {
        let subscription_to_check = subscriptions_to_check[i];
        if (!subscription_to_check.name) {
            // TODO: Remove subscription as it'll never complete
            continue;
        }
        dirs_to_check.push({
            basePath: subscription_to_check.user_uid ? path.join(usersFileFolder, subscription_to_check.user_uid, 'subscriptions', subscription_to_check.isPlaylist ? 'playlists/' : 'channels/', subscription_to_check.name)
                                                     : path.join(subscriptions_base_path, subscription_to_check.isPlaylist ? 'playlists/' : 'channels/', subscription_to_check.name),
            user_uid: subscription_to_check.user_uid,
            type: subscription_to_check.type,
            sub_id: subscription_to_check['id'],
            archive_path: utils.getArchiveFolder(subscription_to_check.type, subscription_to_check.user_uid, subscription_to_check)
        });
    }
    return dirs_to_check;
}
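
// For reference, each entry pushed above has this shape (derived from the branches in
// getFileDirectoriesAndDBs; user_uid and sub_id are only present where applicable):
//
//   { basePath: string, type: 'audio' | 'video', archive_path: string,
//     user_uid?: string, sub_id?: string }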
// Basic DB functions
// Create
exports.insertRecordIntoTable = async (table, doc, replaceFilter = null) => {
    // local db override
    if (using_local_db) {
        if (replaceFilter) local_db.get(table).remove((record) => _.isMatch(record, replaceFilter)).write();
        local_db.get(table).push(doc).write();
        return true;
    }

    if (replaceFilter) {
        const output = await database.collection(table).bulkWrite([
            {
                deleteMany: {
                    filter: replaceFilter
                }
            },
            {
                insertOne: {
                    document: doc
                }
            }
        ]);
        logger.debug(`Inserted doc into ${table} with filter: ${JSON.stringify(replaceFilter)}`);
        return !!(output['result']['ok']);
    }

    const output = await database.collection(table).insertOne(doc);
    logger.debug(`Inserted doc into ${table}`);
    return !!(output['result']['ok']);
}
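
// Minimal usage sketch (file_obj is hypothetical): passing a replaceFilter makes the
// insert behave like "delete all matches, then insert" on both backends:
//
//   await db_api.insertRecordIntoTable('files', file_obj, { uid: file_obj.uid });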
exports.insertRecordsIntoTable = async (table, docs, ignore_errors = false) => {
    // local db override
    if (using_local_db) {
        // push in chunks so we don't spread an enormous argument list onto lowdb's push
        const records_limit = 30000;
        if (docs.length < records_limit) {
            local_db.get(table).push(...docs).write();
        } else {
            for (let i = 0; i < docs.length; i += records_limit) {
                const records_to_push = docs.slice(i, i + records_limit); // slice clamps to docs.length
                local_db.get(table).push(...records_to_push).write();
            }
        }
        return true;
    }

    const output = await database.collection(table).insertMany(docs, { ordered: !ignore_errors });
    logger.debug(`Inserted ${output.insertedCount} docs into ${table}`);
    return !!(output['result']['ok']);
}
exports.bulkInsertRecordsIntoTable = async (table, docs) => {
    // local db override
    if (using_local_db) {
        return await exports.insertRecordsIntoTable(table, docs);
    }

    // not a necessary function as insertRecords does the same thing, but gives us more control on batch size if needed
    const table_collection = database.collection(table);

    let bulk = table_collection.initializeOrderedBulkOp(); // Initialize the ordered batch
    for (let i = 0; i < docs.length; i++) {
        bulk.insert(docs[i]);
    }

    const output = await bulk.execute();
    return !!(output['result']['ok']);
}
// Read
exports.getRecord = async (table, filter_obj) => {
    // local db override
    if (using_local_db) {
        return exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').value();
    }

    return await database.collection(table).findOne(filter_obj);
}

exports.getRecords = async (table, filter_obj = null, return_count = false, sort = null, range = null) => {
    // local db override
    if (using_local_db) {
        let cursor = filter_obj ? exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'filter').value() : local_db.get(table).value();
        if (sort) {
            // sort['order'] mirrors the MongoDB convention: 1 for ascending, -1 for descending
            cursor = cursor.sort((a, b) => (a[sort['by']] > b[sort['by']] ? sort['order'] : sort['order'] * -1));
        }
        if (range) {
            cursor = cursor.slice(range[0], range[1]);
        }
        return !return_count ? cursor : cursor.length;
    }

    const cursor = filter_obj ? database.collection(table).find(filter_obj) : database.collection(table).find();
    if (sort) {
        cursor.sort({ [sort['by']]: sort['order'] });
    }
    if (range) {
        cursor.skip(range[0]).limit(range[1] - range[0]);
    }
    return !return_count ? await cursor.toArray() : await cursor.count();
}
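
// Pagination sketch (the filter and sort field are illustrative, not a documented
// schema): fetch the first 50 matching files, newest first. range is [start, end),
// and sort.order follows the MongoDB convention (1 ascending, -1 descending):
//
//   const page = await db_api.getRecords('files', { user_uid: uid }, false,
//                                        { by: 'registered', order: -1 }, [0, 50]);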
// Update
exports.updateRecord = async (table, filter_obj, update_obj, nested_mode = false) => {
    // local db override
    if (using_local_db) {
        if (nested_mode) {
            // if the update object uses dot-notation keys, convert it to a nested object before merging
            update_obj = utils.convertFlatObjectToNestedObject(update_obj);
            exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').merge(update_obj).write();
            return true;
        }
        exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').assign(update_obj).write();
        return true;
    }

    // sometimes _id will be in the update obj, this breaks mongodb
    if (update_obj['_id']) delete update_obj['_id'];
    const output = await database.collection(table).updateOne(filter_obj, { $set: update_obj });
    return !!(output['result']['ok']);
}
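
// nested_mode sketch (the keys are hypothetical): dot-notation keys are passed through
// to MongoDB's $set as-is, and converted to a nested merge for the local DB:
//
//   await db_api.updateRecord('subscriptions', { id: sub_id },
//                             { 'custom_args.output': custom_output }, true);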
exports.updateRecords = async (table, filter_obj, update_obj) => {
    // local db override
    if (using_local_db) {
        exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'filter').each((record) => {
            const props_to_update = Object.keys(update_obj);
            for (let i = 0; i < props_to_update.length; i++) {
                const prop_to_update = props_to_update[i];
                record[prop_to_update] = update_obj[prop_to_update];
            }
        }).write();
        return true;
    }

    const output = await database.collection(table).updateMany(filter_obj, { $set: update_obj });
    return !!(output['result']['ok']);
}
exports.removePropertyFromRecord = async (table, filter_obj, remove_obj) => {
    // local db override
    if (using_local_db) {
        // unset one property at a time: lodash treats an array argument as a single nested path
        const props_to_remove = Object.keys(remove_obj);
        for (const prop_to_remove of props_to_remove) {
            exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').unset(prop_to_remove).write();
        }
        return true;
    }

    const output = await database.collection(table).updateOne(filter_obj, { $unset: remove_obj });
    return !!(output['result']['ok']);
}
exports.bulkUpdateRecordsByKey = async (table, key_label, update_obj) => {
    // local db override
    if (using_local_db) {
        local_db.get(table).each((record) => {
            const item_id_to_update = record[key_label];
            if (!update_obj[item_id_to_update]) return;
            const props_to_update = Object.keys(update_obj[item_id_to_update]);
            for (let i = 0; i < props_to_update.length; i++) {
                const prop_to_update = props_to_update[i];
                record[prop_to_update] = update_obj[item_id_to_update][prop_to_update];
            }
        }).write();
        return true;
    }

    const table_collection = database.collection(table);

    let bulk = table_collection.initializeOrderedBulkOp(); // Initialize the ordered batch
    const item_ids_to_update = Object.keys(update_obj);
    for (let i = 0; i < item_ids_to_update.length; i++) {
        const item_id_to_update = item_ids_to_update[i];
        bulk.find({ [key_label]: item_id_to_update }).updateOne({
            "$set": update_obj[item_id_to_update]
        });
    }
    const output = await bulk.execute();
    return !!(output['result']['ok']);
}
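
// Usage sketch (uids hypothetical): update_obj maps each record's key value to the
// partial update for that record, e.g. keyed by the 'uid' primary key of 'files':
//
//   await db_api.bulkUpdateRecordsByKey('files', 'uid', {
//       'uid-1': { title: 'New title' },
//       'uid-2': { favorite: true }
//   });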
exports.pushToRecordsArray = async (table, filter_obj, key, value) => {
    // local db override
    if (using_local_db) {
        exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').get(key).push(value).write();
        return true;
    }

    const output = await database.collection(table).updateOne(filter_obj, { $push: { [key]: value } });
    return !!(output['result']['ok']);
}

exports.pullFromRecordsArray = async (table, filter_obj, key, value) => {
    // local db override
    if (using_local_db) {
        exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').get(key).pull(value).write();
        return true;
    }

    const output = await database.collection(table).updateOne(filter_obj, { $pull: { [key]: value } });
    return !!(output['result']['ok']);
}
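
// Sketch (ids hypothetical, and the 'uids' array field is an assumption, not a
// documented schema): add/remove a file uid on a playlist record:
//
//   await db_api.pushToRecordsArray('playlists', { id: playlist_id }, 'uids', file_uid);
//   await db_api.pullFromRecordsArray('playlists', { id: playlist_id }, 'uids', file_uid);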
// Delete
exports.removeRecord = async (table, filter_obj) => {
    // local db override
    if (using_local_db) {
        exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'remove').write();
        return true;
    }

    const output = await database.collection(table).deleteOne(filter_obj);
    return !!(output['result']['ok']);
}
// exports.removeRecordsByUIDBulk = async (table, uids) => {
// // local db override
// if (using_local_db) {
// exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'remove').write();
// return true;
// }
// const table_collection = database.collection(table);
// let bulk = table_collection.initializeOrderedBulkOp(); // Initialize the Ordered Batch
// const item_ids_to_remove =
// for (let i = 0; i < item_ids_to_update.length; i++) {
// const item_id_to_update = item_ids_to_update[i];
// bulk.find({[key_label]: item_id_to_update }).updateOne({
// "$set": update_obj[item_id_to_update]
// });
// }
// const output = await bulk.execute();
// return !!(output['result']['ok']);
// }
exports.findDuplicatesByKey = async (table, key) => {
    let duplicates = [];
    if (using_local_db) {
        // this can probably be optimized
        const all_records = await exports.getRecords(table);
        const existing_records = {};
        for (let i = 0; i < all_records.length; i++) {
            const record = all_records[i];
            const value = record[key];
            if (existing_records[value]) {
                duplicates.push(record);
            }
            existing_records[value] = true;
        }
        return duplicates;
    }

    // group by the key's value and keep only values that appear more than once
    const duplicated_values = await database.collection(table).aggregate([
        {"$group": {"_id": `$${key}`, "count": {"$sum": 1}}},
        {"$match": {"_id": {"$ne": null}, "count": {"$gt": 1}}},
        {"$project": {[key]: "$_id", "_id": 0}}
    ]).toArray();

    // for each duplicated value, every record beyond the first match counts as a duplicate
    for (let i = 0; i < duplicated_values.length; i++) {
        const duplicated_value = duplicated_values[i];
        const duplicated_records = await exports.getRecords(table, duplicated_value, false);
        if (duplicated_records.length > 1) {
            duplicates = duplicates.concat(duplicated_records.slice(1, duplicated_records.length));
        }
    }
    return duplicates;
}
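
// Sketch: collect files that share a uid; on both backends, every record beyond the
// first match per value is returned:
//
//   const dupes = await db_api.findDuplicatesByKey('files', 'uid');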
exports.removeAllRecords = async (table = null, filter_obj = null) => {
    // local db override
    const tables_to_remove = table ? [table] : tables_list;
    logger.debug(`Removing all records from: ${tables_to_remove} with filter: ${JSON.stringify(filter_obj)}`);
    if (using_local_db) {
        for (let i = 0; i < tables_to_remove.length; i++) {
            const table_to_remove = tables_to_remove[i];
            if (filter_obj) exports.applyFilterLocalDB(local_db.get(table_to_remove), filter_obj, 'remove').write();
            else local_db.assign({ [table_to_remove]: [] }).write();
            logger.debug(`Successfully removed records from ${table_to_remove}`);
        }
        return true;
    }

    let success = true;
    for (let i = 0; i < tables_to_remove.length; i++) {
        const table_to_remove = tables_to_remove[i];
        const output = await database.collection(table_to_remove).deleteMany(filter_obj ? filter_obj : {});
        logger.debug(`Successfully removed records from ${table_to_remove}`);
        success &= !!(output['result']['ok']);
    }
    return success;
}
// Stats
exports.getDBStats = async () => {
    const stats_by_table = {};
    for (let i = 0; i < tables_list.length; i++) {
        const table = tables_list[i];
        if (table === 'test') continue;

        stats_by_table[table] = await getDBTableStats(table);
    }
    return { stats_by_table: stats_by_table, using_local_db: using_local_db };
}

const getDBTableStats = async (table) => {
    const table_stats = {};
    // local db override
    if (using_local_db) {
        table_stats['records_count'] = local_db.get(table).value().length;
    } else {
        const stats = await database.collection(table).stats();
        table_stats['records_count'] = stats.count;
    }
    return table_stats;
}
// JSON to DB
exports.generateJSONTables = async (db_json, users_json) => {
    // create records
    let files = db_json['files'] || [];
    let playlists = db_json['playlists'] || [];
    let categories = db_json['categories'] || [];
    let subscriptions = db_json['subscriptions'] || [];

    const users = users_json['users'];
    for (let i = 0; i < users.length; i++) {
        const user = users[i];
        if (user['files']) {
            user['files'] = user['files'].map(file => ({ ...file, user_uid: user['uid'] }));
            files = files.concat(user['files']);
        }
        if (user['playlists']) {
            user['playlists'] = user['playlists'].map(playlist => ({ ...playlist, user_uid: user['uid'] }));
            playlists = playlists.concat(user['playlists']);
        }
        if (user['categories']) {
            user['categories'] = user['categories'].map(category => ({ ...category, user_uid: user['uid'] }));
            categories = categories.concat(user['categories']);
        }
        if (user['subscriptions']) {
            user['subscriptions'] = user['subscriptions'].map(subscription => ({ ...subscription, user_uid: user['uid'] }));
            subscriptions = subscriptions.concat(user['subscriptions']);
        }
    }

    const tables_obj = {};

    // TODO: use create*Records funcs to strip unnecessary properties
    tables_obj.files = createFilesRecords(files, subscriptions);
    tables_obj.playlists = playlists;
    tables_obj.categories = categories;
    tables_obj.subscriptions = createSubscriptionsRecords(subscriptions);
    tables_obj.users = createUsersRecords(users);
    tables_obj.roles = createRolesRecords(users_json['roles']);
    tables_obj.downloads = createDownloadsRecords(db_json['downloads']);

    return tables_obj;
}
exports.importJSONToDB = async (db_json, users_json) => {
    await fs.writeFile(`appdata/db.json.${Date.now()/1000}.bak`, JSON.stringify(db_json, null, 2));
    await fs.writeFile(`appdata/users_db.json.${Date.now()/1000}.bak`, JSON.stringify(users_json, null, 2));
    await exports.removeAllRecords();
    const tables_obj = await exports.generateJSONTables(db_json, users_json);

    const table_keys = Object.keys(tables_obj);

    let success = true;
    for (let i = 0; i < table_keys.length; i++) {
        const table_key = table_keys[i];
        if (!tables_obj[table_key] || tables_obj[table_key].length === 0) continue;
        success &= await exports.insertRecordsIntoTable(table_key, tables_obj[table_key], true);
    }
    return success;
}
const createFilesRecords = (files, subscriptions) => {
    for (let i = 0; i < subscriptions.length; i++) {
        const subscription = subscriptions[i];
        if (!subscription['videos']) continue;
        subscription['videos'] = subscription['videos'].map(file => ({ ...file, sub_id: subscription['id'], user_uid: subscription['user_uid'] ? subscription['user_uid'] : undefined }));
        files = files.concat(subscription['videos']);
    }
    return files;
}
// TODO stubs (see the note in generateJSONTables above)
const createPlaylistsRecords = async (playlists) => {
}

const createCategoriesRecords = async (categories) => {
}
const createSubscriptionsRecords = (subscriptions) => {
    for (let i = 0; i < subscriptions.length; i++) {
        delete subscriptions[i]['videos'];
    }
    return subscriptions;
}

const createUsersRecords = (users) => {
    users.forEach(user => {
        delete user['files'];
        delete user['playlists'];
        delete user['subscriptions'];
    });
    return users;
}

const createRolesRecords = (roles) => {
    const new_roles = [];
    Object.keys(roles).forEach(role_key => {
        new_roles.push({
            key: role_key,
            ...roles[role_key]
        });
    });
    return new_roles;
}

const createDownloadsRecords = (downloads) => {
    const new_downloads = [];
    Object.keys(downloads).forEach(session_key => {
        new_downloads.push({
            key: session_key,
            ...downloads[session_key]
        });
    });
    return new_downloads;
}
exports.backupDB = async () => {
    const backup_dir = path.join('appdata', 'db_backup');
    fs.ensureDirSync(backup_dir);
    const backup_file_name = `${using_local_db ? 'local' : 'remote'}_db.json.${Date.now()/1000}.bak`;
    const path_to_backups = path.join(backup_dir, backup_file_name);

    logger.info(`Backing up ${using_local_db ? 'local' : 'remote'} DB to ${path_to_backups}`);

    const table_to_records = {};
    for (let i = 0; i < tables_list.length; i++) {
        const table = tables_list[i];
        table_to_records[table] = await exports.getRecords(table);
    }

    fs.writeJsonSync(path_to_backups, table_to_records);

    return backup_file_name;
}
exports.restoreDB = async (file_name) => {
    const path_to_backup = path.join('appdata', 'db_backup', file_name);
    logger.debug('Reading database backup file.');
    const table_to_records = fs.readJSONSync(path_to_backup);

    if (!table_to_records) {
        logger.error(`Failed to restore DB! Backup file '${path_to_backup}' could not be read.`);
        return false;
    }

    logger.debug('Clearing database.');
    await exports.removeAllRecords();
    logger.debug('Database cleared! Beginning restore.');

    let success = true;
    for (let i = 0; i < tables_list.length; i++) {
        const table = tables_list[i];
        if (!table_to_records[table] || table_to_records[table].length === 0) continue;
        success &= await exports.bulkInsertRecordsIntoTable(table, table_to_records[table]);
    }

    logger.debug('Restore finished!');

    return success;
}
exports.transferDB = async (local_to_remote) => {
    const table_to_records = {};
    for (let i = 0; i < tables_list.length; i++) {
        const table = tables_list[i];
        table_to_records[table] = await exports.getRecords(table);
    }

    logger.info('Backing up DB...');
    await exports.backupDB(); // should backup always

    using_local_db = !local_to_remote;
    if (local_to_remote) {
        const db_connected = await exports.connectToDB(5, true);
        if (!db_connected) {
            logger.error('Failed to transfer database - could not connect to MongoDB. Verify that your connection URL is valid.');
            return false;
        }
    }

    let success = true;

    logger.debug('Clearing new database before transfer...');
    await exports.removeAllRecords();
    logger.debug('Database cleared! Beginning transfer.');

    for (let i = 0; i < tables_list.length; i++) {
        const table = tables_list[i];
        if (!table_to_records[table] || table_to_records[table].length === 0) continue;
        success &= await exports.bulkInsertRecordsIntoTable(table, table_to_records[table]);
    }

    config_api.setConfigItem('ytdl_use_local_db', using_local_db);

    logger.debug('Transfer finished!');

    return success;
}
/*
    This function is necessary to emulate MongoDB's ability to search for null or missing values.
    A filter of null or undefined for a property will find docs that have that property missing, or have it
    null or undefined. We want that same functionality for the local DB as well.

        error: {$ne: null}
          ^        ^
          |        |
    filter_prop   filter_prop_value
*/
exports.applyFilterLocalDB = (db_path, filter_obj, operation) => {
    const filter_props = Object.keys(filter_obj);
    const return_val = db_path[operation](record => {
        if (!filter_props.length) return true;
        let filtered = true;
        for (let i = 0; i < filter_props.length; i++) {
            const filter_prop = filter_props[i];
            const filter_prop_value = filter_obj[filter_prop];
            if (filter_prop_value === undefined || filter_prop_value === null) {
                filtered &= record[filter_prop] === undefined || record[filter_prop] === null;
            } else {
                if (typeof filter_prop_value === 'object') {
                    // supported MongoDB-style operators: $regex, $ne, $lt, $gt, $lte, $gte
                    if ('$regex' in filter_prop_value) {
                        filtered &= (record[filter_prop].search(new RegExp(filter_prop_value['$regex'], filter_prop_value['$options'])) !== -1);
                    } else if ('$ne' in filter_prop_value) {
                        filtered &= filter_prop in record && record[filter_prop] !== filter_prop_value['$ne'];
                    } else if ('$lt' in filter_prop_value) {
                        filtered &= filter_prop in record && record[filter_prop] < filter_prop_value['$lt'];
                    } else if ('$gt' in filter_prop_value) {
                        filtered &= filter_prop in record && record[filter_prop] > filter_prop_value['$gt'];
                    } else if ('$lte' in filter_prop_value) {
                        filtered &= filter_prop in record && record[filter_prop] <= filter_prop_value['$lte'];
                    } else if ('$gte' in filter_prop_value) {
                        filtered &= filter_prop in record && record[filter_prop] >= filter_prop_value['$gte'];
                    }
                } else {
                    // handle case of nested property check
                    if (filter_prop.includes('.'))
                        filtered &= utils.searchObjectByString(record, filter_prop) === filter_prop_value;
                    else
                        filtered &= record[filter_prop] === filter_prop_value;
                }
            }
        }
        return filtered;
    });
    return return_val;
}
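
// Filter sketches (values hypothetical) for the operators handled above:
//
//   exports.applyFilterLocalDB(local_db.get('files'), { error: { $ne: null } }, 'filter');  // present and non-null
//   exports.applyFilterLocalDB(local_db.get('files'), { title: { $regex: 'intro', $options: 'i' } }, 'find');
//   exports.applyFilterLocalDB(local_db.get('files'), { 'some.nested.prop': value }, 'find');  // dot-path check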
// should only be used for tests
exports.setLocalDBMode = (mode) => {
    using_local_db = mode;
}