var fs = require('fs-extra');
var path = require('path');
const { MongoClient } = require('mongodb');
const { uuid } = require('uuidv4');
const config_api = require('./config');
var utils = require('./utils');
const logger = require('./logger');
const low = require('lowdb');
const FileSync = require('lowdb/adapters/FileSync');
const { BehaviorSubject } = require('rxjs');

const local_adapter = new FileSync('./appdata/local_db.json');
const local_db = low(local_adapter);

let database = null;
exports.database_initialized = false;
exports.database_initialized_bs = new BehaviorSubject(false);

const tables = {
    files: {
        name: 'files',
        primary_key: 'uid',
        text_search: {
            title: 'text',
            uploader: 'text',
            uid: 'text'
        }
    },
    playlists: {
        name: 'playlists',
        primary_key: 'id'
    },
    categories: {
        name: 'categories',
        primary_key: 'uid'
    },
    subscriptions: {
        name: 'subscriptions',
        primary_key: 'id'
    },
    downloads: {
        name: 'downloads'
    },
    users: {
        name: 'users',
        primary_key: 'uid'
    },
    roles: {
        name: 'roles',
        primary_key: 'key'
    },
    download_queue: {
        name: 'download_queue',
        primary_key: 'uid'
    },
    test: {
        name: 'test'
    }
}

const tables_list = Object.keys(tables);

const local_db_defaults = {};
tables_list.forEach(table => { local_db_defaults[table] = []; });
local_db.defaults(local_db_defaults).write();

let using_local_db = null;

// declared at module scope so setDB doesn't create implicit globals (which throws in strict mode)
let db = null;
let users_db = null;
function setDB(input_db, input_users_db) {
    db = input_db; users_db = input_users_db;
    exports.db = input_db;
    exports.users_db = input_users_db;
}

exports.initialize = (input_db, input_users_db) => {
    setDB(input_db, input_users_db);

    // must be done here to prevent getConfigItem from being called before init
    using_local_db = config_api.getConfigItem('ytdl_use_local_db');
}

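// Attempts to connect to MongoDB, retrying up to `retries` times before falling
// back to the local JSON DB (unless no_fallback is set). Returns true on success
// or when the fallback is engaged. A usage sketch (connection string hypothetical):
//   const ok = await exports.connectToDB(5, true, 'mongodb://127.0.0.1:27017');
//   if (!ok) logger.error('MongoDB unavailable');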
exports.connectToDB = async (retries = 5, no_fallback = false, custom_connection_string = null) => {
    if (using_local_db && !custom_connection_string) return;
    const success = await exports._connectToDB(custom_connection_string);
    if (success) return true;

    if (retries) {
        logger.warn(`MongoDB connection failed! Retrying ${retries} times...`);
        const retry_delay_ms = 2000;
        for (let i = 0; i < retries; i++) {
            const retry_succeeded = await exports._connectToDB();
            if (retry_succeeded) {
                logger.info(`Successfully connected to DB after ${i + 1} attempt(s)`);
                return true;
            }

            if (i !== retries - 1) {
                logger.warn(`Retry ${i + 1} failed, waiting ${retry_delay_ms}ms before trying again.`);
                await utils.wait(retry_delay_ms);
            } else {
                logger.warn(`Retry ${i + 1} failed.`);
            }
        }
    }

    if (no_fallback) {
        logger.error('Failed to connect to MongoDB. Verify your connection string is valid.');
        return;
    }

    using_local_db = true;
    config_api.setConfigItem('ytdl_use_local_db', true);
    logger.error('Failed to connect to MongoDB, using Local DB as a fallback. Make sure your MongoDB instance is accessible, or set Local DB as a default through the config.');
    return true;
}

exports._connectToDB = async (custom_connection_string = null) => {
    const uri = !custom_connection_string ? config_api.getConfigItem('ytdl_mongodb_connection_string') : custom_connection_string; // e.g. "mongodb://127.0.0.1:27017/?compressors=zlib&gssapiServiceName=mongodb"
    const client = new MongoClient(uri, {
        useNewUrlParser: true,
        useUnifiedTopology: true,
    });
    try {
        await client.connect();
        database = client.db('ytdl_material');

        // avoid doing anything else if it's just a test
        if (custom_connection_string) return true;

        const existing_collections = (await database.listCollections({}, { nameOnly: true }).toArray()).map(collection => collection.name);

        // use for...of rather than forEach(async ...) so the collection/index creations are actually awaited
        const missing_tables = tables_list.filter(table => !(existing_collections.includes(table)));
        for (const table of missing_tables) {
            await database.createCollection(table);
        }

        for (const table of tables_list) {
            const primary_key = tables[table]['primary_key'];
            if (primary_key) {
                await database.collection(table).createIndex({ [primary_key]: 1 }, { unique: true });
            }
            const text_search = tables[table]['text_search'];
            if (text_search) {
                await database.collection(table).createIndex(text_search);
            }
        }
        return true;
    } catch (err) {
        logger.error(err);
        return false;
    } finally {
        // Ensures that the client will close when you finish/error
        // await client.close();
    }
}

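// Registers a downloaded file in the DB from its youtube-dl info JSON (or a
// pre-built file_object). Returns the inserted file object, or false if the
// associated JSON could not be found.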
exports.registerFileDB = async (file_path, type, user_uid = null, category = null, sub_id = null, cropFileSettings = null, file_object = null) => {
    if (!file_object) file_object = generateFileObject(file_path, type);
    if (!file_object) {
        logger.error(`Could not find associated JSON file for ${type} file ${file_path}`);
        return false;
    }

    utils.fixVideoMetadataPerms(file_path, type);

    // add thumbnail path
    file_object['thumbnailPath'] = utils.getDownloadedThumbnail(file_path);

    // if category exists, only include essential info
    if (category) file_object['category'] = { name: category['name'], uid: category['uid'] };

    // modify duration
    if (cropFileSettings) {
        file_object['duration'] = (cropFileSettings.cropFileEnd || file_object.duration) - cropFileSettings.cropFileStart;
    }

    if (user_uid) file_object['user_uid'] = user_uid;
    if (sub_id) file_object['sub_id'] = sub_id;

    const file_obj = await registerFileDBManual(file_object);

    // remove metadata JSON if needed
    if (!config_api.getConfigItem('ytdl_include_metadata')) {
        utils.deleteJSONFile(file_path, type);
    }

    return file_obj;
}

async function registerFileDBManual(file_object) {
    // add additional info
    file_object['uid'] = uuid();
    file_object['registered'] = Date.now();
    const path_object = path.parse(file_object['path']);
    file_object['path'] = path.format(path_object);

    await exports.insertRecordIntoTable('files', file_object, { path: file_object['path'] });

    return file_object;
}

function generateFileObject(file_path, type) {
    var jsonobj = utils.getJSON(file_path, type);
    if (!jsonobj) {
        return null;
    } else if (!jsonobj['_filename']) {
        logger.error(`Failed to get filename from info JSON! File ${jsonobj['title']} could not be added.`);
        return null;
    }
    const true_file_path = utils.getTrueFileName(jsonobj['_filename'], type);
    var stats = fs.statSync(true_file_path);

    const file_id = utils.removeFileExtension(path.basename(file_path));
    var title = jsonobj.title;
    var url = jsonobj.webpage_url;
    var uploader = jsonobj.uploader;
    var upload_date = utils.formatDateString(jsonobj.upload_date);
    var size = stats.size;
    var thumbnail = jsonobj.thumbnail;
    var duration = jsonobj.duration;
    var isaudio = type === 'audio';
    var description = jsonobj.description;
    var file_obj = new utils.File(file_id, title, thumbnail, isaudio, duration, url, uploader, size, true_file_path, upload_date, description, jsonobj.view_count, jsonobj.height, jsonobj.abr);
    return file_obj;
}

function getAppendedBasePathSub(sub, base_path) {
    return path.join(base_path, (sub.isPlaylist ? 'playlists/' : 'channels/'), sub.name);
}

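// Builds the list of directories to scan for downloaded files. Each entry has
// the shape { basePath, type, user_uid?, sub_id? }: per-user audio/video dirs
// in multi-user mode, the global audio/video dirs otherwise, plus one dir per
// subscription when subscriptions are enabled.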
exports.getFileDirectoriesAndDBs = async () => {
    let dirs_to_check = [];
    let subscriptions_to_check = [];
    const subscriptions_base_path = config_api.getConfigItem('ytdl_subscriptions_base_path'); // only for single-user mode
    const multi_user_mode = config_api.getConfigItem('ytdl_multi_user_mode');
    const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
    const subscriptions_enabled = config_api.getConfigItem('ytdl_allow_subscriptions');
    if (multi_user_mode) {
        const users = await exports.getRecords('users');
        for (let i = 0; i < users.length; i++) {
            const user = users[i];

            // add user's audio dir to check list
            dirs_to_check.push({
                basePath: path.join(usersFileFolder, user.uid, 'audio'),
                user_uid: user.uid,
                type: 'audio'
            });

            // add user's video dir to check list
            dirs_to_check.push({
                basePath: path.join(usersFileFolder, user.uid, 'video'),
                user_uid: user.uid,
                type: 'video'
            });
        }
    } else {
        const audioFolderPath = config_api.getConfigItem('ytdl_audio_folder_path');
        const videoFolderPath = config_api.getConfigItem('ytdl_video_folder_path');

        // add audio dir to check list
        dirs_to_check.push({
            basePath: audioFolderPath,
            type: 'audio'
        });

        // add video dir to check list
        dirs_to_check.push({
            basePath: videoFolderPath,
            type: 'video'
        });
    }

    if (subscriptions_enabled) {
        const subscriptions = await exports.getRecords('subscriptions');
        subscriptions_to_check = subscriptions_to_check.concat(subscriptions);
    }

    // add subscriptions to check list
    for (let i = 0; i < subscriptions_to_check.length; i++) {
        let subscription_to_check = subscriptions_to_check[i];
        if (!subscription_to_check.name) {
            // TODO: Remove subscription as it'll never complete
            continue;
        }
        dirs_to_check.push({
            basePath: subscription_to_check.user_uid ? path.join(usersFileFolder, subscription_to_check.user_uid, 'subscriptions', subscription_to_check.isPlaylist ? 'playlists/' : 'channels/', subscription_to_check.name)
                                                     : path.join(subscriptions_base_path, subscription_to_check.isPlaylist ? 'playlists/' : 'channels/', subscription_to_check.name),
            user_uid: subscription_to_check.user_uid,
            type: subscription_to_check.type,
            sub_id: subscription_to_check['id']
        });
    }

    return dirs_to_check;
}

exports.importUnregisteredFiles = async () => {
    const dirs_to_check = await exports.getFileDirectoriesAndDBs();

    // run through check list and check each file to see if it's missing from the db
    for (let i = 0; i < dirs_to_check.length; i++) {
        const dir_to_check = dirs_to_check[i];

        // recursively get all files in dir's path
        const files = await utils.getDownloadedFilesByType(dir_to_check.basePath, dir_to_check.type);

        for (let j = 0; j < files.length; j++) {
            const file = files[j];

            // check if file exists in db, if not add it
            const files_with_same_url = await exports.getRecords('files', { url: file.url, sub_id: dir_to_check.sub_id });
            const file_is_registered = !!(files_with_same_url.find(file_with_same_url => path.resolve(file_with_same_url.path) === path.resolve(file.path)));
            if (!file_is_registered) {
                // add additional info
                await exports.registerFileDB(file['path'], dir_to_check.type, dir_to_check.user_uid, null, dir_to_check.sub_id, null);
                logger.verbose(`Added discovered file to the database: ${file.id}`);
            }
        }
    }
}

exports.addMetadataPropertyToDB = async (property_key) => {
    try {
        const dirs_to_check = await exports.getFileDirectoriesAndDBs();
        const update_obj = {};
        for (let i = 0; i < dirs_to_check.length; i++) {
            const dir_to_check = dirs_to_check[i];

            // recursively get all files in dir's path
            const files = await utils.getDownloadedFilesByType(dir_to_check.basePath, dir_to_check.type, true);
            for (let j = 0; j < files.length; j++) {
                const file = files[j];
                if (file[property_key]) {
                    update_obj[file.uid] = { [property_key]: file[property_key] };
                }
            }
        }

        return await exports.bulkUpdateRecords('files', 'uid', update_obj);
    } catch (err) {
        logger.error(err);
        return false;
    }
}

exports.createPlaylist = async (playlist_name, uids, type, user_uid = null) => {
    const first_video = await exports.getVideo(uids[0]);
    const thumbnailToUse = first_video['thumbnailURL'];

    let new_playlist = {
        name: playlist_name,
        uids: uids,
        id: uuid(),
        thumbnailURL: thumbnailToUse,
        type: type,
        registered: Date.now(),
        randomize_order: false
    };

    new_playlist.user_uid = user_uid ? user_uid : undefined;

    await exports.insertRecordIntoTable('playlists', new_playlist);
    const duration = await exports.calculatePlaylistDuration(new_playlist);
    await exports.updateRecord('playlists', { id: new_playlist.id }, { duration: duration });

    return new_playlist;
}

exports.getPlaylist = async (playlist_id, user_uid = null, require_sharing = false) => {
    let playlist = await exports.getRecord('playlists', { id: playlist_id });
    if (!playlist) {
        playlist = await exports.getRecord('categories', { uid: playlist_id });
        if (playlist) {
            // category found
            const files = await exports.getFiles(user_uid);
            utils.addUIDsToCategory(playlist, files);
        }
    }

    // converts playlists to new UID-based schema
    if (playlist && playlist['fileNames'] && !playlist['uids']) {
        playlist['uids'] = [];
        logger.verbose(`Converting playlist ${playlist['name']} to new UID-based schema.`);
        for (let i = 0; i < playlist['fileNames'].length; i++) {
            const fileName = playlist['fileNames'][i];
            const uid = await exports.getVideoUIDByID(fileName, user_uid);
            if (uid) playlist['uids'].push(uid);
            else logger.warn(`Failed to convert file with name ${fileName} to its UID while converting playlist ${playlist['name']} to the new UID-based schema. The original file is likely missing/deleted and it will be skipped.`);
        }
        exports.updatePlaylist(playlist, user_uid);
    }

    // prevent unauthorized users from accessing the file info (guarded so a missing playlist can't throw)
    if (require_sharing && !(playlist && playlist['sharingEnabled'])) return null;

    return playlist;
}

exports.updatePlaylist = async (playlist) => {
    let playlistID = playlist.id;

    const duration = await exports.calculatePlaylistDuration(playlist);
    playlist.duration = duration;

    return await exports.updateRecord('playlists', { id: playlistID }, playlist);
}

exports.setPlaylistProperty = async (playlist_id, assignment_obj, user_uid = null) => {
    let success = await exports.updateRecord('playlists', { id: playlist_id }, assignment_obj);
    if (!success) {
        success = await exports.updateRecord('categories', { uid: playlist_id }, assignment_obj);
    }
    if (!success) {
        logger.error(`Could not find playlist or category with ID ${playlist_id}`);
    }
    return success;
}

exports.calculatePlaylistDuration = async (playlist, playlist_file_objs = null) => {
    if (!playlist_file_objs) {
        playlist_file_objs = [];
        for (let i = 0; i < playlist['uids'].length; i++) {
            const uid = playlist['uids'][i];
            const file_obj = await exports.getVideo(uid);
            if (file_obj) playlist_file_objs.push(file_obj);
        }
    }

    return playlist_file_objs.reduce((a, b) => a + utils.durationStringToNumber(b.duration), 0);
}

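// Deletes a file from disk and the DB: closes any open descriptors, removes
// its ID from the youtube-dl archive (optionally writing it to the blacklist),
// unlinks the media file along with its info JSON and thumbnail, and drops the
// 'files' record.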
exports.deleteFile = async (uid, uuid = null, blacklistMode = false) => {
    const file_obj = await exports.getVideo(uid, uuid);
    const type = file_obj.isAudio ? 'audio' : 'video';
    const folderPath = path.dirname(file_obj.path);
    const name = file_obj.id;
    const filePathNoExtension = utils.removeFileExtension(file_obj.path);

    var jsonPath = `${file_obj.path}.info.json`;
    var altJSONPath = `${filePathNoExtension}.info.json`;
    var thumbnailPath = `${filePathNoExtension}.webp`;
    var altThumbnailPath = `${filePathNoExtension}.jpg`;

    jsonPath = path.join(__dirname, jsonPath);
    altJSONPath = path.join(__dirname, altJSONPath);

    let jsonExists = await fs.pathExists(jsonPath);
    let thumbnailExists = await fs.pathExists(thumbnailPath);

    if (!jsonExists) {
        if (await fs.pathExists(altJSONPath)) {
            jsonExists = true;
            jsonPath = altJSONPath;
        }
    }

    if (!thumbnailExists) {
        if (await fs.pathExists(altThumbnailPath)) {
            thumbnailExists = true;
            thumbnailPath = altThumbnailPath;
        }
    }

    let fileExists = await fs.pathExists(file_obj.path);

    if (config_api.descriptors[uid]) {
        try {
            for (let i = 0; i < config_api.descriptors[uid].length; i++) {
                config_api.descriptors[uid][i].destroy();
            }
        } catch (e) {
            // descriptors may already be closed; nothing to do
        }
    }

    let useYoutubeDLArchive = config_api.getConfigItem('ytdl_use_youtubedl_archive');
    if (useYoutubeDLArchive) {
        const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
        const archive_path = uuid ? path.join(usersFileFolder, uuid, 'archives', `archive_${type}.txt`) : path.join('appdata', 'archives', `archive_${type}.txt`);

        // get ID from JSON
        var jsonobj = await (type === 'audio' ? utils.getJSONMp3(name, folderPath) : utils.getJSONMp4(name, folderPath));
        let id = null;
        if (jsonobj) id = jsonobj.id;

        // use subscriptions API to remove video from the archive file, and write it to the blacklist
        if (await fs.pathExists(archive_path)) {
            const line = id ? await utils.removeIDFromArchive(archive_path, id) : null;
            if (blacklistMode && line) await writeToBlacklist(type, line);
        } else {
            logger.info(`Could not find archive file for ${type} files. Creating...`);
            await fs.close(await fs.open(archive_path, 'w'));
        }
    }

    if (jsonExists) await fs.unlink(jsonPath);
    if (thumbnailExists) await fs.unlink(thumbnailPath);

    await exports.removeRecord('files', { uid: uid });

    if (fileExists) {
        await fs.unlink(file_obj.path);
        if (await fs.pathExists(jsonPath) || await fs.pathExists(file_obj.path)) {
            return false;
        } else {
            return true;
        }
    } else {
        // TODO: tell user that the file didn't exist
        return true;
    }
}

// Video ID is basically just the file name without the base path and file extension - this method helps us get away from that
exports.getVideoUIDByID = async (file_id, uuid = null) => {
    const file_obj = await exports.getRecord('files', { id: file_id });
    return file_obj ? file_obj['uid'] : null;
}

exports.getVideo = async (file_uid) => {
    return await exports.getRecord('files', { uid: file_uid });
}

exports.getFiles = async (uuid = null) => {
    return await exports.getRecords('files', { user_uid: uuid });
}

exports.setVideoProperty = async (file_uid, assignment_obj) => {
    // TODO: check if video exists, throw error if not
    await exports.updateRecord('files', { uid: file_uid }, assignment_obj);
}

// Basic DB functions
// Create
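// The CRUD helpers below transparently target either MongoDB or the lowdb-backed
// local JSON DB, depending on `using_local_db`. A minimal usage sketch (the doc
// shown is hypothetical; 'test' is the scratch table defined above):
//   await exports.insertRecordIntoTable('test', { uid: 'abc', value: 1 });
//   const rec = await exports.getRecord('test', { uid: 'abc' }); // -> { uid: 'abc', value: 1 }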
exports.insertRecordIntoTable = async (table, doc, replaceFilter = null) => {
    // local db override
    if (using_local_db) {
        if (replaceFilter) local_db.get(table).remove(replaceFilter).write();
        local_db.get(table).push(doc).write();
        return true;
    }

    if (replaceFilter) {
        const output = await database.collection(table).bulkWrite([
            {
                deleteMany: {
                    filter: replaceFilter
                }
            },
            {
                insertOne: {
                    document: doc
                }
            }
        ]);
        logger.debug(`Inserted doc into ${table} with filter: ${JSON.stringify(replaceFilter)}`);
        return !!(output['result']['ok']);
    }

    const output = await database.collection(table).insertOne(doc);
    logger.debug(`Inserted doc into ${table}`);
    return !!(output['result']['ok']);
}

exports.insertRecordsIntoTable = async (table, docs, ignore_errors = false) => {
    // local db override
    if (using_local_db) {
        // push in batches so a single lowdb write doesn't get too large
        const records_limit = 30000;
        if (docs.length < records_limit) {
            local_db.get(table).push(...docs).write();
        } else {
            for (let i = 0; i < docs.length; i += records_limit) {
                const records_to_push = docs.slice(i, i + records_limit); // slice clamps to docs.length
                local_db.get(table).push(...records_to_push).write();
            }
        }
        return true;
    }

    const output = await database.collection(table).insertMany(docs, { ordered: !ignore_errors });
    logger.debug(`Inserted ${output.insertedCount} docs into ${table}`);
    return !!(output['result']['ok']);
}

exports.bulkInsertRecordsIntoTable = async (table, docs) => {
    // local db override
    if (using_local_db) {
        return await exports.insertRecordsIntoTable(table, docs);
    }

    // not a necessary function as insertRecords does the same thing but gives us more control on batch size if needed
    const table_collection = database.collection(table);

    let bulk = table_collection.initializeOrderedBulkOp(); // Initialize the Ordered Batch

    for (let i = 0; i < docs.length; i++) {
        bulk.insert(docs[i]);
    }

    const output = await bulk.execute();
    return !!(output['result']['ok']);
}

// Read
exports.getRecord = async (table, filter_obj) => {
    // local db override
    if (using_local_db) {
        return applyFilterLocalDB(local_db.get(table), filter_obj, 'find').value();
    }

    return await database.collection(table).findOne(filter_obj);
}

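// Fetches all matching records. `sort` is expected to look like
// { by: 'registered', order: 1 | -1 } and `range` like [skip, end] (end-exclusive);
// pass return_count = true to get a count instead of the documents.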
exports.getRecords = async (table, filter_obj = null, return_count = false, sort = null, range = null) => {
    // local db override
    if (using_local_db) {
        let cursor = filter_obj ? applyFilterLocalDB(local_db.get(table), filter_obj, 'filter').value() : local_db.get(table).value();
        if (sort) {
            cursor = cursor.sort((a, b) => (a[sort['by']] > b[sort['by']] ? sort['order'] : sort['order'] * -1));
        }
        if (range) {
            cursor = cursor.slice(range[0], range[1]);
        }
        return !return_count ? cursor : cursor.length;
    }

    const cursor = filter_obj ? database.collection(table).find(filter_obj) : database.collection(table).find();
    if (sort) {
        cursor.sort({ [sort['by']]: sort['order'] });
    }
    if (range) {
        cursor.skip(range[0]).limit(range[1] - range[0]);
    }

    return !return_count ? await cursor.toArray() : await cursor.count();
}

// Update
exports.updateRecord = async (table, filter_obj, update_obj) => {
    // local db override
    if (using_local_db) {
        applyFilterLocalDB(local_db.get(table), filter_obj, 'find').assign(update_obj).write();
        return true;
    }

    // sometimes _id will be in the update obj, this breaks mongodb
    if (update_obj['_id']) delete update_obj['_id'];
    const output = await database.collection(table).updateOne(filter_obj, { $set: update_obj });
    return !!(output['result']['ok']);
}

exports.updateRecords = async (table, filter_obj, update_obj) => {
    // local db override
    if (using_local_db) {
        applyFilterLocalDB(local_db.get(table), filter_obj, 'filter').assign(update_obj).write();
        return true;
    }

    const output = await database.collection(table).updateMany(filter_obj, { $set: update_obj });
    return !!(output['result']['ok']);
}

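// Applies per-record updates in one batch. `update_obj` maps each record's key
// (identified by `key_label`) to the properties to set on it, e.g. (hypothetical uids):
//   await exports.bulkUpdateRecords('files', 'uid', { uid1: { title: 'A' }, uid2: { title: 'B' } });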
exports.bulkUpdateRecords = async (table, key_label, update_obj) => {
    // local db override
    if (using_local_db) {
        local_db.get(table).each((record) => {
            const item_id_to_update = record[key_label];
            if (!update_obj[item_id_to_update]) return;
            const props_to_update = Object.keys(update_obj[item_id_to_update]);
            for (let i = 0; i < props_to_update.length; i++) {
                const prop_to_update = props_to_update[i];
                const prop_value = update_obj[item_id_to_update][prop_to_update];
                record[prop_to_update] = prop_value;
            }
        }).write();
        return true;
    }

    const table_collection = database.collection(table);

    let bulk = table_collection.initializeOrderedBulkOp(); // Initialize the Ordered Batch

    const item_ids_to_update = Object.keys(update_obj);
    for (let i = 0; i < item_ids_to_update.length; i++) {
        const item_id_to_update = item_ids_to_update[i];
        bulk.find({ [key_label]: item_id_to_update }).updateOne({
            "$set": update_obj[item_id_to_update]
        });
    }

    const output = await bulk.execute();
    return !!(output['result']['ok']);
}

exports.pushToRecordsArray = async (table, filter_obj, key, value) => {
    // local db override
    if (using_local_db) {
        applyFilterLocalDB(local_db.get(table), filter_obj, 'find').get(key).push(value).write();
        return true;
    }

    const output = await database.collection(table).updateOne(filter_obj, { $push: { [key]: value } });
    return !!(output['result']['ok']);
}

exports.pullFromRecordsArray = async (table, filter_obj, key, value) => {
    // local db override
    if (using_local_db) {
        applyFilterLocalDB(local_db.get(table), filter_obj, 'find').get(key).pull(value).write();
        return true;
    }

    const output = await database.collection(table).updateOne(filter_obj, { $pull: { [key]: value } });
    return !!(output['result']['ok']);
}

// Delete
exports.removeRecord = async (table, filter_obj) => {
    // local db override
    if (using_local_db) {
        applyFilterLocalDB(local_db.get(table), filter_obj, 'remove').write();
        return true;
    }

    const output = await database.collection(table).deleteOne(filter_obj);
    return !!(output['result']['ok']);
}

exports.removeAllRecords = async (table = null, filter_obj = null) => {
    // local db override
    const tables_to_remove = table ? [table] : tables_list;
    logger.debug(`Removing all records from: ${tables_to_remove} with filter: ${JSON.stringify(filter_obj)}`);
    if (using_local_db) {
        for (let i = 0; i < tables_to_remove.length; i++) {
            const table_to_remove = tables_to_remove[i];
            if (filter_obj) applyFilterLocalDB(local_db.get(table_to_remove), filter_obj, 'remove').write();
            else local_db.assign({ [table_to_remove]: [] }).write();
            logger.debug(`Successfully removed records from ${table_to_remove}`);
        }
        return true;
    }

    let success = true;
    for (let i = 0; i < tables_to_remove.length; i++) {
        const table_to_remove = tables_to_remove[i];
        const output = await database.collection(table_to_remove).deleteMany(filter_obj ? filter_obj : {});
        logger.debug(`Successfully removed records from ${table_to_remove}`);
        success &= !!(output['result']['ok']);
    }
    return success;
}

// Stats
exports.getDBStats = async () => {
    const stats_by_table = {};
    for (let i = 0; i < tables_list.length; i++) {
        const table = tables_list[i];
        if (table === 'test') continue;

        stats_by_table[table] = await getDBTableStats(table);
    }
    return { stats_by_table: stats_by_table, using_local_db: using_local_db };
}

const getDBTableStats = async (table) => {
    const table_stats = {};
    // local db override
    if (using_local_db) {
        table_stats['records_count'] = local_db.get(table).value().length;
    } else {
        const stats = await database.collection(table).stats();
        table_stats['records_count'] = stats.count;
    }
    return table_stats;
}

// JSON to DB
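// Flattens the legacy nested JSON DBs into per-table record arrays. Files,
// playlists, categories, and subscriptions nested under each user are hoisted
// to the top level with a user_uid property so ownership is preserved.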
exports.generateJSONTables = async (db_json, users_json) => {
    // create records
    let files = db_json['files'] || [];
    let playlists = db_json['playlists'] || [];
    let categories = db_json['categories'] || [];
    let subscriptions = db_json['subscriptions'] || [];

    const users = users_json['users'];
    for (let i = 0; i < users.length; i++) {
        const user = users[i];
        if (user['files']) {
            user['files'] = user['files'].map(file => ({ ...file, user_uid: user['uid'] }));
            files = files.concat(user['files']);
        }
        if (user['playlists']) {
            user['playlists'] = user['playlists'].map(playlist => ({ ...playlist, user_uid: user['uid'] }));
            playlists = playlists.concat(user['playlists']);
        }
        if (user['categories']) {
            user['categories'] = user['categories'].map(category => ({ ...category, user_uid: user['uid'] }));
            categories = categories.concat(user['categories']);
        }
        if (user['subscriptions']) {
            user['subscriptions'] = user['subscriptions'].map(subscription => ({ ...subscription, user_uid: user['uid'] }));
            subscriptions = subscriptions.concat(user['subscriptions']);
        }
    }

    const tables_obj = {};
    // TODO: use create*Records funcs to strip unnecessary properties
    tables_obj.files = createFilesRecords(files, subscriptions);
    tables_obj.playlists = playlists;
    tables_obj.categories = categories;
    tables_obj.subscriptions = createSubscriptionsRecords(subscriptions);
    tables_obj.users = createUsersRecords(users);
    tables_obj.roles = createRolesRecords(users_json['roles']);
    tables_obj.downloads = createDownloadsRecords(db_json['downloads']);

    return tables_obj;
}

exports.importJSONToDB = async (db_json, users_json) => {
    await fs.writeFile(`appdata/db.json.${Date.now() / 1000}.bak`, JSON.stringify(db_json, null, 2));
    await fs.writeFile(`appdata/users_db.json.${Date.now() / 1000}.bak`, JSON.stringify(users_json, null, 2));
    await exports.removeAllRecords();
    const tables_obj = await exports.generateJSONTables(db_json, users_json);

    const table_keys = Object.keys(tables_obj);

    let success = true;
    for (let i = 0; i < table_keys.length; i++) {
        const table_key = table_keys[i];
        if (!tables_obj[table_key] || tables_obj[table_key].length === 0) continue;
        success &= await exports.insertRecordsIntoTable(table_key, tables_obj[table_key], true);
    }

    return success;
}

const createFilesRecords = (files, subscriptions) => {
    for (let i = 0; i < subscriptions.length; i++) {
        const subscription = subscriptions[i];
        subscription['videos'] = subscription['videos'].map(file => ({ ...file, sub_id: subscription['id'], user_uid: subscription['user_uid'] ? subscription['user_uid'] : undefined }));
        files = files.concat(subscriptions[i]['videos']);
    }

    return files;
}

// TODO: implement (currently unused placeholders)
const createPlaylistsRecords = async (playlists) => {
}

const createCategoriesRecords = async (categories) => {
}

const createSubscriptionsRecords = (subscriptions) => {
    for (let i = 0; i < subscriptions.length; i++) {
        delete subscriptions[i]['videos'];
    }
    return subscriptions;
}

const createUsersRecords = (users) => {
    users.forEach(user => {
        delete user['files'];
        delete user['playlists'];
        delete user['subscriptions'];
    });
    return users;
}

const createRolesRecords = (roles) => {
    const new_roles = [];
    // roles/downloads may be absent from older JSON exports, so guard with an empty object
    Object.keys(roles || {}).forEach(role_key => {
        new_roles.push({
            key: role_key,
            ...roles[role_key]
        });
    });
    return new_roles;
}

const createDownloadsRecords = (downloads) => {
    const new_downloads = [];
    Object.keys(downloads || {}).forEach(session_key => {
        new_downloads.push({
            key: session_key,
            ...downloads[session_key]
        });
    });
    return new_downloads;
}

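// Migrates all records between the local JSON DB and MongoDB. Pass
// local_to_remote = true to copy local -> MongoDB (the local DB is backed up
// first), false for the reverse; the config is updated to match on success.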
exports.transferDB = async (local_to_remote) => {
    const table_to_records = {};
    for (let i = 0; i < tables_list.length; i++) {
        const table = tables_list[i];
        table_to_records[table] = await exports.getRecords(table);
    }

    using_local_db = !local_to_remote;
    if (local_to_remote) {
        // backup local DB
        logger.debug('Backing up Local DB...');
        await fs.copyFile('appdata/local_db.json', `appdata/local_db.json.${Date.now() / 1000}.bak`);
        const db_connected = await exports.connectToDB(5, true);
        if (!db_connected) {
            logger.error('Failed to transfer database - could not connect to MongoDB. Verify that your connection URL is valid.');
            return false;
        }
    }

    let success = true;

    logger.debug('Clearing new database before transfer...');
    await exports.removeAllRecords();
    logger.debug('Database cleared! Beginning transfer.');

    for (let i = 0; i < tables_list.length; i++) {
        const table = tables_list[i];
        if (!table_to_records[table] || table_to_records[table].length === 0) continue;
        success &= await exports.bulkInsertRecordsIntoTable(table, table_to_records[table]);
    }

    config_api.setConfigItem('ytdl_use_local_db', using_local_db);

    logger.debug('Transfer finished!');

    return success;
}

/*
    This function is necessary to emulate MongoDB's ability to search for null or missing values.
    A filter of null or undefined for a property will find docs that have that property missing, or have it
    null or undefined. We want that same functionality for the local DB as well.
*/
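// Example of the emulated semantics (hypothetical records): a filter of
// { sub_id: null } matches records that lack a sub_id property entirely as
// well as ones where it is explicitly null or undefined.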
const applyFilterLocalDB = (db_path, filter_obj, operation) => {
    const filter_props = Object.keys(filter_obj);
    const return_val = db_path[operation](record => {
        if (!filter_props.length) return true;
        let filtered = true;
        for (let i = 0; i < filter_props.length; i++) {
            const filter_prop = filter_props[i];
            const filter_prop_value = filter_obj[filter_prop];
            if (filter_prop_value === undefined || filter_prop_value === null) {
                filtered &= record[filter_prop] === undefined || record[filter_prop] === null;
            } else {
                if (typeof filter_prop_value === 'object') {
                    if (filter_prop_value['$regex']) {
                        filtered &= (record[filter_prop].search(new RegExp(filter_prop_value['$regex'], filter_prop_value['$options'])) !== -1);
                    }
                } else {
                    filtered &= record[filter_prop] === filter_prop_value;
                }
            }
        }
        return filtered;
    });
    return return_val;
}

// archive helper functions
async function writeToBlacklist(type, line) {
    const archivePath = path.join(__dirname, 'appdata', 'archives');
    let blacklistPath = path.join(archivePath, (type === 'audio') ? 'blacklist_audio.txt' : 'blacklist_video.txt');
    // strip any existing line endings, then prepend a newline so the entry starts on its own line
    line = line.replace('\n', '').replace('\r', '');
    line = '\n' + line;
    await fs.appendFile(blacklistPath, line);
}