experimental multidownload implemented

This commit is contained in:
2022-01-27 15:32:31 +01:00
parent 9b0dc4d21b
commit 8c10868dd0
7 changed files with 199 additions and 97 deletions

View File

@@ -7,7 +7,7 @@ const { logger } = require('./logger');
* @constructor
* @param {string} url - The download url.
* @param {string} params - The cli arguments passed by the frontend.
* @param {object} settings - The download settings passed by the frontend.
* @param {*} settings - The download settings passed by the frontend.
*/
class Process {
@@ -26,7 +26,7 @@ class Process {
* @returns {Promise<this>} the process instance
*/
async start(callback) {
await this.__internalGetInfo();
await this.#__internalGetInfo();
const ytldp = spawn('./lib/yt-dlp',
[
@@ -51,7 +51,7 @@ class Process {
* function used internally by the download process to fetch information, usually thumbnail and title
* @returns Promise to the lock
*/
async __internalGetInfo() {
async #__internalGetInfo() {
let lock = true;
let stdoutChunks = [];
const ytdlpInfo = spawn('./lib/yt-dlp', ['-s', '-j', this.url]);

View File

@@ -40,6 +40,7 @@ async function download(socket, payload) {
const p = new Process(url, params, settings);
p.start().then(downloader => {
pool.add(p)
let infoLock = true;
let pid = downloader.getPid();
@@ -52,7 +53,9 @@ async function download(socket, payload) {
if (downloader.getInfo() === null) {
return;
}
socket.emit('info', downloader.getInfo());
socket.emit('info', {
pid: pid, info: downloader.getInfo()
});
infoLock = false;
}
socket.emit('progress', formatter(String(stdout), pid)) // finally, emit
@@ -61,13 +64,15 @@ async function download(socket, payload) {
downloader.kill().then(() => {
socket.emit('progress', {
status: 'Done!',
process: pid,
pid: pid,
})
pool.remove(downloader);
})
},
error: () => {
socket.emit('progress', { status: 'Done!' });
socket.emit('progress', {
status: 'Done!', pid: pid
});
}
});
})
@@ -99,14 +104,23 @@ async function retriveDownload(socket) {
// it's a hot-reload: the server is still running and the frontend asks for
// the pending jobs: retrieve them from the "in-memory database" (ProcessPool)
logger('dl', `Retrieving jobs from pool`)
const it = pool.iterator();
const it = pool.iterator();
tempWorkQueue = new Array();
// sanitize
for (const entry of it) {
const [pid, process] = entry;
pool.removeByPid(pid);
await killProcess(pid);
tempWorkQueue.push(process);
}
// resume the jobs
for (const entry of tempWorkQueue) {
await download(socket, {
url: process.url,
params: process.params
url: entry.url,
params: entry.params,
});
}
}