experimental multidownload implemented

2022-01-27 15:32:31 +01:00
parent 9b0dc4d21b
commit 8c10868dd0
7 changed files with 199 additions and 97 deletions

@@ -40,6 +40,7 @@ async function download(socket, payload) {
     const p = new Process(url, params, settings);
     p.start().then(downloader => {
+        pool.add(p)
         let infoLock = true;
         let pid = downloader.getPid();
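
The ProcessPool used above is not defined in this diff; the following is a minimal sketch consistent with the calls made here and in retriveDownload below (add, remove, removeByPid, iterator yielding [pid, process] entries), assuming each stored process exposes a getPid() method:

class ProcessPool {
    constructor() {
        // pid -> process, so several downloads can be tracked at the same time
        this.processes = new Map();
    }
    add(process) {
        this.processes.set(process.getPid(), process);
    }
    remove(process) {
        this.processes.delete(process.getPid());
    }
    removeByPid(pid) {
        this.processes.delete(pid);
    }
    iterator() {
        // yields [pid, process] pairs, as destructured in retriveDownload
        return this.processes.entries();
    }
}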
@@ -52,7 +53,9 @@ async function download(socket, payload) {
                     if (downloader.getInfo() === null) {
                         return;
                     }
-                    socket.emit('info', downloader.getInfo());
+                    socket.emit('info', {
+                        pid: pid, info: downloader.getInfo()
+                    });
                     infoLock = false;
                 }
                 socket.emit('progress', formatter(String(stdout), pid)) // finally, emit
@@ -61,13 +64,15 @@ async function download(socket, payload) {
                 downloader.kill().then(() => {
                     socket.emit('progress', {
                         status: 'Done!',
-                        process: pid,
+                        pid: pid,
                     })
                     pool.remove(downloader);
                 })
             },
             error: () => {
-                socket.emit('progress', { status: 'Done!' });
+                socket.emit('progress', {
+                    status: 'Done!', pid: pid
+                });
             }
         });
     })
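
With these changes every 'info' and 'progress' payload carries the pid of the process that produced it, which is what lets a single socket serve several downloads at once. The following is not part of the commit, just a sketch of how a socket.io client could route the pid-tagged events to per-download state, assuming formatter() includes the pid in its output (as the 'Done!' payloads above do); updateCard() is a hypothetical rendering helper:

import { io } from 'socket.io-client';

const socket = io();             // same-origin server; adjust the URL as needed
const downloads = new Map();     // pid -> latest known state

function updateCard(pid, state) {
    // placeholder renderer: a real UI would update the card for this download
    console.log(`[${pid}]`, state);
}

socket.on('info', ({ pid, info }) => {
    downloads.set(pid, { ...downloads.get(pid), info });
    updateCard(pid, downloads.get(pid));
});

socket.on('progress', (payload) => {
    const { pid, status } = payload;
    downloads.set(pid, { ...downloads.get(pid), ...payload });
    updateCard(pid, downloads.get(pid));
    if (status === 'Done!') {
        downloads.delete(pid);   // the server removes it from its pool as well
    }
});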
@@ -99,14 +104,23 @@ async function retriveDownload(socket) {
     // if it's a hot reload, the server is still running and the frontend asks for
     // the pending jobs: retrieve them from the "in-memory database" (ProcessPool)
     logger('dl', `Retrieving jobs from pool`)
-    const it = pool.iterator();
+    const it = pool.iterator();
+    tempWorkQueue = new Array();
     // sanitize
     for (const entry of it) {
         const [pid, process] = entry;
         pool.removeByPid(pid);
         await killProcess(pid);
+        tempWorkQueue.push(process);
+    }
+    // resume the jobs
+    for (const entry of tempWorkQueue) {
         await download(socket, {
-            url: process.url,
-            params: process.params
+            url: entry.url,
+            params: entry.params,
         });
     }
 }
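
killProcess() is awaited above but not shown in this diff; a plausible sketch, assuming pid is the operating-system process id of the spawned downloader:

// not part of the commit: best-effort termination of a downloader by pid
async function killProcess(pid) {
    try {
        process.kill(pid, 'SIGTERM');   // Node's global process object
    } catch (err) {
        // ESRCH: the process already exited, nothing left to clean up
        if (err.code !== 'ESRCH') throw err;
    }
}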