server/cli/ascii.go (Normal file, 23 lines)
@@ -0,0 +1,23 @@
package cli

import "fmt"

const (
  // FG
  Red     = "\033[31m"
  Green   = "\033[32m"
  Yellow  = "\033[33m"
  Blue    = "\033[34m"
  Magenta = "\033[35m"
  Cyan    = "\033[36m"
  Reset   = "\033[0m"
  // BG
  BgRed   = "\033[1;41m"
  BgBlue  = "\033[1;44m"
  BgGreen = "\033[1;42m"
)

// Format wraps a message in the specified ANSI escape code, then resets it.
func Format(message string, code string) string {
  return fmt.Sprintf("%s%s%s", code, message, Reset)
}
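A minimal usage sketch of the helper above, assuming it is called from a separate main package (the import path mirrors the module path used elsewhere in this commit):

package main

import (
  "fmt"

  "github.com/marcopeocchi/yt-dlp-web-ui/server/cli"
)

func main() {
  // print a red error line and a green success line; Format appends Reset for us
  fmt.Println(cli.Format("download failed", cli.Red))
  fmt.Println(cli.Format("download completed", cli.Green))
}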
server/handlers.go (Normal file, 68 lines)
@@ -0,0 +1,68 @@
package server

import (
  "log"

  "github.com/goccy/go-json"
  "github.com/gofiber/websocket/v2"
)

// Websocket handlers

func download(c *websocket.Conn) {
  req := DownloadRequest{}
  c.ReadJSON(&req)

  p := Process{mem: &db, url: req.Url, params: req.Params}
  p.Start()

  c.WriteJSON(req)
}

func getFormats(c *websocket.Conn) {
  log.Println("Requesting formats")
  mtype, msg, _ := c.ReadMessage()

  req := DownloadRequest{}
  json.Unmarshal(msg, &req)

  p := Process{mem: &db, url: req.Url}
  p.GetFormatsSync()

  c.WriteMessage(mtype, msg)
}

func status(c *websocket.Conn) {
  mtype, _, _ := c.ReadMessage()

  all := db.All()
  msg, _ := json.Marshal(all)

  c.WriteMessage(mtype, msg)
}

func abort(c *websocket.Conn) {
  mtype, msg, _ := c.ReadMessage()

  req := AbortRequest{}
  json.Unmarshal(msg, &req)

  p := db.Get(req.Id)
  p.Kill()

  c.WriteMessage(mtype, msg)
}

func abortAll(c *websocket.Conn) {
  keys := db.Keys()
  for _, key := range keys {
    proc := db.Get(key)
    if proc != nil {
      proc.Kill()
    }
  }
}

func hotUpdate(c *websocket.Conn) {

}
server/memory_db.go (Normal file, 121 lines)
@@ -0,0 +1,121 @@
package server

import (
  "log"
  "os"
  "sync"

  "github.com/goccy/go-json"

  "github.com/google/uuid"
  "github.com/marcopeocchi/yt-dlp-web-ui/server/cli"
)

// In-Memory volatile Thread-Safe Key-Value Storage
type MemoryDB struct {
  table map[string]*Process
  mu    sync.Mutex
}

// Inits the db with an empty map of string->Process pointer
func (m *MemoryDB) New() {
  m.table = make(map[string]*Process)
}

// Get a process pointer given its id
func (m *MemoryDB) Get(id string) *Process {
  m.mu.Lock()
  res := m.table[id]
  m.mu.Unlock()
  return res
}

// Store a pointer of a process and return its id
func (m *MemoryDB) Set(process *Process) string {
  id := uuid.Must(uuid.NewRandom()).String()
  m.mu.Lock()
  m.table[id] = process
  m.mu.Unlock()
  return id
}

// Update a process info/metadata, given the process id
func (m *MemoryDB) Update(id string, info DownloadInfo) {
  m.mu.Lock()
  if m.table[id] != nil {
    m.table[id].Info = info
  }
  m.mu.Unlock()
}

// Update a process progress data, given the process id.
// Used for updating completion percentage or ETA.
func (m *MemoryDB) UpdateProgress(id string, progress DownloadProgress) {
  m.mu.Lock()
  if m.table[id] != nil {
    m.table[id].Progress = progress
  }
  m.mu.Unlock()
}

// Removes a process, given the process id
func (m *MemoryDB) Delete(id string) {
  m.mu.Lock()
  delete(m.table, id)
  m.mu.Unlock()
}

// Returns a slice of all currently stored processes id
func (m *MemoryDB) Keys() []string {
  m.mu.Lock()
  keys := make([]string, len(m.table))
  i := 0
  for k := range m.table {
    keys[i] = k
    i++
  }
  m.mu.Unlock()
  return keys
}

// Returns a slice of all currently stored processes progress
func (m *MemoryDB) All() []ProcessResponse {
  running := make([]ProcessResponse, len(m.table))
  i := 0
  for k, v := range m.table {
    if v != nil {
      running[i] = ProcessResponse{
        Id:       k,
        Info:     v.Info,
        Progress: v.Progress,
      }
      i++
    }
  }
  return running
}

// WIP: Persist the database in a single file named "session.dat"
func (m *MemoryDB) Persist() {
  running := m.All()

  session, err := json.Marshal(Session{
    Processes: running,
  })
  if err != nil {
    log.Println(cli.Red, "Failed to persist database", cli.Reset)
    return
  }

  err = os.WriteFile("session.dat", session, 0700)
  if err != nil {
    log.Println(cli.Red, "Failed to persist database", cli.Reset)
  }
}

// WIP: Restore a persisted state
func (m *MemoryDB) Restore() {
  feed, _ := os.ReadFile("session.dat")
  session := Session{}
  json.Unmarshal(feed, &session)
}
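An illustrative sketch of how the store above is used from within the same package (the URL and progress values are made up, not part of the commit):

package server

import "log"

// exampleMemoryDBUsage is an illustrative sketch, not part of the commit.
func exampleMemoryDBUsage() {
  var store MemoryDB
  store.New()

  p := &Process{url: "https://example.com/video"} // hypothetical URL
  id := store.Set(p)                              // Set returns a fresh UUID key

  store.UpdateProgress(id, DownloadProgress{Percentage: "42.0%", Speed: 1024, ETA: 60})

  // every accessor takes the lock internally, so this is safe across goroutines
  for _, key := range store.Keys() {
    log.Println(key, store.Get(key).Progress.Percentage)
  }
}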
server/process.go (Normal file, 154 lines)
@@ -0,0 +1,154 @@
package server

import (
  "bufio"

  "github.com/goccy/go-json"

  "log"
  "os"
  "os/exec"
  "strings"
  "time"

  "github.com/marcopeocchi/yt-dlp-web-ui/server/rx"
)

const template = `download:
{
"eta":%(progress.eta)s,
"percentage":"%(progress._percent_str)s",
"speed":%(progress.speed)s
}`

const driver = "yt-dlp"

type ProgressTemplate struct {
  Percentage string  `json:"percentage"`
  Speed      float32 `json:"speed"`
  Size       string  `json:"size"`
  Eta        int     `json:"eta"`
}

// Process descriptor
type Process struct {
  id       string
  url      string
  params   []string
  Info     DownloadInfo
  Progress DownloadProgress
  mem      *MemoryDB
  proc     *os.Process
}

// Start spawns/forks a new yt-dlp process and parses its stdout.
// The process is spawned outputting a custom progress text that
// resembles a JSON object, in order to unmarshal it later.
// This approach is not perfect: quotes are not escaped properly.
// Each process is identified not by its PID but by a UUID.
func (p *Process) Start() {
  params := append([]string{
    strings.Split(p.url, "?list")[0], //no playlist
    "--newline",
    "--no-colors",
    "--no-playlist",
    "--progress-template", strings.ReplaceAll(template, "\n", ""),
    "-o",
    "./downloads/%(title)s.%(ext)s",
  }, p.params...)

  // ----------------- main block ----------------- //
  cmd := exec.Command(driver, params...)
  r, err := cmd.StdoutPipe()
  if err != nil {
    log.Panicln(err)
  }
  scan := bufio.NewScanner(r)

  err = cmd.Start()
  if err != nil {
    log.Panicln(err)
  }

  p.id = p.mem.Set(p)
  p.proc = cmd.Process

  // ----------------- info block ----------------- //
  // spawn a goroutine that retrieves the info for the download
  go func() {
    cmd := exec.Command(driver, p.url, "-J")
    stdout, err := cmd.Output()
    if err != nil {
      log.Println("Cannot retrieve info for", p.url)
    }
    info := DownloadInfo{URL: p.url}
    json.Unmarshal(stdout, &info)
    p.mem.Update(p.id, info)
  }()

  // --------------- progress block --------------- //
  // unbuffered channel connected to stdout
  eventChan := make(chan string)

  // spawn a goroutine that does the dirty job of parsing the stdout
  // fill the channel with as many stdout lines as yt-dlp produces (producer)
  go func() {
    defer cmd.Wait()
    defer r.Close()
    defer p.Complete()
    for scan.Scan() {
      eventChan <- scan.Text()
    }
  }()

  // do the unmarshal operation every 500ms (consumer)
  go rx.Sample(time.Millisecond*500, eventChan, func(text string) {
    stdout := ProgressTemplate{}
    err := json.Unmarshal([]byte(text), &stdout)
    if err == nil {
      p.mem.UpdateProgress(p.id, DownloadProgress{
        Percentage: stdout.Percentage,
        Speed:      stdout.Speed,
        ETA:        stdout.Eta,
      })
      shortId := strings.Split(p.id, "-")[0]
      log.Printf("[%s] %s %s\n", shortId, p.url, p.Progress.Percentage)
    }
  })
  // ------------- end progress block ------------- //
}

// Keeps the process in the memoryDB but marks it as complete.
// Convention: all completed processes have progress -1
// and speed 0 bps.
func (p *Process) Complete() {
  p.mem.UpdateProgress(p.id, DownloadProgress{
    Percentage: "-1",
    Speed:      0,
    ETA:        0,
  })
}

// Kill a process and remove it from the memory
func (p *Process) Kill() error {
  err := p.proc.Kill()
  p.mem.Delete(p.id)
  log.Printf("Killed process %s\n", p.id)
  return err
}

func (p *Process) GetFormatsSync() (DownloadInfo, error) {
  cmd := exec.Command(driver, p.url, "-J")
  stdout, err := cmd.Output()

  if err != nil {
    return DownloadInfo{}, err
  }

  cmd.Wait()

  info := DownloadInfo{URL: p.url}
  json.Unmarshal(stdout, &info)

  return info, nil
}
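To make the consumer side concrete, here is roughly what a single stdout line produced by the progress template looks like and how it decodes into ProgressTemplate; the numeric values are made up for illustration:

package server

import (
  "fmt"

  "github.com/goccy/go-json"
)

// exampleProgressLine is an illustrative sketch, not part of the commit.
func exampleProgressLine() {
  // one line of yt-dlp stdout under the template above (values invented)
  line := `{"eta":35,"percentage":" 42.0%","speed":1048576.0}`

  var progress ProgressTemplate
  if err := json.Unmarshal([]byte(line), &progress); err == nil {
    fmt.Println(progress.Percentage, progress.Eta, progress.Speed)
  }
}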
server/rpc.go (Normal file, 39 lines)
@@ -0,0 +1,39 @@
package server

import (
  "bytes"
  "io"
  "net/rpc/jsonrpc"
)

// Wrapper for HTTP RPC request that implements io.Reader interface
type rpcRequest struct {
  r    io.Reader
  rw   io.ReadWriter
  done chan bool
}

func NewRPCRequest(r io.Reader) *rpcRequest {
  var buf bytes.Buffer
  done := make(chan bool)
  return &rpcRequest{r, &buf, done}
}

func (r *rpcRequest) Read(p []byte) (n int, err error) {
  return r.r.Read(p)
}

func (r *rpcRequest) Write(p []byte) (n int, err error) {
  return r.rw.Write(p)
}

func (r *rpcRequest) Close() error {
  r.done <- true
  return nil
}

func (r *rpcRequest) Call() io.Reader {
  go jsonrpc.ServeConn(r)
  <-r.done
  return r.rw
}
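A hedged sketch of how this wrapper could back an HTTP endpoint: each request body is served as one JSON-RPC "connection", and the buffered reply is copied back. The route name and wiring are assumptions for illustration only; this commit does not register such a route.

package server

import (
  "io"
  "net/http"
  "net/rpc"
)

// exampleRPCEndpoint is an illustrative sketch, not part of the commit.
func exampleRPCEndpoint() {
  rpc.Register(new(Service)) // the RPC service defined in service.go

  http.HandleFunc("/rpc", func(w http.ResponseWriter, r *http.Request) {
    // Call blocks until jsonrpc.ServeConn has written the response and closed
    res := NewRPCRequest(r.Body).Call()
    io.Copy(w, res)
  })
}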
server/rx/extensions.go (Normal file, 47 lines)
@@ -0,0 +1,47 @@
package rx

import "time"

/*
Package rx contains:
- Definitions for common reactive programming functions/patterns
*/

// ReactiveX inspired debounce function.
//
// Debounce emits a string from the source channel only after a particular
// time span determined by a Go time.Duration interval.
// --A--B--CD--EFG-------|>
//
// -t-> |>
// -t-> |> t is a timer tick
// -t-> |>
//
// --A-----C-----G-------|>
func Debounce(interval time.Duration, source chan string, cb func(emit string)) {
  var item string
  timer := time.NewTimer(interval)
  for {
    select {
    case item = <-source:
      timer.Reset(interval)
    case <-timer.C:
      if item != "" {
        cb(item)
      }
    }
  }
}

// ReactiveX inspired sample function.
//
// Sample emits the most recently emitted value from the source
// within the timespan set by the span time.Duration.
func Sample[T any](span time.Duration, source chan T, cb func(emit T)) {
  timer := time.NewTimer(span)
  for {
    <-timer.C
    cb(<-source)
    timer.Reset(span)
  }
}
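A minimal usage sketch of Sample with a fast producer (event names and timings are invented for illustration):

package rx

import (
  "fmt"
  "time"
)

// exampleSample is an illustrative sketch, not part of the commit.
func exampleSample() {
  events := make(chan string)

  // producer: emits far more often than the consumer samples
  go func() {
    for i := 0; ; i++ {
      events <- fmt.Sprintf("event %d", i)
      time.Sleep(100 * time.Millisecond)
    }
  }()

  // consumer: receives one value per second; since the channel is
  // unbuffered, the producer blocks between ticks
  go Sample(time.Second, events, func(emit string) {
    fmt.Println(emit)
  })

  time.Sleep(5 * time.Second)
}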
server/server.go (Normal file, 63 lines)
@@ -0,0 +1,63 @@
package server

import (
  "context"
  "fmt"
  "io/fs"
  "log"
  "net/http"

  "github.com/gofiber/fiber/v2"
  "github.com/gofiber/fiber/v2/middleware/filesystem"
  "github.com/gofiber/websocket/v2"
)

var db MemoryDB

func init() {
  db.New()
}

func RunBlocking(ctx context.Context) {
  fe := ctx.Value("frontend").(fs.SubFS)
  port := ctx.Value("port")

  app := fiber.New()

  app.Use("/", filesystem.New(filesystem.Config{
    Root: http.FS(fe),
  }))

  app.Get("/ws", websocket.New(func(c *websocket.Conn) {
    for {
      mtype, msg, err := c.ReadMessage()
      if err != nil {
        break
      }

      switch string(msg) {
      case "send-url-format-selection":
        getFormats(c)
      case "send-url":
        download(c)
      case "abort":
        abort(c)
      case "abort-all":
        abortAll(c)
      case "status":
        status(c)
      case "update-bin":
        hotUpdate(c)
      }

      log.Printf("Read: %s", msg)

      err = c.WriteMessage(mtype, msg)
      if err != nil {
        break
      }
    }
  }))

  log.Fatal(app.Listen(fmt.Sprintf(":%s", port)))
}
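A hedged client-side sketch of the /ws message flow above: the first text frame selects the action, and the matching handler then reads the JSON payload from the next frame. The gorilla/websocket dialer, host, and port are assumptions used only for illustration.

package main

import (
  "log"

  "github.com/gorilla/websocket"
)

func main() {
  // host/port are assumptions; the real port comes from the server context
  c, _, err := websocket.DefaultDialer.Dial("ws://localhost:3033/ws", nil)
  if err != nil {
    log.Fatal(err)
  }
  defer c.Close()

  // frame 1: select the "download" action handled by download(c)
  c.WriteMessage(websocket.TextMessage, []byte("send-url"))
  // frame 2: the DownloadRequest the handler unmarshals
  c.WriteJSON(map[string]any{
    "url":    "https://example.com/watch?v=xyz",
    "params": []string{"-x"},
  })

  _, reply, _ := c.ReadMessage()
  log.Printf("server replied: %s", reply)
}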
server/service.go (Normal file, 78 lines)
@@ -0,0 +1,78 @@
package server

import (
  "log"

  "github.com/marcopeocchi/yt-dlp-web-ui/server/sys"
)

type Service int

type Running []ProcessResponse
type Pending []string

type NoArgs struct{}
type Args struct {
  Id     string
  URL    string
  Params []string
}

// Exec spawns a Process.
// The result of the execution is the newly spawned process Id.
func (t *Service) Exec(args Args, result *string) error {
  log.Printf("Spawning new process for %s\n", args.URL)
  p := Process{mem: &db, url: args.URL, params: args.Params}
  p.Start()
  *result = p.id
  return nil
}

// Progess retrieves the Progress of a specific Process given its Id
func (t *Service) Progess(args Args, progress *DownloadProgress) error {
  *progress = db.Get(args.Id).Progress
  return nil
}

// Pending retrieves a slice of all Pending/Running processes ids
func (t *Service) Pending(args NoArgs, pending *Pending) error {
  *pending = Pending(db.Keys())
  return nil
}

// Running retrieves a slice of all Processes progress
func (t *Service) Running(args NoArgs, running *Running) error {
  *running = db.All()
  return nil
}

// Kill kills a process given its id and removes it from the memoryDB
func (t *Service) Kill(args string, killed *string) error {
  proc := db.Get(args)
  var err error
  if proc != nil {
    err = proc.Kill()
  }
  return err
}

// KillAll kills all processes unconditionally and removes them from
// the memory db
func (t *Service) KillAll(args NoArgs, killed *string) error {
  keys := db.Keys()
  var err error
  for _, key := range keys {
    proc := db.Get(key)
    if proc != nil {
      proc.Kill()
    }
  }
  return err
}

// FreeSpace gets the available disk space from the sys package util
func (t *Service) FreeSpace(args NoArgs, free *uint64) error {
  freeSpace, err := sys.FreeSpace()
  *free = freeSpace
  return err
}
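For reference, a sketch of driving Service.Exec through the rpcRequest wrapper from rpc.go, using the wire format Go's jsonrpc codec expects ("params" is a one-element array). The URL and the wiring are illustrative assumptions; note that a real call would actually spawn yt-dlp.

package server

import (
  "fmt"
  "io"
  "net/rpc"
  "strings"
)

// exampleServiceCall is an illustrative sketch, not part of the commit.
func exampleServiceCall() {
  rpc.Register(new(Service))

  body := `{"method":"Service.Exec","params":[{"URL":"https://example.com/video","Params":["-x"]}],"id":0}`
  reply := NewRPCRequest(strings.NewReader(body)).Call()

  out, _ := io.ReadAll(reply)
  fmt.Println(string(out)) // something like {"id":0,"result":"<process uuid>","error":null}
}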
@@ -1,86 +0,0 @@
import { createServer, Server } from 'http';
import { parse as urlParse } from 'url';
import { open, close, readFile, fstat } from 'fs';
import { parse, join } from 'path';

namespace server {
  export const mimes = {
    '.html': 'text/html',
    '.ico': 'image/x-icon',
    '.js': 'text/javascript',
    '.json': 'application/json',
    '.css': 'text/css',
    '.png': 'image/png',
    '.jpg': 'image/jpeg',
    '.webp': 'image/webp',
  };
}

class Jean {
  private workingDir: string;

  /**
   * Jean static file server its only purpose is serving SPA and images
   * with the lowest impact possible.
   * @param workingDir sets the root directory automatically trying index.html
   * If specified the file in addition to the directory it will serve the
   * file directly.
   * *e.g* new Jean(path.join(__dirname, 'dist')) will try
   * index.html from the dist directory;
   * @author me :D
   */

  constructor(workingDir: string) {
    this.workingDir = workingDir;
  }

  /**
   * Create a static file server
   * @returns an instance of a standard NodeJS http.Server
   */
  public createServer(): Server {
    return createServer((req, res) => {
      // parse the current given url
      const parsedUrl = urlParse(req.url, false)
      // extract the pathname and guard it with the working dir
      let pathname = join(this.workingDir, `.${parsedUrl.pathname}`);
      // extract the file extension
      const ext = parse(pathname).ext;

      // open the file or directory and fetch its descriptor
      open(pathname, 'r', (err, fd) => {
        // whoops, not found, send a 404
        if (err) {
          res.statusCode = 404;
          res.end(`File ${pathname} not found!`);
          return;
        }
        // something's gone wrong it's not a file or a directory
        fstat(fd, (err, stat) => {
          if (err) {
            res.statusCode = 500;
            res.end(err);
          }
          // try file index.html
          if (stat.isDirectory()) {
            pathname = join(pathname, 'index.html')
          }
          // read the file
          readFile(pathname, (err, data) => {
            if (err) {
              res.statusCode = 500;
              res.end(`Error reading the file: ${err}`);
            } else {
              // infer it's extension otherwise it's the index.html
              res.setHeader('Content-type', server.mimes[ext] || 'text/html');
              res.end(data);
              close(fd);
            }
          });
        })
      });
    })
  }
}

export default Jean;
@@ -1,166 +0,0 @@
import { spawn } from 'child_process';
import { join } from 'path';
import { Readable } from 'stream';
import { ISettings } from '../interfaces/ISettings';
import { availableParams } from '../utils/params';
import Logger from '../utils/BetterLogger';
import { IDownloadFormat, IDownloadMetadata } from '../interfaces/IDownloadMetadata';

const log = Logger.instance;

/**
 * Represents a download process that spawns yt-dlp.
 * @param url - The downlaod url.
 * @param params - The cli arguments passed by the frontend.
 * @param settings - The download settings passed by the frontend.
 */

class Process {
  public readonly url: string;
  public readonly params: Array<string>;
  private settings: ISettings;
  private stdout: Readable;
  private pid: number;
  private metadata?: IDownloadMetadata;
  private exePath = join(__dirname, 'yt-dlp');
  private customFileName?: string;

  private readonly template = `download:
  {
    "eta":%(progress.eta)s,
    "percentage":"%(progress._percent_str)s",
    "speed":"%(progress._speed_str)s",
    "size":%(info.filesize_approx)s
  }`
    .replace(/\s\s+/g, ' ')
    .replace('\n', '');

  constructor(url: string, params: Array<string>, settings: any, customFileName?: string) {
    this.url = url;
    this.params = params || [];
    this.settings = settings
    this.stdout = undefined;
    this.pid = undefined;
    this.metadata = undefined;
    this.customFileName = customFileName;
  }

  /**
   * function that launch the download process, sets the stdout property and the pid
   * @param callback not yet implemented
   * @returns the process instance
   */
  public async start(callback?: Function): Promise<this> {
    const sanitizedParams = this.params.filter((param: string) => availableParams.includes(param));

    if (this.settings?.download_path) {
      if (this.settings.download_path.charAt(this.settings.download_path.length - 1) !== '/') {
        this.settings.download_path = `${this.settings.download_path}/`
      }
    }

    const ytldp = spawn(this.exePath,
      [
        '-o', `${this.settings?.download_path || 'downloads/'}${this.customFileName || '%(title)s'}.%(ext)s`,
        '--progress-template', this.template,
        '--no-colors',
      ]
        .concat(sanitizedParams)
        .concat((this.settings?.cliArgs ?? []).map(arg => arg.split(' ')).flat())
        .concat([this.url])
    );

    this.pid = ytldp.pid;
    this.stdout = ytldp.stdout;

    log.info('proc', `Spawned a new process, pid: ${this.pid}`)

    if (callback) {
      callback()
    }

    return this;
  }

  /**
   * function used internally by the download process to fetch information, usually thumbnail and title
   * @returns Promise to the lock
   */
  public getMetadata(): Promise<IDownloadMetadata> {
    if (!this.metadata) {
      let stdoutChunks = [];
      const ytdlpInfo = spawn(this.exePath, ['-J', this.url]);

      ytdlpInfo.stdout.on('data', (data) => {
        stdoutChunks.push(data);
      });

      return new Promise((resolve, reject) => {
        ytdlpInfo.on('exit', () => {
          try {
            const buffer = Buffer.concat(stdoutChunks);
            const json = JSON.parse(buffer.toString());
            const info = {
              formats: json.formats.map((format: IDownloadFormat) => {
                return {
                  format_id: format.format_id ?? '',
                  format_note: format.format_note ?? '',
                  fps: format.fps ?? '',
                  resolution: format.resolution ?? '',
                  vcodec: format.vcodec ?? '',
                  acodec: format.acodec ?? '',
                }
              }).filter((format: IDownloadFormat) => format.format_note !== 'storyboard'),
              best: {
                format_id: json.format_id ?? '',
                format_note: json.format_note ?? '',
                fps: json.fps ?? '',
                resolution: json.resolution ?? '',
                vcodec: json.vcodec ?? '',
                acodec: json.acodec ?? '',
              },
              thumbnail: json.thumbnail,
              title: json.title,
            }
            resolve(info);
            this.metadata = info;

          } catch (e) {
            reject('failed fetching formats, downloading best available');
          }
        });
      })
    }
    return new Promise((resolve) => { resolve(this.metadata!) });
  }

  /**
   * function that kills the current process
   */
  async kill() {
    spawn('kill', [String(this.pid)]).on('exit', () => {
      log.info('proc', `Stopped ${this.pid} because SIGKILL`)
    });
  }

  /**
   * pid getter function
   * @returns {number} pid
   */
  getPid(): number {
    if (!this.pid) {
      throw "Process isn't started"
    }
    return this.pid;
  }

  /**
   * stdout getter function
   * @returns {Readable} stdout as stream
   */
  getStdout(): Readable {
    return this.stdout
  }
}

export default Process;
@@ -1,30 +0,0 @@
import { resolve as pathResolve } from "path";
import { readdir } from "fs";
import { ISettings } from "../interfaces/ISettings";
import Logger from "../utils/BetterLogger";

let settings: ISettings;
const log = Logger.instance;

try {
  settings = require('../../settings.json');
} catch (e) {
  log.warn('dl', 'settings.json not found');
}

export function listDownloaded(ctx: any) {
  return new Promise((resolve, reject) => {
    readdir(pathResolve(settings.download_path || 'download'), (err, files) => {
      if (err) {
        reject({ err: true })
        return
      }
      ctx.body = files.map(file => {
        resolve({
          filename: file,
          path: pathResolve(file),
        })
      })
    })
  })
}
@@ -1,251 +0,0 @@
import { spawn } from 'child_process';
import { from, interval } from 'rxjs';
import { map, throttle } from 'rxjs/operators';
import { Socket } from 'socket.io';
import MemoryDB from '../db/memoryDB';
import { IPayload } from '../interfaces/IPayload';
import { ISettings } from '../interfaces/ISettings';
import { CLIProgress } from '../types';
import Logger from '../utils/BetterLogger';
import Process from './Process';
import { states } from './states';

// settings read from settings.json
let settings: ISettings;
const log = Logger.instance;

const mem_db = new MemoryDB();

try {
  settings = require('../../settings.json');
}
catch (e) {
  new Promise(resolve => setTimeout(resolve, 500))
    .then(() => log.warn('dl', 'settings.json not found, ignore if using Docker'));
}
/**
 * Get download info such as thumbnail, title, resolution and list all formats
 * @param socket
 * @param url
 */
export async function getFormatsAndMetadata(socket: Socket, url: string) {
  let p = new Process(url, [], settings);
  try {
    const formats = await p.getMetadata();
    socket.emit('available-formats', formats)
  } catch (e) {
    log.warn('dl', e)
    socket.emit('progress', {
      status: states.PROG_DONE,
      pid: -1,
    });
  } finally {
    p = null;
  }
}

/**
 * Invoke a new download.
 * Called by the websocket messages listener.
 * @param {Socket} socket current connection socket
 * @param {object} payload frontend download payload
 * @returns
 */
export async function download(socket: Socket, payload: IPayload) {
  if (!payload || payload.url === '' || payload.url === null) {
    socket.emit('progress', { status: states.PROG_DONE });
    return;
  }

  const url = payload.url;
  const params = typeof payload.params !== 'object' ?
    payload.params.split(' ') :
    payload.params;

  const renameTo = payload.renameTo

  const scopedSettings: ISettings = {
    ...settings,
    download_path: payload.path
  }

  let p = new Process(url, params, scopedSettings, renameTo);

  p.start().then(downloader => {
    mem_db.add(downloader)
    displayDownloadMetadata(downloader, socket);
    streamProcess(downloader, socket);
  });

  // GC
  p = null;
}

/**
 * Send via websocket download info "chunk"
 * @param process
 * @param socket
 */
function displayDownloadMetadata(process: Process, socket: Socket) {
  process.getMetadata()
    .then(metadata => {
      socket.emit('metadata', {
        pid: process.getPid(),
        metadata: metadata,
      });
    })
    .catch((e) => {
      socket.emit('progress', {
        status: states.PROG_DONE,
        pid: process.getPid(),
      });
      log.warn('dl', e)
    })
}

/**
 * Stream via websocket download stdoud "chunks"
 * @param process
 * @param socket
 */
function streamProcess(process: Process, socket: Socket) {
  const emitAbort = () => {
    socket.emit('progress', {
      status: states.PROG_DONE,
      pid: process.getPid(),
    });
  }

  from(process.getStdout().removeAllListeners()) // stdout as observable
    .pipe(
      throttle(() => interval(500)), // discard events closer than 500ms
      map(stdout => formatter(String(stdout), process.getPid()))
    )
    .subscribe({
      next: (stdout) => {
        socket.emit('progress', stdout)
      },
      complete: () => {
        process.kill().then(() => {
          emitAbort();
          mem_db.remove(process);
        });
      },
      error: () => {
        emitAbort();
        mem_db.remove(process);
      }
    });
}

/**
 * Retrieve all downloads.
 * If the server has just been launched retrieve the ones saved to the database.
 * If the server is running fetches them from the process pool.
 * @param {Socket} socket current connection socket
 * @returns
 */
export async function retrieveDownload(socket: Socket) {
  // it's a cold restart: the server has just been started with pending
  // downloads, so fetch them from the database and resume.

  // if (coldRestart) {
  //   coldRestart = false;
  //   let downloads = [];
  //   // sanitize
  //   downloads = [...new Set(downloads.filter(el => el !== undefined))];
  //   log.info('dl', `Cold restart, retrieving ${downloads.length} jobs`)
  //   for (const entry of downloads) {
  //     if (entry) {
  //       await download(socket, entry);
  //     }
  //   }
  //   return;
  // }

  // it's an hot-reload the server it's running and the frontend ask for
  // the pending job: retrieve them from the "in-memory database" (ProcessPool)

  const _poolSize = mem_db.size()
  log.info('dl', `Retrieving ${_poolSize} jobs from pool`)
  socket.emit('pending-jobs', _poolSize)

  const it = mem_db.iterator();

  // resume the jobs
  for (const entry of it) {
    const [, process] = entry
    displayDownloadMetadata(process, socket);
    streamProcess(process, socket);
  }
}

/**
 * Abort a specific download if pid is provided, in the other case
 * calls the abortAllDownloads function
 * @see abortAllDownloads
 * @param {Socket} socket currenct connection socket
 * @param {*} args args sent by the frontend. MUST contain the PID.
 * @returns
 */
export function abortDownload(socket: Socket, args: any) {
  if (!args) {
    abortAllDownloads(socket);
    return;
  }
  const { pid } = args;

  spawn('kill', [pid])
    .on('exit', () => {
      socket.emit('progress', {
        status: states.PROC_ABORT,
        process: pid,
      });
      log.warn('dl', `Aborting download ${pid}`);
    });
}

/**
 * Unconditionally kills all yt-dlp process.
 * @param {Socket} socket currenct connection socket
 */
export function abortAllDownloads(socket: Socket) {
  spawn('killall', ['yt-dlp'])
    .on('exit', () => {
      socket.emit('progress', { status: states.PROC_ABORT });
      log.info('dl', 'Aborting downloads');
    });
  mem_db.flush();
}

/**
 * Get pool current size
 */
export function getQueueSize(): number {
  return mem_db.size();
}

/**
 * @private Formats the yt-dlp stdout to a frontend-readable format
 * @param {string} stdout stdout as string
 * @param {number} pid current process id relative to stdout
 * @returns
 */
const formatter = (stdout: string, pid: number) => {
  try {
    const p: CLIProgress = JSON.parse(stdout);
    if (p) {
      return {
        status: states.PROC_DOWNLOAD,
        progress: p.percentage,
        size: p.size,
        dlSpeed: p.speed,
        pid: pid,
      }
    }
  } catch (e) {
    return {
      progress: 0,
    }
  }
}
@@ -1,9 +0,0 @@
/**
 * Possible server states map
 */
export const states = {
  PROC_DOWNLOAD: 'download',
  PROC_MERGING: 'merging',
  PROC_ABORT: 'abort',
  PROG_DONE: 'status_done',
}
@@ -1,39 +0,0 @@
import { stat, createReadStream } from 'fs';
import { lookup } from 'mime-types';

export function streamer(ctx: any, next: any) {
  const filepath = ''
  stat(filepath, (err, stat) => {
    if (err) {
      ctx.response.status = 404;
      ctx.body = { err: 'resource not found' };
      next();
    }
    const fileSize = stat.size;
    const range = ctx.headers.range;
    if (range) {
      const parts = range.replace(/bytes=/, '').split('-');
      const start = parseInt(parts[0], 10);
      const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
      const chunksize = end - start + 1;
      const file = createReadStream(filepath, { start, end });
      const head = {
        'Content-Range': `bytes ${start}-${end}/${fileSize}`,
        'Accept-Ranges': 'bytes',
        'Content-Length': chunksize,
        'Content-Type': lookup(filepath)
      };
      ctx.res.writeHead(206, head);
      file.pipe(ctx.res);
      next();
    } else {
      const head = {
        'Content-Length': fileSize,
        'Content-Type': 'video/mp4'
      };
      ctx.res.writeHead(200, head);
      createReadStream(ctx.params.filepath).pipe(ctx.res);
      next();
    }
  });
}
@@ -1,80 +0,0 @@
/**
 * Represents a download process that spawns yt-dlp.
 */

import Process from "../core/Process";

class MemoryDB {
  private _pool: Map<number, Process>
  private _size: number

  constructor() {
    this.init()
  }

  private init() {
    this._pool = new Map<number, Process>()
    this._size = 0
  }

  /**
   * Pool size getter
   * @returns {number} pool's size
   */
  size(): number {
    return this._size
  }

  /**
   * Add a process to the pool
   * @param {Process} process
   */
  add(process: Process) {
    this._pool.set(process.getPid(), process)
    this._size++
  }

  /**
   * Delete a process from the pool
   * @param {Process} process
   */
  remove(process: Process) {
    if (this._size === 0) return
    this._pool.delete(process.getPid())
    this._size--
  }

  /**
   * Delete a process from the pool by its pid
   * @param {number} pid
   */
  removeByPid(pid: number) {
    this._pool.delete(pid)
  }

  /**
   * get an iterator for the pool
   * @returns {IterableIterator} iterator
   */
  iterator(): IterableIterator<[number, Process]> {
    return this._pool.entries()
  }

  /**
   * get a process by its pid
   * @param {number} pid
   * @returns {Process}
   */
  getByPid(pid: number): Process {
    return this._pool.get(pid)
  }

  /**
   * Clear memory db
   */
  flush() {
    this.init()
  }
}

export default MemoryDB;
server/src/interfaces/IDownloadMetadata.d.ts (vendored, 15 lines)
@@ -1,15 +0,0 @@
export interface IDownloadMetadata {
  formats: Array<IDownloadFormat>,
  best: IDownloadFormat,
  thumbnail: string,
  title: string,
}

export interface IDownloadFormat {
  format_id: string,
  format_note: string,
  fps: number,
  resolution: string,
  vcodec: string,
  acodec: string,
}
server/src/interfaces/IPayload.d.ts (vendored, 13 lines)
@@ -1,13 +0,0 @@
/**
 * Represent a download payload sent by the frontend
 */

export interface IPayload {
  url: string
  params: Array<string> | string
  path: string
  title?: string
  thumbnail?: string
  size?: string
  renameTo?: string
}
server/src/interfaces/IRecord.d.ts (vendored, 14 lines)
@@ -1,14 +0,0 @@
/**
 * Represent a download db record
 */

export interface IRecord {
  uid: string,
  url: string,
  title: string,
  thumbnail: string,
  created: Date,
  size: string,
  pid: number,
  params: string,
}
server/src/interfaces/ISettings.d.ts (vendored, 5 lines)
@@ -1,5 +0,0 @@
export interface ISettings {
  download_path: string,
  cliArgs?: string[],
  port?: number,
}
@@ -1,128 +0,0 @@
import { splash } from './utils/logger';
import { join } from 'path';
import { Server } from 'socket.io';
import { ytdlpUpdater } from './utils/updater';
import {
  download,
  abortDownload,
  retrieveDownload,
  abortAllDownloads,
  getFormatsAndMetadata
} from './core/downloader';
import { getFreeDiskSpace } from './utils/procUtils';
import { listDownloaded } from './core/downloadArchive';
import { createServer } from 'http';
import { streamer } from './core/streamer';
import * as Koa from 'koa';
import * as Router from 'koa-router';
import * as serve from 'koa-static';
import * as cors from '@koa/cors';
import Logger from './utils/BetterLogger';
import { ISettings } from './interfaces/ISettings';
import { directoryTree } from './utils/directoryUtils';

const app = new Koa();
const server = createServer(app.callback());
const router = new Router();
const log = Logger.instance;
const io = new Server(server, {
  cors: {
    origin: "*",
    methods: ["GET", "POST"]
  }
});

let settings: ISettings;

try {
  settings = require('../settings.json');
} catch (e) {
  log.warn('settings', 'file not found, ignore if using Docker');
}

// Koa routing
router.get('/settings', (ctx, next) => {
  ctx.redirect('/')
  next()
})
router.get('/downloaded', (ctx, next) => {
  ctx.redirect('/')
  next()
})
router.get('/archive', (ctx, next) => {
  listDownloaded(ctx)
    .then((res: any) => {
      ctx.body = res
      next()
    })
    .catch((err: any) => {
      ctx.body = err;
      next()
    })
})
router.get('/stream/:filepath', (ctx, next) => {
  streamer(ctx, next)
})
router.get('/tree', (ctx, next) => {
  ctx.body = directoryTree()
  next()
})

// WebSocket listeners
io.on('connection', socket => {
  log.info('ws', `${socket.handshake.address} connected!`)

  socket.on('send-url', (args) => {
    log.info('ws', args?.url)
    download(socket, args)
  })
  socket.on('send-url-format-selection', (args) => {
    log.info('ws', `Formats ${args?.url}`)
    if (args.url) getFormatsAndMetadata(socket, args?.url)
  })
  socket.on('abort', (args) => {
    abortDownload(socket, args)
  })
  socket.on('abort-all', () => {
    abortAllDownloads(socket)
  })
  socket.on('update-bin', () => {
    ytdlpUpdater(socket)
  })
  socket.on('retrieve-jobs', () => {
    retrieveDownload(socket)
  })
  socket.on('disk-space', () => {
    getFreeDiskSpace(socket, settings.download_path || 'downloads/')
  })
})

io.on('disconnect', (socket) => {
  log.info('ws', `${socket.handshake.address} disconnected`)
})

app.use(serve(join(__dirname, 'frontend')))
app.use(cors())
app.use(router.routes())

server.listen(process.env.PORT || settings.port || 3022)

splash()
log.info('http', `Server started on port ${process.env.PORT || settings.port || 3022}`)

/**
 * Cleanup handler
 */
const gracefullyStop = () => {
  log.warn('proc', 'Shutting down...')
  io.disconnectSockets(true)
  server.close()
  log.info('proc', 'Done!')
  process.exit(0)
}

// Intercepts singnals and perform cleanups before shutting down.
process
  .on('SIGTERM', () => gracefullyStop())
  .on('SIGUSR1', () => gracefullyStop())
  .on('SIGUSR2', () => gracefullyStop())
server/src/types/index.d.ts (vendored, 6 lines)
@@ -1,6 +0,0 @@
export type CLIProgress = {
  percentage: string
  speed: string
  size: number
  eta: number
}
@@ -1,59 +0,0 @@
const ansi = {
  reset: '\u001b[0m',
  red: '\u001b[31m',
  cyan: '\u001b[36m',
  green: '\u001b[32m',
  yellow: '\u001b[93m',
  bold: '\u001b[1m',
  normal: '\u001b[22m',
}

class Logger {
  private static _instance: Logger;

  constructor() { };

  static get instance() {
    if (this._instance) {
      return this._instance
    }
    this._instance = new Logger()
    return this._instance;
  }
  /**
   * Print a standard info message
   * @param {string} proto the context/protocol/section outputting the message
   * @param {string} args the acutal message
   */
  public info(proto: string, args: string) {
    process.stdout.write(
      this.formatter(proto, args)
    )
  }
  /**
   * Print a warn message
   * @param {string} proto the context/protocol/section outputting the message
   * @param {string} args the acutal message
   */
  public warn(proto: string, args: string) {
    process.stdout.write(
      `${ansi.yellow}${this.formatter(proto, args)}${ansi.reset}`
    )
  }
  /**
   * Print an error message
   * @param {string} proto the context/protocol/section outputting the message
   * @param {string} args the acutal message
   */
  public err(proto: string, args: string) {
    process.stdout.write(
      `${ansi.red}${this.formatter(proto, args)}${ansi.reset}`
    )
  }

  private formatter(proto: any, args: any) {
    return `${ansi.bold}[${proto}]${ansi.normal}\t${args}\n`
  }
}

export default Logger;
@@ -1,59 +0,0 @@
import { readdirSync, statSync } from "fs";
import { ISettings } from "../interfaces/ISettings";

let settings: ISettings;

class Node {
  public path: string
  public children: Node[]

  constructor(path: string) {
    this.path = path
    this.children = []
  }
}

function buildTreeDFS(rootPath: string, directoryOnly: boolean) {
  const root = new Node(rootPath)
  const stack: Node[] = []
  const flattened: string[] = []

  stack.push(root)
  flattened.push(rootPath)

  while (stack.length) {
    const current = stack.pop()
    if (current) {
      const children = readdirSync(current.path)
      for (const it of children) {
        const childPath = `${current.path}/${it}`
        const childNode = new Node(childPath)

        if (directoryOnly) {
          if (statSync(childPath).isDirectory()) {
            current.children.push(childNode)
            stack.push(childNode)
            flattened.push(childNode.path)
          }
        } else {
          current.children.push(childNode)
          if (statSync(childPath).isDirectory()) {
            stack.push(childNode)
            flattened.push(childNode.path)
          }
        }
      }
    }
  }

  return {
    tree: root,
    flat: flattened
  }
}

try {
  settings = require('../../settings.json');
} catch (e) { }

export const directoryTree = () => buildTreeDFS(settings.download_path || 'downloads', true)
@@ -1,25 +0,0 @@
/**
 * Simplest logger function, takes two argument: first one put between
 * square brackets (the protocol), the second one it's the effective message
 * @param {string} proto protocol
 * @param {string} args message
 */
export const logger = (proto: string, args: string) => {
  console.log(`[${proto}]\t${args}`)
}

/**
 * CLI splash
 */

export const splash = () => {
  const fg = "\u001b[38;2;50;113;168m"
  const reset = "\u001b[0m"
  console.log(`${fg} __ ____ __ __ ______`)
  console.log(" __ __/ /________/ / /__ _ _____ / / / / / / _/")
  console.log(" / // / __/___/ _ / / _ \\ | |/|/ / -_) _ \\/ /_/ // / ")
  console.log(" \\_, /\\__/ \\_,_/_/ .__/ |__,__/\\__/_.__/\\____/___/ ")
  console.log(`/___/ /_/ \n${reset}`)
  console.log(" yt-dlp-webUI - A web-ui for yt-dlp, simply enough")
  console.log("---------------------------------------------------\n")
}
@@ -1,4 +0,0 @@
export const availableParams = [
  '--no-mtime',
  '-x',
]
@@ -1,36 +0,0 @@
import { exec, spawn } from 'child_process';
import { statSync } from 'fs';
import Logger from './BetterLogger';

const log = Logger.instance;

/**
 * Browse /proc in order to find the specific pid
 * @param {number} pid
 * @returns {*} process stats if any
 */
export function existsInProc(pid: number): any {
  try {
    return statSync(`/proc/${pid}`)
  } catch (e) {
    log.warn('proc', `pid ${pid} not found in procfs`)
  }
}

/**
 * Kills a process with a sys-call
 * @param {number} pid the killed process pid
 */
export async function killProcess(pid: number) {
  const res = spawn('kill', [String(pid)])
  res.on('exit', () => {
    log.info('proc', `Successfully killed yt-dlp process, pid: ${pid}`)
  })
}

export function getFreeDiskSpace(socket: any, path: string) {
  const message: string = 'free-space';
  exec(`df -h ${path} | tail -1 | awk '{print $4}'`, (_, stdout) => {
    socket.emit(message, stdout)
  })
}
@@ -1,90 +0,0 @@
import { get } from 'https';
import { rmSync, createWriteStream, chmod } from 'fs';
import { join } from 'path';

// endpoint to github API
const options = {
  hostname: 'api.github.com',
  path: '/repos/yt-dlp/yt-dlp/releases/latest',
  headers: {
    'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:88.0) Gecko/20100101 Firefox/88.0'
  },
  method: 'GET',
  port: 443,
}

/**
 * Build the binary url based on the release tag
 * @param {string} release yt-dlp GitHub release tag
 * @returns {*} the fetch options with the correct tag and headers
 */
function buildDonwloadOptions(release) {
  return {
    hostname: 'github.com',
    path: `/yt-dlp/yt-dlp/releases/download/${release}/yt-dlp`,
    headers: {
      'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:88.0) Gecko/20100101 Firefox/88.0'
    },
    method: 'GET',
    port: 443,
  }
}

/**
 * gets the yt-dlp latest binary URL from GitHub API
 */
async function update() {
  // ensure that the binary has been removed
  try {
    rmSync(join(__dirname, '..', 'core', 'yt-dlp'))
  }
  catch (e) {
    console.log('file not found!')
  }
  // body buffer
  let chunks = []
  get(options, res => {
    // push the http packets chunks into the buffer
    res.on('data', chunk => {
      chunks.push(chunk)
    });
    // the connection has ended so build the body from the buffer
    // parse it as a JSON and get the tag_name
    res.on('end', () => {
      const buffer = Buffer.concat(chunks)
      const release = JSON.parse(buffer.toString())['tag_name']
      console.log('The latest release is:', release)
      // invoke the binary downloader
      downloadBinary(buildDonwloadOptions(release))
    })
  })
}
/**
 * Utility that Pipes the latest binary to a file
 * @param {string} url yt-dlp GitHub release url
 */
function downloadBinary(url) {
  get(url, res => {
    // if it is a redirect follow the url
    if (res.statusCode === 301 || res.statusCode === 302) {
      return downloadBinary(res.headers.location)
    }
    let bin = createWriteStream(join(__dirname, '..', 'core', 'yt-dlp'))
    res.pipe(bin)
    // once the connection has ended make the file executable
    res.on('end', () => {
      chmod(join(__dirname, '..', 'core', 'yt-dlp'), 0o775, err => {
        err ? console.error('failed updating!') : console.log('done!')
      })
    })
  })
}
/**
 * Invoke the yt-dlp update procedure
 * @param {Socket} socket the current connection socket
 */
export function ytdlpUpdater(socket) {
  update().then(() => {
    socket.emit('updated')
  })
}
server/sys/fs.go (Normal file, 20 lines)
@@ -0,0 +1,20 @@
package sys

import (
  "os"

  "golang.org/x/sys/unix"
)

// Package sys contains fs-related operations (unix only)

// FreeSpace gets the bytes available for writing in the download directory
func FreeSpace() (uint64, error) {
  var stat unix.Statfs_t
  wd, err := os.Getwd()
  if err != nil {
    return 0, err
  }
  unix.Statfs(wd+"/downloads", &stat)
  return (stat.Bavail * uint64(stat.Bsize)), nil
}
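A minimal sketch of calling the helper above from another package and converting the result to GiB:

package main

import (
  "fmt"
  "log"

  "github.com/marcopeocchi/yt-dlp-web-ui/server/sys"
)

func main() {
  free, err := sys.FreeSpace()
  if err != nil {
    log.Fatal(err)
  }
  // FreeSpace returns bytes; divide by 2^30 for GiB
  fmt.Printf("free space: %.2f GiB\n", float64(free)/(1<<30))
}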
server/types.go (Normal file, 43 lines)
@@ -0,0 +1,43 @@
package server

type DownloadProgress struct {
  Percentage string  `json:"percentage"`
  Speed      float32 `json:"speed"`
  ETA        int     `json:"eta"`
}

type DownloadInfo struct {
  URL        string `json:"url"`
  Title      string `json:"title"`
  Thumbnail  string `json:"thumbnail"`
  Resolution string `json:"resolution"`
  Size       int32  `json:"filesize_approx"`
  VCodec     string `json:"vcodec"`
  ACodec     string `json:"acodec"`
  Extension  string `json:"ext"`
}

// struct representing the response sent to the client
// as JSON-RPC result field
type ProcessResponse struct {
  Id       string           `json:"id"`
  Progress DownloadProgress `json:"progress"`
  Info     DownloadInfo     `json:"info"`
}

// struct representing the current status of the memoryDB
// used for serialization/persistence reasons
type Session struct {
  Processes []ProcessResponse `json:"processes"`
}

// struct representing the intent to stop a specific process
type AbortRequest struct {
  Id string `json:"id"`
}

// struct representing the intent to start a download
type DownloadRequest struct {
  Url    string   `json:"url"`
  Params []string `json:"params"`
}
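For reference, an illustrative round trip of the request/response types above; all field values are invented:

package server

import (
  "fmt"

  "github.com/goccy/go-json"
)

// exampleTypes is an illustrative sketch, not part of the commit.
func exampleTypes() {
  req := DownloadRequest{
    Url:    "https://example.com/watch?v=xyz",
    Params: []string{"-x", "--no-mtime"},
  }
  b, _ := json.Marshal(req)
  fmt.Println(string(b)) // {"url":"https://example.com/watch?v=xyz","params":["-x","--no-mtime"]}

  res := ProcessResponse{
    Id:       "hypothetical-uuid",
    Progress: DownloadProgress{Percentage: "42.0%", Speed: 1048576, ETA: 35},
  }
  b, _ = json.Marshal(res)
  fmt.Println(string(b))
}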