Compare commits

...

11 Commits

Author SHA1 Message Date
e4362468f7 fixed livestreams not being monitored 2025-03-15 11:08:08 +01:00
6880f60d14 Code refactoring, added clear button 2025-03-13 11:22:17 +01:00
Marco Piovanello 5d4aa7e2a3 Update README.md 2025-03-09 17:07:56 +01:00
Piotr Hajdas 2845196bc7 Add one-click deploy options for AWS, DigitalOcean, and Render in README (#268) 2025-02-20 09:47:11 +01:00
LelieL91 983915f8aa Fixed static file location (#263)
* Update EN, IT langs

Fixed EN lang mistype error
Added missing IT keys + added more translations

* Fixed files location

- livestreams.dat now uses same location as session.data (if specified on config.yml)
- .db.lock now uses same location as database file (if specified on config.yml)

* Update migrate.go

revert edit

---------

Co-authored-by: Marco Piovanello <35533749+marcopiovanello@users.noreply.github.com>
2025-02-07 22:00:11 +01:00
ce2fb13ef2 code refactoring 2025-02-07 10:13:35 +01:00
99069fe5f7 fixed proxy subdir malformed string 2025-02-07 09:45:26 +01:00
761f26b387 subscriptions: prevent downloading already existing file 2025-02-07 09:37:47 +01:00
eec72bb6e2 handle cancellation of scheduled cron jobs 2025-02-06 19:28:03 +01:00
ceb92d066c code refactoring 2025-02-06 19:27:38 +01:00
Marco Piovanello cf74948840 initial support for playlist modifiers (#262)
supported modifiers are --playlist-start, --playlist-end, --playlist-reverse, --max-downloads
2025-02-06 11:30:28 +01:00
25 changed files with 437 additions and 127 deletions

View File

@@ -0,0 +1 @@
docker run -d -p 3033:3033 -v /downloads:/downloads marcobaobao/yt-dlp-webui

View File

@@ -3,6 +3,7 @@
result/
result
dist
+.pnpm-store/
.pnpm-debug.log
node_modules
.env
@@ -20,9 +21,11 @@ cookies.txt
__debug*
ui/
.idea
+.idea/
frontend/.pnp.cjs
frontend/.pnp.loader.mjs
frontend/.yarn/install-state.gz
.db.lock
livestreams.dat
-.git
+.vite/deps
+archive.txt

View File

@@ -28,7 +28,7 @@ docker pull ghcr.io/marcopiovanello/yt-dlp-web-ui:latest
## Community stuff
Feel free to join :)
-[![Discord Banner](https://api.weblutions.com/discord/invite/3Sj9ZZHv/)](https://discord.gg/3Sj9ZZHv)
+[![Discord Banner](https://api.weblutions.com/discord/invite/3Sj9ZZHv/)](https://discord.gg/WRnVWr4y)
## Some screeshots
![image](https://github.com/user-attachments/assets/fc43a3fb-ecf9-449d-b5cb-5d5635020c00)
@@ -115,6 +115,16 @@ services:
restart: unless-stopped
```
+### ⚡ One-Click Deploy
+| Cloud Provider | Deploy Button |
+|----------------|---------------|
+| AWS | <a href="https://deploystack.io/deploy/marcopiovanello-yt-dlp-web-ui?provider=aws&language=cfn"><img src="https://raw.githubusercontent.com/deploystackio/deploy-templates/refs/heads/main/.assets/img/aws.svg" height="38"></a> |
+| DigitalOcean | <a href="https://deploystack.io/deploy/marcopiovanello-yt-dlp-web-ui?provider=do&language=dop"><img src="https://raw.githubusercontent.com/deploystackio/deploy-templates/refs/heads/main/.assets/img/do.svg" height="38"></a> |
+| Render | <a href="https://deploystack.io/deploy/marcopiovanello-yt-dlp-web-ui?provider=rnd&language=rnd"><img src="https://raw.githubusercontent.com/deploystackio/deploy-templates/refs/heads/main/.assets/img/rnd.svg" height="38"></a> |
+<sub>Generated by <a href="https://deploystack.io/c/marcopiovanello-yt-dlp-web-ui" target="_blank">DeployStack.io</a></sub>
## [Prebuilt binaries](https://github.com/marcopiovanello/yt-dlp-web-ui/releases) installation
```sh

View File

@@ -31,7 +31,7 @@ keys:
splashText: No active downloads
archiveTitle: Archive
clipboardAction: Copied URL to clipboard
-playlistCheckbox: Download playlist (it will take time, after submitting you may close this window)
+playlistCheckbox: Download playlist
restartAppMessage: Needs a page reload to take effect
servedFromReverseProxyCheckbox: Is behind a reverse proxy
urlBase: URL base, for reverse proxy support (subdir), defaults to empty
@@ -79,4 +79,5 @@ keys:
The monitor job will be scheduled/triggered by a defined cron expression (defaults to every 5 minutes if left blank).
cronExpressionLabel: 'Cron expression'
editButtonLabel: 'Edit'
newSubscriptionButton: New subscription
+clearCompletedButton: 'Clear completed'

View File

@@ -121,11 +121,14 @@ export const appTitleState = atomWithStorage(
export const serverAddressAndPortState = atom((get) => {
if (get(servedFromReverseProxySubDirState)) {
return `${get(serverAddressState)}/${get(servedFromReverseProxySubDirState)}/`
+.replaceAll('"', '') // TODO: atomWithStorage puts extra double quotes on strings
}
if (get(servedFromReverseProxyState)) {
return `${get(serverAddressState)}`
+.replaceAll('"', '')
}
return `${get(serverAddressState)}:${get(serverPortState)}`
+.replaceAll('"', '')
})
export const serverURL = atom((get) =>
@@ -135,14 +138,12 @@ export const serverURL = atom((get) =>
export const rpcWebSocketEndpoint = atom((get) => {
const proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:'
return `${proto}//${get(serverAddressAndPortState)}/rpc/ws`
-}
-)
+})
export const rpcHTTPEndpoint = atom((get) => {
const proto = window.location.protocol
return `${proto}//${get(serverAddressAndPortState)}/rpc/http`
-}
-)
+})
export const serverSideCookiesState = atom<Promise<string>>(async (get) => await pipe(
ffetch<Readonly<{ cookies: string }>>(`${get(serverURL)}/api/v1/cookies`),
@@ -180,5 +181,4 @@ export const settingsState = atom<SettingsState>((get) => ({
listView: get(listViewState),
servedFromReverseProxy: get(servedFromReverseProxyState),
appTitle: get(appTitleState)
-})
-)
+}))

View File

@@ -1,5 +1,6 @@
import AddCircleIcon from '@mui/icons-material/AddCircle'
import BuildCircleIcon from '@mui/icons-material/BuildCircle'
+import ClearAllIcon from '@mui/icons-material/ClearAll'
import DeleteForeverIcon from '@mui/icons-material/DeleteForever'
import FolderZipIcon from '@mui/icons-material/FolderZip'
import FormatListBulleted from '@mui/icons-material/FormatListBulleted'
@@ -42,6 +43,11 @@ const HomeSpeedDial: React.FC<Props> = ({ onDownloadOpen, onEditorOpen }) => {
tooltipTitle={i18n.t('bulkDownload')}
onClick={() => window.open(`${serverAddr}/archive/bulk?token=${localStorage.getItem('token')}`)}
/>
+<SpeedDialAction
+icon={<ClearAllIcon />}
+tooltipTitle={i18n.t('clearCompletedButton')}
+onClick={() => client.clearCompleted()}
+/>
<SpeedDialAction
icon={<DeleteForeverIcon />}
tooltipTitle={i18n.t('abortAllButton')}

View File

@@ -200,4 +200,11 @@ export class RPCClient {
params: []
})
}
+public clearCompleted() {
+return this.sendHTTP({
+method: 'Service.ClearCompleted',
+params: []
+})
+}
}

View File

@@ -13,6 +13,7 @@ export type RPCMethods =
| "Service.ProgressLivestream" | "Service.ProgressLivestream"
| "Service.KillLivestream" | "Service.KillLivestream"
| "Service.KillAllLivestream" | "Service.KillAllLivestream"
| "Service.ClearCompleted"
export type RPCRequest = { export type RPCRequest = {
method: RPCMethods method: RPCMethods

go.mod
View File

@@ -1,6 +1,6 @@
module github.com/marcopiovanello/yt-dlp-web-ui/v3
-go 1.23
+go 1.24
require (
github.com/asaskevich/EventBus v0.0.0-20200907212545-49d423059eef

server/archive/utils.go
View File

@@ -0,0 +1,58 @@
package archive
import (
"bufio"
"bytes"
"context"
"os"
"os/exec"
"path/filepath"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/config"
)
// Perform a search on the archive.txt file and determine if a download
// has already been done.
func DownloadExists(ctx context.Context, url string) (bool, error) {
cmd := exec.CommandContext(
ctx,
config.Instance().DownloaderPath,
"--print",
"%(extractor)s %(id)s",
url,
)
stdout, err := cmd.Output()
if err != nil {
return false, err
}
extractorAndURL := bytes.Trim(stdout, "\n")
fd, err := os.Open(filepath.Join(config.Instance().Dir(), "archive.txt"))
if err != nil {
return false, err
}
defer fd.Close()
scanner := bufio.NewScanner(fd)
// search linearly for lower memory usage...
// alternatively, a pre-sorted copy of the archive.txt file with hashed values can be loaded in memory
// and a binary search performed on it.
for scanner.Scan() {
if bytes.Equal(scanner.Bytes(), extractorAndURL) {
return true, nil
}
}
// data, err := io.ReadAll(fd)
// if err != nil {
// return false, err
// }
// slices.BinarySearchFunc(data, extractorAndURL, func(a []byte, b []byte) int {
// return hash(a).Compare(hash(b))
// })
return false, nil
}
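The commented-out idea above (trading memory for faster lookups) could look roughly like the sketch below. This is an illustration only, not code from this changeset; LoadArchiveIndex is a hypothetical helper name and the archive path is supplied by the caller.

```go
package archive

import (
	"bufio"
	"os"
)

// LoadArchiveIndex reads archive.txt once into a set keyed by the
// "extractor id" line, so repeated existence checks become O(1) map
// lookups instead of a linear scan per check.
func LoadArchiveIndex(path string) (map[string]struct{}, error) {
	fd, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer fd.Close()

	index := make(map[string]struct{})
	scanner := bufio.NewScanner(fd)
	for scanner.Scan() {
		index[scanner.Text()] = struct{}{}
	}
	return index, scanner.Err()
}
```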

server/common/types.go
View File

@@ -0,0 +1,18 @@
package common
import "time"
// Used to deser the yt-dlp -J output
type DownloadInfo struct {
URL string `json:"url"`
Title string `json:"title"`
Thumbnail string `json:"thumbnail"`
Resolution string `json:"resolution"`
Size int32 `json:"filesize_approx"`
VCodec string `json:"vcodec"`
ACodec string `json:"acodec"`
Extension string `json:"ext"`
OriginalURL string `json:"original_url"`
FileName string `json:"filename"`
CreatedAt time.Time `json:"created_at"`
}
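Since the struct tags mirror the keys of the `yt-dlp -J` output, a caller can decode that output straight into `common.DownloadInfo`. A minimal sketch, assuming `yt-dlp` is on PATH (the server itself uses its configured DownloaderPath instead); the URL is the one used in the livestream test below:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os/exec"

	"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/common"
)

func main() {
	// -J prints a single JSON document describing the video.
	out, err := exec.Command("yt-dlp", "-J", "https://www.youtube.com/watch?v=pwoAyLGOysU").Output()
	if err != nil {
		panic(err)
	}

	var info common.DownloadInfo
	if err := json.Unmarshal(out, &info); err != nil {
		panic(err)
	}
	fmt.Println(info.Title, info.Resolution, info.Extension)
}
```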

View File

@@ -2,6 +2,7 @@ package internal
import (
"container/heap"
+"log/slog"
)
type LoadBalancer struct {
@@ -9,7 +10,29 @@ type LoadBalancer struct {
done chan *Worker
}
-func (b *LoadBalancer) Balance(work chan Process) {
+func NewLoadBalancer(numWorker int) *LoadBalancer {
+var pool Pool
+doneChan := make(chan *Worker)
+for i := range numWorker {
+w := &Worker{
+requests: make(chan *Process, 1),
+index: i,
+}
+go w.Work(doneChan)
+pool = append(pool, w)
+slog.Info("spawned worker", slog.Int("index", i))
+}
+return &LoadBalancer{
+pool: pool,
+done: doneChan,
+}
+}
+func (b *LoadBalancer) Balance(work chan *Process) {
for {
select {
case req := <-work:
@@ -20,7 +43,7 @@ func (b *LoadBalancer) Balance(work chan Process) {
}
}
-func (b *LoadBalancer) dispatch(req Process) {
+func (b *LoadBalancer) dispatch(req *Process) {
w := heap.Pop(&b.pool).(*Worker)
w.requests <- req
w.pending++
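For context, the new constructor and the pointer-typed work channel would be wired together roughly as below. This is a sketch under assumptions: the worker count and the producer feeding the channel are illustrative, not how the server actually starts its pool, and since `server/internal` is an internal package the snippet would have to live inside this module.

```go
package main

import (
	"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/internal"
)

func main() {
	// Spawn a pool of workers; each reports back on the shared done channel.
	lb := internal.NewLoadBalancer(4)
	work := make(chan *internal.Process)

	// Balance pops the least-loaded worker off the heap and hands it the process.
	go lb.Balance(work)

	// A producer enqueues a download (URL taken from the test file in this diff).
	work <- &internal.Process{Url: "https://www.youtube.com/watch?v=pwoAyLGOysU"}

	select {} // keep the sketch alive
}
```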

View File

@@ -1,6 +1,8 @@
package internal
-import "time"
+import (
+"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/common"
+)
// Used to unmarshall yt-dlp progress
type ProgressTemplate struct {
@@ -29,29 +31,14 @@ type DownloadProgress struct {
ETA float64 `json:"eta"`
}
-// Used to deser the yt-dlp -J output
-type DownloadInfo struct {
-URL string `json:"url"`
-Title string `json:"title"`
-Thumbnail string `json:"thumbnail"`
-Resolution string `json:"resolution"`
-Size int32 `json:"filesize_approx"`
-VCodec string `json:"vcodec"`
-ACodec string `json:"acodec"`
-Extension string `json:"ext"`
-OriginalURL string `json:"original_url"`
-FileName string `json:"filename"`
-CreatedAt time.Time `json:"created_at"`
-}
// struct representing the response sent to the client
// as JSON-RPC result field
type ProcessResponse struct {
Id string `json:"id"`
Progress DownloadProgress `json:"progress"`
-Info DownloadInfo `json:"info"`
+Info common.DownloadInfo `json:"info"`
Output DownloadOutput `json:"output"`
Params []string `json:"params"`
}
// struct representing the current status of the memoryDB

View File

@@ -141,26 +141,13 @@ func (l *LiveStream) monitorStartTime(r io.Reader) {
}
}
-const TRIES = 5
+scanner.Scan()
-/*
-if it's waiting a livestream the 5th line will indicate the time to live
-its a dumb and not robust method.
-example:
+for !strings.Contains(scanner.Text(), "Waiting for") {
-[youtube] Extracting URL: https://www.youtube.com/watch?v=IQVbGfVVjgY
-[youtube] IQVbGfVVjgY: Downloading webpage
-[youtube] IQVbGfVVjgY: Downloading ios player API JSON
-[youtube] IQVbGfVVjgY: Downloading web creator player API JSON
-WARNING: [youtube] This live event will begin in 27 minutes. <- STDERR, ignore
-[wait] Waiting for 00:27:15 - Press Ctrl+C to try now <- 5th line
-*/
-for range TRIES {
scanner.Scan()
-if strings.Contains(scanner.Text(), "Waiting for") {
-waitTimeScanner()
-}
}
+waitTimeScanner()
}
func (l *LiveStream) WaitTime() <-chan time.Duration {

View File

@@ -9,15 +9,17 @@ import (
)
func setupTest() {
-config.Instance().DownloaderPath = "yt-dlp"
+config.Instance().DownloaderPath = "build/yt-dlp"
}
+const URL = "https://www.youtube.com/watch?v=pwoAyLGOysU"
func TestLivestream(t *testing.T) {
setupTest()
done := make(chan *LiveStream)
-ls := New("https://www.youtube.com/watch?v=LSm1daKezcE", done, &internal.MessageQueue{}, &internal.MemoryDB{})
+ls := New(URL, done, &internal.MessageQueue{}, &internal.MemoryDB{})
go ls.Start()
time.AfterFunc(time.Second*20, func() {

View File

@@ -76,7 +76,7 @@ func (m *Monitor) Status() LiveStreamStatus {
// Persist the monitor current state to a file.
// The file is located in the configured config directory
func (m *Monitor) Persist() error {
-fd, err := os.Create(filepath.Join(config.Instance().Dir(), "livestreams.dat"))
+fd, err := os.Create(filepath.Join(config.Instance().SessionFilePath, "livestreams.dat"))
if err != nil {
return err
}
@@ -95,7 +95,7 @@ func (m *Monitor) Persist() error {
// Restore a saved state and resume the monitored livestreams
func (m *Monitor) Restore() error {
-fd, err := os.Open(filepath.Join(config.Instance().Dir(), "livestreams.dat"))
+fd, err := os.Open(filepath.Join(config.Instance().SessionFilePath, "livestreams.dat"))
if err != nil {
return err
}

View File

@@ -9,20 +9,18 @@ import (
"strings" "strings"
"time" "time"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/common"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/config" "github.com/marcopiovanello/yt-dlp-web-ui/v3/server/config"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/playlist"
) )
type metadata struct {
Entries []DownloadInfo `json:"entries"`
Count int `json:"playlist_count"`
PlaylistTitle string `json:"title"`
Type string `json:"_type"`
}
func PlaylistDetect(req DownloadRequest, mq *MessageQueue, db *MemoryDB) error { func PlaylistDetect(req DownloadRequest, mq *MessageQueue, db *MemoryDB) error {
params := append(req.Params, "--flat-playlist", "-J")
urlWithParams := append([]string{req.URL}, params...)
var ( var (
downloader = config.Instance().DownloaderPath downloader = config.Instance().DownloaderPath
cmd = exec.Command(downloader, req.URL, "--flat-playlist", "-J") cmd = exec.Command(downloader, urlWithParams...)
) )
stdout, err := cmd.StdoutPipe() stdout, err := cmd.StdoutPipe()
@@ -30,7 +28,7 @@ func PlaylistDetect(req DownloadRequest, mq *MessageQueue, db *MemoryDB) error {
return err return err
} }
var m metadata var m playlist.Metadata
if err := cmd.Start(); err != nil { if err := cmd.Start(); err != nil {
return err return err
@@ -52,13 +50,21 @@ func PlaylistDetect(req DownloadRequest, mq *MessageQueue, db *MemoryDB) error {
return errors.New("probably not a valid URL") return errors.New("probably not a valid URL")
} }
if m.Type == "playlist" { if m.IsPlaylist() {
entries := slices.CompactFunc(slices.Compact(m.Entries), func(a DownloadInfo, b DownloadInfo) bool { entries := slices.CompactFunc(slices.Compact(m.Entries), func(a common.DownloadInfo, b common.DownloadInfo) bool {
return a.URL == b.URL return a.URL == b.URL
}) })
entries = slices.DeleteFunc(entries, func(e common.DownloadInfo) bool {
return strings.Contains(e.URL, "list=")
})
slog.Info("playlist detected", slog.String("url", req.URL), slog.Int("count", len(entries))) slog.Info("playlist detected", slog.String("url", req.URL), slog.Int("count", len(entries)))
if err := playlist.ApplyModifiers(&entries, req.Params); err != nil {
return err
}
for i, meta := range entries { for i, meta := range entries {
// detect playlist title from metadata since each playlist entry will be // detect playlist title from metadata since each playlist entry will be
// treated as an individual download // treated as an individual download
@@ -82,11 +88,13 @@ func PlaylistDetect(req DownloadRequest, mq *MessageQueue, db *MemoryDB) error {
proc.Info.URL = meta.URL proc.Info.URL = meta.URL
time.Sleep(time.Millisecond)
db.Set(proc) db.Set(proc)
mq.Publish(proc) mq.Publish(proc)
proc.Info.CreatedAt = meta.CreatedAt
} }
return nil
} }
proc := &Process{ proc := &Process{

View File

@@ -1,16 +1,24 @@
package internal
+// Pool implements heap.Interface as a standard priority queue
type Pool []*Worker
func (h Pool) Len() int { return len(h) }
-func (h Pool) Less(i, j int) bool { return h[i].index < h[j].index }
+func (h Pool) Less(i, j int) bool { return h[i].pending < h[j].pending }
-func (h Pool) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
-func (h *Pool) Push(x any) { *h = append(*h, x.(*Worker)) }
+func (h Pool) Swap(i, j int) {
+h[i], h[j] = h[j], h[i]
+h[i].index = i
+h[j].index = j
+}
+func (h *Pool) Push(x any) { *h = append(*h, x.(*Worker)) }
func (h *Pool) Pop() any {
old := *h
n := len(old)
x := old[n-1]
+old[n-1] = nil
*h = old[0 : n-1]
return x
}
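Changing Less from index to pending turns the pool into a least-loaded-first queue. A hypothetical test-style sketch of that guarantee (it would have to live inside package internal, since the Worker fields are unexported):

```go
package internal

import (
	"container/heap"
	"testing"
)

// TestPoolPopsLeastLoaded is an illustrative check, not part of this changeset:
// after the Less change, heap.Pop must return the worker with the fewest
// pending downloads rather than the lowest index.
func TestPoolPopsLeastLoaded(t *testing.T) {
	pool := Pool{
		{pending: 3, index: 0},
		{pending: 1, index: 1},
		{pending: 2, index: 2},
	}
	heap.Init(&pool)

	w := heap.Pop(&pool).(*Worker)
	if w.pending != 1 {
		t.Fatalf("expected the least-loaded worker, got pending=%d", w.pending)
	}
}
```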

View File

@@ -19,6 +19,7 @@ import (
"time" "time"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/archiver" "github.com/marcopiovanello/yt-dlp-web-ui/v3/server/archiver"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/common"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/config" "github.com/marcopiovanello/yt-dlp-web-ui/v3/server/config"
) )
@@ -50,7 +51,7 @@ type Process struct {
Livestream bool Livestream bool
AutoRemove bool AutoRemove bool
Params []string Params []string
Info DownloadInfo Info common.DownloadInfo
Progress DownloadProgress Progress DownloadProgress
Output DownloadOutput Output DownloadOutput
proc *os.Process proc *os.Process
@@ -302,7 +303,7 @@ func (p *Process) GetFileName(o *DownloadOutput) error {
func (p *Process) SetPending() { func (p *Process) SetPending() {
// Since video's title isn't available yet, fill in with the URL. // Since video's title isn't available yet, fill in with the URL.
p.Info = DownloadInfo{ p.Info = common.DownloadInfo{
URL: p.Url, URL: p.Url,
Title: p.Url, Title: p.Url,
CreatedAt: time.Now(), CreatedAt: time.Now(),
@@ -334,7 +335,7 @@ func (p *Process) SetMetadata() error {
return err return err
} }
info := DownloadInfo{ info := common.DownloadInfo{
URL: p.Url, URL: p.Url,
CreatedAt: time.Now(), CreatedAt: time.Now(),
} }

View File

@@ -1,9 +1,9 @@
package internal
type Worker struct {
-requests chan Process // downloads to do
+requests chan *Process // downloads to do
pending int // downloads pending
index int // index in the heap
}
func (w *Worker) Work(done chan *Worker) {

View File

@@ -0,0 +1,86 @@
package playlist
import (
"slices"
"strconv"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/common"
)
/*
Applicable modifiers
full | short | description
---------------------------------------------------------------------------------
--playlist-start NUMBER | -I NUMBER: | discard first N entries
--playlist-end NUMBER | -I :NUMBER | discard last N entries
--playlist-reverse | -I ::-1 | self explanatory
--max-downloads NUMBER | | stops after N completed downloads
*/
func ApplyModifiers(entries *[]common.DownloadInfo, args []string) error {
for i, modifier := range args {
switch modifier {
case "--playlist-start":
return playlistStart(i, modifier, args, entries)
case "--playlist-end":
return playlistEnd(i, modifier, args, entries)
case "--max-downloads":
return maxDownloads(i, modifier, args, entries)
case "--playlist-reverse":
slices.Reverse(*entries)
return nil
}
}
return nil
}
func playlistStart(i int, modifier string, args []string, entries *[]common.DownloadInfo) error {
if !guard(i, len(modifier)) {
return nil
}
n, err := strconv.Atoi(args[i+1])
if err != nil {
return err
}
*entries = (*entries)[n:]
return nil
}
func playlistEnd(i int, modifier string, args []string, entries *[]common.DownloadInfo) error {
if !guard(i, len(modifier)) {
return nil
}
n, err := strconv.Atoi(args[i+1])
if err != nil {
return err
}
*entries = (*entries)[:n]
return nil
}
func maxDownloads(i int, modifier string, args []string, entries *[]common.DownloadInfo) error {
if !guard(i, len(modifier)) {
return nil
}
n, err := strconv.Atoi(args[i+1])
if err != nil {
return err
}
*entries = (*entries)[0:n]
return nil
}
func guard(i, len int) bool { return i+1 < len-1 }
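For illustration, a short usage sketch of how ApplyModifiers slices an already-flattened playlist; the entry URLs are placeholders, and the output comment follows the slicing done by playlistStart above:

```go
package main

import (
	"fmt"

	"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/common"
	"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/playlist"
)

func main() {
	entries := []common.DownloadInfo{
		{URL: "https://example.com/video1"},
		{URL: "https://example.com/video2"},
		{URL: "https://example.com/video3"},
		{URL: "https://example.com/video4"},
	}

	// --playlist-start 2 slices off the first two entries (entries[2:]).
	if err := playlist.ApplyModifiers(&entries, []string{"--playlist-start", "2"}); err != nil {
		fmt.Println(err)
		return
	}

	for _, e := range entries {
		fmt.Println(e.URL) // prints video3 and video4
	}
}
```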

server/playlist/types.go
View File

@@ -0,0 +1,12 @@
package playlist
import "github.com/marcopiovanello/yt-dlp-web-ui/v3/server/common"
type Metadata struct {
Entries []common.DownloadInfo `json:"entries"`
Count int `json:"playlist_count"`
PlaylistTitle string `json:"title"`
Type string `json:"_type"`
}
func (m *Metadata) IsPlaylist() bool { return m.Type == "playlist" }

View File

@@ -183,6 +183,7 @@ func (s *Service) KillAll(args NoArgs, killed *string) error {
}
slog.Info("succesfully killed process", slog.String("id", proc.Id))
+proc = nil // gc helper
}
return nil
@@ -195,6 +196,35 @@ func (s *Service) Clear(args string, killed *string) error {
return nil
}
+// Removes completed processes
+func (s *Service) ClearCompleted(cleared *string) error {
+var (
+keys = s.db.Keys()
+removeFunc = func(p *internal.Process) error {
+defer s.db.Delete(p.Id)
+if p.Progress.Status != internal.StatusCompleted {
+return nil
+}
+return p.Kill()
+}
+)
+for _, key := range *keys {
+proc, err := s.db.Get(key)
+if err != nil {
+return err
+}
+if err := removeFunc(proc); err != nil {
+return err
+}
+}
+return nil
+}
// FreeSpace gets the available from package sys util
func (s *Service) FreeSpace(args NoArgs, free *uint64) error {
freeSpace, err := sys.FreeSpace()

View File

@@ -53,6 +53,7 @@ func toDB(dto *domain.Subscription) data.Subscription {
// Delete implements domain.Service.
func (s *Service) Delete(ctx context.Context, id string) error {
+s.runner.StopTask(id)
return s.r.Delete(ctx, id)
}

View File

@@ -7,9 +7,9 @@ import (
"os/exec" "os/exec"
"path/filepath" "path/filepath"
"regexp" "regexp"
"strings"
"time" "time"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/archive"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/config" "github.com/marcopiovanello/yt-dlp-web-ui/v3/server/config"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/internal" "github.com/marcopiovanello/yt-dlp-web-ui/v3/server/internal"
"github.com/marcopiovanello/yt-dlp-web-ui/v3/server/subscription/domain" "github.com/marcopiovanello/yt-dlp-web-ui/v3/server/subscription/domain"
@@ -19,10 +19,12 @@ import (
type TaskRunner interface {
Submit(subcription *domain.Subscription) error
Spawner(ctx context.Context)
+StopTask(id string) error
Recoverer()
}
-type taskPair struct {
+type monitorTask struct {
+Done chan struct{}
Schedule cron.Schedule
Subscription *domain.Subscription
}
@@ -31,21 +33,22 @@ type CronTaskRunner struct {
mq *internal.MessageQueue
db *internal.MemoryDB
-tasks chan taskPair
+tasks chan monitorTask
errors chan error
+running map[string]*monitorTask
}
func NewCronTaskRunner(mq *internal.MessageQueue, db *internal.MemoryDB) TaskRunner {
return &CronTaskRunner{
mq: mq,
db: db,
-tasks: make(chan taskPair),
+tasks: make(chan monitorTask),
errors: make(chan error),
+running: make(map[string]*monitorTask),
}
}
-const commandTemplate = "-I1 --flat-playlist --print webpage_url $1"
var argsSplitterRe = regexp.MustCompile(`(?mi)[^\s"']+|"([^"]*)"|'([^']*)'`)
func (t *CronTaskRunner) Submit(subcription *domain.Subscription) error {
@@ -54,7 +57,8 @@ func (t *CronTaskRunner) Submit(subcription *domain.Subscription) error {
return err
}
-job := taskPair{
+job := monitorTask{
+Done: make(chan struct{}),
Schedule: schedule,
Subscription: subcription,
}
@@ -64,54 +68,110 @@ func (t *CronTaskRunner) Submit(subcription *domain.Subscription) error {
return nil
}
+// Handles the entire lifecycle of a monitor job.
func (t *CronTaskRunner) Spawner(ctx context.Context) {
-for task := range t.tasks {
+for req := range t.tasks {
+t.running[req.Subscription.Id] = &req // keep track of the current job
go func() {
+ctx, cancel := context.WithCancel(ctx) // inject into the job's context a cancellation signal
+fetcherEvents := t.doFetch(ctx, &req) // retrieve the channel of events of the job
for {
-slog.Info("fetching latest video for channel", slog.String("channel", task.Subscription.URL))
+select {
+case <-req.Done:
-fetcherParams := strings.Split(strings.Replace(commandTemplate, "$1", task.Subscription.URL, 1), " ")
+slog.Info("stopping cron job and removing schedule", slog.String("url", req.Subscription.URL))
+cancel()
-cmd := exec.CommandContext(
-ctx,
-config.Instance().DownloaderPath,
-fetcherParams...,
-)
-stdout, err := cmd.Output()
-if err != nil {
-t.errors <- err
return
+case <-fetcherEvents:
+slog.Info("finished monitoring channel", slog.String("url", req.Subscription.URL))
}
-latestChannelURL := string(bytes.Trim(stdout, "\n"))
-p := &internal.Process{
-Url: latestChannelURL,
-Params: append(argsSplitterRe.FindAllString(task.Subscription.Params, 1), []string{
-"--download-archive",
-filepath.Join(config.Instance().Dir(), "archive.txt"),
-}...),
-AutoRemove: true,
-}
-t.db.Set(p)
-t.mq.Publish(p)
-nextSchedule := time.Until(task.Schedule.Next(time.Now()))
-slog.Info(
-"cron task runner next schedule",
-slog.String("url", task.Subscription.URL),
-slog.Any("duration", nextSchedule),
-)
-time.Sleep(nextSchedule)
}
}()
}
}
-func (t *CronTaskRunner) Recoverer() {
+// Stop a currently scheduled job
-panic("Unimplemented")
+func (t *CronTaskRunner) StopTask(id string) error {
+task := t.running[id]
+if task != nil {
+t.running[id].Done <- struct{}{}
+delete(t.running, id)
+}
+return nil
+}
+// Start a fetcher and notify on a channel when a fetcher has completed
+func (t *CronTaskRunner) doFetch(ctx context.Context, req *monitorTask) <-chan struct{} {
+completed := make(chan struct{})
+// generator func
+go func() {
+for {
+sleepFor := t.fetcher(ctx, req)
+completed <- struct{}{}
+time.Sleep(sleepFor)
+}
+}()
+return completed
+}
+// Perform the retrieval of the latest video of the channel.
+// Returns a time.Duration containing the amount of time to the next schedule.
+func (t *CronTaskRunner) fetcher(ctx context.Context, req *monitorTask) time.Duration {
+slog.Info("fetching latest video for channel", slog.String("channel", req.Subscription.URL))
+nextSchedule := time.Until(req.Schedule.Next(time.Now()))
+cmd := exec.CommandContext(
+ctx,
+config.Instance().DownloaderPath,
+"-I1",
+"--flat-playlist",
+"--print", "webpage_url",
+req.Subscription.URL,
+)
+stdout, err := cmd.Output()
+if err != nil {
+t.errors <- err
+return time.Duration(0)
+}
+latestVideoURL := string(bytes.Trim(stdout, "\n"))
+// if the download exists there's no point in sending it into the message queue.
+exists, err := archive.DownloadExists(ctx, latestVideoURL)
+if exists && err == nil {
+return nextSchedule
+}
+p := &internal.Process{
+Url: latestVideoURL,
+Params: append(
+argsSplitterRe.FindAllString(req.Subscription.Params, 1),
+[]string{
+"--break-on-existing",
+"--download-archive",
+filepath.Join(config.Instance().Dir(), "archive.txt"),
+}...),
+AutoRemove: true,
+}
+t.db.Set(p) // give it an id
+t.mq.Publish(p) // send it to the message queue waiting to be processed
+slog.Info(
+"cron task runner next schedule",
+slog.String("url", req.Subscription.URL),
+slog.Any("duration", nextSchedule),
+)
+return nextSchedule
+}
+func (t *CronTaskRunner) Recoverer() {
+panic("unimplemented")
} }
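The new lifecycle is essentially one done channel per subscription plus a context derived per job: StopTask pushes on the channel and the goroutine cancels and exits. A stripped-down, self-contained sketch of that cancellation pattern (not the repository's code; a ticker stands in for the cron schedule and the printed messages are placeholders):

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// runJob models the monitorTask loop: fetch on every tick, stop when the
// done channel is signalled (which is what StopTask does).
func runJob(ctx context.Context, done <-chan struct{}) {
	ctx, cancel := context.WithCancel(ctx)
	defer cancel()

	tick := time.NewTicker(1 * time.Second) // stand-in for the cron schedule
	defer tick.Stop()

	for {
		select {
		case <-done:
			fmt.Println("stopping job")
			return
		case <-tick.C:
			fmt.Println("fetch latest video") // stand-in for fetcher()
		case <-ctx.Done():
			return
		}
	}
}

func main() {
	done := make(chan struct{})
	go runJob(context.Background(), done)

	time.Sleep(3 * time.Second)
	done <- struct{}{} // equivalent of StopTask for a running subscription
	time.Sleep(100 * time.Millisecond)
}
```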