Compare commits
10 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 00bacf5c41 | |
| | 01c327d308 | |
| | 83f6444df2 | |
| | e09c52dd05 | |
| | 3da81affb3 | |
| | a73b8d362e | |
| | 205f2e5cdf | |
| | 294ad29bf2 | |
| | d336b92e46 | |
| | 2f02293a52 | |
@@ -1,18 +0,0 @@
-dist
-package-lock.json
-pnpm-lock.yaml
-.pnpm-debug.log
-node_modules
-.env
-*.mp4
-*.ytdl
-*.part
-*.db
-downloads
-.DS_Store
-build/
-yt-dlp-webui
-session.dat
-config.yml
-cookies.txt
-examples/
Dockerfile
@@ -25,14 +25,16 @@ RUN CGO_ENABLED=0 GOOS=linux go build -o yt-dlp-webui
 # -----------------------------------------------------------------------------

 # dependencies ----------------------------------------------------------------
-FROM alpine:edge
+FROM cgr.dev/chainguard/wolfi-base

+RUN apk update && \
+    apk add ffmpeg ca-certificates python3 py3-pip

 VOLUME /downloads /config

-WORKDIR /app
+RUN python3 -m pip install yt-dlp

-RUN apk update && \
-    apk add psmisc ffmpeg yt-dlp --no-cache
+WORKDIR /app

 COPY --from=build /usr/src/yt-dlp-webui/yt-dlp-webui /app

@@ -49,6 +49,7 @@ languages:
     templatesEditorContentLabel: Template content
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
   german:
     urlInput: Video URL
     statusTitle: Status
@@ -98,6 +99,7 @@ languages:
     templatesEditorContentLabel: Vorlagen Inhalt
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
   french:
     urlInput: URL vidéo de YouTube ou d'un autre service pris en charge
     statusTitle: Statut
@@ -149,6 +151,7 @@ languages:
     templatesEditorContentLabel: Template content
     logsTitle: 'Logs'
    awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
   italian:
     urlInput: URL Video (uno per linea)
     statusTitle: Stato
@@ -197,6 +200,7 @@ languages:
     templatesEditorContentLabel: Contentunto template
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
   chinese:
     urlInput: 视频 URL
     statusTitle: 状态
@@ -246,6 +250,7 @@ languages:
     templatesEditorContentLabel: 模板内容
     logsTitle: '日志'
     awaitingLogs: '正在等待日志…'
+    bulkDownload: 'Download files in a zip archive'
   spanish:
     urlInput: URL de YouTube u otro servicio compatible
     statusTitle: Estado
@@ -293,6 +298,7 @@ languages:
     templatesEditorContentLabel: Template content
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
   russian:
     urlInput: URL-адрес YouTube или любого другого поддерживаемого сервиса
     statusTitle: Статус
@@ -340,6 +346,7 @@ languages:
     templatesEditorContentLabel: Template content
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
   korean:
     urlInput: YouTube나 다른 지원되는 사이트의 URL
     statusTitle: 상태
@@ -387,6 +394,7 @@ languages:
     templatesEditorContentLabel: Template content
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
  japanese:
     urlInput: YouTubeまたはサポート済み動画のURL
     statusTitle: 状態
@@ -435,6 +443,7 @@ languages:
     templatesEditorContentLabel: Template content
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
   catalan:
     urlInput: URL de YouTube o d'un altre servei compatible
     statusTitle: Estat
@@ -482,6 +491,7 @@ languages:
     templatesEditorContentLabel: Template content
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
   ukrainian:
     urlInput: URL-адреса YouTube або будь-якого іншого підтримуваного сервісу
     statusTitle: Статус
@@ -529,6 +539,7 @@ languages:
     templatesEditorContentLabel: Template content
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
   polish:
     urlInput: Adres URL YouTube lub innej obsługiwanej usługi
     statusTitle: Status
@@ -576,3 +587,4 @@ languages:
     templatesEditorContentLabel: Template content
     logsTitle: 'Logs'
     awaitingLogs: 'Awaiting logs...'
+    bulkDownload: 'Download files in a zip archive'
@@ -1,5 +1,4 @@
 import { atom, selector } from 'recoil'
-import { RPCResult } from '../types'
 import { activeDownloadsState } from './downloads'

 export const loadingAtom = atom({
@@ -7,11 +6,6 @@ export const loadingAtom = atom({
   default: true
 })

-export const optimisticDownloadsState = atom<RPCResult[]>({
-  key: 'optimisticDownloadsState',
-  default: []
-})
-
 export const totalDownloadSpeedState = selector<number>({
   key: 'totalDownloadSpeedState',
   get: ({ get }) => get(activeDownloadsState)
@@ -16,8 +16,10 @@ import {
   Typography
 } from '@mui/material'
 import { useCallback } from 'react'
+import { useRecoilValue } from 'recoil'
+import { serverURL } from '../atoms/settings'
 import { RPCResult } from '../types'
-import { ellipsis, formatSpeedMiB, mapProcessStatus, formatSize } from '../utils'
+import { base64URLEncode, ellipsis, formatSize, formatSpeedMiB, mapProcessStatus } from '../utils'

 type Props = {
   download: RPCResult
@@ -35,6 +37,8 @@ const Resolution: React.FC<{ resolution?: string }> = ({ resolution }) => {
 }

 const DownloadCard: React.FC<Props> = ({ download, onStop, onCopy }) => {
+  const serverAddr = useRecoilValue(serverURL)
+
   const isCompleted = useCallback(
     () => download.progress.percentage === '-1',
     [download.progress.percentage]
@@ -47,6 +51,16 @@ const DownloadCard: React.FC<Props> = ({ download, onStop, onCopy }) => {
     [download.progress.percentage, isCompleted]
   )

+  const viewFile = (path: string) => {
+    const encoded = base64URLEncode(path)
+    window.open(`${serverAddr}/archive/v/${encoded}?token=${localStorage.getItem('token')}`)
+  }
+
+  const downloadFile = (path: string) => {
+    const encoded = base64URLEncode(path)
+    window.open(`${serverAddr}/archive/d/${encoded}?token=${localStorage.getItem('token')}`)
+  }
+
   return (
     <Card>
       <CardActionArea onClick={() => {
@@ -109,6 +123,26 @@ const DownloadCard: React.FC<Props> = ({ download, onStop, onCopy }) => {
         >
           {isCompleted() ? "Clear" : "Stop"}
         </Button>
+        {isCompleted() &&
+          <>
+            <Button
+              variant="contained"
+              size="small"
+              color="primary"
+              onClick={() => downloadFile(download.output.savedFilePath)}
+            >
+              Download
+            </Button>
+            <Button
+              variant="contained"
+              size="small"
+              color="primary"
+              onClick={() => viewFile(download.output.savedFilePath)}
+            >
+              View
+            </Button>
+          </>
+        }
       </CardActions>
     </Card>
   )
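The new View and Download buttons above open `/archive/v/{id}` and `/archive/d/{id}` with the saved file path passed as a URL-safe base64 string; the `base64URLEncode` helper itself is not part of this diff. On the Go side the handlers file already imports `encoding/base64`, so the decode step presumably looks something like this sketch (hypothetical: the route parameter name `id` and the use of `base64.URLEncoding` rather than the padding-free `RawURLEncoding` are assumptions, not taken from this diff):

```go
package handlers

import (
	"encoding/base64"
	"net/http"

	"github.com/go-chi/chi/v5"
)

// decodeArchivePath is a hypothetical helper showing how /archive/v/{id} and
// /archive/d/{id} could recover the file path: the frontend sends it as a
// URL-safe base64 string in the {id} route parameter.
func decodeArchivePath(r *http.Request) (string, error) {
	encoded := chi.URLParam(r, "id") // "id" mirrors r.Get("/d/{id}", ...)
	decoded, err := base64.URLEncoding.DecodeString(encoded)
	if err != nil {
		return "", err
	}
	return string(decoded), nil
}
```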
@@ -40,7 +40,7 @@ import { useI18n } from '../hooks/useI18n'
 import { useRPC } from '../hooks/useRPC'
 import { CliArguments } from '../lib/argsParser'
 import type { DLMetadata } from '../types'
-import { isValidURL, toFormatArgs } from '../utils'
+import { toFormatArgs } from '../utils'
 import ExtraDownloadOptions from './ExtraDownloadOptions'

 const Transition = forwardRef(function Transition(
@@ -166,7 +166,6 @@ const DownloadDialog: FC<Props> = ({ open, onClose, onDownloadStart }) => {

     file
       .split('\n')
-      .filter(u => isValidURL(u))
       .forEach(u => sendUrl(u))
   }

@@ -1,14 +1,15 @@
 import DeleteIcon from '@mui/icons-material/Delete'
 import DownloadIcon from '@mui/icons-material/Download'
 import DownloadDoneIcon from '@mui/icons-material/DownloadDone'
+import FileDownloadIcon from '@mui/icons-material/FileDownload'
+import SmartDisplayIcon from '@mui/icons-material/SmartDisplay'
 import StopCircleIcon from '@mui/icons-material/StopCircle'
 import {
   Box,
-  Grid,
+  ButtonGroup,
   IconButton,
   LinearProgress,
   LinearProgressProps,
-  Paper,
   Table,
   TableBody,
   TableCell,
@@ -19,8 +20,9 @@ import {
 } from "@mui/material"
 import { useRecoilValue } from 'recoil'
 import { activeDownloadsState } from '../atoms/downloads'
+import { serverURL } from '../atoms/settings'
 import { useRPC } from '../hooks/useRPC'
-import { formatSize, formatSpeedMiB } from "../utils"
+import { base64URLEncode, formatSize, formatSpeedMiB } from "../utils"

 function LinearProgressWithLabel(props: LinearProgressProps & { value: number }) {
   return (
@@ -38,12 +40,23 @@ function LinearProgressWithLabel(props: LinearProgressProps & { value: number })
 }

 const DownloadsTableView: React.FC = () => {
+  const serverAddr = useRecoilValue(serverURL)
   const downloads = useRecoilValue(activeDownloadsState)

   const { client } = useRPC()

   const abort = (id: string) => client.kill(id)

+  const viewFile = (path: string) => {
+    const encoded = base64URLEncode(path)
+    window.open(`${serverAddr}/archive/v/${encoded}?token=${localStorage.getItem('token')}`)
+  }
+
+  const downloadFile = (path: string) => {
+    const encoded = base64URLEncode(path)
+    window.open(`${serverAddr}/archive/d/${encoded}?token=${localStorage.getItem('token')}`)
+  }
+
   return (
     <TableContainer
       sx={{ minHeight: '80vh', mt: 4 }}
@@ -108,6 +121,7 @@ const DownloadsTableView: React.FC = () => {
                 {new Date(download.info.created_at).toLocaleString()}
               </TableCell>
               <TableCell align="right">
+                <ButtonGroup>
                 <IconButton
                   size="small"
                   onClick={() => abort(download.id)}
@@ -115,6 +129,23 @@ const DownloadsTableView: React.FC = () => {
                   {download.progress.percentage === '-1' ? <DeleteIcon /> : <StopCircleIcon />}

                 </IconButton>
+                {download.progress.percentage === '-1' &&
+                  <>
+                    <IconButton
+                      size="small"
+                      onClick={() => viewFile(download.output.savedFilePath)}
+                    >
+                      <SmartDisplayIcon />
+                    </IconButton>
+                    <IconButton
+                      size="small"
+                      onClick={() => downloadFile(download.output.savedFilePath)}
+                    >
+                      <FileDownloadIcon />
+                    </IconButton>
+                  </>
+                }
+                </ButtonGroup>
               </TableCell>
             </TableRow>
           ))
@@ -1,6 +1,6 @@
 import { Suspense, useState } from 'react'
 import { useRecoilState } from 'recoil'
-import { loadingAtom, optimisticDownloadsState } from '../atoms/ui'
+import { loadingAtom } from '../atoms/ui'
 import { useToast } from '../hooks/toast'
 import DownloadDialog from './DownloadDialog'
 import HomeSpeedDial from './HomeSpeedDial'
@@ -8,32 +8,12 @@ import TemplatesEditor from './TemplatesEditor'

 const HomeActions: React.FC = () => {
   const [, setIsLoading] = useRecoilState(loadingAtom)
-  const [optimistic, setOptimistic] = useRecoilState(optimisticDownloadsState)

   const [openDownload, setOpenDownload] = useState(false)
   const [openEditor, setOpenEditor] = useState(false)

   const { pushMessage } = useToast()

-  // it's stupid because it will be overriden on the next server tick
-  const handleOptimisticUpdate = (url: string) => setOptimistic([
-    ...optimistic, {
-      id: url,
-      info: {
-        created_at: new Date().toISOString(),
-        thumbnail: '',
-        title: url,
-        url: url
-      },
-      progress: {
-        eta: Number.MAX_SAFE_INTEGER,
-        percentage: '0%',
-        process_status: 0,
-        speed: 0
-      }
-    }
-  ])
-
   return (
     <>
       <HomeSpeedDial
@@ -49,7 +29,6 @@ const HomeActions: React.FC = () => {
         }}
         // TODO: handle optimistic UI update
         onDownloadStart={(url) => {
-          handleOptimisticUpdate(url)
           pushMessage(`Requested ${url}`, 'info')
           setOpenDownload(false)
           setIsLoading(true)
@@ -3,13 +3,14 @@ import BuildCircleIcon from '@mui/icons-material/BuildCircle'
 import DeleteForeverIcon from '@mui/icons-material/DeleteForever'
 import FormatListBulleted from '@mui/icons-material/FormatListBulleted'
 import ViewAgendaIcon from '@mui/icons-material/ViewAgenda'
+import FolderZipIcon from '@mui/icons-material/FolderZip'
 import {
   SpeedDial,
   SpeedDialAction,
   SpeedDialIcon
 } from '@mui/material'
-import { useRecoilState } from 'recoil'
-import { listViewState } from '../atoms/settings'
+import { useRecoilState, useRecoilValue } from 'recoil'
+import { listViewState, serverURL } from '../atoms/settings'
 import { useI18n } from '../hooks/useI18n'
 import { useRPC } from '../hooks/useRPC'

@@ -19,6 +20,7 @@ type Props = {
 }

 const HomeSpeedDial: React.FC<Props> = ({ onDownloadOpen, onEditorOpen }) => {
+  const serverAddr = useRecoilValue(serverURL)
   const [listView, setListView] = useRecoilState(listViewState)

   const { i18n } = useI18n()
@@ -37,6 +39,11 @@ const HomeSpeedDial: React.FC<Props> = ({ onDownloadOpen, onEditorOpen }) => {
         tooltipTitle={listView ? 'Card view' : 'Table view'}
         onClick={() => setListView(state => !state)}
       />
+      <SpeedDialAction
+        icon={<FolderZipIcon />}
+        tooltipTitle={i18n.t('bulkDownload')}
+        onClick={() => window.open(`${serverAddr}/archive/bulk`)}
+      />
       <SpeedDialAction
         icon={<DeleteForeverIcon />}
         tooltipTitle={i18n.t('abortAllButton')}
@@ -45,6 +45,9 @@ export type RPCResult = Readonly<{
   id: string
   progress: DownloadProgress
   info: DownloadInfo
+  output: {
+    savedFilePath: string
+  }
 }>

 export type RPCParams = {
@@ -20,17 +20,10 @@ export function validateDomain(url: string): boolean {
   return urlRegex.test(url) || name === 'localhost' && slugRegex.test(slug)
 }

-export function isValidURL(url: string): boolean {
-  let urlRegex = /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)/
-  return urlRegex.test(url)
-}
-
-export function ellipsis(str: string, lim: number): string {
-  if (str) {
-    return str.length > lim ? `${str.substring(0, lim)}...` : str
-  }
-  return ''
-}
+export const ellipsis = (str: string, lim: number) =>
+  str.length > lim
+    ? `${str.substring(0, lim)}...`
+    : str

 export function toFormatArgs(codes: string[]): string {
   if (codes.length > 1) {
@@ -1,6 +1,8 @@
 package handlers

 import (
+  "archive/zip"
+  "bytes"
   "encoding/base64"
   "encoding/json"
   "io"
@@ -8,12 +10,14 @@ import (
   "net/url"
   "os"
   "path/filepath"
+  "slices"
   "sort"
   "strings"
   "time"

   "github.com/go-chi/chi/v5"
   "github.com/marcopeocchi/yt-dlp-web-ui/server/config"
+  "github.com/marcopeocchi/yt-dlp-web-ui/server/internal"
   "github.com/marcopeocchi/yt-dlp-web-ui/server/utils"
 )

@@ -194,3 +198,54 @@ func DownloadFile(w http.ResponseWriter, r *http.Request) {

   w.WriteHeader(http.StatusUnauthorized)
 }
+
+func BulkDownload(mdb *internal.MemoryDB) http.HandlerFunc {
+  return func(w http.ResponseWriter, r *http.Request) {
+    ps := slices.DeleteFunc(*mdb.All(), func(e internal.ProcessResponse) bool {
+      return e.Progress.Status != internal.StatusCompleted
+    })
+
+    if len(ps) == 0 {
+      return
+    }
+
+    var (
+      buff      bytes.Buffer
+      zipWriter = zip.NewWriter(&buff)
+    )
+
+    for _, p := range ps {
+      wr, err := zipWriter.Create(filepath.Base(p.Output.SavedFilePath))
+      if err != nil {
+        http.Error(w, err.Error(), http.StatusInternalServerError)
+        return
+      }
+
+      fd, err := os.Open(p.Output.SavedFilePath)
+      if err != nil {
+        http.Error(w, err.Error(), http.StatusInternalServerError)
+        return
+      }
+
+      _, err = io.Copy(wr, fd)
+      if err != nil {
+        http.Error(w, err.Error(), http.StatusInternalServerError)
+        return
+      }
+    }
+
+    err := zipWriter.Close()
+    if err != nil {
+      http.Error(w, err.Error(), http.StatusInternalServerError)
+      return
+    }
+
+    w.Header().Add(
+      "Content-Disposition",
+      "inline; filename=download-archive-"+time.Now().Format(time.RFC3339)+".zip",
+    )
+    w.Header().Set("Content-Type", "application/zip")
+
+    io.Copy(w, &buff)
+  }
+}
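The new `BulkDownload` handler filters the in-memory database down to completed processes and streams every saved file into a single zip built in memory. A minimal sketch of a client for the new endpoint follows; the base URL, port, and output filename are assumptions for illustration, not taken from this diff:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Assumed base URL; adjust host/port to the actual deployment and append
	// a ?token=... query parameter if the archive routes require authentication.
	resp, err := http.Get("http://localhost:3033/archive/bulk")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		panic(fmt.Sprintf("unexpected status: %s", resp.Status))
	}

	// Persist the zip produced by BulkDownload.
	out, err := os.Create("download-archive.zip")
	if err != nil {
		panic(err)
	}
	defer out.Close()

	if _, err := io.Copy(out, resp.Body); err != nil {
		panic(err)
	}
}
```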
@@ -17,9 +17,9 @@ type LoginRequest struct {
 }

 func Login(w http.ResponseWriter, r *http.Request) {
-  req := new(LoginRequest)
-  err := json.NewDecoder(r.Body).Decode(req)
-  if err != nil {
+  var req LoginRequest
+  if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
     http.Error(w, err.Error(), http.StatusInternalServerError)
     return
   }
@@ -2,6 +2,21 @@ package internal

 import "time"

+// Used to unmarshall yt-dlp progress
+type ProgressTemplate struct {
+  Percentage string  `json:"percentage"`
+  Speed      float32 `json:"speed"`
+  Size       string  `json:"size"`
+  Eta        float32 `json:"eta"`
+}
+
+// Defines where and how the download needs to be saved
+type DownloadOutput struct {
+  Path          string
+  Filename      string
+  SavedFilePath string `json:"savedFilePath"`
+}
+
 // Progress for the Running call
 type DownloadProgress struct {
   Status int `json:"process_status"`
@@ -79,6 +94,7 @@ type SetCookiesRequest struct {
   Cookies string `json:"cookies"`
 }

+// represents a user defined collection of yt-dlp arguments
 type CustomTemplate struct {
   Id   string `json:"id"`
   Name string `json:"name"`
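`ProgressTemplate` (moved here from the process file) mirrors the JSON object yt-dlp is asked to print for each progress line, and `Process.Start` decodes every sampled stdout line into it. A small self-contained sketch of that decode step; the sample line is made up for illustration, since the real output depends on the progress template the server passes to yt-dlp:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Copied from the diff: the shape a templated yt-dlp progress line decodes into.
type ProgressTemplate struct {
	Percentage string  `json:"percentage"`
	Speed      float32 `json:"speed"`
	Size       string  `json:"size"`
	Eta        float32 `json:"eta"`
}

func main() {
	// Hypothetical progress line; field values are illustrative only.
	line := []byte(`{"percentage":"42.0%","speed":1048576,"size":"120MiB","eta":35}`)

	var progress ProgressTemplate
	if err := json.Unmarshal(line, &progress); err != nil {
		fmt.Println("skip malformed line:", err)
		return
	}

	fmt.Printf("%s at %.1f MiB/s, ETA %.0fs\n",
		progress.Percentage, progress.Speed/1024/1024, progress.Eta)
}
```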
@@ -3,7 +3,6 @@ package internal
 import (
   "encoding/gob"
   "errors"
-  "fmt"
   "log/slog"
   "os"
   "path/filepath"
@@ -35,33 +34,6 @@ func (m *MemoryDB) Set(process *Process) string {
   return id
 }

-// Update a process info/metadata, given the process id
-//
-// Deprecated: will be removed anytime soon.
-func (m *MemoryDB) UpdateInfo(id string, info DownloadInfo) error {
-  entry, ok := m.table.Load(id)
-  if ok {
-    entry.(*Process).Info = info
-    m.table.Store(id, entry)
-    return nil
-  }
-  return fmt.Errorf("can't update row with id %s", id)
-}
-
-// Update a process progress data, given the process id
-// Used for updating completition percentage or ETA.
-//
-// Deprecated: will be removed anytime soon.
-func (m *MemoryDB) UpdateProgress(id string, progress DownloadProgress) error {
-  entry, ok := m.table.Load(id)
-  if ok {
-    entry.(*Process).Progress = progress
-    m.table.Store(id, entry)
-    return nil
-  }
-  return fmt.Errorf("can't update row with id %s", id)
-}
-
 // Removes a process progress, given the process id
 func (m *MemoryDB) Delete(id string) {
   m.table.Delete(id)
@@ -92,7 +64,7 @@ func (m *MemoryDB) All() *[]ProcessResponse {
   return &running
 }

-// WIP: Persist the database in a single file named "session.dat"
+// Persist the database in a single file named "session.dat"
 func (m *MemoryDB) Persist() error {
   running := m.All()

@@ -115,17 +87,16 @@ func (m *MemoryDB) Persist() error {
   return nil
 }

-// WIP: Restore a persisted state
+// Restore a persisted state
 func (m *MemoryDB) Restore(logger *slog.Logger) {
   fd, err := os.Open("session.dat")
   if err != nil {
     return
   }

-  session := Session{}
+  var session Session

-  err = gob.NewDecoder(fd).Decode(&session)
-  if err != nil {
+  if err := gob.NewDecoder(fd).Decode(&session); err != nil {
     return
   }

@@ -1,19 +1,22 @@
 package internal

 import (
+  "log/slog"
+
   "github.com/marcopeocchi/yt-dlp-web-ui/server/config"
 )

 type MessageQueue struct {
   producerCh chan *Process
   consumerCh chan struct{}
+  logger     *slog.Logger
 }

 // Creates a new message queue.
 // By default it will be created with a size equals to nthe number of logical
 // CPU cores.
 // The queue size can be set via the qs flag.
-func NewMessageQueue() *MessageQueue {
+func NewMessageQueue(l *slog.Logger) *MessageQueue {
   size := config.Instance().QueueSize

   if size <= 0 {
@@ -23,13 +26,21 @@ func NewMessageQueue() *MessageQueue {
   return &MessageQueue{
     producerCh: make(chan *Process, size),
     consumerCh: make(chan struct{}, size),
+    logger:     l,
   }
 }

 // Publish a message to the queue and set the task to a peding state.
 func (m *MessageQueue) Publish(p *Process) {
   p.SetPending()
-  go p.SetMetadata()
+  go func() {
+    if err := p.SetMetadata(); err != nil {
+      m.logger.Error(
+        "failed to retrieve metadata",
+        slog.String("err", err.Error()),
+      )
+    }
+  }()
   m.producerCh <- p
 }

@@ -29,17 +29,15 @@ func PlaylistDetect(req DownloadRequest, mq *MessageQueue, db *MemoryDB, logger
     return err
   }

-  var m metadata
+  var m metadata

-  err = cmd.Start()
-  if err != nil {
+  if err := cmd.Start(); err != nil {
     return err
   }

   logger.Info("decoding metadata", slog.String("url", req.URL))

-  err = json.NewDecoder(stdout).Decode(&m)
-  if err != nil {
+  if err := json.NewDecoder(stdout).Decode(&m); err != nil {
     return err
   }

@@ -72,11 +70,10 @@ func PlaylistDetect(req DownloadRequest, mq *MessageQueue, db *MemoryDB, logger
     proc := &Process{
       Url:      meta.OriginalURL,
       Progress: DownloadProgress{},
-      Output: DownloadOutput{
-        Filename: req.Rename,
-      },
+      Output:   DownloadOutput{Filename: req.Rename},
       Info:   meta,
       Params: req.Params,
+      Logger: logger,
     }

     proc.Info.URL = meta.OriginalURL
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
|
"bytes"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"regexp"
|
"regexp"
|
||||||
"slices"
|
"slices"
|
||||||
@@ -35,13 +38,6 @@ const (
|
|||||||
StatusErrored
|
StatusErrored
|
||||||
)
|
)
|
||||||
|
|
||||||
type ProgressTemplate struct {
|
|
||||||
Percentage string `json:"percentage"`
|
|
||||||
Speed float32 `json:"speed"`
|
|
||||||
Size string `json:"size"`
|
|
||||||
Eta float32 `json:"eta"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Process descriptor
|
// Process descriptor
|
||||||
type Process struct {
|
type Process struct {
|
||||||
Id string
|
Id string
|
||||||
@@ -54,11 +50,6 @@ type Process struct {
|
|||||||
Logger *slog.Logger
|
Logger *slog.Logger
|
||||||
}
|
}
|
||||||
|
|
||||||
type DownloadOutput struct {
|
|
||||||
Path string
|
|
||||||
Filename string
|
|
||||||
}
|
|
||||||
|
|
||||||
// Starts spawns/forks a new yt-dlp process and parse its stdout.
|
// Starts spawns/forks a new yt-dlp process and parse its stdout.
|
||||||
// The process is spawned to outputting a custom progress text that
|
// The process is spawned to outputting a custom progress text that
|
||||||
// Resembles a JSON Object in order to Unmarshal it later.
|
// Resembles a JSON Object in order to Unmarshal it later.
|
||||||
@@ -91,6 +82,8 @@ func (p *Process) Start() {
|
|||||||
|
|
||||||
buildFilename(&p.Output)
|
buildFilename(&p.Output)
|
||||||
|
|
||||||
|
go p.GetFileName(&out)
|
||||||
|
|
||||||
params := []string{
|
params := []string{
|
||||||
strings.Split(p.Url, "?list")[0], //no playlist
|
strings.Split(p.Url, "?list")[0], //no playlist
|
||||||
"--newline",
|
"--newline",
|
||||||
@@ -120,7 +113,6 @@ func (p *Process) Start() {
|
|||||||
)
|
)
|
||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
scan := bufio.NewScanner(r)
|
|
||||||
|
|
||||||
err = cmd.Start()
|
err = cmd.Start()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -142,6 +134,8 @@ func (p *Process) Start() {
|
|||||||
// spawn a goroutine that does the dirty job of parsing the stdout
|
// spawn a goroutine that does the dirty job of parsing the stdout
|
||||||
// filling the channel with as many stdout line as yt-dlp produces (producer)
|
// filling the channel with as many stdout line as yt-dlp produces (producer)
|
||||||
go func() {
|
go func() {
|
||||||
|
scan := bufio.NewScanner(r)
|
||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
r.Close()
|
r.Close()
|
||||||
p.Complete()
|
p.Complete()
|
||||||
@@ -158,21 +152,24 @@ func (p *Process) Start() {
|
|||||||
// Slows down the unmarshal operation to every 500ms
|
// Slows down the unmarshal operation to every 500ms
|
||||||
go func() {
|
go func() {
|
||||||
rx.Sample(time.Millisecond*500, sourceChan, doneChan, func(event []byte) {
|
rx.Sample(time.Millisecond*500, sourceChan, doneChan, func(event []byte) {
|
||||||
stdout := ProgressTemplate{}
|
var progress ProgressTemplate
|
||||||
err := json.Unmarshal(event, &stdout)
|
|
||||||
if err == nil {
|
if err := json.Unmarshal(event, &progress); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
p.Progress = DownloadProgress{
|
p.Progress = DownloadProgress{
|
||||||
Status: StatusDownloading,
|
Status: StatusDownloading,
|
||||||
Percentage: stdout.Percentage,
|
Percentage: progress.Percentage,
|
||||||
Speed: stdout.Speed,
|
Speed: progress.Speed,
|
||||||
ETA: stdout.Eta,
|
ETA: progress.Eta,
|
||||||
}
|
}
|
||||||
|
|
||||||
p.Logger.Info("progress",
|
p.Logger.Info("progress",
|
||||||
slog.String("id", p.getShortId()),
|
slog.String("id", p.getShortId()),
|
||||||
slog.String("url", p.Url),
|
slog.String("url", p.Url),
|
||||||
slog.String("percentege", stdout.Percentage),
|
slog.String("percentage", progress.Percentage),
|
||||||
)
|
)
|
||||||
}
|
|
||||||
})
|
})
|
||||||
}()
|
}()
|
||||||
|
|
||||||
@@ -220,9 +217,13 @@ func (p *Process) Kill() error {
|
|||||||
// Returns the available format for this URL
|
// Returns the available format for this URL
|
||||||
func (p *Process) GetFormatsSync() (DownloadFormats, error) {
|
func (p *Process) GetFormatsSync() (DownloadFormats, error) {
|
||||||
cmd := exec.Command(config.Instance().DownloaderPath, p.Url, "-J")
|
cmd := exec.Command(config.Instance().DownloaderPath, p.Url, "-J")
|
||||||
stdout, err := cmd.Output()
|
|
||||||
|
|
||||||
|
stdout, err := cmd.Output()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
p.Logger.Error(
|
||||||
|
"failed to retrieve metadata",
|
||||||
|
slog.String("err", err.Error()),
|
||||||
|
)
|
||||||
return DownloadFormats{}, err
|
return DownloadFormats{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -269,6 +270,24 @@ func (p *Process) GetFormatsSync() (DownloadFormats, error) {
|
|||||||
return info, nil
|
return info, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (p *Process) GetFileName(o *DownloadOutput) error {
|
||||||
|
cmd := exec.Command(
|
||||||
|
config.Instance().DownloaderPath,
|
||||||
|
"--print", "filename",
|
||||||
|
"-o", fmt.Sprintf("%s/%s", o.Path, o.Filename),
|
||||||
|
p.Url,
|
||||||
|
)
|
||||||
|
cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
|
||||||
|
|
||||||
|
out, err := cmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
p.Output.SavedFilePath = strings.Trim(string(out), "\n")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func (p *Process) SetPending() {
|
func (p *Process) SetPending() {
|
||||||
// Since video's title isn't available yet, fill in with the URL.
|
// Since video's title isn't available yet, fill in with the URL.
|
||||||
p.Info = DownloadInfo{
|
p.Info = DownloadInfo{
|
||||||
@@ -285,7 +304,17 @@ func (p *Process) SetMetadata() error {
|
|||||||
|
|
||||||
stdout, err := cmd.StdoutPipe()
|
stdout, err := cmd.StdoutPipe()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.Logger.Error("failed retrieving info",
|
p.Logger.Error("failed to connect to stdout",
|
||||||
|
slog.String("id", p.getShortId()),
|
||||||
|
slog.String("url", p.Url),
|
||||||
|
slog.String("err", err.Error()),
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
stderr, err := cmd.StderrPipe()
|
||||||
|
if err != nil {
|
||||||
|
p.Logger.Error("failed to connect to stderr",
|
||||||
slog.String("id", p.getShortId()),
|
slog.String("id", p.getShortId()),
|
||||||
slog.String("url", p.Url),
|
slog.String("url", p.Url),
|
||||||
slog.String("err", err.Error()),
|
slog.String("err", err.Error()),
|
||||||
@@ -298,27 +327,33 @@ func (p *Process) SetMetadata() error {
|
|||||||
CreatedAt: time.Now(),
|
CreatedAt: time.Now(),
|
||||||
}
|
}
|
||||||
|
|
||||||
err = cmd.Start()
|
if err := cmd.Start(); err != nil {
|
||||||
if err != nil {
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var bufferedStderr bytes.Buffer
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
io.Copy(&bufferedStderr, stderr)
|
||||||
|
}()
|
||||||
|
|
||||||
p.Logger.Info("retrieving metadata",
|
p.Logger.Info("retrieving metadata",
|
||||||
slog.String("id", p.getShortId()),
|
slog.String("id", p.getShortId()),
|
||||||
slog.String("url", p.Url),
|
slog.String("url", p.Url),
|
||||||
)
|
)
|
||||||
|
|
||||||
err = json.NewDecoder(stdout).Decode(&info)
|
if err := json.NewDecoder(stdout).Decode(&info); err != nil {
|
||||||
if err != nil {
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
p.Info = info
|
p.Info = info
|
||||||
p.Progress.Status = StatusPending
|
p.Progress.Status = StatusPending
|
||||||
|
|
||||||
err = cmd.Wait()
|
if err := cmd.Wait(); err != nil {
|
||||||
|
return errors.New(bufferedStderr.String())
|
||||||
|
}
|
||||||
|
|
||||||
return err
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Process) getShortId() string {
|
func (p *Process) getShortId() string {
|
||||||
|
|||||||
@@ -8,6 +8,15 @@ import (
   "time"
 )

+/*
+File base logger with log-rotate capabilities.
+The rotate process must be initiated from an external goroutine.
+
+After rotation the previous logs file are compressed with gzip algorithm.
+
+The rotated log follows this naming: [filename].UTC time.gz
+*/
+
 // implements io.Writer interface
 type LogRotateWriter struct {
   mu sync.Mutex
@@ -6,10 +6,21 @@ import (
   "github.com/reactivex/rxgo/v2"
 )

+/*
+Logger implementation using the observable pattern.
+Implements io.Writer interface.
+
+The observable is an event source which drops everythigng unless there's
+a subscriber connected.
+
+The observer implementatios are a http ServerSentEvents handler and a
+websocket one in handler.go
+*/
+
 var (
   logsChan       = make(chan rxgo.Item, 100)
   logsObservable = rxgo.
-    FromChannel(logsChan, rxgo.WithBackPressureStrategy(rxgo.Drop)).
+    FromEventSource(logsChan, rxgo.WithBackPressureStrategy(rxgo.Drop)).
     BufferWithTime(rxgo.WithDuration(time.Millisecond * 500))
 )

@@ -20,9 +31,7 @@ func NewObservableLogger() *ObservableLogger {
 }

 func (o *ObservableLogger) Write(p []byte) (n int, err error) {
-  go func() {
   logsChan <- rxgo.Of(string(p))
-  }()

   n = len(p)
   err = nil
@@ -11,20 +11,18 @@ import (
 )

 func validateToken(tokenValue string) error {
-  if tokenValue == "" {
-    return errors.New("invalid token")
-  }
-
-  token, _ := jwt.Parse(tokenValue, func(t *jwt.Token) (interface{}, error) {
+  token, err := jwt.Parse(tokenValue, func(t *jwt.Token) (interface{}, error) {
     if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
       return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
     }
     return []byte(os.Getenv("JWT_SECRET")), nil
   })
+  if err != nil {
+    return err
+  }
+
   if claims, ok := token.Claims.(jwt.MapClaims); ok && token.Valid {
     expiresAt, err := time.Parse(time.RFC3339, claims["expiresAt"].(string))

     if err != nil {
       return err
     }
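`validateToken` now propagates the `jwt.Parse` error directly instead of special-casing the empty string. For reference, a token that passes this check must be HMAC-signed with `JWT_SECRET` and carry an RFC 3339 `expiresAt` claim. A hedged sketch of minting such a token; the import path `github.com/golang-jwt/jwt/v4` is an assumption based on the API used here and may differ from the module this repo actually imports:

```go
package main

import (
	"fmt"
	"os"
	"time"

	"github.com/golang-jwt/jwt/v4" // assumed; the repo's actual jwt module may differ
)

func main() {
	// The claim name and RFC 3339 format mirror what validateToken reads back.
	claims := jwt.MapClaims{
		"expiresAt": time.Now().Add(24 * time.Hour).Format(time.RFC3339),
	}

	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)

	// Signed with the same secret the middleware uses for verification.
	signed, err := token.SignedString([]byte(os.Getenv("JWT_SECRET")))
	if err != nil {
		panic(err)
	}

	fmt.Println(signed)
}
```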
@@ -154,6 +154,7 @@ func (s *Service) UpdateExecutable(args NoArgs, updated *bool) error {
   err := updater.UpdateExecutable()
   if err != nil {
     *updated = true
+    s.logger.Info("Succesfully updated yt-dlp")
     return err
   }
   *updated = false
@@ -89,7 +89,7 @@ func RunBlocking(cfg *RunConfig) {
     logger.Error("failed to init database", slog.String("err", err.Error()))
   }

-  mq := internal.NewMessageQueue()
+  mq := internal.NewMessageQueue(logger)
   go mq.Subscriber()

   srv := newServer(serverConfig{
@@ -163,6 +163,7 @@ func newServer(c serverConfig) *http.Server {
     r.Post("/delete", handlers.DeleteFile)
     r.Get("/d/{id}", handlers.DownloadFile)
     r.Get("/v/{id}", handlers.SendFile)
+    r.Get("/bulk", handlers.BulkDownload(c.mdb))
   })

   // Authentication routes
@@ -26,10 +26,12 @@ func DirectoryTree() (*[]string, error) {
     children []Node
   }

-  rootPath := config.Instance().DownloadPath
+  var (
+    rootPath  = config.Instance().DownloadPath

-  stack := internal.NewStack[Node]()
-  flattened := make([]string, 0)
+    stack     = internal.NewStack[Node]()
+    flattened = make([]string, 0)
+  )

   stack.Push(Node{path: rootPath})

@@ -37,14 +39,16 @@ func DirectoryTree() (*[]string, error) {

   for stack.IsNotEmpty() {
     current := stack.Pop().Value

     children, err := os.ReadDir(current.path)
     if err != nil {
       return nil, err
     }
     for _, entry := range children {
-      childPath := filepath.Join(current.path, entry.Name())
-      childNode := Node{path: childPath}
+      var (
+        childPath = filepath.Join(current.path, entry.Name())
+        childNode = Node{path: childPath}
+      )
       if entry.IsDir() {
         current.children = append(current.children, childNode)
         stack.Push(childNode)