// Download service: saves anime episodes (remuxed with FFmpeg) and manga/novel
// chapters (CBZ/EPUB) into the local library, and tracks in-flight downloads.
import { getConfig as loadConfig } from '../../shared/config';
import { queryOne, queryAll, run } from '../../shared/database';
import { getAnimeById } from '../anime/anime.service';
import { getBookById } from '../books/books.service';
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';
import AdmZip from 'adm-zip';
import { spawn } from 'child_process';

const { values } = loadConfig();
const FFMPEG_PATH = values.paths?.ffmpeg || 'ffmpeg';

type DownloadStatus = {
  id: string;
  type: 'anime' | 'manga' | 'novel';
  anilistId: number;
  unitNumber: number;
  status: 'pending' | 'downloading' | 'completed' | 'failed';
  progress: number;
  speed?: string;
  timeElapsed?: string;
  error?: string;
  startedAt: number;
  completedAt?: number;
};
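// Illustrative shape of a record mid-download (example values only; `speed` and
// `timeElapsed` are parsed from FFmpeg's stderr during anime downloads):
//   { id: 'f3b2...', type: 'anime', anilistId: 21, unitNumber: 12,
//     status: 'downloading', progress: 0, speed: '1.43x',
//     timeElapsed: '00:03:12.48', startedAt: 1730000000000 }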
// In-memory registry of downloads currently tracked by this process.
const activeDownloads = new Map<string, DownloadStatus>();

export function getActiveDownloads(): DownloadStatus[] {
  return Array.from(activeDownloads.values());
}

export function getDownloadById(id: string): DownloadStatus | undefined {
  return activeDownloads.get(id);
}

function updateDownloadProgress(id: string, updates: Partial<DownloadStatus>) {
  const current = activeDownloads.get(id);
  if (current) {
    activeDownloads.set(id, { ...current, ...updates });
  }
}

type AnimeDownloadParams = {
  anilistId: number;
  episodeNumber: number;
  streamUrl: string;
  headers?: Record<string, string>;
  quality?: string;
  subtitles?: Array<{ language: string; url: string }>;
  chapters?: Array<{ title: string; start_time: number; end_time: number }>;
};

type BookDownloadParams = {
  anilistId: number;
  chapterNumber: number;
  format: 'manga' | 'novel';
  content?: string;
  images?: Array<{ index: number; url: string }>;
};

async function ensureDirectory(dirPath: string) {
  if (!fs.existsSync(dirPath)) {
    fs.mkdirSync(dirPath, { recursive: true });
  }
}

async function downloadFile(url: string, outputPath: string): Promise<void> {
  const res = await fetch(url);
  if (!res.ok) throw new Error(`HTTP_${res.status}`);
  await ensureDirectory(path.dirname(outputPath));
  const buf = Buffer.from(await res.arrayBuffer());
  fs.writeFileSync(outputPath, buf);
}

// Looks up the library entry matched to an AniList id, creating the folder and
// database row if it does not exist yet.
async function getOrCreateEntry(
  anilistId: number,
  type: 'anime' | 'manga' | 'novels'
): Promise<{ id: string; path: string; folderName: string }> {
  const existing = await queryOne(
    `SELECT id, path, folder_name FROM local_entries WHERE matched_id = ? AND matched_source = 'anilist' AND type = ?`,
    [anilistId, type],
    'local_library'
  );
  if (existing) {
    return { id: existing.id, path: existing.path, folderName: existing.folder_name };
  }

  const metadata: any =
    type === 'anime' ? await getAnimeById(anilistId) : await getBookById(anilistId);
  if (!metadata) {
    throw new Error('METADATA_NOT_FOUND');
  }

  const { values } = loadConfig();
  const basePath = values.library?.[type];
  if (!basePath) {
    throw new Error(`NO_LIBRARY_PATH_FOR_${type.toUpperCase()}`);
  }

  const title = metadata.title?.romaji || metadata.title?.english || `ID_${anilistId}`;
  const safeName = title.replace(/[<>:"/\\|?*]/g, '_');
  const folderPath = path.join(basePath, safeName);
  await ensureDirectory(folderPath);

  const entryId = crypto
    .createHash('sha1')
    .update(`anilist:${type}:${anilistId}`)
    .digest('hex');
  const now = Date.now();

  await run(
    `INSERT OR IGNORE INTO local_entries (id, type, path, folder_name, matched_id, matched_source, last_scan) VALUES (?, ?, ?, ?, ?, 'anilist', ?)`,
    [entryId, type, folderPath, safeName, anilistId, now],
    'local_library'
  );

  return { id: entryId, path: folderPath, folderName: safeName };
}

export async function downloadAnimeEpisode(params: AnimeDownloadParams) {
  const { anilistId, episodeNumber, streamUrl, subtitles, chapters } = params;
  const downloadId = crypto.randomUUID();

  activeDownloads.set(downloadId, {
    id: downloadId,
    type: 'anime',
    anilistId,
    unitNumber: episodeNumber,
    status: 'pending',
    progress: 0,
    startedAt: Date.now()
  });

  const entry = await getOrCreateEntry(anilistId, 'anime');

  const exists = await queryOne(
    `SELECT id FROM local_files WHERE entry_id = ? AND unit_number = ?`,
    [entry.id, episodeNumber],
    'local_library'
  );
  if (exists) {
    activeDownloads.delete(downloadId);
    return { status: 'ALREADY_EXISTS', entry_id: entry.id, episode: episodeNumber };
  }

  const outputPath = path.join(entry.path, `Episode_${episodeNumber.toString().padStart(2, '0')}.mkv`);
  const tempDir = path.join(entry.path, '.temp');
  await ensureDirectory(tempDir);

  try {
    updateDownloadProgress(downloadId, { status: 'downloading' });

    let videoInput = streamUrl;
    let audioInputs: string[] = [];
    const isMaster = (params as any).is_master === true;
    if (isMaster) {
      // Master playlists carry separate video/audio variant playlists.
      const variant = (params as any).variant;
      const audios = (params as any).audio;
      if (!variant || !variant.playlist_url) {
        throw new Error('VARIANT_REQUIRED_FOR_MASTER');
      }
      videoInput = variant.playlist_url;
      if (audios && audios.length > 0) {
        audioInputs = audios.map((a: any) => a.playlist_url);
      }
    }

    // Fetch external subtitle tracks into the temp folder so FFmpeg can mux them.
    const subFiles: string[] = [];
    if (subtitles?.length) {
      for (let i = 0; i < subtitles.length; i++) {
        const ext = subtitles[i].url.endsWith('.vtt') ? 'vtt' : 'srt';
        const p = path.join(tempDir, `sub_${i}.${ext}`);
        await downloadFile(subtitles[i].url, p);
        subFiles.push(p);
      }
    }
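    // For orientation: with one alternate audio track, one subtitle file and chapter
    // data, the argument list assembled below corresponds roughly to the following
    // command (illustrative URLs and language tags, not values from a real request):
    //
    //   ffmpeg -protocol_whitelist file,http,https,tcp,tls,crypto -allowed_extensions ALL \
    //     -f hls -extension_picky 0 -i <video.m3u8> \
    //     -protocol_whitelist ... -allowed_extensions ALL -f hls -extension_picky 0 -i <audio.m3u8> \
    //     -i .temp/sub_0.srt -i .temp/chapters.txt \
    //     -map 0:v:0 -map 1:a:0 -map 2:0 -map_metadata 3 \
    //     -metadata:s:a:0 language=jpn -metadata:s:s:0 language=en \
    //     -c:v copy -c:a copy -c:s srt -y Episode_01.mkv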
    const args = [
      '-protocol_whitelist', 'file,http,https,tcp,tls,crypto',
      '-allowed_extensions', 'ALL',
      '-f', 'hls',
      '-extension_picky', '0',
      '-i', videoInput
    ];

    audioInputs.forEach(audioUrl => {
      args.push(
        '-protocol_whitelist', 'file,http,https,tcp,tls,crypto',
        '-allowed_extensions', 'ALL',
        '-f', 'hls',
        '-extension_picky', '0',
        '-i', audioUrl
      );
    });

    subFiles.forEach(f => args.push('-i', f));

    // Build an FFMETADATA chapters file and register it as an extra input.
    let chaptersInputIndex = -1;
    if (chapters?.length) {
      const meta = path.join(tempDir, 'chapters.txt');
      const sorted = [...chapters].sort((a, b) => a.start_time - b.start_time);
      const lines: string[] = [';FFMETADATA1'];
      for (let i = 0; i < sorted.length; i++) {
        const c = sorted[i];
        const start = Math.floor(c.start_time * 1000);
        const end = Math.floor(c.end_time * 1000);
        const title = (c.title || 'chapter').toUpperCase();
        lines.push(
          '[CHAPTER]',
          'TIMEBASE=1/1000',
          `START=${start}`,
          `END=${end}`,
          `title=${title}`
        );
        if (i < sorted.length - 1) {
          // Fill gaps longer than one second between named chapters with a
          // generic "Episode" chapter.
          const nextStart = Math.floor(sorted[i + 1].start_time * 1000);
          if (nextStart - end > 1000) {
            lines.push(
              '[CHAPTER]',
              'TIMEBASE=1/1000',
              `START=${end}`,
              `END=${nextStart}`,
              'title=Episode'
            );
          }
        } else {
          lines.push(
            '[CHAPTER]',
            'TIMEBASE=1/1000',
            `START=${end}`,
            'title=Episode'
          );
        }
      }
      fs.writeFileSync(meta, lines.join('\n'));
      args.push('-i', meta);
      chaptersInputIndex = 1 + audioInputs.length + subFiles.length;
    }
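    // For reference, with two chapters (an OP at 0-90 s and an ED at 1320-1410 s,
    // timings assumed purely for illustration) the loop above writes:
    //
    //   ;FFMETADATA1
    //   [CHAPTER]
    //   TIMEBASE=1/1000
    //   START=0
    //   END=90000
    //   title=OP
    //   [CHAPTER]
    //   TIMEBASE=1/1000
    //   START=90000
    //   END=1320000
    //   title=Episode
    //   [CHAPTER]
    //   TIMEBASE=1/1000
    //   START=1320000
    //   END=1410000
    //   title=ED
    //   [CHAPTER]
    //   TIMEBASE=1/1000
    //   START=1410000
    //   title=Episode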
    // Map video from input 0, then audio, subtitles and chapter metadata by input index.
    args.push('-map', '0:v:0');
    if (audioInputs.length > 0) {
      audioInputs.forEach((_, i) => {
        args.push('-map', `${i + 1}:a:0`);
        const audioInfo = (params as any).audio?.[i];
        if (audioInfo) {
          const audioStreamIndex = i;
          if (audioInfo.language) {
            args.push(`-metadata:s:a:${audioStreamIndex}`, `language=${audioInfo.language}`);
          }
          if (audioInfo.name) {
            args.push(`-metadata:s:a:${audioStreamIndex}`, `title=${audioInfo.name}`);
          }
        }
      });
    } else {
      args.push('-map', '0:a:0?');
    }

    const subtitleStartIndex = 1 + audioInputs.length;
    subFiles.forEach((_, i) => {
      args.push('-map', `${subtitleStartIndex + i}:0`);
      args.push(`-metadata:s:s:${i}`, `language=${subtitles![i].language}`);
    });

    if (chaptersInputIndex >= 0) {
      args.push('-map_metadata', `${chaptersInputIndex}`);
    }

    // Remux without re-encoding; convert subtitles to SRT when any were muxed in.
    args.push('-c:v', 'copy', '-c:a', 'copy');
    if (subFiles.length) args.push('-c:s', 'srt');
    args.push('-y', outputPath);

    await new Promise((resolve, reject) => {
      const ff = spawn(FFMPEG_PATH, args, { windowsHide: true, stdio: ['ignore', 'pipe', 'pipe'] });
      ff.stderr.on('data', (data) => {
        // FFmpeg prints progress on stderr, e.g. "time=00:03:12.48 ... speed=1.43x".
        const text = data.toString();
        const timeMatch = text.match(/time=(\S+)/);
        const speedMatch = text.match(/speed=(\S+)/);
        if (timeMatch || speedMatch) {
          updateDownloadProgress(downloadId, {
            timeElapsed: timeMatch?.[1],
            speed: speedMatch?.[1]
          });
        }
      });
      ff.on('error', (error) => reject(error));
      ff.on('close', (code) => {
        if (code === 0) resolve(true);
        else reject(new Error(`FFmpeg exited with code ${code}`));
      });
    });

    fs.rmSync(tempDir, { recursive: true, force: true });

    const fileId = crypto.randomUUID();
    await run(
      `INSERT INTO local_files (id, entry_id, file_path, unit_number) VALUES (?, ?, ?, ?)`,
      [fileId, entry.id, outputPath, episodeNumber],
      'local_library'
    );
    await run(
      `UPDATE local_entries SET last_scan = ? WHERE id = ?`,
      [Date.now(), entry.id],
      'local_library'
    );

    updateDownloadProgress(downloadId, { status: 'completed', progress: 100, completedAt: Date.now() });
    setTimeout(() => activeDownloads.delete(downloadId), 30000);

    return {
      status: 'SUCCESS',
      download_id: downloadId,
      entry_id: entry.id,
      file_id: fileId,
      episode: episodeNumber,
      path: outputPath
    };
  } catch (e: any) {
    fs.rmSync(tempDir, { recursive: true, force: true });
    if (fs.existsSync(outputPath)) fs.unlinkSync(outputPath);
    updateDownloadProgress(downloadId, { status: 'failed', error: e.message });
    setTimeout(() => activeDownloads.delete(downloadId), 60000);
    const err = new Error('DOWNLOAD_FAILED');
    (err as any).details = e.message;
    throw err;
  }
}
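// A minimal sketch of how a caller might invoke the episode download; the URLs,
// ids and chapter timings below are placeholders, not values from the real route layer:
//
//   await downloadAnimeEpisode({
//     anilistId: 21,
//     episodeNumber: 12,
//     streamUrl: 'https://example.com/stream/master.m3u8',
//     subtitles: [{ language: 'en', url: 'https://example.com/ep12.en.vtt' }],
//     chapters: [{ title: 'OP', start_time: 0, end_time: 90 }]
//   });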
export async function downloadBookChapter(params: BookDownloadParams) {
  const { anilistId, chapterNumber, format, content, images } = params;
  const downloadId = crypto.randomUUID();

  activeDownloads.set(downloadId, {
    id: downloadId,
    type: format === 'manga' ? 'manga' : 'novel',
    anilistId,
    unitNumber: chapterNumber,
    status: 'pending',
    progress: 0,
    startedAt: Date.now()
  });

  const type = format === 'manga' ? 'manga' : 'novels';
  const entry = await getOrCreateEntry(anilistId, type);

  const existingFile = await queryOne(
    `SELECT id FROM local_files WHERE entry_id = ? AND unit_number = ?`,
    [entry.id, chapterNumber],
    'local_library'
  );
  if (existingFile) {
    activeDownloads.delete(downloadId);
    return {
      status: 'ALREADY_EXISTS',
      message: `Chapter ${chapterNumber} already exists`,
      entry_id: entry.id,
      chapter: chapterNumber
    };
  }

  try {
    updateDownloadProgress(downloadId, { status: 'downloading' });

    let outputPath: string;
    let fileId: string;

    if (format === 'manga') {
      // Manga: pack the page images into a CBZ archive in reading order.
      const chapterName = `Chapter_${chapterNumber.toString().padStart(3, '0')}.cbz`;
      outputPath = path.join(entry.path, chapterName);
      const zip = new AdmZip();
      const sortedImages = images!.sort((a, b) => a.index - b.index);
      for (let i = 0; i < sortedImages.length; i++) {
        const img = sortedImages[i];
        const res = await fetch(img.url);
        if (!res.ok) throw new Error(`HTTP_${res.status}`);
        const buf = Buffer.from(await res.arrayBuffer());
        const ext = path.extname(new URL(img.url).pathname) || '.jpg';
        const filename = `${img.index.toString().padStart(4, '0')}${ext}`;
        zip.addFile(filename, buf);
        updateDownloadProgress(downloadId, { progress: Math.floor((i / sortedImages.length) * 100) });
      }
      zip.writeZip(outputPath);
    } else {
      // Novel: wrap the chapter HTML in a single-chapter EPUB. The XML boilerplate
      // below is a minimal reconstruction (the original markup was lost in the
      // source); only the title, identifier, language and body values come from it.
      const chapterName = `Chapter_${chapterNumber.toString().padStart(3, '0')}.epub`;
      outputPath = path.join(entry.path, chapterName);
      const zip = new AdmZip();
      // The mimetype entry is expected to be the first file in the archive.
      zip.addFile('mimetype', Buffer.from('application/epub+zip'), '', 0);

      const containerXml = `<?xml version="1.0" encoding="UTF-8"?>
<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
  <rootfiles>
    <rootfile full-path="OEBPS/content.opf" media-type="application/oebps-package+xml"/>
  </rootfiles>
</container>`;
      zip.addFile('META-INF/container.xml', Buffer.from(containerXml));

      const contentOpf = `<?xml version="1.0" encoding="UTF-8"?>
<package xmlns="http://www.idpf.org/2007/opf" unique-identifier="bookid" version="2.0">
  <metadata xmlns:dc="http://purl.org/dc/elements/1.1/">
    <dc:title>Chapter ${chapterNumber}</dc:title>
    <dc:identifier id="bookid">chapter-${anilistId}-${chapterNumber}</dc:identifier>
    <dc:language>en</dc:language>
  </metadata>
  <manifest>
    <item id="chapter" href="chapter.xhtml" media-type="application/xhtml+xml"/>
  </manifest>
  <spine>
    <itemref idref="chapter"/>
  </spine>
</package>`;
      zip.addFile('OEBPS/content.opf', Buffer.from(contentOpf));

      const chapterXhtml = `<?xml version="1.0" encoding="UTF-8"?>
<html xmlns="http://www.w3.org/1999/xhtml">
  <head>
    <title>Chapter ${chapterNumber}</title>
  </head>
  <body>
    ${content}
  </body>
</html>`;
      zip.addFile('OEBPS/chapter.xhtml', Buffer.from(chapterXhtml));

      zip.writeZip(outputPath);
    }

    fileId = crypto.randomUUID();
    await run(
      `INSERT INTO local_files (id, entry_id, file_path, unit_number) VALUES (?, ?, ?, ?)`,
      [fileId, entry.id, outputPath, chapterNumber],
      'local_library'
    );
    await run(
      `UPDATE local_entries SET last_scan = ? WHERE id = ?`,
      [Date.now(), entry.id],
      'local_library'
    );

    updateDownloadProgress(downloadId, { status: 'completed', progress: 100, completedAt: Date.now() });
    setTimeout(() => activeDownloads.delete(downloadId), 30000);

    return {
      status: 'SUCCESS',
      download_id: downloadId,
      entry_id: entry.id,
      file_id: fileId,
      chapter: chapterNumber,
      format,
      path: outputPath
    };
  } catch (error: any) {
    updateDownloadProgress(downloadId, { status: 'failed', error: error.message });
    setTimeout(() => activeDownloads.delete(downloadId), 60000);
    const err = new Error('DOWNLOAD_FAILED');
    (err as any).details = error.message;
    throw err;
  }
}
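// Likewise, a sketch of the book-chapter entry points (ids, URLs and content are
// placeholders, not real data):
//
//   await downloadBookChapter({
//     anilistId: 30002, chapterNumber: 1, format: 'manga',
//     images: [
//       { index: 1, url: 'https://example.com/ch1/p1.jpg' },
//       { index: 2, url: 'https://example.com/ch1/p2.jpg' }
//     ]
//   });
//
//   await downloadBookChapter({
//     anilistId: 104040, chapterNumber: 1, format: 'novel',
//     content: '<p>Chapter text as HTML</p>'
//   });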