// File: WaifuBoard/desktop/src/api/local/download.service.ts
// (368 lines, 12 KiB, TypeScript)
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';
import { exec, execFile } from 'child_process';
import { promisify } from 'util';
import AdmZip from 'adm-zip';
import { getConfig as loadConfig } from '../../shared/config.js';
import { queryOne, queryAll, run } from '../../shared/database.js';
import { getAnimeById } from '../anime/anime.service';
import { getBookById } from '../books/books.service';
const execPromise = promisify(exec);
// Path to the ffmpeg binary. Overridable via the FFMPEG_PATH environment
// variable; falls back to the original hardcoded Windows location.
const FFMPEG_PATH = process.env.FFMPEG_PATH ?? 'D:\\ffmpeg\\bin\\ffmpeg.exe';
// Parameters for downloading a single anime episode into the local library.
type AnimeDownloadParams = {
anilistId: number; // AniList media id used to match/create the library entry
episodeNumber: number; // episode number; also used for duplicate detection
streamUrl: string; // direct video URL or an HLS playlist (.m3u8)
quality?: string; // desired HLS variant, e.g. '1080p' (only consulted for .m3u8 inputs)
subtitles?: Array<{ language: string; url: string }>; // external subtitle tracks (.vtt/.srt) to mux in
chapters?: Array<{ title: string; start_time: number; end_time: number }>; // chapter markers, times in seconds
};
// Parameters for downloading a single manga or light-novel chapter.
type BookDownloadParams = {
anilistId: number; // AniList media id used to match/create the library entry
chapterNumber: number; // chapter number; also used for duplicate detection
format: 'manga' | 'novel'; // 'manga' builds a CBZ from images; 'novel' builds an EPUB from content
content?: string; // XHTML/HTML body for novel chapters (used only when format is 'novel')
images?: Array<{ index: number; url: string }>; // page images for manga chapters (used only when format is 'manga')
};
/**
 * Create dirPath (and any missing parents) if it does not already exist.
 *
 * `recursive: true` makes mkdirSync a no-op for existing directories, so the
 * previous existsSync-then-mkdirSync pair (a TOCTOU race when two downloads
 * target the same folder concurrently) is unnecessary.
 */
async function ensureDirectory(dirPath: string) {
  fs.mkdirSync(dirPath, { recursive: true });
}
/**
 * Fetch a URL and write the response body to outputPath, creating parent
 * directories as needed.
 *
 * Throws Error('HTTP_<status>') on any non-2xx response.
 * Uses fs.promises.writeFile instead of the original writeFileSync so large
 * downloads do not block the event loop.
 */
async function downloadFile(url: string, outputPath: string): Promise<void> {
  const res = await fetch(url);
  if (!res.ok) throw new Error(`HTTP_${res.status}`);
  await ensureDirectory(path.dirname(outputPath));
  const buf = Buffer.from(await res.arrayBuffer());
  await fs.promises.writeFile(outputPath, buf);
}
/**
 * Resolve the local_entries row matched to an AniList id, creating the
 * on-disk folder and DB row on first use.
 *
 * The folder name is derived from the media title (romaji, then english,
 * then an `ID_<anilistId>` fallback) with Windows-invalid characters replaced
 * by underscores. The entry id is the SHA-1 hex digest of the folder path.
 *
 * Throws Error('METADATA_NOT_FOUND') when the AniList id resolves to nothing
 * and Error('NO_LIBRARY_PATH_FOR_<TYPE>') when no library root is configured.
 */
async function getOrCreateEntry(
  anilistId: number,
  type: 'anime' | 'manga' | 'novels'
): Promise<{ id: string; path: string; folderName: string }> {
  // Fast path: the entry was already matched on a previous download.
  const cached = await queryOne(
    `SELECT id, path, folder_name FROM local_entries
WHERE matched_id = ? AND matched_source = 'anilist' AND type = ?`,
    [anilistId, type],
    'local_library'
  );
  if (cached) {
    return { id: cached.id, path: cached.path, folderName: cached.folder_name };
  }

  const media: any =
    type === 'anime' ? await getAnimeById(anilistId) : await getBookById(anilistId);
  if (!media) {
    throw new Error('METADATA_NOT_FOUND');
  }

  const libraryRoot = loadConfig().library?.[type];
  if (!libraryRoot) {
    throw new Error(`NO_LIBRARY_PATH_FOR_${type.toUpperCase()}`);
  }

  const displayTitle = media.title?.romaji || media.title?.english || `ID_${anilistId}`;
  const sanitized = displayTitle.replace(/[<>:"/\\|?*]/g, '_');
  const targetDir = path.join(libraryRoot, sanitized);
  await ensureDirectory(targetDir);

  const newId = crypto.createHash('sha1').update(targetDir).digest('hex');
  await run(
    `INSERT INTO local_entries (id, type, path, folder_name, matched_id, matched_source, last_scan)
VALUES (?, ?, ?, ?, ?, 'anilist', ?)`,
    [newId, type, targetDir, sanitized, anilistId, Date.now()],
    'local_library'
  );
  return { id: newId, path: targetDir, folderName: sanitized };
}
/**
 * Download an anime episode, mux optional subtitle tracks and chapter markers
 * into an MKV with ffmpeg, and register the resulting file in the local DB.
 *
 * Returns { status: 'ALREADY_EXISTS', ... } without touching disk when the
 * episode is already registered; otherwise { status: 'SUCCESS', ... } with
 * the new file id and path.
 *
 * Throws Error('DOWNLOAD_FAILED') with a `details` property on any failure,
 * after removing the temp directory and any partial output file.
 */
export async function downloadAnimeEpisode(params: AnimeDownloadParams) {
  const { anilistId, episodeNumber, streamUrl, quality, subtitles, chapters } = params;
  const entry = await getOrCreateEntry(anilistId, 'anime');

  const existingFile = await queryOne(
    `SELECT id FROM local_files WHERE entry_id = ? AND unit_number = ?`,
    [entry.id, episodeNumber],
    'local_library'
  );
  if (existingFile) {
    return {
      status: 'ALREADY_EXISTS',
      message: `Episode ${episodeNumber} already exists`,
      entry_id: entry.id,
      episode: episodeNumber
    };
  }

  const outputFileName = `Episode_${episodeNumber.toString().padStart(2, '0')}.mkv`;
  const outputPath = path.join(entry.path, outputFileName);
  const tempDir = path.join(entry.path, '.temp');
  await ensureDirectory(tempDir);

  try {
    let videoInput = streamUrl;

    // For HLS master playlists, optionally pick the variant whose resolution
    // height matches the requested quality (e.g. '1080p') instead of letting
    // ffmpeg choose.
    if (streamUrl.includes('.m3u8') && quality) {
      const tempM3u8 = path.join(tempDir, 'stream.m3u8');
      await downloadFile(streamUrl, tempM3u8);
      const playlistLines = fs.readFileSync(tempM3u8, 'utf8').split('\n');
      const targetHeight = quality.replace('p', '');
      const variantIdx = playlistLines.findIndex(
        line => line.includes('RESOLUTION=') && line.includes(`x${targetHeight}`)
      );
      // The variant URI is the first line after its #EXT-X-STREAM-INF tag.
      const uriLine = variantIdx >= 0 ? playlistLines[variantIdx + 1] : undefined;
      if (uriLine && !uriLine.startsWith('#')) {
        const baseUrl = streamUrl.substring(0, streamUrl.lastIndexOf('/') + 1);
        videoInput = uriLine.startsWith('http') ? uriLine : baseUrl + uriLine;
      }
      fs.unlinkSync(tempM3u8);
    }

    // Input order: 0 = video, 1..n = subtitles, n+1 = chapter metadata.
    // (The original appended the metadata '-i' after the output options,
    // which relies on undocumented ffmpeg argument-ordering behavior.)
    const inputs: string[] = ['-i', videoInput];
    const subtitleFiles: string[] = [];
    if (subtitles && subtitles.length > 0) {
      for (let i = 0; i < subtitles.length; i++) {
        const sub = subtitles[i];
        const subPath = path.join(
          tempDir,
          `subtitle_${i}.${sub.url.endsWith('.vtt') ? 'vtt' : 'srt'}`
        );
        await downloadFile(sub.url, subPath);
        subtitleFiles.push(subPath);
        inputs.push('-i', subPath);
      }
    }

    let metadataInputIndex = -1;
    if (chapters && chapters.length > 0) {
      const metadataFile = path.join(tempDir, 'chapters.txt');
      // ffmetadata requires '=', ';', '#', '\' and newline in values to be
      // backslash-escaped; unescaped chapter titles corrupt the metadata file.
      const escapeMeta = (s: string) => s.replace(/([=;#\\\n])/g, '\\$1');
      let chapterContent = ';FFMETADATA1\n';
      for (const chapter of chapters) {
        chapterContent += '[CHAPTER]\n';
        chapterContent += `TIMEBASE=1/1000\n`;
        chapterContent += `START=${Math.floor(chapter.start_time * 1000)}\n`;
        chapterContent += `END=${Math.floor(chapter.end_time * 1000)}\n`;
        chapterContent += `title=${escapeMeta(chapter.title)}\n`;
      }
      fs.writeFileSync(metadataFile, chapterContent);
      metadataInputIndex = inputs.length / 2; // index of the next '-i' input
      inputs.push('-i', metadataFile);
    }

    const ffmpegArgs = [
      ...inputs,
      '-map', '0:v',
      '-map', '0:a',
      '-c:v', 'copy',
      '-c:a', 'copy'
    ];
    for (let i = 0; i < subtitleFiles.length; i++) {
      ffmpegArgs.push('-map', `${i + 1}:s`);
      ffmpegArgs.push(`-metadata:s:s:${i}`, `language=${subtitles![i].language}`);
    }
    if (subtitleFiles.length > 0) {
      ffmpegArgs.push('-c:s', 'copy');
    }
    if (metadataInputIndex >= 0) {
      ffmpegArgs.push('-map_metadata', String(metadataInputIndex));
    }
    ffmpegArgs.push(outputPath);

    // execFile passes arguments directly to the binary (no shell), so URLs or
    // paths containing spaces, quotes, or shell metacharacters can neither
    // break the command nor inject into it — the original exec(join(' '))
    // was vulnerable to both.
    const execFilePromise = promisify(execFile);
    await execFilePromise(FFMPEG_PATH, ffmpegArgs, { maxBuffer: 1024 * 1024 * 100 });

    fs.rmSync(tempDir, { recursive: true, force: true });

    const fileId = crypto.randomUUID();
    await run(
      `INSERT INTO local_files (id, entry_id, file_path, unit_number)
VALUES (?, ?, ?, ?)`,
      [fileId, entry.id, outputPath, episodeNumber],
      'local_library'
    );
    await run(
      `UPDATE local_entries SET last_scan = ? WHERE id = ?`,
      [Date.now(), entry.id],
      'local_library'
    );
    return {
      status: 'SUCCESS',
      entry_id: entry.id,
      file_id: fileId,
      episode: episodeNumber,
      path: outputPath
    };
  } catch (error: unknown) {
    // Best-effort cleanup: drop temp artifacts and any partial output file.
    fs.rmSync(tempDir, { recursive: true, force: true });
    if (fs.existsSync(outputPath)) {
      fs.unlinkSync(outputPath);
    }
    const err = new Error('DOWNLOAD_FAILED');
    (err as any).details = error instanceof Error ? error.message : String(error);
    throw err;
  }
}
/**
 * Download one book chapter into the local library and register it.
 *
 * - format 'manga': fetches all page images (in parallel) and packs them into
 *   a .cbz archive with index-ordered, zero-padded filenames. Requires `images`.
 * - format 'novel': wraps `content` into a minimal single-chapter EPUB 3.
 *   Requires `content`.
 *
 * Returns { status: 'ALREADY_EXISTS', ... } when the chapter is already
 * registered, otherwise { status: 'SUCCESS', ... } with the new file id and
 * path. Throws Error('DOWNLOAD_FAILED') with a `details` property on failure,
 * removing any partially written archive first.
 */
export async function downloadBookChapter(params: BookDownloadParams) {
  const { anilistId, chapterNumber, format, content, images } = params;
  const type = format === 'manga' ? 'manga' : 'novels';
  const entry = await getOrCreateEntry(anilistId, type);

  const existingFile = await queryOne(
    `SELECT id FROM local_files WHERE entry_id = ? AND unit_number = ?`,
    [entry.id, chapterNumber],
    'local_library'
  );
  if (existingFile) {
    return {
      status: 'ALREADY_EXISTS',
      message: `Chapter ${chapterNumber} already exists`,
      entry_id: entry.id,
      chapter: chapterNumber
    };
  }

  // Declared outside the try so the catch block can clean up partial output.
  let outputPath: string | undefined;
  try {
    if (format === 'manga') {
      // Validate up front: the original `images!` crashed with an opaque
      // TypeError when images was missing.
      if (!images || images.length === 0) {
        throw new Error('NO_IMAGES_PROVIDED');
      }
      const chapterName = `Chapter_${chapterNumber.toString().padStart(3, '0')}.cbz`;
      outputPath = path.join(entry.path, chapterName);
      const zip = new AdmZip();
      // Copy before sorting — Array.prototype.sort mutates in place, and the
      // caller's array should not be reordered as a side effect.
      const sortedImages = [...images].sort((a, b) => a.index - b.index);
      // Fetch pages in parallel (the original fetched sequentially); entries
      // are still added to the archive in page order.
      const pageBuffers = await Promise.all(
        sortedImages.map(async (img) => {
          const res = await fetch(img.url);
          if (!res.ok) throw new Error(`HTTP_${res.status}`);
          return Buffer.from(await res.arrayBuffer());
        })
      );
      for (let i = 0; i < sortedImages.length; i++) {
        const img = sortedImages[i];
        const ext = path.extname(new URL(img.url).pathname) || '.jpg';
        const filename = `${img.index.toString().padStart(4, '0')}${ext}`;
        zip.addFile(filename, pageBuffers[i]);
      }
      zip.writeZip(outputPath);
    } else {
      // Validate up front: the original interpolated `${content}` and would
      // silently emit the literal string "undefined" into the EPUB.
      if (content == null) {
        throw new Error('NO_CONTENT_PROVIDED');
      }
      const chapterName = `Chapter_${chapterNumber.toString().padStart(3, '0')}.epub`;
      outputPath = path.join(entry.path, chapterName);
      const zip = new AdmZip();
      // NOTE(review): the EPUB OCF spec requires 'mimetype' to be the first
      // entry AND stored uncompressed; AdmZip's 4th addFile argument is an
      // attribute byte, not a compression method, and entries are deflated —
      // strict readers may reject this file. Verify against target readers.
      zip.addFile('mimetype', Buffer.from('application/epub+zip'), '', 0);
      const containerXml = `<?xml version="1.0" encoding="UTF-8"?>
<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
<rootfiles>
<rootfile full-path="OEBPS/content.opf" media-type="application/oebps-package+xml"/>
</rootfiles>
</container>`;
      zip.addFile('META-INF/container.xml', Buffer.from(containerXml));
      const contentOpf = `<?xml version="1.0" encoding="UTF-8"?>
<package xmlns="http://www.idpf.org/2007/opf" version="3.0" unique-identifier="bookid">
<metadata xmlns:dc="http://purl.org/dc/elements/1.1/">
<dc:title>Chapter ${chapterNumber}</dc:title>
<dc:identifier id="bookid">chapter-${anilistId}-${chapterNumber}</dc:identifier>
<dc:language>en</dc:language>
</metadata>
<manifest>
<item id="chapter" href="chapter.xhtml" media-type="application/xhtml+xml"/>
</manifest>
<spine>
<itemref idref="chapter"/>
</spine>
</package>`;
      zip.addFile('OEBPS/content.opf', Buffer.from(contentOpf));
      const chapterXhtml = `<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>Chapter ${chapterNumber}</title>
</head>
<body>
${content}
</body>
</html>`;
      zip.addFile('OEBPS/chapter.xhtml', Buffer.from(chapterXhtml));
      zip.writeZip(outputPath);
    }

    const fileId = crypto.randomUUID();
    await run(
      `INSERT INTO local_files (id, entry_id, file_path, unit_number)
VALUES (?, ?, ?, ?)`,
      [fileId, entry.id, outputPath, chapterNumber],
      'local_library'
    );
    await run(
      `UPDATE local_entries SET last_scan = ? WHERE id = ?`,
      [Date.now(), entry.id],
      'local_library'
    );
    return {
      status: 'SUCCESS',
      entry_id: entry.id,
      file_id: fileId,
      chapter: chapterNumber,
      format,
      path: outputPath
    };
  } catch (error: unknown) {
    // Mirror downloadAnimeEpisode: never leave a half-written archive behind.
    if (outputPath && fs.existsSync(outputPath)) {
      fs.unlinkSync(outputPath);
    }
    const err = new Error('DOWNLOAD_FAILED');
    (err as any).details = error instanceof Error ? error.message : String(error);
    throw err;
  }
}