Added an endpoint for downloading from extensions.
This commit is contained in:
368
desktop/src/api/local/download.service.ts
Normal file
368
desktop/src/api/local/download.service.ts
Normal file
@@ -0,0 +1,368 @@
|
||||
import { getConfig as loadConfig } from '../../shared/config.js';
|
||||
import { queryOne, queryAll, run } from '../../shared/database.js';
|
||||
import { getAnimeById } from '../anime/anime.service';
|
||||
import { getBookById } from '../books/books.service';
|
||||
import crypto from 'crypto';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import AdmZip from 'adm-zip';
|
||||
|
||||
const execPromise = promisify(exec);
|
||||
|
||||
const FFMPEG_PATH = 'D:\\ffmpeg\\bin\\ffmpeg.exe'; // Hardcoded como pediste
|
||||
|
||||
/** Parameters accepted by the anime episode download service. */
type AnimeDownloadParams = {
  /** AniList media id of the anime. */
  anilistId: number;
  /** Episode number; used for the output filename and the duplicate check. */
  episodeNumber: number;
  /** Direct video URL or an HLS (.m3u8) master playlist URL. */
  streamUrl: string;
  /** Desired resolution label (e.g. "1080p") used to pick an HLS variant. */
  quality?: string;
  /** External subtitle tracks to mux into the output container. */
  subtitles?: Array<{ language: string; url: string }>;
  /** Chapter markers (times in seconds) embedded as ffmpeg metadata. */
  chapters?: Array<{ title: string; start_time: number; end_time: number }>;
};

/** Parameters accepted by the book chapter download service. */
type BookDownloadParams = {
  /** AniList media id of the manga/novel. */
  anilistId: number;
  /** Chapter number; used for the output filename and the duplicate check. */
  chapterNumber: number;
  /** 'manga' produces a .cbz of images; 'novel' produces a minimal .epub. */
  format: 'manga' | 'novel';
  /** XHTML body markup for novels (required when format === 'novel'). */
  content?: string;
  /** Page images for manga, ordered by `index` (required when format === 'manga'). */
  images?: Array<{ index: number; url: string }>;
};
|
||||
|
||||
async function ensureDirectory(dirPath: string) {
|
||||
if (!fs.existsSync(dirPath)) {
|
||||
fs.mkdirSync(dirPath, { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
async function downloadFile(url: string, outputPath: string): Promise<void> {
|
||||
const res = await fetch(url);
|
||||
if (!res.ok) throw new Error(`HTTP_${res.status}`);
|
||||
|
||||
await ensureDirectory(path.dirname(outputPath));
|
||||
const buf = Buffer.from(await res.arrayBuffer());
|
||||
fs.writeFileSync(outputPath, buf);
|
||||
}
|
||||
|
||||
async function getOrCreateEntry(
|
||||
anilistId: number,
|
||||
type: 'anime' | 'manga' | 'novels'
|
||||
): Promise<{ id: string; path: string; folderName: string }> {
|
||||
const existing = await queryOne(
|
||||
`SELECT id, path, folder_name FROM local_entries
|
||||
WHERE matched_id = ? AND matched_source = 'anilist' AND type = ?`,
|
||||
[anilistId, type],
|
||||
'local_library'
|
||||
);
|
||||
|
||||
if (existing) {
|
||||
return {
|
||||
id: existing.id,
|
||||
path: existing.path,
|
||||
folderName: existing.folder_name
|
||||
};
|
||||
}
|
||||
|
||||
const metadata: any = type === 'anime'
|
||||
? await getAnimeById(anilistId)
|
||||
: await getBookById(anilistId);
|
||||
|
||||
if (!metadata) {
|
||||
throw new Error('METADATA_NOT_FOUND');
|
||||
}
|
||||
|
||||
const config = loadConfig();
|
||||
const basePath = config.library?.[type];
|
||||
|
||||
if (!basePath) {
|
||||
throw new Error(`NO_LIBRARY_PATH_FOR_${type.toUpperCase()}`);
|
||||
}
|
||||
|
||||
const title = metadata.title?.romaji || metadata.title?.english || `ID_${anilistId}`;
|
||||
const safeName = title.replace(/[<>:"/\\|?*]/g, '_');
|
||||
const folderPath = path.join(basePath, safeName);
|
||||
|
||||
await ensureDirectory(folderPath);
|
||||
|
||||
const entryId = crypto.createHash('sha1').update(folderPath).digest('hex');
|
||||
const now = Date.now();
|
||||
|
||||
await run(
|
||||
`INSERT INTO local_entries (id, type, path, folder_name, matched_id, matched_source, last_scan)
|
||||
VALUES (?, ?, ?, ?, ?, 'anilist', ?)`,
|
||||
[entryId, type, folderPath, safeName, anilistId, now],
|
||||
'local_library'
|
||||
);
|
||||
|
||||
return {
|
||||
id: entryId,
|
||||
path: folderPath,
|
||||
folderName: safeName
|
||||
};
|
||||
}
|
||||
|
||||
export async function downloadAnimeEpisode(params: AnimeDownloadParams) {
|
||||
const { anilistId, episodeNumber, streamUrl, quality, subtitles, chapters } = params;
|
||||
|
||||
const entry = await getOrCreateEntry(anilistId, 'anime');
|
||||
|
||||
const existingFile = await queryOne(
|
||||
`SELECT id FROM local_files WHERE entry_id = ? AND unit_number = ?`,
|
||||
[entry.id, episodeNumber],
|
||||
'local_library'
|
||||
);
|
||||
|
||||
if (existingFile) {
|
||||
return {
|
||||
status: 'ALREADY_EXISTS',
|
||||
message: `Episode ${episodeNumber} already exists`,
|
||||
entry_id: entry.id,
|
||||
episode: episodeNumber
|
||||
};
|
||||
}
|
||||
|
||||
const outputFileName = `Episode_${episodeNumber.toString().padStart(2, '0')}.mkv`;
|
||||
const outputPath = path.join(entry.path, outputFileName);
|
||||
const tempDir = path.join(entry.path, '.temp');
|
||||
await ensureDirectory(tempDir);
|
||||
|
||||
try {
|
||||
let inputArgs: string[] = [];
|
||||
let videoInput = streamUrl;
|
||||
|
||||
if (streamUrl.includes('.m3u8')) {
|
||||
if (quality) {
|
||||
const tempM3u8 = path.join(tempDir, 'stream.m3u8');
|
||||
await downloadFile(streamUrl, tempM3u8);
|
||||
const content = fs.readFileSync(tempM3u8, 'utf8');
|
||||
|
||||
const qualities = content.match(/RESOLUTION=\d+x(\d+)/g) || [];
|
||||
const targetHeight = quality.replace('p', '');
|
||||
const targetLine = content.split('\n').find(line =>
|
||||
line.includes(`RESOLUTION=`) && line.includes(`x${targetHeight}`)
|
||||
);
|
||||
|
||||
if (targetLine) {
|
||||
const nextLine = content.split('\n')[content.split('\n').indexOf(targetLine) + 1];
|
||||
if (nextLine && !nextLine.startsWith('#')) {
|
||||
const baseUrl = streamUrl.substring(0, streamUrl.lastIndexOf('/') + 1);
|
||||
videoInput = nextLine.startsWith('http') ? nextLine : baseUrl + nextLine;
|
||||
}
|
||||
}
|
||||
|
||||
fs.unlinkSync(tempM3u8);
|
||||
}
|
||||
}
|
||||
|
||||
inputArgs = ['-i', videoInput];
|
||||
|
||||
const subtitleFiles: string[] = [];
|
||||
if (subtitles && subtitles.length > 0) {
|
||||
for (let i = 0; i < subtitles.length; i++) {
|
||||
const sub = subtitles[i];
|
||||
const subPath = path.join(tempDir, `subtitle_${i}.${sub.url.endsWith('.vtt') ? 'vtt' : 'srt'}`);
|
||||
await downloadFile(sub.url, subPath);
|
||||
subtitleFiles.push(subPath);
|
||||
inputArgs.push('-i', subPath);
|
||||
}
|
||||
}
|
||||
|
||||
let ffmpegArgs = [
|
||||
...inputArgs,
|
||||
'-map', '0:v',
|
||||
'-map', '0:a',
|
||||
'-c:v', 'copy',
|
||||
'-c:a', 'copy'
|
||||
];
|
||||
|
||||
for (let i = 0; i < subtitleFiles.length; i++) {
|
||||
ffmpegArgs.push('-map', `${i + 1}:s`);
|
||||
ffmpegArgs.push(`-metadata:s:s:${i}`, `language=${subtitles![i].language}`);
|
||||
}
|
||||
|
||||
if (subtitleFiles.length > 0) {
|
||||
ffmpegArgs.push('-c:s', 'copy');
|
||||
}
|
||||
|
||||
if (chapters && chapters.length > 0) {
|
||||
const metadataFile = path.join(tempDir, 'chapters.txt');
|
||||
let chapterContent = ';FFMETADATA1\n';
|
||||
|
||||
for (const chapter of chapters) {
|
||||
const startMs = Math.floor(chapter.start_time * 1000);
|
||||
const endMs = Math.floor(chapter.end_time * 1000);
|
||||
|
||||
chapterContent += '[CHAPTER]\n';
|
||||
chapterContent += `TIMEBASE=1/1000\n`;
|
||||
chapterContent += `START=${startMs}\n`;
|
||||
chapterContent += `END=${endMs}\n`;
|
||||
chapterContent += `title=${chapter.title}\n`;
|
||||
}
|
||||
|
||||
fs.writeFileSync(metadataFile, chapterContent);
|
||||
ffmpegArgs.push('-i', metadataFile);
|
||||
ffmpegArgs.push('-map_metadata', `${inputArgs.length / 2}`);
|
||||
}
|
||||
|
||||
ffmpegArgs.push(outputPath);
|
||||
|
||||
const command = `${FFMPEG_PATH} ${ffmpegArgs.join(' ')}`;
|
||||
await execPromise(command, { maxBuffer: 1024 * 1024 * 100 });
|
||||
|
||||
fs.rmSync(tempDir, { recursive: true, force: true });
|
||||
|
||||
const fileId = crypto.randomUUID();
|
||||
await run(
|
||||
`INSERT INTO local_files (id, entry_id, file_path, unit_number)
|
||||
VALUES (?, ?, ?, ?)`,
|
||||
[fileId, entry.id, outputPath, episodeNumber],
|
||||
'local_library'
|
||||
);
|
||||
|
||||
await run(
|
||||
`UPDATE local_entries SET last_scan = ? WHERE id = ?`,
|
||||
[Date.now(), entry.id],
|
||||
'local_library'
|
||||
);
|
||||
|
||||
return {
|
||||
status: 'SUCCESS',
|
||||
entry_id: entry.id,
|
||||
file_id: fileId,
|
||||
episode: episodeNumber,
|
||||
path: outputPath
|
||||
};
|
||||
|
||||
} catch (error: any) {
|
||||
fs.rmSync(tempDir, { recursive: true, force: true });
|
||||
if (fs.existsSync(outputPath)) {
|
||||
fs.unlinkSync(outputPath);
|
||||
}
|
||||
|
||||
const err = new Error('DOWNLOAD_FAILED');
|
||||
(err as any).details = error.message;
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export async function downloadBookChapter(params: BookDownloadParams) {
|
||||
const { anilistId, chapterNumber, format, content, images } = params;
|
||||
|
||||
const type = format === 'manga' ? 'manga' : 'novels';
|
||||
const entry = await getOrCreateEntry(anilistId, type);
|
||||
|
||||
const existingFile = await queryOne(
|
||||
`SELECT id FROM local_files WHERE entry_id = ? AND unit_number = ?`,
|
||||
[entry.id, chapterNumber],
|
||||
'local_library'
|
||||
);
|
||||
|
||||
if (existingFile) {
|
||||
return {
|
||||
status: 'ALREADY_EXISTS',
|
||||
message: `Chapter ${chapterNumber} already exists`,
|
||||
entry_id: entry.id,
|
||||
chapter: chapterNumber
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
let outputPath: string;
|
||||
let fileId: string;
|
||||
|
||||
if (format === 'manga') {
|
||||
const chapterName = `Chapter_${chapterNumber.toString().padStart(3, '0')}.cbz`;
|
||||
outputPath = path.join(entry.path, chapterName);
|
||||
|
||||
const zip = new AdmZip();
|
||||
const sortedImages = images!.sort((a, b) => a.index - b.index);
|
||||
|
||||
for (const img of sortedImages) {
|
||||
const res = await fetch(img.url);
|
||||
if (!res.ok) throw new Error(`HTTP_${res.status}`);
|
||||
const buf = Buffer.from(await res.arrayBuffer());
|
||||
|
||||
const ext = path.extname(new URL(img.url).pathname) || '.jpg';
|
||||
const filename = `${img.index.toString().padStart(4, '0')}${ext}`;
|
||||
zip.addFile(filename, buf);
|
||||
}
|
||||
|
||||
zip.writeZip(outputPath);
|
||||
|
||||
} else {
|
||||
const chapterName = `Chapter_${chapterNumber.toString().padStart(3, '0')}.epub`;
|
||||
outputPath = path.join(entry.path, chapterName);
|
||||
|
||||
const zip = new AdmZip();
|
||||
|
||||
zip.addFile('mimetype', Buffer.from('application/epub+zip'), '', 0);
|
||||
|
||||
const containerXml = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
|
||||
<rootfiles>
|
||||
<rootfile full-path="OEBPS/content.opf" media-type="application/oebps-package+xml"/>
|
||||
</rootfiles>
|
||||
</container>`;
|
||||
zip.addFile('META-INF/container.xml', Buffer.from(containerXml));
|
||||
|
||||
const contentOpf = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<package xmlns="http://www.idpf.org/2007/opf" version="3.0" unique-identifier="bookid">
|
||||
<metadata xmlns:dc="http://purl.org/dc/elements/1.1/">
|
||||
<dc:title>Chapter ${chapterNumber}</dc:title>
|
||||
<dc:identifier id="bookid">chapter-${anilistId}-${chapterNumber}</dc:identifier>
|
||||
<dc:language>en</dc:language>
|
||||
</metadata>
|
||||
<manifest>
|
||||
<item id="chapter" href="chapter.xhtml" media-type="application/xhtml+xml"/>
|
||||
</manifest>
|
||||
<spine>
|
||||
<itemref idref="chapter"/>
|
||||
</spine>
|
||||
</package>`;
|
||||
zip.addFile('OEBPS/content.opf', Buffer.from(contentOpf));
|
||||
|
||||
const chapterXhtml = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE html>
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head>
|
||||
<title>Chapter ${chapterNumber}</title>
|
||||
</head>
|
||||
<body>
|
||||
${content}
|
||||
</body>
|
||||
</html>`;
|
||||
zip.addFile('OEBPS/chapter.xhtml', Buffer.from(chapterXhtml));
|
||||
|
||||
zip.writeZip(outputPath);
|
||||
}
|
||||
|
||||
fileId = crypto.randomUUID();
|
||||
await run(
|
||||
`INSERT INTO local_files (id, entry_id, file_path, unit_number)
|
||||
VALUES (?, ?, ?, ?)`,
|
||||
[fileId, entry.id, outputPath, chapterNumber],
|
||||
'local_library'
|
||||
);
|
||||
|
||||
await run(
|
||||
`UPDATE local_entries SET last_scan = ? WHERE id = ?`,
|
||||
[Date.now(), entry.id],
|
||||
'local_library'
|
||||
);
|
||||
|
||||
return {
|
||||
status: 'SUCCESS',
|
||||
entry_id: entry.id,
|
||||
file_id: fileId,
|
||||
chapter: chapterNumber,
|
||||
format,
|
||||
path: outputPath
|
||||
};
|
||||
|
||||
} catch (error: any) {
|
||||
const err = new Error('DOWNLOAD_FAILED');
|
||||
(err as any).details = error.message;
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
import {FastifyReply, FastifyRequest} from 'fastify';
|
||||
import fs from 'fs';
|
||||
import * as service from './local.service';
|
||||
import * as downloadService from './download.service';
|
||||
|
||||
type ScanQuery = {
|
||||
mode?: 'full' | 'incremental';
|
||||
@@ -16,6 +17,33 @@ type MatchBody = {
|
||||
matched_id: number | null;
|
||||
};
|
||||
|
||||
/** Request body for POST /library/download/anime. */
type DownloadAnimeBody = {
  /** AniList media id of the anime. */
  anilist_id: number;
  /** Episode number to download. */
  episode_number: number;
  /** Direct video URL or HLS master playlist URL. */
  stream_url: string;
  /** Optional resolution label (e.g. "1080p") for HLS variant selection. */
  quality?: string;
  /** Optional external subtitle tracks to mux in. */
  subtitles?: Array<{
    language: string;
    url: string;
  }>;
  /** Optional chapter markers (times in seconds). */
  chapters?: Array<{
    title: string;
    start_time: number;
    end_time: number;
  }>;
};

/** Request body for POST /library/download/book. */
type DownloadBookBody = {
  /** AniList media id of the manga/novel. */
  anilist_id: number;
  /** Chapter number to download. */
  chapter_number: number;
  /** Output format: .cbz for manga, .epub for novels. */
  format: 'manga' | 'novel';
  /** XHTML chapter markup — required when format === 'novel'. */
  content?: string;
  /** Page images — required when format === 'manga'. */
  images?: Array<{
    index: number;
    url: string;
  }>;
};
|
||||
|
||||
export async function scanLibrary(request: FastifyRequest<{ Querystring: ScanQuery }>, reply: FastifyReply) {
|
||||
try {
|
||||
const mode = request.query.mode || 'incremental';
|
||||
@@ -182,4 +210,110 @@ export async function getPage(request: FastifyRequest, reply: FastifyReply) {
|
||||
}
|
||||
|
||||
return reply.status(400).send();
|
||||
}
|
||||
|
||||
export async function downloadAnime(request: FastifyRequest<{ Body: DownloadAnimeBody }>, reply: FastifyReply) {
|
||||
try {
|
||||
const {
|
||||
anilist_id,
|
||||
episode_number,
|
||||
stream_url,
|
||||
quality,
|
||||
subtitles,
|
||||
chapters
|
||||
} = request.body;
|
||||
|
||||
if (!anilist_id || !episode_number || !stream_url) {
|
||||
return reply.status(400).send({
|
||||
error: 'MISSING_REQUIRED_FIELDS',
|
||||
required: ['anilist_id', 'episode_number', 'stream_url']
|
||||
});
|
||||
}
|
||||
|
||||
const result = await downloadService.downloadAnimeEpisode({
|
||||
anilistId: anilist_id,
|
||||
episodeNumber: episode_number,
|
||||
streamUrl: stream_url,
|
||||
quality,
|
||||
subtitles,
|
||||
chapters
|
||||
});
|
||||
|
||||
if (result.status === 'ALREADY_EXISTS') {
|
||||
return reply.status(409).send(result);
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (err: any) {
|
||||
console.error('Error downloading anime:', err);
|
||||
|
||||
if (err.message === 'METADATA_NOT_FOUND') {
|
||||
return reply.status(404).send({ error: 'ANIME_NOT_FOUND_IN_ANILIST' });
|
||||
}
|
||||
|
||||
if (err.message === 'DOWNLOAD_FAILED') {
|
||||
return reply.status(500).send({ error: 'DOWNLOAD_FAILED', details: err.details });
|
||||
}
|
||||
|
||||
return reply.status(500).send({ error: 'FAILED_TO_DOWNLOAD_ANIME' });
|
||||
}
|
||||
}
|
||||
|
||||
export async function downloadBook(request: FastifyRequest<{ Body: DownloadBookBody }>, reply: FastifyReply) {
|
||||
try {
|
||||
const {
|
||||
anilist_id,
|
||||
chapter_number,
|
||||
format,
|
||||
content,
|
||||
images
|
||||
} = request.body;
|
||||
|
||||
if (!anilist_id || !chapter_number || !format) {
|
||||
return reply.status(400).send({
|
||||
error: 'MISSING_REQUIRED_FIELDS',
|
||||
required: ['anilist_id', 'chapter_number', 'format']
|
||||
});
|
||||
}
|
||||
|
||||
if (format === 'novel' && !content) {
|
||||
return reply.status(400).send({
|
||||
error: 'MISSING_CONTENT',
|
||||
message: 'content field is required for novels'
|
||||
});
|
||||
}
|
||||
|
||||
if (format === 'manga' && (!images || images.length === 0)) {
|
||||
return reply.status(400).send({
|
||||
error: 'MISSING_IMAGES',
|
||||
message: 'images field is required for manga'
|
||||
});
|
||||
}
|
||||
|
||||
const result = await downloadService.downloadBookChapter({
|
||||
anilistId: anilist_id,
|
||||
chapterNumber: chapter_number,
|
||||
format,
|
||||
content,
|
||||
images
|
||||
});
|
||||
|
||||
if (result.status === 'ALREADY_EXISTS') {
|
||||
return reply.status(409).send(result);
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (err: any) {
|
||||
console.error('Error downloading book:', err);
|
||||
|
||||
if (err.message === 'METADATA_NOT_FOUND') {
|
||||
return reply.status(404).send({ error: 'BOOK_NOT_FOUND_IN_ANILIST' });
|
||||
}
|
||||
|
||||
if (err.message === 'DOWNLOAD_FAILED') {
|
||||
return reply.status(500).send({ error: 'DOWNLOAD_FAILED', details: err.details });
|
||||
}
|
||||
|
||||
return reply.status(500).send({ error: 'FAILED_TO_DOWNLOAD_BOOK' });
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,8 @@ async function localRoutes(fastify: FastifyInstance) {
|
||||
fastify.get('/library/:id/units', controller.getUnits);
|
||||
fastify.get('/library/:unitId/manifest', controller.getManifest);
|
||||
fastify.get('/library/:unitId/resource/:resId', controller.getPage);
|
||||
fastify.post('/library/download/anime', controller.downloadAnime);
|
||||
fastify.post('/library/download/book', controller.downloadBook);
|
||||
}
|
||||
|
||||
export default localRoutes;
|
||||
@@ -7,7 +7,6 @@ import path from "path";
|
||||
import { getAnimeById, searchAnimeLocal } from "../anime/anime.service";
|
||||
import { getBookById, searchBooksAniList } from "../books/books.service";
|
||||
import AdmZip from 'adm-zip';
|
||||
import EPub from 'epub';
|
||||
|
||||
// Image extensions recognized as manga pages during library scanning.
const MANGA_IMAGE_EXTS = ['.jpg', '.jpeg', '.png', '.webp'];
// Archive extensions treated as manga chapter containers — presumably
// consumed by the scanners below; confirm usage outside this view.
const MANGA_ARCHIVES = ['.cbz', '.cbr', '.zip'];
|
||||
@@ -490,28 +489,10 @@ export async function getUnitResource(unitId: string, resId: string) {
|
||||
return null;
|
||||
}
|
||||
|
||||
function parseEpubToHtml(filePath: string): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const epub = new EPub(filePath);
|
||||
|
||||
epub.on('end', async () => {
|
||||
let html = '';
|
||||
|
||||
for (const id of epub.flow.map(f => f.id)) {
|
||||
const chapter = await new Promise<string>((res, rej) => {
|
||||
epub.getChapter(id, (err, text) => {
|
||||
if (err) rej(err);
|
||||
else res(text);
|
||||
});
|
||||
});
|
||||
|
||||
html += `<section class="ln-chapter">${chapter}</section>`;
|
||||
}
|
||||
|
||||
resolve(html);
|
||||
});
|
||||
|
||||
epub.on('error', reject);
|
||||
epub.parse();
|
||||
});
|
||||
function parseEpubToHtml(filePath: string) {
|
||||
const zip = new AdmZip(filePath);
|
||||
const entry = zip.getEntry('OEBPS/chapter.xhtml');
|
||||
if (!entry) throw new Error('CHAPTER_NOT_FOUND');
|
||||
return entry.getData().toString('utf8');
|
||||
}
|
||||
@@ -187,8 +187,7 @@ async function loadChapter() {
|
||||
|
||||
// Lógica específica para contenido LOCAL
|
||||
if (provider === 'local') {
|
||||
const unitIndex = Number(currentChapterId); // En local el ID suele ser el índice
|
||||
const unit = data.units[unitIndex];
|
||||
const unit = data.units.find(u => String(u.id) === String(currentChapterId));
|
||||
|
||||
if (!unit) {
|
||||
reader.innerHTML = '<div class="loading-container"><span>Chapter not found (Local)</span></div>';
|
||||
@@ -201,8 +200,12 @@ async function loadChapter() {
|
||||
reader.innerHTML = '';
|
||||
|
||||
// Setup navegación manual para local (simple index +/- 1)
|
||||
const unitIndex = data.units.findIndex(
|
||||
u => String(u.id) === String(currentChapterId)
|
||||
);
|
||||
setupLocalNavigation(unitIndex, data.units.length);
|
||||
|
||||
|
||||
if (manifest.type === 'manga') {
|
||||
currentType = 'manga';
|
||||
updateSettingsVisibility();
|
||||
|
||||
Reference in New Issue
Block a user