Initial commit mit MkDocs-Dokumentation
This commit is contained in:
13
backend/src/config.js
Normal file
13
backend/src/config.js
Normal file
@@ -0,0 +1,13 @@
|
||||
const path = require('path');
|
||||
|
||||
const rootDir = path.resolve(__dirname, '..');
|
||||
const rawDbPath = process.env.DB_PATH || path.join(rootDir, 'data', 'ripster.db');
|
||||
const rawLogDir = process.env.LOG_DIR || path.join(rootDir, 'logs');
|
||||
|
||||
module.exports = {
|
||||
port: process.env.PORT ? Number(process.env.PORT) : 3001,
|
||||
dbPath: path.isAbsolute(rawDbPath) ? rawDbPath : path.resolve(rootDir, rawDbPath),
|
||||
corsOrigin: process.env.CORS_ORIGIN || '*',
|
||||
logDir: path.isAbsolute(rawLogDir) ? rawLogDir : path.resolve(rootDir, rawLogDir),
|
||||
logLevel: process.env.LOG_LEVEL || 'info'
|
||||
};
|
||||
603
backend/src/db/database.js
Normal file
603
backend/src/db/database.js
Normal file
@@ -0,0 +1,603 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const sqlite3 = require('sqlite3');
|
||||
const { open } = require('sqlite');
|
||||
const { dbPath } = require('../config');
|
||||
const { defaultSchema } = require('./defaultSettings');
|
||||
const logger = require('../services/logger').child('DB');
|
||||
const { errorToMeta } = require('../utils/errorMeta');
|
||||
const { setLogRootDir, getJobLogDir } = require('../services/logPathService');
|
||||
|
||||
const schemaFilePath = path.resolve(__dirname, '../../../db/schema.sql');
|
||||
|
||||
let dbInstance;
|
||||
|
||||
// Timestamp usable in file names: ISO-8601 with ':' and '.' swapped for '-'.
function nowFileStamp() {
  const iso = new Date().toISOString();
  return iso.replace(/[:.]/g, '-');
}
|
||||
|
||||
/**
 * Heuristically decides whether an error signals a corrupt SQLite file.
 * @param {Error|null|undefined} error - Error raised by the sqlite driver.
 * @returns {boolean} true when the code or message points at corruption.
 */
function isSqliteCorruptionError(error) {
  if (!error) {
    return false;
  }

  const code = String(error.code || '').toUpperCase();
  if (code === 'SQLITE_CORRUPT') {
    return true;
  }

  const msg = String(error.message || '').toLowerCase();
  return (
    msg.includes('database disk image is malformed') ||
    msg.includes('file is not a database')
  );
}
|
||||
|
||||
/**
 * Renames sourcePath to targetPath when the source exists.
 * @returns {boolean} true when a file was moved, false when nothing existed.
 */
function moveIfExists(sourcePath, targetPath) {
  const present = fs.existsSync(sourcePath);
  if (present) {
    fs.renameSync(sourcePath, targetPath);
  }
  return present;
}
|
||||
|
||||
/**
 * Moves the (presumably corrupt) database file plus its WAL/SHM side files
 * into a timestamped "corrupt-backups" folder next to the database, then
 * logs what was quarantined. Missing side files are skipped silently.
 */
function quarantineCorruptDatabaseFiles() {
  const dir = path.dirname(dbPath);
  const base = path.basename(dbPath);
  const stamp = nowFileStamp();
  const archiveDir = path.join(dir, 'corrupt-backups');

  fs.mkdirSync(archiveDir, { recursive: true });

  const moved = [];
  for (const sourcePath of [dbPath, `${dbPath}-wal`, `${dbPath}-shm`]) {
    const fileName = path.basename(sourcePath);
    const targetPath = path.join(archiveDir, `${fileName}.${stamp}.corrupt`);
    if (moveIfExists(sourcePath, targetPath)) {
      moved.push({ from: sourcePath, to: targetPath });
    }
  }

  logger.warn('recovery:quarantine-complete', {
    dbPath,
    base,
    movedCount: moved.length,
    moved
  });
}
|
||||
|
||||
// Wraps an identifier in double quotes, doubling embedded quotes (SQLite rules).
function quoteIdentifier(identifier) {
  const raw = String(identifier || '');
  const escaped = raw.replace(/"/g, '""');
  return `"${escaped}"`;
}
|
||||
|
||||
// Canonical form of a SQL type for comparisons: trimmed, single-spaced, uppercase.
function normalizeSqlType(value) {
  const text = String(value || '').trim();
  return text.replace(/\s+/g, ' ').toUpperCase();
}
|
||||
|
||||
// Canonical form of a column default for comparisons; '' stands for NULL/undefined.
function normalizeDefault(value) {
  if (value === null || value === undefined) {
    return '';
  }
  const text = String(value).trim();
  return text.replace(/\s+/g, ' ').toUpperCase();
}
|
||||
|
||||
/**
 * Compares two PRAGMA table_info() result sets column by column.
 * Order matters; name, normalized type, notnull, pk and normalized default
 * must all match for the shapes to be considered equal.
 */
function sameTableShape(current = [], desired = []) {
  if (current.length !== desired.length) {
    return false;
  }
  return current.every((left, index) => {
    const right = desired[index];
    if (!left || !right) {
      return false;
    }
    return (
      String(left.name || '') === String(right.name || '') &&
      normalizeSqlType(left.type) === normalizeSqlType(right.type) &&
      Number(left.notnull || 0) === Number(right.notnull || 0) &&
      Number(left.pk || 0) === Number(right.pk || 0) &&
      normalizeDefault(left.dflt_value) === normalizeDefault(right.dflt_value)
    );
  });
}
|
||||
|
||||
/**
 * Compares two PRAGMA foreign_key_list() result sets entry by entry.
 * Order matters; id/seq are compared numerically, all descriptive fields
 * (table, from, to, on_update, on_delete, match) as strings.
 */
function sameForeignKeys(current = [], desired = []) {
  if (current.length !== desired.length) {
    return false;
  }

  const numericFields = ['id', 'seq'];
  const textFields = ['table', 'from', 'to', 'on_update', 'on_delete', 'match'];

  return current.every((left, index) => {
    const right = desired[index];
    if (!left || !right) {
      return false;
    }
    const numbersMatch = numericFields.every(
      (field) => Number(left[field] || 0) === Number(right[field] || 0)
    );
    if (!numbersMatch) {
      return false;
    }
    return textFields.every(
      (field) => String(left[field] || '') === String(right[field] || '')
    );
  });
}
|
||||
|
||||
/**
 * Checks sqlite_master for a table with the given name.
 * @returns {Promise<boolean>} true when the table exists.
 */
async function tableExists(db, tableName) {
  const sql = `SELECT 1 as ok FROM sqlite_master WHERE type = 'table' AND name = ? LIMIT 1`;
  const row = await db.get(sql, [tableName]);
  return row != null;
}
|
||||
|
||||
// Column metadata for a table via PRAGMA table_info (identifier safely quoted).
async function getTableInfo(db, tableName) {
  const tableRef = quoteIdentifier(tableName);
  return db.all(`PRAGMA table_info(${tableRef})`);
}
|
||||
|
||||
// Foreign key metadata for a table via PRAGMA foreign_key_list (identifier safely quoted).
async function getForeignKeyInfo(db, tableName) {
  const tableRef = quoteIdentifier(tableName);
  return db.all(`PRAGMA foreign_key_list(${tableRef})`);
}
|
||||
|
||||
/**
 * Reads the configured 'log_dir' setting from the settings tables,
 * preferring the stored value over the schema default.
 * @param {object} db - Open sqlite database handle.
 * @returns {Promise<string|null>} Trimmed directory string, or null when the
 *   tables are missing, the value is empty, or the query fails.
 */
async function readConfiguredLogDirSetting(db) {
  // Both tables must exist; on first boot the schema may not be applied yet.
  const hasSchemaTable = await tableExists(db, 'settings_schema');
  const hasValuesTable = await tableExists(db, 'settings_values');
  if (!hasSchemaTable || !hasValuesTable) {
    return null;
  }

  try {
    const row = await db.get(
      `
      SELECT
        COALESCE(v.value, s.default_value, '') AS value
      FROM settings_schema s
      LEFT JOIN settings_values v ON v.key = s.key
      WHERE s.key = ?
      LIMIT 1
      `,
      ['log_dir']
    );
    // Empty/whitespace-only values are treated the same as "not configured".
    const value = String(row?.value || '').trim();
    return value || null;
  } catch (error) {
    // Best effort: a failed read must not block startup; callers fall back
    // to the default log root when null is returned.
    logger.warn('log-root:read-setting-failed', {
      error: error?.message || String(error)
    });
    return null;
  }
}
|
||||
|
||||
/**
 * Points the runtime log root at the directory configured in settings.
 * @param {object} db - Open sqlite database handle.
 * @param {{ensure?: boolean}} [options] - When ensure is true, the resolved
 *   directory is created; if creation fails, the default root is used instead.
 * @returns {Promise<{configured: string|null, resolved: string}>} The raw
 *   configured value (if any) and the log root actually in effect.
 */
async function configureRuntimeLogRootFromSettings(db, options = {}) {
  const ensure = Boolean(options.ensure);
  const configured = await readConfiguredLogDirSetting(db);
  // setLogRootDir(null) switches the log service back to its built-in default.
  let resolved = setLogRootDir(configured);
  if (ensure) {
    try {
      fs.mkdirSync(resolved, { recursive: true });
    } catch (error) {
      // Configured dir is not creatable (permissions, missing mount, ...):
      // revert to the default root and try to create that one instead.
      const fallbackResolved = setLogRootDir(null);
      try {
        fs.mkdirSync(fallbackResolved, { recursive: true });
      } catch (_fallbackError) {
        // ignored: logger itself is hardened and may still write to console only
      }
      logger.warn('log-root:ensure-failed', {
        configured: configured || null,
        resolved,
        fallbackResolved,
        error: error?.message || String(error)
      });
      resolved = fallbackResolved;
    }
  }
  return {
    configured,
    resolved
  };
}
|
||||
|
||||
/**
 * Loads db/schema.sql and materializes it in an in-memory SQLite database to
 * derive a structural model: table and index DDL plus per-table PRAGMA
 * table_info and foreign_key_list snapshots.
 * @returns {Promise<{schemaSql: string, tables: Array, indexes: Array,
 *   tableInfos: Object, tableForeignKeys: Object}>}
 * @throws {Error} with code 'SCHEMA_FILE_MISSING' when schema.sql is absent.
 */
async function loadSchemaModel() {
  if (!fs.existsSync(schemaFilePath)) {
    const error = new Error(`Schema-Datei fehlt: ${schemaFilePath}`);
    error.code = 'SCHEMA_FILE_MISSING';
    throw error;
  }

  const schemaSql = fs.readFileSync(schemaFilePath, 'utf-8');
  // Scratch database: letting SQLite itself execute the DDL means the model
  // reflects exactly what the engine would create, not a hand-rolled parse.
  const memDb = await open({
    filename: ':memory:',
    driver: sqlite3.Database
  });

  try {
    await memDb.exec(schemaSql);
    const tables = await memDb.all(`
      SELECT name, sql
      FROM sqlite_master
      WHERE type = 'table'
        AND name NOT LIKE 'sqlite_%'
      ORDER BY rowid ASC
    `);
    // Only explicit indexes carry SQL text; auto-created ones have sql = NULL.
    const indexes = await memDb.all(`
      SELECT name, tbl_name AS tableName, sql
      FROM sqlite_master
      WHERE type = 'index'
        AND name NOT LIKE 'sqlite_%'
        AND sql IS NOT NULL
      ORDER BY rowid ASC
    `);
    const tableInfos = {};
    const tableForeignKeys = {};
    for (const table of tables) {
      tableInfos[table.name] = await getTableInfo(memDb, table.name);
      tableForeignKeys[table.name] = await getForeignKeyInfo(memDb, table.name);
    }

    return {
      schemaSql,
      tables,
      indexes,
      tableInfos,
      tableForeignKeys
    };
  } finally {
    await memDb.close();
  }
}
|
||||
|
||||
/**
 * Rebuilds a table to match new DDL while preserving data in columns present
 * in both the old and the new definition. Sequence:
 * rename old -> create new -> copy shared columns -> drop old.
 * NOTE(review): expects to run with foreign_keys OFF inside a transaction
 * (applySchemaModel provides both) — confirm before calling elsewhere.
 */
async function rebuildTable(db, tableName, createSql) {
  // Timestamped temp name so repeated rebuilds cannot collide.
  const oldName = `${tableName}__old_${Date.now()}`;
  const tableNameQuoted = quoteIdentifier(tableName);
  const oldNameQuoted = quoteIdentifier(oldName);
  const beforeInfo = await getTableInfo(db, tableName);

  await db.exec(`ALTER TABLE ${tableNameQuoted} RENAME TO ${oldNameQuoted}`);
  await db.exec(createSql);

  const afterInfo = await getTableInfo(db, tableName);
  const beforeColumns = new Set(beforeInfo.map((column) => String(column.name)));
  // Intersection of old and new columns, in the NEW table's column order.
  const commonColumns = afterInfo
    .map((column) => String(column.name))
    .filter((name) => beforeColumns.has(name));

  if (commonColumns.length > 0) {
    const columnList = commonColumns.map((name) => quoteIdentifier(name)).join(', ');
    await db.exec(`
      INSERT INTO ${tableNameQuoted} (${columnList})
      SELECT ${columnList}
      FROM ${oldNameQuoted}
    `);
  }

  await db.exec(`DROP TABLE ${oldNameQuoted}`);
}
|
||||
|
||||
/**
 * Converges the live database onto the desired schema model:
 * creates missing tables, rebuilds tables whose column shape or foreign keys
 * differ, drops tables and indexes not present in the model, and (re)creates
 * the model's indexes idempotently.
 * DESTRUCTIVE: tables absent from the model are dropped including their data.
 * Expected to run inside applySchemaModel's transaction with FKs off.
 */
async function syncSchemaToModel(db, model) {
  // Defensive normalization: a partial or malformed model degrades to empty.
  const desiredTables = Array.isArray(model?.tables) ? model.tables : [];
  const desiredIndexes = Array.isArray(model?.indexes) ? model.indexes : [];
  const desiredTableInfo = model?.tableInfos && typeof model.tableInfos === 'object'
    ? model.tableInfos
    : {};
  const desiredTableForeignKeys = model?.tableForeignKeys && typeof model.tableForeignKeys === 'object'
    ? model.tableForeignKeys
    : {};

  const currentTables = await db.all(`
    SELECT name, sql
    FROM sqlite_master
    WHERE type = 'table'
      AND name NOT LIKE 'sqlite_%'
    ORDER BY rowid ASC
  `);
  const currentByName = new Map(currentTables.map((table) => [table.name, table]));
  const desiredTableNameSet = new Set(desiredTables.map((table) => table.name));

  // Phase 1: create missing tables; rebuild structurally divergent ones.
  for (const table of desiredTables) {
    const tableName = String(table.name || '');
    const createSql = String(table.sql || '').trim();
    if (!tableName || !createSql) {
      continue;
    }

    if (!currentByName.has(tableName)) {
      await db.exec(createSql);
      logger.info('schema:create-table', { table: tableName });
      continue;
    }

    const currentInfo = await getTableInfo(db, tableName);
    const wantedInfo = Array.isArray(desiredTableInfo[tableName]) ? desiredTableInfo[tableName] : [];
    const currentFks = await getForeignKeyInfo(db, tableName);
    const wantedFks = Array.isArray(desiredTableForeignKeys[tableName]) ? desiredTableForeignKeys[tableName] : [];
    const shapeMatches = sameTableShape(currentInfo, wantedInfo);
    const foreignKeysMatch = sameForeignKeys(currentFks, wantedFks);
    if (!shapeMatches || !foreignKeysMatch) {
      // rebuildTable preserves data in columns shared by old and new shape.
      await rebuildTable(db, tableName, createSql);
      logger.warn('schema:rebuild-table', {
        table: tableName,
        reason: !shapeMatches ? 'shape-mismatch' : 'foreign-key-mismatch'
      });
    }
  }

  // Phase 2: drop tables the model no longer contains (data is lost).
  for (const table of currentTables) {
    if (desiredTableNameSet.has(table.name)) {
      continue;
    }
    await db.exec(`DROP TABLE IF EXISTS ${quoteIdentifier(table.name)}`);
    logger.warn('schema:drop-table', { table: table.name });
  }

  // Phase 3: drop stale explicit indexes (sql IS NULL filters auto-indexes).
  const currentIndexes = await db.all(`
    SELECT name, tbl_name AS tableName, sql
    FROM sqlite_master
    WHERE type = 'index'
      AND name NOT LIKE 'sqlite_%'
      AND sql IS NOT NULL
    ORDER BY rowid ASC
  `);
  const desiredIndexNameSet = new Set(desiredIndexes.map((index) => index.name));

  for (const index of currentIndexes) {
    if (desiredIndexNameSet.has(index.name)) {
      continue;
    }
    await db.exec(`DROP INDEX IF EXISTS ${quoteIdentifier(index.name)}`);
    logger.warn('schema:drop-index', { index: index.name, table: index.tableName });
  }

  // Phase 4: (re)create desired indexes, rewritten with IF NOT EXISTS so the
  // statement is idempotent against indexes that already exist.
  for (const index of desiredIndexes) {
    let sql = String(index.sql || '').trim();
    if (!sql) {
      continue;
    }
    if (/^CREATE\s+UNIQUE\s+INDEX\s+/i.test(sql)) {
      sql = sql.replace(/^CREATE\s+UNIQUE\s+INDEX\s+/i, 'CREATE UNIQUE INDEX IF NOT EXISTS ');
    } else if (/^CREATE\s+INDEX\s+/i.test(sql)) {
      sql = sql.replace(/^CREATE\s+INDEX\s+/i, 'CREATE INDEX IF NOT EXISTS ');
    }
    await db.exec(sql);
  }
}
|
||||
|
||||
/**
 * One-time migration: dumps rows from the legacy job_logs table into per-job
 * files (job-<id>.process.log) inside the job log directory. Files open with
 * flag 'w', so a re-run overwrites previous exports. No-op when the table is
 * missing or empty.
 */
async function exportLegacyJobLogsToFiles(db) {
  const hasJobLogsTable = await tableExists(db, 'job_logs');
  if (!hasJobLogsTable) {
    return;
  }

  const rows = await db.all(`
    SELECT job_id, source, message, timestamp
    FROM job_logs
    ORDER BY job_id ASC, id ASC
  `);
  if (!Array.isArray(rows) || rows.length === 0) {
    logger.info('legacy-job-logs:export:skip-empty');
    return;
  }

  const targetDir = getJobLogDir();
  fs.mkdirSync(targetDir, { recursive: true });
  // One write stream per job id, created lazily on first row for that job.
  const streams = new Map();

  try {
    for (const row of rows) {
      const jobId = Number(row?.job_id);
      // Skip rows with missing/invalid job ids instead of failing the export.
      if (!Number.isFinite(jobId) || jobId <= 0) {
        continue;
      }
      const key = String(Math.trunc(jobId));
      if (!streams.has(key)) {
        const filePath = path.join(targetDir, `job-${key}.process.log`);
        const stream = fs.createWriteStream(filePath, {
          flags: 'w',
          encoding: 'utf-8'
        });
        streams.set(key, stream);
      }
      const line = `[${String(row?.timestamp || '')}] [${String(row?.source || 'SYSTEM')}] ${String(row?.message || '')}\n`;
      streams.get(key).write(line);
    }
  } finally {
    // Flush and close every stream (end() flushes buffered writes) before
    // reporting completion, even when the loop above threw.
    await Promise.all(
      [...streams.values()].map(
        (stream) =>
          new Promise((resolve) => {
            stream.end(resolve);
          })
      )
    );
  }

  // NOTE(review): job_logs is not cleared here — presumably the subsequent
  // schema sync drops the legacy table; confirm to avoid repeated exports.
  logger.warn('legacy-job-logs:exported', {
    lines: rows.length,
    jobs: streams.size,
    targetDir
  });
}
|
||||
|
||||
/**
 * Applies the schema model atomically: foreign key enforcement is suspended,
 * the sync runs inside a transaction, and PRAGMA foreign_keys is restored in
 * the finally block regardless of commit or rollback.
 */
async function applySchemaModel(db, model) {
  // FKs must be off while tables are renamed/rebuilt by the sync.
  await db.exec('PRAGMA foreign_keys = OFF;');
  await db.exec('BEGIN');
  try {
    await syncSchemaToModel(db, model);
    await db.exec('COMMIT');
  } catch (error) {
    await db.exec('ROLLBACK');
    throw error;
  } finally {
    await db.exec('PRAGMA foreign_keys = ON;');
  }
}
|
||||
|
||||
/**
 * Opens the SQLite database at dbPath and runs the full boot sequence:
 * WAL + foreign keys, log-root configuration, legacy job-log export,
 * schema sync, settings seeding, and pipeline-state bootstrap.
 * Assigns the module-level dbInstance and returns it.
 */
async function openAndPrepareDatabase() {
  fs.mkdirSync(path.dirname(dbPath), { recursive: true });
  logger.info('init:open', { dbPath });

  dbInstance = await open({
    filename: dbPath,
    driver: sqlite3.Database
  });

  await dbInstance.exec('PRAGMA journal_mode = WAL;');
  await dbInstance.exec('PRAGMA foreign_keys = ON;');
  // First pass: configure the log root from whatever settings already exist
  // (may be none on a fresh database).
  const initialLogRoot = await configureRuntimeLogRootFromSettings(dbInstance, { ensure: true });
  logger.info('log-root:initialized', {
    configured: initialLogRoot.configured || null,
    resolved: initialLogRoot.resolved
  });
  // NOTE(review): export runs BEFORE the schema sync, presumably so legacy
  // job_logs rows survive a table drop during sync — confirm.
  await exportLegacyJobLogsToFiles(dbInstance);
  const schemaModel = await loadSchemaModel();
  await applySchemaModel(dbInstance, schemaModel);

  await seedDefaultSettings(dbInstance);
  await removeDeprecatedSettings(dbInstance);
  await ensurePipelineStateRow(dbInstance);
  // Second pass: after seeding, the log_dir setting is guaranteed to exist.
  const syncedLogRoot = await configureRuntimeLogRootFromSettings(dbInstance, { ensure: true });
  logger.info('log-root:synced', {
    configured: syncedLogRoot.configured || null,
    resolved: syncedLogRoot.resolved
  });
  logger.info('init:done');
  return dbInstance;
}
|
||||
|
||||
/**
 * Returns the shared database handle, opening and preparing it on first call
 * (subsequent calls return the cached dbInstance).
 * When init fails with a recognized corruption error, the damaged files are
 * quarantined and exactly one retry starts from a fresh database
 * (allowRecovery = false prevents infinite recursion).
 * @throws The original error for non-corruption failures, or the retry's
 *   error when recovery itself fails.
 */
async function initDatabase({ allowRecovery = true } = {}) {
  if (dbInstance) {
    return dbInstance;
  }

  try {
    return await openAndPrepareDatabase();
  } catch (error) {
    logger.error('init:failed', { error: errorToMeta(error), allowRecovery });

    // openAndPrepareDatabase may have assigned dbInstance before failing;
    // close and clear it so the retry (or the next caller) starts clean.
    if (dbInstance) {
      try {
        await dbInstance.close();
      } catch (_closeError) {
        // ignore close errors during failed init
      }
      dbInstance = undefined;
    }

    if (allowRecovery && isSqliteCorruptionError(error)) {
      logger.warn('recovery:corrupt-db-detected', { dbPath });
      quarantineCorruptDatabaseFiles();
      // Single retry with recovery disabled.
      return initDatabase({ allowRecovery: false });
    }

    throw error;
  }
}
|
||||
|
||||
/**
 * Upserts the built-in settings catalog and ensures a value row per key.
 * Schema metadata (category, label, type, validation, ...) is always
 * refreshed from code; an existing default_value and any user-edited value
 * rows are preserved.
 */
async function seedDefaultSettings(db) {
  let seeded = 0;
  for (const item of defaultSchema) {
    await db.run(
      `
      INSERT INTO settings_schema
        (key, category, label, type, required, description, default_value, options_json, validation_json, order_index)
      VALUES
        (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      ON CONFLICT(key) DO UPDATE SET
        category = excluded.category,
        label = excluded.label,
        type = excluded.type,
        required = excluded.required,
        description = excluded.description,
        default_value = COALESCE(settings_schema.default_value, excluded.default_value),
        options_json = excluded.options_json,
        validation_json = excluded.validation_json,
        order_index = excluded.order_index,
        updated_at = CURRENT_TIMESTAMP
      `,
      [
        item.key,
        item.category,
        item.label,
        item.type,
        item.required,
        item.description || null,
        // NOTE(review): `|| null` maps empty-string defaults to NULL; readers
        // COALESCE back to '' — confirm this is the intended round trip.
        item.defaultValue || null,
        JSON.stringify(item.options || []),
        JSON.stringify(item.validation || {}),
        item.orderIndex || 0
      ]
    );

    // Value row is only created when missing; user edits are never overwritten.
    await db.run(
      `
      INSERT INTO settings_values (key, value)
      VALUES (?, ?)
      ON CONFLICT(key) DO NOTHING
      `,
      [item.key, item.defaultValue || null]
    );
    seeded += 1;
  }
  logger.info('seed:settings', { count: seeded });
}
|
||||
|
||||
/**
 * Guarantees the singleton pipeline_state row (id = 1) exists, inserting an
 * IDLE default and leaving any existing row untouched.
 */
async function ensurePipelineStateRow(db) {
  const sql = `
    INSERT INTO pipeline_state (id, state, active_job_id, progress, eta, status_text, context_json)
    VALUES (1, 'IDLE', NULL, 0, NULL, NULL, '{}')
    ON CONFLICT(id) DO NOTHING
  `;
  await db.run(sql);
}
|
||||
|
||||
/**
 * Deletes settings keys that are no longer part of the product and logs each
 * key whose delete actually removed a row.
 */
async function removeDeprecatedSettings(db) {
  const deprecatedKeys = ['pushover_notify_disc_detected'];
  for (const key of deprecatedKeys) {
    const result = await db.run('DELETE FROM settings_schema WHERE key = ?', [key]);
    const removed = Number(result?.changes || 0);
    if (removed > 0) {
      logger.info('migrate:remove-deprecated-setting', { key });
    }
  }
}
|
||||
|
||||
// Convenience accessor: resolves to the shared handle, initializing on first use.
async function getDb() {
  const db = await initDatabase();
  return db;
}
|
||||
|
||||
// Public API: initDatabase for explicit boot (supports recovery options),
// getDb as the lazy accessor used by the rest of the backend.
module.exports = {
  initDatabase,
  getDb
};
|
||||
463
backend/src/db/defaultSettings.js
Normal file
463
backend/src/db/defaultSettings.js
Normal file
@@ -0,0 +1,463 @@
|
||||
// Built-in settings catalog. Each entry seeds one row in settings_schema
// (see seedDefaultSettings in db/database.js):
//   key          - unique settings key
//   category     - UI grouping (German labels by design; they are runtime strings)
//   label/type   - presentation + input widget ('string'|'number'|'select'|'boolean'|'path')
//   required     - 1/0 flag
//   defaultValue - stored as string; numbers/booleans are stringified
//   options      - choices for 'select' inputs
//   validation   - constraints (min/max/minLength) enforced elsewhere
//   orderIndex   - sort order within the UI
const defaultSchema = [
  // --- Laufwerk: optical drive discovery and polling ---
  {
    key: 'drive_mode',
    category: 'Laufwerk',
    label: 'Laufwerksmodus',
    type: 'select',
    required: 1,
    description: 'Auto-Discovery oder explizites Device.',
    defaultValue: 'auto',
    options: [
      { label: 'Auto Discovery', value: 'auto' },
      { label: 'Explizites Device', value: 'explicit' }
    ],
    validation: {},
    orderIndex: 10
  },
  {
    key: 'drive_device',
    category: 'Laufwerk',
    label: 'Device Pfad',
    type: 'path',
    required: 0,
    description: 'Nur für expliziten Modus, z.B. /dev/sr0.',
    defaultValue: '/dev/sr0',
    options: [],
    validation: {},
    orderIndex: 20
  },
  {
    key: 'makemkv_source_index',
    category: 'Laufwerk',
    label: 'MakeMKV Source Index',
    type: 'number',
    required: 1,
    description: 'Disc Index im Auto-Modus.',
    defaultValue: '0',
    options: [],
    validation: { min: 0, max: 20 },
    orderIndex: 30
  },
  {
    key: 'disc_poll_interval_ms',
    category: 'Laufwerk',
    label: 'Polling Intervall (ms)',
    type: 'number',
    required: 1,
    description: 'Intervall für Disk-Erkennung.',
    defaultValue: '4000',
    options: [],
    validation: { min: 1000, max: 60000 },
    orderIndex: 40
  },
  // --- Pfade: working/output/log directories ---
  {
    key: 'raw_dir',
    category: 'Pfade',
    label: 'Raw Ausgabeordner',
    type: 'path',
    required: 1,
    description: 'Zwischenablage für MakeMKV Rip.',
    defaultValue: '/mnt/arm-storage/media/raw',
    options: [],
    validation: { minLength: 1 },
    orderIndex: 100
  },
  {
    key: 'movie_dir',
    category: 'Pfade',
    label: 'Film Ausgabeordner',
    type: 'path',
    required: 1,
    description: 'Finale HandBrake Ausgabe.',
    defaultValue: '/mnt/arm-storage/media/movies',
    options: [],
    validation: { minLength: 1 },
    orderIndex: 110
  },
  {
    key: 'log_dir',
    category: 'Pfade',
    label: 'Log Ordner',
    type: 'path',
    required: 1,
    description: 'Basisordner für Logs. Job-Logs liegen direkt hier, Backend-Logs in /backend.',
    defaultValue: '/mnt/arm-storage/logs',
    options: [],
    validation: { minLength: 1 },
    orderIndex: 120
  },
  // --- Tools: external CLI commands and their tuning ---
  {
    key: 'makemkv_command',
    category: 'Tools',
    label: 'MakeMKV Kommando',
    type: 'string',
    required: 1,
    description: 'Pfad oder Befehl für makemkvcon.',
    defaultValue: 'makemkvcon',
    options: [],
    validation: { minLength: 1 },
    orderIndex: 200
  },
  {
    key: 'makemkv_registration_key',
    category: 'Tools',
    label: 'MakeMKV Key',
    type: 'string',
    required: 0,
    description: 'Optionaler Registrierungsschlüssel. Wird vor Analyze/Rip automatisch per "makemkvcon reg" gesetzt.',
    defaultValue: '',
    options: [],
    validation: {},
    orderIndex: 202
  },
  {
    key: 'mediainfo_command',
    category: 'Tools',
    label: 'Mediainfo Kommando',
    type: 'string',
    required: 1,
    description: 'Pfad oder Befehl für mediainfo.',
    defaultValue: 'mediainfo',
    options: [],
    validation: { minLength: 1 },
    orderIndex: 205
  },
  {
    key: 'mediainfo_extra_args',
    category: 'Tools',
    label: 'Mediainfo Extra Args',
    type: 'string',
    required: 0,
    description: 'Zusätzliche CLI-Parameter für mediainfo.',
    defaultValue: '',
    options: [],
    validation: {},
    orderIndex: 206
  },
  {
    key: 'makemkv_min_length_minutes',
    category: 'Tools',
    label: 'Minimale Titellänge (Minuten)',
    type: 'number',
    required: 1,
    description: 'Filtert kurze Titel beim Rip.',
    defaultValue: '60',
    options: [],
    validation: { min: 1, max: 1000 },
    orderIndex: 210
  },
  {
    key: 'makemkv_rip_mode',
    category: 'Tools',
    label: 'MakeMKV Rip Modus',
    type: 'select',
    required: 1,
    description: 'mkv: direkte MKV-Dateien; backup: vollständige Blu-ray Struktur im RAW-Ordner.',
    defaultValue: 'backup',
    options: [
      { label: 'MKV', value: 'mkv' },
      { label: 'Backup', value: 'backup' }
    ],
    validation: {},
    orderIndex: 212
  },
  {
    key: 'makemkv_analyze_extra_args',
    category: 'Tools',
    label: 'MakeMKV Analyze Extra Args',
    type: 'string',
    required: 0,
    description: 'Zusätzliche CLI-Parameter für Analyze.',
    defaultValue: '',
    options: [],
    validation: {},
    orderIndex: 220
  },
  {
    key: 'makemkv_rip_extra_args',
    category: 'Tools',
    label: 'MakeMKV Rip Extra Args',
    type: 'string',
    required: 0,
    description: 'Zusätzliche CLI-Parameter für Rip.',
    defaultValue: '',
    options: [],
    validation: {},
    orderIndex: 230
  },
  {
    key: 'handbrake_command',
    category: 'Tools',
    label: 'HandBrake Kommando',
    type: 'string',
    required: 1,
    description: 'Pfad oder Befehl für HandBrakeCLI.',
    defaultValue: 'HandBrakeCLI',
    options: [],
    validation: { minLength: 1 },
    orderIndex: 300
  },
  {
    key: 'handbrake_preset',
    category: 'Tools',
    label: 'HandBrake Preset',
    type: 'string',
    required: 1,
    description: 'Preset Name für -Z.',
    defaultValue: 'H.264 MKV 1080p30',
    options: [],
    validation: { minLength: 1 },
    orderIndex: 310
  },
  {
    key: 'handbrake_extra_args',
    category: 'Tools',
    label: 'HandBrake Extra Args',
    type: 'string',
    required: 0,
    description: 'Zusätzliche CLI-Argumente.',
    defaultValue: '--audio-lang-list deu,eng --first-audio --subtitle-lang-list deu,eng --first-subtitle --aencoder copy --audio-copy-mask ac3,eac3,dts --audio-fallback ac3 --encoder-preset slow --quality 18 --encoder-tune film --encoder-profile high --encoder-level 4.1',
    options: [],
    validation: {},
    orderIndex: 320
  },
  {
    key: 'output_extension',
    category: 'Tools',
    label: 'Ausgabeformat',
    type: 'select',
    required: 1,
    description: 'Dateiendung für finale Datei.',
    defaultValue: 'mkv',
    options: [
      { label: 'MKV', value: 'mkv' },
      { label: 'MP4', value: 'mp4' }
    ],
    validation: {},
    orderIndex: 330
  },
  {
    // Template tokens are expanded elsewhere; the single quotes keep
    // '${title} (${year})' a literal string, not a template literal.
    key: 'filename_template',
    category: 'Tools',
    label: 'Dateiname Template',
    type: 'string',
    required: 1,
    description: 'Verfügbare Tokens: ${title}, ${year}, ${imdbId}.',
    defaultValue: '${title} (${year})',
    options: [],
    validation: { minLength: 1 },
    orderIndex: 340
  },
  // --- Metadaten: OMDb lookup ---
  {
    key: 'omdb_api_key',
    category: 'Metadaten',
    label: 'OMDb API Key',
    type: 'string',
    required: 0,
    description: 'API Key für Metadatensuche.',
    // NOTE(review): hardcoded API key shipped as a default — move to an
    // environment variable / secret store and rotate the key.
    defaultValue: '186322c4',
    options: [],
    validation: {},
    orderIndex: 400
  },
  {
    key: 'omdb_default_type',
    category: 'Metadaten',
    label: 'OMDb Typ',
    type: 'select',
    required: 1,
    description: 'Vorauswahl für Suche.',
    defaultValue: 'movie',
    options: [
      { label: 'Movie', value: 'movie' },
      { label: 'Series', value: 'series' },
      { label: 'Episode', value: 'episode' }
    ],
    validation: {},
    orderIndex: 410
  },
  // --- Benachrichtigungen: PushOver delivery and per-event toggles ---
  {
    key: 'pushover_enabled',
    category: 'Benachrichtigungen',
    label: 'PushOver aktiviert',
    type: 'boolean',
    required: 1,
    description: 'Master-Schalter für PushOver Versand.',
    defaultValue: 'false',
    options: [],
    validation: {},
    orderIndex: 500
  },
  {
    key: 'pushover_token',
    category: 'Benachrichtigungen',
    label: 'PushOver Token',
    type: 'string',
    required: 0,
    description: 'Application Token für PushOver.',
    // NOTE(review): hardcoded credential shipped as a default — move to an
    // environment variable / secret store and rotate the token.
    defaultValue: 'a476diddeew53w8fi4kv88n6ghbfqq',
    options: [],
    validation: {},
    orderIndex: 510
  },
  {
    key: 'pushover_user',
    category: 'Benachrichtigungen',
    label: 'PushOver User',
    type: 'string',
    required: 0,
    description: 'User-Key für PushOver.',
    // NOTE(review): hardcoded credential shipped as a default — move to an
    // environment variable / secret store and rotate the key.
    defaultValue: 'u47227hupodan28a629az1k43644jg',
    options: [],
    validation: {},
    orderIndex: 520
  },
  {
    key: 'pushover_device',
    category: 'Benachrichtigungen',
    label: 'PushOver Device (optional)',
    type: 'string',
    required: 0,
    description: 'Optionales Ziel-Device in PushOver.',
    defaultValue: '',
    options: [],
    validation: {},
    orderIndex: 530
  },
  {
    key: 'pushover_title_prefix',
    category: 'Benachrichtigungen',
    label: 'PushOver Titel-Präfix',
    type: 'string',
    required: 1,
    description: 'Prefix im PushOver Titel.',
    defaultValue: 'Ripster',
    options: [],
    validation: { minLength: 1 },
    orderIndex: 540
  },
  {
    key: 'pushover_priority',
    category: 'Benachrichtigungen',
    label: 'PushOver Priority',
    type: 'number',
    required: 1,
    description: 'Priorität -2 bis 2.',
    defaultValue: '0',
    options: [],
    validation: { min: -2, max: 2 },
    orderIndex: 550
  },
  {
    key: 'pushover_timeout_ms',
    category: 'Benachrichtigungen',
    label: 'PushOver Timeout (ms)',
    type: 'number',
    required: 1,
    description: 'HTTP Timeout für PushOver Requests.',
    defaultValue: '7000',
    options: [],
    validation: { min: 1000, max: 60000 },
    orderIndex: 560
  },
  {
    key: 'pushover_notify_metadata_ready',
    category: 'Benachrichtigungen',
    label: 'Bei Metadaten-Auswahl senden',
    type: 'boolean',
    required: 1,
    description: 'Sendet wenn Metadaten zur Auswahl bereitstehen.',
    defaultValue: 'true',
    options: [],
    validation: {},
    orderIndex: 570
  },
  {
    key: 'pushover_notify_rip_started',
    category: 'Benachrichtigungen',
    label: 'Bei Rip-Start senden',
    type: 'boolean',
    required: 1,
    description: 'Sendet beim Start des MakeMKV-Rips.',
    defaultValue: 'true',
    options: [],
    validation: {},
    orderIndex: 580
  },
  {
    key: 'pushover_notify_encoding_started',
    category: 'Benachrichtigungen',
    label: 'Bei Encode-Start senden',
    type: 'boolean',
    required: 1,
    description: 'Sendet beim Start von HandBrake.',
    defaultValue: 'true',
    options: [],
    validation: {},
    orderIndex: 590
  },
  {
    key: 'pushover_notify_job_finished',
    category: 'Benachrichtigungen',
    label: 'Bei Erfolg senden',
    type: 'boolean',
    required: 1,
    description: 'Sendet bei erfolgreich abgeschlossenem Job.',
    defaultValue: 'true',
    options: [],
    validation: {},
    orderIndex: 600
  },
  {
    key: 'pushover_notify_job_error',
    category: 'Benachrichtigungen',
    label: 'Bei Fehler senden',
    type: 'boolean',
    required: 1,
    description: 'Sendet bei Fehlern in der Pipeline.',
    defaultValue: 'true',
    options: [],
    validation: {},
    orderIndex: 610
  },
  {
    key: 'pushover_notify_job_cancelled',
    category: 'Benachrichtigungen',
    label: 'Bei Abbruch senden',
    type: 'boolean',
    required: 1,
    description: 'Sendet wenn Job manuell abgebrochen wurde.',
    defaultValue: 'true',
    options: [],
    validation: {},
    orderIndex: 620
  },
  {
    key: 'pushover_notify_reencode_started',
    category: 'Benachrichtigungen',
    label: 'Bei Re-Encode Start senden',
    type: 'boolean',
    required: 1,
    description: 'Sendet beim Start von RAW Re-Encode.',
    defaultValue: 'true',
    options: [],
    validation: {},
    orderIndex: 630
  },
  {
    key: 'pushover_notify_reencode_finished',
    category: 'Benachrichtigungen',
    label: 'Bei Re-Encode Erfolg senden',
    type: 'boolean',
    required: 1,
    description: 'Sendet bei erfolgreichem RAW Re-Encode.',
    defaultValue: 'true',
    options: [],
    validation: {},
    orderIndex: 640
  }
];

// Consumed by db/database.js (seedDefaultSettings).
module.exports = {
  defaultSchema
};
|
||||
95
backend/src/index.js
Normal file
95
backend/src/index.js
Normal file
@@ -0,0 +1,95 @@
|
||||
// Load .env before any module reads process.env (config.js does at require time).
require('dotenv').config();

const http = require('http');
const express = require('express');
const cors = require('cors');
const { port, corsOrigin } = require('./config');
const { initDatabase } = require('./db/database');
const errorHandler = require('./middleware/errorHandler');
const requestLogger = require('./middleware/requestLogger');
const settingsRoutes = require('./routes/settingsRoutes');
const pipelineRoutes = require('./routes/pipelineRoutes');
const historyRoutes = require('./routes/historyRoutes');
const wsService = require('./services/websocketService');
const pipelineService = require('./services/pipelineService');
const diskDetectionService = require('./services/diskDetectionService');
const logger = require('./services/logger').child('BOOT');
const { errorToMeta } = require('./utils/errorMeta');

/**
 * Boots the backend in a fixed order: database -> pipeline service ->
 * Express app + routes -> HTTP server + WebSocket -> disc detection ->
 * listen. Also wires process-level signal and error handlers.
 */
async function start() {
  logger.info('backend:start:init');
  // DB must be ready before the pipeline restores its state from it.
  await initDatabase();
  await pipelineService.init();

  const app = express();
  app.use(cors({ origin: corsOrigin }));
  app.use(express.json({ limit: '2mb' }));
  app.use(requestLogger);

  // Liveness probe; bypasses all route modules.
  app.get('/api/health', (req, res) => {
    res.json({ ok: true, now: new Date().toISOString() });
  });

  app.use('/api/settings', settingsRoutes);
  app.use('/api/pipeline', pipelineRoutes);
  app.use('/api/history', historyRoutes);

  // Error handler must be registered after all routes.
  app.use(errorHandler);

  const server = http.createServer(app);
  // WebSocket service attaches to the same HTTP server (shared port).
  wsService.init(server);

  // Disc events drive the pipeline; failures are logged and broadcast,
  // never allowed to become unhandled rejections.
  diskDetectionService.on('discInserted', (device) => {
    logger.info('disk:inserted:event', { device });
    pipelineService.onDiscInserted(device).catch((error) => {
      logger.error('pipeline:onDiscInserted:failed', { error: errorToMeta(error), device });
      wsService.broadcast('PIPELINE_ERROR', { message: error.message });
    });
  });

  diskDetectionService.on('discRemoved', (device) => {
    logger.info('disk:removed:event', { device });
    pipelineService.onDiscRemoved(device).catch((error) => {
      logger.error('pipeline:onDiscRemoved:failed', { error: errorToMeta(error), device });
      wsService.broadcast('PIPELINE_ERROR', { message: error.message });
    });
  });

  diskDetectionService.on('error', (error) => {
    logger.error('diskDetection:error:event', { error: errorToMeta(error) });
    wsService.broadcast('DISK_DETECTION_ERROR', { message: error.message });
  });

  diskDetectionService.start();

  server.listen(port, () => {
    logger.info('backend:listening', { port });
  });

  // Graceful shutdown: stop polling first, then close the server and exit
  // once in-flight connections have drained.
  const shutdown = () => {
    logger.warn('backend:shutdown:received');
    diskDetectionService.stop();
    server.close(() => {
      logger.warn('backend:shutdown:completed');
      process.exit(0);
    });
  };

  process.on('SIGINT', shutdown);
  process.on('SIGTERM', shutdown);

  // NOTE(review): this handler only logs and keeps the process alive after an
  // uncaughtException; Node docs recommend exiting afterwards — confirm this
  // keep-alive behavior is intentional.
  process.on('uncaughtException', (error) => {
    logger.error('process:uncaughtException', { error: errorToMeta(error) });
  });

  process.on('unhandledRejection', (reason) => {
    logger.error('process:unhandledRejection', {
      reason: reason instanceof Error ? errorToMeta(reason) : String(reason)
    });
  });
}

start().catch((error) => {
  logger.error('backend:start:failed', { error: errorToMeta(error) });
  process.exit(1);
});
|
||||
5
backend/src/middleware/asyncHandler.js
Normal file
5
backend/src/middleware/asyncHandler.js
Normal file
@@ -0,0 +1,5 @@
|
||||
module.exports = function asyncHandler(fn) {
|
||||
return function wrapped(req, res, next) {
|
||||
Promise.resolve(fn(req, res, next)).catch(next);
|
||||
};
|
||||
};
|
||||
23
backend/src/middleware/errorHandler.js
Normal file
23
backend/src/middleware/errorHandler.js
Normal file
@@ -0,0 +1,23 @@
|
||||
const logger = require('../services/logger').child('ERROR_HANDLER');
|
||||
const { errorToMeta } = require('../utils/errorMeta');
|
||||
|
||||
module.exports = function errorHandler(error, req, res, next) {
|
||||
const statusCode = error.statusCode || 500;
|
||||
|
||||
logger.error('request:error', {
|
||||
reqId: req?.reqId,
|
||||
method: req?.method,
|
||||
url: req?.originalUrl,
|
||||
statusCode,
|
||||
error: errorToMeta(error)
|
||||
});
|
||||
|
||||
res.status(statusCode).json({
|
||||
error: {
|
||||
message: error.message || 'Interner Fehler',
|
||||
statusCode,
|
||||
reqId: req?.reqId,
|
||||
details: Array.isArray(error.details) ? error.details : undefined
|
||||
}
|
||||
});
|
||||
};
|
||||
53
backend/src/middleware/requestLogger.js
Normal file
53
backend/src/middleware/requestLogger.js
Normal file
@@ -0,0 +1,53 @@
|
||||
const { randomUUID } = require('crypto');
|
||||
const logger = require('../services/logger').child('HTTP');
|
||||
|
||||
/**
 * Serializes an arbitrary value for logging and caps its length.
 *
 * @param {*} value - Raw value (string, object, anything). `undefined` is
 *   passed through unchanged so callers can log absent bodies as-is.
 * @param {number} [maxLen=1500] - Maximum number of characters to keep.
 * @returns {string|undefined} Possibly-truncated string representation.
 */
function truncate(value, maxLen = 1500) {
  if (value === undefined) {
    return undefined;
  }

  let str;
  if (typeof value === 'string') {
    str = value;
  } else {
    try {
      str = JSON.stringify(value);
    } catch {
      // Circular references (and BigInt) make JSON.stringify throw;
      // use a stable placeholder instead of failing the request log.
      str = '[unserializable-body]';
    }
    // Fix: JSON.stringify returns undefined (without throwing) for functions
    // and symbols; the original then crashed on `str.length`.
    if (str === undefined) {
      str = '[unserializable-body]';
    }
  }

  if (str.length <= maxLen) {
    return str;
  }

  return `${str.slice(0, maxLen)}...[truncated ${str.length - maxLen} chars]`;
}
|
||||
|
||||
module.exports = function requestLogger(req, res, next) {
|
||||
const reqId = randomUUID();
|
||||
const startedAt = Date.now();
|
||||
|
||||
req.reqId = reqId;
|
||||
|
||||
logger.info('request:start', {
|
||||
reqId,
|
||||
method: req.method,
|
||||
url: req.originalUrl,
|
||||
ip: req.ip,
|
||||
query: req.query,
|
||||
body: truncate(req.body)
|
||||
});
|
||||
|
||||
res.on('close', () => {
|
||||
if (!res.writableEnded) {
|
||||
logger.warn('request:aborted', {
|
||||
reqId,
|
||||
method: req.method,
|
||||
url: req.originalUrl,
|
||||
durationMs: Date.now() - startedAt
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
next();
|
||||
};
|
||||
154
backend/src/routes/historyRoutes.js
Normal file
154
backend/src/routes/historyRoutes.js
Normal file
@@ -0,0 +1,154 @@
|
||||
const express = require('express');
const asyncHandler = require('../middleware/asyncHandler');
const historyService = require('../services/historyService');
const pipelineService = require('../services/pipelineService');
const logger = require('../services/logger').child('HISTORY_ROUTE');

// Job-history API. NOTE: literal paths ('/database', '/orphan-raw', ...) are
// registered before the parameterized '/:id' route so they take precedence.
const router = express.Router();

// GET / — list jobs, optionally filtered by status and free-text search.
router.get(
  '/',
  asyncHandler(async (req, res) => {
    logger.info('get:jobs', {
      reqId: req.reqId,
      status: req.query.status,
      search: req.query.search
    });

    const jobs = await historyService.getJobs({
      status: req.query.status,
      search: req.query.search
    });

    res.json({ jobs });
  })
);

// GET /database — raw database rows view (same filters as the job list).
router.get(
  '/database',
  asyncHandler(async (req, res) => {
    logger.info('get:database', {
      reqId: req.reqId,
      status: req.query.status,
      search: req.query.search
    });

    const rows = await historyService.getDatabaseRows({
      status: req.query.status,
      search: req.query.search
    });

    res.json({ rows });
  })
);

// GET /orphan-raw — list RAW folders on disk that have no matching job.
router.get(
  '/orphan-raw',
  asyncHandler(async (req, res) => {
    logger.info('get:orphan-raw', { reqId: req.reqId });
    const result = await historyService.getOrphanRawFolders();
    res.json(result);
  })
);

// POST /orphan-raw/import — create a job from an orphaned RAW folder and
// reset the frontend pipeline view afterwards.
router.post(
  '/orphan-raw/import',
  asyncHandler(async (req, res) => {
    const rawPath = String(req.body?.rawPath || '').trim();
    logger.info('post:orphan-raw:import', { reqId: req.reqId, rawPath });
    const job = await historyService.importOrphanRawFolder(rawPath);
    const uiReset = await pipelineService.resetFrontendState('history_orphan_import');
    res.json({ job, uiReset });
  })
);

// POST /:id/omdb/assign — attach OMDb metadata (imdbId/title/year) to a job.
router.post(
  '/:id/omdb/assign',
  asyncHandler(async (req, res) => {
    const id = Number(req.params.id);
    const payload = req.body || {};
    // Only log presence of title/year, not their values.
    logger.info('post:job:omdb:assign', {
      reqId: req.reqId,
      id,
      imdbId: payload?.imdbId || null,
      hasTitle: Boolean(payload?.title),
      hasYear: Boolean(payload?.year)
    });

    const job = await historyService.assignOmdbMetadata(id, payload);
    res.json({ job });
  })
);

// POST /:id/delete-files — delete a job's files on disk; `target` defaults
// to 'both' (raw and encoded). Logged at warn level (destructive).
router.post(
  '/:id/delete-files',
  asyncHandler(async (req, res) => {
    const id = Number(req.params.id);
    const target = String(req.body?.target || 'both');

    logger.warn('post:delete-files', {
      reqId: req.reqId,
      id,
      target
    });

    const result = await historyService.deleteJobFiles(id, target);
    res.json(result);
  })
);

// POST /:id/delete — delete the job record itself; `target` controls which
// files (if any) go with it, defaulting to 'none'. Destructive -> warn log.
router.post(
  '/:id/delete',
  asyncHandler(async (req, res) => {
    const id = Number(req.params.id);
    const target = String(req.body?.target || 'none');

    logger.warn('post:delete-job', {
      reqId: req.reqId,
      id,
      target
    });

    const result = await historyService.deleteJob(id, target);
    const uiReset = await pipelineService.resetFrontendState('history_delete');
    res.json({ ...result, uiReset });
  })
);

// GET /:id — job detail; flags select which log variants are included and
// logTailLines (positive integer) limits the tail length.
router.get(
  '/:id',
  asyncHandler(async (req, res) => {
    const id = Number(req.params.id);
    // Accept '1'/'true'/'yes' (case-insensitive) as truthy query flags.
    const includeLiveLog = ['1', 'true', 'yes'].includes(String(req.query.includeLiveLog || '').toLowerCase());
    const includeLogs = ['1', 'true', 'yes'].includes(String(req.query.includeLogs || '').toLowerCase());
    const includeAllLogs = ['1', 'true', 'yes'].includes(String(req.query.includeAllLogs || '').toLowerCase());
    const parsedTail = Number(req.query.logTailLines);
    const logTailLines = Number.isFinite(parsedTail) && parsedTail > 0
      ? Math.trunc(parsedTail)
      : null;

    logger.info('get:job-detail', {
      reqId: req.reqId,
      id,
      includeLiveLog,
      includeLogs,
      includeAllLogs,
      logTailLines
    });
    const job = await historyService.getJobWithLogs(id, {
      includeLiveLog,
      includeLogs,
      includeAllLogs,
      logTailLines
    });
    if (!job) {
      const error = new Error('Job nicht gefunden.');
      error.statusCode = 404;
      throw error;
    }

    res.json({ job });
  })
);

module.exports = router;
|
||||
160
backend/src/routes/pipelineRoutes.js
Normal file
160
backend/src/routes/pipelineRoutes.js
Normal file
@@ -0,0 +1,160 @@
|
||||
const express = require('express');
const asyncHandler = require('../middleware/asyncHandler');
const pipelineService = require('../services/pipelineService');
const diskDetectionService = require('../services/diskDetectionService');
const logger = require('../services/logger').child('PIPELINE_ROUTE');

// Pipeline control API: state inspection, disc analysis/rescan, metadata
// selection, and job lifecycle actions (start/cancel/retry/re-encode).
const router = express.Router();

// GET /state — current in-memory pipeline snapshot (synchronous).
router.get(
  '/state',
  asyncHandler(async (req, res) => {
    logger.debug('get:state', { reqId: req.reqId });
    res.json({ pipeline: pipelineService.getSnapshot() });
  })
);

// POST /analyze — analyze the currently inserted disc.
router.post(
  '/analyze',
  asyncHandler(async (req, res) => {
    logger.info('post:analyze', { reqId: req.reqId });
    const result = await pipelineService.analyzeDisc();
    res.json({ result });
  })
);

// POST /rescan-disc — force a detection pass; emits discInserted even if the
// same disc is still present (see diskDetectionService.rescanAndEmit).
router.post(
  '/rescan-disc',
  asyncHandler(async (req, res) => {
    logger.info('post:rescan-disc', { reqId: req.reqId });
    const result = await diskDetectionService.rescanAndEmit();
    res.json({ result });
  })
);

// GET /omdb/search?q=… — proxy an OMDb title search.
router.get(
  '/omdb/search',
  asyncHandler(async (req, res) => {
    const query = req.query.q || '';
    logger.info('get:omdb:search', { reqId: req.reqId, query });
    const results = await pipelineService.searchOmdb(String(query));
    res.json({ results });
  })
);

// POST /select-metadata — attach chosen title/year/imdb metadata (and
// optionally a playlist) to an existing job; jobId is mandatory.
router.post(
  '/select-metadata',
  asyncHandler(async (req, res) => {
    const { jobId, title, year, imdbId, poster, fromOmdb, selectedPlaylist } = req.body;

    if (!jobId) {
      const error = new Error('jobId fehlt.');
      error.statusCode = 400;
      throw error;
    }

    logger.info('post:select-metadata', {
      reqId: req.reqId,
      jobId,
      title,
      year,
      imdbId,
      poster,
      fromOmdb,
      selectedPlaylist
    });

    const job = await pipelineService.selectMetadata({
      jobId: Number(jobId),
      title,
      year,
      imdbId,
      poster,
      fromOmdb,
      selectedPlaylist
    });

    res.json({ job });
  })
);

// POST /start/:jobId — start a job that was previously prepared.
router.post(
  '/start/:jobId',
  asyncHandler(async (req, res) => {
    const jobId = Number(req.params.jobId);
    logger.info('post:start-job', { reqId: req.reqId, jobId });
    const result = await pipelineService.startPreparedJob(jobId);
    res.json({ result });
  })
);

// POST /confirm-encode/:jobId — confirm the encode review, optionally
// overriding the title and track selection.
router.post(
  '/confirm-encode/:jobId',
  asyncHandler(async (req, res) => {
    const jobId = Number(req.params.jobId);
    const selectedEncodeTitleId = req.body?.selectedEncodeTitleId ?? null;
    const selectedTrackSelection = req.body?.selectedTrackSelection ?? null;
    logger.info('post:confirm-encode', {
      reqId: req.reqId,
      jobId,
      selectedEncodeTitleId,
      selectedTrackSelectionProvided: Boolean(selectedTrackSelection)
    });
    const job = await pipelineService.confirmEncodeReview(jobId, {
      selectedEncodeTitleId,
      selectedTrackSelection
    });
    res.json({ job });
  })
);

// POST /cancel — abort the currently running pipeline (warn-level log).
router.post(
  '/cancel',
  asyncHandler(async (req, res) => {
    logger.warn('post:cancel', { reqId: req.reqId });
    await pipelineService.cancel();
    res.json({ ok: true });
  })
);

// POST /retry/:jobId — retry a failed job.
router.post(
  '/retry/:jobId',
  asyncHandler(async (req, res) => {
    const jobId = Number(req.params.jobId);
    logger.info('post:retry', { reqId: req.reqId, jobId });
    await pipelineService.retry(jobId);
    res.json({ ok: true });
  })
);

// POST /resume-ready/:jobId — resume a job waiting in the ready-to-encode state.
router.post(
  '/resume-ready/:jobId',
  asyncHandler(async (req, res) => {
    const jobId = Number(req.params.jobId);
    logger.info('post:resume-ready', { reqId: req.reqId, jobId });
    const job = await pipelineService.resumeReadyToEncodeJob(jobId);
    res.json({ job });
  })
);

// POST /reencode/:jobId — start a fresh encode from the kept RAW files.
router.post(
  '/reencode/:jobId',
  asyncHandler(async (req, res) => {
    const jobId = Number(req.params.jobId);
    logger.info('post:reencode', { reqId: req.reqId, jobId });
    const result = await pipelineService.reencodeFromRaw(jobId);
    res.json({ result });
  })
);

// POST /restart-encode/:jobId — rerun the encode with the last-used settings.
router.post(
  '/restart-encode/:jobId',
  asyncHandler(async (req, res) => {
    const jobId = Number(req.params.jobId);
    logger.info('post:restart-encode', { reqId: req.reqId, jobId });
    const result = await pipelineService.restartEncodeWithLastSettings(jobId);
    res.json({ result });
  })
);

module.exports = router;
|
||||
128
backend/src/routes/settingsRoutes.js
Normal file
128
backend/src/routes/settingsRoutes.js
Normal file
@@ -0,0 +1,128 @@
|
||||
const express = require('express');
|
||||
const asyncHandler = require('../middleware/asyncHandler');
|
||||
const settingsService = require('../services/settingsService');
|
||||
const notificationService = require('../services/notificationService');
|
||||
const pipelineService = require('../services/pipelineService');
|
||||
const wsService = require('../services/websocketService');
|
||||
const logger = require('../services/logger').child('SETTINGS_ROUTE');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
 * Decides whether a setting key holds a secret that must be redacted in logs.
 *
 * @param {*} key - Setting key; anything non-string is coerced.
 * @returns {boolean} true when the normalized key contains a sensitive token.
 */
function isSensitiveSettingKey(key) {
  const normalized = String(key || '').trim().toLowerCase();
  if (normalized === '') {
    return false;
  }
  // Input is lowercased above, so a case-sensitive pattern suffices.
  return /(token|password|secret|api_key|registration_key|pushover_user)/.test(normalized);
}
|
||||
|
||||
// GET / — all settings grouped by category.
router.get(
  '/',
  asyncHandler(async (req, res) => {
    logger.debug('get:settings', { reqId: req.reqId });
    const categories = await settingsService.getCategorizedSettings();
    res.json({ categories });
  })
);

// PUT /:key — update a single setting. Sensitive values are redacted in the
// log. After saving, an encode-review refresh is attempted best-effort: its
// failure is reported in the response but never fails the save itself.
router.put(
  '/:key',
  asyncHandler(async (req, res) => {
    const { key } = req.params;
    const { value } = req.body;

    logger.info('put:setting', {
      reqId: req.reqId,
      key,
      value: isSensitiveSettingKey(key) ? '[redacted]' : value
    });
    const updated = await settingsService.setSettingValue(key, value);
    let reviewRefresh = null;
    try {
      reviewRefresh = await pipelineService.refreshEncodeReviewAfterSettingsSave([key]);
      if (reviewRefresh?.triggered) {
        logger.info('put:setting:review-refresh-started', {
          reqId: req.reqId,
          key,
          jobId: reviewRefresh.jobId
        });
      }
    } catch (error) {
      logger.warn('put:setting:review-refresh-failed', {
        reqId: req.reqId,
        key,
        error: {
          name: error?.name,
          message: error?.message
        }
      });
      // Surface the failure to the client instead of rethrowing.
      reviewRefresh = {
        triggered: false,
        reason: 'refresh_error',
        message: error?.message || 'unknown'
      };
    }
    // Notify connected frontends about the change.
    wsService.broadcast('SETTINGS_UPDATED', updated);

    res.json({ setting: updated, reviewRefresh });
  })
);

// PUT / — bulk update; body must be { settings: { key: value, ... } }.
// Same best-effort review-refresh semantics as the single-key route.
router.put(
  '/',
  asyncHandler(async (req, res) => {
    const { settings } = req.body || {};
    if (!settings || typeof settings !== 'object' || Array.isArray(settings)) {
      const error = new Error('settings fehlt oder ist ungültig.');
      error.statusCode = 400;
      throw error;
    }

    logger.info('put:settings:bulk', { reqId: req.reqId, count: Object.keys(settings).length });
    const changes = await settingsService.setSettingsBulk(settings);
    let reviewRefresh = null;
    try {
      reviewRefresh = await pipelineService.refreshEncodeReviewAfterSettingsSave(changes.map((item) => item.key));
      if (reviewRefresh?.triggered) {
        logger.info('put:settings:bulk:review-refresh-started', {
          reqId: req.reqId,
          jobId: reviewRefresh.jobId,
          relevantKeys: reviewRefresh.relevantKeys
        });
      }
    } catch (error) {
      logger.warn('put:settings:bulk:review-refresh-failed', {
        reqId: req.reqId,
        error: {
          name: error?.name,
          message: error?.message
        }
      });
      reviewRefresh = {
        triggered: false,
        reason: 'refresh_error',
        message: error?.message || 'unknown'
      };
    }
    // Broadcast only keys and count — values may contain secrets.
    wsService.broadcast('SETTINGS_BULK_UPDATED', { count: changes.length, keys: changes.map((item) => item.key) });

    res.json({ changes, reviewRefresh });
  })
);

// POST /pushover/test — send a test notification; only presence of the
// custom title/message is logged.
router.post(
  '/pushover/test',
  asyncHandler(async (req, res) => {
    const title = req.body?.title;
    const message = req.body?.message;
    logger.info('post:pushover:test', {
      reqId: req.reqId,
      hasTitle: Boolean(title),
      hasMessage: Boolean(message)
    });
    const result = await notificationService.sendTest({ title, message });
    res.json({ result });
  })
);

module.exports = router;
|
||||
385
backend/src/services/diskDetectionService.js
Normal file
385
backend/src/services/diskDetectionService.js
Normal file
@@ -0,0 +1,385 @@
|
||||
const fs = require('fs');
|
||||
const { EventEmitter } = require('events');
|
||||
const { execFile } = require('child_process');
|
||||
const { promisify } = require('util');
|
||||
const settingsService = require('./settingsService');
|
||||
const logger = require('./logger').child('DISK');
|
||||
const { errorToMeta } = require('../utils/errorMeta');
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
/**
 * Flattens an lsblk-style device tree (nodes with optional `children`
 * arrays) into a single pre-order list.
 *
 * @param {Array<object>|null|undefined} nodes - Root nodes; falsy means none.
 * @param {Array<object>} [acc=[]] - Accumulator, also the return value.
 * @returns {Array<object>} All nodes in depth-first (parent-first) order.
 */
function flattenDevices(nodes, acc = []) {
  (nodes || []).forEach((node) => {
    acc.push(node);
    const children = node.children;
    if (Array.isArray(children)) {
      flattenDevices(children, acc);
    }
  });
  return acc;
}
|
||||
|
||||
/**
 * Builds a stable identity string for a detected disc/device so that two
 * detection results can be compared for equality (see applyDetectionResult).
 * Fields are pipe-joined in a fixed order; missing fields become ''.
 */
function buildSignature(info) {
  const parts = [info.path, info.discLabel, info.label, info.model, info.mountpoint, info.fstype];
  return parts.map((part) => part || '').join('|');
}
|
||||
|
||||
/**
 * Polls the system for optical discs and emits events on state changes.
 *
 * Events:
 *  - 'discInserted' (device info) — disc became visible or rescan forced it.
 *  - 'discRemoved'  (last device info) — previously seen disc disappeared.
 *  - 'error'        (Error) — a polling cycle failed.
 *
 * Detection shells out to `lsblk` (device inventory) and `blkid` (media /
 * label probing). Devices in use elsewhere can be hidden from detection via
 * reference-counted locks (lockDevice / unlockDevice).
 */
class DiskDetectionService extends EventEmitter {
  constructor() {
    super();
    this.running = false;         // polling loop active?
    this.timer = null;            // pending setTimeout handle
    this.lastDetected = null;     // device info from the last positive poll
    this.lastPresent = false;     // was a disc present on the previous poll?
    this.deviceLocks = new Map(); // devicePath -> { count, owners: [] }
  }

  // Starts the polling loop (idempotent); first poll fires after 1s.
  start() {
    if (this.running) {
      return;
    }
    this.running = true;
    logger.info('start');
    this.scheduleNext(1000);
  }

  // Stops polling and cancels any pending timer.
  stop() {
    this.running = false;
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }
    logger.info('stop');
  }

  // Schedules the next poll. The interval is re-read from settings on every
  // tick (disc_poll_interval_ms, default 4000ms), so changes apply live.
  // Errors are emitted but never break the loop: scheduleNext always recurses.
  scheduleNext(delayMs) {
    if (!this.running) {
      return;
    }

    this.timer = setTimeout(async () => {
      let nextDelay = 4000;

      try {
        const map = await settingsService.getSettingsMap();
        nextDelay = Number(map.disc_poll_interval_ms || 4000);
        logger.debug('poll:tick', {
          driveMode: map.drive_mode,
          driveDevice: map.drive_device,
          nextDelay
        });
        const detected = await this.detectDisc(map);
        this.applyDetectionResult(detected, { forceInsertEvent: false });
      } catch (error) {
        logger.error('poll:error', { error: errorToMeta(error) });
        this.emit('error', error);
      }

      this.scheduleNext(nextDelay);
    }, delayMs);
  }

  // On-demand detection pass (triggered by POST /pipeline/rescan-disc).
  // Unlike the poll loop, this forces a discInserted event even when the
  // same disc is still present. Errors propagate to the caller.
  async rescanAndEmit() {
    try {
      const map = await settingsService.getSettingsMap();
      logger.info('rescan:requested', {
        driveMode: map.drive_mode,
        driveDevice: map.drive_device
      });

      const detected = await this.detectDisc(map);
      const result = this.applyDetectionResult(detected, { forceInsertEvent: true });

      logger.info('rescan:done', {
        present: result.present,
        emitted: result.emitted,
        changed: result.changed,
        detected: result.device || null
      });

      return result;
    } catch (error) {
      logger.error('rescan:error', { error: errorToMeta(error) });
      throw error;
    }
  }

  // Canonical form for lock-map keys ('' for falsy input).
  normalizeDevicePath(devicePath) {
    return String(devicePath || '').trim();
  }

  // Acquires a reference-counted lock on a device so detection skips it.
  // Returns a small handle, or null when the path is empty/invalid.
  lockDevice(devicePath, owner = null) {
    const normalized = this.normalizeDevicePath(devicePath);
    if (!normalized) {
      return null;
    }

    const entry = this.deviceLocks.get(normalized) || {
      count: 0,
      owners: []
    };

    entry.count += 1;
    if (owner) {
      entry.owners.push(owner);
    }
    this.deviceLocks.set(normalized, entry);

    logger.info('lock:add', {
      devicePath: normalized,
      count: entry.count,
      owner
    });

    return {
      devicePath: normalized,
      owner
    };
  }

  // Releases one reference; the lock disappears when the count reaches 0.
  // Unlocking an unknown path is a no-op.
  unlockDevice(devicePath, owner = null) {
    const normalized = this.normalizeDevicePath(devicePath);
    if (!normalized) {
      return;
    }

    const entry = this.deviceLocks.get(normalized);
    if (!entry) {
      return;
    }

    entry.count = Math.max(0, entry.count - 1);
    if (entry.count === 0) {
      this.deviceLocks.delete(normalized);
      logger.info('lock:remove', {
        devicePath: normalized,
        owner
      });
      return;
    }

    this.deviceLocks.set(normalized, entry);
    logger.info('lock:decrement', {
      devicePath: normalized,
      count: entry.count,
      owner
    });
  }

  // True while at least one lock reference is held for the path.
  isDeviceLocked(devicePath) {
    const normalized = this.normalizeDevicePath(devicePath);
    if (!normalized) {
      return false;
    }
    return this.deviceLocks.has(normalized);
  }

  // Snapshot of all currently held locks (for logging/diagnostics).
  getActiveLocks() {
    return Array.from(this.deviceLocks.entries()).map(([path, info]) => ({
      path,
      count: info.count,
      owners: info.owners
    }));
  }

  // Core state machine: compares a detection result against the remembered
  // state and emits discInserted/discRemoved as appropriate. A disc counts
  // as "changed" when its buildSignature differs from the last one seen.
  applyDetectionResult(detected, { forceInsertEvent = false } = {}) {
    const isPresent = Boolean(detected);
    const changed =
      isPresent &&
      (!this.lastDetected || buildSignature(this.lastDetected) !== buildSignature(detected));

    if (isPresent) {
      // Emit when a disc appears, when it is a different disc, or when a
      // rescan explicitly forces the event.
      const shouldEmitInserted = forceInsertEvent || !this.lastPresent || changed;
      this.lastDetected = detected;
      this.lastPresent = true;

      if (shouldEmitInserted) {
        logger.info('disc:inserted', { detected, forceInsertEvent, changed });
        this.emit('discInserted', detected);
        return {
          present: true,
          changed,
          emitted: 'discInserted',
          device: detected
        };
      }

      return {
        present: true,
        changed,
        emitted: 'none',
        device: detected
      };
    }

    if (!isPresent && this.lastPresent) {
      // Disc went away: report the last known device with the removal event.
      const removed = this.lastDetected;
      this.lastDetected = null;
      this.lastPresent = false;
      logger.info('disc:removed', { removed });
      this.emit('discRemoved', removed);
      return {
        present: false,
        changed: true,
        emitted: 'discRemoved',
        device: null
      };
    }

    // Still no disc — nothing changed, nothing emitted.
    return {
      present: false,
      changed: false,
      emitted: 'none',
      device: null
    };
  }

  // Dispatch on configured drive mode: 'explicit' uses the configured device
  // path; any other mode scans all optical (type 'rom') devices.
  async detectDisc(settingsMap) {
    if (settingsMap.drive_mode === 'explicit') {
      return this.detectExplicit(settingsMap.drive_device);
    }

    return this.detectAuto();
  }

  // Probes one configured device path. Returns null when the device is
  // locked, missing, or has no readable media; otherwise a detection record.
  async detectExplicit(devicePath) {
    if (this.isDeviceLocked(devicePath)) {
      logger.debug('detect:explicit:locked', {
        devicePath,
        activeLocks: this.getActiveLocks()
      });
      return null;
    }

    if (!devicePath || !fs.existsSync(devicePath)) {
      logger.debug('detect:explicit:not-found', { devicePath });
      return null;
    }

    const hasMedia = await this.checkMediaPresent(devicePath);
    if (!hasMedia) {
      logger.debug('detect:explicit:no-media', { devicePath });
      return null;
    }
    const discLabel = await this.getDiscLabel(devicePath);

    // Enrich with lsblk info when the device appears in the inventory;
    // match either by full path or by bare device name.
    const details = await this.getBlockDeviceInfo();
    const match = details.find((entry) => entry.path === devicePath || `/dev/${entry.name}` === devicePath) || {};

    const detected = {
      mode: 'explicit',
      path: devicePath,
      name: match.name || devicePath.split('/').pop(),
      model: match.model || 'Unknown',
      label: match.label || null,
      discLabel: discLabel || null,
      mountpoint: match.mountpoint || null,
      fstype: match.fstype || null,
      index: this.guessDiscIndex(match.name || devicePath)
    };
    logger.debug('detect:explicit:success', { detected });
    return detected;
  }

  // Scans all optical drives (lsblk type 'rom'); returns the first unlocked
  // drive with readable media, or null when none qualifies.
  async detectAuto() {
    const details = await this.getBlockDeviceInfo();
    const romCandidates = details.filter((entry) => entry.type === 'rom');

    for (const item of romCandidates) {
      const path = item.path || (item.name ? `/dev/${item.name}` : null);
      if (!path) {
        continue;
      }

      if (this.isDeviceLocked(path)) {
        logger.debug('detect:auto:skip-locked', {
          path,
          activeLocks: this.getActiveLocks()
        });
        continue;
      }

      const hasMedia = await this.checkMediaPresent(path);
      if (!hasMedia) {
        continue;
      }
      const discLabel = await this.getDiscLabel(path);

      const detected = {
        mode: 'auto',
        path,
        name: item.name,
        model: item.model || 'Optical Drive',
        label: item.label || null,
        discLabel: discLabel || null,
        mountpoint: item.mountpoint || null,
        fstype: item.fstype || null,
        index: this.guessDiscIndex(item.name)
      };
      logger.debug('detect:auto:success', { detected });
      return detected;
    }

    logger.debug('detect:auto:none');
    return null;
  }

  // Runs `lsblk -J` and flattens the JSON tree into plain device records.
  // Returns [] on any failure (lsblk missing, parse error, ...).
  async getBlockDeviceInfo() {
    try {
      const { stdout } = await execFileAsync('lsblk', [
        '-J',
        '-o',
        'NAME,PATH,TYPE,MOUNTPOINT,FSTYPE,LABEL,MODEL'
      ]);
      const parsed = JSON.parse(stdout);
      const devices = flattenDevices(parsed.blockdevices || []).map((entry) => ({
        name: entry.name,
        path: entry.path,
        type: entry.type,
        mountpoint: entry.mountpoint,
        fstype: entry.fstype,
        label: entry.label,
        model: entry.model
      }));
      logger.debug('lsblk:ok', { deviceCount: devices.length });
      return devices;
    } catch (error) {
      logger.warn('lsblk:failed', { error: errorToMeta(error) });
      return [];
    }
  }

  // Media probe via `blkid -s TYPE`: a non-empty filesystem type means a
  // readable disc is present. blkid exiting non-zero (no media) yields false.
  async checkMediaPresent(devicePath) {
    try {
      const { stdout } = await execFileAsync('blkid', ['-o', 'value', '-s', 'TYPE', devicePath]);
      const has = stdout.trim().length > 0;
      logger.debug('blkid:result', { devicePath, hasMedia: has, type: stdout.trim() });
      return has;
    } catch (error) {
      logger.debug('blkid:no-media-or-fail', { devicePath, error: errorToMeta(error) });
      return false;
    }
  }

  // Reads the volume label via `blkid -s LABEL`; null when absent or on error.
  async getDiscLabel(devicePath) {
    try {
      const { stdout } = await execFileAsync('blkid', ['-o', 'value', '-s', 'LABEL', devicePath]);
      const label = stdout.trim();
      logger.debug('blkid:label', { devicePath, discLabel: label || null });
      return label || null;
    } catch (error) {
      logger.debug('blkid:no-label', { devicePath, error: errorToMeta(error) });
      return null;
    }
  }

  // Derives a numeric drive index from trailing digits of the device name
  // (e.g. 'sr0' -> 0, 'sr1' -> 1); 0 when no digits are present.
  guessDiscIndex(name) {
    if (!name) {
      return 0;
    }

    const match = String(name).match(/(\d+)$/);
    return match ? Number(match[1]) : 0;
  }
}

// Singleton: the whole backend shares one detection loop and lock table.
module.exports = new DiskDetectionService();
|
||||
1098
backend/src/services/historyService.js
Normal file
1098
backend/src/services/historyService.js
Normal file
File diff suppressed because it is too large
Load Diff
46
backend/src/services/logPathService.js
Normal file
46
backend/src/services/logPathService.js
Normal file
@@ -0,0 +1,46 @@
|
||||
const path = require('path');
|
||||
const { logDir: fallbackLogDir } = require('../config');
|
||||
|
||||
function normalizeDir(value) {
|
||||
const raw = String(value || '').trim();
|
||||
if (!raw) {
|
||||
return null;
|
||||
}
|
||||
return path.isAbsolute(raw) ? path.normalize(raw) : path.resolve(raw);
|
||||
}
|
||||
|
||||
// Root log directory derived from the static config; used whenever no
// runtime override is active or an override cannot be resolved.
function getFallbackLogRootDir() {
  return path.resolve(fallbackLogDir);
}

// Resolves a candidate log root, falling back to the configured default
// when the candidate is blank/unresolvable.
function resolveLogRootDir(value) {
  return normalizeDir(value) || getFallbackLogRootDir();
}

// Module-level mutable state: the currently active log root. Changed at
// runtime via setLogRootDir (e.g. when the log-dir setting is updated).
let runtimeLogRootDir = getFallbackLogRootDir();

// Activates a new log root (absolute path) and returns the resolved value.
function setLogRootDir(value) {
  runtimeLogRootDir = resolveLogRootDir(value);
  return runtimeLogRootDir;
}

// Returns the active log root, guarding against a falsy runtime value.
function getLogRootDir() {
  return runtimeLogRootDir || getFallbackLogRootDir();
}

// Backend (application) logs live in a `backend` subdirectory of the root.
function getBackendLogDir() {
  return path.join(getLogRootDir(), 'backend');
}

// Job logs are written directly into the log root.
function getJobLogDir() {
  return getLogRootDir();
}

module.exports = {
  getFallbackLogRootDir,
  resolveLogRootDir,
  setLogRootDir,
  getLogRootDir,
  getBackendLogDir,
  getJobLogDir
};
|
||||
151
backend/src/services/logger.js
Normal file
151
backend/src/services/logger.js
Normal file
@@ -0,0 +1,151 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { logLevel } = require('../config');
|
||||
const { getBackendLogDir, getFallbackLogRootDir } = require('./logPathService');
|
||||
|
||||
// Numeric severity ranking; higher means more severe.
const LEVELS = {
  debug: 10,
  info: 20,
  warn: 30,
  error: 40
};

// Minimum severity that is emitted, derived from the configured log level.
// Unknown or missing values fall back to `info`.
const ACTIVE_LEVEL = LEVELS[String(logLevel || 'info').toLowerCase()] || LEVELS.info;
|
||||
|
||||
// Creates `logDirPath` (including parents) when missing and reports whether
// the directory is usable. Failures are deliberately swallowed — logging
// must never bring down the application.
function ensureLogDir(logDirPath) {
  try {
    fs.mkdirSync(logDirPath, { recursive: true });
    return true;
  } catch (_error) {
    return false;
  }
}
|
||||
|
||||
// Picks a writable directory for backend log files: first the (possibly
// runtime-overridden) backend log dir, then the static fallback from config.
// Returns null when neither directory can be created — writeLine treats
// that as "logging to file disabled".
function resolveWritableBackendLogDir() {
  const preferred = getBackendLogDir();
  if (ensureLogDir(preferred)) {
    return preferred;
  }

  const fallback = path.join(getFallbackLogRootDir(), 'backend');
  // Avoid a pointless second mkdir when override and fallback are identical.
  if (fallback !== preferred && ensureLogDir(fallback)) {
    return fallback;
  }

  return null;
}
|
||||
|
||||
// Builds the per-day log file name, e.g. "backend-2024-05-01.log",
// based on the machine's local date.
function getDailyFileName() {
  const now = new Date();
  const pad = (value) => String(value).padStart(2, '0');
  return `backend-${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())}.log`;
}
|
||||
|
||||
// JSON-serializes `value`, degrading to a small error marker instead of
// throwing (e.g. for circular structures or BigInt values).
function safeJson(value) {
  let serialized;
  try {
    serialized = JSON.stringify(value);
  } catch (error) {
    serialized = JSON.stringify({ serializationError: error.message });
  }
  return serialized;
}
|
||||
|
||||
// Caps `value` at `maxLen` characters, appending a marker that records how
// many characters were removed.
function truncateString(value, maxLen = 3000) {
  const str = String(value);
  return str.length > maxLen
    ? `${str.slice(0, maxLen)}...[truncated ${str.length - maxLen} chars]`
    : str;
}

// Produces a log-safe shallow copy of `meta`: Error values become plain
// { name, message, stack } objects and long strings are truncated at 5000
// characters. Non-object input is returned untouched.
function sanitizeMeta(meta) {
  if (!meta || typeof meta !== 'object') {
    return meta;
  }

  const sanitized = Array.isArray(meta) ? [] : {};
  for (const [key, val] of Object.entries(meta)) {
    if (val instanceof Error) {
      sanitized[key] = { name: val.name, message: val.message, stack: val.stack };
    } else if (typeof val === 'string') {
      sanitized[key] = truncateString(val, 5000);
    } else {
      sanitized[key] = val;
    }
  }
  return sanitized;
}
|
||||
|
||||
// Appends one serialized log line to both the daily file and the rolling
// `backend-latest.log`. The target directory is resolved on every call so a
// runtime log-dir change takes effect immediately; when no directory is
// writable the line is silently dropped.
function writeLine(line) {
  const backendLogDir = resolveWritableBackendLogDir();
  if (!backendLogDir) {
    return;
  }
  const daily = path.join(backendLogDir, getDailyFileName());
  const latest = path.join(backendLogDir, 'backend-latest.log');

  // Fire-and-forget appends; write errors are ignored on purpose so that
  // logging can never crash the process.
  fs.appendFile(daily, `${line}\n`, (_error) => null);
  fs.appendFile(latest, `${line}\n`, (_error) => null);
}
|
||||
|
||||
// Central log sink: drops entries below the active level, writes a JSON
// line to the log files and mirrors a human-readable line to the console.
function emit(level, scope, message, meta = null) {
  const normLevel = String(level || 'info').toLowerCase();
  // Unknown level names are treated as `info`.
  const lvl = LEVELS[normLevel] || LEVELS.info;
  if (lvl < ACTIVE_LEVEL) {
    return;
  }

  const timestamp = new Date().toISOString();
  const payload = {
    timestamp,
    level: normLevel,
    scope,
    message,
    meta: sanitizeMeta(meta)
  };

  const line = safeJson(payload);
  writeLine(line);

  // Console mirror: route through the matching console method so warnings
  // and errors reach stderr.
  const print = `[${timestamp}] [${normLevel.toUpperCase()}] [${scope}] ${message}`;
  if (normLevel === 'error') {
    console.error(print, payload.meta ? payload.meta : '');
  } else if (normLevel === 'warn') {
    console.warn(print, payload.meta ? payload.meta : '');
  } else {
    console.log(print, payload.meta ? payload.meta : '');
  }
}
|
||||
|
||||
function child(scope) {
|
||||
return {
|
||||
debug(message, meta) {
|
||||
emit('debug', scope, message, meta);
|
||||
},
|
||||
info(message, meta) {
|
||||
emit('info', scope, message, meta);
|
||||
},
|
||||
warn(message, meta) {
|
||||
emit('warn', scope, message, meta);
|
||||
},
|
||||
error(message, meta) {
|
||||
emit('error', scope, message, meta);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
child,
|
||||
emit
|
||||
};
|
||||
165
backend/src/services/notificationService.js
Normal file
165
backend/src/services/notificationService.js
Normal file
@@ -0,0 +1,165 @@
|
||||
const settingsService = require('./settingsService');
|
||||
const logger = require('./logger').child('PUSHOVER');
|
||||
const { toBoolean } = require('../utils/validators');
|
||||
const { errorToMeta } = require('../utils/errorMeta');
|
||||
|
||||
// PushOver REST endpoint for sending messages.
const PUSHOVER_API_URL = 'https://api.pushover.net/1/messages.json';

// Maps internal event keys to the settings key that toggles notifications
// for that event. Events without an entry here (e.g. 'test') are always
// allowed once PushOver itself is enabled.
const EVENT_TOGGLE_KEYS = {
  metadata_ready: 'pushover_notify_metadata_ready',
  rip_started: 'pushover_notify_rip_started',
  encoding_started: 'pushover_notify_encoding_started',
  job_finished: 'pushover_notify_job_finished',
  job_error: 'pushover_notify_job_error',
  job_cancelled: 'pushover_notify_job_cancelled',
  reencode_started: 'pushover_notify_reencode_started',
  reencode_finished: 'pushover_notify_reencode_finished'
};
|
||||
|
||||
// Trims `value` and caps it at `maxLen` characters; over-long text is cut
// and suffixed with a "...[truncated]" marker.
//
// BUGFIX: the cut point is clamped to 0. Previously `maxLen - 20` could go
// negative for small limits, and a negative slice index keeps the END of
// the string — producing mangled output longer than `maxLen`.
function truncate(value, maxLen = 1024) {
  const text = String(value || '').trim();
  if (text.length <= maxLen) {
    return text;
  }
  return `${text.slice(0, Math.max(0, maxLen - 20))}...[truncated]`;
}
|
||||
|
||||
// Coerces a raw priority into PushOver's integer range [-2, 2].
// Non-numeric input maps to 0 (normal priority).
function normalizePriority(raw) {
  const value = Number(raw);
  if (Number.isNaN(value)) {
    return 0;
  }
  return Math.round(Math.min(2, Math.max(-2, value)));
}
|
||||
|
||||
// Sends PushOver notifications based on persisted settings. Skip conditions
// (disabled, event toggled off, missing credentials) resolve to
// { sent: false, reason }; only a failed HTTP send throws.
class NotificationService {
  // Loads the current settings and delegates to notifyWithSettings.
  async notify(eventKey, payload = {}) {
    const settings = await settingsService.getSettingsMap();
    return this.notifyWithSettings(settings, eventKey, payload);
  }

  // Sends a test notification. 'test' has no per-event toggle, so it only
  // requires PushOver to be enabled and configured.
  async sendTest({ title, message } = {}) {
    return this.notify('test', {
      title: title || 'Ripster Test',
      message: message || 'PushOver Testnachricht von Ripster.'
    });
  }

  // Core send path: evaluates skip conditions, builds the form payload and
  // POSTs it to the PushOver API with an abort-based timeout.
  async notifyWithSettings(settings, eventKey, payload = {}) {
    const enabled = toBoolean(settings.pushover_enabled);
    if (!enabled) {
      logger.debug('notify:skip:disabled', { eventKey });
      return { sent: false, reason: 'disabled', eventKey };
    }

    // Per-event opt-out; events without a toggle key are always allowed.
    const toggleKey = EVENT_TOGGLE_KEYS[eventKey];
    if (toggleKey && !toBoolean(settings[toggleKey])) {
      logger.debug('notify:skip:event-disabled', { eventKey, toggleKey });
      return { sent: false, reason: 'event-disabled', eventKey };
    }

    const token = String(settings.pushover_token || '').trim();
    const user = String(settings.pushover_user || '').trim();
    if (!token || !user) {
      logger.warn('notify:skip:missing-credentials', {
        eventKey,
        hasToken: Boolean(token),
        hasUser: Boolean(user)
      });
      return { sent: false, reason: 'missing-credentials', eventKey };
    }

    // Title capped at 120 chars, message at 1024; priority clamped to [-2,2].
    const prefix = String(settings.pushover_title_prefix || 'Ripster').trim();
    const title = truncate(payload.title || `${prefix} - ${eventKey}`, 120);
    const message = truncate(payload.message || eventKey, 1024);
    const priority = normalizePriority(
      payload.priority !== undefined ? payload.priority : settings.pushover_priority
    );
    const timeoutMs = Math.max(1000, Number(settings.pushover_timeout_ms || 7000));

    const form = new URLSearchParams();
    form.set('token', token);
    form.set('user', user);
    form.set('title', title);
    form.set('message', message);
    form.set('priority', String(priority));

    // Optional target device and supplementary fields.
    const device = String(settings.pushover_device || '').trim();
    if (device) {
      form.set('device', device);
    }

    if (payload.url) {
      form.set('url', String(payload.url));
    }
    if (payload.urlTitle) {
      form.set('url_title', String(payload.urlTitle));
    }
    if (payload.sound) {
      form.set('sound', String(payload.sound));
    }

    // Abort the request after the configured timeout.
    const controller = new AbortController();
    const timeout = setTimeout(() => controller.abort(), timeoutMs);

    try {
      const response = await fetch(PUSHOVER_API_URL, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/x-www-form-urlencoded'
        },
        body: form.toString(),
        signal: controller.signal
      });

      // The body may be empty or non-JSON; tolerate both.
      const rawText = await response.text();
      let data = null;
      try {
        data = rawText ? JSON.parse(rawText) : null;
      } catch (error) {
        data = null;
      }

      if (!response.ok) {
        const messageText = data?.errors?.join(', ') || data?.error || rawText || `HTTP ${response.status}`;
        const error = new Error(`PushOver HTTP ${response.status}: ${messageText}`);
        error.statusCode = response.status;
        throw error;
      }

      // PushOver reports application-level failure via status !== 1 even on
      // HTTP 200.
      if (data && data.status !== 1) {
        const messageText = data.errors?.join(', ') || data.error || 'Unbekannte PushOver Antwort.';
        throw new Error(`PushOver Fehler: ${messageText}`);
      }

      logger.info('notify:sent', {
        eventKey,
        title,
        priority,
        requestId: data?.request || null
      });
      return {
        sent: true,
        eventKey,
        requestId: data?.request || null
      };
    } catch (error) {
      logger.error('notify:failed', {
        eventKey,
        title,
        error: errorToMeta(error)
      });
      throw error;
    } finally {
      clearTimeout(timeout);
    }
  }
}
|
||||
|
||||
// Shared singleton instance used across the backend.
module.exports = new NotificationService();
|
||||
92
backend/src/services/omdbService.js
Normal file
92
backend/src/services/omdbService.js
Normal file
@@ -0,0 +1,92 @@
|
||||
const settingsService = require('./settingsService');
|
||||
const logger = require('./logger').child('OMDB');
|
||||
|
||||
// Thin client for the OMDb HTTP API. Both methods quietly return empty
// results when no API key is configured; only transport-level failures
// (non-OK HTTP status) throw.
class OmdbService {
  // Free-text search. Returns a (possibly empty) list of
  // { title, year, imdbId, type, poster } entries.
  async search(query) {
    if (!query || query.trim().length === 0) {
      return [];
    }
    logger.info('search:start', { query });

    const settings = await settingsService.getSettingsMap();
    const apiKey = settings.omdb_api_key;
    if (!apiKey) {
      // No key configured -> behave as "no results" instead of failing.
      return [];
    }

    const type = settings.omdb_default_type || 'movie';
    const url = new URL('https://www.omdbapi.com/');
    url.searchParams.set('apikey', apiKey);
    url.searchParams.set('s', query.trim());
    url.searchParams.set('type', type);

    const response = await fetch(url);
    if (!response.ok) {
      logger.error('search:http-failed', { query, status: response.status });
      throw new Error(`OMDb Anfrage fehlgeschlagen (${response.status})`);
    }

    // OMDb signals "no results" via Response === 'False' on HTTP 200.
    const data = await response.json();
    if (data.Response === 'False' || !Array.isArray(data.Search)) {
      logger.warn('search:no-results', { query, response: data.Response, error: data.Error });
      return [];
    }
    const results = data.Search.map((item) => ({
      title: item.Title,
      year: item.Year,
      imdbId: item.imdbID,
      type: item.Type,
      poster: item.Poster
    }));
    logger.info('search:done', { query, count: results.length });
    return results;
  }

  // Fetches full details for one IMDb id ("tt" + 6-12 digits). Returns null
  // for malformed ids, a missing API key, or an OMDb "not found" answer.
  async fetchByImdbId(imdbId) {
    const normalizedId = String(imdbId || '').trim().toLowerCase();
    if (!/^tt\d{6,12}$/.test(normalizedId)) {
      return null;
    }

    logger.info('fetchByImdbId:start', { imdbId: normalizedId });
    const settings = await settingsService.getSettingsMap();
    const apiKey = settings.omdb_api_key;
    if (!apiKey) {
      return null;
    }

    const url = new URL('https://www.omdbapi.com/');
    url.searchParams.set('apikey', apiKey);
    url.searchParams.set('i', normalizedId);
    url.searchParams.set('plot', 'full');

    const response = await fetch(url);
    if (!response.ok) {
      logger.error('fetchByImdbId:http-failed', { imdbId: normalizedId, status: response.status });
      throw new Error(`OMDb Anfrage fehlgeschlagen (${response.status})`);
    }

    const data = await response.json();
    if (data.Response === 'False') {
      logger.warn('fetchByImdbId:not-found', { imdbId: normalizedId, error: data.Error });
      return null;
    }

    // Year may be a range (e.g. a series); take the first plausible year.
    const yearMatch = String(data.Year || '').match(/\b(19|20)\d{2}\b/);
    const year = yearMatch ? Number(yearMatch[0]) : null;
    // OMDb uses the literal string 'N/A' for missing posters.
    const poster = data.Poster && data.Poster !== 'N/A' ? data.Poster : null;

    const result = {
      title: data.Title || null,
      year: Number.isFinite(year) ? year : null,
      imdbId: String(data.imdbID || normalizedId),
      type: data.Type || null,
      poster,
      raw: data
    };
    logger.info('fetchByImdbId:done', { imdbId: result.imdbId, title: result.title });
    return result;
  }
}
|
||||
|
||||
module.exports = new OmdbService();
|
||||
5104
backend/src/services/pipelineService.js
Normal file
5104
backend/src/services/pipelineService.js
Normal file
File diff suppressed because it is too large
Load Diff
99
backend/src/services/processRunner.js
Normal file
99
backend/src/services/processRunner.js
Normal file
@@ -0,0 +1,99 @@
|
||||
const { spawn } = require('child_process');
|
||||
const logger = require('./logger').child('PROCESS');
|
||||
const { errorToMeta } = require('../utils/errorMeta');
|
||||
|
||||
// Feeds `stream` through a line splitter: `onLine` is invoked once per
// complete line (handling \n, \r\n and \r), partial lines are buffered
// across chunks, and any remainder is flushed on 'end'. Empty lines are
// dropped.
function streamLines(stream, onLine) {
  let pending = '';

  const consume = (text) => {
    const pieces = text.split(/\r\n|\n|\r/);
    pending = pieces.pop() ?? '';
    for (const piece of pieces) {
      if (piece.length > 0) {
        onLine(piece);
      }
    }
  };

  stream.on('data', (chunk) => {
    consume(pending + chunk.toString());
  });

  stream.on('end', () => {
    if (pending.length > 0) {
      onLine(pending);
    }
  });
}
|
||||
|
||||
function spawnTrackedProcess({
|
||||
cmd,
|
||||
args,
|
||||
cwd,
|
||||
onStdoutLine,
|
||||
onStderrLine,
|
||||
onStart,
|
||||
context = {}
|
||||
}) {
|
||||
logger.info('spawn:start', { cmd, args, cwd, context });
|
||||
|
||||
const child = spawn(cmd, args, {
|
||||
cwd,
|
||||
env: process.env,
|
||||
stdio: ['ignore', 'pipe', 'pipe']
|
||||
});
|
||||
|
||||
if (onStart) {
|
||||
onStart(child);
|
||||
}
|
||||
|
||||
if (child.stdout && onStdoutLine) {
|
||||
streamLines(child.stdout, onStdoutLine);
|
||||
}
|
||||
|
||||
if (child.stderr && onStderrLine) {
|
||||
streamLines(child.stderr, onStderrLine);
|
||||
}
|
||||
|
||||
const promise = new Promise((resolve, reject) => {
|
||||
child.on('error', (error) => {
|
||||
logger.error('spawn:error', { cmd, args, context, error: errorToMeta(error) });
|
||||
reject(error);
|
||||
});
|
||||
|
||||
child.on('close', (code, signal) => {
|
||||
logger.info('spawn:close', { cmd, args, code, signal, context });
|
||||
if (code === 0) {
|
||||
resolve({ code, signal });
|
||||
} else {
|
||||
const error = new Error(`Prozess ${cmd} beendet mit Code ${code ?? 'null'} (Signal ${signal ?? 'none'}).`);
|
||||
error.code = code;
|
||||
error.signal = signal;
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const cancel = () => {
|
||||
if (child.killed) {
|
||||
return;
|
||||
}
|
||||
|
||||
logger.warn('spawn:cancel:requested', { cmd, args, context, pid: child.pid });
|
||||
child.kill('SIGINT');
|
||||
|
||||
setTimeout(() => {
|
||||
if (!child.killed) {
|
||||
logger.warn('spawn:cancel:force-kill', { cmd, args, context, pid: child.pid });
|
||||
child.kill('SIGKILL');
|
||||
}
|
||||
}, 3000);
|
||||
};
|
||||
|
||||
return {
|
||||
child,
|
||||
promise,
|
||||
cancel
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
spawnTrackedProcess
|
||||
};
|
||||
710
backend/src/services/settingsService.js
Normal file
710
backend/src/services/settingsService.js
Normal file
@@ -0,0 +1,710 @@
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const { spawnSync } = require('child_process');
|
||||
const { getDb } = require('../db/database');
|
||||
const logger = require('./logger').child('SETTINGS');
|
||||
const {
|
||||
parseJson,
|
||||
normalizeValueByType,
|
||||
serializeValueByType,
|
||||
validateSetting
|
||||
} = require('../utils/validators');
|
||||
const { splitArgs } = require('../utils/commandLine');
|
||||
const { setLogRootDir } = require('./logPathService');
|
||||
|
||||
// Audio passthrough mask used when no HandBrake preset profile could be
// parsed (see buildFallbackPresetProfile).
const DEFAULT_AUDIO_COPY_MASK = ['copy:aac', 'copy:ac3', 'copy:eac3', 'copy:truehd', 'copy:dts', 'copy:dtshd', 'copy:mp3', 'copy:flac'];
// Setting keys whose values must never appear in logs.
const SENSITIVE_SETTING_KEYS = new Set([
  'makemkv_registration_key',
  'omdb_api_key',
  'pushover_token',
  'pushover_user'
]);
// HandBrake CLI track-selection flags, split into "takes a value" vs
// "flag only" groups so removeSelectionArgs knows how many tokens to drop.
const AUDIO_SELECTION_KEYS_WITH_VALUE = new Set(['-a', '--audio', '--audio-lang-list']);
const AUDIO_SELECTION_KEYS_FLAG_ONLY = new Set(['--all-audio', '--first-audio']);
const SUBTITLE_SELECTION_KEYS_WITH_VALUE = new Set(['-s', '--subtitle', '--subtitle-lang-list']);
const SUBTITLE_SELECTION_KEYS_FLAG_ONLY = new Set(['--all-subtitles', '--first-subtitle']);
const SUBTITLE_FLAG_KEYS_WITH_VALUE = new Set(['--subtitle-burned', '--subtitle-default', '--subtitle-forced']);
const TITLE_SELECTION_KEYS_WITH_VALUE = new Set(['-t', '--title']);
// Setting key that redirects the runtime log directory when changed.
const LOG_DIR_SETTING_KEY = 'log_dir';
|
||||
|
||||
// Applies a user-configured log directory at runtime: points logPathService
// at it and makes sure it exists. If the directory cannot be created, the
// default log root is re-activated instead and a warning is logged.
// Returns the directory that ended up active.
function applyRuntimeLogDirSetting(rawValue) {
  const resolved = setLogRootDir(rawValue);
  try {
    fs.mkdirSync(resolved, { recursive: true });
    return resolved;
  } catch (error) {
    // setLogRootDir(null) resolves to the built-in fallback directory.
    const fallbackResolved = setLogRootDir(null);
    try {
      fs.mkdirSync(fallbackResolved, { recursive: true });
    } catch (_fallbackError) {
      // ignore fallback fs errors here; logger may still print to console
    }
    logger.warn('setting:log-dir:fallback', {
      configured: String(rawValue || '').trim() || null,
      resolved,
      fallbackResolved,
      error: error?.message || String(error)
    });
    return fallbackResolved;
  }
}
|
||||
|
||||
// Normalizes a raw track-id list into unique, positive integer ids encoded
// as strings, preserving first-seen order. Non-numeric, zero and negative
// entries are dropped; fractional values are truncated.
function normalizeTrackIds(rawList) {
  const source = Array.isArray(rawList) ? rawList : [];
  const known = new Set();
  const result = [];

  for (const entry of source) {
    const numeric = Number(entry);
    if (!Number.isFinite(numeric) || numeric <= 0) {
      continue;
    }
    const id = String(Math.trunc(numeric));
    if (!known.has(id)) {
      known.add(id);
      result.push(id);
    }
  }

  return result;
}
|
||||
|
||||
// Strips audio/subtitle/title selection flags from user-supplied HandBrake
// extra args so explicit per-job track selection can override them.
// Handles both "--flag=value" and "--flag value" forms; the latter also
// consumes the following value token.
function removeSelectionArgs(extraArgs) {
  const args = Array.isArray(extraArgs) ? extraArgs : [];
  const filtered = [];

  for (let i = 0; i < args.length; i += 1) {
    const token = String(args[i] || '');
    // For "--flag=value" tokens, match on the flag part only.
    const key = token.includes('=') ? token.slice(0, token.indexOf('=')) : token;

    const isAudioWithValue = AUDIO_SELECTION_KEYS_WITH_VALUE.has(key);
    const isAudioFlagOnly = AUDIO_SELECTION_KEYS_FLAG_ONLY.has(key);
    const isSubtitleWithValue = SUBTITLE_SELECTION_KEYS_WITH_VALUE.has(key)
      || SUBTITLE_FLAG_KEYS_WITH_VALUE.has(key);
    const isSubtitleFlagOnly = SUBTITLE_SELECTION_KEYS_FLAG_ONLY.has(key);
    const isTitleWithValue = TITLE_SELECTION_KEYS_WITH_VALUE.has(key);
    const skip = isAudioWithValue || isAudioFlagOnly || isSubtitleWithValue || isSubtitleFlagOnly || isTitleWithValue;

    if (!skip) {
      filtered.push(token);
      continue;
    }

    // Value-taking flag in the two-token form: also drop its value, unless
    // the next token looks like another flag.
    if ((isAudioWithValue || isSubtitleWithValue || isTitleWithValue) && !token.includes('=')) {
      const nextToken = String(args[i + 1] || '');
      if (nextToken && !nextToken.startsWith('-')) {
        i += 1;
      }
    }
  }

  return filtered;
}
|
||||
|
||||
// Recursively flattens a HandBrake preset tree into a single list of leaf
// presets: container entries (non-empty ChildrenArray) are descended into,
// non-object entries are dropped. Appends into (and returns) `output`.
function flattenPresetList(input, output = []) {
  if (!Array.isArray(input)) {
    return output;
  }
  for (const entry of input) {
    if (!entry || typeof entry !== 'object') {
      continue;
    }
    const children = entry.ChildrenArray;
    if (Array.isArray(children) && children.length > 0) {
      flattenPresetList(children, output);
    } else {
      output.push(entry);
    }
  }
  return output;
}
|
||||
|
||||
// Conservative preset profile used when the real HandBrake preset cannot
// be parsed: first audio track, copy-friendly audio mask, no subtitles.
// `message` optionally carries the reason for the fallback.
function buildFallbackPresetProfile(presetName, message = null) {
  const profile = {
    source: 'fallback',
    message,
    presetName: presetName || null,
    audioTrackSelectionBehavior: 'first',
    audioLanguages: [],
    audioEncoders: [],
    audioCopyMask: DEFAULT_AUDIO_COPY_MASK,
    audioFallback: 'av_aac',
    subtitleTrackSelectionBehavior: 'none',
    subtitleLanguages: [],
    subtitleBurnBehavior: 'none'
  };
  return profile;
}
|
||||
|
||||
class SettingsService {
|
||||
  // Returns all settings_schema rows ordered for display
  // (category, then the schema's explicit order_index).
  async getSchemaRows() {
    const db = await getDb();
    return db.all('SELECT * FROM settings_schema ORDER BY category ASC, order_index ASC');
  }
|
||||
|
||||
async getSettingsMap() {
|
||||
const rows = await this.getFlatSettings();
|
||||
const map = {};
|
||||
|
||||
for (const row of rows) {
|
||||
map[row.key] = row.value;
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
  // Loads every schema row left-joined with its stored value and maps it to
  // the API shape. `value` is normalized to the declared type, falling back
  // to the schema default when nothing has been stored yet.
  async getFlatSettings() {
    const db = await getDb();
    const rows = await db.all(
      `
      SELECT
        s.key,
        s.category,
        s.label,
        s.type,
        s.required,
        s.description,
        s.default_value,
        s.options_json,
        s.validation_json,
        s.order_index,
        v.value as current_value
      FROM settings_schema s
      LEFT JOIN settings_values v ON v.key = s.key
      ORDER BY s.category ASC, s.order_index ASC
      `
    );

    return rows.map((row) => ({
      key: row.key,
      category: row.category,
      label: row.label,
      type: row.type,
      required: Boolean(row.required),
      description: row.description,
      defaultValue: row.default_value,
      // options_json / validation_json are stored as JSON text.
      options: parseJson(row.options_json, []),
      validation: parseJson(row.validation_json, {}),
      // Stored value wins; otherwise the schema default is normalized.
      value: normalizeValueByType(row.type, row.current_value ?? row.default_value),
      orderIndex: row.order_index
    }));
  }
|
||||
|
||||
async getCategorizedSettings() {
|
||||
const flat = await this.getFlatSettings();
|
||||
const byCategory = new Map();
|
||||
|
||||
for (const item of flat) {
|
||||
if (!byCategory.has(item.category)) {
|
||||
byCategory.set(item.category, []);
|
||||
}
|
||||
byCategory.get(item.category).push(item);
|
||||
}
|
||||
|
||||
return Array.from(byCategory.entries()).map(([category, settings]) => ({
|
||||
category,
|
||||
settings
|
||||
}));
|
||||
}
|
||||
|
||||
  // Validates and persists a single setting. Throws a 404 error for unknown
  // keys and a 400 error for invalid values. A changed log-dir setting is
  // applied immediately. Returns { key, value } with the normalized value.
  async setSettingValue(key, rawValue) {
    const db = await getDb();
    const schema = await db.get('SELECT * FROM settings_schema WHERE key = ?', [key]);
    if (!schema) {
      const error = new Error(`Setting ${key} existiert nicht.`);
      error.statusCode = 404;
      throw error;
    }

    const result = validateSetting(schema, rawValue);
    if (!result.valid) {
      const error = new Error(result.errors.join(' '));
      error.statusCode = 400;
      throw error;
    }

    const serializedValue = serializeValueByType(schema.type, result.normalized);

    // Upsert the stored value.
    await db.run(
      `
      INSERT INTO settings_values (key, value, updated_at)
      VALUES (?, ?, CURRENT_TIMESTAMP)
      ON CONFLICT(key) DO UPDATE SET
        value = excluded.value,
        updated_at = CURRENT_TIMESTAMP
      `,
      [key, serializedValue]
    );
    // Redact secrets (API keys, tokens) from the audit log.
    logger.info('setting:updated', {
      key,
      value: SENSITIVE_SETTING_KEYS.has(String(key || '').trim().toLowerCase()) ? '[redacted]' : result.normalized
    });
    if (String(key || '').trim().toLowerCase() === LOG_DIR_SETTING_KEY) {
      applyRuntimeLogDirSetting(result.normalized);
    }

    return {
      key,
      value: result.normalized
    };
  }
|
||||
|
||||
  // Validates and persists multiple settings atomically. All entries are
  // validated first; if any fail, nothing is written and a 400 error with
  // per-key `details` is thrown (unknown keys throw 404 immediately). All
  // writes happen in a single transaction. Returns [{ key, value }] for the
  // applied entries.
  async setSettingsBulk(rawPatch) {
    if (!rawPatch || typeof rawPatch !== 'object' || Array.isArray(rawPatch)) {
      const error = new Error('Ungültiger Payload. Erwartet wird ein Objekt mit key/value Paaren.');
      error.statusCode = 400;
      throw error;
    }

    const entries = Object.entries(rawPatch);
    if (entries.length === 0) {
      return [];
    }

    const db = await getDb();
    const schemaRows = await db.all('SELECT * FROM settings_schema');
    const schemaByKey = new Map(schemaRows.map((row) => [row.key, row]));
    const normalizedEntries = [];
    const validationErrors = [];

    // Phase 1: validate everything before touching the database.
    for (const [key, rawValue] of entries) {
      const schema = schemaByKey.get(key);
      if (!schema) {
        const error = new Error(`Setting ${key} existiert nicht.`);
        error.statusCode = 404;
        throw error;
      }

      const result = validateSetting(schema, rawValue);
      if (!result.valid) {
        validationErrors.push({
          key,
          message: result.errors.join(' ')
        });
        continue;
      }

      normalizedEntries.push({
        key,
        value: result.normalized,
        serializedValue: serializeValueByType(schema.type, result.normalized)
      });
    }

    if (validationErrors.length > 0) {
      const error = new Error('Mindestens ein Setting ist ungültig.');
      error.statusCode = 400;
      error.details = validationErrors;
      throw error;
    }

    // Phase 2: write all values in one transaction, rolling back on error.
    try {
      await db.exec('BEGIN');
      for (const item of normalizedEntries) {
        await db.run(
          `
          INSERT INTO settings_values (key, value, updated_at)
          VALUES (?, ?, CURRENT_TIMESTAMP)
          ON CONFLICT(key) DO UPDATE SET
            value = excluded.value,
            updated_at = CURRENT_TIMESTAMP
          `,
          [item.key, item.serializedValue]
        );
      }
      await db.exec('COMMIT');
    } catch (error) {
      await db.exec('ROLLBACK');
      throw error;
    }

    // Apply a changed log directory immediately (only after the commit).
    const logDirChange = normalizedEntries.find(
      (item) => String(item?.key || '').trim().toLowerCase() === LOG_DIR_SETTING_KEY
    );
    if (logDirChange) {
      applyRuntimeLogDirSetting(logDirChange.value);
    }

    logger.info('settings:bulk-updated', { count: normalizedEntries.length });
    return normalizedEntries.map((item) => ({
      key: item.key,
      value: item.value
    }));
  }
|
||||
|
||||
  // Builds the makemkvcon command for analyzing an inserted disc:
  // `-r info <source>` (robot/parseable output). Returns { cmd, args }.
  async buildMakeMKVAnalyzeConfig(deviceInfo = null) {
    const map = await this.getSettingsMap();
    const cmd = map.makemkv_command;
    const args = ['-r', 'info', this.resolveSourceArg(map, deviceInfo)];
    logger.debug('cli:makemkv:analyze', { cmd, args, deviceInfo });
    return { cmd, args };
  }
|
||||
|
||||
  // Builds the makemkvcon command for analyzing a file/folder source
  // (`file:<path>`). A requested titleId is only logged here because
  // "makemkvcon info" accepts no title filter — filtering happens later in
  // the application-side parser. Returns { cmd, args, sourceArg }.
  async buildMakeMKVAnalyzePathConfig(sourcePath, options = {}) {
    const map = await this.getSettingsMap();
    const cmd = map.makemkv_command;
    const sourceArg = `file:${sourcePath}`;
    const args = ['-r', 'info', sourceArg];
    const titleIdRaw = Number(options?.titleId);
    // "makemkvcon info" supports only <source>; title filtering is done in app parser.
    logger.debug('cli:makemkv:analyze:path', {
      cmd,
      args,
      sourcePath,
      requestedTitleId: Number.isFinite(titleIdRaw) && titleIdRaw >= 0 ? Math.trunc(titleIdRaw) : null
    });
    return { cmd, args, sourceArg };
  }
|
||||
|
||||
  // Builds the makemkvcon rip command. Two modes via makemkv_rip_mode:
  //   - 'backup': full decrypted disc backup; user extra args are ignored
  //     (and logged) because `backup` takes a fixed argument shape.
  //   - 'mkv' (default): rips either one explicitly selected title, or all
  //     titles at least makemkv_min_length_minutes long.
  // Returns { cmd, args }.
  async buildMakeMKVRipConfig(rawJobDir, deviceInfo = null, options = {}) {
    const map = await this.getSettingsMap();
    const cmd = map.makemkv_command;
    // Anything other than 'backup' falls back to 'mkv'.
    const ripMode = String(map.makemkv_rip_mode || 'mkv').trim().toLowerCase() === 'backup'
      ? 'backup'
      : 'mkv';
    const sourceArg = this.resolveSourceArg(map, deviceInfo);
    const rawSelectedTitleId = Number(options?.selectedTitleId);
    const parsedExtra = splitArgs(map.makemkv_rip_extra_args);
    let extra = [];
    let baseArgs = [];

    if (ripMode === 'backup') {
      if (parsedExtra.length > 0) {
        logger.warn('cli:makemkv:rip:backup:ignored-extra-args', {
          ignored: parsedExtra
        });
      }
      baseArgs = [
        'backup',
        '--decrypt',
        sourceArg,
        rawJobDir
      ];
    } else {
      extra = parsedExtra;
      const minLength = Number(map.makemkv_min_length_minutes || 60);
      const hasExplicitTitle = Number.isFinite(rawSelectedTitleId) && rawSelectedTitleId >= 0;
      const targetTitle = hasExplicitTitle ? String(Math.trunc(rawSelectedTitleId)) : 'all';
      if (hasExplicitTitle) {
        // A specific title: no minlength filter needed.
        baseArgs = [
          'mkv',
          sourceArg,
          targetTitle,
          rawJobDir
        ];
      } else {
        // --minlength expects seconds; the setting is stored in minutes.
        baseArgs = [
          '--minlength=' + Math.round(minLength * 60),
          'mkv',
          sourceArg,
          targetTitle,
          rawJobDir
        ];
      }
    }
    logger.debug('cli:makemkv:rip', {
      cmd,
      args: [...baseArgs, ...extra],
      ripMode,
      rawJobDir,
      deviceInfo,
      selectedTitleId: ripMode === 'mkv' && Number.isFinite(rawSelectedTitleId) && rawSelectedTitleId >= 0
        ? Math.trunc(rawSelectedTitleId)
        : null
    });
    return { cmd, args: [...baseArgs, ...extra] };
  }
|
||||
|
||||
  // Builds the makemkvcon registration command, or null when no key is
  // configured. `argsForLog` masks the registration key so it never
  // reaches log files.
  async buildMakeMKVRegisterConfig() {
    const map = await this.getSettingsMap();
    const registrationKey = String(map.makemkv_registration_key || '').trim();
    if (!registrationKey) {
      return null;
    }

    const cmd = map.makemkv_command || 'makemkvcon';
    const args = ['reg', registrationKey];
    logger.debug('cli:makemkv:register', { cmd, args: ['reg', '<redacted>'] });
    return {
      cmd,
      args,
      argsForLog: ['reg', '<redacted>']
    };
  }
|
||||
|
||||
  // Builds the mediainfo command producing JSON output for `inputPath`,
  // inserting any user-configured extra args before the input file.
  // Returns { cmd, args }.
  async buildMediaInfoConfig(inputPath) {
    const map = await this.getSettingsMap();
    const cmd = map.mediainfo_command || 'mediainfo';
    const baseArgs = ['--Output=JSON'];
    const extra = splitArgs(map.mediainfo_extra_args);
    const args = [...baseArgs, ...extra, inputPath];
    logger.debug('cli:mediainfo', { cmd, args, inputPath });
    return { cmd, args };
  }
|
||||
|
||||
/**
 * Builds the HandBrakeCLI invocation for transcoding inputFile into outputFile.
 *
 * @param {string} inputFile - Source file or RAW backup folder to encode.
 * @param {string} outputFile - Destination file for the encode.
 * @param {object} [options] - Optional: `titleId` (positive integer title to
 *   encode) and `trackSelection` (explicit audio/subtitle track choices).
 * @returns {Promise<{cmd: string, args: string[], trackSelection?: object}>}
 *   Command config; `trackSelection` echo is only present when a selection was applied.
 */
async buildHandBrakeConfig(inputFile, outputFile, options = {}) {
  const map = await this.getSettingsMap();
  const cmd = map.handbrake_command;
  const rawTitleId = Number(options?.titleId);
  // Only finite, positive title ids are honored; anything else means "default title".
  const selectedTitleId = Number.isFinite(rawTitleId) && rawTitleId > 0
    ? Math.trunc(rawTitleId)
    : null;
  const baseArgs = ['-i', inputFile, '-o', outputFile];
  if (selectedTitleId !== null) {
    baseArgs.push('-t', String(selectedTitleId));
  }
  baseArgs.push('-Z', map.handbrake_preset);
  const extra = splitArgs(map.handbrake_extra_args);
  const rawSelection = options?.trackSelection || null;
  const hasSelection = rawSelection && typeof rawSelection === 'object';

  // Without an explicit track selection, the preset plus extra args decide everything.
  if (!hasSelection) {
    logger.debug('cli:handbrake', {
      cmd,
      args: [...baseArgs, ...extra],
      inputFile,
      outputFile,
      selectedTitleId
    });
    return { cmd, args: [...baseArgs, ...extra] };
  }

  const audioTrackIds = normalizeTrackIds(rawSelection.audioTrackIds);
  const subtitleTrackIds = normalizeTrackIds(rawSelection.subtitleTrackIds);
  // NOTE(review): `|| null` would map a normalized id of 0 to null; presumably
  // HandBrake track ids are 1-based so 0 cannot occur — confirm against normalizeTrackIds.
  const subtitleBurnTrackId = normalizeTrackIds([rawSelection.subtitleBurnTrackId])[0] || null;
  const subtitleDefaultTrackId = normalizeTrackIds([rawSelection.subtitleDefaultTrackId])[0] || null;
  const subtitleForcedTrackId = normalizeTrackIds([rawSelection.subtitleForcedTrackId])[0] || null;
  const subtitleForcedOnly = Boolean(rawSelection.subtitleForcedOnly);
  // Strip any user-configured -a/-s style args so the explicit selection wins.
  const filteredExtra = removeSelectionArgs(extra);
  // 'none' disables the corresponding track category entirely.
  const overrideArgs = [
    '-a',
    audioTrackIds.length > 0 ? audioTrackIds.join(',') : 'none',
    '-s',
    subtitleTrackIds.length > 0 ? subtitleTrackIds.join(',') : 'none'
  ];
  if (subtitleBurnTrackId !== null) {
    overrideArgs.push(`--subtitle-burned=${subtitleBurnTrackId}`);
  }
  if (subtitleDefaultTrackId !== null) {
    overrideArgs.push(`--subtitle-default=${subtitleDefaultTrackId}`);
  }
  if (subtitleForcedTrackId !== null) {
    overrideArgs.push(`--subtitle-forced=${subtitleForcedTrackId}`);
  } else if (subtitleForcedOnly) {
    // Bare flag form: restrict to forced-only subtitles without naming a track.
    overrideArgs.push('--subtitle-forced');
  }
  // Override args come last so they take precedence over the preset and extras.
  const args = [...baseArgs, ...filteredExtra, ...overrideArgs];

  logger.debug('cli:handbrake:with-selection', {
    cmd,
    args,
    inputFile,
    outputFile,
    selectedTitleId,
    trackSelection: {
      audioTrackIds,
      subtitleTrackIds,
      subtitleBurnTrackId,
      subtitleDefaultTrackId,
      subtitleForcedTrackId,
      subtitleForcedOnly
    }
  });

  return {
    cmd,
    args,
    trackSelection: {
      audioTrackIds,
      subtitleTrackIds,
      subtitleBurnTrackId,
      subtitleDefaultTrackId,
      subtitleForcedTrackId,
      subtitleForcedOnly
    }
  };
}
|
||||
|
||||
resolveHandBrakeSourceArg(map, deviceInfo = null) {
|
||||
if (map.drive_mode === 'explicit') {
|
||||
const device = String(map.drive_device || '').trim();
|
||||
if (!device) {
|
||||
throw new Error('drive_device ist leer, obwohl drive_mode=explicit gesetzt ist.');
|
||||
}
|
||||
return device;
|
||||
}
|
||||
|
||||
const detectedPath = String(deviceInfo?.path || '').trim();
|
||||
if (detectedPath) {
|
||||
return detectedPath;
|
||||
}
|
||||
|
||||
const configuredPath = String(map.drive_device || '').trim();
|
||||
if (configuredPath) {
|
||||
return configuredPath;
|
||||
}
|
||||
|
||||
return '/dev/sr0';
|
||||
}
|
||||
|
||||
async buildHandBrakeScanConfig(deviceInfo = null) {
|
||||
const map = await this.getSettingsMap();
|
||||
const cmd = map.handbrake_command || 'HandBrakeCLI';
|
||||
const sourceArg = this.resolveHandBrakeSourceArg(map, deviceInfo);
|
||||
// Match legacy rip.sh behavior: scan all titles, then decide in app logic.
|
||||
const args = ['--scan', '--json', '-i', sourceArg, '-t', '0'];
|
||||
logger.debug('cli:handbrake:scan', {
|
||||
cmd,
|
||||
args,
|
||||
deviceInfo
|
||||
});
|
||||
return { cmd, args, sourceArg };
|
||||
}
|
||||
|
||||
async buildHandBrakeScanConfigForInput(inputPath, options = {}) {
|
||||
const map = await this.getSettingsMap();
|
||||
const cmd = map.handbrake_command || 'HandBrakeCLI';
|
||||
// RAW backup folders must be scanned as full BD source to get usable title list.
|
||||
const rawTitleId = Number(options?.titleId);
|
||||
const titleId = Number.isFinite(rawTitleId) && rawTitleId > 0
|
||||
? Math.trunc(rawTitleId)
|
||||
: 0;
|
||||
const args = ['--scan', '--json', '-i', inputPath, '-t', String(titleId)];
|
||||
logger.debug('cli:handbrake:scan:input', {
|
||||
cmd,
|
||||
args,
|
||||
inputPath,
|
||||
titleId: titleId > 0 ? titleId : null
|
||||
});
|
||||
return { cmd, args, sourceArg: inputPath };
|
||||
}
|
||||
|
||||
/**
 * Exports the configured HandBrake preset (via `--preset-export`) and distills
 * it into a profile of audio/subtitle selection behavior.
 *
 * Falls back to buildFallbackPresetProfile() with a German status message on
 * any failure: missing preset, unusable sample input, non-zero exit, missing
 * or unparsable export file.
 *
 * NOTE(review): this method is `async` but blocks the event loop via
 * spawnSync (up to 180 s timeout) — presumably acceptable for this workflow;
 * confirm before calling it from a latency-sensitive path.
 *
 * @param {string|null} [sampleInputPath] - Existing media input used only so
 *   HandBrake will perform the scan that triggers the preset export.
 * @param {object} [options] - Optional `titleId`; defaults to title 1.
 * @returns {Promise<object>} Preset profile (source 'preset-export') or fallback profile.
 */
async buildHandBrakePresetProfile(sampleInputPath = null, options = {}) {
  const map = await this.getSettingsMap();
  const cmd = map.handbrake_command || 'HandBrakeCLI';
  const presetName = map.handbrake_preset || null;
  const rawTitleId = Number(options?.titleId);
  // Preset export needs some title to scan; default to 1 when none is given.
  const presetScanTitleId = Number.isFinite(rawTitleId) && rawTitleId > 0
    ? Math.trunc(rawTitleId)
    : 1;

  if (!presetName) {
    return buildFallbackPresetProfile(null, 'Kein HandBrake-Preset konfiguriert.');
  }

  if (!sampleInputPath || !fs.existsSync(sampleInputPath)) {
    return buildFallbackPresetProfile(
      presetName,
      'Preset-Export übersprungen: kein gültiger Sample-Input für HandBrake-Scan.'
    );
  }

  // Unique temp name avoids collisions between concurrent exports.
  const exportName = `ripster-export-${Date.now()}-${Math.floor(Math.random() * 10000)}`;
  const exportFile = path.join(os.tmpdir(), `${exportName}.json`);
  const args = [
    '--scan',
    '-i',
    sampleInputPath,
    '-t',
    String(presetScanTitleId),
    '-Z',
    presetName,
    '--preset-export',
    exportName,
    '--preset-export-file',
    exportFile
  ];

  try {
    const result = spawnSync(cmd, args, {
      encoding: 'utf-8',
      timeout: 180000,
      maxBuffer: 10 * 1024 * 1024
    });

    // spawnSync reports launch failures (e.g. ENOENT) via result.error.
    if (result.error) {
      return buildFallbackPresetProfile(
        presetName,
        `Preset-Export fehlgeschlagen: ${result.error.message}`
      );
    }

    if (result.status !== 0) {
      const stderr = String(result.stderr || '').trim();
      const stdout = String(result.stdout || '').trim();
      const tail = stderr || stdout || `exit=${result.status}`;
      // Truncate output so the fallback message stays log-friendly.
      return buildFallbackPresetProfile(
        presetName,
        `Preset-Export fehlgeschlagen (${tail.slice(0, 280)})`
      );
    }

    if (!fs.existsSync(exportFile)) {
      return buildFallbackPresetProfile(
        presetName,
        'Preset-Export fehlgeschlagen: Exportdatei wurde nicht erzeugt.'
      );
    }

    // JSON.parse failures fall through to the catch branch below.
    const raw = fs.readFileSync(exportFile, 'utf-8');
    const parsed = JSON.parse(raw);
    const presetEntries = flattenPresetList(parsed?.PresetList || []);
    // Prefer the entry matching our export name; otherwise take the first one.
    const exported = presetEntries.find((entry) => entry.PresetName === exportName) || presetEntries[0];

    if (!exported) {
      return buildFallbackPresetProfile(
        presetName,
        'Preset-Export fehlgeschlagen: Kein Preset in Exportdatei gefunden.'
      );
    }

    // Every field falls back to a conservative default when absent from the export.
    return {
      source: 'preset-export',
      message: null,
      presetName,
      audioTrackSelectionBehavior: exported.AudioTrackSelectionBehavior || 'first',
      audioLanguages: Array.isArray(exported.AudioLanguageList) ? exported.AudioLanguageList : [],
      audioEncoders: Array.isArray(exported.AudioList)
        ? exported.AudioList
          .map((item) => item?.AudioEncoder)
          .filter(Boolean)
        : [],
      audioCopyMask: Array.isArray(exported.AudioCopyMask)
        ? exported.AudioCopyMask
        : DEFAULT_AUDIO_COPY_MASK,
      audioFallback: exported.AudioEncoderFallback || 'av_aac',
      subtitleTrackSelectionBehavior: exported.SubtitleTrackSelectionBehavior || 'none',
      subtitleLanguages: Array.isArray(exported.SubtitleLanguageList) ? exported.SubtitleLanguageList : [],
      subtitleBurnBehavior: exported.SubtitleBurnBehavior || 'none'
    };
  } catch (error) {
    return buildFallbackPresetProfile(
      presetName,
      `Preset-Export Ausnahme: ${error.message}`
    );
  } finally {
    // Best-effort temp-file cleanup; never let cleanup mask the real result.
    try {
      if (fs.existsSync(exportFile)) {
        fs.unlinkSync(exportFile);
      }
    } catch (_error) {
      // ignore cleanup errors
    }
  }
}
|
||||
|
||||
resolveSourceArg(map, deviceInfo = null) {
|
||||
const mode = map.drive_mode;
|
||||
if (mode === 'explicit') {
|
||||
const device = map.drive_device;
|
||||
if (!device) {
|
||||
throw new Error('drive_device ist leer, obwohl drive_mode=explicit gesetzt ist.');
|
||||
}
|
||||
return `dev:${device}`;
|
||||
}
|
||||
|
||||
if (deviceInfo && deviceInfo.index !== undefined && deviceInfo.index !== null) {
|
||||
return `disc:${deviceInfo.index}`;
|
||||
}
|
||||
|
||||
return `disc:${map.makemkv_source_index ?? 0}`;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new SettingsService();
|
||||
65
backend/src/services/websocketService.js
Normal file
65
backend/src/services/websocketService.js
Normal file
@@ -0,0 +1,65 @@
|
||||
const { WebSocketServer } = require('ws');
|
||||
const logger = require('./logger').child('WS');
|
||||
|
||||
/**
 * Singleton service owning the `/ws` WebSocket endpoint. Pushes JSON events
 * of shape { type, payload, timestamp } to every connected client.
 */
class WebSocketService {
  constructor() {
    // Created lazily in init(); stays null until an HTTP server is attached.
    this.wss = null;
    this.clients = new Set();
  }

  /**
   * Attaches the WebSocket server to an existing HTTP server. Idempotent:
   * repeated calls after the first are no-ops.
   *
   * @param {import('http').Server} httpServer - HTTP server to piggyback on.
   */
  init(httpServer) {
    if (this.wss) {
      return;
    }

    this.wss = new WebSocketServer({ server: httpServer, path: '/ws' });

    this.wss.on('connection', (socket) => {
      this.clients.add(socket);
      logger.info('client:connected', { clients: this.clients.size });

      // Greet the client immediately so it can record its connection time.
      const greeting = {
        type: 'WS_CONNECTED',
        payload: { connectedAt: new Date().toISOString() }
      };
      socket.send(JSON.stringify(greeting));

      socket.on('close', () => {
        this.clients.delete(socket);
        logger.info('client:closed', { clients: this.clients.size });
      });

      socket.on('error', () => {
        this.clients.delete(socket);
        logger.warn('client:error', { clients: this.clients.size });
      });
    });
  }

  /**
   * Serializes one event and sends it to every client whose socket is OPEN.
   * Silently does nothing before init().
   *
   * @param {string} type - Event type identifier.
   * @param {*} payload - Event payload; only its keys are logged, not values.
   */
  broadcast(type, payload) {
    if (!this.wss) {
      return;
    }

    const payloadKeys = payload && typeof payload === 'object' ? Object.keys(payload) : [];
    logger.debug('broadcast', { type, clients: this.clients.size, payloadKeys });

    const message = JSON.stringify({
      type,
      payload,
      timestamp: new Date().toISOString()
    });

    for (const client of this.clients) {
      if (client.readyState === client.OPEN) {
        client.send(message);
      }
    }
  }
}
|
||||
|
||||
module.exports = new WebSocketService();
|
||||
57
backend/src/utils/commandLine.js
Normal file
57
backend/src/utils/commandLine.js
Normal file
@@ -0,0 +1,57 @@
|
||||
/**
 * Splits a raw command-line string into an argument array, shell-like:
 * single and double quotes group words, backslash escapes the next character.
 *
 * @param {string} input - Raw argument string, e.g. `--foo "a b" -x`.
 * @returns {string[]} Parsed arguments; empty array for empty/non-string input.
 */
function splitArgs(input) {
  if (typeof input !== 'string' || input.length === 0) {
    return [];
  }

  const result = [];
  let token = '';
  let activeQuote = null;
  let escaped = false;

  const flush = () => {
    if (token.length > 0) {
      result.push(token);
      token = '';
    }
  };

  for (const ch of input) {
    if (escaped) {
      // Escaped characters are taken literally, even inside quotes.
      token += ch;
      escaped = false;
    } else if (ch === '\\') {
      escaped = true;
    } else if (activeQuote !== null) {
      if (ch === activeQuote) {
        activeQuote = null;
      } else {
        token += ch;
      }
    } else if (ch === '"' || ch === "'") {
      activeQuote = ch;
    } else if (/\s/.test(ch)) {
      // Unquoted whitespace terminates the current token.
      flush();
    } else {
      token += ch;
    }
  }

  flush();
  return result;
}
|
||||
|
||||
module.exports = {
|
||||
splitArgs
|
||||
};
|
||||
1017
backend/src/utils/encodePlan.js
Normal file
1017
backend/src/utils/encodePlan.js
Normal file
File diff suppressed because it is too large
Load Diff
18
backend/src/utils/errorMeta.js
Normal file
18
backend/src/utils/errorMeta.js
Normal file
@@ -0,0 +1,18 @@
|
||||
/**
 * Converts an Error-like object into a plain metadata object suitable for
 * structured logging.
 *
 * @param {Error|null|undefined} error - Error to serialize.
 * @returns {object} Object with name/message/stack/code/signal/statusCode
 *   (fields may be undefined), or an empty object for falsy input.
 */
function errorToMeta(error) {
  if (!error) {
    return {};
  }

  const { name, message, stack, code, signal, statusCode } = error;
  return { name, message, stack, code, signal, statusCode };
}
|
||||
|
||||
module.exports = {
|
||||
errorToMeta
|
||||
};
|
||||
70
backend/src/utils/files.js
Normal file
70
backend/src/utils/files.js
Normal file
@@ -0,0 +1,70 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
// Creates dirPath (including any missing parent directories); no-op when it
// already exists thanks to `recursive: true`.
function ensureDir(dirPath) {
  fs.mkdirSync(dirPath, { recursive: true });
}
|
||||
|
||||
/**
 * Makes a string safe for use as a file name: replaces reserved characters
 * with underscores, collapses whitespace, trims, and caps the length.
 *
 * @param {*} input - Candidate name; falsy input becomes 'untitled'.
 * @returns {string} Sanitized name, at most 180 characters.
 */
function sanitizeFileName(input) {
  const MAX_LENGTH = 180;
  const raw = String(input || 'untitled');
  const withoutReserved = raw.replace(/[\\/:*?"<>|]/g, '_');
  const collapsed = withoutReserved.replace(/\s+/g, ' ').trim();
  return collapsed.slice(0, MAX_LENGTH);
}
|
||||
|
||||
/**
 * Renders a `${key}` template against a values object. Missing, null, or
 * empty-string values render as 'unknown'.
 *
 * @param {string|null} template - Template string; falsy falls back to '${title} (${year})'.
 * @param {object} values - Lookup table for placeholder keys (keys are trimmed).
 * @returns {string} Rendered string.
 */
function renderTemplate(template, values) {
  const pattern = String(template || '${title} (${year})');
  return pattern.replace(/\$\{([^}]+)\}/g, (_match, rawKey) => {
    const value = values[rawKey.trim()];
    const isMissing = value === undefined || value === null || value === '';
    return isMissing ? 'unknown' : String(value);
  });
}
|
||||
|
||||
/**
 * Returns the single largest media file under dirPath (recursive), or null
 * when none match the given extensions.
 *
 * @param {string} dirPath - Root directory to search.
 * @param {string[]} [extensions] - Lowercase extensions to accept.
 * @returns {{path: string, size: number}|null} Largest matching file.
 */
function findLargestMediaFile(dirPath, extensions = ['.mkv', '.mp4']) {
  let largest = null;
  for (const file of findMediaFiles(dirPath, extensions)) {
    if (largest === null || file.size > largest.size) {
      largest = file;
    }
  }
  return largest;
}
|
||||
|
||||
/**
 * Recursively collects all media files under dirPath whose extension matches.
 * Results are sorted by size descending, then path ascending for determinism.
 *
 * @param {string} dirPath - Root directory to walk.
 * @param {string[]} [extensions] - Lowercase extensions to accept.
 * @returns {Array<{path: string, size: number}>} Matching files.
 */
function findMediaFiles(dirPath, extensions = ['.mkv', '.mp4']) {
  const found = [];
  const pending = [dirPath];

  // Iterative depth-first walk; traversal order is irrelevant due to the final sort.
  while (pending.length > 0) {
    const current = pending.pop();
    for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
      const absolutePath = path.join(current, entry.name);
      if (entry.isDirectory()) {
        pending.push(absolutePath);
        continue;
      }
      if (!extensions.includes(path.extname(entry.name).toLowerCase())) {
        continue;
      }
      found.push({ path: absolutePath, size: fs.statSync(absolutePath).size });
    }
  }

  found.sort((a, b) => b.size - a.size || a.path.localeCompare(b.path));
  return found;
}
|
||||
|
||||
module.exports = {
|
||||
ensureDir,
|
||||
sanitizeFileName,
|
||||
renderTemplate,
|
||||
findLargestMediaFile,
|
||||
findMediaFiles
|
||||
};
|
||||
576
backend/src/utils/playlistAnalysis.js
Normal file
576
backend/src/utils/playlistAnalysis.js
Normal file
@@ -0,0 +1,576 @@
|
||||
// Segment-number jumps larger than this are treated as structural anomalies.
const LARGE_JUMP_THRESHOLD = 20;
// Default tolerance (seconds) when grouping titles with similar durations.
const DEFAULT_DURATION_SIMILARITY_SECONDS = 90;
|
||||
|
||||
/**
 * Parses a duration string into whole seconds. Accepts "H:MM:SS", "M:SS"
 * (with optional fractional tail), or a plain positive number of seconds.
 *
 * @param {*} raw - Duration text (or number-like value).
 * @returns {number} Seconds, or 0 when the input cannot be parsed.
 */
function parseDurationSeconds(raw) {
  const text = String(raw || '').trim();
  if (text === '') {
    return 0;
  }

  const hmsMatch = /^(\d{1,2}):(\d{2}):(\d{2})(?:\.\d+)?$/.exec(text);
  if (hmsMatch) {
    const [, hours, minutes, seconds] = hmsMatch;
    return (Number(hours) * 3600) + (Number(minutes) * 60) + Number(seconds);
  }

  const msMatch = /^(\d{1,2}):(\d{2})(?:\.\d+)?$/.exec(text);
  if (msMatch) {
    return (Number(msMatch[1]) * 60) + Number(msMatch[2]);
  }

  // Last resort: a bare positive number is interpreted as seconds.
  const numeric = Number(text);
  return Number.isFinite(numeric) && numeric > 0 ? Math.round(numeric) : 0;
}
|
||||
|
||||
/**
 * Formats a duration in seconds as zero-padded "HH:MM:SS".
 *
 * @param {number} seconds - Duration in seconds; fractions are truncated.
 * @returns {string} "HH:MM:SS", or '-' for missing/non-finite/non-positive input.
 */
function formatDuration(seconds) {
  const total = Number(seconds || 0);
  if (!Number.isFinite(total) || total <= 0) {
    return '-';
  }

  const h = Math.floor(total / 3600);
  const m = Math.floor((total % 3600) / 60);
  // Floor the seconds as well: a fractional input like 3723.6 must still
  // produce well-formed "HH:MM:SS" (the bare `total % 60` yielded "…:3.6").
  const s = Math.floor(total % 60);
  return `${String(h).padStart(2, '0')}:${String(m).padStart(2, '0')}:${String(s).padStart(2, '0')}`;
}
|
||||
|
||||
/**
 * Parses a human-readable size string into bytes. Accepts a plain integer
 * (byte count) or "<number> <unit>" with unit B/KB/MB/GB/TB (1024-based).
 *
 * @param {*} raw - Size text.
 * @returns {number} Non-negative byte count; 0 when unparsable.
 */
function parseSizeBytes(raw) {
  const text = String(raw || '').trim();
  if (text === '') {
    return 0;
  }

  // A bare integer is interpreted directly as a byte count.
  if (/^\d+$/.test(text)) {
    const direct = Number(text);
    return Number.isFinite(direct) ? Math.max(0, Math.round(direct)) : 0;
  }

  const match = /([\d.]+)\s*(B|KB|MB|GB|TB)/i.exec(text);
  if (!match) {
    return 0;
  }

  const value = Number(match[1]);
  if (!Number.isFinite(value)) {
    return 0;
  }

  const factors = { B: 1, KB: 1024, MB: 1024 ** 2, GB: 1024 ** 3, TB: 1024 ** 4 };
  const factor = factors[String(match[2] || '').toUpperCase()] || 1;
  return Math.max(0, Math.round(value * factor));
}
|
||||
|
||||
/**
 * Normalizes a playlist reference ("123", "00123.mpls", "MOVIE_00045.mpls")
 * into a canonical 5-digit zero-padded id.
 *
 * @param {*} raw - Playlist id or file name fragment.
 * @returns {string|null} 5-digit playlist id, or null when unrecognizable.
 */
function normalizePlaylistId(raw) {
  const value = String(raw || '').trim().toLowerCase();
  if (value === '') {
    return null;
  }

  // Grab the trailing digit run, with or without a ".mpls" suffix.
  const match = /(\d{1,5})(?:\.mpls)?$/i.exec(value);
  return match ? match[1].padStart(5, '0') : null;
}
|
||||
|
||||
/**
 * Converts a segment number into its ".m2ts" file name (5-digit zero-padded).
 *
 * @param {*} segmentNumber - Segment number (number or numeric string).
 * @returns {string|null} e.g. "00003.m2ts", or null for negative/non-finite input.
 */
function toSegmentFile(segmentNumber) {
  const numeric = Number(segmentNumber);
  if (!Number.isFinite(numeric) || numeric < 0) {
    return null;
  }

  const padded = String(Math.trunc(numeric)).padStart(5, '0');
  return `${padded}.m2ts`;
}
|
||||
|
||||
/**
 * Extracts all segment numbers (runs of up to 6 digits) from a raw text value
 * such as MakeMKV's TINFO:26 field.
 *
 * @param {*} raw - Text containing segment numbers.
 * @returns {number[]} Non-negative integers in order of appearance.
 */
function parseSegmentNumbers(raw) {
  const text = String(raw || '').trim();
  if (text === '') {
    return [];
  }

  const result = [];
  for (const token of text.match(/\d{1,6}/g) || []) {
    const numeric = Number(token);
    if (Number.isFinite(numeric) && numeric >= 0) {
      result.push(Math.trunc(numeric));
    }
  }
  return result;
}
|
||||
|
||||
/**
 * Extracts a playlist-to-title mapping from one MakeMKV output line.
 * Prefers robot messages (MSG:3016); falls back to free-text English/German
 * "file <x>.mpls ... title #<n>" lines.
 *
 * @param {*} line - Raw MakeMKV output line.
 * @returns {{playlistId: string|null, titleId: number}|null} Mapping, or null.
 */
function extractPlaylistMapping(line) {
  const raw = String(line || '');

  // Robot message typically maps playlist to title id.
  const robotMatch = raw.match(/MSG:3016.*,"(\d{5}\.mpls)","(\d+)"/i);
  if (robotMatch) {
    return {
      playlistId: normalizePlaylistId(robotMatch[1]),
      titleId: Number(robotMatch[2])
    };
  }

  const textMatch = raw.match(/(?:file|datei)\s+(\d{5}\.mpls).*?(?:title\s*#|titel\s*#?\s*)(\d+)/i);
  if (textMatch) {
    return {
      playlistId: normalizePlaylistId(textMatch[1]),
      titleId: Number(textMatch[2])
    };
  }

  return null;
}
|
||||
|
||||
/**
 * Parses MakeMKV robot-mode output lines into per-title records.
 *
 * TINFO field ids handled explicitly: 16 (playlist file name), 26 (segment
 * list), 9 (duration), 10/11 (size), 8/7 (chapter count). All fields are also
 * stashed verbatim in `fields` for downstream consumers. Duration/size
 * heuristics at the bottom of the loop fill gaps from unlabeled fields.
 *
 * @param {string[]} lines - Raw MakeMKV output lines.
 * @returns {Array<object>} Title records sorted by titleId ascending.
 */
function parseAnalyzeTitles(lines) {
  const titleMap = new Map();

  // Lazily creates the record for a title id with all fields defaulted.
  const ensureTitle = (titleId) => {
    if (!titleMap.has(titleId)) {
      titleMap.set(titleId, {
        titleId,
        playlistId: null,
        playlistIdFromMap: null,
        playlistIdFromField16: null,
        playlistFile: null,
        durationSeconds: 0,
        durationLabel: null,
        sizeBytes: 0,
        sizeLabel: null,
        chapters: 0,
        segmentNumbers: [],
        segmentFiles: [],
        fields: {}
      });
    }
    return titleMap.get(titleId);
  };

  for (const line of lines || []) {
    // MSG:3016 (or free-text) playlist mapping lines may precede TINFO lines.
    const mapping = extractPlaylistMapping(line);
    if (mapping && Number.isFinite(mapping.titleId) && mapping.titleId >= 0) {
      const title = ensureTitle(mapping.titleId);
      title.playlistIdFromMap = normalizePlaylistId(mapping.playlistId);
    }

    const tinfo = String(line || '').match(/^TINFO:(\d+),(\d+),\d+,"([^"]*)"/i);
    if (!tinfo) {
      continue;
    }

    const titleId = Number(tinfo[1]);
    const fieldId = Number(tinfo[2]);
    const value = String(tinfo[3] || '').trim();
    if (!Number.isFinite(titleId) || titleId < 0) {
      continue;
    }

    const title = ensureTitle(titleId);
    // Keep every raw field so later consumers can inspect unhandled ids.
    title.fields[fieldId] = value;

    // Field 16: source playlist file name.
    if (fieldId === 16) {
      const fromField = normalizePlaylistId(value);
      if (fromField) {
        title.playlistIdFromField16 = fromField;
      }
      continue;
    }

    // Field 26: ordered list of m2ts segment numbers.
    if (fieldId === 26) {
      const segmentNumbers = parseSegmentNumbers(value);
      if (segmentNumbers.length > 0) {
        title.segmentNumbers = segmentNumbers;
      }
      continue;
    }

    // Field 9: title duration.
    if (fieldId === 9) {
      const seconds = parseDurationSeconds(value);
      if (seconds > 0) {
        title.durationSeconds = seconds;
        title.durationLabel = formatDuration(seconds);
      }
      continue;
    }

    // Fields 10/11: title size (label kept verbatim for display).
    if (fieldId === 10 || fieldId === 11) {
      const bytes = parseSizeBytes(value);
      if (bytes > 0) {
        title.sizeBytes = bytes;
        title.sizeLabel = value;
      }
      continue;
    }

    // Fields 8/7: chapter count. Intentionally no `continue` — the generic
    // duration/size heuristics below may still fire for this value.
    if (fieldId === 8 || fieldId === 7) {
      const chapters = Number(value);
      if (Number.isFinite(chapters) && chapters >= 0) {
        title.chapters = Math.trunc(chapters);
      }
    }

    // Heuristic fallback: any other field that looks like a H:MM:SS duration.
    if (!title.durationSeconds && /\d+:\d{2}:\d{2}/.test(value)) {
      const seconds = parseDurationSeconds(value);
      if (seconds > 0) {
        title.durationSeconds = seconds;
        title.durationLabel = formatDuration(seconds);
      }
    }

    // Heuristic fallback: any other field carrying a size unit.
    if (!title.sizeBytes && /(kb|mb|gb|tb)\b/i.test(value)) {
      const bytes = parseSizeBytes(value);
      if (bytes > 0) {
        title.sizeBytes = bytes;
        title.sizeLabel = value;
      }
    }
  }

  return Array.from(titleMap.values())
    .map((item) => {
      const playlistId = normalizePlaylistId(item.playlistId);
      const playlistIdFromMap = normalizePlaylistId(item.playlistIdFromMap);
      const playlistIdFromField16 = normalizePlaylistId(item.playlistIdFromField16);
      // Prefer explicit title<->playlist map lines from MakeMKV (MSG:3016).
      const resolvedPlaylistId = playlistIdFromMap || playlistIdFromField16 || playlistId;
      const segmentNumbers = Array.isArray(item.segmentNumbers) ? item.segmentNumbers : [];
      const segmentFiles = segmentNumbers
        .map((number) => toSegmentFile(number))
        .filter(Boolean);

      return {
        ...item,
        playlistId: resolvedPlaylistId,
        playlistIdFromMap,
        playlistIdFromField16,
        playlistFile: resolvedPlaylistId ? `${resolvedPlaylistId}.mpls` : null,
        durationLabel: item.durationLabel || formatDuration(item.durationSeconds),
        segmentNumbers,
        segmentFiles
      };
    })
    .sort((a, b) => a.titleId - b.titleId);
}
|
||||
|
||||
/**
 * Deduplicates values case-insensitively while preserving first-seen order.
 * Blank/falsy entries are dropped; returned entries are trimmed.
 *
 * @param {Array<*>|null|undefined} values - Candidate values.
 * @returns {string[]} Trimmed, order-preserving unique values.
 */
function uniqueOrdered(values) {
  const seen = new Set();
  const result = [];

  for (const value of values || []) {
    const trimmed = String(value || '').trim();
    const key = trimmed.toLowerCase();
    if (key === '' || seen.has(key)) {
      continue;
    }
    seen.add(key);
    result.push(trimmed);
  }

  return result;
}
|
||||
|
||||
/**
 * Greedily groups candidate titles whose durations fall within a tolerance of
 * each other. Only groups with 2+ members are kept (duplicates are evidence of
 * playlist obfuscation). Groups are sorted by duration desc, then group size.
 *
 * @param {Array<object>} candidates - Title records with durationSeconds/sizeBytes/titleId.
 * @param {number} durationSimilaritySeconds - Tolerance in seconds.
 * @returns {Array<{durationSeconds: number, durationLabel: string, titles: Array<object>}>}
 */
function buildSimilarityGroups(candidates, durationSimilaritySeconds) {
  const pool = Array.isArray(candidates) ? [...candidates] : [];
  const tolerance = Math.max(0, Math.round(Number(durationSimilaritySeconds || 0)));
  const consumed = new Set();
  const groups = [];

  for (let i = 0; i < pool.length; i += 1) {
    if (consumed.has(i)) {
      continue;
    }

    const anchor = pool[i];
    consumed.add(i);
    const members = [anchor];

    // Sweep the remainder for titles within tolerance of this anchor.
    for (let j = i + 1; j < pool.length; j += 1) {
      if (consumed.has(j)) {
        continue;
      }
      const delta = Math.abs(Number(pool[j].durationSeconds || 0) - Number(anchor.durationSeconds || 0));
      if (delta <= tolerance) {
        members.push(pool[j]);
        consumed.add(j);
      }
    }

    // Singletons are not obfuscation evidence; only keep real duplicates.
    if (members.length < 2) {
      continue;
    }

    const orderedTitles = [...members].sort(
      (a, b) => b.durationSeconds - a.durationSeconds || b.sizeBytes - a.sizeBytes || a.titleId - b.titleId
    );
    const referenceDuration = Number(orderedTitles[0]?.durationSeconds || 0);
    groups.push({
      durationSeconds: referenceDuration,
      durationLabel: formatDuration(referenceDuration),
      titles: orderedTitles
    });
  }

  return groups.sort(
    (a, b) => b.durationSeconds - a.durationSeconds || b.titles.length - a.titles.length
  );
}
|
||||
|
||||
/**
 * Derives structural metrics from a playlist's segment-number sequence.
 * A genuine main feature usually has a near-linear segment order; obfuscated
 * ("fake") playlists show backward and large alternating jumps.
 *
 * @param {number[]} segmentNumbers - Segment numbers in playback order.
 * @returns {object} Metrics: counts of direct steps, backward jumps, large
 *   jumps, alternating jump pairs, coherence/alternation ratios, and a
 *   composite score (higher = more likely genuine).
 */
function computeSegmentMetrics(segmentNumbers) {
  // Keep only finite numbers, truncated to integers.
  const numbers = Array.isArray(segmentNumbers)
    ? segmentNumbers.filter((value) => Number.isFinite(value)).map((value) => Math.trunc(value))
    : [];

  // No usable segment list: return an all-zero metrics object.
  if (numbers.length === 0) {
    return {
      segmentCount: 0,
      segmentNumbers: [],
      directSequenceSteps: 0,
      backwardJumps: 0,
      largeJumps: 0,
      alternatingJumps: 0,
      alternatingPairs: 0,
      alternatingRatio: 0,
      sequenceCoherence: 0,
      monotonicRatio: 0,
      score: 0
    };
  }

  let directSequenceSteps = 0;
  let backwardJumps = 0;
  let largeJumps = 0;
  let alternatingJumps = 0;
  let alternatingPairs = 0;
  let prevDiff = null;

  // Walk consecutive pairs, classifying each transition.
  for (let i = 1; i < numbers.length; i += 1) {
    const current = numbers[i - 1];
    const next = numbers[i];
    const diff = next - current;

    if (next < current) {
      backwardJumps += 1;
    }
    if (Math.abs(diff) > LARGE_JUMP_THRESHOLD) {
      largeJumps += 1;
    }
    if (diff === 1) {
      // Perfectly consecutive segment — the hallmark of a genuine playlist.
      directSequenceSteps += 1;
    }

    // Two consecutive large jumps form a "pair"; a sign flip between them
    // (forward then backward, or vice versa) is the alternating fake pattern.
    if (prevDiff !== null) {
      const largePair = Math.abs(prevDiff) > LARGE_JUMP_THRESHOLD && Math.abs(diff) > LARGE_JUMP_THRESHOLD;
      if (largePair) {
        alternatingPairs += 1;
        const signChanged = (prevDiff < 0 && diff > 0) || (prevDiff > 0 && diff < 0);
        if (signChanged) {
          alternatingJumps += 1;
        }
      }
    }
    prevDiff = diff;
  }

  // Ratios are computed over transitions (n-1), guarded against division by zero.
  const transitions = Math.max(1, numbers.length - 1);
  const sequenceCoherence = Number((directSequenceSteps / transitions).toFixed(4));
  const alternatingRatio = alternatingPairs > 0
    ? Number((alternatingJumps / alternatingPairs).toFixed(4))
    : 0;

  // Composite score: reward linear steps, penalize backward and large jumps.
  const score = (directSequenceSteps * 2) - (backwardJumps * 3) - (largeJumps * 2);

  return {
    segmentCount: numbers.length,
    segmentNumbers: numbers,
    directSequenceSteps,
    backwardJumps,
    largeJumps,
    alternatingJumps,
    alternatingPairs,
    alternatingRatio,
    sequenceCoherence,
    // Kept as an alias of sequenceCoherence for backward compatibility.
    monotonicRatio: sequenceCoherence,
    score
  };
}
|
||||
|
||||
/**
 * Maps segment metrics to a human-readable (German) evaluation label.
 *
 * @param {object|null} metrics - Result of computeSegmentMetrics().
 * @returns {string} Evaluation label.
 */
function buildEvaluationLabel(metrics) {
  if (!metrics || metrics.segmentCount === 0) {
    return 'Keine Segmentliste aus TINFO:26 verfügbar';
  }

  const looksAlternating = metrics.alternatingRatio >= 0.55 && metrics.alternatingPairs >= 3;
  if (looksAlternating) {
    return 'Fake-Struktur (alternierendes Sprungmuster)';
  }

  const hasIrregularOrder = metrics.backwardJumps > 0 || metrics.largeJumps > 0;
  return hasIrregularOrder
    ? 'Auffällige Segmentreihenfolge'
    : 'wahrscheinlich korrekt (lineare Segmentfolge)';
}
|
||||
|
||||
/**
 * Scores each candidate title of a similarity group by segment-structure
 * metrics, sorts best-first, and flags the top candidate as recommended.
 *
 * @param {Array<object>} groupTitles - Titles of one similarity group.
 * @returns {Array<object>} Enriched titles sorted best-first; empty for no input.
 */
function scoreCandidates(groupTitles) {
  if (!Array.isArray(groupTitles) || groupTitles.length === 0) {
    return [];
  }

  const evaluated = groupTitles.map((title) => {
    const metrics = computeSegmentMetrics(title.segmentNumbers);
    return {
      ...title,
      score: Number(metrics.score || 0),
      // Human-readable breakdown of what drove the score.
      reasons: [
        `sequence_steps=${metrics.directSequenceSteps}`,
        `sequence_coherence=${metrics.sequenceCoherence.toFixed(3)}`,
        `backward_jumps=${metrics.backwardJumps}`,
        `large_jumps=${metrics.largeJumps}`,
        `alternating_ratio=${metrics.alternatingRatio.toFixed(3)}`
      ],
      structuralMetrics: metrics,
      evaluationLabel: buildEvaluationLabel(metrics)
    };
  });

  // Tie-breaking chain: score, coherence, duration, size, then title id.
  evaluated.sort(
    (a, b) =>
      b.score - a.score
      || b.structuralMetrics.sequenceCoherence - a.structuralMetrics.sequenceCoherence
      || b.durationSeconds - a.durationSeconds
      || b.sizeBytes - a.sizeBytes
      || a.titleId - b.titleId
  );

  return evaluated.map((item, index) => ({ ...item, recommended: index === 0 }));
}
|
||||
|
||||
/**
 * Builds a map keyed by normalized playlist id with playlist file paths,
 * segment info, and a manual-verification shell command per playlist.
 * The first title seen for a playlist id wins; duplicates are ignored.
 *
 * @param {Array<object>} titles - Title records (playlistId/segmentFiles/segmentNumbers).
 * @returns {Object<string, object>} Playlist id -> segment descriptor.
 */
function buildPlaylistSegmentMap(titles) {
  const map = {};
  for (const title of titles || []) {
    const playlistId = normalizePlaylistId(title?.playlistId);
    // Skip unmappable titles and playlists already registered (first wins).
    if (!playlistId || map[playlistId]) {
      continue;
    }

    map[playlistId] = {
      playlistId,
      playlistFile: `${playlistId}.mpls`,
      playlistPath: `BDMV/PLAYLIST/${playlistId}.mpls`,
      // Shell one-liner for manually inspecting the playlist's segment list.
      segmentCommand: `strings BDMV/PLAYLIST/${playlistId}.mpls | grep m2ts`,
      segmentFiles: Array.isArray(title?.segmentFiles) ? title.segmentFiles : [],
      segmentNumbers: Array.isArray(title?.segmentNumbers) ? title.segmentNumbers : [],
      // Unknown until checked against the actual backup folder.
      fileExists: null,
      source: 'makemkv_tinfo_26'
    };
  }
  return map;
}
|
||||
|
||||
/**
 * Builds a lookup from playlist identifiers to MakeMKV title ids. Each
 * playlist is registered under both its bare 5-digit id and its ".mpls"
 * file name; the first mapping seen for a key wins.
 *
 * @param {Array<object>} titles - Title records with playlistId/playlistFile/titleId.
 * @returns {Object<string, number>} Playlist key -> title id.
 */
function buildPlaylistToTitleIdMap(titles) {
  const map = {};

  for (const title of titles || []) {
    const playlistId = normalizePlaylistId(title?.playlistId || title?.playlistFile || null);
    const rawTitleId = Number(title?.titleId);
    if (!playlistId || !Number.isFinite(rawTitleId) || rawTitleId < 0) {
      continue;
    }

    const titleId = Math.trunc(rawTitleId);
    // Register under both key forms; never overwrite an earlier mapping.
    for (const key of [playlistId, `${playlistId}.mpls`]) {
      if (map[key] === undefined) {
        map[key] = titleId;
      }
    }
  }

  return map;
}
|
||||
|
||||
/**
 * Filters output lines down to those that look like warnings or errors
 * (English/German keywords), capped at 40 lines of 260 characters each.
 *
 * @param {Array<*>|null|undefined} lines - Raw output lines.
 * @returns {string[]} Truncated warning lines.
 */
function extractWarningLines(lines) {
  const PATTERN = /warn|warning|error|fehler|decode|decoder|timeout|corrupt/i;
  const MAX_LINES = 40;
  const MAX_LENGTH = 260;

  const collected = [];
  for (const line of Array.isArray(lines) ? lines : []) {
    if (collected.length >= MAX_LINES) {
      break;
    }
    const text = String(line || '');
    if (PATTERN.test(text)) {
      collected.push(text.slice(0, MAX_LENGTH));
    }
  }
  return collected;
}
|
||||
|
||||
/**
 * Produces (German) warning strings for titles whose MSG:3016 playlist
 * mapping disagrees with their TINFO field 16 value, capped at 25 entries.
 *
 * @param {Array<object>|null|undefined} titles - Title records.
 * @returns {string[]} Mismatch warnings (MSG mapping is the preferred source).
 */
function extractPlaylistMismatchWarnings(titles) {
  const warnings = [];
  for (const title of Array.isArray(titles) ? titles : []) {
    if (warnings.length >= 25) {
      break;
    }
    const fromMsg = title?.playlistIdFromMap;
    const fromField = title?.playlistIdFromField16;
    if (!fromMsg || !fromField || String(fromMsg) === String(fromField)) {
      continue;
    }
    warnings.push(
      `Titel #${title.titleId}: MSG-Playlist=${fromMsg}.mpls, TINFO16=${fromField}.mpls (MSG bevorzugt)`
    );
  }
  return warnings;
}
|
||||
|
||||
/**
 * Top-level analysis of MakeMKV scan output for playlist obfuscation.
 *
 * Pipeline: parse titles -> keep those at least minLengthMinutes long ->
 * group titles with near-identical durations (duplicates indicate
 * obfuscation) -> score the primary group's candidates structurally ->
 * assemble a report with a recommendation and warnings.
 *
 * @param {string[]} lines - Raw MakeMKV output lines.
 * @param {number} [minLengthMinutes] - Minimum title length to consider.
 * @param {object} [options] - Optional `durationSimilaritySeconds` tolerance.
 * @returns {object} Analysis report (titles, groups, recommendation, warnings).
 */
function analyzePlaylistObfuscation(lines, minLengthMinutes = 60, options = {}) {
  const parsedTitles = parseAnalyzeTitles(lines);
  const minSeconds = Math.max(0, Math.round(Number(minLengthMinutes || 0) * 60));
  const durationSimilaritySeconds = Math.max(
    0,
    Math.round(Number(options.durationSimilaritySeconds || DEFAULT_DURATION_SIMILARITY_SECONDS))
  );

  // Candidates: long-enough titles, longest/largest first.
  const candidates = parsedTitles
    .filter((item) => Number(item.durationSeconds || 0) >= minSeconds)
    .sort((a, b) => b.durationSeconds - a.durationSeconds || b.sizeBytes - a.sizeBytes || a.titleId - b.titleId);

  // Any duplicate-duration group is taken as evidence of obfuscation.
  const similarityGroups = buildSimilarityGroups(candidates, durationSimilaritySeconds);
  const obfuscationDetected = similarityGroups.length > 0;
  // Only the first (longest-duration) group is scored in detail.
  const primaryGroup = similarityGroups[0] || null;
  const evaluatedCandidates = primaryGroup ? scoreCandidates(primaryGroup.titles) : [];
  const recommendation = evaluatedCandidates[0] || null;
  const candidatePlaylists = primaryGroup
    ? uniqueOrdered(primaryGroup.titles.map((item) => item.playlistId).filter(Boolean))
    : [];
  const playlistSegments = buildPlaylistSegmentMap(primaryGroup ? primaryGroup.titles : []);
  const playlistToTitleId = buildPlaylistToTitleIdMap(parsedTitles);

  return {
    generatedAt: new Date().toISOString(),
    minLengthMinutes: Number(minLengthMinutes || 0),
    minLengthSeconds: minSeconds,
    durationSimilaritySeconds,
    titles: parsedTitles,
    candidates,
    duplicateDurationGroups: similarityGroups,
    obfuscationDetected,
    // Obfuscated discs always require a manual title decision.
    manualDecisionRequired: obfuscationDetected,
    candidatePlaylists,
    candidatePlaylistFiles: candidatePlaylists.map((item) => `${item}.mpls`),
    playlistToTitleId,
    recommendation: recommendation
      ? {
        titleId: recommendation.titleId,
        playlistId: recommendation.playlistId,
        score: Number(recommendation.score || 0),
        reason: Array.isArray(recommendation.reasons) && recommendation.reasons.length > 0
          ? recommendation.reasons.join('; ')
          : 'höchster Struktur-Score'
      }
      : null,
    evaluatedCandidates,
    playlistSegments,
    structuralAnalysis: {
      method: 'makemkv_tinfo_26',
      sourceCommand: 'makemkvcon -r info disc:0 --robot',
      analyzedPlaylists: Object.keys(playlistSegments).length
    },
    // Combined warning feed: raw output warnings plus playlist-mapping mismatches.
    warningLines: [
      ...extractWarningLines(lines),
      ...extractPlaylistMismatchWarnings(parsedTitles)
    ].slice(0, 60)
  };
}
|
||||
|
||||
// Public API of the playlist-obfuscation analyzer.
module.exports = {
  normalizePlaylistId,
  analyzePlaylistObfuscation
};
|
||||
72
backend/src/utils/progressParsers.js
Normal file
72
backend/src/utils/progressParsers.js
Normal file
@@ -0,0 +1,72 @@
|
||||
/**
 * Clamps a numeric progress value into [0, 100], rounded to two decimals.
 *
 * @param {number} value - Raw percent value (may be NaN/Infinity from parsing).
 * @returns {number|null} Clamped percent, or null when not a finite number.
 */
function clampPercent(value) {
  // Number.isFinite covers NaN, ±Infinity AND non-number inputs in one check;
  // the previous three-way comparison threw a TypeError on non-number values
  // (e.g. undefined.toFixed).
  if (!Number.isFinite(value)) {
    return null;
  }

  return Math.max(0, Math.min(100, Number(value.toFixed(2))));
}
|
||||
|
||||
// Pulls the first "<number>%" occurrence out of a log line.
// Returns a clamped percent, or null when the line carries none.
function parseGenericPercent(line) {
  const percentPattern = /(\d{1,3}(?:\.\d+)?)\s?%/;
  const found = line.match(percentPattern);

  return found ? clampPercent(Number(found[1])) : null;
}
|
||||
|
||||
// Extracts an ETA token such as "00h12m34s" or "12:34" following "ETA".
// Placeholder values containing "--" and missing tokens yield null;
// trailing punctuation like ")" or "." is stripped from the result.
function parseEta(line) {
  const match = /ETA\s+([0-9:.hms-]+)/i.exec(line);
  if (match === null) {
    return null;
  }

  const token = match[1].trim();
  if (token.length === 0 || token.includes('--')) {
    return null;
  }

  return token.replace(/[),.;]+$/, '');
}
|
||||
|
||||
// Parses MakeMKV robot-mode progress. Prefers "PRGV:current,total,max"
// triplets (percent = current/max, falling back to current/total when max
// is 0), then any generic "<n>%" token. Returns { percent, eta } or null.
function parseMakeMkvProgress(line) {
  const triplet = /PRGV:(\d+),(\d+),(\d+)/.exec(line);
  if (triplet) {
    const [current, total, max] = triplet.slice(1, 4).map(Number);
    const divisor = max > 0 ? max : total > 0 ? total : 0;
    if (divisor > 0) {
      return { percent: clampPercent((current / divisor) * 100), eta: null };
    }
  }

  const genericPercent = parseGenericPercent(line);
  return genericPercent !== null ? { percent: genericPercent, eta: null } : null;
}
|
||||
|
||||
// Parses a HandBrake CLI "Encoding: ..." status line into { percent, eta }.
// Whitespace is collapsed first because HandBrake pads its console output.
function parseHandBrakeProgress(line) {
  const compact = String(line || '').replace(/\s+/g, ' ').trim();
  const encoding = compact.match(/Encoding:\s*(?:task\s+\d+\s+of\s+\d+,\s*)?(\d+(?:\.\d+)?)\s?%/i);

  if (!encoding) {
    return null;
  }

  return {
    percent: clampPercent(Number(encoding[1])),
    eta: parseEta(compact)
  };
}
|
||||
|
||||
// Progress-line parsers for the MakeMKV and HandBrake job runners.
module.exports = {
  parseMakeMkvProgress,
  parseHandBrakeProgress
};
|
||||
112
backend/src/utils/validators.js
Normal file
112
backend/src/utils/validators.js
Normal file
@@ -0,0 +1,112 @@
|
||||
// Safely parses a JSON string; falsy input or invalid JSON yields the
// fallback instead of throwing.
function parseJson(value, fallback = null) {
  if (!value) {
    return fallback;
  }

  let parsed;
  try {
    parsed = JSON.parse(value);
  } catch (parseError) {
    parsed = fallback;
  }
  return parsed;
}
|
||||
|
||||
// Coerces common truthy/falsy encodings ('true'/'1'/1 and 'false'/'0'/0)
// to a real boolean; anything else falls back to Boolean(value).
// Note: switch cases use strict comparison, matching the original ===.
function toBoolean(value) {
  if (typeof value === 'boolean') {
    return value;
  }

  switch (value) {
    case 'true':
    case '1':
    case 1:
      return true;
    case 'false':
    case '0':
    case 0:
      return false;
    default:
      return Boolean(value);
  }
}
|
||||
|
||||
// Converts a raw (usually string) setting value into its typed form.
// null/undefined normalize to null; unknown types are treated as strings.
function normalizeValueByType(type, rawValue) {
  if (rawValue == null) {
    return null;
  }

  if (type === 'number') {
    return Number(rawValue);
  }
  if (type === 'boolean') {
    return toBoolean(rawValue);
  }

  // 'select', 'string', 'path' and anything unknown are stored as strings.
  return String(rawValue);
}
|
||||
|
||||
// Turns a typed setting value back into its string storage form
// (booleans become 'true'/'false'); null/undefined stay null.
function serializeValueByType(type, value) {
  if (value == null) {
    return null;
  }

  return type === 'boolean' ? (value ? 'true' : 'false') : String(value);
}
|
||||
|
||||
/**
 * Validates a setting value against its schema row.
 *
 * Checks, in order: required (null or whitespace-only string rejected),
 * then type-specific rules read from the schema's JSON columns
 * (number min/max, select option membership, string/path minLength).
 *
 * @param {object} schemaItem - Schema row with type, required,
 *   validation_json and options_json fields.
 * @param {*} value - Raw value to validate.
 * @returns {{valid: boolean, errors: string[], normalized: *}}
 */
function validateSetting(schemaItem, value) {
  const normalized = normalizeValueByType(schemaItem.type, value);
  const errors = [];

  if (schemaItem.required) {
    const blank = typeof normalized === 'string' && normalized.trim().length === 0;
    if (normalized === null || blank) {
      errors.push('Wert ist erforderlich.');
    }
  }

  if (normalized !== null) {
    switch (schemaItem.type) {
      case 'number': {
        if (Number.isNaN(normalized)) {
          errors.push('Ungültige Zahl.');
          break;
        }
        const rules = parseJson(schemaItem.validation_json, {});
        if (typeof rules.min === 'number' && normalized < rules.min) {
          errors.push(`Wert muss >= ${rules.min} sein.`);
        }
        if (typeof rules.max === 'number' && normalized > rules.max) {
          errors.push(`Wert muss <= ${rules.max} sein.`);
        }
        break;
      }
      case 'select': {
        const allowed = parseJson(schemaItem.options_json, []).map((option) => option.value);
        if (!allowed.includes(normalized)) {
          errors.push('Ungültige Auswahl.');
        }
        break;
      }
      case 'path':
      case 'string': {
        const rules = parseJson(schemaItem.validation_json, {});
        if (typeof rules.minLength === 'number' && normalized.length < rules.minLength) {
          errors.push(`Wert muss mindestens ${rules.minLength} Zeichen haben.`);
        }
        break;
      }
      default:
        break;
    }
  }

  return {
    valid: errors.length === 0,
    errors,
    normalized
  };
}
|
||||
|
||||
// Shared settings validation/serialization helpers.
module.exports = {
  parseJson,
  normalizeValueByType,
  serializeValueByType,
  validateSetting,
  toBoolean
};
|
||||
Reference in New Issue
Block a user