some pload

This commit is contained in:
2026-03-08 21:52:21 +00:00
parent e1a87af16a
commit 7204dbb65b
24 changed files with 4947 additions and 703 deletions

View File

@@ -62,6 +62,42 @@ const defaultSchema = [
validation: { minLength: 1 },
orderIndex: 100
},
{
key: 'raw_dir_bluray',
category: 'Pfade',
label: 'Raw Ausgabeordner (Blu-ray)',
type: 'path',
required: 0,
description: 'Optionaler RAW-Zielpfad nur für Blu-ray. Leer = Fallback auf "Raw Ausgabeordner".',
defaultValue: '',
options: [],
validation: {},
orderIndex: 101
},
{
key: 'raw_dir_dvd',
category: 'Pfade',
label: 'Raw Ausgabeordner (DVD)',
type: 'path',
required: 0,
description: 'Optionaler RAW-Zielpfad nur für DVD. Leer = Fallback auf "Raw Ausgabeordner".',
defaultValue: '',
options: [],
validation: {},
orderIndex: 102
},
{
key: 'raw_dir_other',
category: 'Pfade',
label: 'Raw Ausgabeordner (Sonstiges)',
type: 'path',
required: 0,
description: 'Optionaler RAW-Zielpfad nur für Sonstiges. Leer = Fallback auf "Raw Ausgabeordner".',
defaultValue: '',
options: [],
validation: {},
orderIndex: 103
},
{
key: 'movie_dir',
category: 'Pfade',
@@ -74,6 +110,42 @@ const defaultSchema = [
validation: { minLength: 1 },
orderIndex: 110
},
{
key: 'movie_dir_bluray',
category: 'Pfade',
label: 'Film Ausgabeordner (Blu-ray)',
type: 'path',
required: 0,
description: 'Optionaler Encode-Zielpfad nur für Blu-ray. Leer = Fallback auf "Film Ausgabeordner".',
defaultValue: '',
options: [],
validation: {},
orderIndex: 111
},
{
key: 'movie_dir_dvd',
category: 'Pfade',
label: 'Film Ausgabeordner (DVD)',
type: 'path',
required: 0,
description: 'Optionaler Encode-Zielpfad nur für DVD. Leer = Fallback auf "Film Ausgabeordner".',
defaultValue: '',
options: [],
validation: {},
orderIndex: 112
},
{
key: 'movie_dir_other',
category: 'Pfade',
label: 'Film Ausgabeordner (Sonstiges)',
type: 'path',
required: 0,
description: 'Optionaler Encode-Zielpfad nur für Sonstiges. Leer = Fallback auf "Film Ausgabeordner".',
defaultValue: '',
options: [],
validation: {},
orderIndex: 113
},
{
key: 'log_dir',
category: 'Pfade',

View File

@@ -10,8 +10,10 @@ const requestLogger = require('./middleware/requestLogger');
const settingsRoutes = require('./routes/settingsRoutes');
const pipelineRoutes = require('./routes/pipelineRoutes');
const historyRoutes = require('./routes/historyRoutes');
const cronRoutes = require('./routes/cronRoutes');
const wsService = require('./services/websocketService');
const pipelineService = require('./services/pipelineService');
const cronService = require('./services/cronService');
const diskDetectionService = require('./services/diskDetectionService');
const hardwareMonitorService = require('./services/hardwareMonitorService');
const logger = require('./services/logger').child('BOOT');
@@ -21,6 +23,7 @@ async function start() {
logger.info('backend:start:init');
await initDatabase();
await pipelineService.init();
await cronService.init();
const app = express();
app.use(cors({ origin: corsOrigin }));
@@ -34,6 +37,7 @@ async function start() {
app.use('/api/settings', settingsRoutes);
app.use('/api/pipeline', pipelineRoutes);
app.use('/api/history', historyRoutes);
app.use('/api/crons', cronRoutes);
app.use(errorHandler);
@@ -72,6 +76,7 @@ async function start() {
logger.warn('backend:shutdown:received');
diskDetectionService.stop();
hardwareMonitorService.stop();
cronService.stop();
server.close(() => {
logger.warn('backend:shutdown:completed');
process.exit(0);

View File

@@ -0,0 +1,101 @@
const express = require('express');
const asyncHandler = require('../middleware/asyncHandler');
const cronService = require('../services/cronService');
const wsService = require('../services/websocketService');
const logger = require('../services/logger').child('CRON_ROUTE');

const router = express.Router();

/**
 * Parses and validates the numeric :id route parameter.
 * Throws a 400 error for non-numeric or non-positive ids instead of letting
 * NaN leak through to the service layer and into SQL parameters.
 * @param {import('express').Request} req
 * @returns {number} positive integer cron job id
 */
function parseJobId(req) {
  const id = Number(req.params.id);
  if (!Number.isInteger(id) || id <= 0) {
    const error = new Error('Ungültige Cronjob-ID.');
    error.statusCode = 400;
    throw error;
  }
  return id;
}

// GET /api/crons — list all cron jobs
router.get(
  '/',
  asyncHandler(async (req, res) => {
    logger.debug('get:crons', { reqId: req.reqId });
    const jobs = await cronService.listJobs();
    res.json({ jobs });
  })
);

// POST /api/crons/validate-expression — validate a cron expression
router.post(
  '/validate-expression',
  asyncHandler(async (req, res) => {
    const expr = String(req.body?.cronExpression || '').trim();
    const validation = cronService.validateExpression(expr);
    // Only compute the next run time for syntactically valid expressions.
    const nextRunAt = validation.valid ? cronService.getNextRunTime(expr) : null;
    res.json({ ...validation, nextRunAt });
  })
);

// POST /api/crons — create a new cron job
router.post(
  '/',
  asyncHandler(async (req, res) => {
    const payload = req.body || {};
    logger.info('post:crons:create', { reqId: req.reqId, name: payload?.name });
    const job = await cronService.createJob(payload);
    wsService.broadcast('CRON_JOBS_UPDATED', { action: 'created', id: job.id });
    res.status(201).json({ job });
  })
);

// GET /api/crons/:id — fetch a single cron job
router.get(
  '/:id',
  asyncHandler(async (req, res) => {
    const id = parseJobId(req);
    logger.debug('get:crons:one', { reqId: req.reqId, cronJobId: id });
    const job = await cronService.getJobById(id);
    res.json({ job });
  })
);

// PUT /api/crons/:id — update a cron job
router.put(
  '/:id',
  asyncHandler(async (req, res) => {
    const id = parseJobId(req);
    const payload = req.body || {};
    logger.info('put:crons:update', { reqId: req.reqId, cronJobId: id });
    const job = await cronService.updateJob(id, payload);
    wsService.broadcast('CRON_JOBS_UPDATED', { action: 'updated', id: job.id });
    res.json({ job });
  })
);

// DELETE /api/crons/:id — delete a cron job
router.delete(
  '/:id',
  asyncHandler(async (req, res) => {
    const id = parseJobId(req);
    logger.info('delete:crons', { reqId: req.reqId, cronJobId: id });
    const removed = await cronService.deleteJob(id);
    wsService.broadcast('CRON_JOBS_UPDATED', { action: 'deleted', id: removed.id });
    res.json({ removed });
  })
);

// GET /api/crons/:id/logs — execution logs of one cron job
router.get(
  '/:id/logs',
  asyncHandler(async (req, res) => {
    const id = parseJobId(req);
    // Clamp limit to [1, 100]. Previously a negative query value passed
    // straight through Math.min and reached SQLite, where "LIMIT -n"
    // means "no limit at all".
    const limit = Math.min(Math.max(Number(req.query?.limit) || 20, 1), 100);
    logger.debug('get:crons:logs', { reqId: req.reqId, cronJobId: id, limit });
    const logs = await cronService.getJobLogs(id, limit);
    res.json({ logs });
  })
);

// POST /api/crons/:id/run — trigger a cron job manually
router.post(
  '/:id/run',
  asyncHandler(async (req, res) => {
    const id = parseJobId(req);
    logger.info('post:crons:run', { reqId: req.reqId, cronJobId: id });
    const result = await cronService.triggerJobManually(id);
    res.json(result);
  })
);

module.exports = router;

View File

@@ -61,6 +61,20 @@ router.post(
})
);
// POST /api/settings/scripts/reorder — persists a new manual ordering of the
// scripts; expects { orderedScriptIds: number[] } and broadcasts the change
// to connected websocket clients.
router.post(
  '/scripts/reorder',
  asyncHandler(async (req, res) => {
    // Tolerate a missing/malformed body: fall back to an empty ordering.
    const orderedScriptIds = Array.isArray(req.body?.orderedScriptIds) ? req.body.orderedScriptIds : [];
    logger.info('post:settings:scripts:reorder', {
      reqId: req.reqId,
      count: orderedScriptIds.length
    });
    const scripts = await scriptService.reorderScripts(orderedScriptIds);
    wsService.broadcast('SETTINGS_SCRIPTS_UPDATED', { action: 'reordered', count: scripts.length });
    res.json({ scripts });
  })
);
router.put(
'/scripts/:id',
asyncHandler(async (req, res) => {
@@ -105,6 +119,16 @@ router.post(
})
);
// POST /api/settings/script-chains/:id/test — executes the chain once with
// source 'settings_test' / mode 'test' and returns the raw execution result.
// NOTE(review): no numeric validation on :id — a non-numeric id becomes NaN
// and is passed to executeChain; confirm the service handles that.
router.post(
  '/script-chains/:id/test',
  asyncHandler(async (req, res) => {
    const chainId = Number(req.params.id);
    logger.info('post:settings:script-chains:test', { reqId: req.reqId, chainId });
    const result = await scriptChainService.executeChain(chainId, { source: 'settings_test', mode: 'test' });
    res.json({ result });
  })
);
router.get(
'/script-chains',
asyncHandler(async (req, res) => {
@@ -125,6 +149,20 @@ router.post(
})
);
// POST /api/settings/script-chains/reorder — persists a new manual ordering
// of the script chains; expects { orderedChainIds: number[] } and broadcasts
// the change to connected websocket clients.
router.post(
  '/script-chains/reorder',
  asyncHandler(async (req, res) => {
    // Tolerate a missing/malformed body: fall back to an empty ordering.
    const orderedChainIds = Array.isArray(req.body?.orderedChainIds) ? req.body.orderedChainIds : [];
    logger.info('post:settings:script-chains:reorder', {
      reqId: req.reqId,
      count: orderedChainIds.length
    });
    const chains = await scriptChainService.reorderChains(orderedChainIds);
    wsService.broadcast('SETTINGS_SCRIPT_CHAINS_UPDATED', { action: 'reordered', count: chains.length });
    res.json({ chains });
  })
);
router.get(
'/script-chains/:id',
asyncHandler(async (req, res) => {

View File

@@ -0,0 +1,560 @@
/**
* cronService.js
* Verwaltet und führt Cronjobs aus (Skripte oder Skriptketten).
* Kein externes Package nötig – eigener Cron-Expression-Parser.
*/
const { getDb } = require('../db/database');
const logger = require('./logger').child('CRON');
const notificationService = require('./notificationService');
const settingsService = require('./settingsService');
const wsService = require('./websocketService');
const { errorToMeta } = require('../utils/errorMeta');
// Maximale Zeichen pro Log-Eintrag (Output-Truncation)
const MAX_OUTPUT_CHARS = 100000;
// Maximale Log-Einträge pro Cron-Job (ältere werden gelöscht)
const MAX_LOGS_PER_JOB = 50;
// ─── Cron-Expression-Parser ────────────────────────────────────────────────
// Parst ein einzelnes Cron-Feld (z.B. "*/5", "1,3,5", "1-5", "*") und gibt
// alle erlaubten Werte als Set zurück.
// Parses one cron field into the Set of allowed integer values within
// [min, max]. Supported, comma-separated: "*", steps ("*/5"), ranges
// ("1-5"), ranges with steps ("10-30/5"), single values ("7") and
// Vixie-style "N/step" (every step-th value from N up to max).
// Throws on malformed input (bad step, bad range, out-of-range value).
//
// Fix: previously "10-30/5" was handled by the range branch, where
// parseInt("30/5", 10) === 30 silently dropped the step — the field then
// matched EVERY value in 10-30 instead of every 5th.
function parseCronField(field, min, max) {
  const values = new Set();
  for (const part of field.split(',')) {
    const trimmed = part.trim();
    // Split off an optional "/step" suffix first so it can never leak into
    // the range end.
    const slashIdx = trimmed.indexOf('/');
    const base = slashIdx === -1 ? trimmed : trimmed.slice(0, slashIdx);
    let step = 1;
    if (slashIdx !== -1) {
      step = Number.parseInt(trimmed.slice(slashIdx + 1), 10);
      if (!Number.isFinite(step) || step < 1) throw new Error(`Ungültiges Step: ${trimmed}`);
    }
    if (base === '*') {
      for (let i = min; i <= max; i += step) values.add(i);
    } else if (base.includes('-')) {
      const [startStr, endStr] = base.split('-');
      const start = Number.parseInt(startStr, 10);
      const end = Number.parseInt(endStr, 10);
      if (!Number.isFinite(start) || !Number.isFinite(end)) throw new Error(`Ungültiger Bereich: ${trimmed}`);
      // Range is clamped into [min, max]; an empty range adds nothing.
      for (let i = Math.max(min, start); i <= Math.min(max, end); i += step) values.add(i);
    } else {
      const num = Number.parseInt(base, 10);
      if (!Number.isFinite(num) || num < min || num > max) throw new Error(`Ungültiger Wert: ${trimmed}`);
      if (slashIdx === -1) {
        values.add(num);
      } else {
        // Vixie-cron semantics: "N/step" = every step-th value from N to max.
        for (let i = num; i <= max; i += step) values.add(i);
      }
    }
  }
  return values;
}
/**
 * Validates a 5-field cron expression (minute hour day month weekday).
 * @param {string} expr - expression to validate
 * @returns {{valid: boolean, error?: string}} validation result; `error`
 *   carries a human-readable (German) message when invalid
 */
function validateCronExpression(expr) {
  const fields = String(expr || '').trim().split(/\s+/);
  if (fields.length !== 5) {
    return { valid: false, error: 'Cron-Ausdruck muss genau 5 Felder haben (Minute Stunde Tag Monat Wochentag).' };
  }
  // [min, max] bounds per field position; weekday accepts 0-7 (0 and 7 = Sunday).
  const bounds = [[0, 59], [0, 23], [1, 31], [1, 12], [0, 7]];
  try {
    bounds.forEach(([min, max], idx) => parseCronField(fields[idx], min, max));
    return { valid: true };
  } catch (error) {
    return { valid: false, error: error.message };
  }
}
/**
 * Computes the next execution time of a 5-field cron expression strictly
 * after `fromDate`, scanning minute-by-minute with month/day/hour skips.
 *
 * NOTE(review): when both day-of-month and weekday are restricted, this
 * implementation requires BOTH to match (AND). Standard Vixie cron runs
 * the job when EITHER matches (OR) in that case — confirm this deviation
 * is intentional.
 *
 * @param {string} expr - cron expression "minute hour day month weekday"
 * @param {Date} [fromDate] - search starts at the minute after this instant
 * @returns {Date|null} next run time, or null for invalid expressions or
 *   when no match exists within the 2-year search window
 */
function getNextRunTime(expr, fromDate = new Date()) {
  try {
    const parts = String(expr || '').trim().split(/\s+/);
    if (parts.length !== 5) return null;
    const minutes = parseCronField(parts[0], 0, 59);
    const hours = parseCronField(parts[1], 0, 23);
    const days = parseCronField(parts[2], 1, 31);
    const months = parseCronField(parts[3], 1, 12);
    const weekdays = parseCronField(parts[4], 0, 7);
    // Normalize weekday: 7 → 0 (both mean Sunday).
    if (weekdays.has(7)) weekdays.add(0);
    // Start searching at the next full minute.
    const candidate = new Date(fromDate);
    candidate.setSeconds(0, 0);
    candidate.setMinutes(candidate.getMinutes() + 1);
    // Search at most 2 years into the future.
    const limit = new Date(fromDate);
    limit.setFullYear(limit.getFullYear() + 2);
    while (candidate < limit) {
      const month = candidate.getMonth() + 1; // 1-12
      const day = candidate.getDate();
      const hour = candidate.getHours();
      const minute = candidate.getMinutes();
      const weekday = candidate.getDay(); // 0 = Sunday
      if (!months.has(month)) {
        // Wrong month: jump to the 1st of the next month at 00:00.
        candidate.setMonth(candidate.getMonth() + 1, 1);
        candidate.setHours(0, 0, 0, 0);
        continue;
      }
      if (!days.has(day) || !weekdays.has(weekday)) {
        // Wrong day (or weekday): advance to the next day at 00:00.
        candidate.setDate(candidate.getDate() + 1);
        candidate.setHours(0, 0, 0, 0);
        continue;
      }
      if (!hours.has(hour)) {
        // Wrong hour: advance to the top of the next hour.
        candidate.setHours(candidate.getHours() + 1, 0, 0, 0);
        continue;
      }
      if (!minutes.has(minute)) {
        candidate.setMinutes(candidate.getMinutes() + 1, 0, 0);
        continue;
      }
      return candidate;
    }
    return null;
  } catch (_error) {
    // parseCronField throws on malformed fields — treat as "no next run".
    return null;
  }
}
// ─── DB-Helpers ────────────────────────────────────────────────────────────
/**
 * Converts a cron_jobs DB row (snake_case columns) into the camelCase
 * object shape exposed by the API. Returns null for a missing row.
 * @param {object|null|undefined} row - raw DB row
 * @returns {object|null} API-shaped job object
 */
function mapJobRow(row) {
  if (!row) {
    return null;
  }
  const asText = (value) => String(value || '');
  return {
    id: Number(row.id),
    name: asText(row.name),
    cronExpression: asText(row.cron_expression),
    sourceType: asText(row.source_type),
    sourceId: Number(row.source_id),
    sourceName: row.source_name == null ? null : String(row.source_name),
    enabled: Boolean(row.enabled),
    pushoverEnabled: Boolean(row.pushover_enabled),
    lastRunAt: row.last_run_at || null,
    lastRunStatus: row.last_run_status || null,
    nextRunAt: row.next_run_at || null,
    createdAt: row.created_at,
    updatedAt: row.updated_at
  };
}
/**
 * Converts a cron_run_logs DB row into its camelCase API shape.
 * Returns null when no row was given.
 * @param {object|null|undefined} row - raw DB row
 * @returns {object|null} API-shaped log entry
 */
function mapLogRow(row) {
  if (!row) {
    return null;
  }
  const { id, cron_job_id: jobId, started_at: startedAt } = row;
  return {
    id: Number(id),
    cronJobId: Number(jobId),
    startedAt,
    finishedAt: row.finished_at || null,
    status: String(row.status || ''),
    output: row.output || null,
    errorMessage: row.error_message || null
  };
}
/**
 * Fetches a single cron job row by id, enriched with the display name of
 * its source (the referenced script or script chain) as `source_name`.
 * @param {object} db - opened database handle
 * @param {number} id - cron job id
 * @returns {Promise<object|undefined>} raw row, or undefined when not found
 */
async function fetchJobWithSource(db, id) {
  const sql = `
    SELECT
      c.*,
      CASE c.source_type
        WHEN 'script' THEN (SELECT name FROM scripts WHERE id = c.source_id)
        WHEN 'chain' THEN (SELECT name FROM script_chains WHERE id = c.source_id)
        ELSE NULL
      END AS source_name
    FROM cron_jobs c
    WHERE c.id = ?
    LIMIT 1
  `;
  return db.get(sql, [id]);
}
/**
 * Fetches all cron job rows, each enriched with the display name of its
 * source (script or script chain) as `source_name`, ordered by id.
 * @param {object} db - opened database handle
 * @returns {Promise<object[]>} raw rows
 */
async function fetchAllJobsWithSource(db) {
  const sql = `
    SELECT
      c.*,
      CASE c.source_type
        WHEN 'script' THEN (SELECT name FROM scripts WHERE id = c.source_id)
        WHEN 'chain' THEN (SELECT name FROM script_chains WHERE id = c.source_id)
        ELSE NULL
      END AS source_name
    FROM cron_jobs c
    ORDER BY c.id ASC
  `;
  return db.all(sql);
}
// ─── Ausführungslogik ──────────────────────────────────────────────────────
/**
 * Executes one cron job end-to-end: creates a cron_run_logs row, runs the
 * underlying script or script chain, captures (truncated) output, finalizes
 * the log row, advances the job's next_run_at, trims old log rows and — if
 * enabled on the job — sends a Pushover notification.
 *
 * @param {object} job - mapped cron job (see mapJobRow)
 * @returns {Promise<{success: boolean, status: string, output: string,
 *   errorMessage: string|null, finishedAt: string, nextRunAt: string|null}>}
 */
async function runCronJob(job) {
  const db = await getDb();
  const startedAt = new Date().toISOString();
  logger.info('cron:run:start', { cronJobId: job.id, name: job.name, sourceType: job.sourceType, sourceId: job.sourceId });
  // Create the run-log entry up front (status = 'running').
  const insertResult = await db.run(
    `INSERT INTO cron_run_logs (cron_job_id, started_at, status) VALUES (?, ?, 'running')`,
    [job.id, startedAt]
  );
  const logId = insertResult.lastID;
  // Mark the job itself as running and notify websocket clients.
  await db.run(
    `UPDATE cron_jobs SET last_run_at = ?, last_run_status = 'running', updated_at = CURRENT_TIMESTAMP WHERE id = ?`,
    [startedAt, job.id]
  );
  wsService.broadcast('CRON_JOB_UPDATED', { id: job.id, lastRunStatus: 'running', lastRunAt: startedAt });
  let output = '';
  let errorMessage = null;
  let success = false;
  try {
    if (job.sourceType === 'script') {
      // Lazy require — presumably to avoid a circular dependency at module
      // load time; NOTE(review): confirm scriptService requires cronService.
      const scriptService = require('./scriptService');
      const script = await scriptService.getScriptById(job.sourceId);
      const prepared = await scriptService.createExecutableScriptFile(script, { source: 'cron', cronJobId: job.id });
      try {
        // Run the prepared script as a child process, capturing both streams.
        const result = await new Promise((resolve, reject) => {
          const { spawn } = require('child_process');
          const child = spawn(prepared.cmd, prepared.args, {
            env: process.env,
            stdio: ['ignore', 'pipe', 'pipe']
          });
          let stdout = '';
          let stderr = '';
          child.stdout?.on('data', (chunk) => { stdout += String(chunk); });
          child.stderr?.on('data', (chunk) => { stderr += String(chunk); });
          child.on('error', reject);
          child.on('close', (code) => resolve({ code, stdout, stderr }));
        });
        output = [result.stdout, result.stderr].filter(Boolean).join('\n');
        if (output.length > MAX_OUTPUT_CHARS) output = output.slice(0, MAX_OUTPUT_CHARS) + '\n...[truncated]';
        success = result.code === 0;
        if (!success) errorMessage = `Exit-Code ${result.code}`;
      } finally {
        // Always clean up the temporary executable, even on failure.
        await prepared.cleanup();
      }
    } else if (job.sourceType === 'chain') {
      const scriptChainService = require('./scriptChainService');
      const logLines = [];
      // Collect per-step log lines via the appendLog callback.
      const result = await scriptChainService.executeChain(
        job.sourceId,
        { source: 'cron', cronJobId: job.id },
        {
          appendLog: async (_source, line) => {
            logLines.push(line);
          }
        }
      );
      output = logLines.join('\n');
      if (output.length > MAX_OUTPUT_CHARS) output = output.slice(0, MAX_OUTPUT_CHARS) + '\n...[truncated]';
      // executeChain may return a per-step result array or a single value.
      success = Array.isArray(result) ? result.every((r) => r.success !== false) : Boolean(result);
      if (!success) errorMessage = 'Kette enthielt fehlgeschlagene Schritte.';
    } else {
      throw new Error(`Unbekannter source_type: ${job.sourceType}`);
    }
  } catch (error) {
    success = false;
    errorMessage = error.message || String(error);
    logger.error('cron:run:error', { cronJobId: job.id, error: errorToMeta(error) });
  }
  const finishedAt = new Date().toISOString();
  const status = success ? 'success' : 'error';
  const nextRunAt = getNextRunTime(job.cronExpression)?.toISOString() || null;
  // Finalize the run-log entry.
  await db.run(
    `UPDATE cron_run_logs SET finished_at = ?, status = ?, output = ?, error_message = ? WHERE id = ?`,
    [finishedAt, status, output || null, errorMessage, logId]
  );
  // Update job status and schedule the next run.
  await db.run(
    `UPDATE cron_jobs SET last_run_status = ?, next_run_at = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?`,
    [status, nextRunAt, job.id]
  );
  // Trim old logs: keep only the newest MAX_LOGS_PER_JOB entries per job.
  await db.run(
    `
    DELETE FROM cron_run_logs
    WHERE cron_job_id = ?
      AND id NOT IN (
        SELECT id FROM cron_run_logs WHERE cron_job_id = ? ORDER BY id DESC LIMIT ?
      )
    `,
    [job.id, job.id, MAX_LOGS_PER_JOB]
  );
  logger.info('cron:run:done', { cronJobId: job.id, status, durationMs: new Date(finishedAt) - new Date(startedAt) });
  wsService.broadcast('CRON_JOB_UPDATED', { id: job.id, lastRunStatus: status, lastRunAt: finishedAt, nextRunAt });
  // Pushover notification, only when enabled on this cron job (the global
  // settings are additionally consulted inside notifyWithSettings).
  if (job.pushoverEnabled) {
    try {
      const settings = await settingsService.getSettingsMap();
      const eventKey = success ? 'cron_success' : 'cron_error';
      const title = `Ripster Cron: ${job.name}`;
      const message = success
        ? `Cronjob "${job.name}" erfolgreich ausgeführt.`
        : `Cronjob "${job.name}" fehlgeschlagen: ${errorMessage || 'Unbekannter Fehler'}`;
      await notificationService.notifyWithSettings(settings, eventKey, { title, message });
    } catch (notifyError) {
      // Notification failures must never fail the cron run itself.
      logger.warn('cron:run:notify-failed', { cronJobId: job.id, error: errorToMeta(notifyError) });
    }
  }
  return { success, status, output, errorMessage, finishedAt, nextRunAt };
}
// ─── Scheduler ─────────────────────────────────────────────────────────────
/**
 * Minute-granularity cron scheduler plus the CRUD/service API for cron jobs.
 * A single shared instance is exported; init() starts the ticker, stop()
 * halts it (called during graceful shutdown).
 */
class CronService {
  constructor() {
    this._timer = null; // pending setTimeout handle for the next minute tick
    this._running = new Set(); // ids of jobs that are currently executing
    this._stopped = false; // set by stop(); prevents the ticker from re-arming
  }

  /**
   * Starts the scheduler: recomputes next_run_at for all enabled jobs
   * (expressions or the wall clock may have changed while the service was
   * down) and arms the first minute tick.
   */
  async init() {
    logger.info('cron:scheduler:init');
    this._stopped = false;
    await this._recalcNextRuns();
    this._scheduleNextTick();
  }

  /**
   * Stops the scheduler. The stop flag is set BEFORE clearing the timer:
   * an in-flight _tick() re-arms the timer when it finishes, so without the
   * flag a tick running during shutdown would resurrect the scheduler.
   */
  stop() {
    this._stopped = true;
    if (this._timer) {
      clearTimeout(this._timer);
      this._timer = null;
    }
    logger.info('cron:scheduler:stopped');
  }

  /** Arms a timeout for ~0.5s after the start of the next full minute. */
  _scheduleNextTick() {
    if (this._stopped) {
      return; // shut down — never re-arm
    }
    const now = new Date();
    // 500 ms of slack so the tick fires safely inside the new minute.
    const msUntilNextMinute = (60 - now.getSeconds()) * 1000 - now.getMilliseconds() + 500;
    this._timer = setTimeout(() => this._tick(), msUntilNextMinute);
  }

  /** One scheduler tick: run due jobs, then re-arm for the next minute. */
  async _tick() {
    try {
      await this._checkAndRunDueJobs();
    } catch (error) {
      logger.error('cron:scheduler:tick-error', { error: errorToMeta(error) });
    }
    this._scheduleNextTick();
  }

  /** Recomputes and persists next_run_at for every enabled job. */
  async _recalcNextRuns() {
    const db = await getDb();
    const jobs = await db.all(`SELECT id, cron_expression FROM cron_jobs WHERE enabled = 1`);
    for (const job of jobs) {
      const nextRunAt = getNextRunTime(job.cron_expression)?.toISOString() || null;
      await db.run(`UPDATE cron_jobs SET next_run_at = ? WHERE id = ?`, [nextRunAt, job.id]);
    }
  }

  /**
   * Finds all enabled jobs whose next_run_at has passed and starts them.
   * Jobs still running from a previous tick are skipped (no overlapping
   * runs); execution is fire-and-forget so the scheduler loop never blocks.
   */
  async _checkAndRunDueJobs() {
    const db = await getDb();
    const now = new Date();
    const nowIso = now.toISOString();
    const dueJobs = await db.all(
      `SELECT * FROM cron_jobs WHERE enabled = 1 AND next_run_at IS NOT NULL AND next_run_at <= ?`,
      [nowIso]
    );
    for (const jobRow of dueJobs) {
      const id = Number(jobRow.id);
      if (this._running.has(id)) {
        logger.warn('cron:scheduler:skip-still-running', { cronJobId: id });
        continue;
      }
      const job = mapJobRow(jobRow);
      this._running.add(id);
      // Deliberately not awaited: runCronJob advances next_run_at itself.
      runCronJob(job)
        .catch((error) => {
          logger.error('cron:run:unhandled-error', { cronJobId: id, error: errorToMeta(error) });
        })
        .finally(() => {
          this._running.delete(id);
        });
    }
  }

  // ─── Public API ──────────────────────────────────────────────────────────

  /** @returns {Promise<object[]>} all cron jobs in API shape */
  async listJobs() {
    const db = await getDb();
    const rows = await fetchAllJobsWithSource(db);
    return rows.map(mapJobRow);
  }

  /**
   * @param {number} id - cron job id
   * @returns {Promise<object>} the job in API shape
   * @throws 404 error when the job does not exist
   */
  async getJobById(id) {
    const db = await getDb();
    const row = await fetchJobWithSource(db, id);
    if (!row) {
      const error = new Error(`Cronjob #${id} nicht gefunden.`);
      error.statusCode = 404;
      throw error;
    }
    return mapJobRow(row);
  }

  /**
   * Creates a cron job after validating name, expression and source.
   * next_run_at is only precomputed for enabled jobs — consistent with
   * updateJob, which clears it for disabled jobs.
   * @throws 400 error on validation failures
   */
  async createJob(payload) {
    const { name, cronExpression, sourceType, sourceId, enabled = true, pushoverEnabled = true } = payload || {};
    const trimmedName = String(name || '').trim();
    const trimmedExpr = String(cronExpression || '').trim();
    if (!trimmedName) throw Object.assign(new Error('Name fehlt.'), { statusCode: 400 });
    if (!trimmedExpr) throw Object.assign(new Error('Cron-Ausdruck fehlt.'), { statusCode: 400 });
    const validation = validateCronExpression(trimmedExpr);
    if (!validation.valid) throw Object.assign(new Error(validation.error), { statusCode: 400 });
    if (!['script', 'chain'].includes(sourceType)) {
      throw Object.assign(new Error('sourceType muss "script" oder "chain" sein.'), { statusCode: 400 });
    }
    const normalizedSourceId = Number(sourceId);
    if (!Number.isFinite(normalizedSourceId) || normalizedSourceId <= 0) {
      throw Object.assign(new Error('sourceId fehlt oder ist ungültig.'), { statusCode: 400 });
    }
    // Disabled jobs carry no next_run_at (matches updateJob's behavior).
    const nextRunAt = enabled ? (getNextRunTime(trimmedExpr)?.toISOString() || null) : null;
    const db = await getDb();
    const result = await db.run(
      `
      INSERT INTO cron_jobs (name, cron_expression, source_type, source_id, enabled, pushover_enabled, next_run_at)
      VALUES (?, ?, ?, ?, ?, ?, ?)
      `,
      [trimmedName, trimmedExpr, sourceType, normalizedSourceId, enabled ? 1 : 0, pushoverEnabled ? 1 : 0, nextRunAt]
    );
    logger.info('cron:create', { cronJobId: result.lastID, name: trimmedName, cronExpression: trimmedExpr });
    return this.getJobById(result.lastID);
  }

  /**
   * Partially updates a cron job: only keys present in the payload are
   * changed; all other fields keep their current values. Disabling a job
   * clears its next_run_at.
   * @throws 400 on validation failure, 404 when the job is missing
   */
  async updateJob(id, payload) {
    const db = await getDb();
    const existing = await this.getJobById(id);
    const trimmedName = Object.prototype.hasOwnProperty.call(payload, 'name')
      ? String(payload.name || '').trim()
      : existing.name;
    const trimmedExpr = Object.prototype.hasOwnProperty.call(payload, 'cronExpression')
      ? String(payload.cronExpression || '').trim()
      : existing.cronExpression;
    if (!trimmedName) throw Object.assign(new Error('Name fehlt.'), { statusCode: 400 });
    if (!trimmedExpr) throw Object.assign(new Error('Cron-Ausdruck fehlt.'), { statusCode: 400 });
    const validation = validateCronExpression(trimmedExpr);
    if (!validation.valid) throw Object.assign(new Error(validation.error), { statusCode: 400 });
    const sourceType = Object.prototype.hasOwnProperty.call(payload, 'sourceType') ? payload.sourceType : existing.sourceType;
    const sourceId = Object.prototype.hasOwnProperty.call(payload, 'sourceId') ? Number(payload.sourceId) : existing.sourceId;
    const enabled = Object.prototype.hasOwnProperty.call(payload, 'enabled') ? Boolean(payload.enabled) : existing.enabled;
    const pushoverEnabled = Object.prototype.hasOwnProperty.call(payload, 'pushoverEnabled') ? Boolean(payload.pushoverEnabled) : existing.pushoverEnabled;
    if (!['script', 'chain'].includes(sourceType)) {
      throw Object.assign(new Error('sourceType muss "script" oder "chain" sein.'), { statusCode: 400 });
    }
    if (!Number.isFinite(sourceId) || sourceId <= 0) {
      throw Object.assign(new Error('sourceId fehlt oder ist ungültig.'), { statusCode: 400 });
    }
    const nextRunAt = enabled ? (getNextRunTime(trimmedExpr)?.toISOString() || null) : null;
    await db.run(
      `
      UPDATE cron_jobs
      SET name = ?, cron_expression = ?, source_type = ?, source_id = ?,
          enabled = ?, pushover_enabled = ?, next_run_at = ?, updated_at = CURRENT_TIMESTAMP
      WHERE id = ?
      `,
      [trimmedName, trimmedExpr, sourceType, sourceId, enabled ? 1 : 0, pushoverEnabled ? 1 : 0, nextRunAt, id]
    );
    logger.info('cron:update', { cronJobId: id });
    return this.getJobById(id);
  }

  /**
   * Deletes a job and returns its last known state.
   * NOTE(review): cron_run_logs rows are not deleted here — confirm the
   * schema cascades, or that orphaned log rows are acceptable.
   * @throws 404 when the job is missing
   */
  async deleteJob(id) {
    const db = await getDb();
    const job = await this.getJobById(id);
    await db.run(`DELETE FROM cron_jobs WHERE id = ?`, [id]);
    logger.info('cron:delete', { cronJobId: id });
    return job;
  }

  /**
   * Returns the newest run-log entries for a job.
   * The limit is clamped to [1, 100]; previously a negative caller value
   * passed through Math.min and reached SQLite, where "LIMIT -n" disables
   * the limit entirely.
   * @throws 404 when the job is missing
   */
  async getJobLogs(id, limit = 20) {
    await this.getJobById(id); // ensure the job exists (throws 404)
    const db = await getDb();
    const effectiveLimit = Math.min(Math.max(Number(limit) || 20, 1), 100);
    const rows = await db.all(
      `SELECT * FROM cron_run_logs WHERE cron_job_id = ? ORDER BY id DESC LIMIT ?`,
      [id, effectiveLimit]
    );
    return rows.map(mapLogRow);
  }

  /**
   * Starts a job immediately (fire-and-forget) outside its schedule.
   * @throws 409 when the job is already running, 404 when missing
   * @returns {Promise<{triggered: boolean, cronJobId: number}>}
   */
  async triggerJobManually(id) {
    const job = await this.getJobById(id);
    if (this._running.has(id)) {
      throw Object.assign(new Error('Cronjob läuft bereits.'), { statusCode: 409 });
    }
    this._running.add(id);
    logger.info('cron:manual-trigger', { cronJobId: id });
    // Deliberately not awaited — the HTTP response returns immediately.
    runCronJob(job)
      .catch((error) => {
        logger.error('cron:manual-trigger:error', { cronJobId: id, error: errorToMeta(error) });
      })
      .finally(() => {
        this._running.delete(id);
      });
    return { triggered: true, cronJobId: id };
  }

  /** Validates a cron expression; see validateCronExpression(). */
  validateExpression(expr) {
    return validateCronExpression(expr);
  }

  /** @returns {string|null} ISO timestamp of the next run, or null */
  getNextRunTime(expr) {
    const next = getNextRunTime(expr);
    return next ? next.toISOString() : null;
  }
}
module.exports = new CronService();

View File

@@ -28,10 +28,28 @@ function normalizeMediaProfile(rawValue) {
if (!value) {
return null;
}
if (value === 'bluray' || value === 'blu-ray' || value === 'bd' || value === 'bdmv') {
if (
value === 'bluray'
|| value === 'blu-ray'
|| value === 'blu_ray'
|| value === 'bd'
|| value === 'bdmv'
|| value === 'bdrom'
|| value === 'bd-rom'
|| value === 'bd-r'
|| value === 'bd-re'
) {
return 'bluray';
}
if (value === 'dvd') {
if (
value === 'dvd'
|| value === 'dvdvideo'
|| value === 'dvd-video'
|| value === 'dvdrom'
|| value === 'dvd-rom'
|| value === 'video_ts'
|| value === 'iso9660'
) {
return 'dvd';
}
if (value === 'disc' || value === 'other' || value === 'sonstiges' || value === 'cd') {
@@ -40,6 +58,10 @@ function normalizeMediaProfile(rawValue) {
return null;
}
// Returns true only for the two concrete disc profiles ('bluray' / 'dvd');
// 'other' and null/unknown values are not considered specific.
function isSpecificMediaProfile(value) {
  return ['bluray', 'dvd'].includes(value);
}
function inferMediaProfileFromTextParts(parts) {
const markerText = (parts || [])
.map((value) => String(value || '').trim().toLowerCase())
@@ -49,15 +71,55 @@ function inferMediaProfileFromTextParts(parts) {
if (!markerText) {
return null;
}
if (/(^|[\s_-])bdmv($|[\s_-])|blu[\s-]?ray|bd-rom|bd-r|bd-re/.test(markerText)) {
if (/(^|[\s_-])bdmv($|[\s_-])|blu[\s-]?ray|bd[\s_-]?rom|bd-r|bd-re/.test(markerText)) {
return 'bluray';
}
if (/(^|[\s_-])video_ts($|[\s_-])|dvd/.test(markerText)) {
if (/(^|[\s_-])video_ts($|[\s_-])|dvd|iso9660/.test(markerText)) {
return 'dvd';
}
return null;
}
/**
 * Derives a media profile from a filesystem type string and an optical
 * drive model string.
 * @param {string|null|undefined} rawFsType - e.g. 'udf', 'iso9660', 'cdfs'
 * @param {string|null|undefined} rawModel - drive model name
 * @returns {'bluray'|'dvd'|'other'|null}
 */
function inferMediaProfileFromFsTypeAndModel(rawFsType, rawModel) {
  const fstype = String(rawFsType || '').trim().toLowerCase();
  const model = String(rawModel || '').trim().toLowerCase();
  const looksBluray = /(blu[\s-]?ray|bd[\s_-]?rom|bd-r|bd-re)/.test(model);
  const looksDvd = /dvd/.test(model);
  const looksCdOnly = !looksBluray && !looksDvd && /(^|[\s_-])cd([\s_-]|$)|cd-?rom/.test(model);
  if (!fstype) {
    // No filesystem info at all: fall back to the drive model alone.
    if (looksBluray) {
      return 'bluray';
    }
    return looksDvd ? 'dvd' : null;
  }
  if (fstype.includes('udf')) {
    // UDF is used by both Blu-ray and DVD; a Blu-ray model marker wins,
    // otherwise assume DVD.
    return looksBluray ? 'bluray' : 'dvd';
  }
  if (fstype.includes('iso9660') || fstype.includes('cdfs')) {
    if (looksBluray) {
      return 'bluray';
    }
    // A pure CD drive with an ISO filesystem is likely a data/audio CD.
    return looksCdOnly ? 'other' : 'dvd';
  }
  // Non-optical filesystem type: no conclusion.
  return null;
}
class DiskDetectionService extends EventEmitter {
constructor() {
super();
@@ -290,8 +352,8 @@ class DiskDetectionService extends EventEmitter {
return null;
}
const hasMedia = await this.checkMediaPresent(devicePath);
if (!hasMedia) {
const mediaState = await this.checkMediaPresent(devicePath);
if (!mediaState.hasMedia) {
logger.debug('detect:explicit:no-media', { devicePath });
return null;
}
@@ -299,12 +361,13 @@ class DiskDetectionService extends EventEmitter {
const details = await this.getBlockDeviceInfo();
const match = details.find((entry) => entry.path === devicePath || `/dev/${entry.name}` === devicePath) || {};
const detectedFsType = String(match.fstype || mediaState.type || '').trim() || null;
const mediaProfile = await this.inferMediaProfile(devicePath, {
discLabel,
label: match.label,
model: match.model,
fstype: match.fstype,
fstype: detectedFsType,
mountpoint: match.mountpoint
});
@@ -316,7 +379,7 @@ class DiskDetectionService extends EventEmitter {
label: match.label || null,
discLabel: discLabel || null,
mountpoint: match.mountpoint || null,
fstype: match.fstype || null,
fstype: detectedFsType,
mediaProfile: mediaProfile || null,
index: this.guessDiscIndex(match.name || devicePath)
};
@@ -342,17 +405,18 @@ class DiskDetectionService extends EventEmitter {
continue;
}
const hasMedia = await this.checkMediaPresent(path);
if (!hasMedia) {
const mediaState = await this.checkMediaPresent(path);
if (!mediaState.hasMedia) {
continue;
}
const discLabel = await this.getDiscLabel(path);
const detectedFsType = String(item.fstype || mediaState.type || '').trim() || null;
const mediaProfile = await this.inferMediaProfile(path, {
discLabel,
label: item.label,
model: item.model,
fstype: item.fstype,
fstype: detectedFsType,
mountpoint: item.mountpoint
});
@@ -364,7 +428,7 @@ class DiskDetectionService extends EventEmitter {
label: item.label || null,
discLabel: discLabel || null,
mountpoint: item.mountpoint || null,
fstype: item.fstype || null,
fstype: detectedFsType,
mediaProfile: mediaProfile || null,
index: this.guessDiscIndex(item.name)
};
@@ -404,12 +468,19 @@ class DiskDetectionService extends EventEmitter {
async checkMediaPresent(devicePath) {
try {
const { stdout } = await execFileAsync('blkid', ['-o', 'value', '-s', 'TYPE', devicePath]);
const has = stdout.trim().length > 0;
logger.debug('blkid:result', { devicePath, hasMedia: has, type: stdout.trim() });
return has;
const type = String(stdout || '').trim().toLowerCase();
const has = type.length > 0;
logger.debug('blkid:result', { devicePath, hasMedia: has, type });
return {
hasMedia: has,
type: type || null
};
} catch (error) {
logger.debug('blkid:no-media-or-fail', { devicePath, error: errorToMeta(error) });
return false;
return {
hasMedia: false,
type: null
};
}
}
@@ -427,19 +498,29 @@ class DiskDetectionService extends EventEmitter {
async inferMediaProfile(devicePath, hints = {}) {
const explicit = normalizeMediaProfile(hints?.mediaProfile);
if (explicit) {
if (isSpecificMediaProfile(explicit)) {
return explicit;
}
const hinted = inferMediaProfileFromTextParts([
hints?.discLabel,
hints?.label,
hints?.fstype
hints?.fstype,
hints?.model
]);
if (hinted) {
return hinted;
}
const hintFstype = String(hints?.fstype || '').trim().toLowerCase();
const byFsTypeHint = inferMediaProfileFromFsTypeAndModel(hints?.fstype, hints?.model);
// UDF is used for both Blu-ray (UDF 2.x) and DVD (UDF 1.x). Without a clear model
// marker identifying it as Blu-ray, a 'dvd' result from UDF is ambiguous. Skip the
// early return and fall through to the blkid check which uses the UDF version number.
if (byFsTypeHint && !(hintFstype.includes('udf') && byFsTypeHint !== 'bluray')) {
return byFsTypeHint;
}
const mountpoint = String(hints?.mountpoint || '').trim();
if (mountpoint) {
try {
@@ -477,19 +558,24 @@ class DiskDetectionService extends EventEmitter {
const byBlkidMarker = inferMediaProfileFromTextParts([
payload.LABEL,
payload.TYPE,
payload.VERSION
payload.VERSION,
payload.APPLICATION_ID,
hints?.model
]);
if (byBlkidMarker) {
return byBlkidMarker;
}
const type = String(payload.TYPE || '').trim().toLowerCase();
if (type === 'udf') {
const version = Number.parseFloat(String(payload.VERSION || '').replace(',', '.'));
if (Number.isFinite(version)) {
return version >= 2 ? 'bluray' : 'dvd';
const byBlkidFsType = inferMediaProfileFromFsTypeAndModel(type, hints?.model);
if (byBlkidFsType) {
if (type.includes('udf')) {
const version = Number.parseFloat(String(payload.VERSION || '').replace(',', '.'));
if (Number.isFinite(version)) {
return version >= 2 ? 'bluray' : 'dvd';
}
}
return 'dvd';
return byBlkidFsType;
}
} catch (error) {
logger.debug('infer-media-profile:blkid-failed', {
@@ -498,7 +584,7 @@ class DiskDetectionService extends EventEmitter {
});
}
return null;
return explicit === 'other' ? 'other' : null;
}
guessDiscIndex(name) {

View File

@@ -20,6 +20,7 @@ function parseJsonSafe(raw, fallback = null) {
const PROCESS_LOG_TAIL_MAX_BYTES = 1024 * 1024;
const processLogStreams = new Map();
const PROFILE_PATH_SUFFIXES = ['bluray', 'dvd', 'other'];
function inspectDirectory(dirPath) {
if (!dirPath) {
@@ -181,10 +182,28 @@ function normalizeMediaTypeValue(value) {
if (!raw) {
return null;
}
if (raw === 'bluray' || raw === 'blu-ray' || raw === 'bd' || raw === 'bdmv') {
if (
raw === 'bluray'
|| raw === 'blu-ray'
|| raw === 'blu_ray'
|| raw === 'bd'
|| raw === 'bdmv'
|| raw === 'bdrom'
|| raw === 'bd-rom'
|| raw === 'bd-r'
|| raw === 'bd-re'
) {
return 'bluray';
}
if (raw === 'dvd') {
if (
raw === 'dvd'
|| raw === 'dvdvideo'
|| raw === 'dvd-video'
|| raw === 'dvdrom'
|| raw === 'dvd-rom'
|| raw === 'video_ts'
|| raw === 'iso9660'
) {
return 'dvd';
}
if (raw === 'disc' || raw === 'other' || raw === 'sonstiges' || raw === 'cd') {
@@ -308,40 +327,83 @@ function resolveEffectiveOutputPath(storedPath, movieDir) {
return path.join(String(movieDir).trim(), folderName, fileName);
}
function enrichJobRow(job, settings = null) {
const rawDir = String(settings?.raw_dir || '').trim();
const movieDir = String(settings?.movie_dir || '').trim();
function getConfiguredMediaPathList(settings = {}, baseKey) {
const source = settings && typeof settings === 'object' ? settings : {};
const candidates = [source[baseKey], ...PROFILE_PATH_SUFFIXES.map((suffix) => source[`${baseKey}_${suffix}`])];
const unique = [];
const seen = new Set();
const effectiveRawPath = rawDir && job.raw_path
for (const candidate of candidates) {
const rawPath = String(candidate || '').trim();
if (!rawPath) {
continue;
}
const normalized = normalizeComparablePath(rawPath);
if (!normalized || seen.has(normalized)) {
continue;
}
seen.add(normalized);
unique.push(normalized);
}
return unique;
}
function resolveEffectiveStoragePathsForJob(settings = null, job = {}, parsed = {}) {
const mkInfo = parsed?.makemkvInfo || parseJsonSafe(job?.makemkv_info_json, null);
const miInfo = parsed?.mediainfoInfo || parseJsonSafe(job?.mediainfo_info_json, null);
const plan = parsed?.encodePlan || parseJsonSafe(job?.encode_plan_json, null);
const mediaType = inferMediaType(job, mkInfo, miInfo, plan);
const effectiveSettings = settingsService.resolveEffectiveToolSettings(settings || {}, mediaType);
const rawDir = String(effectiveSettings?.raw_dir || '').trim();
const movieDir = String(effectiveSettings?.movie_dir || '').trim();
const effectiveRawPath = rawDir && job?.raw_path
? resolveEffectiveRawPath(job.raw_path, rawDir)
: (job.raw_path || null);
const effectiveOutputPath = movieDir && job.output_path
: (job?.raw_path || null);
const effectiveOutputPath = movieDir && job?.output_path
? resolveEffectiveOutputPath(job.output_path, movieDir)
: (job.output_path || null);
: (job?.output_path || null);
const rawStatus = inspectDirectory(effectiveRawPath);
const outputStatus = inspectOutputFile(effectiveOutputPath);
const movieDirPath = effectiveOutputPath ? path.dirname(effectiveOutputPath) : null;
const movieDirStatus = inspectDirectory(movieDirPath);
const makemkvInfo = parseJsonSafe(job.makemkv_info_json, null);
return {
mediaType,
rawDir,
movieDir,
effectiveRawPath,
effectiveOutputPath,
makemkvInfo: mkInfo,
mediainfoInfo: miInfo,
encodePlan: plan
};
}
function enrichJobRow(job, settings = null) {
const handbrakeInfo = parseJsonSafe(job.handbrake_info_json, null);
const mediainfoInfo = parseJsonSafe(job.mediainfo_info_json, null);
const omdbInfo = parseJsonSafe(job.omdb_json, null);
const encodePlan = parseJsonSafe(job.encode_plan_json, null);
const mediaType = inferMediaType(job, makemkvInfo, mediainfoInfo, encodePlan);
const backupSuccess = String(makemkvInfo?.status || '').trim().toUpperCase() === 'SUCCESS';
const resolvedPaths = resolveEffectiveStoragePathsForJob(settings, job);
const rawStatus = inspectDirectory(resolvedPaths.effectiveRawPath);
const outputStatus = inspectOutputFile(resolvedPaths.effectiveOutputPath);
const movieDirPath = resolvedPaths.effectiveOutputPath ? path.dirname(resolvedPaths.effectiveOutputPath) : null;
const movieDirStatus = inspectDirectory(movieDirPath);
const makemkvInfo = resolvedPaths.makemkvInfo;
const mediainfoInfo = resolvedPaths.mediainfoInfo;
const encodePlan = resolvedPaths.encodePlan;
const mediaType = resolvedPaths.mediaType;
const ripSuccessful = Number(job?.rip_successful || 0) === 1
|| String(makemkvInfo?.status || '').trim().toUpperCase() === 'SUCCESS';
const backupSuccess = ripSuccessful;
const encodeSuccess = String(handbrakeInfo?.status || '').trim().toUpperCase() === 'SUCCESS';
return {
...job,
raw_path: effectiveRawPath,
output_path: effectiveOutputPath,
raw_path: resolvedPaths.effectiveRawPath,
output_path: resolvedPaths.effectiveOutputPath,
makemkvInfo,
handbrakeInfo,
mediainfoInfo,
omdbInfo,
encodePlan,
mediaType,
ripSuccessful,
backupSuccess,
encodeSuccess,
rawStatus,
@@ -370,9 +432,10 @@ function normalizeComparablePath(inputPath) {
function parseRawFolderMetadata(folderName) {
const rawName = String(folderName || '').trim();
const folderJobIdMatch = rawName.match(/-\s*RAW\s*-\s*job-(\d+)\s*$/i);
const normalizedRawName = rawName.replace(/^Incomplete_/i, '').trim();
const folderJobIdMatch = normalizedRawName.match(/-\s*RAW\s*-\s*job-(\d+)\s*$/i);
const folderJobId = folderJobIdMatch ? Number(folderJobIdMatch[1]) : null;
let working = rawName.replace(/\s*-\s*RAW\s*-\s*job-\d+\s*$/i, '').trim();
let working = normalizedRawName.replace(/\s*-\s*RAW\s*-\s*job-\d+\s*$/i, '').trim();
const imdbMatch = working.match(/\[(tt\d{6,12})\]/i);
const imdbId = imdbMatch ? String(imdbMatch[1] || '').toLowerCase() : null;
@@ -499,6 +562,16 @@ class HistoryService {
});
}
async updateRawPathByOldPath(oldRawPath, newRawPath) {
const db = await getDb();
const result = await db.run(
'UPDATE jobs SET raw_path = ?, updated_at = CURRENT_TIMESTAMP WHERE raw_path = ?',
[newRawPath, oldRawPath]
);
logger.info('job:raw-path-bulk-updated', { oldRawPath, newRawPath, changes: result.changes });
return result.changes;
}
appendLog(jobId, source, message) {
this.appendProcessLog(jobId, source, message);
}
@@ -820,25 +893,17 @@ class HistoryService {
async getOrphanRawFolders() {
const settings = await settingsService.getSettingsMap();
const rawDir = String(settings.raw_dir || '').trim();
if (!rawDir) {
const error = new Error('raw_dir ist nicht konfiguriert.');
const rawDirs = getConfiguredMediaPathList(settings, 'raw_dir');
if (rawDirs.length === 0) {
const error = new Error('Kein RAW-Pfad konfiguriert (raw_dir oder raw_dir_{bluray,dvd,other}).');
error.statusCode = 400;
throw error;
}
const rawDirInfo = inspectDirectory(rawDir);
if (!rawDirInfo.exists || !rawDirInfo.isDirectory) {
return {
rawDir,
rows: []
};
}
const db = await getDb();
const linkedRows = await db.all(
`
SELECT id, raw_path, status
SELECT id, raw_path, status, makemkv_info_json, mediainfo_info_json, encode_plan_json, encode_input_path, media_type
FROM jobs
WHERE raw_path IS NOT NULL AND TRIM(raw_path) <> ''
`
@@ -846,63 +911,77 @@ class HistoryService {
const linkedPathMap = new Map();
for (const row of linkedRows) {
const normalized = normalizeComparablePath(row.raw_path);
if (!normalized) {
continue;
const resolvedPaths = resolveEffectiveStoragePathsForJob(settings, row);
const linkedCandidates = [
normalizeComparablePath(row.raw_path),
normalizeComparablePath(resolvedPaths.effectiveRawPath)
].filter(Boolean);
for (const linkedPath of linkedCandidates) {
if (!linkedPathMap.has(linkedPath)) {
linkedPathMap.set(linkedPath, []);
}
linkedPathMap.get(linkedPath).push({
id: row.id,
status: row.status
});
}
if (!linkedPathMap.has(normalized)) {
linkedPathMap.set(normalized, []);
}
linkedPathMap.get(normalized).push({
id: row.id,
status: row.status
});
}
const dirEntries = fs.readdirSync(rawDir, { withFileTypes: true });
const orphanRows = [];
const seenOrphanPaths = new Set();
for (const entry of dirEntries) {
if (!entry.isDirectory()) {
for (const rawDir of rawDirs) {
const rawDirInfo = inspectDirectory(rawDir);
if (!rawDirInfo.exists || !rawDirInfo.isDirectory) {
continue;
}
const dirEntries = fs.readdirSync(rawDir, { withFileTypes: true });
const rawPath = path.join(rawDir, entry.name);
const normalizedPath = normalizeComparablePath(rawPath);
if (linkedPathMap.has(normalizedPath)) {
continue;
for (const entry of dirEntries) {
if (!entry.isDirectory()) {
continue;
}
const rawPath = path.join(rawDir, entry.name);
const normalizedPath = normalizeComparablePath(rawPath);
if (!normalizedPath || linkedPathMap.has(normalizedPath) || seenOrphanPaths.has(normalizedPath)) {
continue;
}
const dirInfo = inspectDirectory(rawPath);
if (!dirInfo.exists || !dirInfo.isDirectory || dirInfo.isEmpty) {
continue;
}
const stat = fs.statSync(rawPath);
const metadata = parseRawFolderMetadata(entry.name);
orphanRows.push({
rawPath,
folderName: entry.name,
title: metadata.title,
year: metadata.year,
imdbId: metadata.imdbId,
folderJobId: metadata.folderJobId,
entryCount: Number(dirInfo.entryCount || 0),
hasBlurayStructure: fs.existsSync(path.join(rawPath, 'BDMV', 'STREAM')),
lastModifiedAt: stat.mtime.toISOString()
});
seenOrphanPaths.add(normalizedPath);
}
const dirInfo = inspectDirectory(rawPath);
if (!dirInfo.exists || !dirInfo.isDirectory || dirInfo.isEmpty) {
continue;
}
const stat = fs.statSync(rawPath);
const metadata = parseRawFolderMetadata(entry.name);
orphanRows.push({
rawPath,
folderName: entry.name,
title: metadata.title,
year: metadata.year,
imdbId: metadata.imdbId,
folderJobId: metadata.folderJobId,
entryCount: Number(dirInfo.entryCount || 0),
hasBlurayStructure: fs.existsSync(path.join(rawPath, 'BDMV', 'STREAM')),
lastModifiedAt: stat.mtime.toISOString()
});
}
orphanRows.sort((a, b) => String(b.lastModifiedAt).localeCompare(String(a.lastModifiedAt)));
return {
rawDir,
rawDir: rawDirs[0] || null,
rawDirs,
rows: orphanRows
};
}
async importOrphanRawFolder(rawPath) {
const settings = await settingsService.getSettingsMap();
const rawDir = String(settings.raw_dir || '').trim();
const rawDirs = getConfiguredMediaPathList(settings, 'raw_dir');
const requestedRawPath = String(rawPath || '').trim();
if (!requestedRawPath) {
@@ -911,14 +990,15 @@ class HistoryService {
throw error;
}
if (!rawDir) {
const error = new Error('raw_dir ist nicht konfiguriert.');
if (rawDirs.length === 0) {
const error = new Error('Kein RAW-Pfad konfiguriert (raw_dir oder raw_dir_{bluray,dvd,other}).');
error.statusCode = 400;
throw error;
}
if (!isPathInside(rawDir, requestedRawPath)) {
const error = new Error(`RAW-Pfad liegt außerhalb von raw_dir: ${requestedRawPath}`);
const insideConfiguredRawDir = rawDirs.some((candidate) => isPathInside(candidate, requestedRawPath));
if (!insideConfiguredRawDir) {
const error = new Error(`RAW-Pfad liegt außerhalb der konfigurierten RAW-Verzeichnisse: ${requestedRawPath}`);
error.statusCode = 400;
throw error;
}
@@ -1004,6 +1084,7 @@ class HistoryService {
poster_url: omdbById?.poster || null,
omdb_json: omdbById?.raw ? JSON.stringify(omdbById.raw) : null,
selected_from_omdb: omdbById ? 1 : 0,
rip_successful: 1,
raw_path: finalRawPath,
output_path: null,
handbrake_info_json: null,
@@ -1125,12 +1206,11 @@ class HistoryService {
}
const settings = await settingsService.getSettingsMap();
const effectiveRawPath = settings.raw_dir && job.raw_path
? resolveEffectiveRawPath(job.raw_path, settings.raw_dir)
: job.raw_path;
const effectiveOutputPath = settings.movie_dir && job.output_path
? resolveEffectiveOutputPath(job.output_path, settings.movie_dir)
: job.output_path;
const resolvedPaths = resolveEffectiveStoragePathsForJob(settings, job);
const effectiveRawPath = resolvedPaths.effectiveRawPath;
const effectiveOutputPath = resolvedPaths.effectiveOutputPath;
const effectiveRawDir = resolvedPaths.rawDir;
const effectiveMovieDir = resolvedPaths.movieDir;
const summary = {
target,
raw: { attempted: false, deleted: false, filesDeleted: 0, dirsRemoved: 0, reason: null },
@@ -1141,8 +1221,12 @@ class HistoryService {
summary.raw.attempted = true;
if (!effectiveRawPath) {
summary.raw.reason = 'Kein raw_path im Job gesetzt.';
} else if (!isPathInside(settings.raw_dir, effectiveRawPath)) {
const error = new Error(`RAW-Pfad liegt außerhalb von raw_dir: ${effectiveRawPath}`);
} else if (!effectiveRawDir) {
const error = new Error(`Kein gültiger RAW-Basispfad für Job ${jobId} (${resolvedPaths.mediaType || 'unknown'}).`);
error.statusCode = 400;
throw error;
} else if (!isPathInside(effectiveRawDir, effectiveRawPath)) {
const error = new Error(`RAW-Pfad liegt außerhalb des effektiven RAW-Basispfads: ${effectiveRawPath}`);
error.statusCode = 400;
throw error;
} else if (!fs.existsSync(effectiveRawPath)) {
@@ -1159,15 +1243,19 @@ class HistoryService {
summary.movie.attempted = true;
if (!effectiveOutputPath) {
summary.movie.reason = 'Kein output_path im Job gesetzt.';
} else if (!isPathInside(settings.movie_dir, effectiveOutputPath)) {
const error = new Error(`Movie-Pfad liegt außerhalb von movie_dir: ${effectiveOutputPath}`);
} else if (!effectiveMovieDir) {
const error = new Error(`Kein gültiger Movie-Basispfad für Job ${jobId} (${resolvedPaths.mediaType || 'unknown'}).`);
error.statusCode = 400;
throw error;
} else if (!isPathInside(effectiveMovieDir, effectiveOutputPath)) {
const error = new Error(`Movie-Pfad liegt außerhalb des effektiven Movie-Basispfads: ${effectiveOutputPath}`);
error.statusCode = 400;
throw error;
} else if (!fs.existsSync(effectiveOutputPath)) {
summary.movie.reason = 'Movie-Datei/Pfad existiert nicht.';
} else {
const outputPath = normalizeComparablePath(effectiveOutputPath);
const movieRoot = normalizeComparablePath(settings.movie_dir);
const movieRoot = normalizeComparablePath(effectiveMovieDir);
const stat = fs.lstatSync(outputPath);
if (stat.isDirectory()) {
const keepRoot = outputPath === movieRoot;

File diff suppressed because it is too large Load Diff

View File

@@ -32,6 +32,7 @@ function mapChainRow(row, steps = []) {
return {
id: Number(row.id),
name: String(row.name || ''),
orderIndex: Number(row.order_index || 0),
steps: steps.map(mapStepRow),
createdAt: row.created_at,
updatedAt: row.updated_at
@@ -115,9 +116,9 @@ class ScriptChainService {
const db = await getDb();
const rows = await db.all(
`
SELECT id, name, created_at, updated_at
SELECT id, name, order_index, created_at, updated_at
FROM script_chains
ORDER BY LOWER(name) ASC, id ASC
ORDER BY order_index ASC, id ASC
`
);
@@ -164,7 +165,7 @@ class ScriptChainService {
}
const db = await getDb();
const row = await db.get(
`SELECT id, name, created_at, updated_at FROM script_chains WHERE id = ?`,
`SELECT id, name, order_index, created_at, updated_at FROM script_chains WHERE id = ?`,
[normalizedId]
);
if (!row) {
@@ -186,7 +187,7 @@ class ScriptChainService {
const db = await getDb();
const placeholders = ids.map(() => '?').join(', ');
const rows = await db.all(
`SELECT id, name, created_at, updated_at FROM script_chains WHERE id IN (${placeholders})`,
`SELECT id, name, order_index, created_at, updated_at FROM script_chains WHERE id IN (${placeholders})`,
ids
);
const stepRows = await db.all(
@@ -229,9 +230,13 @@ class ScriptChainService {
const db = await getDb();
try {
const nextOrderIndex = await this._getNextOrderIndex(db);
const result = await db.run(
`INSERT INTO script_chains (name, created_at, updated_at) VALUES (?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)`,
[name]
`
INSERT INTO script_chains (name, order_index, created_at, updated_at)
VALUES (?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
`,
[name, nextOrderIndex]
);
const chainId = result.lastID;
await this._saveSteps(db, chainId, steps);
@@ -289,6 +294,78 @@ class ScriptChainService {
return existing;
}
async reorderChains(orderedIds = []) {
const providedIds = Array.isArray(orderedIds)
? orderedIds.map(normalizeChainId).filter(Boolean)
: [];
const db = await getDb();
const rows = await db.all(
`
SELECT id
FROM script_chains
ORDER BY order_index ASC, id ASC
`
);
if (rows.length === 0) {
return [];
}
const existingIds = rows.map((row) => Number(row.id)).filter((id) => Number.isFinite(id) && id > 0);
const existingSet = new Set(existingIds);
const used = new Set();
const nextOrder = [];
for (const id of providedIds) {
if (!existingSet.has(id) || used.has(id)) {
continue;
}
used.add(id);
nextOrder.push(id);
}
for (const id of existingIds) {
if (used.has(id)) {
continue;
}
used.add(id);
nextOrder.push(id);
}
await db.exec('BEGIN');
try {
for (let i = 0; i < nextOrder.length; i += 1) {
await db.run(
`
UPDATE script_chains
SET order_index = ?, updated_at = CURRENT_TIMESTAMP
WHERE id = ?
`,
[i + 1, nextOrder[i]]
);
}
await db.exec('COMMIT');
} catch (error) {
await db.exec('ROLLBACK');
throw error;
}
return this.listChains();
}
async _getNextOrderIndex(db) {
const row = await db.get(
`
SELECT COALESCE(MAX(order_index), 0) AS max_order_index
FROM script_chains
`
);
const maxOrder = Number(row?.max_order_index || 0);
if (!Number.isFinite(maxOrder) || maxOrder < 0) {
return 1;
}
return Math.trunc(maxOrder) + 1;
}
async _saveSteps(db, chainId, steps) {
for (let i = 0; i < steps.length; i++) {
const step = steps[i];
@@ -367,7 +444,7 @@ class ScriptChainService {
`Kette "${chain.name}" - Skript "${script.name}": ${success ? 'OK' : `Fehler (Exit ${run.code})`}`
);
}
results.push({ stepType: 'script', scriptId: script.id, scriptName: script.name, success, exitCode: run.code });
results.push({ stepType: 'script', scriptId: script.id, scriptName: script.name, success, exitCode: run.code, stdout: run.stdout || '', stderr: run.stderr || '' });
if (!success) {
logger.warn('chain:step:script-failed', { chainId, scriptId: script.id, exitCode: run.code });

View File

@@ -99,6 +99,7 @@ function mapScriptRow(row) {
id: Number(row.id),
name: String(row.name || ''),
scriptBody: String(row.script_body || ''),
orderIndex: Number(row.order_index || 0),
createdAt: row.created_at,
updatedAt: row.updated_at
};
@@ -225,9 +226,9 @@ class ScriptService {
const db = await getDb();
const rows = await db.all(
`
SELECT id, name, script_body, created_at, updated_at
SELECT id, name, script_body, order_index, created_at, updated_at
FROM scripts
ORDER BY LOWER(name) ASC, id ASC
ORDER BY order_index ASC, id ASC
`
);
return rows.map(mapScriptRow);
@@ -241,7 +242,7 @@ class ScriptService {
const db = await getDb();
const row = await db.get(
`
SELECT id, name, script_body, created_at, updated_at
SELECT id, name, script_body, order_index, created_at, updated_at
FROM scripts
WHERE id = ?
`,
@@ -259,12 +260,13 @@ class ScriptService {
const normalized = validateScriptPayload(payload, { partial: false });
const db = await getDb();
try {
const nextOrderIndex = await this._getNextOrderIndex(db);
const result = await db.run(
`
INSERT INTO scripts (name, script_body, created_at, updated_at)
VALUES (?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
INSERT INTO scripts (name, script_body, order_index, created_at, updated_at)
VALUES (?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
`,
[normalized.name, normalized.scriptBody]
[normalized.name, normalized.scriptBody, nextOrderIndex]
);
return this.getScriptById(result.lastID);
} catch (error) {
@@ -328,7 +330,7 @@ class ScriptService {
const placeholders = ids.map(() => '?').join(', ');
const rows = await db.all(
`
SELECT id, name, script_body, created_at, updated_at
SELECT id, name, script_body, order_index, created_at, updated_at
FROM scripts
WHERE id IN (${placeholders})
`,
@@ -358,6 +360,76 @@ class ScriptService {
return scripts;
}
async reorderScripts(orderedIds = []) {
const db = await getDb();
const providedIds = normalizeScriptIdList(orderedIds);
const rows = await db.all(
`
SELECT id
FROM scripts
ORDER BY order_index ASC, id ASC
`
);
if (rows.length === 0) {
return [];
}
const existingIds = rows.map((row) => Number(row.id)).filter((id) => Number.isFinite(id) && id > 0);
const existingSet = new Set(existingIds);
const used = new Set();
const nextOrder = [];
for (const id of providedIds) {
if (!existingSet.has(id) || used.has(id)) {
continue;
}
used.add(id);
nextOrder.push(id);
}
for (const id of existingIds) {
if (used.has(id)) {
continue;
}
used.add(id);
nextOrder.push(id);
}
await db.exec('BEGIN');
try {
for (let i = 0; i < nextOrder.length; i += 1) {
await db.run(
`
UPDATE scripts
SET order_index = ?, updated_at = CURRENT_TIMESTAMP
WHERE id = ?
`,
[i + 1, nextOrder[i]]
);
}
await db.exec('COMMIT');
} catch (error) {
await db.exec('ROLLBACK');
throw error;
}
return this.listScripts();
}
async _getNextOrderIndex(db) {
const row = await db.get(
`
SELECT COALESCE(MAX(order_index), 0) AS max_order_index
FROM scripts
`
);
const maxOrder = Number(row?.max_order_index || 0);
if (!Number.isFinite(maxOrder) || maxOrder < 0) {
return 1;
}
return Math.trunc(maxOrder) + 1;
}
async createExecutableScriptFile(script, context = {}) {
const name = String(script?.name || '').trim() || `script-${script?.id || 'unknown'}`;
const scriptBody = normalizeScriptBody(script?.scriptBody);

View File

@@ -30,6 +30,16 @@ const TITLE_SELECTION_KEYS_WITH_VALUE = new Set(['-t', '--title']);
const LOG_DIR_SETTING_KEY = 'log_dir';
const MEDIA_PROFILES = ['bluray', 'dvd', 'other'];
const PROFILED_SETTINGS = {
raw_dir: {
bluray: 'raw_dir_bluray',
dvd: 'raw_dir_dvd',
other: 'raw_dir_other'
},
movie_dir: {
bluray: 'movie_dir_bluray',
dvd: 'movie_dir_dvd',
other: 'movie_dir_other'
},
mediainfo_extra_args: {
bluray: 'mediainfo_extra_args_bluray',
dvd: 'mediainfo_extra_args_dvd'
@@ -67,6 +77,10 @@ const PROFILED_SETTINGS = {
dvd: 'output_folder_template_dvd'
}
};
const STRICT_PROFILE_ONLY_SETTING_KEYS = new Set([
'raw_dir',
'movie_dir'
]);
function applyRuntimeLogDirSetting(rawValue) {
const resolved = setLogRootDir(rawValue);
@@ -227,10 +241,28 @@ function normalizeMediaProfileValue(value) {
if (!raw) {
return null;
}
if (raw === 'bluray' || raw === 'blu-ray' || raw === 'bd' || raw === 'bdmv') {
if (
raw === 'bluray'
|| raw === 'blu-ray'
|| raw === 'blu_ray'
|| raw === 'bd'
|| raw === 'bdmv'
|| raw === 'bdrom'
|| raw === 'bd-rom'
|| raw === 'bd-r'
|| raw === 'bd-re'
) {
return 'bluray';
}
if (raw === 'dvd') {
if (
raw === 'dvd'
|| raw === 'dvdvideo'
|| raw === 'dvd-video'
|| raw === 'dvdrom'
|| raw === 'dvd-rom'
|| raw === 'video_ts'
|| raw === 'iso9660'
) {
return 'dvd';
}
if (raw === 'disc' || raw === 'other' || raw === 'sonstiges' || raw === 'cd') {
@@ -253,6 +285,16 @@ function resolveProfileFallbackOrder(profile) {
return ['dvd', 'bluray'];
}
function hasUsableProfileSpecificValue(value) {
if (value === null || value === undefined) {
return false;
}
if (typeof value === 'string') {
return value.trim().length > 0;
}
return true;
}
function normalizePresetListLines(rawOutput) {
const lines = String(rawOutput || '').split(/\r?\n/);
const normalized = [];
@@ -434,8 +476,9 @@ class SettingsService {
resolveEffectiveToolSettings(settingsMap = {}, mediaProfile = null) {
const sourceMap = settingsMap && typeof settingsMap === 'object' ? settingsMap : {};
const fallbackOrder = resolveProfileFallbackOrder(mediaProfile);
const resolvedMediaProfile = normalizeMediaProfileValue(mediaProfile) || fallbackOrder[0] || 'dvd';
const normalizedRequestedProfile = normalizeMediaProfileValue(mediaProfile);
const fallbackOrder = resolveProfileFallbackOrder(normalizedRequestedProfile);
const resolvedMediaProfile = normalizedRequestedProfile || fallbackOrder[0] || 'dvd';
const effective = {
...sourceMap,
media_profile: resolvedMediaProfile
@@ -443,6 +486,17 @@ class SettingsService {
for (const [legacyKey, profileKeys] of Object.entries(PROFILED_SETTINGS)) {
let resolvedValue = sourceMap[legacyKey];
if (STRICT_PROFILE_ONLY_SETTING_KEYS.has(legacyKey)) {
const selectedProfileKey = normalizedRequestedProfile
? profileKeys?.[normalizedRequestedProfile]
: null;
const selectedProfileValue = selectedProfileKey ? sourceMap[selectedProfileKey] : undefined;
if (hasUsableProfileSpecificValue(selectedProfileValue)) {
resolvedValue = selectedProfileValue;
}
effective[legacyKey] = resolvedValue;
continue;
}
for (const profile of fallbackOrder) {
const profileKey = profileKeys?.[profile];
if (!profileKey) {
@@ -697,10 +751,10 @@ class SettingsService {
const normalizedProfile = normalizeMediaProfileValue(options?.mediaProfile || deviceInfo?.mediaProfile || null);
const isDvd = normalizedProfile === 'dvd';
if (isDvd) {
const isoBase = options?.isoOutputBase
? path.join(rawJobDir, options.isoOutputBase)
const backupBase = options?.backupOutputBase
? path.join(rawJobDir, options.backupOutputBase)
: rawJobDir;
baseArgs = ['-r', '--progress=-same', 'backup', '--decrypt', '--noscan', sourceArg, isoBase];
baseArgs = ['-r', '--progress=-same', 'backup', '--decrypt', '--noscan', sourceArg, backupBase];
} else {
baseArgs = ['-r', '--progress=-same', 'backup', '--decrypt', sourceArg, rawJobDir];
}