Initial commit mit MkDocs-Dokumentation
This commit is contained in:
603
backend/src/db/database.js
Normal file
603
backend/src/db/database.js
Normal file
@@ -0,0 +1,603 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const sqlite3 = require('sqlite3');
|
||||
const { open } = require('sqlite');
|
||||
const { dbPath } = require('../config');
|
||||
const { defaultSchema } = require('./defaultSettings');
|
||||
const logger = require('../services/logger').child('DB');
|
||||
const { errorToMeta } = require('../utils/errorMeta');
|
||||
const { setLogRootDir, getJobLogDir } = require('../services/logPathService');
|
||||
|
||||
// Canonical SQL schema shipped with the repository — the single source of
// truth that the live database is reconciled against on startup.
const schemaFilePath = path.resolve(__dirname, '../../../db/schema.sql');

// Module-level singleton connection; created lazily by initDatabase().
let dbInstance;
|
||||
|
||||
// Filesystem-safe timestamp: current time as ISO-8601 with ':' and '.'
// replaced by '-', suitable for use inside file names.
function nowFileStamp() {
  const iso = new Date().toISOString();
  return iso.replace(/[:.]/g, '-');
}
|
||||
|
||||
// True when the given error carries one of the signatures SQLite produces
// for a corrupted database file (error code or known message fragments).
function isSqliteCorruptionError(error) {
  if (!error) {
    return false;
  }

  const code = String(error.code || '').toUpperCase();
  if (code === 'SQLITE_CORRUPT') {
    return true;
  }

  const msg = String(error.message || '').toLowerCase();
  const corruptionMarkers = [
    'database disk image is malformed',
    'file is not a database'
  ];
  return corruptionMarkers.some((marker) => msg.includes(marker));
}
|
||||
|
||||
/**
 * Moves `sourcePath` to `targetPath` when the source exists.
 *
 * Performs the rename directly and interprets a missing source as
 * "nothing to move" instead of stat-ing first — this closes the TOCTOU
 * window the original existsSync-then-rename sequence had.
 *
 * @param {string} sourcePath - File to move.
 * @param {string} targetPath - Destination path (parent directory must exist).
 * @returns {boolean} true when a file was moved, false when the source was absent.
 * @throws Re-throws any filesystem error other than a missing source.
 */
function moveIfExists(sourcePath, targetPath) {
  try {
    fs.renameSync(sourcePath, targetPath);
    return true;
  } catch (error) {
    // ENOENT caused by a missing source preserves the original contract
    // (return false). ENOENT for any other reason (e.g. the target's
    // directory is missing) is still a real error and must propagate.
    if (error?.code === 'ENOENT' && !fs.existsSync(sourcePath)) {
      return false;
    }
    throw error;
  }
}
|
||||
|
||||
// Moves the (presumed corrupt) database plus its WAL/SHM side files into a
// timestamped "corrupt-backups" folder next to the DB, so a fresh database
// can be created on the next init attempt. Logs what was moved.
function quarantineCorruptDatabaseFiles() {
  const dir = path.dirname(dbPath);
  const base = path.basename(dbPath);
  const stamp = nowFileStamp();
  const archiveDir = path.join(dir, 'corrupt-backups');

  fs.mkdirSync(archiveDir, { recursive: true });

  // The main DB file and both SQLite WAL-mode side files must be quarantined
  // together; a stale -wal/-shm pair would re-corrupt a fresh database.
  const candidates = [dbPath, `${dbPath}-wal`, `${dbPath}-shm`];
  const moved = [];

  for (const sourcePath of candidates) {
    const fileName = path.basename(sourcePath);
    const targetPath = path.join(archiveDir, `${fileName}.${stamp}.corrupt`);
    const didMove = moveIfExists(sourcePath, targetPath);
    if (didMove) {
      moved.push({ from: sourcePath, to: targetPath });
    }
  }

  logger.warn('recovery:quarantine-complete', {
    dbPath,
    base,
    movedCount: moved.length,
    moved
  });
}
|
||||
|
||||
// Wraps an SQL identifier in double quotes, escaping embedded double quotes
// by doubling them (standard SQL identifier quoting).
function quoteIdentifier(identifier) {
  const raw = String(identifier || '');
  const escaped = raw.replace(/"/g, '""');
  return `"${escaped}"`;
}
|
||||
|
||||
// Canonical form for comparing declared column types: trimmed, inner
// whitespace collapsed to single spaces, uppercased.
function normalizeSqlType(value) {
  const text = String(value || '');
  return text.trim().replace(/\s+/g, ' ').toUpperCase();
}
|
||||
|
||||
// Canonical form for comparing column DEFAULT clauses; null and undefined
// both normalize to the empty string so "no default" compares equal.
function normalizeDefault(value) {
  if (value == null) {
    return '';
  }
  const text = String(value);
  return text.trim().replace(/\s+/g, ' ').toUpperCase();
}
|
||||
|
||||
// Compares two PRAGMA table_info result sets column-by-column, in order.
// Columns match when name, normalized type, NOT NULL flag, primary-key flag
// and normalized default value all agree.
function sameTableShape(current = [], desired = []) {
  if (current.length !== desired.length) {
    return false;
  }

  return current.every((left, i) => {
    const right = desired[i];
    if (!left || !right) {
      return false;
    }
    return (
      String(left.name || '') === String(right.name || '') &&
      normalizeSqlType(left.type) === normalizeSqlType(right.type) &&
      Number(left.notnull || 0) === Number(right.notnull || 0) &&
      Number(left.pk || 0) === Number(right.pk || 0) &&
      normalizeDefault(left.dflt_value) === normalizeDefault(right.dflt_value)
    );
  });
}
|
||||
|
||||
// Compares two PRAGMA foreign_key_list result sets row-by-row, in order.
// Numeric fields default to 0 and text fields to '' before comparison, so
// absent and zero/empty values compare equal.
function sameForeignKeys(current = [], desired = []) {
  if (current.length !== desired.length) {
    return false;
  }

  const numericFields = ['id', 'seq'];
  const textFields = ['table', 'from', 'to', 'on_update', 'on_delete', 'match'];

  return current.every((left, i) => {
    const right = desired[i];
    if (!left || !right) {
      return false;
    }
    const numbersMatch = numericFields.every(
      (field) => Number(left[field] || 0) === Number(right[field] || 0)
    );
    const textsMatch = textFields.every(
      (field) => String(left[field] || '') === String(right[field] || '')
    );
    return numbersMatch && textsMatch;
  });
}
|
||||
|
||||
// True when a user table with the given name exists in sqlite_master.
async function tableExists(db, tableName) {
  const sql = `SELECT 1 as ok FROM sqlite_master WHERE type = 'table' AND name = ? LIMIT 1`;
  const row = await db.get(sql, [tableName]);
  return row != null;
}
|
||||
|
||||
// Column metadata (name, type, notnull, dflt_value, pk, ...) for one table.
async function getTableInfo(db, tableName) {
  const quoted = quoteIdentifier(tableName);
  return db.all(`PRAGMA table_info(${quoted})`);
}
|
||||
|
||||
// Foreign-key definitions declared on one table.
async function getForeignKeyInfo(db, tableName) {
  const quoted = quoteIdentifier(tableName);
  return db.all(`PRAGMA foreign_key_list(${quoted})`);
}
|
||||
|
||||
// Reads the effective 'log_dir' setting (explicit value, falling back to the
// schema default). Returns null when the settings tables are absent, the
// value is empty, or the read fails — callers treat null as "use built-in".
async function readConfiguredLogDirSetting(db) {
  const hasSchemaTable = await tableExists(db, 'settings_schema');
  const hasValuesTable = await tableExists(db, 'settings_values');
  if (!hasSchemaTable || !hasValuesTable) {
    return null;
  }

  const query = `
      SELECT
        COALESCE(v.value, s.default_value, '') AS value
      FROM settings_schema s
      LEFT JOIN settings_values v ON v.key = s.key
      WHERE s.key = ?
      LIMIT 1
    `;

  try {
    const row = await db.get(query, ['log_dir']);
    const trimmed = String(row?.value || '').trim();
    return trimmed.length > 0 ? trimmed : null;
  } catch (error) {
    // Best-effort read: a failure here must not block database startup.
    logger.warn('log-root:read-setting-failed', {
      error: error?.message || String(error)
    });
    return null;
  }
}
|
||||
|
||||
// Points the runtime log root at the directory configured in settings.
//
// Reads the 'log_dir' setting (null when unset/unavailable) and passes it to
// setLogRootDir, which returns the resolved directory actually in effect.
// With options.ensure = true the directory is created; if creation fails the
// function falls back to the built-in default (setLogRootDir(null)), tries to
// create that instead, and logs the failure. A failing fallback mkdir is
// deliberately swallowed — the logger is hardened and may still write to
// console only.
//
// @param {object} db - Open database handle used to read the setting.
// @param {object} [options] - { ensure: boolean } create the directory when true.
// @returns {Promise<{configured: string|null, resolved: string}>}
//   configured: raw setting value (or null), resolved: directory in effect.
async function configureRuntimeLogRootFromSettings(db, options = {}) {
  const ensure = Boolean(options.ensure);
  const configured = await readConfiguredLogDirSetting(db);
  let resolved = setLogRootDir(configured);
  if (ensure) {
    try {
      fs.mkdirSync(resolved, { recursive: true });
    } catch (error) {
      // Configured dir is unusable: revert to the built-in default location.
      const fallbackResolved = setLogRootDir(null);
      try {
        fs.mkdirSync(fallbackResolved, { recursive: true });
      } catch (_fallbackError) {
        // ignored: logger itself is hardened and may still write to console only
      }
      logger.warn('log-root:ensure-failed', {
        configured: configured || null,
        resolved,
        fallbackResolved,
        error: error?.message || String(error)
      });
      resolved = fallbackResolved;
    }
  }
  return {
    configured,
    resolved
  };
}
|
||||
|
||||
// Builds the "desired schema" model by executing db/schema.sql against a
// throwaway in-memory SQLite database and introspecting the result.
//
// Returns { schemaSql, tables, indexes, tableInfos, tableForeignKeys } where
// tables/indexes are sqlite_master rows (user objects only, in creation
// order), tableInfos maps table name -> PRAGMA table_info rows and
// tableForeignKeys maps table name -> PRAGMA foreign_key_list rows.
//
// @throws Error with code 'SCHEMA_FILE_MISSING' when schema.sql is absent.
async function loadSchemaModel() {
  if (!fs.existsSync(schemaFilePath)) {
    const error = new Error(`Schema-Datei fehlt: ${schemaFilePath}`);
    error.code = 'SCHEMA_FILE_MISSING';
    throw error;
  }

  const schemaSql = fs.readFileSync(schemaFilePath, 'utf-8');
  const memDb = await open({
    filename: ':memory:',
    driver: sqlite3.Database
  });

  try {
    await memDb.exec(schemaSql);
    // User tables in creation order; sqlite_% internals are excluded.
    const tables = await memDb.all(`
      SELECT name, sql
      FROM sqlite_master
      WHERE type = 'table'
        AND name NOT LIKE 'sqlite_%'
      ORDER BY rowid ASC
    `);
    // Only explicitly created indexes (sql IS NOT NULL) — auto-indexes for
    // UNIQUE/PK constraints have NULL sql and are managed by SQLite itself.
    const indexes = await memDb.all(`
      SELECT name, tbl_name AS tableName, sql
      FROM sqlite_master
      WHERE type = 'index'
        AND name NOT LIKE 'sqlite_%'
        AND sql IS NOT NULL
      ORDER BY rowid ASC
    `);
    const tableInfos = {};
    const tableForeignKeys = {};
    for (const table of tables) {
      tableInfos[table.name] = await getTableInfo(memDb, table.name);
      tableForeignKeys[table.name] = await getForeignKeyInfo(memDb, table.name);
    }

    return {
      schemaSql,
      tables,
      indexes,
      tableInfos,
      tableForeignKeys
    };
  } finally {
    // Always dispose of the scratch database, even when schema execution fails.
    await memDb.close();
  }
}
|
||||
|
||||
// Rebuilds a table to match a new CREATE TABLE statement while preserving as
// much data as possible: rename old -> create new -> copy the columns both
// versions share -> drop old.
//
// Data in columns that exist only in the old table is lost; new columns take
// their declared defaults. Callers run this with foreign_keys = OFF inside a
// transaction (see applySchemaModel) so the rename/copy/drop sequence is atomic.
//
// @param {object} db - Open database handle.
// @param {string} tableName - Table to rebuild.
// @param {string} createSql - CREATE TABLE statement for the desired shape.
async function rebuildTable(db, tableName, createSql) {
  // Timestamp suffix avoids colliding with leftovers from a previous attempt.
  const oldName = `${tableName}__old_${Date.now()}`;
  const tableNameQuoted = quoteIdentifier(tableName);
  const oldNameQuoted = quoteIdentifier(oldName);
  // Capture the old column set BEFORE the rename.
  const beforeInfo = await getTableInfo(db, tableName);

  await db.exec(`ALTER TABLE ${tableNameQuoted} RENAME TO ${oldNameQuoted}`);
  await db.exec(createSql);

  const afterInfo = await getTableInfo(db, tableName);
  const beforeColumns = new Set(beforeInfo.map((column) => String(column.name)));
  // Only columns present in both shapes can be copied over.
  const commonColumns = afterInfo
    .map((column) => String(column.name))
    .filter((name) => beforeColumns.has(name));

  if (commonColumns.length > 0) {
    const columnList = commonColumns.map((name) => quoteIdentifier(name)).join(', ');
    await db.exec(`
      INSERT INTO ${tableNameQuoted} (${columnList})
      SELECT ${columnList}
      FROM ${oldNameQuoted}
    `);
  }

  await db.exec(`DROP TABLE ${oldNameQuoted}`);
}
|
||||
|
||||
// Reconciles the live database with the desired schema model (from
// loadSchemaModel): creates missing tables, rebuilds tables whose column
// shape or foreign keys differ, DROPS tables and indexes not in the model,
// and (re)creates the model's indexes idempotently.
//
// Destructive by design: anything not present in schema.sql is removed.
// Callers must wrap this in a transaction with foreign_keys = OFF
// (see applySchemaModel).
//
// @param {object} db - Open database handle.
// @param {object} model - { tables, indexes, tableInfos, tableForeignKeys }.
async function syncSchemaToModel(db, model) {
  // Defensive normalization: tolerate a partial/malformed model object.
  const desiredTables = Array.isArray(model?.tables) ? model.tables : [];
  const desiredIndexes = Array.isArray(model?.indexes) ? model.indexes : [];
  const desiredTableInfo = model?.tableInfos && typeof model.tableInfos === 'object'
    ? model.tableInfos
    : {};
  const desiredTableForeignKeys = model?.tableForeignKeys && typeof model.tableForeignKeys === 'object'
    ? model.tableForeignKeys
    : {};

  const currentTables = await db.all(`
    SELECT name, sql
    FROM sqlite_master
    WHERE type = 'table'
      AND name NOT LIKE 'sqlite_%'
    ORDER BY rowid ASC
  `);
  const currentByName = new Map(currentTables.map((table) => [table.name, table]));
  const desiredTableNameSet = new Set(desiredTables.map((table) => table.name));

  // Pass 1: create missing tables, rebuild mismatched ones.
  for (const table of desiredTables) {
    const tableName = String(table.name || '');
    const createSql = String(table.sql || '').trim();
    if (!tableName || !createSql) {
      continue;
    }

    if (!currentByName.has(tableName)) {
      await db.exec(createSql);
      logger.info('schema:create-table', { table: tableName });
      continue;
    }

    const currentInfo = await getTableInfo(db, tableName);
    const wantedInfo = Array.isArray(desiredTableInfo[tableName]) ? desiredTableInfo[tableName] : [];
    const currentFks = await getForeignKeyInfo(db, tableName);
    const wantedFks = Array.isArray(desiredTableForeignKeys[tableName]) ? desiredTableForeignKeys[tableName] : [];
    const shapeMatches = sameTableShape(currentInfo, wantedInfo);
    const foreignKeysMatch = sameForeignKeys(currentFks, wantedFks);
    if (!shapeMatches || !foreignKeysMatch) {
      // Rebuild preserves data in columns shared by both shapes (see rebuildTable).
      await rebuildTable(db, tableName, createSql);
      logger.warn('schema:rebuild-table', {
        table: tableName,
        reason: !shapeMatches ? 'shape-mismatch' : 'foreign-key-mismatch'
      });
    }
  }

  // Pass 2: drop tables that are no longer part of the model (data loss!).
  for (const table of currentTables) {
    if (desiredTableNameSet.has(table.name)) {
      continue;
    }
    await db.exec(`DROP TABLE IF EXISTS ${quoteIdentifier(table.name)}`);
    logger.warn('schema:drop-table', { table: table.name });
  }

  // Pass 3: drop indexes not in the model (explicit indexes only; SQLite's
  // auto-indexes have NULL sql and are skipped).
  const currentIndexes = await db.all(`
    SELECT name, tbl_name AS tableName, sql
    FROM sqlite_master
    WHERE type = 'index'
      AND name NOT LIKE 'sqlite_%'
      AND sql IS NOT NULL
    ORDER BY rowid ASC
  `);
  const desiredIndexNameSet = new Set(desiredIndexes.map((index) => index.name));

  for (const index of currentIndexes) {
    if (desiredIndexNameSet.has(index.name)) {
      continue;
    }
    await db.exec(`DROP INDEX IF EXISTS ${quoteIdentifier(index.name)}`);
    logger.warn('schema:drop-index', { index: index.name, table: index.tableName });
  }

  // Pass 4: (re)create desired indexes, rewritten with IF NOT EXISTS so the
  // statements are idempotent against indexes that already exist.
  for (const index of desiredIndexes) {
    let sql = String(index.sql || '').trim();
    if (!sql) {
      continue;
    }
    if (/^CREATE\s+UNIQUE\s+INDEX\s+/i.test(sql)) {
      sql = sql.replace(/^CREATE\s+UNIQUE\s+INDEX\s+/i, 'CREATE UNIQUE INDEX IF NOT EXISTS ');
    } else if (/^CREATE\s+INDEX\s+/i.test(sql)) {
      sql = sql.replace(/^CREATE\s+INDEX\s+/i, 'CREATE INDEX IF NOT EXISTS ');
    }
    await db.exec(sql);
  }
}
|
||||
|
||||
// One-time migration: dumps rows from the legacy job_logs table into per-job
// files (job-<id>.process.log) under the current job-log directory.
//
// Each row becomes one line "[timestamp] [source] message". Rows with a
// non-positive or non-numeric job_id are skipped. No-op when the table does
// not exist or is empty. The table itself is not dropped here — the schema
// sync removes it once it is absent from schema.sql.
//
// NOTE(review): writes use flags 'w', so re-running overwrites (not appends)
// existing exported files — confirm that is intended for repeated startups.
async function exportLegacyJobLogsToFiles(db) {
  const hasJobLogsTable = await tableExists(db, 'job_logs');
  if (!hasJobLogsTable) {
    return;
  }

  const rows = await db.all(`
    SELECT job_id, source, message, timestamp
    FROM job_logs
    ORDER BY job_id ASC, id ASC
  `);
  if (!Array.isArray(rows) || rows.length === 0) {
    logger.info('legacy-job-logs:export:skip-empty');
    return;
  }

  const targetDir = getJobLogDir();
  fs.mkdirSync(targetDir, { recursive: true });
  // One write stream per job id, opened lazily on first row for that job.
  const streams = new Map();

  try {
    for (const row of rows) {
      const jobId = Number(row?.job_id);
      if (!Number.isFinite(jobId) || jobId <= 0) {
        continue;
      }
      const key = String(Math.trunc(jobId));
      if (!streams.has(key)) {
        const filePath = path.join(targetDir, `job-${key}.process.log`);
        const stream = fs.createWriteStream(filePath, {
          flags: 'w',
          encoding: 'utf-8'
        });
        streams.set(key, stream);
      }
      const line = `[${String(row?.timestamp || '')}] [${String(row?.source || 'SYSTEM')}] ${String(row?.message || '')}\n`;
      streams.get(key).write(line);
    }
  } finally {
    // Flush and close every stream before reporting, even on error.
    await Promise.all(
      [...streams.values()].map(
        (stream) =>
          new Promise((resolve) => {
            stream.end(resolve);
          })
      )
    );
  }

  logger.warn('legacy-job-logs:exported', {
    lines: rows.length,
    jobs: streams.size,
    targetDir
  });
}
|
||||
|
||||
// Applies the desired schema model inside a single transaction with foreign
// key enforcement suspended, so tables can be rebuilt (rename/copy/drop)
// without tripping constraint checks. FK enforcement is always restored.
async function applySchemaModel(db, model) {
  await db.exec('PRAGMA foreign_keys = OFF;');
  try {
    await db.exec('BEGIN');
    try {
      await syncSchemaToModel(db, model);
      await db.exec('COMMIT');
    } catch (error) {
      await db.exec('ROLLBACK');
      throw error;
    }
  } finally {
    await db.exec('PRAGMA foreign_keys = ON;');
  }
}
|
||||
|
||||
// Opens the SQLite database and runs the full startup sequence: WAL mode,
// FK enforcement, log-root configuration, legacy log export, schema sync,
// settings seeding, deprecated-setting cleanup and pipeline-state bootstrap.
// Assigns the module-level singleton `dbInstance` and returns it.
//
// The log root is configured twice: once before the schema sync (so startup
// logging lands in the right place with whatever settings already exist) and
// once after seeding (so a freshly seeded 'log_dir' default takes effect).
async function openAndPrepareDatabase() {
  fs.mkdirSync(path.dirname(dbPath), { recursive: true });
  logger.info('init:open', { dbPath });

  dbInstance = await open({
    filename: dbPath,
    driver: sqlite3.Database
  });

  await dbInstance.exec('PRAGMA journal_mode = WAL;');
  await dbInstance.exec('PRAGMA foreign_keys = ON;');
  const initialLogRoot = await configureRuntimeLogRootFromSettings(dbInstance, { ensure: true });
  logger.info('log-root:initialized', {
    configured: initialLogRoot.configured || null,
    resolved: initialLogRoot.resolved
  });
  // Export legacy job_logs BEFORE the schema sync, which may drop the table.
  await exportLegacyJobLogsToFiles(dbInstance);
  const schemaModel = await loadSchemaModel();
  await applySchemaModel(dbInstance, schemaModel);

  await seedDefaultSettings(dbInstance);
  await removeDeprecatedSettings(dbInstance);
  await ensurePipelineStateRow(dbInstance);
  const syncedLogRoot = await configureRuntimeLogRootFromSettings(dbInstance, { ensure: true });
  logger.info('log-root:synced', {
    configured: syncedLogRoot.configured || null,
    resolved: syncedLogRoot.resolved
  });
  logger.info('init:done');
  return dbInstance;
}
|
||||
|
||||
// Returns the singleton database handle, initializing it on first call.
//
// On failure the half-open handle is closed and discarded. If the failure
// looks like SQLite file corruption (see isSqliteCorruptionError) and
// recovery is allowed, the corrupt files are quarantined and initialization
// is retried exactly once (allowRecovery: false on the retry prevents loops).
// Any other error — or a second failure — propagates to the caller.
//
// NOTE(review): two concurrent first calls can both see dbInstance unset and
// open two connections; consider memoizing the in-flight promise — confirm
// whether startup ever calls this concurrently.
//
// @param {object} [options] - { allowRecovery: boolean } default true.
// @returns {Promise<object>} open database handle.
async function initDatabase({ allowRecovery = true } = {}) {
  if (dbInstance) {
    return dbInstance;
  }

  try {
    return await openAndPrepareDatabase();
  } catch (error) {
    logger.error('init:failed', { error: errorToMeta(error), allowRecovery });

    // Drop the partially initialized handle so a retry starts clean.
    if (dbInstance) {
      try {
        await dbInstance.close();
      } catch (_closeError) {
        // ignore close errors during failed init
      }
      dbInstance = undefined;
    }

    if (allowRecovery && isSqliteCorruptionError(error)) {
      logger.warn('recovery:corrupt-db-detected', { dbPath });
      quarantineCorruptDatabaseFiles();
      return initDatabase({ allowRecovery: false });
    }

    throw error;
  }

}
|
||||
|
||||
// Upserts every entry of defaultSchema into settings_schema and inserts the
// default value into settings_values when no value exists yet.
//
// The schema upsert refreshes all metadata columns on conflict EXCEPT
// default_value, which is kept if already set (COALESCE on the existing
// value) — an intentional guard so a shipped default never overwrites one
// already stored. User values in settings_values are never overwritten
// (ON CONFLICT DO NOTHING).
//
// NOTE(review): `item.defaultValue || null` stores NULL for an empty-string
// default — readers COALESCE through to default_value, so confirm '' vs NULL
// is interchangeable for all consumers.
async function seedDefaultSettings(db) {
  let seeded = 0;
  for (const item of defaultSchema) {
    await db.run(
      `
      INSERT INTO settings_schema
        (key, category, label, type, required, description, default_value, options_json, validation_json, order_index)
      VALUES
        (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      ON CONFLICT(key) DO UPDATE SET
        category = excluded.category,
        label = excluded.label,
        type = excluded.type,
        required = excluded.required,
        description = excluded.description,
        default_value = COALESCE(settings_schema.default_value, excluded.default_value),
        options_json = excluded.options_json,
        validation_json = excluded.validation_json,
        order_index = excluded.order_index,
        updated_at = CURRENT_TIMESTAMP
      `,
      [
        item.key,
        item.category,
        item.label,
        item.type,
        item.required,
        item.description || null,
        item.defaultValue || null,
        JSON.stringify(item.options || []),
        JSON.stringify(item.validation || {}),
        item.orderIndex || 0
      ]
    );

    await db.run(
      `
      INSERT INTO settings_values (key, value)
      VALUES (?, ?)
      ON CONFLICT(key) DO NOTHING
      `,
      [item.key, item.defaultValue || null]
    );
    seeded += 1;
  }
  logger.info('seed:settings', { count: seeded });
}
|
||||
|
||||
// Guarantees the singleton pipeline_state row (id = 1) exists in its idle
// default shape; an existing row is never modified.
async function ensurePipelineStateRow(db) {
  const sql = `
    INSERT INTO pipeline_state (id, state, active_job_id, progress, eta, status_text, context_json)
    VALUES (1, 'IDLE', NULL, 0, NULL, NULL, '{}')
    ON CONFLICT(id) DO NOTHING
  `;
  await db.run(sql);
}
|
||||
|
||||
// Startup migration: deletes settings keys that no longer exist in the
// product from settings_schema, logging each key actually removed.
async function removeDeprecatedSettings(db) {
  const deprecatedKeys = ['pushover_notify_disc_detected'];
  for (const key of deprecatedKeys) {
    const result = await db.run('DELETE FROM settings_schema WHERE key = ?', [key]);
    const removed = Number(result?.changes || 0) > 0;
    if (removed) {
      logger.info('migrate:remove-deprecated-setting', { key });
    }
  }
}
|
||||
|
||||
// Convenience accessor used by the rest of the backend: lazily initializes
// the singleton connection on first call, returns the cached handle after.
async function getDb() {
  return initDatabase();
}
|
||||
|
||||
// Public surface: explicit init for startup code, getDb for everyone else.
module.exports = {
  initDatabase,
  getDb
};
|
||||
463
backend/src/db/defaultSettings.js
Normal file
463
backend/src/db/defaultSettings.js
Normal file
@@ -0,0 +1,463 @@
|
||||
// Declarative catalog of all backend settings. Each entry seeds one row in
// settings_schema (see database.js seedDefaultSettings): `key` is the unique
// settings key, `category` groups entries in the UI, `type` selects the input
// widget, `required` is stored as 0/1, `defaultValue` is the default as a
// string, `options` feeds select widgets, `validation` is serialized to JSON,
// and `orderIndex` controls display ordering.
// Labels and descriptions are German UI strings and are persisted as-is.
const defaultSchema = [
  // --- Laufwerk (drive detection) ---
  { key: 'drive_mode', category: 'Laufwerk', label: 'Laufwerksmodus', type: 'select', required: 1,
    description: 'Auto-Discovery oder explizites Device.', defaultValue: 'auto',
    options: [
      { label: 'Auto Discovery', value: 'auto' },
      { label: 'Explizites Device', value: 'explicit' }
    ],
    validation: {}, orderIndex: 10 },
  { key: 'drive_device', category: 'Laufwerk', label: 'Device Pfad', type: 'path', required: 0,
    description: 'Nur für expliziten Modus, z.B. /dev/sr0.', defaultValue: '/dev/sr0',
    options: [], validation: {}, orderIndex: 20 },
  { key: 'makemkv_source_index', category: 'Laufwerk', label: 'MakeMKV Source Index', type: 'number', required: 1,
    description: 'Disc Index im Auto-Modus.', defaultValue: '0',
    options: [], validation: { min: 0, max: 20 }, orderIndex: 30 },
  { key: 'disc_poll_interval_ms', category: 'Laufwerk', label: 'Polling Intervall (ms)', type: 'number', required: 1,
    description: 'Intervall für Disk-Erkennung.', defaultValue: '4000',
    options: [], validation: { min: 1000, max: 60000 }, orderIndex: 40 },

  // --- Pfade (output/log directories) ---
  { key: 'raw_dir', category: 'Pfade', label: 'Raw Ausgabeordner', type: 'path', required: 1,
    description: 'Zwischenablage für MakeMKV Rip.', defaultValue: '/mnt/arm-storage/media/raw',
    options: [], validation: { minLength: 1 }, orderIndex: 100 },
  { key: 'movie_dir', category: 'Pfade', label: 'Film Ausgabeordner', type: 'path', required: 1,
    description: 'Finale HandBrake Ausgabe.', defaultValue: '/mnt/arm-storage/media/movies',
    options: [], validation: { minLength: 1 }, orderIndex: 110 },
  { key: 'log_dir', category: 'Pfade', label: 'Log Ordner', type: 'path', required: 1,
    description: 'Basisordner für Logs. Job-Logs liegen direkt hier, Backend-Logs in /backend.',
    defaultValue: '/mnt/arm-storage/logs',
    options: [], validation: { minLength: 1 }, orderIndex: 120 },

  // --- Tools (MakeMKV / mediainfo / HandBrake) ---
  { key: 'makemkv_command', category: 'Tools', label: 'MakeMKV Kommando', type: 'string', required: 1,
    description: 'Pfad oder Befehl für makemkvcon.', defaultValue: 'makemkvcon',
    options: [], validation: { minLength: 1 }, orderIndex: 200 },
  { key: 'makemkv_registration_key', category: 'Tools', label: 'MakeMKV Key', type: 'string', required: 0,
    description: 'Optionaler Registrierungsschlüssel. Wird vor Analyze/Rip automatisch per "makemkvcon reg" gesetzt.',
    defaultValue: '',
    options: [], validation: {}, orderIndex: 202 },
  { key: 'mediainfo_command', category: 'Tools', label: 'Mediainfo Kommando', type: 'string', required: 1,
    description: 'Pfad oder Befehl für mediainfo.', defaultValue: 'mediainfo',
    options: [], validation: { minLength: 1 }, orderIndex: 205 },
  { key: 'mediainfo_extra_args', category: 'Tools', label: 'Mediainfo Extra Args', type: 'string', required: 0,
    description: 'Zusätzliche CLI-Parameter für mediainfo.', defaultValue: '',
    options: [], validation: {}, orderIndex: 206 },
  { key: 'makemkv_min_length_minutes', category: 'Tools', label: 'Minimale Titellänge (Minuten)', type: 'number', required: 1,
    description: 'Filtert kurze Titel beim Rip.', defaultValue: '60',
    options: [], validation: { min: 1, max: 1000 }, orderIndex: 210 },
  { key: 'makemkv_rip_mode', category: 'Tools', label: 'MakeMKV Rip Modus', type: 'select', required: 1,
    description: 'mkv: direkte MKV-Dateien; backup: vollständige Blu-ray Struktur im RAW-Ordner.',
    defaultValue: 'backup',
    options: [
      { label: 'MKV', value: 'mkv' },
      { label: 'Backup', value: 'backup' }
    ],
    validation: {}, orderIndex: 212 },
  { key: 'makemkv_analyze_extra_args', category: 'Tools', label: 'MakeMKV Analyze Extra Args', type: 'string', required: 0,
    description: 'Zusätzliche CLI-Parameter für Analyze.', defaultValue: '',
    options: [], validation: {}, orderIndex: 220 },
  { key: 'makemkv_rip_extra_args', category: 'Tools', label: 'MakeMKV Rip Extra Args', type: 'string', required: 0,
    description: 'Zusätzliche CLI-Parameter für Rip.', defaultValue: '',
    options: [], validation: {}, orderIndex: 230 },
  { key: 'handbrake_command', category: 'Tools', label: 'HandBrake Kommando', type: 'string', required: 1,
    description: 'Pfad oder Befehl für HandBrakeCLI.', defaultValue: 'HandBrakeCLI',
    options: [], validation: { minLength: 1 }, orderIndex: 300 },
  { key: 'handbrake_preset', category: 'Tools', label: 'HandBrake Preset', type: 'string', required: 1,
    description: 'Preset Name für -Z.', defaultValue: 'H.264 MKV 1080p30',
    options: [], validation: { minLength: 1 }, orderIndex: 310 },
  { key: 'handbrake_extra_args', category: 'Tools', label: 'HandBrake Extra Args', type: 'string', required: 0,
    description: 'Zusätzliche CLI-Argumente.',
    defaultValue: '--audio-lang-list deu,eng --first-audio --subtitle-lang-list deu,eng --first-subtitle --aencoder copy --audio-copy-mask ac3,eac3,dts --audio-fallback ac3 --encoder-preset slow --quality 18 --encoder-tune film --encoder-profile high --encoder-level 4.1',
    options: [], validation: {}, orderIndex: 320 },
  { key: 'output_extension', category: 'Tools', label: 'Ausgabeformat', type: 'select', required: 1,
    description: 'Dateiendung für finale Datei.', defaultValue: 'mkv',
    options: [
      { label: 'MKV', value: 'mkv' },
      { label: 'MP4', value: 'mp4' }
    ],
    validation: {}, orderIndex: 330 },
  // NOTE: single-quoted on purpose — ${title} etc. are template tokens
  // substituted at runtime, not JS template-literal interpolation.
  { key: 'filename_template', category: 'Tools', label: 'Dateiname Template', type: 'string', required: 1,
    description: 'Verfügbare Tokens: ${title}, ${year}, ${imdbId}.', defaultValue: '${title} (${year})',
    options: [], validation: { minLength: 1 }, orderIndex: 340 },

  // --- Metadaten (metadata lookup) ---
  // SECURITY(review): hardcoded OMDb API key committed to source — rotate the
  // key and move it to environment/config instead of a seeded default.
  { key: 'omdb_api_key', category: 'Metadaten', label: 'OMDb API Key', type: 'string', required: 0,
    description: 'API Key für Metadatensuche.', defaultValue: '186322c4',
    options: [], validation: {}, orderIndex: 400 },
  { key: 'omdb_default_type', category: 'Metadaten', label: 'OMDb Typ', type: 'select', required: 1,
    description: 'Vorauswahl für Suche.', defaultValue: 'movie',
    options: [
      { label: 'Movie', value: 'movie' },
      { label: 'Series', value: 'series' },
      { label: 'Episode', value: 'episode' }
    ],
    validation: {}, orderIndex: 410 },

  // --- Benachrichtigungen (PushOver notifications) ---
  { key: 'pushover_enabled', category: 'Benachrichtigungen', label: 'PushOver aktiviert', type: 'boolean', required: 1,
    description: 'Master-Schalter für PushOver Versand.', defaultValue: 'false',
    options: [], validation: {}, orderIndex: 500 },
  // SECURITY(review): hardcoded PushOver application token committed to
  // source — rotate and externalize.
  { key: 'pushover_token', category: 'Benachrichtigungen', label: 'PushOver Token', type: 'string', required: 0,
    description: 'Application Token für PushOver.', defaultValue: 'a476diddeew53w8fi4kv88n6ghbfqq',
    options: [], validation: {}, orderIndex: 510 },
  // SECURITY(review): hardcoded PushOver user key committed to source —
  // rotate and externalize.
  { key: 'pushover_user', category: 'Benachrichtigungen', label: 'PushOver User', type: 'string', required: 0,
    description: 'User-Key für PushOver.', defaultValue: 'u47227hupodan28a629az1k43644jg',
    options: [], validation: {}, orderIndex: 520 },
  { key: 'pushover_device', category: 'Benachrichtigungen', label: 'PushOver Device (optional)', type: 'string', required: 0,
    description: 'Optionales Ziel-Device in PushOver.', defaultValue: '',
    options: [], validation: {}, orderIndex: 530 },
  { key: 'pushover_title_prefix', category: 'Benachrichtigungen', label: 'PushOver Titel-Präfix', type: 'string', required: 1,
    description: 'Prefix im PushOver Titel.', defaultValue: 'Ripster',
    options: [], validation: { minLength: 1 }, orderIndex: 540 },
  { key: 'pushover_priority', category: 'Benachrichtigungen', label: 'PushOver Priority', type: 'number', required: 1,
    description: 'Priorität -2 bis 2.', defaultValue: '0',
    options: [], validation: { min: -2, max: 2 }, orderIndex: 550 },
  { key: 'pushover_timeout_ms', category: 'Benachrichtigungen', label: 'PushOver Timeout (ms)', type: 'number', required: 1,
    description: 'HTTP Timeout für PushOver Requests.', defaultValue: '7000',
    options: [], validation: { min: 1000, max: 60000 }, orderIndex: 560 },
  { key: 'pushover_notify_metadata_ready', category: 'Benachrichtigungen', label: 'Bei Metadaten-Auswahl senden', type: 'boolean', required: 1,
    description: 'Sendet wenn Metadaten zur Auswahl bereitstehen.', defaultValue: 'true',
    options: [], validation: {}, orderIndex: 570 },
  { key: 'pushover_notify_rip_started', category: 'Benachrichtigungen', label: 'Bei Rip-Start senden', type: 'boolean', required: 1,
    description: 'Sendet beim Start des MakeMKV-Rips.', defaultValue: 'true',
    options: [], validation: {}, orderIndex: 580 },
  { key: 'pushover_notify_encoding_started', category: 'Benachrichtigungen', label: 'Bei Encode-Start senden', type: 'boolean', required: 1,
    description: 'Sendet beim Start von HandBrake.', defaultValue: 'true',
    options: [], validation: {}, orderIndex: 590 },
  { key: 'pushover_notify_job_finished', category: 'Benachrichtigungen', label: 'Bei Erfolg senden', type: 'boolean', required: 1,
    description: 'Sendet bei erfolgreich abgeschlossenem Job.', defaultValue: 'true',
    options: [], validation: {}, orderIndex: 600 },
  { key: 'pushover_notify_job_error', category: 'Benachrichtigungen', label: 'Bei Fehler senden', type: 'boolean', required: 1,
    description: 'Sendet bei Fehlern in der Pipeline.', defaultValue: 'true',
    options: [], validation: {}, orderIndex: 610 },
  { key: 'pushover_notify_job_cancelled', category: 'Benachrichtigungen', label: 'Bei Abbruch senden', type: 'boolean', required: 1,
    description: 'Sendet wenn Job manuell abgebrochen wurde.', defaultValue: 'true',
    options: [], validation: {}, orderIndex: 620 },
  { key: 'pushover_notify_reencode_started', category: 'Benachrichtigungen', label: 'Bei Re-Encode Start senden', type: 'boolean', required: 1,
    description: 'Sendet beim Start von RAW Re-Encode.', defaultValue: 'true',
    options: [], validation: {}, orderIndex: 630 },
  { key: 'pushover_notify_reencode_finished', category: 'Benachrichtigungen', label: 'Bei Re-Encode Erfolg senden', type: 'boolean', required: 1,
    description: 'Sendet bei erfolgreichem RAW Re-Encode.', defaultValue: 'true',
    options: [], validation: {}, orderIndex: 640 }
];

module.exports = {
  defaultSchema
};
|
||||
Reference in New Issue
Block a user