0.10.1-1 AAX Encode

This commit is contained in:
2026-03-15 10:48:01 +00:00
parent a957dfea73
commit 52ef155c7c
14 changed files with 250 additions and 90 deletions

View File

@@ -1,12 +1,12 @@
{
"name": "ripster-backend",
"version": "0.10.1",
"version": "0.10.1-1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "ripster-backend",
"version": "0.10.1",
"version": "0.10.1-1",
"dependencies": {
"cors": "^2.8.5",
"dotenv": "^16.4.7",

View File

@@ -1,6 +1,6 @@
{
"name": "ripster-backend",
"version": "0.10.1",
"version": "0.10.1-1",
"private": true,
"type": "commonjs",
"scripts": {

View File

@@ -7,6 +7,8 @@ const pipelineService = require('../services/pipelineService');
const diskDetectionService = require('../services/diskDetectionService');
const hardwareMonitorService = require('../services/hardwareMonitorService');
const logger = require('../services/logger').child('PIPELINE_ROUTE');
const activationBytesService = require('../services/activationBytesService');
const { getDb } = require('../db/database');
const router = express.Router();
const audiobookUpload = multer({
@@ -155,6 +157,25 @@ router.post(
})
);
// GET /audiobook/pending-activation
// Lists active jobs whose AAX checksum is known but for which no
// activation bytes exist in the local cache yet, so the UI can prompt
// the user for manual entry. Responds with { pending: [{ jobId, checksum }] }.
router.get(
  '/audiobook/pending-activation',
  asyncHandler(async (req, res) => {
    const database = await getDb();
    // Anti-join: a job is "pending" while no matching row exists in
    // aax_activation_bytes and the job has not reached a terminal status.
    const rows = await database.all(`
      SELECT j.id AS jobId, j.aax_checksum AS checksum
      FROM jobs j
      WHERE j.aax_checksum IS NOT NULL
        AND j.status NOT IN ('DONE', 'ERROR', 'CANCELLED')
        AND NOT EXISTS (
          SELECT 1 FROM aax_activation_bytes ab WHERE ab.checksum = j.aax_checksum
        )
      ORDER BY j.created_at DESC
    `);
    res.json({ pending: rows });
  })
);
router.post(
'/audiobook/start/:jobId',
asyncHandler(async (req, res) => {

View File

@@ -385,4 +385,19 @@ router.get(
})
);
// POST /activation-bytes
// Stores manually supplied activation bytes for an AAX checksum.
// Body: { checksum, activationBytes }. Validation (hex format, checksum
// verification) is delegated to activationBytesService.saveActivationBytes,
// which returns the normalized value that gets echoed back to the client.
router.post(
  '/activation-bytes',
  asyncHandler(async (req, res) => {
    const body = req.body || {};
    const checksum = body.checksum;
    const activationBytes = body.activationBytes;
    // Both fields are mandatory; reject early with a 400 before touching the service.
    if (!checksum || !activationBytes) {
      const error = new Error('checksum und activationBytes sind erforderlich');
      error.statusCode = 400;
      throw error;
    }
    logger.debug('post:settings:activation-bytes', { reqId: req.reqId, checksum });
    const saved = await activationBytesService.saveActivationBytes(checksum, activationBytes);
    res.json({ success: true, checksum, activationBytes: saved });
  })
);
module.exports = router;

View File

@@ -1,13 +1,11 @@
const fs = require('fs');
const crypto = require('crypto');
const https = require('https');
const { getDb } = require('../db/database');
const logger = require('./logger').child('ActivationBytes');
const FIXED_KEY = Buffer.from([0x77, 0x21, 0x4d, 0x4b, 0x19, 0x6a, 0x87, 0xcd, 0x52, 0x00, 0x45, 0xfd, 0x20, 0xa5, 0x1d, 0x67]);
const AAX_CHECKSUM_OFFSET = 653;
const AAX_CHECKSUM_LENGTH = 20;
const AUDIBLE_TOOLS_API = 'https://aaxapiserverfunction20220831180001.azurewebsites.net';
function sha1(data) {
return crypto.createHash('sha1').update(data).digest();
@@ -41,69 +39,36 @@ async function lookupCached(checksum) {
return row ? row.activation_bytes : null;
}
async function saveToCache(checksum, activationBytes) {
async function saveActivationBytes(checksum, activationBytesHex) {
const normalized = String(activationBytesHex || '').trim().toLowerCase();
if (!/^[0-9a-f]{8}$/.test(normalized)) {
throw new Error('Activation Bytes müssen genau 8 Hex-Zeichen (4 Bytes) sein');
}
if (!verifyActivationBytes(normalized, checksum)) {
throw new Error('Activation Bytes passen nicht zur Checksum bitte nochmals prüfen');
}
const db = await getDb();
await db.run(
'INSERT OR IGNORE INTO aax_activation_bytes (checksum, activation_bytes) VALUES (?, ?)',
'INSERT OR REPLACE INTO aax_activation_bytes (checksum, activation_bytes) VALUES (?, ?)',
checksum,
activationBytes
normalized
);
}
function fetchFromApi(checksum) {
return new Promise((resolve, reject) => {
const url = `${AUDIBLE_TOOLS_API}/api/v2/activation/${checksum}`;
https.get(url, (res) => {
let data = '';
res.on('data', chunk => { data += chunk; });
res.on('end', () => {
try {
resolve(JSON.parse(data));
} catch {
reject(new Error('Ungültige API-Antwort'));
}
});
}).on('error', reject);
});
logger.info({ checksum, activationBytes: normalized }, 'Activation Bytes manuell gespeichert');
return normalized;
}
async function resolveActivationBytes(filePath) {
const checksum = readAaxChecksum(filePath);
logger.info({ checksum }, 'AAX Checksum gelesen');
// 1. Cache prüfen
const cached = await lookupCached(checksum);
if (cached) {
logger.info({ checksum }, 'Activation Bytes aus lokalem Cache');
return { checksum, activationBytes: cached, source: 'cache' };
return { checksum, activationBytes: cached };
}
// 2. Audible-Tools API anfragen
logger.info({ checksum }, 'Frage Audible-Tools API an...');
let activationBytes = null;
try {
const result = await fetchFromApi(checksum);
if (result.success === true && result.activationBytes) {
if (verifyActivationBytes(result.activationBytes, checksum)) {
activationBytes = result.activationBytes;
logger.info({ checksum, activationBytes }, 'Activation Bytes via API verifiziert');
} else {
logger.warn({ checksum }, 'API-Antwort konnte nicht verifiziert werden');
}
} else {
logger.warn({ checksum }, 'Checksum der API unbekannt');
}
} catch (err) {
logger.warn({ checksum, err: err.message }, 'API nicht erreichbar');
}
if (!activationBytes) {
throw new Error(`Activation Bytes für Checksum ${checksum} nicht gefunden (API unbekannt oder nicht erreichbar)`);
}
// 3. Lokal cachen
await saveToCache(checksum, activationBytes);
return { checksum, activationBytes, source: 'api' };
logger.info({ checksum }, 'Keine Activation Bytes im Cache manuelle Eingabe erforderlich');
return { checksum, activationBytes: null };
}
async function listCachedEntries() {
@@ -111,4 +76,4 @@ async function listCachedEntries() {
return db.all('SELECT checksum, activation_bytes, created_at FROM aax_activation_bytes ORDER BY created_at DESC');
}
module.exports = { resolveActivationBytes, readAaxChecksum, listCachedEntries };
module.exports = { resolveActivationBytes, readAaxChecksum, saveActivationBytes, verifyActivationBytes, listCachedEntries };

View File

@@ -618,6 +618,7 @@ function buildEncodeCommand(ffmpegCommand, inputPath, outputPath, outputFormat =
const extra = options && typeof options === 'object' ? options : {};
const commonArgs = [
'-y',
...(extra.activationBytes ? ['-activation_bytes', extra.activationBytes] : []),
'-i', inputPath
];
if (extra.chapterMetadataPath) {
@@ -657,11 +658,13 @@ function buildChapterEncodeCommand(
formatOptions = {},
metadata = {},
chapter = {},
chapterTotal = 1
chapterTotal = 1,
options = {}
) {
const cmd = String(ffmpegCommand || 'ffmpeg').trim() || 'ffmpeg';
const format = normalizeOutputFormat(outputFormat);
const normalizedOptions = normalizeFormatOptions(format, formatOptions);
const extra = options && typeof options === 'object' ? options : {};
const safeChapter = normalizeChapterList([chapter], {
durationMs: metadata?.durationMs,
fallbackTitle: metadata?.title || 'Kapitel',
@@ -679,6 +682,7 @@ function buildChapterEncodeCommand(
cmd,
args: [
'-y',
...(extra.activationBytes ? ['-activation_bytes', extra.activationBytes] : []),
'-i', inputPath,
'-ss', formatSecondsArg(safeChapter?.startSeconds),
'-t', formatSecondsArg(durationSeconds),

View File

@@ -10947,25 +10947,22 @@ class PipelineService extends EventEmitter {
stagedRawFilePath
});
// Activation Bytes: erst Cache prüfen, dann einmalig Azure-API anfragen und persistent speichern
// Activation Bytes: Cache prüfen und Checksum am Job speichern
let aaxChecksum = null;
let aaxNeedsActivationBytes = false;
try {
const { checksum, activationBytes, source } = await activationBytesService.resolveActivationBytes(stagedRawFilePath);
await historyService.appendLog(
job.id,
'SYSTEM',
`Activation Bytes aufgelöst (${source}): checksum=${checksum} bytes=${activationBytes}`
);
logger.info('audiobook:upload:activation-bytes', { jobId: job.id, checksum, activationBytes, source });
const abResult = await activationBytesService.resolveActivationBytes(stagedRawFilePath);
aaxChecksum = abResult.checksum;
await historyService.updateJob(job.id, { aax_checksum: aaxChecksum });
if (abResult.activationBytes) {
await historyService.appendLog(job.id, 'SYSTEM', `Activation Bytes im Cache gefunden: checksum=${abResult.checksum}`);
logger.info('audiobook:upload:activation-bytes', { jobId: job.id, checksum: abResult.checksum, source: 'cache' });
} else {
aaxNeedsActivationBytes = true;
logger.info('audiobook:upload:activation-bytes-needed', { jobId: job.id, checksum: abResult.checksum });
}
} catch (abError) {
logger.warn('audiobook:upload:activation-bytes-failed', {
jobId: job.id,
error: errorToMeta(abError)
});
await historyService.appendLog(
job.id,
'SYSTEM',
`Activation Bytes konnten nicht aufgelöst werden: ${abError?.message || 'unknown'}`
).catch(() => {});
logger.warn('audiobook:upload:activation-bytes-failed', { jobId: job.id, error: errorToMeta(abError) });
}
let detectedAsin = null;
@@ -11104,7 +11101,8 @@ class PipelineService extends EventEmitter {
jobId: job.id,
started: false,
queued: false,
stage: 'READY_TO_START'
stage: 'READY_TO_START',
...(aaxNeedsActivationBytes ? { needsActivationBytes: true, checksum: aaxChecksum } : {})
};
}
@@ -11401,6 +11399,22 @@ class PipelineService extends EventEmitter {
let temporaryChapterMetadataPath = null;
// Activation Bytes für AAX-Dateien aus Cache lesen
let encodeActivationBytes = null;
if (path.extname(inputPath).toLowerCase() === '.aax') {
try {
const abResult = await activationBytesService.resolveActivationBytes(inputPath);
encodeActivationBytes = abResult.activationBytes || null;
if (!encodeActivationBytes) {
throw new Error('Activation Bytes nicht im Cache bitte zuerst über den Upload-Dialog eintragen');
}
logger.info('audiobook:encode:activation-bytes', { jobId, checksum: abResult.checksum });
} catch (abError) {
logger.error('audiobook:encode:activation-bytes-failed', { jobId, error: errorToMeta(abError) });
throw abError;
}
}
try {
let ffmpegRunInfo = null;
if (isSplitOutput) {
@@ -11451,7 +11465,8 @@ class PipelineService extends EventEmitter {
formatOptions,
metadata,
chapter,
outputFiles.length
outputFiles.length,
{ activationBytes: encodeActivationBytes }
);
const baseParser = audiobookService.buildProgressParser(chapter?.durationMs || 0);
const scaledParser = baseParser
@@ -11525,7 +11540,8 @@ class PipelineService extends EventEmitter {
formatOptions,
{
chapterMetadataPath: temporaryChapterMetadataPath,
metadata
metadata,
activationBytes: encodeActivationBytes
}
);
logger.info('audiobook:encode:command', { jobId, cmd: ffmpegConfig.cmd, args: ffmpegConfig.args });