parent 78bf921e86
commit 4afe5cc0b8

@@ -1,237 +1,227 @@
-/* eslint-disable */
-//PLugin runs multipass loudnorm filter
-//first run gets the required details and stores for the next pass
-//second pass applies the values
+/* eslint-disable no-unused-vars */
+module.exports.dependencies = ['axios@0.27.2'];

-//stages
+// PLugin runs multipass loudnorm filter
+// first run gets the required details and stores for the next pass
+// second pass applies the values

+// stages
 // Determined Loudnorm Values
 // Applying Normalisation
 // Normalisation Complete


-//setup global vars

-var secondPass = false;
-var logOutFile = '';

 // tdarrSkipTest
-const details = () => {
-  return {
-    id: "Tdarr_Plugin_NIfPZuCLU_2_Pass_Loudnorm_Audio_Normalisation",
-    Stage: 'Pre-processing',
-    Name: "2 Pass Loudnorm Volume Normalisation",
-    Type: "Video",
-    Operation: "Transcode",
-    Description: "PLEASE READ FULL DESCRIPTION BEFORE USE \n Uses multiple passes to normalise audio streams of videos using loudnorm.\n\n The first pass will create an log file in the same directory as the video.\nSecond pass will apply the values determined in the first pass to the file.\nOutput will be MKV to allow metadata to be added for tracking normalisation stage.",
-    Version: "0.1",
-    Tags: "pre-processing,ffmpeg,configurable",
-    Inputs: [
-      //(Optional) Inputs you'd like the user to enter to allow your plugin to be easily configurable from the UI
-      {
-        name: "i",
-        type: 'string',
-        defaultValue:'-23.0',
-        inputUI: {
-          type: 'text',
-        },
-        tooltip: `\"I\" value used in loudnorm pass \n
-defaults to -23.0`, //Each line following `Example:` will be clearly formatted. \\n used for line breaks
-      },
-      {
-        name: "lra",
-        type: 'string',
-        defaultValue:'7.0',
-        inputUI: {
-          type: 'text',
-        },
-        tooltip: `Desired lra value. \n Defaults to 7.0
-`,
-      },
-      {
-        name: "tp",
-        type: 'string',
-        defaultValue:'-2.0',
-        inputUI: {
-          type: 'text',
-        },
-        tooltip: `Desired \"tp\" value. \n Defaults to -2.0
-`,
-      },
-      {
-        name: "offset",
-        type: 'string',
-        defaultValue:'0.0',
-        inputUI: {
-          type: 'text',
-        },
-        tooltip: `Desired "offset" value. \n Defaults to 0.0
-`,
-      },
-    ],
-  }
-}
+const details = () => ({
+  id: 'Tdarr_Plugin_NIfPZuCLU_2_Pass_Loudnorm_Audio_Normalisation',
+  Stage: 'Pre-processing',
+  Name: '2 Pass Loudnorm Volume Normalisation',
+  Type: 'Video',
+  Operation: 'Transcode',
+  Description: `PLEASE READ FULL DESCRIPTION BEFORE USE
+Uses multiple passes to normalise audio streams of videos using loudnorm.
+The first pass will create an log file in the same directory as the video.
+Second pass will apply the values determined in the first pass to the file.
+Output will be MKV to allow metadata to be added for tracking normalisation stage.`,
+  Version: '0.1',
+  Tags: 'pre-processing,ffmpeg,configurable',
+  Inputs: [
+    // (Optional) Inputs you'd like the user to enter to allow your plugin to be easily configurable from the UI
+    {
+      name: 'i',
+      type: 'string',
+      defaultValue: '-23.0',
+      inputUI: {
+        type: 'text',
+      },
+      tooltip: `"i" value used in loudnorm pass \\n
+defaults to -23.0`,
+    },
+    {
+      name: 'lra',
+      type: 'string',
+      defaultValue: '7.0',
+      inputUI: {
+        type: 'text',
+      },
+      tooltip: `Desired lra value. \\n Defaults to 7.0
+`,
+    },
+    {
+      name: 'tp',
+      type: 'string',
+      defaultValue: '-2.0',
+      inputUI: {
+        type: 'text',
+      },
+      tooltip: `Desired "tp" value. \\n Defaults to -2.0
+`,
+    },
+  ],
+});
+
+const parseJobName = (text) => {
+  const parts0 = text.split('.txt');
+  const parts1 = parts0[0].split('()');
+  return {
+    jobId: parts1[3],
+    start: Number(parts1[4]),
+  };
+};

+const getloudNormValues = async (response, file) => {
+  // eslint-disable-next-line import/no-unresolved
+  const axios = require('axios');
+  const serverUrl = `http://${process.env.serverIp}:${process.env.serverPort}`;
+  let loudNormValues = {};
+  try {
+    // wait for job report to be updated by server,
+    await new Promise((resolve) => setTimeout(resolve, 10000));

+    const logFilesReq = await axios.post(`${serverUrl}/api/v2/list-footprintId-reports`, {
+      data: {
+        footprintId: file.footprintId,
+      },
+    });

+    if (logFilesReq.status !== 200) {
+      throw new Error('Failed to get log files, please rerun');
+    }

-// eslint-disable-next-line no-unused-vars
-const plugin = (file, librarySettings, inputs, otherArguments) => {
+    let logFiles = logFilesReq.data;

-  const lib = require('../methods/lib')(); const fs = require('fs');
-  // eslint-disable-next-line no-unused-vars,no-param-reassign
-  inputs = lib.loadDefaultValues(inputs, details);

-  //Must return this object at some point
-  var response = {
-    processFile: false,
-    preset: '',
-    container: '.mkv',
-    handBrakeMode: false,
-    FFmpegMode: true,
-    reQueueAfter: true,
-    infoLog: '',
+    logFiles = logFiles.sort((a, b) => {
+      const joba = parseJobName(a);
+      const jobb = parseJobName(b);
+      return jobb.start - joba.start;
+    });

-  }
+    const latestJob = logFiles[0];

-  response.infoLog += ""
-  //grab the current file being processed and make an out file for the ffmpeg log
-  let currentfilename = file._id;
-  logOutFile = currentfilename.substr(0, currentfilename.lastIndexOf(".")) + ".out"
-  console.log("Log out file: " + logOutFile)
-  let probeData;
-  if (file && file.ffProbeData && file.ffProbeData.format) {
-    probeData = file.ffProbeData;
-  } else {
-    //get an updated version of the file for checking metadata
-    probeData = JSON.parse(require("child_process").execSync(`ffprobe -v quiet -print_format json -show_format -show_streams "${currentfilename}"`).toString())
-  }
+    const reportReq = await axios.post(`${serverUrl}/api/v2/read-job-file`, {
+      data: {
+        footprintId: file.footprintId,
+        jobId: parseJobName(latestJob).jobId,
+        jobFileId: latestJob,
+      },
+    });

-  //setup required varibles
-  var loudNorm_i = -23.0
-  var lra = 7.0
-  var tp = -2.0
-  var offset = 0.0
+    if (reportReq.status !== 200) {
+      throw new Error('Failed to get read latest log file, please rerun');
+    }

-  //create local varibles for inputs
-  if (inputs !== undefined) {
-    if (inputs.i !== undefined) loudNorm_i = inputs.i
-    if (inputs.lra !== undefined) lra = inputs.lra
-    if (inputs.tp !== undefined) tp = inputs.tp
-    if (inputs.offset !== undefined) offset = inputs.offset
-  }

+    const report = reportReq.data.text;
+    const lines = report.split('\n');

-  //check for previous pass tags
+    let idx = -1;

-  if (typeof probeData.format === "undefined" || typeof probeData.format.tags.NORMALISATIONSTAGE === "undefined" || probeData.format.tags.NORMALISATIONSTAGE === "" || file.forceProcessing === true) {

-    //no metadata found first pass is required
-    console.log("Searching for audio normailisation values")
-    response.infoLog += "Searching for required normalisation values. \n"
-    var loudNormInfo = "";
+    // get last index of Parsed_loudnorm
+    lines.forEach((line, i) => {
+      if (line.includes('Parsed_loudnorm')) {
+        idx = i;
+      }
+    });

-    //Do the first pass, output the log to the out file and use a secondary output for an unchanged file to allow Tdarr to track, Set metadata stage
-    response.preset = `<io>-af loudnorm=I=${loudNorm_i}:LRA=${lra}:TP=${tp}:print_format=json -f null NUL -map 0 -c copy -metadata NORMALISATIONSTAGE="FirstPassComplete" 2>"${logOutFile}"`
-    response.container = '.mkv'
-    response.handBrakeMode = false
-    response.FFmpegMode = true
-    response.reQueueAfter = true;
-    response.processFile = true
-    response.infoLog += "Normalisation first pass processing \n"
-    return response
-  }
-  if (probeData.format.tags.NORMALISATIONSTAGE === "FirstPassComplete") {
+    if (idx === -1) {
+      throw new Error('Failed to find loudnorm in report, please rerun');
+    }

-    //ensure previous out file exists
-    if (fs.existsSync(logOutFile)) {
-      secondPass = true;
-      loudNormInfo = fs.readFileSync(logOutFile).toString();

-      //grab the json from the out file
-      var startIndex = loudNormInfo.lastIndexOf("{");
-      var endIndex = loudNormInfo.lastIndexOf("}");

-      var outValues = loudNormInfo.toString().substr(startIndex, endIndex)

-      response.infoLog += "Loudnorm first pass values returned: \n" + outValues

-      //parse the JSON
-      var loudNormValues = JSON.parse(outValues)

-      //use parsed values in second pass
-      response.preset = `-y<io>-af loudnorm=print_format=summary:linear=true:I=${loudNorm_i}:LRA=${lra}:TP=${tp}:measured_i=${loudNormValues.input_i}:measured_lra=${loudNormValues.input_lra}:measured_tp=${loudNormValues.input_tp}:measured_thresh=${loudNormValues.input_thresh}:offset=${loudNormValues.target_offset} -c:a aac -b:a 192k -c:s copy -c:v copy -metadata NORMALISATIONSTAGE="Complete"`
-      response.container = '.mkv'
-      response.handBrakeMode = false
-      response.FFmpegMode = true
-      response.reQueueAfter = true;
-      response.processFile = true
-      response.infoLog += "Normalisation pass processing \n"
-      return response
-    } else {
-      response.infoLog += "Previous log output file is missing. Please rerun with force processing to regenerate."
-      response.processFile = false;
-      return response

-    }
-  }
-  if(probeData.format.tags.NORMALISATIONSTAGE === "Complete"){
-    response.processFile = false;
-    response.infoLog += "File is already marked as normalised \n"
-    return response
-  } else {
-    //what is this tag?
-    response.processFile = false;
-    response.infoLog += "Unknown normalisation stage tag: \n" + probeData.format.tags.NORMALISATIONSTAGE
-    return response
-  }

+    const loudNormDataArr = [];

-}
-module.exports.onTranscodeSuccess = function onTranscodeSuccess(
-  file,
-  librarySettings,
-  inputs
-) {
-  const fs = require('fs');
-  var response = {
-    file,
-    removeFromDB: false,
-    updateDB: true,
-  };
-  if (secondPass) {
-    response.infoLog += "Audio normalisation complete. \n"
-    //remove old out file
-    if (fs.existsSync(logOutFile)) {
-      fs.unlinkSync(logOutFile);
-    }
-    return response;
-  }
-  else {
-    response.infoLog += "Audio normalisation first pass complete. \n"
-    return response;
-  }
-};
+    for (let i = (idx + 1); i < lines.length; i += 1) {
+      const lineArr = lines[i].split(' ');
+      lineArr.shift();
+      loudNormDataArr.push(lineArr.join(' '));
+      if (lines[i].includes('}')) {
+        break;
+      }
+    }

-module.exports.onTranscodeError = function onTranscodeError(
-  file,
-  librarySettings,
-  inputs
-) {
-  console.log("Failed to normalise audio");

-  //Optional response if you need to modify database
-  var response = {
-    file,
-    removeFromDB: false,
-    updateDB: false,
-  };
+    loudNormValues = JSON.parse(loudNormDataArr.join(''));
+  } catch (err) {
+    response.infoLog += err;
+    throw new Error(err);
+  }

-  return response;
+  return loudNormValues;
 };

+// eslint-disable-next-line no-unused-vars
+const plugin = async (file, librarySettings, inputs, otherArguments) => {
+  const lib = require('../methods/lib')(); const fs = require('fs');
+  // eslint-disable-next-line no-unused-vars,no-param-reassign
+  inputs = lib.loadDefaultValues(inputs, details);

+  // Must return this object at some point
+  const response = {
+    processFile: false,
+    preset: '',
+    container: `.${file.container}`,
+    handBrakeMode: false,
+    FFmpegMode: false,
+    infoLog: '',
+    custom: {
+      args: [],
+      cliPath: '',
+      outputPath: ',',
+    },
+  };

+  response.infoLog += '';

+  const probeData = file.ffProbeData;

+  // setup required varibles
+  let loudNorm_i = -23.0;
+  let lra = 7.0;
+  let tp = -2.0;

+  // create local varibles for inputs
+  if (inputs !== undefined) {
+    if (inputs.i !== undefined) loudNorm_i = inputs.i;
+    if (inputs.lra !== undefined) lra = inputs.lra;
+    if (inputs.tp !== undefined) tp = inputs.tp;
+  }

+  // check for previous pass tags
+  if (!probeData?.format?.tags?.NORMALISATIONSTAGE) {
+    // no metadata found first pass is required
+    response.infoLog += 'Searching for required normalisation values. \n';
+    response.infoLog += 'Normalisation first pass processing \n';

+    // Do the first pass, output the log to the out file and use a secondary output for an unchanged file to
+    // allow Tdarr to track, Set metadata stage
+    response.preset = `<io>-af loudnorm=I=${loudNorm_i}:LRA=${lra}:TP=${tp}:print_format=json`
+      + ' -f null NUL -map 0 -c copy -metadata NORMALISATIONSTAGE=FirstPassComplete';
+    response.FFmpegMode = true;
+    response.processFile = true;
+    return response;
+  } if (
+    probeData.format.tags.NORMALISATIONSTAGE === 'FirstPassComplete'
+  ) {
+    const loudNormValues = await getloudNormValues(response, file);

+    response.infoLog += `Loudnorm first pass values returned: \n${JSON.stringify(loudNormValues)}`;

+    // use parsed values in second pass
+    response.preset = `-y<io>-af loudnorm=print_format=summary:linear=true:I=${loudNorm_i}:LRA=${lra}:TP=${tp}:`
+      + `measured_i=${loudNormValues.input_i}:`
+      + `measured_lra=${loudNormValues.input_lra}:`
+      + `measured_tp=${loudNormValues.input_tp}:`
+      + `measured_thresh=${loudNormValues.input_thresh}:offset=${loudNormValues.target_offset} `
+      + '-c:a aac -b:a 192k -c:s copy -c:v copy -metadata NORMALISATIONSTAGE=Complete';
+    response.FFmpegMode = true;
+    response.processFile = true;
+    response.infoLog += 'Normalisation pass processing \n';
+    return response;
+  } if (probeData.format.tags.NORMALISATIONSTAGE === 'Complete') {
+    response.infoLog += 'File is already marked as normalised \n';
+    return response;
+  }
+  // what is this tag?
+  response.infoLog += `Unknown normalisation stage tag: \n${probeData.format.tags.NORMALISATIONSTAGE}`;
+  return response;
+};

 module.exports.details = details;
 module.exports.plugin = plugin;