Compare commits
17 Commits
c60aa40666
...
fix/valida
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
13d1f83a85 | ||
|
|
f96476bd23 | ||
|
|
12fce6c549 | ||
|
|
814ee3d763 | ||
|
|
31928fd124 | ||
|
|
7e40ea0797 | ||
|
|
dec5f63b21 | ||
|
|
fe2631f29b | ||
|
|
bf39b9df42 | ||
|
|
f95ef43f05 | ||
|
|
89aec9a7eb | ||
|
|
135dfc31d3 | ||
|
|
96fdf2a27a | ||
|
|
c698e5a1bc | ||
|
|
089f4c5129 | ||
|
|
82094d8d09 | ||
|
|
27a6d3c709 |
@@ -2,10 +2,11 @@ const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
class AssetLoader {
|
||||
constructor() {
|
||||
constructor(maxCacheSize = 100) {
|
||||
this.relPath = './'
|
||||
this.baseDir = path.resolve(__dirname, this.relPath);
|
||||
this.cache = new Map(); // Cache loaded JSON files for better performance
|
||||
this.cache = new Map();
|
||||
this.maxCacheSize = maxCacheSize;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -25,7 +26,11 @@ class AssetLoader {
|
||||
*/
|
||||
loadAsset(datasetType, assetId) {
|
||||
//const cacheKey = `${datasetType}/${assetId}`;
|
||||
const cacheKey = `${assetId}`;
|
||||
const normalizedAssetId = String(assetId || '').trim();
|
||||
if (!normalizedAssetId) {
|
||||
return null;
|
||||
}
|
||||
const cacheKey = normalizedAssetId.toLowerCase();
|
||||
|
||||
|
||||
// Check cache first
|
||||
@@ -34,11 +39,11 @@ class AssetLoader {
|
||||
}
|
||||
|
||||
try {
|
||||
const filePath = path.join(this.baseDir, `${assetId}.json`);
|
||||
const filePath = this._resolveAssetPath(normalizedAssetId);
|
||||
|
||||
// Check if file exists
|
||||
if (!fs.existsSync(filePath)) {
|
||||
console.warn(`Asset not found: ${filePath}`);
|
||||
if (!filePath || !fs.existsSync(filePath)) {
|
||||
console.warn(`Asset not found for id '${normalizedAssetId}' in ${this.baseDir}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -46,7 +51,11 @@ class AssetLoader {
|
||||
const rawData = fs.readFileSync(filePath, 'utf8');
|
||||
const assetData = JSON.parse(rawData);
|
||||
|
||||
// Cache the result
|
||||
// Cache the result (evict oldest if at capacity)
|
||||
if (this.cache.size >= this.maxCacheSize) {
|
||||
const oldestKey = this.cache.keys().next().value;
|
||||
this.cache.delete(oldestKey);
|
||||
}
|
||||
this.cache.set(cacheKey, assetData);
|
||||
|
||||
return assetData;
|
||||
@@ -56,6 +65,21 @@ class AssetLoader {
|
||||
}
|
||||
}
|
||||
|
||||
_resolveAssetPath(assetId) {
|
||||
const exactPath = path.join(this.baseDir, `${assetId}.json`);
|
||||
if (fs.existsSync(exactPath)) {
|
||||
return exactPath;
|
||||
}
|
||||
|
||||
const target = `${assetId}.json`.toLowerCase();
|
||||
const files = fs.readdirSync(this.baseDir);
|
||||
const matched = files.find((file) => file.toLowerCase() === target);
|
||||
if (!matched) {
|
||||
return null;
|
||||
}
|
||||
return path.join(this.baseDir, matched);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all available assets in a dataset
|
||||
* @param {string} datasetType - The dataset folder name
|
||||
|
||||
@@ -47,6 +47,69 @@
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "Endress+Hauser",
|
||||
"name": "Endress+Hauser",
|
||||
"types": [
|
||||
{
|
||||
"id": "flow",
|
||||
"name": "Flow",
|
||||
"models": [
|
||||
{ "id": "Promag-W400", "name": "Promag W400", "units": ["m3/h", "l/s", "gpm"] },
|
||||
{ "id": "Promag-W300", "name": "Promag W300", "units": ["m3/h", "l/s", "gpm"] }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "pressure",
|
||||
"name": "Pressure",
|
||||
"models": [
|
||||
{ "id": "Cerabar-PMC51", "name": "Cerabar PMC51", "units": ["mbar", "bar", "psi"] },
|
||||
{ "id": "Cerabar-PMC71", "name": "Cerabar PMC71", "units": ["mbar", "bar", "psi"] }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "level",
|
||||
"name": "Level",
|
||||
"models": [
|
||||
{ "id": "Levelflex-FMP50", "name": "Levelflex FMP50", "units": ["m", "mm", "ft"] }
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "Hach",
|
||||
"name": "Hach",
|
||||
"types": [
|
||||
{
|
||||
"id": "dissolved-oxygen",
|
||||
"name": "Dissolved Oxygen",
|
||||
"models": [
|
||||
{ "id": "LDO2", "name": "LDO2", "units": ["mg/L", "ppm"] }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "ammonium",
|
||||
"name": "Ammonium",
|
||||
"models": [
|
||||
{ "id": "Amtax-sc", "name": "Amtax sc", "units": ["mg/L"] }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "nitrate",
|
||||
"name": "Nitrate",
|
||||
"models": [
|
||||
{ "id": "Nitratax-sc", "name": "Nitratax sc", "units": ["mg/L"] }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "tss",
|
||||
"name": "TSS (Suspended Solids)",
|
||||
"models": [
|
||||
{ "id": "Solitax-sc", "name": "Solitax sc", "units": ["mg/L", "g/L"] }
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
13
index.js
13
index.js
@@ -29,8 +29,11 @@ const { state } = require('./src/state/index.js');
|
||||
const convert = require('./src/convert/index.js');
|
||||
const MenuManager = require('./src/menu/index.js');
|
||||
const { predict, interpolation } = require('./src/predict/index.js');
|
||||
const { PIDController, CascadePIDController, createPidController, createCascadePidController } = require('./src/pid/index.js');
|
||||
const { loadCurve } = require('./datasets/assetData/curves/index.js'); //deprecated replace with load model data
|
||||
const { loadModel } = require('./datasets/assetData/modelData/index.js');
|
||||
const { POSITIONS, POSITION_VALUES, isValidPosition } = require('./src/constants/positions.js');
|
||||
const Fysics = require('./src/convert/fysics.js');
|
||||
|
||||
// Export everything
|
||||
module.exports = {
|
||||
@@ -49,8 +52,16 @@ module.exports = {
|
||||
coolprop,
|
||||
convert,
|
||||
MenuManager,
|
||||
PIDController,
|
||||
CascadePIDController,
|
||||
createPidController,
|
||||
createCascadePidController,
|
||||
childRegistrationUtils,
|
||||
loadCurve, //deprecated replace with loadModel
|
||||
loadModel,
|
||||
gravity
|
||||
gravity,
|
||||
POSITIONS,
|
||||
POSITION_VALUES,
|
||||
isValidPosition,
|
||||
Fysics
|
||||
};
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
"./helper": "./src/helper/index.js",
|
||||
"./state": "./src/state/index.js",
|
||||
"./predict": "./src/predict/index.js",
|
||||
"./pid": "./src/pid/index.js",
|
||||
"./nrmse": "./src/nrmse/index.js",
|
||||
"./outliers": "./src/outliers/index.js"
|
||||
},
|
||||
|
||||
85
src/configs/baseConfig.json
Normal file
85
src/configs/baseConfig.json
Normal file
@@ -0,0 +1,85 @@
|
||||
{
|
||||
"general": {
|
||||
"name": {
|
||||
"default": "Unnamed Node",
|
||||
"rules": { "type": "string", "description": "Human-readable name for this node." }
|
||||
},
|
||||
"id": {
|
||||
"default": null,
|
||||
"rules": { "type": "string", "nullable": true, "description": "Unique node identifier (set at runtime)." }
|
||||
},
|
||||
"unit": {
|
||||
"default": "unitless",
|
||||
"rules": { "type": "string", "description": "Default measurement unit." }
|
||||
},
|
||||
"logging": {
|
||||
"logLevel": {
|
||||
"default": "info",
|
||||
"rules": {
|
||||
"type": "enum",
|
||||
"values": [
|
||||
{ "value": "debug", "description": "Verbose diagnostic messages." },
|
||||
{ "value": "info", "description": "General informational messages." },
|
||||
{ "value": "warn", "description": "Warning messages." },
|
||||
{ "value": "error", "description": "Error level messages only." }
|
||||
]
|
||||
}
|
||||
},
|
||||
"enabled": {
|
||||
"default": true,
|
||||
"rules": { "type": "boolean", "description": "Enable or disable logging." }
|
||||
}
|
||||
}
|
||||
},
|
||||
"functionality": {
|
||||
"softwareType": {
|
||||
"default": "unknown",
|
||||
"rules": { "type": "string", "description": "Software type identifier for parent-child registration." }
|
||||
},
|
||||
"role": {
|
||||
"default": "Generic EVOLV node",
|
||||
"rules": { "type": "string", "description": "Describes the functional role of this node." }
|
||||
},
|
||||
"positionVsParent": {
|
||||
"default": "atEquipment",
|
||||
"rules": {
|
||||
"type": "enum",
|
||||
"values": [
|
||||
{ "value": "upstream", "description": "Upstream of parent equipment." },
|
||||
{ "value": "atEquipment", "description": "At equipment level." },
|
||||
{ "value": "downstream", "description": "Downstream of parent equipment." }
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"asset": {
|
||||
"uuid": {
|
||||
"default": null,
|
||||
"rules": { "type": "string", "nullable": true, "description": "Asset UUID from asset management system." }
|
||||
},
|
||||
"tagCode": {
|
||||
"default": null,
|
||||
"rules": { "type": "string", "nullable": true, "description": "Asset tag code." }
|
||||
},
|
||||
"supplier": {
|
||||
"default": "Unknown",
|
||||
"rules": { "type": "string", "description": "Equipment supplier." }
|
||||
},
|
||||
"category": {
|
||||
"default": "sensor",
|
||||
"rules": { "type": "string", "description": "Asset category." }
|
||||
},
|
||||
"type": {
|
||||
"default": "Unknown",
|
||||
"rules": { "type": "string", "description": "Asset type." }
|
||||
},
|
||||
"model": {
|
||||
"default": "Unknown",
|
||||
"rules": { "type": "string", "description": "Equipment model." }
|
||||
},
|
||||
"unit": {
|
||||
"default": "unitless",
|
||||
"rules": { "type": "string", "description": "Asset measurement unit." }
|
||||
}
|
||||
}
|
||||
}
|
||||
111
src/configs/diffuser.json
Normal file
111
src/configs/diffuser.json
Normal file
@@ -0,0 +1,111 @@
|
||||
{
|
||||
"general": {
|
||||
"name": {
|
||||
"default": "Diffuser",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "A human-readable name for this diffuser zone."
|
||||
}
|
||||
},
|
||||
"id": {
|
||||
"default": null,
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"nullable": true,
|
||||
"description": "Unique identifier for this diffuser node."
|
||||
}
|
||||
},
|
||||
"unit": {
|
||||
"default": "Nm3/h",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Default airflow unit for this diffuser."
|
||||
}
|
||||
},
|
||||
"logging": {
|
||||
"logLevel": {
|
||||
"default": "info",
|
||||
"rules": {
|
||||
"type": "enum",
|
||||
"values": [
|
||||
{ "value": "debug", "description": "Verbose diagnostic messages." },
|
||||
{ "value": "info", "description": "General informational messages." },
|
||||
{ "value": "warn", "description": "Warning messages." },
|
||||
{ "value": "error", "description": "Error level messages only." }
|
||||
]
|
||||
}
|
||||
},
|
||||
"enabled": {
|
||||
"default": true,
|
||||
"rules": {
|
||||
"type": "boolean",
|
||||
"description": "Enable or disable logging."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"functionality": {
|
||||
"softwareType": {
|
||||
"default": "diffuser",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Software type identifier for parent-child registration."
|
||||
}
|
||||
},
|
||||
"role": {
|
||||
"default": "Aeration diffuser",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Describes the functional role of this node."
|
||||
}
|
||||
},
|
||||
"positionVsParent": {
|
||||
"default": "atEquipment",
|
||||
"rules": {
|
||||
"type": "enum",
|
||||
"values": [
|
||||
{ "value": "upstream", "description": "Upstream of parent equipment." },
|
||||
{ "value": "atEquipment", "description": "At equipment level." },
|
||||
{ "value": "downstream", "description": "Downstream of parent equipment." }
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"diffuser": {
|
||||
"number": {
|
||||
"default": 1,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"description": "Sequential diffuser zone number."
|
||||
}
|
||||
},
|
||||
"elements": {
|
||||
"default": 1,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"description": "Number of diffuser elements in the zone."
|
||||
}
|
||||
},
|
||||
"density": {
|
||||
"default": 2.4,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"description": "Installed diffuser density per square meter."
|
||||
}
|
||||
},
|
||||
"waterHeight": {
|
||||
"default": 0,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"description": "Water column height above the diffuser."
|
||||
}
|
||||
},
|
||||
"alfaFactor": {
|
||||
"default": 0.7,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"description": "Alpha factor used for oxygen transfer correction."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,22 +1,52 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
/**
|
||||
* Current config version. All config JSONs should declare this version.
|
||||
* Bump this when the config schema changes.
|
||||
*/
|
||||
const CURRENT_CONFIG_VERSION = '1.0.0';
|
||||
|
||||
class ConfigManager {
|
||||
constructor(relPath = '.') {
|
||||
this.configDir = path.resolve(__dirname, relPath);
|
||||
|
||||
/**
|
||||
* Migration functions keyed by "fromVersion->toVersion".
|
||||
* Each function receives a config object and returns the migrated config.
|
||||
*
|
||||
* Example:
|
||||
* this.migrations['1.0.0->1.1.0'] = (config) => {
|
||||
* config.newSection = { enabled: false };
|
||||
* return config;
|
||||
* };
|
||||
*/
|
||||
this.migrations = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a configuration file by name
|
||||
* Load a configuration file by name.
|
||||
* Automatically checks the config version and migrates if needed.
|
||||
* @param {string} configName - Name of the config file (without .json extension)
|
||||
* @returns {Object} Parsed configuration object
|
||||
* @returns {Object} Parsed configuration object (migrated to current version if necessary)
|
||||
*/
|
||||
getConfig(configName) {
|
||||
try {
|
||||
const configPath = path.resolve(this.configDir, `${configName}.json`);
|
||||
const configData = fs.readFileSync(configPath, 'utf8');
|
||||
return JSON.parse(configData);
|
||||
let config = JSON.parse(configData);
|
||||
|
||||
// Auto-migrate if version is behind current
|
||||
const configVersion = config.version || '0.0.0';
|
||||
if (configVersion !== CURRENT_CONFIG_VERSION) {
|
||||
config = this.migrateConfig(config, configVersion, CURRENT_CONFIG_VERSION);
|
||||
}
|
||||
|
||||
return config;
|
||||
} catch (error) {
|
||||
if (error.message && error.message.startsWith('Failed to load config')) {
|
||||
throw error;
|
||||
}
|
||||
throw new Error(`Failed to load config '${configName}': ${error.message}`);
|
||||
}
|
||||
}
|
||||
@@ -47,6 +77,94 @@ class ConfigManager {
|
||||
return fs.existsSync(configPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a runtime config by merging base schema + node schema + UI overrides.
|
||||
* Eliminates the need for each nodeClass to manually construct general/asset/functionality sections.
|
||||
*
|
||||
* @param {string} nodeName - Node type name (e.g., 'valve', 'measurement')
|
||||
* @param {object} uiConfig - Raw config from Node-RED UI
|
||||
* @param {string} nodeId - Node-RED node ID (from node.id)
|
||||
* @param {object} [domainConfig={}] - Domain-specific config sections (e.g., { scaling: {...}, smoothing: {...} })
|
||||
* @returns {object} Merged runtime config
|
||||
*
|
||||
* @example
|
||||
* const cfgMgr = new ConfigManager();
|
||||
* const config = cfgMgr.buildConfig('measurement', uiConfig, node.id, {
|
||||
* scaling: { enabled: uiConfig.scaling, inputMin: uiConfig.i_min, ... },
|
||||
* smoothing: { smoothWindow: uiConfig.count, ... }
|
||||
* });
|
||||
*/
|
||||
buildConfig(nodeName, uiConfig, nodeId, domainConfig = {}) {
|
||||
// Build base sections from UI config (common to ALL nodes)
|
||||
const config = {
|
||||
general: {
|
||||
name: uiConfig.name || nodeName,
|
||||
id: nodeId,
|
||||
unit: uiConfig.unit || 'unitless',
|
||||
logging: {
|
||||
enabled: uiConfig.enableLog !== undefined ? uiConfig.enableLog : true,
|
||||
logLevel: uiConfig.logLevel || 'info'
|
||||
}
|
||||
},
|
||||
functionality: {
|
||||
softwareType: nodeName.toLowerCase(),
|
||||
positionVsParent: uiConfig.positionVsParent || 'atEquipment',
|
||||
distance: uiConfig.hasDistance ? uiConfig.distance : undefined
|
||||
},
|
||||
output: {
|
||||
process: uiConfig.processOutputFormat || 'process',
|
||||
dbase: uiConfig.dbaseOutputFormat || 'influxdb'
|
||||
}
|
||||
};
|
||||
|
||||
// Add asset section if UI provides asset fields
|
||||
if (uiConfig.supplier || uiConfig.category || uiConfig.assetType || uiConfig.model) {
|
||||
config.asset = {
|
||||
uuid: uiConfig.uuid || uiConfig.assetUuid || null,
|
||||
tagCode: uiConfig.tagCode || uiConfig.assetTagCode || null,
|
||||
supplier: uiConfig.supplier || 'Unknown',
|
||||
category: uiConfig.category || 'sensor',
|
||||
type: uiConfig.assetType || 'Unknown',
|
||||
model: uiConfig.model || 'Unknown',
|
||||
unit: uiConfig.unit || 'unitless'
|
||||
};
|
||||
}
|
||||
|
||||
// Merge domain-specific sections
|
||||
Object.assign(config, domainConfig);
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrate a config object from one version to another by applying
|
||||
* registered migration functions in sequence.
|
||||
* @param {object} config - The config object to migrate
|
||||
* @param {string} fromVersion - Current version of the config
|
||||
* @param {string} toVersion - Target version
|
||||
* @returns {object} Migrated config with updated version field
|
||||
*/
|
||||
migrateConfig(config, fromVersion, toVersion) {
|
||||
const migrationKey = `${fromVersion}->${toVersion}`;
|
||||
const migrationFn = this.migrations[migrationKey];
|
||||
|
||||
if (migrationFn) {
|
||||
config = migrationFn(config);
|
||||
}
|
||||
|
||||
// Stamp the current version so it won't re-migrate
|
||||
config.version = toVersion;
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the base config schema (shared across all nodes).
|
||||
* @returns {object} Base config schema
|
||||
*/
|
||||
getBaseConfig() {
|
||||
return this.getConfig('baseConfig');
|
||||
}
|
||||
|
||||
createEndpoint(nodeName) {
|
||||
try {
|
||||
// Load the config for this node
|
||||
|
||||
@@ -58,7 +58,7 @@
|
||||
},
|
||||
"functionality": {
|
||||
"softwareType": {
|
||||
"default": "machineGroup",
|
||||
"default": "machinegroupcontrol",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Logical name identifying the software type."
|
||||
|
||||
@@ -59,7 +59,7 @@
|
||||
},
|
||||
"functionality": {
|
||||
"softwareType": {
|
||||
"default": "pumpingStation",
|
||||
"default": "pumpingstation",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Specified software type used to locate the proper default configuration."
|
||||
@@ -93,6 +93,14 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"distance": {
|
||||
"default": null,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"nullable": true,
|
||||
"description": "Optional distance to parent asset for registration metadata."
|
||||
}
|
||||
},
|
||||
"tickIntervalMs": {
|
||||
"default": 1000,
|
||||
"rules": {
|
||||
@@ -150,7 +158,7 @@
|
||||
}
|
||||
},
|
||||
"type": {
|
||||
"default": "pumpingStation",
|
||||
"default": "pumpingstation",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Specific asset type used to identify this configuration."
|
||||
@@ -316,6 +324,13 @@
|
||||
"description": "Basis for minimum height check: inlet or outlet."
|
||||
}
|
||||
},
|
||||
"basinBottomRef": {
|
||||
"default": 0,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"description": "Absolute elevation reference of basin bottom."
|
||||
}
|
||||
},
|
||||
"staticHead": {
|
||||
"default": 12,
|
||||
"rules": {
|
||||
@@ -463,6 +478,76 @@
|
||||
}
|
||||
},
|
||||
"flowBased": {
|
||||
"flowSetpoint": {
|
||||
"default": 0,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0,
|
||||
"description": "Target outflow setpoint used by flow-based control (m3/h)."
|
||||
}
|
||||
},
|
||||
"flowDeadband": {
|
||||
"default": 0,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0,
|
||||
"description": "Allowed deadband around the outflow setpoint before corrective actions are taken (m3/h)."
|
||||
}
|
||||
},
|
||||
"pid": {
|
||||
"default": {},
|
||||
"rules": {
|
||||
"type": "object",
|
||||
"schema": {
|
||||
"kp": {
|
||||
"default": 1.5,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"description": "Proportional gain for flow-based PID control."
|
||||
}
|
||||
},
|
||||
"ki": {
|
||||
"default": 0.05,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"description": "Integral gain for flow-based PID control."
|
||||
}
|
||||
},
|
||||
"kd": {
|
||||
"default": 0.01,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"description": "Derivative gain for flow-based PID control."
|
||||
}
|
||||
},
|
||||
"derivativeFilter": {
|
||||
"default": 0.2,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0,
|
||||
"max": 1,
|
||||
"description": "Derivative filter coefficient (0..1)."
|
||||
}
|
||||
},
|
||||
"rateUp": {
|
||||
"default": 30,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0,
|
||||
"description": "Maximum controller output increase rate (%/s)."
|
||||
}
|
||||
},
|
||||
"rateDown": {
|
||||
"default": 40,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0,
|
||||
"description": "Maximum controller output decrease rate (%/s)."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"equalizationTargetPercent": {
|
||||
"default": 60,
|
||||
"rules": {
|
||||
|
||||
202
src/configs/reactor.json
Normal file
202
src/configs/reactor.json
Normal file
@@ -0,0 +1,202 @@
|
||||
{
|
||||
"general": {
|
||||
"name": {
|
||||
"default": "Reactor",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "A human-readable name for this reactor."
|
||||
}
|
||||
},
|
||||
"id": {
|
||||
"default": null,
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"nullable": true,
|
||||
"description": "Unique identifier for this reactor node."
|
||||
}
|
||||
},
|
||||
"unit": {
|
||||
"default": null,
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"nullable": true,
|
||||
"description": "Default measurement unit."
|
||||
}
|
||||
},
|
||||
"logging": {
|
||||
"logLevel": {
|
||||
"default": "info",
|
||||
"rules": {
|
||||
"type": "enum",
|
||||
"values": [
|
||||
{ "value": "debug", "description": "Verbose diagnostic messages." },
|
||||
{ "value": "info", "description": "General informational messages." },
|
||||
{ "value": "warn", "description": "Warning messages." },
|
||||
{ "value": "error", "description": "Error level messages only." }
|
||||
]
|
||||
}
|
||||
},
|
||||
"enabled": {
|
||||
"default": true,
|
||||
"rules": {
|
||||
"type": "boolean",
|
||||
"description": "Enable or disable logging."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"functionality": {
|
||||
"softwareType": {
|
||||
"default": "reactor",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Software type identifier for parent-child registration."
|
||||
}
|
||||
},
|
||||
"role": {
|
||||
"default": "Biological reactor for wastewater treatment",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Describes the functional role of this node."
|
||||
}
|
||||
},
|
||||
"positionVsParent": {
|
||||
"default": "atEquipment",
|
||||
"rules": {
|
||||
"type": "enum",
|
||||
"values": [
|
||||
{ "value": "upstream", "description": "Upstream of parent equipment." },
|
||||
{ "value": "atEquipment", "description": "At equipment level." },
|
||||
{ "value": "downstream", "description": "Downstream of parent equipment." }
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"reactor": {
|
||||
"reactor_type": {
|
||||
"default": "CSTR",
|
||||
"rules": {
|
||||
"type": "enum",
|
||||
"values": [
|
||||
{ "value": "CSTR", "description": "Continuous Stirred Tank Reactor - fully mixed." },
|
||||
{ "value": "PFR", "description": "Plug Flow Reactor - spatial gradient along length." }
|
||||
]
|
||||
}
|
||||
},
|
||||
"volume": {
|
||||
"default": 1000,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0,
|
||||
"unit": "m3",
|
||||
"description": "Reactor volume in cubic meters."
|
||||
}
|
||||
},
|
||||
"length": {
|
||||
"default": 10,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0,
|
||||
"unit": "m",
|
||||
"description": "Reactor length (relevant for PFR spatial discretization)."
|
||||
}
|
||||
},
|
||||
"resolution_L": {
|
||||
"default": 10,
|
||||
"rules": {
|
||||
"type": "integer",
|
||||
"min": 1,
|
||||
"description": "Number of spatial segments for PFR discretization."
|
||||
}
|
||||
},
|
||||
"alpha": {
|
||||
"default": 0.5,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0,
|
||||
"max": 1,
|
||||
"description": "Dispersion coefficient alpha (0 = plug flow, 1 = fully mixed)."
|
||||
}
|
||||
},
|
||||
"n_inlets": {
|
||||
"default": 1,
|
||||
"rules": {
|
||||
"type": "integer",
|
||||
"min": 1,
|
||||
"description": "Number of inlet points along the reactor."
|
||||
}
|
||||
},
|
||||
"kla": {
|
||||
"default": 0,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0,
|
||||
"unit": "1/h",
|
||||
"description": "Oxygen mass transfer coefficient (KLa)."
|
||||
}
|
||||
},
|
||||
"timeStep": {
|
||||
"default": 0.001,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0.0001,
|
||||
"unit": "h",
|
||||
"description": "Integration time step for the reactor model."
|
||||
}
|
||||
}
|
||||
},
|
||||
"initialState": {
|
||||
"S_O": {
|
||||
"default": 0,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial dissolved oxygen concentration." }
|
||||
},
|
||||
"S_I": {
|
||||
"default": 30,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial inert soluble COD." }
|
||||
},
|
||||
"S_S": {
|
||||
"default": 70,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial readily biodegradable substrate." }
|
||||
},
|
||||
"S_NH": {
|
||||
"default": 25,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial ammonium nitrogen." }
|
||||
},
|
||||
"S_N2": {
|
||||
"default": 0,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial dinitrogen (N2)." }
|
||||
},
|
||||
"S_NO": {
|
||||
"default": 0,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial nitrate and nitrite nitrogen." }
|
||||
},
|
||||
"S_HCO": {
|
||||
"default": 5,
|
||||
"rules": { "type": "number", "unit": "mmol/L", "description": "Initial alkalinity (bicarbonate)." }
|
||||
},
|
||||
"X_I": {
|
||||
"default": 1000,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial inert particulate COD." }
|
||||
},
|
||||
"X_S": {
|
||||
"default": 100,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial slowly biodegradable substrate." }
|
||||
},
|
||||
"X_H": {
|
||||
"default": 2000,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial heterotrophic biomass." }
|
||||
},
|
||||
"X_STO": {
|
||||
"default": 0,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial stored COD in biomass." }
|
||||
},
|
||||
"X_A": {
|
||||
"default": 200,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial autotrophic biomass." }
|
||||
},
|
||||
"X_TS": {
|
||||
"default": 3500,
|
||||
"rules": { "type": "number", "unit": "mg/L", "description": "Initial total suspended solids." }
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -58,7 +58,7 @@
|
||||
},
|
||||
"functionality": {
|
||||
"softwareType": {
|
||||
"default": "machine",
|
||||
"default": "rotatingmachine",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Specified software type for this configuration."
|
||||
@@ -110,6 +110,14 @@
|
||||
"description": "Asset tag code which is a unique identifier for this asset. May be null if not assigned."
|
||||
}
|
||||
},
|
||||
"tagNumber": {
|
||||
"default": null,
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"nullable": true,
|
||||
"description": "Optional asset tag number for legacy integrations."
|
||||
}
|
||||
},
|
||||
"geoLocation": {
|
||||
"default": {},
|
||||
"rules": {
|
||||
@@ -175,6 +183,47 @@
|
||||
"description": "The unit of measurement for this asset (e.g., 'meters', 'seconds', 'unitless')."
|
||||
}
|
||||
},
|
||||
"curveUnits": {
|
||||
"default": {
|
||||
"pressure": "mbar",
|
||||
"flow": "m3/h",
|
||||
"power": "kW",
|
||||
"control": "%"
|
||||
},
|
||||
"rules": {
|
||||
"type": "object",
|
||||
"schema": {
|
||||
"pressure": {
|
||||
"default": "mbar",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Pressure unit used on the machine curve dimension axis."
|
||||
}
|
||||
},
|
||||
"flow": {
|
||||
"default": "m3/h",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Flow unit used in the machine curve output (nq.y)."
|
||||
}
|
||||
},
|
||||
"power": {
|
||||
"default": "kW",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Power unit used in the machine curve output (np.y)."
|
||||
}
|
||||
},
|
||||
"control": {
|
||||
"default": "%",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Control axis unit used in the curve x-dimension."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"accuracy": {
|
||||
"default": null,
|
||||
"rules": {
|
||||
|
||||
75
src/configs/settler.json
Normal file
75
src/configs/settler.json
Normal file
@@ -0,0 +1,75 @@
|
||||
{
|
||||
"general": {
|
||||
"name": {
|
||||
"default": "Settler",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "A human-readable name for this settler."
|
||||
}
|
||||
},
|
||||
"id": {
|
||||
"default": null,
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"nullable": true,
|
||||
"description": "Unique identifier for this settler node."
|
||||
}
|
||||
},
|
||||
"unit": {
|
||||
"default": null,
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"nullable": true,
|
||||
"description": "Default measurement unit."
|
||||
}
|
||||
},
|
||||
"logging": {
|
||||
"logLevel": {
|
||||
"default": "info",
|
||||
"rules": {
|
||||
"type": "enum",
|
||||
"values": [
|
||||
{ "value": "debug", "description": "Verbose diagnostic messages." },
|
||||
{ "value": "info", "description": "General informational messages." },
|
||||
{ "value": "warn", "description": "Warning messages." },
|
||||
{ "value": "error", "description": "Error level messages only." }
|
||||
]
|
||||
}
|
||||
},
|
||||
"enabled": {
|
||||
"default": true,
|
||||
"rules": {
|
||||
"type": "boolean",
|
||||
"description": "Enable or disable logging."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"functionality": {
|
||||
"softwareType": {
|
||||
"default": "settler",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Software type identifier for parent-child registration."
|
||||
}
|
||||
},
|
||||
"role": {
|
||||
"default": "Secondary settler for sludge separation",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Describes the functional role of this node."
|
||||
}
|
||||
},
|
||||
"positionVsParent": {
|
||||
"default": "downstream",
|
||||
"rules": {
|
||||
"type": "enum",
|
||||
"values": [
|
||||
{ "value": "upstream", "description": "Upstream of parent equipment." },
|
||||
{ "value": "atEquipment", "description": "At equipment level." },
|
||||
{ "value": "downstream", "description": "Downstream of parent equipment." }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -60,7 +60,7 @@
|
||||
},
|
||||
"functionality": {
|
||||
"softwareType": {
|
||||
"default": "valveGroupControl",
|
||||
"default": "valvegroupcontrol",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Specified software type for this configuration."
|
||||
|
||||
18
src/constants/positions.js
Normal file
18
src/constants/positions.js
Normal file
@@ -0,0 +1,18 @@
|
||||
/**
|
||||
* Canonical position constants for parent-child relationships.
|
||||
* Use these instead of hardcoded strings throughout the codebase.
|
||||
*/
|
||||
const POSITIONS = Object.freeze({
|
||||
UPSTREAM: 'upstream',
|
||||
DOWNSTREAM: 'downstream',
|
||||
AT_EQUIPMENT: 'atEquipment',
|
||||
DELTA: 'delta',
|
||||
});
|
||||
|
||||
const POSITION_VALUES = Object.freeze(Object.values(POSITIONS));
|
||||
|
||||
function isValidPosition(pos) {
|
||||
return POSITION_VALUES.includes(pos);
|
||||
}
|
||||
|
||||
module.exports = { POSITIONS, POSITION_VALUES, isValidPosition };
|
||||
@@ -1,5 +1,4 @@
|
||||
var metric
|
||||
, imperial;
|
||||
var metric;
|
||||
|
||||
metric = {
|
||||
ea: {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
var metric
|
||||
, imperial;
|
||||
var metric;
|
||||
|
||||
metric = {
|
||||
ppm: {
|
||||
|
||||
@@ -127,7 +127,7 @@ Converter.prototype.toBest = function(options) {
|
||||
if(!this.origin)
|
||||
throw new Error('.toBest must be called after .from');
|
||||
|
||||
var options = Object.assign({
|
||||
options = Object.assign({
|
||||
exclude: [],
|
||||
cutOffNumber: 1,
|
||||
}, options)
|
||||
@@ -249,7 +249,7 @@ Converter.prototype.list = function (measure) {
|
||||
Converter.prototype.throwUnsupportedUnitError = function (what) {
|
||||
var validUnits = [];
|
||||
|
||||
each(measures, function (systems, measure) {
|
||||
each(measures, function (systems, _measure) {
|
||||
each(systems, function (units, system) {
|
||||
if(system == '_anchors')
|
||||
return false;
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
* Available under MIT license <http://lodash.com/license>
|
||||
*/
|
||||
var isObject = require('./../lodash.isobject'),
|
||||
noop = require('./../lodash.noop'),
|
||||
reNative = require('./../lodash._renative');
|
||||
|
||||
/* Native method shortcuts for methods with the same name as other `lodash` methods */
|
||||
@@ -21,12 +20,12 @@ var nativeCreate = reNative.test(nativeCreate = Object.create) && nativeCreate;
|
||||
* @param {Object} prototype The object to inherit from.
|
||||
* @returns {Object} Returns the new object.
|
||||
*/
|
||||
function baseCreate(prototype, properties) {
|
||||
function baseCreate(prototype, _properties) { // eslint-disable-line no-func-assign
|
||||
return isObject(prototype) ? nativeCreate(prototype) : {};
|
||||
}
|
||||
// fallback for browsers without `Object.create`
|
||||
if (!nativeCreate) {
|
||||
baseCreate = (function() {
|
||||
baseCreate = (function() { // eslint-disable-line no-func-assign
|
||||
function Object() {}
|
||||
return function(prototype) {
|
||||
if (isObject(prototype)) {
|
||||
|
||||
@@ -47,7 +47,7 @@ function createWrapper(func, bitmask, partialArgs, partialRightArgs, thisArg, ar
|
||||
var isBind = bitmask & 1,
|
||||
isBindKey = bitmask & 2,
|
||||
isCurry = bitmask & 4,
|
||||
isCurryBound = bitmask & 8,
|
||||
/* isCurryBound = bitmask & 8, */
|
||||
isPartial = bitmask & 16,
|
||||
isPartialRight = bitmask & 32;
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ var defineProperty = (function() {
|
||||
var o = {},
|
||||
func = reNative.test(func = Object.defineProperty) && func,
|
||||
result = func(o, o, o) && func;
|
||||
} catch(e) { }
|
||||
} catch(e) { /* intentionally empty */ }
|
||||
return result;
|
||||
}());
|
||||
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
* Available under MIT license <http://lodash.com/license>
|
||||
*/
|
||||
var createWrapper = require('./../lodash._createwrapper'),
|
||||
reNative = require('./../lodash._renative'),
|
||||
slice = require('./../lodash._slice');
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,7 +15,7 @@ class ChildRegistrationUtils {
|
||||
return false;
|
||||
}
|
||||
|
||||
const softwareType = child.config.functionality.softwareType;
|
||||
const softwareType = (child.config.functionality.softwareType || '').toLowerCase();
|
||||
const name = child.config.general.name || child.config.general.id || 'unknown';
|
||||
const id = child.config.general.id || name;
|
||||
|
||||
@@ -49,7 +49,7 @@ class ChildRegistrationUtils {
|
||||
|
||||
// IMPORTANT: Only call parent registration - no automatic handling and if parent has this function then try to register this child
|
||||
if (typeof this.mainClass.registerChild === 'function') {
|
||||
this.mainClass.registerChild(child, softwareType);
|
||||
return this.mainClass.registerChild(child, softwareType);
|
||||
}
|
||||
|
||||
this.logger.info(`✅ Child ${name} registered successfully`);
|
||||
|
||||
@@ -1,260 +0,0 @@
|
||||
// ChildRegistrationUtils.js
|
||||
class ChildRegistrationUtils {
|
||||
constructor(mainClass) {
|
||||
this.mainClass = mainClass; // Reference to the main class
|
||||
this.logger = mainClass.logger;
|
||||
}
|
||||
|
||||
async registerChild(child, positionVsParent) {
|
||||
|
||||
this.logger.debug(`Registering child: ${child.id} with position=${positionVsParent}`);
|
||||
const { softwareType } = child.config.functionality;
|
||||
const { name, id, unit } = child.config.general;
|
||||
const { category = "", type = "" } = child.config.asset || {};
|
||||
console.log(`Registering child: ${name}, id: ${id}, softwareType: ${softwareType}, category: ${category}, type: ${type}, positionVsParent: ${positionVsParent}` );
|
||||
const emitter = child.emitter;
|
||||
|
||||
//define position vs parent in child
|
||||
child.positionVsParent = positionVsParent;
|
||||
child.parent = this.mainClass;
|
||||
|
||||
if (!this.mainClass.child) this.mainClass.child = {};
|
||||
if (!this.mainClass.child[softwareType])
|
||||
this.mainClass.child[softwareType] = {};
|
||||
if (!this.mainClass.child[softwareType][category])
|
||||
this.mainClass.child[softwareType][category] = {};
|
||||
if (!this.mainClass.child[softwareType][category][type])
|
||||
this.mainClass.child[softwareType][category][type] = {};
|
||||
|
||||
// Use an array to handle multiple categories
|
||||
if (!Array.isArray(this.mainClass.child[softwareType][category][type])) {
|
||||
this.mainClass.child[softwareType][category][type] = [];
|
||||
}
|
||||
|
||||
// Push the new child to the array of the mainclass so we can track the childs
|
||||
this.mainClass.child[softwareType][category][type].push({
|
||||
name,
|
||||
id,
|
||||
unit,
|
||||
emitter,
|
||||
});
|
||||
|
||||
//then connect the child depending on the type type etc..
|
||||
this.connectChild(
|
||||
id,
|
||||
softwareType,
|
||||
emitter,
|
||||
category,
|
||||
child,
|
||||
type,
|
||||
positionVsParent
|
||||
);
|
||||
}
|
||||
|
||||
connectChild(
|
||||
id,
|
||||
softwareType,
|
||||
emitter,
|
||||
category,
|
||||
child,
|
||||
type,
|
||||
positionVsParent
|
||||
) {
|
||||
this.logger.debug(
|
||||
`Connecting child id=${id}: desc=${softwareType}, category=${category},type=${type}, position=${positionVsParent}`
|
||||
);
|
||||
|
||||
switch (softwareType) {
|
||||
case "measurement":
|
||||
this.logger.debug(
|
||||
`Registering measurement child: ${id} with category=${category}`
|
||||
);
|
||||
this.connectMeasurement(child, type, positionVsParent);
|
||||
break;
|
||||
|
||||
case "machine":
|
||||
this.logger.debug(`Registering complete machine child: ${id}`);
|
||||
this.connectMachine(child);
|
||||
break;
|
||||
|
||||
case "valve":
|
||||
this.logger.debug(`Registering complete valve child: ${id}`);
|
||||
this.connectValve(child);
|
||||
break;
|
||||
|
||||
case "machineGroup":
|
||||
this.logger.debug(`Registering complete machineGroup child: ${id}`);
|
||||
this.connectMachineGroup(child);
|
||||
break;
|
||||
|
||||
case "actuator":
|
||||
this.logger.debug(`Registering linear actuator child: ${id}`);
|
||||
this.connectActuator(child,positionVsParent);
|
||||
break;
|
||||
|
||||
default:
|
||||
this.logger.error(`Child registration unrecognized desc: ${desc}`);
|
||||
this.logger.error(`Unrecognized softwareType: ${softwareType}`);
|
||||
}
|
||||
}
|
||||
|
||||
connectMeasurement(child, type, position) {
|
||||
this.logger.debug(
|
||||
`Connecting measurement child: ${type} with position=${position}`
|
||||
);
|
||||
|
||||
// Check if type is valid
|
||||
if (!type) {
|
||||
this.logger.error(`Invalid type for measurement: ${type}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// initialize the measurement to a number - logging each step for debugging
|
||||
try {
|
||||
this.logger.debug(
|
||||
`Initializing measurement: ${type}, position: ${position} value: 0`
|
||||
);
|
||||
const typeResult = this.mainClass.measurements.type(type);
|
||||
const variantResult = typeResult.variant("measured");
|
||||
const positionResult = variantResult.position(position);
|
||||
positionResult.value(0);
|
||||
|
||||
this.logger.debug(
|
||||
`Subscribing on mAbs event for measurement: ${type}, position: ${position}`
|
||||
);
|
||||
// Listen for the mAbs event and update the measurement
|
||||
|
||||
this.logger.debug(
|
||||
`Successfully initialized measurement: ${type}, position: ${position}`
|
||||
);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to initialize measurement: ${error.message}`);
|
||||
return;
|
||||
}
|
||||
|
||||
//testing new emitter strategy
|
||||
child.measurements.emitter.on("newValue", (data) => {
|
||||
this.logger.warn(
|
||||
`Value change event received for measurement: ${type}, position: ${position}, value: ${data.value}`
|
||||
);
|
||||
});
|
||||
|
||||
child.emitter.on("mAbs", (value) => {
|
||||
// Use the same method chaining approach that worked during initialization
|
||||
this.mainClass.measurements
|
||||
.type(type)
|
||||
.variant("measured")
|
||||
.position(position)
|
||||
.value(value);
|
||||
this.mainClass.updateMeasurement("measured", type, value, position);
|
||||
//this.logger.debug(`--------->>>>>>>>>Updated measurement: ${type}, value: ${value}, position: ${position}`);
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
connectMachine(machine) {
|
||||
if (!machine) {
|
||||
this.logger.error("Invalid machine provided.");
|
||||
return;
|
||||
}
|
||||
|
||||
const machineId = Object.keys(this.mainClass.machines).length + 1;
|
||||
this.mainClass.machines[machineId] = machine;
|
||||
|
||||
this.logger.info(
|
||||
`Setting up pressureChange listener for machine ${machineId}`
|
||||
);
|
||||
|
||||
machine.emitter.on("pressureChange", () =>
|
||||
this.mainClass.handlePressureChange(machine)
|
||||
);
|
||||
|
||||
//update of child triggers the handler
|
||||
this.mainClass.handleChildChange();
|
||||
|
||||
this.logger.info(`Machine ${machineId} registered successfully.`);
|
||||
}
|
||||
|
||||
connectValve(valve) {
|
||||
if (!valve) {
|
||||
this.logger.warn("Invalid valve provided.");
|
||||
return;
|
||||
}
|
||||
const valveId = Object.keys(this.mainClass.valves).length + 1;
|
||||
this.mainClass.valves[valveId] = valve; // Gooit valve object in de valves attribute met valve objects
|
||||
|
||||
valve.state.emitter.on("positionChange", (data) => {
|
||||
//ValveGroupController abboneren op klepstand verandering
|
||||
this.mainClass.logger.debug(`Position change of valve detected: ${data}`);
|
||||
this.mainClass.calcValveFlows();
|
||||
}); //bepaal nieuwe flow per valve
|
||||
valve.emitter.on("deltaPChange", () => {
|
||||
this.mainClass.logger.debug("DeltaP change of valve detected");
|
||||
this.mainClass.calcMaxDeltaP();
|
||||
}); //bepaal nieuwe max deltaP
|
||||
|
||||
this.logger.info(`Valve ${valveId} registered successfully.`);
|
||||
}
|
||||
|
||||
connectMachineGroup(machineGroup) {
|
||||
if (!machineGroup) {
|
||||
this.logger.warn("Invalid machineGroup provided.");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const machineGroupId = Object.keys(this.mainClass.machineGroups).length + 1;
|
||||
this.mainClass.machineGroups[machineGroupId] = machineGroup;
|
||||
} catch (error) {
|
||||
this.logger.warn(`Skip machinegroup connnection: ${error.message}`);
|
||||
}
|
||||
|
||||
machineGroup.emitter.on("totalFlowChange", (data) => {
|
||||
this.mainClass.logger.debug('Total flow change of machineGroup detected');
|
||||
this.mainClass.handleInput("parent", "totalFlowChange", data)}); //Geef nieuwe totale flow door aan valveGrouControl
|
||||
|
||||
this.logger.info(`MachineGroup ${machineGroup.config.general.name} registered successfully.`);
|
||||
}
|
||||
|
||||
connectActuator(actuator, positionVsParent) {
|
||||
if (!actuator) {
|
||||
this.logger.warn("Invalid actuator provided.");
|
||||
return;
|
||||
}
|
||||
|
||||
//Special case gateGroupControl
|
||||
if (
|
||||
this.mainClass.config.functionality.softwareType == "gateGroupControl"
|
||||
) {
|
||||
if (Object.keys(this.mainClass.actuators).length < 2) {
|
||||
if (positionVsParent == "downstream") {
|
||||
this.mainClass.actuators[0] = actuator;
|
||||
}
|
||||
|
||||
if (positionVsParent == "upstream") {
|
||||
this.mainClass.actuators[1] = actuator;
|
||||
}
|
||||
//define emitters
|
||||
actuator.state.emitter.on("positionChange", (data) => {
|
||||
this.mainClass.logger.debug(`Position change of actuator detected: ${data}`);
|
||||
this.mainClass.eventUpdate();
|
||||
});
|
||||
|
||||
//define emitters
|
||||
actuator.state.emitter.on("stateChange", (data) => {
|
||||
this.mainClass.logger.debug(`State change of actuator detected: ${data}`);
|
||||
this.mainClass.eventUpdate();
|
||||
});
|
||||
|
||||
} else {
|
||||
this.logger.error(
|
||||
"Too many actuators registered. Only two are allowed."
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//wanneer hij deze ontvangt is deltaP van een van de valves veranderd (kan ook zijn niet child zijn, maar dat maakt niet uit)
|
||||
}
|
||||
|
||||
module.exports = ChildRegistrationUtils;
|
||||
@@ -80,7 +80,7 @@ class ConfigUtils {
|
||||
// loop through objects and merge them obj1 will be updated with obj2 values
|
||||
mergeObjects(obj1, obj2) {
|
||||
for (let key in obj2) {
|
||||
if (obj2.hasOwnProperty(key)) {
|
||||
if (Object.prototype.hasOwnProperty.call(obj2, key)) {
|
||||
const nextValue = obj2[key];
|
||||
|
||||
if (Array.isArray(nextValue)) {
|
||||
|
||||
44
src/helper/formatters/csvFormatter.js
Normal file
44
src/helper/formatters/csvFormatter.js
Normal file
@@ -0,0 +1,44 @@
|
||||
/**
|
||||
* CSV formatter
|
||||
* Produces a single CSV line: timestamp,measurement,field1=val1,field2=val2,...
|
||||
*
|
||||
* Values are escaped if they contain commas or quotes.
|
||||
*
|
||||
* @param {string} measurement - The measurement name (e.g. node name)
|
||||
* @param {object} metadata - { fields, tags }
|
||||
* - fields: key/value pairs of changed data points
|
||||
* - tags: flat key/value string pairs (included as columns)
|
||||
* @returns {string} CSV-formatted line
|
||||
*/
|
||||
function format(measurement, metadata) {
|
||||
const { fields, tags } = metadata;
|
||||
const timestamp = new Date().toISOString();
|
||||
const parts = [escapeCSV(timestamp), escapeCSV(measurement)];
|
||||
|
||||
// Append tags first, then fields
|
||||
if (tags) {
|
||||
for (const key of Object.keys(tags).sort()) {
|
||||
parts.push(escapeCSV(`${key}=${tags[key]}`));
|
||||
}
|
||||
}
|
||||
|
||||
for (const key of Object.keys(fields).sort()) {
|
||||
parts.push(escapeCSV(`${key}=${fields[key]}`));
|
||||
}
|
||||
|
||||
return parts.join(',');
|
||||
}
|
||||
|
||||
/**
|
||||
* Escapes a value for safe inclusion in a CSV field.
|
||||
* Wraps in double quotes if the value contains a comma, quote, or newline.
|
||||
*/
|
||||
function escapeCSV(value) {
|
||||
const str = String(value);
|
||||
if (str.includes(',') || str.includes('"') || str.includes('\n')) {
|
||||
return '"' + str.replace(/"/g, '""') + '"';
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
module.exports = { format };
|
||||
60
src/helper/formatters/index.js
Normal file
60
src/helper/formatters/index.js
Normal file
@@ -0,0 +1,60 @@
|
||||
/**
|
||||
* Formatter Registry
|
||||
* ------------------
|
||||
* Maps format names to formatter modules.
|
||||
* Each formatter exports: format(measurement, metadata) => string|object
|
||||
*
|
||||
* Usage:
|
||||
* const { getFormatter, registerFormatter } = require('./formatters');
|
||||
* const fmt = getFormatter('json');
|
||||
* const output = fmt.format('pump1', { fields: {...}, tags: {...} });
|
||||
*/
|
||||
|
||||
const influxdbFormatter = require('./influxdbFormatter');
|
||||
const jsonFormatter = require('./jsonFormatter');
|
||||
const csvFormatter = require('./csvFormatter');
|
||||
const processFormatter = require('./processFormatter');
|
||||
|
||||
// Built-in registry
|
||||
const registry = {
|
||||
influxdb: influxdbFormatter,
|
||||
json: jsonFormatter,
|
||||
csv: csvFormatter,
|
||||
process: processFormatter,
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieve a formatter by name.
|
||||
* @param {string} name - Format name (e.g. 'influxdb', 'json', 'csv')
|
||||
* @returns {object} Formatter with a .format() method
|
||||
* @throws {Error} If the format name is not registered
|
||||
*/
|
||||
function getFormatter(name) {
|
||||
const formatter = registry[name];
|
||||
if (!formatter) {
|
||||
throw new Error(`Unknown output format: "${name}". Registered formats: ${Object.keys(registry).join(', ')}`);
|
||||
}
|
||||
return formatter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a custom formatter at runtime.
|
||||
* @param {string} name - Format name
|
||||
* @param {object} formatter - Object with a .format(measurement, metadata) method
|
||||
*/
|
||||
function registerFormatter(name, formatter) {
|
||||
if (typeof formatter.format !== 'function') {
|
||||
throw new Error('Formatter must have a .format(measurement, metadata) method');
|
||||
}
|
||||
registry[name] = formatter;
|
||||
}
|
||||
|
||||
/**
|
||||
* List all registered format names.
|
||||
* @returns {string[]}
|
||||
*/
|
||||
function getRegisteredFormats() {
|
||||
return Object.keys(registry);
|
||||
}
|
||||
|
||||
module.exports = { getFormatter, registerFormatter, getRegisteredFormats };
|
||||
22
src/helper/formatters/influxdbFormatter.js
Normal file
22
src/helper/formatters/influxdbFormatter.js
Normal file
@@ -0,0 +1,22 @@
|
||||
/**
|
||||
* InfluxDB formatter
|
||||
* Produces the structured object expected by Node-RED InfluxDB nodes:
|
||||
* { measurement, fields, tags, timestamp }
|
||||
*
|
||||
* @param {string} measurement - The measurement name (e.g. node name)
|
||||
* @param {object} metadata - { fields, tags }
|
||||
* - fields: key/value pairs of changed data points
|
||||
* - tags: flat key/value string pairs (InfluxDB tags)
|
||||
* @returns {string|object} Formatted payload (object for InfluxDB)
|
||||
*/
|
||||
function format(measurement, metadata) {
|
||||
const { fields, tags } = metadata;
|
||||
return {
|
||||
measurement: measurement,
|
||||
fields: fields,
|
||||
tags: tags || {},
|
||||
timestamp: new Date(),
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { format };
|
||||
22
src/helper/formatters/jsonFormatter.js
Normal file
22
src/helper/formatters/jsonFormatter.js
Normal file
@@ -0,0 +1,22 @@
|
||||
/**
|
||||
* JSON formatter
|
||||
* Produces a JSON string suitable for MQTT, REST APIs, etc.
|
||||
*
|
||||
* @param {string} measurement - The measurement name (e.g. node name)
|
||||
* @param {object} metadata - { fields, tags }
|
||||
* - fields: key/value pairs of changed data points
|
||||
* - tags: flat key/value string pairs
|
||||
* @returns {string} JSON-encoded string
|
||||
*/
|
||||
function format(measurement, metadata) {
|
||||
const { fields, tags } = metadata;
|
||||
const payload = {
|
||||
measurement: measurement,
|
||||
fields: fields,
|
||||
tags: tags || {},
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
return JSON.stringify(payload);
|
||||
}
|
||||
|
||||
module.exports = { format };
|
||||
9
src/helper/formatters/processFormatter.js
Normal file
9
src/helper/formatters/processFormatter.js
Normal file
@@ -0,0 +1,9 @@
|
||||
/**
|
||||
* Process formatter
|
||||
* Keeps the existing process-port behaviour: emit only changed fields as an object.
|
||||
*/
|
||||
function format(_measurement, metadata) {
|
||||
return metadata.fields;
|
||||
}
|
||||
|
||||
module.exports = { format };
|
||||
123
src/helper/menu/dataFetching.js
Normal file
123
src/helper/menu/dataFetching.js
Normal file
@@ -0,0 +1,123 @@
|
||||
/**
|
||||
* Data fetching methods for MenuUtils.
|
||||
* Handles primary/fallback URL fetching and API calls.
|
||||
*/
|
||||
|
||||
const dataFetching = {
|
||||
async fetchData(url, fallbackUrl) {
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
|
||||
const responsData = await response.json();
|
||||
//responsData
|
||||
const data = responsData.data;
|
||||
/* .map(item => {
|
||||
const { vendor_name, ...rest } = item;
|
||||
return {
|
||||
name: vendor_name,
|
||||
...rest
|
||||
};
|
||||
}); */
|
||||
console.log(url);
|
||||
console.log("Response Data: ", data);
|
||||
return data;
|
||||
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
`Primary URL failed: ${url}. Trying fallback URL: ${fallbackUrl}`,
|
||||
err
|
||||
);
|
||||
try {
|
||||
const response = await fetch(fallbackUrl);
|
||||
if (!response.ok)
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
return await response.json();
|
||||
} catch (fallbackErr) {
|
||||
console.error("Both primary and fallback URLs failed:", fallbackErr);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
async fetchProjectData(url) {
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
|
||||
const responsData = await response.json();
|
||||
console.log("Response Data: ", responsData);
|
||||
return responsData;
|
||||
|
||||
} catch (err) {
|
||||
/* intentionally empty */
|
||||
}
|
||||
},
|
||||
|
||||
// Save changes to API
|
||||
async apiCall(node) {
|
||||
try{
|
||||
// OLFIANT when a browser refreshes the tag code is lost!!! fix this later!!!!!
|
||||
// FIX UUID ALSO LATER
|
||||
|
||||
if(node.assetTagCode !== "" || node.assetTagCode !== null){ /* intentionally empty */ }
|
||||
// API call to register or check asset in central database
|
||||
let assetregisterAPI = node.configUrls.cloud.taggcodeAPI + "/asset/create_asset.php";
|
||||
|
||||
const assetModelId = node.modelMetadata.id; //asset_product_model_id
|
||||
const uuid = node.uuid; //asset_product_model_uuid
|
||||
const assetName = node.assetType; //asset_name / type?
|
||||
const description = node.name; // asset_description
|
||||
const assetStatus = "actief"; //asset_status -> koppel aan enable / disable node ? or make dropdown ?
|
||||
const assetProfileId = 1; //asset_profile_id these are the rules to check if the childs are valid under this node (parent / child id?)
|
||||
const child_assets = ["63247"]; //child_assets tagnummer of id?
|
||||
const assetProcessId = node.processId; //asset_process_id
|
||||
const assetLocationId = node.locationId; //asset_location_id
|
||||
const tagCode = node.assetTagCode; // if already exists in the node information use it to tell the api it exists and it will update else we will get it from the api call
|
||||
//console.log(`this is my tagCode: ${tagCode}`);
|
||||
|
||||
// Build base URL with required parameters
|
||||
let apiUrl = `?asset_product_model_id=${assetModelId}&asset_product_model_uuid=${uuid}&asset_name=${assetName}&asset_description=${description}&asset_status=${assetStatus}&asset_profile_id=${assetProfileId}&asset_location_id=${assetLocationId}&asset_process_id=${assetProcessId}&child_assets=${child_assets}`;
|
||||
|
||||
// Only add tagCode to URL if it exists
|
||||
if (tagCode) {
|
||||
apiUrl += `&asset_tag_number=${tagCode}`;
|
||||
console.log('hello there');
|
||||
}
|
||||
|
||||
assetregisterAPI += apiUrl;
|
||||
console.log("API call to register asset in central database", assetregisterAPI);
|
||||
|
||||
const response = await fetch(assetregisterAPI, {
|
||||
method: "POST"
|
||||
});
|
||||
|
||||
// Get the response text first
|
||||
const responseText = await response.text();
|
||||
console.log("Raw API response:", responseText);
|
||||
|
||||
// Try to parse the JSON, handling potential parsing errors
|
||||
let jsonResponse;
|
||||
try {
|
||||
jsonResponse = JSON.parse(responseText);
|
||||
} catch (parseError) {
|
||||
console.error("JSON Parsing Error:", parseError);
|
||||
console.error("Response that could not be parsed:", responseText);
|
||||
throw new Error("Failed to parse API response");
|
||||
}
|
||||
|
||||
console.log(jsonResponse);
|
||||
|
||||
if(jsonResponse.success){
|
||||
console.log(`${jsonResponse.message}, tag number: ${jsonResponse.asset_tag_number}, asset id: ${jsonResponse.asset_id}`);
|
||||
// Save the asset tag number and id to the node
|
||||
} else {
|
||||
console.log("Asset not registered in central database");
|
||||
}
|
||||
return jsonResponse;
|
||||
|
||||
} catch (error) {
|
||||
console.log("Error saving changes to asset register API", error);
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = dataFetching;
|
||||
283
src/helper/menu/dropdownPopulation.js
Normal file
283
src/helper/menu/dropdownPopulation.js
Normal file
@@ -0,0 +1,283 @@
|
||||
/**
|
||||
* Dropdown population methods for MenuUtils.
|
||||
* Handles populating and cascading dropdown menus for assets, suppliers, models, units, etc.
|
||||
*/
|
||||
|
||||
const dropdownPopulation = {
|
||||
populateSmoothingMethods(configUrls, elements, node) {
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const smoothingMethods =
|
||||
configData.smoothing?.smoothMethod?.rules?.values?.map(
|
||||
(o) => o.value
|
||||
) || [];
|
||||
this.populateDropdown(
|
||||
elements.smoothMethod,
|
||||
smoothingMethods,
|
||||
node,
|
||||
"smooth_method"
|
||||
);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error("Error loading smoothing methods", err);
|
||||
});
|
||||
},
|
||||
|
||||
populateInterpolationMethods(configUrls, elements, node) {
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const interpolationMethods =
|
||||
configData?.interpolation?.type?.rules?.values.map((m) => m.value) ||
|
||||
[];
|
||||
this.populateDropdown(
|
||||
elements.interpolationMethodInput,
|
||||
interpolationMethods,
|
||||
node,
|
||||
"interpolationMethod"
|
||||
);
|
||||
|
||||
// Find the selected method and use it to spawn 1 more field to fill in tension
|
||||
//const selectedMethod = interpolationMethods.find(m => m === node.interpolationMethod);
|
||||
this.initTensionToggles(elements, node);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error("Error loading interpolation methods", err);
|
||||
});
|
||||
},
|
||||
|
||||
populateLogLevelOptions(logLevelSelect, configData, node) {
|
||||
// debug log level
|
||||
//console.log("Displaying configData => ", configData) ;
|
||||
|
||||
const logLevels =
|
||||
configData?.general?.logging?.logLevel?.rules?.values?.map(
|
||||
(l) => l.value
|
||||
) || [];
|
||||
|
||||
//console.log("Displaying logLevels => ", logLevels);
|
||||
|
||||
// Reuse your existing generic populateDropdown helper
|
||||
this.populateDropdown(logLevelSelect, logLevels, node.logLevel);
|
||||
},
|
||||
|
||||
//cascade dropdowns for asset type, supplier, subType, model, unit
|
||||
fetchAndPopulateDropdowns(configUrls, elements, node) {
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const assetType = configData.asset?.type?.default;
|
||||
const localSuppliersUrl = this.constructUrl(configUrls.local.taggcodeAPI,`${assetType}s`,"suppliers.json");
|
||||
const cloudSuppliersUrl = this.constructCloudURL(configUrls.cloud.taggcodeAPI, "/vendor/get_vendors.php");
|
||||
|
||||
return this.fetchData(cloudSuppliersUrl, localSuppliersUrl)
|
||||
.then((supplierData) => {
|
||||
|
||||
const suppliers = supplierData.map((supplier) => supplier.name);
|
||||
|
||||
// Populate suppliers dropdown and set up its change handler
|
||||
return this.populateDropdown(
|
||||
elements.supplier,
|
||||
suppliers,
|
||||
node,
|
||||
"supplier",
|
||||
function (selectedSupplier) {
|
||||
if (selectedSupplier) {
|
||||
this.populateSubTypes(configUrls, elements, node, selectedSupplier);
|
||||
}
|
||||
}
|
||||
);
|
||||
})
|
||||
.then(() => {
|
||||
// If we have a saved supplier, trigger subTypes population
|
||||
if (node.supplier) {
|
||||
this.populateSubTypes(configUrls, elements, node, node.supplier);
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error in initial dropdown population:", error);
|
||||
});
|
||||
},
|
||||
|
||||
populateSubTypes(configUrls, elements, node, selectedSupplier) {
|
||||
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const assetType = configData.asset?.type?.default;
|
||||
const supplierFolder = this.constructUrl( configUrls.local.taggcodeAPI, `${assetType}s`, selectedSupplier );
|
||||
|
||||
const localSubTypesUrl = this.constructUrl(supplierFolder, "subtypes.json");
|
||||
const cloudSubTypesUrl = this.constructCloudURL(configUrls.cloud.taggcodeAPI, "/product/get_subtypesFromVendor.php?vendor_name=" + selectedSupplier);
|
||||
|
||||
return this.fetchData(cloudSubTypesUrl, localSubTypesUrl)
|
||||
.then((subTypeData) => {
|
||||
const subTypes = subTypeData.map((subType) => subType.name);
|
||||
|
||||
return this.populateDropdown(
|
||||
elements.subType,
|
||||
subTypes,
|
||||
node,
|
||||
"subType",
|
||||
function (selectedSubType) {
|
||||
if (selectedSubType) {
|
||||
// When subType changes, update both models and units
|
||||
this.populateModels(
|
||||
configUrls,
|
||||
elements,
|
||||
node,
|
||||
selectedSupplier,
|
||||
selectedSubType
|
||||
);
|
||||
this.populateUnitsForSubType(
|
||||
configUrls,
|
||||
elements,
|
||||
node,
|
||||
selectedSubType
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
})
|
||||
.then(() => {
|
||||
// If we have a saved subType, trigger both models and units population
|
||||
if (node.subType) {
|
||||
this.populateModels(
|
||||
configUrls,
|
||||
elements,
|
||||
node,
|
||||
selectedSupplier,
|
||||
node.subType
|
||||
);
|
||||
this.populateUnitsForSubType(configUrls, elements, node, node.subType);
|
||||
}
|
||||
//console.log("In fetch part of subtypes ");
|
||||
// Store all data from selected model
|
||||
/* node["modelMetadata"] = modelData.find(
|
||||
(model) => model.name === node.model
|
||||
);
|
||||
console.log("Model Metadata: ", node["modelMetadata"]); */
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error populating subtypes:", error);
|
||||
});
|
||||
},
|
||||
|
||||
populateUnitsForSubType(configUrls, elements, node, selectedSubType) {
|
||||
// Fetch the units data
|
||||
this.fetchData(configUrls.cloud.units, configUrls.local.units)
|
||||
.then((unitsData) => {
|
||||
// Find the category that matches the subType name
|
||||
const categoryData = unitsData.units.find(
|
||||
(category) =>
|
||||
category.category.toLowerCase() === selectedSubType.toLowerCase()
|
||||
);
|
||||
|
||||
if (categoryData) {
|
||||
// Extract just the unit values and descriptions
|
||||
const units = categoryData.values.map((unit) => ({
|
||||
value: unit.value,
|
||||
description: unit.description,
|
||||
}));
|
||||
|
||||
// Create the options array with descriptions as labels
|
||||
const options = units.map((unit) => ({
|
||||
value: unit.value,
|
||||
label: `${unit.value} - ${unit.description}`,
|
||||
}));
|
||||
|
||||
// Populate the units dropdown
|
||||
this.populateDropdown(
|
||||
elements.unit,
|
||||
options.map((opt) => opt.value),
|
||||
node,
|
||||
"unit"
|
||||
);
|
||||
|
||||
// If there's no currently selected unit but we have options, select the first one
|
||||
if (!node.unit && options.length > 0) {
|
||||
node.unit = options[0].value;
|
||||
elements.unit.value = options[0].value;
|
||||
}
|
||||
} else {
|
||||
// If no matching category is found, provide a default % option
|
||||
const defaultUnits = [{ value: "%", description: "Percentage" }];
|
||||
this.populateDropdown(
|
||||
elements.unit,
|
||||
defaultUnits.map((unit) => unit.value),
|
||||
node,
|
||||
"unit"
|
||||
);
|
||||
console.warn(
|
||||
`No matching unit category found for subType: ${selectedSubType}`
|
||||
);
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error fetching units:", error);
|
||||
});
|
||||
},
|
||||
|
||||
/**
 * Populates the model dropdown for the given supplier/sub-type pair and
 * keeps node.modelMetadata in sync with the selected model.
 * Flow: read assetType from the node config, build the local folder URL
 * and the cloud PHP endpoint URL, fetch the model list (cloud first,
 * local fallback), then render the dropdown.
 */
populateModels(
  configUrls,
  elements,
  node,
  selectedSupplier,
  selectedSubType
) {

  this.fetchData(configUrls.cloud.config, configUrls.local.config)
    .then((configData) => {
      const assetType = configData.asset?.type?.default;
      // save assetType on the node so later calls (e.g. apiCall) can use it
      node.assetType = assetType;

      const supplierFolder = this.constructUrl( configUrls.local.taggcodeAPI,`${assetType}s`,selectedSupplier);
      const subTypeFolder = this.constructUrl(supplierFolder, selectedSubType);
      const localModelsUrl = this.constructUrl(subTypeFolder, "models.json");
      const cloudModelsUrl = this.constructCloudURL(configUrls.cloud.taggcodeAPI, "/product/get_product_models.php?vendor_name=" + selectedSupplier + "&product_subtype_name=" + selectedSubType);

      return this.fetchData(cloudModelsUrl, localModelsUrl).then((modelData) => {
        const models = modelData.map((model) => model.name); // use this to populate the dropdown

        // If a model is already selected, store its metadata immediately
        if (node.model) {
          node["modelMetadata"] = modelData.find((model) => model.name === node.model);
        }

        this.populateDropdown(elements.model, models, node, "model", (selectedModel) => {
          // Store only the metadata for the selected model
          node["modelMetadata"] = modelData.find((model) => model.name === selectedModel);
        });
      });

    })
    .catch((error) => {
      console.error("Error populating models:", error);
    });
},
|
||||
|
||||
async populateDropdown(
|
||||
htmlElement,
|
||||
options,
|
||||
node,
|
||||
property,
|
||||
callback
|
||||
) {
|
||||
this.generateHtml(htmlElement, options, node[property]);
|
||||
|
||||
htmlElement.addEventListener("change", async (e) => {
|
||||
const newValue = e.target.value;
|
||||
console.log(`Dropdown changed: ${property} = ${newValue}`);
|
||||
node[property] = newValue;
|
||||
|
||||
RED.nodes.dirty(true);
|
||||
if (callback) await callback(newValue); // Ensure async callback completion
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = dropdownPopulation;
|
||||
151
src/helper/menu/htmlGeneration.js
Normal file
151
src/helper/menu/htmlGeneration.js
Normal file
@@ -0,0 +1,151 @@
|
||||
/**
 * HTML generation and endpoint methods for MenuUtils.
 * Handles generating dropdown HTML and serving MenuUtils code to the browser.
 */

const htmlGeneration = {
  /**
   * Renders `options` into a <select>, prefixed with a blank "Select..."
   * entry, and restores `savedValue` when it is one of the options.
   * NOTE(review): option values are interpolated without HTML escaping —
   * assumes they come from trusted config/API data; verify before reusing
   * with user-supplied values.
   */
  generateHtml(htmlElement, options, savedValue) {
    htmlElement.innerHTML = options.length
      ? `<option value="">Select...</option>${options
          .map((opt) => `<option value="${opt}">${opt}</option>`)
          .join("")}`
      : "<option value=''>No options available</option>";

    if (savedValue && options.includes(savedValue)) {
      htmlElement.value = savedValue;
    }
  },

  /**
   * Registers the three editor-facing endpoints under /<nodeName>/resources:
   *   menuUtilsData.json  — helper sources + options as JSON
   *   menuUtils.legacy.js — full generated browser bundle
   *   menuUtils.js        — small stable bootstrap that fetches the above
   * Handlers are bound to `this` so the generator methods resolve.
   */
  createMenuUtilsEndpoint(RED, nodeName, customHelpers = {}, options = {}) {
    const basePath = `/${nodeName}/resources`;

    RED.httpAdmin.get(`${basePath}/menuUtilsData.json`, function(req, res) {
      res.json(this.generateMenuUtilsData(nodeName, customHelpers, options));
    }.bind(this));

    RED.httpAdmin.get(`${basePath}/menuUtils.legacy.js`, function(req, res) {
      res.set('Content-Type', 'application/javascript');
      const browserCode = this.generateLegacyMenuUtilsCode(nodeName, customHelpers);
      res.send(browserCode);
    }.bind(this));

    RED.httpAdmin.get(`${basePath}/menuUtils.js`, function(req, res) {
      res.set('Content-Type', 'application/javascript');
      res.send(this.generateMenuUtilsBootstrap(nodeName));
    }.bind(this));
  },

  /**
   * Builds the JSON payload served to the editor: node name, helper
   * function sources (defaults merged with customHelpers, custom winning)
   * and bootstrap options. Helpers are source strings, compiled
   * browser-side by the bootstrap.
   */
  generateMenuUtilsData(nodeName, customHelpers = {}, options = {}) {
    const defaultHelpers = {
      validateRequired: `function(value) {
        return value && value.toString().trim() !== '';
      }`,
      formatDisplayValue: `function(value, unit) {
        return \`\${value} \${unit || ''}\`.trim();
      }`
    };

    return {
      nodeName,
      helpers: { ...defaultHelpers, ...customHelpers },
      options: {
        autoLoadLegacy: options.autoLoadLegacy !== false,
      },
    };
  },

  /**
   * Returns the stable bootstrap script: it creates the EVOLV namespace,
   * fetches menuUtilsData.json, compiles the helper sources, and lazily
   * loads the legacy bundle when autoLoadLegacy is enabled.
   */
  generateMenuUtilsBootstrap(nodeName) {
    return `
// Stable bootstrap for EVOLV menu utils (${nodeName})
(function() {
  const nodeName = ${JSON.stringify(nodeName)};
  const basePath = '/' + nodeName + '/resources';

  window.EVOLV = window.EVOLV || {};
  window.EVOLV.nodes = window.EVOLV.nodes || {};
  window.EVOLV.nodes[nodeName] = window.EVOLV.nodes[nodeName] || {};
  window.EVOLV.nodes[nodeName].utils = window.EVOLV.nodes[nodeName].utils || {};

  function parseHelper(fnBody) {
    try {
      return (new Function('return (' + fnBody + ')'))();
    } catch (error) {
      console.error('[menuUtils] helper parse failed:', error);
      return function() { return null; };
    }
  }

  function loadLegacyIfNeeded(autoLoadLegacy) {
    if (!autoLoadLegacy || typeof window.MenuUtils === 'function') return Promise.resolve();
    return new Promise((resolve, reject) => {
      const script = document.createElement('script');
      script.src = basePath + '/menuUtils.legacy.js';
      script.onload = resolve;
      script.onerror = reject;
      document.head.appendChild(script);
    });
  }

  fetch(basePath + '/menuUtilsData.json')
    .then(function(res) { return res.json(); })
    .then(function(payload) {
      const helperFns = {};
      Object.entries(payload.helpers || {}).forEach(function(entry) {
        helperFns[entry[0]] = parseHelper(entry[1]);
      });
      window.EVOLV.nodes[nodeName].utils.helpers = helperFns;
      return loadLegacyIfNeeded(payload.options && payload.options.autoLoadLegacy);
    })
    .then(function() {
      if (typeof window.MenuUtils === 'function' && !window.EVOLV.nodes[nodeName].utils.menuUtils) {
        window.EVOLV.nodes[nodeName].utils.menuUtils = new window.MenuUtils();
      }
    })
    .catch(function(error) {
      console.error('[menuUtils] bootstrap failed for ' + nodeName, error);
    });
})();
`;
  },

  /**
   * Generates the legacy browser bundle: namespace setup, the MenuUtils
   * class source, an instance and the helper functions.
   *
   * Fix: the generated namespace access now uses bracket notation with a
   * JSON-quoted key — Node-RED node names routinely contain dashes, which
   * made the previous dot path (window.EVOLV.nodes.my-node) a syntax error
   * in the generated script.
   */
  generateLegacyMenuUtilsCode(nodeName, customHelpers = {}) {
    const allHelpers = { ...this.generateMenuUtilsData(nodeName).helpers, ...customHelpers };

    const helpersCode = Object.entries(allHelpers)
      .map(([name, func]) => `  ${name}: ${func}`)
      .join(',\n');

    // NOTE(review): when these methods are mixed onto the MenuUtils
    // prototype, this yields the full class source; invoked on this plain
    // object it yields Object's native stub — confirm call sites.
    const classCode = this.constructor.toString();

    const nodeKey = JSON.stringify(nodeName);

    return `
// Create EVOLV namespace structure
window.EVOLV = window.EVOLV || {};
window.EVOLV.nodes = window.EVOLV.nodes || {};
window.EVOLV.nodes[${nodeKey}] = window.EVOLV.nodes[${nodeKey}] || {};

// Inject MenuUtils class
${classCode}

// Expose MenuUtils instance to namespace
window.EVOLV.nodes[${nodeKey}].utils = {
  menuUtils: new MenuUtils(),

  helpers: {
${helpersCode}
  }
};

// Optionally expose globally
window.MenuUtils = MenuUtils;

console.log(${nodeKey} + ' utilities loaded in namespace');
`;
  },

  // Backward-compatible alias
  generateMenuUtilsCode(nodeName, customHelpers = {}) {
    return this.generateLegacyMenuUtilsCode(nodeName, customHelpers);
  },
};
|
||||
|
||||
module.exports = htmlGeneration;
|
||||
18
src/helper/menu/index.js
Normal file
18
src/helper/menu/index.js
Normal file
@@ -0,0 +1,18 @@
|
||||
/**
 * menu/index.js
 * Barrel file for the menu module components.
 * Each require pulls in one group of MenuUtils methods (toggles, data
 * fetching, URL helpers, dropdown population, HTML generation).
 */

const toggles = require('./toggles');
const dataFetching = require('./dataFetching');
const urlUtils = require('./urlUtils');
const dropdownPopulation = require('./dropdownPopulation');
const htmlGeneration = require('./htmlGeneration');

// Re-export the method groups under their module names.
module.exports = {
  toggles,
  dataFetching,
  urlUtils,
  dropdownPopulation,
  htmlGeneration,
};
|
||||
56
src/helper/menu/toggles.js
Normal file
56
src/helper/menu/toggles.js
Normal file
@@ -0,0 +1,56 @@
|
||||
/**
 * Toggle initialization methods for MenuUtils.
 * Controls visibility of UI elements based on checkbox/dropdown state.
 */

const toggles = {
  /** Shows the log-level row only while the logging checkbox is ticked. */
  initBasicToggles(elements) {
    const applyVisibility = (visible) => {
      elements.rowLogLevel.style.display = visible ? "block" : "none";
    };
    elements.logCheckbox.addEventListener("change", function () {
      applyVisibility(this.checked);
    });
    // Apply the initial state immediately.
    applyVisibility(elements.logCheckbox.checked);
  },

  /** Shows the scaling min/max rows only while scaling is enabled. */
  initMeasurementToggles(elements) {
    const applyVisibility = (visible) => {
      const display = visible ? "block" : "none";
      elements.rowInputMin.style.display = display;
      elements.rowInputMax.style.display = display;
    };
    elements.scalingCheckbox.addEventListener("change", function () {
      applyVisibility(this.checked);
    });
    // Apply the initial state immediately.
    applyVisibility(elements.scalingCheckbox.checked);
  },

  /**
   * Shows the tension row only for the monotone cubic spline method and
   * keeps node.interpolationMethod in sync with the dropdown selection.
   */
  initTensionToggles(elements, node) {
    const tensionDisplay = (method) =>
      method === "monotone_cubic_spline" ? "block" : "none";

    elements.rowTension.style.display = tensionDisplay(node.interpolationMethod);
    console.log(
      "Initial tension row display: ",
      elements.rowTension.style.display
    );

    elements.interpolationMethodInput.addEventListener("change", function () {
      const selectedMethod = this.value;
      console.log(`Interpolation method changed: ${selectedMethod}`);
      node.interpolationMethod = selectedMethod;

      elements.rowTension.style.display = tensionDisplay(selectedMethod);
      console.log("Tension row display: ", elements.rowTension.style.display);
    });
  },
};
|
||||
|
||||
module.exports = toggles;
|
||||
39
src/helper/menu/urlUtils.js
Normal file
39
src/helper/menu/urlUtils.js
Normal file
@@ -0,0 +1,39 @@
|
||||
/**
 * URL construction methods for MenuUtils.
 * Helpers for building API and config URLs.
 */

const urlUtils = {
  /**
   * Builds the pair of config URLs for a node: the cloud config JSON and
   * the local Node-RED editor dependency endpoint.
   */
  getSpecificConfigUrl(nodeName, cloudAPI) {
    const cloudConfigURL = cloudAPI + "/config/" + nodeName + ".json";
    const localConfigURL = "http://localhost:1880/"+ nodeName + "/dependencies/"+ nodeName + "/" + nodeName + "Config.json";

    return { cloudConfigURL, localConfigURL };
  },

  /**
   * Joins a base URL and path segments with single slashes (trailing slash
   * stripped from base, leading/trailing slashes from segments) and logs
   * the pieces for debugging.
   */
  constructUrl(base, ...paths) {
    // Remove trailing slash from base and leading slashes from paths
    const sanitizedBase = (base || "").replace(/\/+$/, "");
    const sanitizedPaths = paths.map((path) => path.replace(/^\/+|\/+$/g, ""));

    // Join sanitized base and paths
    const url = `${sanitizedBase}/${sanitizedPaths.join("/")}`;
    console.log("Base:", sanitizedBase);
    console.log("Paths:", sanitizedPaths);
    console.log("Constructed URL:", url);
    return url;
  },

  /**
   * Same slash-normalising join as constructUrl, without the debug logging
   * (intended for API-gateway / cloud URLs).
   * Fix: a nullish base is now treated as "" instead of throwing, matching
   * constructUrl's guard so the two helpers behave consistently.
   */
  constructCloudURL(base, ...paths) {
    // Remove trailing slash from base and leading slashes from paths
    const sanitizedBase = (base || "").replace(/\/+$/, "");
    const sanitizedPaths = paths.map((path) => path.replace(/^\/+|\/+$/g, ""));
    // Join sanitized base and paths
    const url = `${sanitizedBase}/${sanitizedPaths.join("/")}`;
    return url;
  },
};
|
||||
|
||||
module.exports = urlUtils;
|
||||
@@ -1,616 +1,34 @@
|
||||
/**
|
||||
* MenuUtils — UI menu helper for Node-RED editor.
|
||||
* Methods are split across focused modules under ./menu/ and mixed onto the prototype.
|
||||
*/
|
||||
|
||||
const toggles = require('./menu/toggles');
|
||||
const dataFetching = require('./menu/dataFetching');
|
||||
const urlUtils = require('./menu/urlUtils');
|
||||
const dropdownPopulation = require('./menu/dropdownPopulation');
|
||||
const htmlGeneration = require('./menu/htmlGeneration');
|
||||
|
||||
class MenuUtils {
|
||||
|
||||
|
||||
initBasicToggles(elements) {
|
||||
// Toggle visibility for log level
|
||||
elements.logCheckbox.addEventListener("change", function () {
|
||||
elements.rowLogLevel.style.display = this.checked ? "block" : "none";
|
||||
});
|
||||
elements.rowLogLevel.style.display = elements.logCheckbox.checked
|
||||
? "block"
|
||||
: "none";
|
||||
constructor() {
|
||||
this.isCloud = false;
|
||||
this.configData = null;
|
||||
}
|
||||
}
|
||||
|
||||
// Define the initialize toggles function within scope
|
||||
initMeasurementToggles(elements) {
|
||||
// Toggle visibility for scaling inputs
|
||||
elements.scalingCheckbox.addEventListener("change", function () {
|
||||
elements.rowInputMin.style.display = this.checked ? "block" : "none";
|
||||
elements.rowInputMax.style.display = this.checked ? "block" : "none";
|
||||
});
|
||||
|
||||
// Set initial states
|
||||
elements.rowInputMin.style.display = elements.scalingCheckbox.checked
|
||||
? "block"
|
||||
: "none";
|
||||
elements.rowInputMax.style.display = elements.scalingCheckbox.checked
|
||||
? "block"
|
||||
: "none";
|
||||
}
|
||||
|
||||
initTensionToggles(elements, node) {
|
||||
const currentMethod = node.interpolationMethod;
|
||||
elements.rowTension.style.display =
|
||||
currentMethod === "monotone_cubic_spline" ? "block" : "none";
|
||||
console.log(
|
||||
"Initial tension row display: ",
|
||||
elements.rowTension.style.display
|
||||
);
|
||||
|
||||
elements.interpolationMethodInput.addEventListener("change", function () {
|
||||
const selectedMethod = this.value;
|
||||
console.log(`Interpolation method changed: ${selectedMethod}`);
|
||||
node.interpolationMethod = selectedMethod;
|
||||
|
||||
// Toggle visibility for tension input
|
||||
elements.rowTension.style.display =
|
||||
selectedMethod === "monotone_cubic_spline" ? "block" : "none";
|
||||
console.log("Tension row display: ", elements.rowTension.style.display);
|
||||
// Mix all method groups onto the prototype
|
||||
const mixins = [toggles, dataFetching, urlUtils, dropdownPopulation, htmlGeneration];
|
||||
for (const mixin of mixins) {
|
||||
for (const [name, fn] of Object.entries(mixin)) {
|
||||
if (typeof fn === 'function') {
|
||||
Object.defineProperty(MenuUtils.prototype, name, {
|
||||
value: fn,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
});
|
||||
}
|
||||
// Define the smoothing methods population function within scope
|
||||
populateSmoothingMethods(configUrls, elements, node) {
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const smoothingMethods =
|
||||
configData.smoothing?.smoothMethod?.rules?.values?.map(
|
||||
(o) => o.value
|
||||
) || [];
|
||||
this.populateDropdown(
|
||||
elements.smoothMethod,
|
||||
smoothingMethods,
|
||||
node,
|
||||
"smooth_method"
|
||||
);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error("Error loading smoothing methods", err);
|
||||
});
|
||||
}
|
||||
|
||||
populateInterpolationMethods(configUrls, elements, node) {
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const interpolationMethods =
|
||||
configData?.interpolation?.type?.rules?.values.map((m) => m.value) ||
|
||||
[];
|
||||
this.populateDropdown(
|
||||
elements.interpolationMethodInput,
|
||||
interpolationMethods,
|
||||
node,
|
||||
"interpolationMethod"
|
||||
);
|
||||
|
||||
// Find the selected method and use it to spawn 1 more field to fill in tension
|
||||
//const selectedMethod = interpolationMethods.find(m => m === node.interpolationMethod);
|
||||
this.initTensionToggles(elements, node);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error("Error loading interpolation methods", err);
|
||||
});
|
||||
}
|
||||
|
||||
populateLogLevelOptions(logLevelSelect, configData, node) {
|
||||
// debug log level
|
||||
//console.log("Displaying configData => ", configData) ;
|
||||
|
||||
const logLevels =
|
||||
configData?.general?.logging?.logLevel?.rules?.values?.map(
|
||||
(l) => l.value
|
||||
) || [];
|
||||
|
||||
//console.log("Displaying logLevels => ", logLevels);
|
||||
|
||||
// Reuse your existing generic populateDropdown helper
|
||||
this.populateDropdown(logLevelSelect, logLevels, node.logLevel);
|
||||
}
|
||||
|
||||
//cascade dropdowns for asset type, supplier, subType, model, unit
|
||||
fetchAndPopulateDropdowns(configUrls, elements, node) {
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const assetType = configData.asset?.type?.default;
|
||||
const localSuppliersUrl = this.constructUrl(configUrls.local.taggcodeAPI,`${assetType}s`,"suppliers.json");
|
||||
const cloudSuppliersUrl = this.constructCloudURL(configUrls.cloud.taggcodeAPI, "/vendor/get_vendors.php");
|
||||
|
||||
return this.fetchData(cloudSuppliersUrl, localSuppliersUrl)
|
||||
.then((supplierData) => {
|
||||
|
||||
const suppliers = supplierData.map((supplier) => supplier.name);
|
||||
|
||||
// Populate suppliers dropdown and set up its change handler
|
||||
return this.populateDropdown(
|
||||
elements.supplier,
|
||||
suppliers,
|
||||
node,
|
||||
"supplier",
|
||||
function (selectedSupplier) {
|
||||
if (selectedSupplier) {
|
||||
this.populateSubTypes(configUrls, elements, node, selectedSupplier);
|
||||
}
|
||||
}
|
||||
);
|
||||
})
|
||||
.then(() => {
|
||||
// If we have a saved supplier, trigger subTypes population
|
||||
if (node.supplier) {
|
||||
this.populateSubTypes(configUrls, elements, node, node.supplier);
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error in initial dropdown population:", error);
|
||||
});
|
||||
}
|
||||
|
||||
getSpecificConfigUrl(nodeName,cloudAPI) {
|
||||
|
||||
const cloudConfigURL = cloudAPI + "/config/" + nodeName + ".json";
|
||||
const localConfigURL = "http://localhost:1880/"+ nodeName + "/dependencies/"+ nodeName + "/" + nodeName + "Config.json";
|
||||
|
||||
return { cloudConfigURL, localConfigURL };
|
||||
|
||||
}
|
||||
|
||||
// Save changes to API
|
||||
async apiCall(node) {
|
||||
try{
|
||||
// OLFIANT when a browser refreshes the tag code is lost!!! fix this later!!!!!
|
||||
// FIX UUID ALSO LATER
|
||||
|
||||
if(node.assetTagCode !== "" || node.assetTagCode !== null){ }
|
||||
// API call to register or check asset in central database
|
||||
let assetregisterAPI = node.configUrls.cloud.taggcodeAPI + "/asset/create_asset.php";
|
||||
|
||||
const assetModelId = node.modelMetadata.id; //asset_product_model_id
|
||||
const uuid = node.uuid; //asset_product_model_uuid
|
||||
const assetName = node.assetType; //asset_name / type?
|
||||
const description = node.name; // asset_description
|
||||
const assetStatus = "actief"; //asset_status -> koppel aan enable / disable node ? or make dropdown ?
|
||||
const assetProfileId = 1; //asset_profile_id these are the rules to check if the childs are valid under this node (parent / child id?)
|
||||
const child_assets = ["63247"]; //child_assets tagnummer of id?
|
||||
const assetProcessId = node.processId; //asset_process_id
|
||||
const assetLocationId = node.locationId; //asset_location_id
|
||||
const tagCode = node.assetTagCode; // if already exists in the node information use it to tell the api it exists and it will update else we will get it from the api call
|
||||
//console.log(`this is my tagCode: ${tagCode}`);
|
||||
|
||||
// Build base URL with required parameters
|
||||
let apiUrl = `?asset_product_model_id=${assetModelId}&asset_product_model_uuid=${uuid}&asset_name=${assetName}&asset_description=${description}&asset_status=${assetStatus}&asset_profile_id=${assetProfileId}&asset_location_id=${assetLocationId}&asset_process_id=${assetProcessId}&child_assets=${child_assets}`;
|
||||
|
||||
// Only add tagCode to URL if it exists
|
||||
if (tagCode) {
|
||||
apiUrl += `&asset_tag_number=${tagCode}`;
|
||||
}
|
||||
|
||||
assetregisterAPI += apiUrl;
|
||||
console.log("API call to register asset in central database", assetregisterAPI);
|
||||
|
||||
const response = await fetch(assetregisterAPI, {
|
||||
method: "POST"
|
||||
});
|
||||
|
||||
// Get the response text first
|
||||
const responseText = await response.text();
|
||||
console.log("Raw API response:", responseText);
|
||||
|
||||
// Try to parse the JSON, handling potential parsing errors
|
||||
let jsonResponse;
|
||||
try {
|
||||
jsonResponse = JSON.parse(responseText);
|
||||
} catch (parseError) {
|
||||
console.error("JSON Parsing Error:", parseError);
|
||||
console.error("Response that could not be parsed:", responseText);
|
||||
throw new Error("Failed to parse API response");
|
||||
}
|
||||
|
||||
console.log(jsonResponse);
|
||||
|
||||
if(jsonResponse.success){
|
||||
console.log(`${jsonResponse.message}, tag number: ${jsonResponse.asset_tag_number}, asset id: ${jsonResponse.asset_id}`);
|
||||
// Save the asset tag number and id to the node
|
||||
} else {
|
||||
console.log("Asset not registered in central database");
|
||||
}
|
||||
return jsonResponse;
|
||||
|
||||
} catch (error) {
|
||||
console.log("Error saving changes to asset register API", error);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
 * Fetches JSON from `url`, falling back to `fallbackUrl` when the primary
 * request fails or returns a non-2xx status. Returns [] when both fail.
 *
 * NOTE(review): the two paths return different shapes — the primary path
 * unwraps `.data` from the response envelope (cloud API style), while the
 * fallback returns the parsed JSON as-is (local file style). Callers
 * appear to rely on this asymmetry; confirm before unifying.
 */
async fetchData(url, fallbackUrl) {
  try {
    const response = await fetch(url);
    if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
    const responsData = await response.json();
    // Cloud responses wrap the payload in a `data` property.
    const data = responsData.data;
    console.log(url);
    console.log("Response Data: ", data);
    return data;

  } catch (err) {
    console.warn(
      `Primary URL failed: ${url}. Trying fallback URL: ${fallbackUrl}`,
      err
    );
    try {
      const response = await fetch(fallbackUrl);
      if (!response.ok)
        throw new Error(`HTTP error! status: ${response.status}`);
      return await response.json();
    } catch (fallbackErr) {
      console.error("Both primary and fallback URLs failed:", fallbackErr);
      return [];
    }
  }
}
|
||||
|
||||
async fetchProjectData(url) {
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
|
||||
const responsData = await response.json();
|
||||
console.log("Response Data: ", responsData);
|
||||
return responsData;
|
||||
|
||||
} catch (err) {
|
||||
}
|
||||
}
|
||||
|
||||
async populateDropdown(
|
||||
htmlElement,
|
||||
options,
|
||||
node,
|
||||
property,
|
||||
callback
|
||||
) {
|
||||
this.generateHtml(htmlElement, options, node[property]);
|
||||
|
||||
htmlElement.addEventListener("change", async (e) => {
|
||||
const newValue = e.target.value;
|
||||
console.log(`Dropdown changed: ${property} = ${newValue}`);
|
||||
node[property] = newValue;
|
||||
|
||||
RED.nodes.dirty(true);
|
||||
if (callback) await callback(newValue); // Ensure async callback completion
|
||||
});
|
||||
}
|
||||
|
||||
// Helper function to construct a URL from a base and path internal
|
||||
constructUrl(base, ...paths) {
|
||||
|
||||
// Remove trailing slash from base and leading slashes from paths
|
||||
const sanitizedBase = (base || "").replace(/\/+$/, "");
|
||||
const sanitizedPaths = paths.map((path) => path.replace(/^\/+|\/+$/g, ""));
|
||||
|
||||
// Join sanitized base and paths
|
||||
const url = `${sanitizedBase}/${sanitizedPaths.join("/")}`;
|
||||
console.log("Base:", sanitizedBase);
|
||||
console.log("Paths:", sanitizedPaths);
|
||||
console.log("Constructed URL:", url);
|
||||
return url;
|
||||
}
|
||||
|
||||
//Adjust for API Gateway
|
||||
constructCloudURL(base, ...paths) {
|
||||
// Remove trailing slash from base and leading slashes from paths
|
||||
const sanitizedBase = base.replace(/\/+$/, "");
|
||||
const sanitizedPaths = paths.map((path) => path.replace(/^\/+|\/+$/g, ""));
|
||||
// Join sanitized base and paths
|
||||
const url = `${sanitizedBase}/${sanitizedPaths.join("/")}`;
|
||||
return url;
|
||||
}
|
||||
|
||||
populateSubTypes(configUrls, elements, node, selectedSupplier) {
|
||||
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const assetType = configData.asset?.type?.default;
|
||||
const supplierFolder = this.constructUrl( configUrls.local.taggcodeAPI, `${assetType}s`, selectedSupplier );
|
||||
|
||||
const localSubTypesUrl = this.constructUrl(supplierFolder, "subtypes.json");
|
||||
const cloudSubTypesUrl = this.constructCloudURL(configUrls.cloud.taggcodeAPI, "/product/get_subtypesFromVendor.php?vendor_name=" + selectedSupplier);
|
||||
|
||||
return this.fetchData(cloudSubTypesUrl, localSubTypesUrl)
|
||||
.then((subTypeData) => {
|
||||
const subTypes = subTypeData.map((subType) => subType.name);
|
||||
|
||||
return this.populateDropdown(
|
||||
elements.subType,
|
||||
subTypes,
|
||||
node,
|
||||
"subType",
|
||||
function (selectedSubType) {
|
||||
if (selectedSubType) {
|
||||
// When subType changes, update both models and units
|
||||
this.populateModels(
|
||||
configUrls,
|
||||
elements,
|
||||
node,
|
||||
selectedSupplier,
|
||||
selectedSubType
|
||||
);
|
||||
this.populateUnitsForSubType(
|
||||
configUrls,
|
||||
elements,
|
||||
node,
|
||||
selectedSubType
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
})
|
||||
.then(() => {
|
||||
// If we have a saved subType, trigger both models and units population
|
||||
if (node.subType) {
|
||||
this.populateModels(
|
||||
configUrls,
|
||||
elements,
|
||||
node,
|
||||
selectedSupplier,
|
||||
node.subType
|
||||
);
|
||||
this.populateUnitsForSubType(configUrls, elements, node, node.subType);
|
||||
}
|
||||
//console.log("In fetch part of subtypes ");
|
||||
// Store all data from selected model
|
||||
/* node["modelMetadata"] = modelData.find(
|
||||
(model) => model.name === node.model
|
||||
);
|
||||
console.log("Model Metadata: ", node["modelMetadata"]); */
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error populating subtypes:", error);
|
||||
});
|
||||
}
|
||||
|
||||
populateUnitsForSubType(configUrls, elements, node, selectedSubType) {
|
||||
// Fetch the units data
|
||||
this.fetchData(configUrls.cloud.units, configUrls.local.units)
|
||||
.then((unitsData) => {
|
||||
// Find the category that matches the subType name
|
||||
const categoryData = unitsData.units.find(
|
||||
(category) =>
|
||||
category.category.toLowerCase() === selectedSubType.toLowerCase()
|
||||
);
|
||||
|
||||
if (categoryData) {
|
||||
// Extract just the unit values and descriptions
|
||||
const units = categoryData.values.map((unit) => ({
|
||||
value: unit.value,
|
||||
description: unit.description,
|
||||
}));
|
||||
|
||||
// Create the options array with descriptions as labels
|
||||
const options = units.map((unit) => ({
|
||||
value: unit.value,
|
||||
label: `${unit.value} - ${unit.description}`,
|
||||
}));
|
||||
|
||||
// Populate the units dropdown
|
||||
this.populateDropdown(
|
||||
elements.unit,
|
||||
options.map((opt) => opt.value),
|
||||
node,
|
||||
"unit"
|
||||
);
|
||||
|
||||
// If there's no currently selected unit but we have options, select the first one
|
||||
if (!node.unit && options.length > 0) {
|
||||
node.unit = options[0].value;
|
||||
elements.unit.value = options[0].value;
|
||||
}
|
||||
} else {
|
||||
// If no matching category is found, provide a default % option
|
||||
const defaultUnits = [{ value: "%", description: "Percentage" }];
|
||||
this.populateDropdown(
|
||||
elements.unit,
|
||||
defaultUnits.map((unit) => unit.value),
|
||||
node,
|
||||
"unit"
|
||||
);
|
||||
console.warn(
|
||||
`No matching unit category found for subType: ${selectedSubType}`
|
||||
);
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error fetching units:", error);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Populates the model dropdown for the given supplier/sub-type pair and
 * keeps node.modelMetadata in sync with the selected model.
 * Flow: read assetType from the node config, build the local folder URL
 * and the cloud PHP endpoint URL, fetch the model list (cloud first,
 * local fallback), then render the dropdown.
 */
populateModels(
  configUrls,
  elements,
  node,
  selectedSupplier,
  selectedSubType
) {

  this.fetchData(configUrls.cloud.config, configUrls.local.config)
    .then((configData) => {
      const assetType = configData.asset?.type?.default;
      // save assetType on the node so later calls (e.g. apiCall) can use it
      node.assetType = assetType;

      const supplierFolder = this.constructUrl( configUrls.local.taggcodeAPI,`${assetType}s`,selectedSupplier);
      const subTypeFolder = this.constructUrl(supplierFolder, selectedSubType);
      const localModelsUrl = this.constructUrl(subTypeFolder, "models.json");
      const cloudModelsUrl = this.constructCloudURL(configUrls.cloud.taggcodeAPI, "/product/get_product_models.php?vendor_name=" + selectedSupplier + "&product_subtype_name=" + selectedSubType);

      return this.fetchData(cloudModelsUrl, localModelsUrl).then((modelData) => {
        const models = modelData.map((model) => model.name); // use this to populate the dropdown

        // If a model is already selected, store its metadata immediately
        if (node.model) {
          node["modelMetadata"] = modelData.find((model) => model.name === node.model);
        }

        this.populateDropdown(elements.model, models, node, "model", (selectedModel) => {
          // Store only the metadata for the selected model
          node["modelMetadata"] = modelData.find((model) => model.name === selectedModel);
        });
      });

    })
    .catch((error) => {
      console.error("Error populating models:", error);
    });
}
|
||||
|
||||
generateHtml(htmlElement, options, savedValue) {
|
||||
htmlElement.innerHTML = options.length
|
||||
? `<option value="">Select...</option>${options
|
||||
.map((opt) => `<option value="${opt}">${opt}</option>`)
|
||||
.join("")}`
|
||||
: "<option value=''>No options available</option>";
|
||||
|
||||
if (savedValue && options.includes(savedValue)) {
|
||||
htmlElement.value = savedValue;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Registers three Node-RED admin HTTP endpoints that serve this node's
 * menu utilities to the editor under /<nodeName>/resources:
 *   menuUtilsData.json  — helper sources + options as JSON
 *   menuUtils.legacy.js — full generated browser bundle
 *   menuUtils.js        — small stable bootstrap that fetches the above
 * Handlers are bound to `this` so the generator methods resolve.
 */
createMenuUtilsEndpoint(RED, nodeName, customHelpers = {}, options = {}) {
  const basePath = `/${nodeName}/resources`;

  RED.httpAdmin.get(`${basePath}/menuUtilsData.json`, function(req, res) {
    res.json(this.generateMenuUtilsData(nodeName, customHelpers, options));
  }.bind(this));

  RED.httpAdmin.get(`${basePath}/menuUtils.legacy.js`, function(req, res) {
    res.set('Content-Type', 'application/javascript');
    const browserCode = this.generateLegacyMenuUtilsCode(nodeName, customHelpers);
    res.send(browserCode);
  }.bind(this));

  RED.httpAdmin.get(`${basePath}/menuUtils.js`, function(req, res) {
    res.set('Content-Type', 'application/javascript');
    res.send(this.generateMenuUtilsBootstrap(nodeName));
  }.bind(this));
}
|
||||
|
||||
generateMenuUtilsData(nodeName, customHelpers = {}, options = {}) {
|
||||
const defaultHelpers = {
|
||||
validateRequired: `function(value) {
|
||||
return value && value.toString().trim() !== '';
|
||||
}`,
|
||||
formatDisplayValue: `function(value, unit) {
|
||||
return \`\${value} \${unit || ''}\`.trim();
|
||||
}`
|
||||
};
|
||||
|
||||
return {
|
||||
nodeName,
|
||||
helpers: { ...defaultHelpers, ...customHelpers },
|
||||
options: {
|
||||
autoLoadLegacy: options.autoLoadLegacy !== false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Returns the stable browser-side bootstrap script for one node type.
 *
 * The emitted script: fetches menuUtilsData.json, revives each helper
 * source string with `new Function`, optionally lazy-loads the legacy
 * bundle (payload.options.autoLoadLegacy), and installs everything under
 * window.EVOLV.nodes[nodeName].utils.
 *
 * NOTE(review): `new Function` on server-supplied helper strings is by
 * design here (the script is served from an admin endpoint), but confirm
 * the endpoint is admin-only before exposing further.
 *
 * @param {string} nodeName - node type; embedded via JSON.stringify
 * @returns {string} JavaScript source to be served as menuUtils.js
 */
generateMenuUtilsBootstrap(nodeName) {
  return `
// Stable bootstrap for EVOLV menu utils (${nodeName})
(function() {
  const nodeName = ${JSON.stringify(nodeName)};
  const basePath = '/' + nodeName + '/resources';

  window.EVOLV = window.EVOLV || {};
  window.EVOLV.nodes = window.EVOLV.nodes || {};
  window.EVOLV.nodes[nodeName] = window.EVOLV.nodes[nodeName] || {};
  window.EVOLV.nodes[nodeName].utils = window.EVOLV.nodes[nodeName].utils || {};

  function parseHelper(fnBody) {
    try {
      return (new Function('return (' + fnBody + ')'))();
    } catch (error) {
      console.error('[menuUtils] helper parse failed:', error);
      return function() { return null; };
    }
  }

  function loadLegacyIfNeeded(autoLoadLegacy) {
    if (!autoLoadLegacy || typeof window.MenuUtils === 'function') return Promise.resolve();
    return new Promise((resolve, reject) => {
      const script = document.createElement('script');
      script.src = basePath + '/menuUtils.legacy.js';
      script.onload = resolve;
      script.onerror = reject;
      document.head.appendChild(script);
    });
  }

  fetch(basePath + '/menuUtilsData.json')
    .then(function(res) { return res.json(); })
    .then(function(payload) {
      const helperFns = {};
      Object.entries(payload.helpers || {}).forEach(function(entry) {
        helperFns[entry[0]] = parseHelper(entry[1]);
      });
      window.EVOLV.nodes[nodeName].utils.helpers = helperFns;
      return loadLegacyIfNeeded(payload.options && payload.options.autoLoadLegacy);
    })
    .then(function() {
      if (typeof window.MenuUtils === 'function' && !window.EVOLV.nodes[nodeName].utils.menuUtils) {
        window.EVOLV.nodes[nodeName].utils.menuUtils = new window.MenuUtils();
      }
    })
    .catch(function(error) {
      console.error('[menuUtils] bootstrap failed for ' + nodeName, error);
    });
})();
`;
}
|
||||
|
||||
generateLegacyMenuUtilsCode(nodeName, customHelpers = {}) {
|
||||
const allHelpers = { ...this.generateMenuUtilsData(nodeName).helpers, ...customHelpers };
|
||||
|
||||
const helpersCode = Object.entries(allHelpers)
|
||||
.map(([name, func]) => ` ${name}: ${func}`)
|
||||
.join(',\n');
|
||||
|
||||
const classCode = MenuUtils.toString(); // <-- this gives full class MenuUtils {...}
|
||||
|
||||
return `
|
||||
// Create EVOLV namespace structure
|
||||
window.EVOLV = window.EVOLV || {};
|
||||
window.EVOLV.nodes = window.EVOLV.nodes || {};
|
||||
window.EVOLV.nodes.${nodeName} = window.EVOLV.nodes.${nodeName} || {};
|
||||
|
||||
// Inject MenuUtils class
|
||||
${classCode}
|
||||
|
||||
// Expose MenuUtils instance to namespace
|
||||
window.EVOLV.nodes.${nodeName}.utils = {
|
||||
menuUtils: new MenuUtils(),
|
||||
|
||||
helpers: {
|
||||
${helpersCode}
|
||||
}
|
||||
};
|
||||
|
||||
// Optionally expose globally
|
||||
window.MenuUtils = MenuUtils;
|
||||
|
||||
console.log('${nodeName} utilities loaded in namespace');
|
||||
`;
|
||||
}
|
||||
|
||||
// Backward-compatible alias
|
||||
/**
 * Backward-compatible alias for generateLegacyMenuUtilsCode(); kept for
 * callers that predate the legacy/bootstrap split. Delegates unchanged.
 *
 * @param {string} nodeName
 * @param {object} [customHelpers]
 * @returns {string} legacy browser bundle source
 */
generateMenuUtilsCode(nodeName, customHelpers = {}) {
  return this.generateLegacyMenuUtilsCode(nodeName, customHelpers);
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = MenuUtils;
|
||||
|
||||
@@ -1,539 +0,0 @@
|
||||
class MenuUtils {
|
||||
|
||||
|
||||
initBasicToggles(elements) {
|
||||
// Toggle visibility for log level
|
||||
elements.logCheckbox.addEventListener("change", function () {
|
||||
elements.rowLogLevel.style.display = this.checked ? "block" : "none";
|
||||
});
|
||||
elements.rowLogLevel.style.display = elements.logCheckbox.checked
|
||||
? "block"
|
||||
: "none";
|
||||
}
|
||||
|
||||
// Define the initialize toggles function within scope
|
||||
initMeasurementToggles(elements) {
|
||||
// Toggle visibility for scaling inputs
|
||||
elements.scalingCheckbox.addEventListener("change", function () {
|
||||
elements.rowInputMin.style.display = this.checked ? "block" : "none";
|
||||
elements.rowInputMax.style.display = this.checked ? "block" : "none";
|
||||
});
|
||||
|
||||
// Set initial states
|
||||
elements.rowInputMin.style.display = elements.scalingCheckbox.checked
|
||||
? "block"
|
||||
: "none";
|
||||
elements.rowInputMax.style.display = elements.scalingCheckbox.checked
|
||||
? "block"
|
||||
: "none";
|
||||
}
|
||||
|
||||
/**
 * Shows the tension row only when the "monotone_cubic_spline"
 * interpolation method is selected, and keeps node.interpolationMethod in
 * sync with the dropdown.
 *
 * @param {object} elements - edit-dialog DOM references
 * @param {object} node - Node-RED node being edited
 */
initTensionToggles(elements, node) {
  // Apply the saved method's visibility when the dialog opens.
  const currentMethod = node.interpolationMethod;
  elements.rowTension.style.display =
    currentMethod === "monotone_cubic_spline" ? "block" : "none";
  console.log(
    "Initial tension row display: ",
    elements.rowTension.style.display
  );

  // Plain `function` on purpose: inside the listener `this` is the <select>.
  elements.interpolationMethodInput.addEventListener("change", function () {
    const selectedMethod = this.value;
    console.log(`Interpolation method changed: ${selectedMethod}`);
    node.interpolationMethod = selectedMethod;

    // Toggle visibility for tension input
    elements.rowTension.style.display =
      selectedMethod === "monotone_cubic_spline" ? "block" : "none";
    console.log("Tension row display: ", elements.rowTension.style.display);
  });
}
|
||||
// Define the smoothing methods population function within scope
|
||||
populateSmoothingMethods(configUrls, elements, node) {
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const smoothingMethods =
|
||||
configData.smoothing?.smoothMethod?.rules?.values?.map(
|
||||
(o) => o.value
|
||||
) || [];
|
||||
this.populateDropdown(
|
||||
elements.smoothMethod,
|
||||
smoothingMethods,
|
||||
node,
|
||||
"smooth_method"
|
||||
);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error("Error loading smoothing methods", err);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Loads the interpolation-method choices from config, fills the dropdown,
 * and wires the tension-row toggle that depends on the chosen method.
 *
 * NOTE(review): `rules?.values.map` lacks `?.` before `.map`, so a config
 * with `rules` present but no `values` throws (caught by the .catch below)
 * — confirm whether that shape can occur.
 *
 * @param {object} configUrls - cloud/local config endpoints
 * @param {object} elements - edit-dialog DOM references
 * @param {object} node - Node-RED node being edited
 */
populateInterpolationMethods(configUrls, elements, node) {
  this.fetchData(configUrls.cloud.config, configUrls.local.config)
    .then((configData) => {
      const interpolationMethods =
        configData?.interpolation?.type?.rules?.values.map((m) => m.value) ||
        [];
      this.populateDropdown(
        elements.interpolationMethodInput,
        interpolationMethods,
        node,
        "interpolationMethod"
      );

      // Find the selected method and use it to spawn 1 more field to fill in tension
      //const selectedMethod = interpolationMethods.find(m => m === node.interpolationMethod);
      this.initTensionToggles(elements, node);
    })
    .catch((err) => {
      console.error("Error loading interpolation methods", err);
    });
}
|
||||
|
||||
populateLogLevelOptions(logLevelSelect, configData, node) {
|
||||
// debug log level
|
||||
//console.log("Displaying configData => ", configData) ;
|
||||
|
||||
const logLevels =
|
||||
configData?.general?.logging?.logLevel?.rules?.values?.map(
|
||||
(l) => l.value
|
||||
) || [];
|
||||
|
||||
//console.log("Displaying logLevels => ", logLevels);
|
||||
|
||||
// Reuse your existing generic populateDropdown helper
|
||||
this.populateDropdown(logLevelSelect, logLevels, node.logLevel);
|
||||
}
|
||||
|
||||
//cascade dropdowns for asset type, supplier, subType, model, unit
|
||||
fetchAndPopulateDropdowns(configUrls, elements, node) {
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const assetType = configData.asset?.type?.default;
|
||||
const localSuppliersUrl = this.constructUrl(configUrls.local.taggcodeAPI,`${assetType}s`,"suppliers.json");
|
||||
const cloudSuppliersUrl = this.constructCloudURL(configUrls.cloud.taggcodeAPI, "/vendor/get_vendors.php");
|
||||
|
||||
return this.fetchData(cloudSuppliersUrl, localSuppliersUrl)
|
||||
.then((supplierData) => {
|
||||
|
||||
const suppliers = supplierData.map((supplier) => supplier.name);
|
||||
|
||||
// Populate suppliers dropdown and set up its change handler
|
||||
return this.populateDropdown(
|
||||
elements.supplier,
|
||||
suppliers,
|
||||
node,
|
||||
"supplier",
|
||||
function (selectedSupplier) {
|
||||
if (selectedSupplier) {
|
||||
this.populateSubTypes(configUrls, elements, node, selectedSupplier);
|
||||
}
|
||||
}
|
||||
);
|
||||
})
|
||||
.then(() => {
|
||||
// If we have a saved supplier, trigger subTypes population
|
||||
if (node.supplier) {
|
||||
this.populateSubTypes(configUrls, elements, node, node.supplier);
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error in initial dropdown population:", error);
|
||||
});
|
||||
}
|
||||
|
||||
getSpecificConfigUrl(nodeName,cloudAPI) {
|
||||
|
||||
const cloudConfigURL = cloudAPI + "/config/" + nodeName + ".json";
|
||||
const localConfigURL = "http://localhost:1880/"+ nodeName + "/dependencies/"+ nodeName + "/" + nodeName + "Config.json";
|
||||
|
||||
return { cloudConfigURL, localConfigURL };
|
||||
|
||||
}
|
||||
|
||||
// Save changes to API
|
||||
async apiCall(node) {
|
||||
try{
|
||||
// OLFIANT when a browser refreshes the tag code is lost!!! fix this later!!!!!
|
||||
// FIX UUID ALSO LATER
|
||||
|
||||
if(node.assetTagCode !== "" || node.assetTagCode !== null){ }
|
||||
// API call to register or check asset in central database
|
||||
let assetregisterAPI = node.configUrls.cloud.taggcodeAPI + "/asset/create_asset.php";
|
||||
|
||||
const assetModelId = node.modelMetadata.id; //asset_product_model_id
|
||||
const uuid = node.uuid; //asset_product_model_uuid
|
||||
const assetName = node.assetType; //asset_name / type?
|
||||
const description = node.name; // asset_description
|
||||
const assetStatus = "actief"; //asset_status -> koppel aan enable / disable node ? or make dropdown ?
|
||||
const assetProfileId = 1; //asset_profile_id these are the rules to check if the childs are valid under this node (parent / child id?)
|
||||
const child_assets = ["63247"]; //child_assets tagnummer of id?
|
||||
const assetProcessId = node.processId; //asset_process_id
|
||||
const assetLocationId = node.locationId; //asset_location_id
|
||||
const tagCode = node.assetTagCode; // if already exists in the node information use it to tell the api it exists and it will update else we will get it from the api call
|
||||
//console.log(`this is my tagCode: ${tagCode}`);
|
||||
|
||||
// Build base URL with required parameters
|
||||
let apiUrl = `?asset_product_model_id=${assetModelId}&asset_product_model_uuid=${uuid}&asset_name=${assetName}&asset_description=${description}&asset_status=${assetStatus}&asset_profile_id=${assetProfileId}&asset_location_id=${assetLocationId}&asset_process_id=${assetProcessId}&child_assets=${child_assets}`;
|
||||
|
||||
// Only add tagCode to URL if it exists
|
||||
if (tagCode) {
|
||||
apiUrl += `&asset_tag_number=${tagCode}`;
|
||||
}
|
||||
|
||||
assetregisterAPI += apiUrl;
|
||||
console.log("API call to register asset in central database", assetregisterAPI);
|
||||
|
||||
const response = await fetch(assetregisterAPI, {
|
||||
method: "POST"
|
||||
});
|
||||
|
||||
// Get the response text first
|
||||
const responseText = await response.text();
|
||||
console.log("Raw API response:", responseText);
|
||||
|
||||
// Try to parse the JSON, handling potential parsing errors
|
||||
let jsonResponse;
|
||||
try {
|
||||
jsonResponse = JSON.parse(responseText);
|
||||
} catch (parseError) {
|
||||
console.error("JSON Parsing Error:", parseError);
|
||||
console.error("Response that could not be parsed:", responseText);
|
||||
throw new Error("Failed to parse API response");
|
||||
}
|
||||
|
||||
console.log(jsonResponse);
|
||||
|
||||
if(jsonResponse.success){
|
||||
console.log(`${jsonResponse.message}, tag number: ${jsonResponse.asset_tag_number}, asset id: ${jsonResponse.asset_id}`);
|
||||
// Save the asset tag number and id to the node
|
||||
} else {
|
||||
console.log("Asset not registered in central database");
|
||||
}
|
||||
return jsonResponse;
|
||||
|
||||
} catch (error) {
|
||||
console.log("Error saving changes to asset register API", error);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
 * Fetches JSON from the primary (cloud) URL, falling back to fallbackUrl.
 *
 * NOTE(review): the primary branch unwraps `responsData.data` (the cloud
 * API envelope) while the fallback returns the parsed body as-is —
 * presumably local fixture files are already raw arrays; confirm callers
 * expect this asymmetry.
 *
 * Resolves to [] when both attempts fail — this method never rejects.
 *
 * @param {string} url - primary (cloud) endpoint
 * @param {string} fallbackUrl - local fallback endpoint
 * @returns {Promise<any>} unwrapped data, raw fallback body, or []
 */
async fetchData(url, fallbackUrl) {
  try {
    const response = await fetch(url);
    if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
    const responsData = await response.json();
    //responsData
    const data = responsData.data;
    /* .map(item => {
      const { vendor_name, ...rest } = item;
      return {
        name: vendor_name,
        ...rest
      };
    }); */
    console.log(url);
    console.log("Response Data: ", data);
    return data;

  } catch (err) {
    console.warn(
      `Primary URL failed: ${url}. Trying fallback URL: ${fallbackUrl}`,
      err
    );
    try {
      const response = await fetch(fallbackUrl);
      if (!response.ok)
        throw new Error(`HTTP error! status: ${response.status}`);
      return await response.json();
    } catch (fallbackErr) {
      console.error("Both primary and fallback URLs failed:", fallbackErr);
      return [];
    }
  }
}
|
||||
|
||||
async fetchProjectData(url) {
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
|
||||
const responsData = await response.json();
|
||||
console.log("Response Data: ", responsData);
|
||||
return responsData;
|
||||
|
||||
} catch (err) {
|
||||
}
|
||||
}
|
||||
|
||||
async populateDropdown(
|
||||
htmlElement,
|
||||
options,
|
||||
node,
|
||||
property,
|
||||
callback
|
||||
) {
|
||||
this.generateHtml(htmlElement, options, node[property]);
|
||||
|
||||
htmlElement.addEventListener("change", async (e) => {
|
||||
const newValue = e.target.value;
|
||||
console.log(`Dropdown changed: ${property} = ${newValue}`);
|
||||
node[property] = newValue;
|
||||
|
||||
RED.nodes.dirty(true);
|
||||
if (callback) await callback(newValue); // Ensure async callback completion
|
||||
});
|
||||
}
|
||||
|
||||
// Helper function to construct a URL from a base and path internal
|
||||
constructUrl(base, ...paths) {
|
||||
|
||||
// Remove trailing slash from base and leading slashes from paths
|
||||
const sanitizedBase = (base || "").replace(/\/+$/, "");
|
||||
const sanitizedPaths = paths.map((path) => path.replace(/^\/+|\/+$/g, ""));
|
||||
|
||||
// Join sanitized base and paths
|
||||
const url = `${sanitizedBase}/${sanitizedPaths.join("/")}`;
|
||||
console.log("Base:", sanitizedBase);
|
||||
console.log("Paths:", sanitizedPaths);
|
||||
console.log("Constructed URL:", url);
|
||||
return url;
|
||||
}
|
||||
|
||||
//Adjust for API Gateway
|
||||
constructCloudURL(base, ...paths) {
|
||||
// Remove trailing slash from base and leading slashes from paths
|
||||
const sanitizedBase = base.replace(/\/+$/, "");
|
||||
const sanitizedPaths = paths.map((path) => path.replace(/^\/+|\/+$/g, ""));
|
||||
// Join sanitized base and paths
|
||||
const url = `${sanitizedBase}/${sanitizedPaths.join("/")}`;
|
||||
return url;
|
||||
}
|
||||
|
||||
populateSubTypes(configUrls, elements, node, selectedSupplier) {
|
||||
|
||||
this.fetchData(configUrls.cloud.config, configUrls.local.config)
|
||||
.then((configData) => {
|
||||
const assetType = configData.asset?.type?.default;
|
||||
const supplierFolder = this.constructUrl( configUrls.local.taggcodeAPI, `${assetType}s`, selectedSupplier );
|
||||
|
||||
const localSubTypesUrl = this.constructUrl(supplierFolder, "subtypes.json");
|
||||
const cloudSubTypesUrl = this.constructCloudURL(configUrls.cloud.taggcodeAPI, "/product/get_subtypesFromVendor.php?vendor_name=" + selectedSupplier);
|
||||
|
||||
return this.fetchData(cloudSubTypesUrl, localSubTypesUrl)
|
||||
.then((subTypeData) => {
|
||||
const subTypes = subTypeData.map((subType) => subType.name);
|
||||
|
||||
return this.populateDropdown(
|
||||
elements.subType,
|
||||
subTypes,
|
||||
node,
|
||||
"subType",
|
||||
function (selectedSubType) {
|
||||
if (selectedSubType) {
|
||||
// When subType changes, update both models and units
|
||||
this.populateModels(
|
||||
configUrls,
|
||||
elements,
|
||||
node,
|
||||
selectedSupplier,
|
||||
selectedSubType
|
||||
);
|
||||
this.populateUnitsForSubType(
|
||||
configUrls,
|
||||
elements,
|
||||
node,
|
||||
selectedSubType
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
})
|
||||
.then(() => {
|
||||
// If we have a saved subType, trigger both models and units population
|
||||
if (node.subType) {
|
||||
this.populateModels(
|
||||
configUrls,
|
||||
elements,
|
||||
node,
|
||||
selectedSupplier,
|
||||
node.subType
|
||||
);
|
||||
this.populateUnitsForSubType(configUrls, elements, node, node.subType);
|
||||
}
|
||||
//console.log("In fetch part of subtypes ");
|
||||
// Store all data from selected model
|
||||
/* node["modelMetadata"] = modelData.find(
|
||||
(model) => model.name === node.model
|
||||
);
|
||||
console.log("Model Metadata: ", node["modelMetadata"]); */
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error populating subtypes:", error);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Fills the unit dropdown with the units belonging to the selected
 * subType's category; falls back to a single '%' option when no matching
 * category exists.
 *
 * @param {object} configUrls - cloud/local endpoint map (uses .units)
 * @param {object} elements - edit-dialog DOM references
 * @param {object} node - Node-RED node being edited
 * @param {string} selectedSubType - category name to match (case-insensitive)
 */
populateUnitsForSubType(configUrls, elements, node, selectedSubType) {
  // Fetch the units data
  this.fetchData(configUrls.cloud.units, configUrls.local.units)
    .then((unitsData) => {
      // Find the category that matches the subType name (case-insensitive)
      const categoryData = unitsData.units.find(
        (category) =>
          category.category.toLowerCase() === selectedSubType.toLowerCase()
      );

      if (categoryData) {
        // Extract just the unit values and descriptions
        const units = categoryData.values.map((unit) => ({
          value: unit.value,
          description: unit.description,
        }));

        // Create the options array with descriptions as labels
        // NOTE(review): only opt.value reaches the dropdown below; the
        // label text is currently unused — confirm whether labels should
        // be displayed.
        const options = units.map((unit) => ({
          value: unit.value,
          label: `${unit.value} - ${unit.description}`,
        }));

        // Populate the units dropdown
        this.populateDropdown(
          elements.unit,
          options.map((opt) => opt.value),
          node,
          "unit"
        );

        // If there's no currently selected unit but we have options, select the first one
        if (!node.unit && options.length > 0) {
          node.unit = options[0].value;
          elements.unit.value = options[0].value;
        }
      } else {
        // If no matching category is found, provide a default % option
        const defaultUnits = [{ value: "%", description: "Percentage" }];
        this.populateDropdown(
          elements.unit,
          defaultUnits.map((unit) => unit.value),
          node,
          "unit"
        );
        console.warn(
          `No matching unit category found for subType: ${selectedSubType}`
        );
      }
    })
    .catch((error) => {
      console.error("Error fetching units:", error);
    });
}
|
||||
|
||||
/**
 * Populates the model dropdown for a supplier/subType pair and caches the
 * selected model's full metadata on the node (node.modelMetadata), which
 * apiCall() later reads for the asset registration.
 *
 * Also stores the config's default asset type on node.assetType.
 *
 * @param {object} configUrls - cloud/local endpoint map
 * @param {object} elements - edit-dialog DOM references
 * @param {object} node - Node-RED node being edited
 * @param {string} selectedSupplier - vendor name
 * @param {string} selectedSubType - product subtype name
 */
populateModels(
  configUrls,
  elements,
  node,
  selectedSupplier,
  selectedSubType
) {
  this.fetchData(configUrls.cloud.config, configUrls.local.config)
    .then((configData) => {
      const assetType = configData.asset?.type?.default;
      // save assetType to fetch later
      node.assetType = assetType;

      const supplierFolder = this.constructUrl(configUrls.local.taggcodeAPI, `${assetType}s`, selectedSupplier);
      const subTypeFolder = this.constructUrl(supplierFolder, selectedSubType);
      const localModelsUrl = this.constructUrl(subTypeFolder, "models.json");
      const cloudModelsUrl = this.constructCloudURL(configUrls.cloud.taggcodeAPI, "/product/get_product_models.php?vendor_name=" + selectedSupplier + "&product_subtype_name=" + selectedSubType);

      return this.fetchData(cloudModelsUrl, localModelsUrl).then((modelData) => {
        const models = modelData.map((model) => model.name); // use this to populate the dropdown

        // If a model is already selected, store its metadata immediately
        if (node.model) {
          node["modelMetadata"] = modelData.find((model) => model.name === node.model);
        }

        // Arrow callback: keeps `this`/closure over modelData when the
        // user picks a different model.
        this.populateDropdown(elements.model, models, node, "model", (selectedModel) => {
          // Store only the metadata for the selected model
          node["modelMetadata"] = modelData.find((model) => model.name === selectedModel);
        });
      });
    })
    .catch((error) => {
      console.error("Error populating models:", error);
    });
}
|
||||
|
||||
generateHtml(htmlElement, options, savedValue) {
|
||||
htmlElement.innerHTML = options.length
|
||||
? `<option value="">Select...</option>${options
|
||||
.map((opt) => `<option value="${opt}">${opt}</option>`)
|
||||
.join("")}`
|
||||
: "<option value=''>No options available</option>";
|
||||
|
||||
if (savedValue && options.includes(savedValue)) {
|
||||
htmlElement.value = savedValue;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Registers the admin HTTP route that serves the browser-side menuUtils
 * bundle for one node type.
 *
 * @param {object} RED - Node-RED runtime API
 * @param {string} nodeName - node type the route is namespaced under
 * @param {object} [customHelpers] - extra helper sources merged over defaults
 */
createMenuUtilsEndpoint(RED, nodeName, customHelpers = {}) {
  RED.httpAdmin.get(`/${nodeName}/resources/menuUtils.js`, function(req, res) {
    console.log(`Serving menuUtils.js for ${nodeName} node`);
    res.set('Content-Type', 'application/javascript');

    const browserCode = this.generateMenuUtilsCode(nodeName, customHelpers);
    res.send(browserCode);
  }.bind(this)); // bound so the handler can reach generateMenuUtilsCode
}
|
||||
|
||||
generateMenuUtilsCode(nodeName, customHelpers = {}) {
|
||||
const defaultHelpers = {
|
||||
validateRequired: `function(value) {
|
||||
return value && value.toString().trim() !== '';
|
||||
}`,
|
||||
formatDisplayValue: `function(value, unit) {
|
||||
return \`\${value} \${unit || ''}\`.trim();
|
||||
}`
|
||||
};
|
||||
|
||||
const allHelpers = { ...defaultHelpers, ...customHelpers };
|
||||
|
||||
const helpersCode = Object.entries(allHelpers)
|
||||
.map(([name, func]) => ` ${name}: ${func}`)
|
||||
.join(',\n');
|
||||
|
||||
const classCode = MenuUtils.toString(); // <-- this gives full class MenuUtils {...}
|
||||
|
||||
return `
|
||||
// Create EVOLV namespace structure
|
||||
window.EVOLV = window.EVOLV || {};
|
||||
window.EVOLV.nodes = window.EVOLV.nodes || {};
|
||||
window.EVOLV.nodes.${nodeName} = window.EVOLV.nodes.${nodeName} || {};
|
||||
|
||||
// Inject MenuUtils class
|
||||
${classCode}
|
||||
|
||||
// Expose MenuUtils instance to namespace
|
||||
window.EVOLV.nodes.${nodeName}.utils = {
|
||||
menuUtils: new MenuUtils(),
|
||||
|
||||
helpers: {
|
||||
${helpersCode}
|
||||
}
|
||||
};
|
||||
|
||||
// Optionally expose globally
|
||||
window.MenuUtils = MenuUtils;
|
||||
|
||||
console.log('${nodeName} utilities loaded in namespace');
|
||||
`;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = MenuUtils;
|
||||
@@ -1,18 +1,19 @@
|
||||
const { getFormatter } = require('./formatters');
|
||||
|
||||
//this class will handle the output events for the node red node
|
||||
class OutputUtils {
|
||||
constructor() {
|
||||
this.output = {};
|
||||
this.output['influxdb'] = {};
|
||||
this.output['process'] = {};
|
||||
}
|
||||
|
||||
checkForChanges(output, format) {
|
||||
if (!output || typeof output !== 'object') {
|
||||
return {};
|
||||
}
|
||||
this.output[format] = this.output[format] || {};
|
||||
const changedFields = {};
|
||||
for (const key in output) {
|
||||
if (output.hasOwnProperty(key) && output[key] !== this.output[format][key]) {
|
||||
if (Object.prototype.hasOwnProperty.call(output, key) && output[key] !== this.output[format][key]) {
|
||||
let value = output[key];
|
||||
// For fields: if the value is an object (and not a Date), stringify it.
|
||||
if (value !== null && typeof value === 'object' && !(value instanceof Date)) {
|
||||
@@ -30,66 +31,56 @@ class OutputUtils {
|
||||
}
|
||||
|
||||
formatMsg(output, config, format) {
|
||||
|
||||
//define emtpy message
|
||||
let msg = {};
|
||||
|
||||
// Compare output with last output and only include changed values
|
||||
const changedFields = this.checkForChanges(output,format);
|
||||
|
||||
if (Object.keys(changedFields).length > 0) {
|
||||
|
||||
switch (format) {
|
||||
case 'influxdb':
|
||||
// Extract the relevant config properties.
|
||||
const relevantConfig = this.extractRelevantConfig(config);
|
||||
// Flatten the tags so that no nested objects are passed on.
|
||||
const flatTags = this.flattenTags(relevantConfig);
|
||||
msg = this.influxDBFormat(changedFields, config, flatTags);
|
||||
|
||||
break;
|
||||
|
||||
case 'process':
|
||||
|
||||
// Compare output with last output and only include changed values
|
||||
msg = this.processFormat(changedFields,config);
|
||||
//console.log(msg);
|
||||
break;
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
return msg;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
influxDBFormat(changedFields, config , flatTags) {
|
||||
// Create the measurement and topic using softwareType and name config.functionality.softwareType + .
|
||||
const measurement = `${config.functionality?.softwareType}_${config.general?.id}`;
|
||||
const payload = {
|
||||
measurement: measurement,
|
||||
const measurement = config.general.name;
|
||||
const flatTags = this.flattenTags(this.extractRelevantConfig(config));
|
||||
const formatterName = this.resolveFormatterName(config, format);
|
||||
const formatter = getFormatter(formatterName);
|
||||
const payload = formatter.format(measurement, {
|
||||
fields: changedFields,
|
||||
tags: flatTags,
|
||||
timestamp: new Date(),
|
||||
};
|
||||
|
||||
const topic = measurement;
|
||||
const msg = { topic: topic, payload: payload };
|
||||
config,
|
||||
channel: format,
|
||||
});
|
||||
msg = this.wrapMessage(measurement, payload);
|
||||
return msg;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
resolveFormatterName(config, channel) {
|
||||
const outputConfig = config.output || {};
|
||||
if (channel === 'process') {
|
||||
return outputConfig.process || 'process';
|
||||
}
|
||||
if (channel === 'influxdb') {
|
||||
return outputConfig.dbase || 'influxdb';
|
||||
}
|
||||
return outputConfig[channel] || channel;
|
||||
}
|
||||
|
||||
wrapMessage(measurement, payload) {
|
||||
return {
|
||||
topic: measurement,
|
||||
payload,
|
||||
};
|
||||
}
|
||||
|
||||
flattenTags(obj) {
|
||||
const result = {};
|
||||
for (const key in obj) {
|
||||
if (obj.hasOwnProperty(key)) {
|
||||
if (Object.prototype.hasOwnProperty.call(obj, key)) {
|
||||
const value = obj[key];
|
||||
if (value !== null && typeof value === 'object' && !(value instanceof Date)) {
|
||||
// Recursively flatten the nested object.
|
||||
const flatChild = this.flattenTags(value);
|
||||
for (const childKey in flatChild) {
|
||||
if (flatChild.hasOwnProperty(childKey)) {
|
||||
if (Object.prototype.hasOwnProperty.call(flatChild, childKey)) {
|
||||
result[`${key}_${childKey}`] = String(flatChild[childKey]);
|
||||
}
|
||||
}
|
||||
@@ -120,15 +111,6 @@ class OutputUtils {
|
||||
unit: config.general?.unit,
|
||||
};
|
||||
}
|
||||
|
||||
processFormat(changedFields,config) {
|
||||
// Create the measurement and topic using softwareType and name config.functionality.softwareType + .
|
||||
const measurement = `${config.functionality?.softwareType}_${config.general?.id}`;
|
||||
const payload = changedFields;
|
||||
const topic = measurement;
|
||||
const msg = { topic: topic, payload: payload };
|
||||
return msg;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = OutputUtils;
|
||||
|
||||
@@ -28,17 +28,44 @@
|
||||
* @module ValidationUtils
|
||||
* @requires Logger
|
||||
* @exports ValidationUtils
|
||||
* @version 0.1.0
|
||||
* @version 0.2.0
|
||||
* @since 0.1.0
|
||||
*/
|
||||
|
||||
const Logger = require("./logger");
|
||||
const { validateNumber, validateInteger, validateBoolean, validateString, validateEnum } = require("./validators/typeValidators");
|
||||
const { validateArray, validateSet, validateObject } = require("./validators/collectionValidators");
|
||||
const { validateCurve, validateMachineCurve } = require("./validators/curveValidator");
|
||||
|
||||
// Strategy registry: maps rules.type to a handler function. Every handler
// is invoked as (configValue, rules, fieldSchema, name, key, logger);
// pass-through entries reference the imported validators directly, while
// `boolean` adapts validateBoolean's narrower signature.
const VALIDATORS = {
  number: validateNumber,
  integer: validateInteger,
  boolean: (cv, _rules, _fs, name, key, logger) => validateBoolean(cv, name, key, logger),
  string: validateString,
  enum: validateEnum,
  array: validateArray,
  set: validateSet,
};
|
||||
|
||||
class ValidationUtils {
|
||||
constructor(IloggerEnabled, IloggerLevel) {
|
||||
const loggerEnabled = IloggerEnabled ?? true;
|
||||
const loggerLevel = IloggerLevel ?? "warn";
|
||||
this.logger = new Logger(loggerEnabled, loggerLevel, 'ValidationUtils');
|
||||
this._onceLogCache = new Set();
|
||||
}
|
||||
|
||||
_logOnce(level, onceKey, message) {
|
||||
if (onceKey && this._onceLogCache.has(onceKey)) {
|
||||
return;
|
||||
}
|
||||
if (onceKey) {
|
||||
this._onceLogCache.add(onceKey);
|
||||
}
|
||||
if (typeof this.logger?.[level] === "function") {
|
||||
this.logger[level](message);
|
||||
}
|
||||
}
|
||||
|
||||
constrain(value, min, max) {
|
||||
@@ -68,11 +95,19 @@ class ValidationUtils {
|
||||
// Validate each key in the schema and loop over wildcards if they are not in schema
|
||||
for ( const key in schema ) {
|
||||
|
||||
if (key === "rules" || key === "description" || key === "schema") {
|
||||
if (key === "rules" || key === "description" || key === "schema" || key === "version") {
|
||||
continue;
|
||||
}
|
||||
|
||||
const fieldSchema = schema[key];
|
||||
|
||||
// Skip non-object schema entries (e.g. primitive values injected by migration)
|
||||
if (fieldSchema === null || typeof fieldSchema !== 'object') {
|
||||
this.logger.debug(`${name}.${key} has a non-object schema entry (${typeof fieldSchema}). Skipping.`);
|
||||
validatedConfig[key] = fieldSchema;
|
||||
continue;
|
||||
}
|
||||
|
||||
const { rules = {} } = fieldSchema;
|
||||
|
||||
// Default to the schema's default value if the key is missing
|
||||
@@ -96,7 +131,7 @@ class ValidationUtils {
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
this.logger.info(`There is no value provided for ${name}.${key}. Using default value.`);
|
||||
this.logger.debug(`No value provided for ${name}.${key}. Using default value.`);
|
||||
configValue = fieldSchema.default;
|
||||
}
|
||||
//continue;
|
||||
@@ -105,63 +140,44 @@ class ValidationUtils {
|
||||
configValue = config[key] !== undefined ? config[key] : fieldSchema.default;
|
||||
}
|
||||
|
||||
// Attempt to parse the value to the expected type if possible
|
||||
switch (rules.type) {
|
||||
|
||||
case "number":
|
||||
configValue = this.validateNumber(configValue, rules, fieldSchema, name, key);
|
||||
break;
|
||||
case "boolean":
|
||||
configValue = this.validateBoolean(configValue, name, key);
|
||||
break;
|
||||
|
||||
case "string":
|
||||
configValue = this.validateString(configValue,rules,fieldSchema, name, key);
|
||||
break;
|
||||
|
||||
case "array":
|
||||
configValue = this.validateArray(configValue, rules, fieldSchema, name, key);
|
||||
break;
|
||||
|
||||
case "set":
|
||||
configValue = this.validateSet(configValue, rules, fieldSchema, name, key);
|
||||
break;
|
||||
|
||||
case "object":
|
||||
configValue = this.validateObject(configValue, rules, fieldSchema, name, key);
|
||||
break;
|
||||
|
||||
case "enum":
|
||||
configValue = this.validateEnum(configValue, rules, fieldSchema, name, key);
|
||||
break;
|
||||
|
||||
case "curve":
|
||||
validatedConfig[key] = this.validateCurve(configValue,fieldSchema.default);
|
||||
// Handle curve types (they use continue, so handle separately)
|
||||
if (rules.type === "curve") {
|
||||
validatedConfig[key] = validateCurve(configValue, fieldSchema.default, this.logger);
|
||||
continue;
|
||||
|
||||
case "machineCurve":
|
||||
validatedConfig[key] = this.validateMachineCurve(configValue,fieldSchema.default);
|
||||
}
|
||||
if (rules.type === "machineCurve") {
|
||||
validatedConfig[key] = validateMachineCurve(configValue, fieldSchema.default, this.logger);
|
||||
continue;
|
||||
}
|
||||
|
||||
case "integer":
|
||||
validatedConfig[key] = this.validateInteger(configValue, rules, fieldSchema, name, key);
|
||||
// Handle object type (needs recursive validateSchema reference)
|
||||
if (rules.type === "object") {
|
||||
validatedConfig[key] = validateObject(
|
||||
configValue, rules, fieldSchema, name, key,
|
||||
(c, s, n) => this.validateSchema(c, s, n),
|
||||
this.logger
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
case undefined:
|
||||
// If we see 'rules.schema' but no 'rules.type', treat it like an object:
|
||||
// Handle undefined type
|
||||
if (rules.type === undefined) {
|
||||
if (rules.schema && !rules.type) {
|
||||
// Log a warning and skip the extra pass for nested schema
|
||||
this.logger.warn(
|
||||
`${name}.${key} has a nested schema but no type. ` +
|
||||
`Treating it as type="object" to skip extra pass.`
|
||||
);
|
||||
} else {
|
||||
// Otherwise, fallback to your existing "validateUndefined" logic
|
||||
validatedConfig[key] = this.validateUndefined(configValue, fieldSchema, name, key);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
default:
|
||||
// Use the strategy registry for all other types
|
||||
const handler = VALIDATORS[rules.type];
|
||||
if (handler) {
|
||||
configValue = handler(configValue, rules, fieldSchema, name, key, this.logger);
|
||||
} else {
|
||||
this.logger.warn(`${name}.${key} has an unknown validation type: ${rules.type}. Skipping validation.`);
|
||||
validatedConfig[key] = fieldSchema.default;
|
||||
continue;
|
||||
@@ -204,323 +220,6 @@ class ValidationUtils {
|
||||
return obj;
|
||||
}
|
||||
|
||||
validateMachineCurve(curve, defaultCurve) {
|
||||
if (!curve || typeof curve !== "object" || Object.keys(curve).length === 0) {
|
||||
this.logger.warn("Curve is missing or invalid. Defaulting to basic curve.");
|
||||
return defaultCurve;
|
||||
}
|
||||
|
||||
// Validate that nq and np exist and are objects
|
||||
const { nq, np } = curve;
|
||||
if (!nq || typeof nq !== "object" || !np || typeof np !== "object") {
|
||||
this.logger.warn("Curve must contain valid 'nq' and 'np' objects. Defaulting to basic curve.");
|
||||
return defaultCurve;
|
||||
}
|
||||
|
||||
// Validate that each dimension key points to a valid object with x and y arrays
|
||||
const validatedNq = this.validateDimensionStructure(nq, "nq");
|
||||
const validatedNp = this.validateDimensionStructure(np, "np");
|
||||
|
||||
if (!validatedNq || !validatedNp) {
|
||||
return defaultCurve;
|
||||
}
|
||||
|
||||
return { nq: validatedNq, np: validatedNp }; // Return the validated curve
|
||||
}
|
||||
|
||||
validateCurve(curve, defaultCurve) {
|
||||
if (!curve || typeof curve !== "object" || Object.keys(curve).length === 0) {
|
||||
this.logger.warn("Curve is missing or invalid. Defaulting to basic curve.");
|
||||
return defaultCurve;
|
||||
}
|
||||
|
||||
// Validate that each dimension key points to a valid object with x and y arrays
|
||||
const validatedCurve = this.validateDimensionStructure(curve, "curve");
|
||||
if (!validatedCurve) {
|
||||
return defaultCurve;
|
||||
}
|
||||
|
||||
return validatedCurve; // Return the validated curve
|
||||
}
|
||||
|
||||
validateDimensionStructure(dimension, name) {
|
||||
const validatedDimension = {};
|
||||
|
||||
for (const [key, value] of Object.entries(dimension)) {
|
||||
// Validate that each key points to an object with x and y arrays
|
||||
if (typeof value !== "object") {
|
||||
this.logger.warn(`Dimension '${name}' key '${key}' is not valid. Returning to default.`);
|
||||
return false;
|
||||
}
|
||||
// Validate that x and y are arrays
|
||||
else if (!Array.isArray(value.x) || !Array.isArray(value.y)) {
|
||||
this.logger.warn(`Dimension '${name}' key '${key}' is missing x or y arrays. Converting to arrays.`);
|
||||
// Try to convert to arrays first
|
||||
value.x = Object.values(value.x);
|
||||
value.y = Object.values(value.y);
|
||||
|
||||
// If still not arrays return false
|
||||
if (!Array.isArray(value.x) || !Array.isArray(value.y)) {
|
||||
this.logger.warn(`Dimension '${name}' key '${key}' is not valid. Returning to default.`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// Validate that x and y arrays are the same length
|
||||
else if (value.x.length !== value.y.length) {
|
||||
this.logger.warn(`Dimension '${name}' key '${key}' has mismatched x and y lengths. Ignoring this key.`);
|
||||
return false;
|
||||
}
|
||||
// Validate that x values are in ascending order
|
||||
else if (!this.isSorted(value.x)) {
|
||||
this.logger.warn(`Dimension '${name}' key '${key}' has unsorted x values. Sorting...`);
|
||||
return false;
|
||||
}
|
||||
// Validate that x values are unique
|
||||
else if (!this.isUnique(value.x)) {
|
||||
this.logger.warn(`Dimension '${name}' key '${key}' has duplicate x values. Removing duplicates...`);
|
||||
return false;
|
||||
}
|
||||
// Validate that y values are numbers
|
||||
else if (!this.areNumbers(value.y)) {
|
||||
this.logger.warn(`Dimension '${name}' key '${key}' has non-numeric y values. Ignoring this key.`);
|
||||
return false;
|
||||
}
|
||||
|
||||
validatedDimension[key] = value;
|
||||
}
|
||||
return validatedDimension;
|
||||
}
|
||||
|
||||
isSorted(arr) {
|
||||
return arr.every((_, i) => i === 0 || arr[i] >= arr[i - 1]);
|
||||
}
|
||||
|
||||
isUnique(arr) {
|
||||
return new Set(arr).size === arr.length;
|
||||
}
|
||||
|
||||
areNumbers(arr) {
|
||||
return arr.every((x) => typeof x === "number");
|
||||
}
|
||||
|
||||
validateNumber(configValue, rules, fieldSchema, name, key) {
|
||||
|
||||
if (typeof configValue !== "number") {
|
||||
const parsedValue = parseFloat(configValue);
|
||||
if (!isNaN(parsedValue)) {
|
||||
this.logger.warn(`${name}.${key} was parsed to a number: ${configValue} -> ${parsedValue}`);
|
||||
configValue = parsedValue;
|
||||
}
|
||||
}
|
||||
|
||||
if (rules.min !== undefined && configValue < rules.min) {
|
||||
this.logger.warn(
|
||||
`${name}.${key} is below the minimum (${rules.min}). Using default value.`
|
||||
);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
if (rules.max !== undefined && configValue > rules.max) {
|
||||
this.logger.warn(
|
||||
`${name}.${key} exceeds the maximum (${rules.max}). Using default value.`
|
||||
);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
|
||||
this.logger.debug(`${name}.${key} is a valid number: ${configValue}`);
|
||||
|
||||
return configValue;
|
||||
}
|
||||
|
||||
|
||||
validateInteger(configValue, rules, fieldSchema, name, key) {
|
||||
if (typeof configValue !== "number" || !Number.isInteger(configValue)) {
|
||||
const parsedValue = parseInt(configValue, 10);
|
||||
if (!isNaN(parsedValue) && Number.isInteger(parsedValue)) {
|
||||
this.logger.warn(`${name}.${key} was parsed to an integer: ${configValue} -> ${parsedValue}`);
|
||||
configValue = parsedValue;
|
||||
} else {
|
||||
this.logger.warn(`${name}.${key} is not a valid integer. Using default value.`);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
}
|
||||
|
||||
if (rules.min !== undefined && configValue < rules.min) {
|
||||
this.logger.warn(`${name}.${key} is below the minimum integer value (${rules.min}). Using default value.`);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
|
||||
if (rules.max !== undefined && configValue > rules.max) {
|
||||
this.logger.warn(`${name}.${key} exceeds the maximum integer value (${rules.max}). Using default value.`);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
|
||||
this.logger.debug(`${name}.${key} is a valid integer: ${configValue}`);
|
||||
return configValue;
|
||||
}
|
||||
|
||||
validateBoolean(configValue, name, key) {
|
||||
if (typeof configValue !== "boolean") {
|
||||
if (configValue === "true" || configValue === "false") {
|
||||
const parsedValue = configValue === "true";
|
||||
this.logger.debug(`${name}.${key} was parsed to a boolean: ${configValue} -> ${parsedValue}`);
|
||||
configValue = parsedValue;
|
||||
}
|
||||
}
|
||||
return configValue;
|
||||
}
|
||||
|
||||
validateString(configValue, rules, fieldSchema, name, key) {
|
||||
let newConfigValue = configValue;
|
||||
|
||||
if (typeof configValue !== "string") {
|
||||
//check if the value is nullable
|
||||
if(rules.nullable){
|
||||
if(configValue === null){
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.warn(`${name}.${key} is not a string. Trying to convert to string.`);
|
||||
newConfigValue = String(configValue); // Coerce to string if not already
|
||||
}
|
||||
|
||||
//check if the string is a valid string after conversion
|
||||
if (typeof newConfigValue !== "string") {
|
||||
this.logger.warn(`${name}.${key} is not a valid string. Using default value.`);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
|
||||
// Check for uppercase characters and convert to lowercase if present
|
||||
if (newConfigValue !== newConfigValue.toLowerCase()) {
|
||||
this.logger.warn(`${name}.${key} contains uppercase characters. Converting to lowercase: ${newConfigValue} -> ${newConfigValue.toLowerCase()}`);
|
||||
newConfigValue = newConfigValue.toLowerCase();
|
||||
}
|
||||
|
||||
return newConfigValue;
|
||||
}
|
||||
|
||||
validateSet(configValue, rules, fieldSchema, name, key) {
|
||||
// 1. Ensure we have a Set. If not, use default.
|
||||
if (!(configValue instanceof Set)) {
|
||||
this.logger.info(`${name}.${key} is not a Set. Converting to one using default value.`);
|
||||
return new Set(fieldSchema.default);
|
||||
}
|
||||
|
||||
// 2. Convert the Set to an array for easier filtering.
|
||||
const validatedArray = [...configValue]
|
||||
.filter((item) => {
|
||||
// 3. Filter based on `rules.itemType`.
|
||||
switch (rules.itemType) {
|
||||
case "number":
|
||||
return typeof item === "number";
|
||||
case "string":
|
||||
return typeof item === "string";
|
||||
case "null":
|
||||
// "null" might mean no type restriction (your usage may vary).
|
||||
return true;
|
||||
default:
|
||||
// Fallback if itemType is something else
|
||||
return typeof item === rules.itemType;
|
||||
}
|
||||
})
|
||||
.slice(0, rules.maxLength || Infinity);
|
||||
|
||||
// 4. Check if the filtered array meets the minimum length.
|
||||
if (validatedArray.length < (rules.minLength || 1)) {
|
||||
this.logger.warn(
|
||||
`${name}.${key} contains fewer items than allowed (${rules.minLength}). Using default value.`
|
||||
);
|
||||
return new Set(fieldSchema.default);
|
||||
}
|
||||
|
||||
// 5. Return a new Set containing only the valid items.
|
||||
return new Set(validatedArray);
|
||||
}
|
||||
|
||||
validateArray(configValue, rules, fieldSchema, name, key) {
|
||||
if (!Array.isArray(configValue)) {
|
||||
this.logger.info(`${name}.${key} is not an array. Using default value.`);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
|
||||
// Validate individual items in the array
|
||||
const validatedArray = configValue
|
||||
.filter((item) => {
|
||||
switch (rules.itemType) {
|
||||
case "number":
|
||||
return typeof item === "number";
|
||||
case "string":
|
||||
return typeof item === "string";
|
||||
case "null":
|
||||
// anything goes
|
||||
return true;
|
||||
default:
|
||||
return typeof item === rules.itemType;
|
||||
}
|
||||
})
|
||||
.slice(0, rules.maxLength || Infinity);
|
||||
|
||||
if (validatedArray.length < (rules.minLength || 1)) {
|
||||
this.logger.warn(
|
||||
`${name}.${key} contains fewer items than allowed (${rules.minLength}). Using default value.`
|
||||
);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
|
||||
return validatedArray;
|
||||
}
|
||||
|
||||
validateObject(configValue, rules, fieldSchema, name, key) {
|
||||
if (typeof configValue !== "object" || Array.isArray(configValue)) {
|
||||
this.logger.warn(`${name}.${key} is not a valid object. Using default value.`);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
|
||||
if (rules.schema) {
|
||||
// Recursively validate nested objects if a schema is defined
|
||||
return this.validateSchema(configValue || {}, rules.schema, `${name}.${key}`);
|
||||
} else {
|
||||
// If no schema is defined, log a warning and use the default
|
||||
this.logger.warn(`${name}.${key} is an object with no schema. Using default value.`);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
}
|
||||
|
||||
validateEnum(configValue, rules, fieldSchema, name, key) {
|
||||
|
||||
if (Array.isArray(rules.values)) {
|
||||
|
||||
//if value is null take default
|
||||
if(configValue === null){
|
||||
this.logger.warn(`${name}.${key} is null. Using default value.`);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
|
||||
if (typeof configValue !== "string") {
|
||||
this.logger.warn(`${name}.${key} is not a valid enum string. Using default value.`);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
|
||||
const validValues = rules.values.map(e => e.value.toLowerCase());
|
||||
|
||||
//remove caps
|
||||
configValue = configValue.toLowerCase();
|
||||
|
||||
if (!validValues.includes(configValue)) {
|
||||
this.logger.warn(
|
||||
`${name}.${key} has an invalid value : ${configValue}. Allowed values: [${validValues.join(", ")}]. Using default value.`
|
||||
);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
} else {
|
||||
this.logger.warn(
|
||||
`${name}.${key} is an enum with no 'values' array. Using default value.`
|
||||
);
|
||||
return fieldSchema.default;
|
||||
}
|
||||
return configValue;
|
||||
}
|
||||
|
||||
validateUndefined(configValue, fieldSchema, name, key) {
|
||||
if (typeof configValue === "object" && !Array.isArray(configValue)) {
|
||||
|
||||
|
||||
66
src/helper/validators/collectionValidators.js
Normal file
66
src/helper/validators/collectionValidators.js
Normal file
@@ -0,0 +1,66 @@
|
||||
/**
|
||||
* Standalone collection validation functions extracted from validationUtils.js.
|
||||
*/
|
||||
|
||||
/**
 * Validates an array config value: drops items whose runtime type does not
 * match `rules.itemType` ("null" disables the check), truncates to
 * `rules.maxLength`, and falls back to `fieldSchema.default` when fewer than
 * `rules.minLength` items survive.
 * BUG FIX: an explicit minLength/maxLength of 0 was previously ignored because
 * `||` treats 0 as "unset"; `??` only falls back on null/undefined.
 */
function validateArray(configValue, rules, fieldSchema, name, key, logger) {
  if (!Array.isArray(configValue)) {
    logger.info(`${name}.${key} is not an array. Using default value.`);
    return fieldSchema.default;
  }
  const validatedArray = configValue
    .filter((item) => {
      switch (rules.itemType) {
        case "null": return true; // no type restriction
        default: return typeof item === rules.itemType;
      }
    })
    .slice(0, rules.maxLength ?? Infinity);
  if (validatedArray.length < (rules.minLength ?? 1)) {
    logger.warn(
      `${name}.${key} contains fewer items than allowed (${rules.minLength}). Using default value.`
    );
    return fieldSchema.default;
  }
  return validatedArray;
}
|
||||
|
||||
/**
 * Validates a Set config value: non-Sets fall back to a Set of the default;
 * items whose runtime type does not match `rules.itemType` ("null" disables
 * the check) are dropped; the survivors are capped at `rules.maxLength` and
 * must number at least `rules.minLength`.
 * BUG FIX: an explicit minLength/maxLength of 0 was previously ignored because
 * `||` treats 0 as "unset"; `??` only falls back on null/undefined.
 */
function validateSet(configValue, rules, fieldSchema, name, key, logger) {
  if (!(configValue instanceof Set)) {
    logger.info(`${name}.${key} is not a Set. Converting to one using default value.`);
    return new Set(fieldSchema.default);
  }
  const validatedArray = [...configValue]
    .filter((item) => {
      switch (rules.itemType) {
        case "null": return true; // no type restriction
        default: return typeof item === rules.itemType;
      }
    })
    .slice(0, rules.maxLength ?? Infinity);
  if (validatedArray.length < (rules.minLength ?? 1)) {
    logger.warn(
      `${name}.${key} contains fewer items than allowed (${rules.minLength}). Using default value.`
    );
    return new Set(fieldSchema.default);
  }
  return new Set(validatedArray);
}
|
||||
|
||||
/**
 * Validates an object config value. Arrays and primitives fall back to the
 * default; objects with a nested `rules.schema` are validated recursively via
 * the injected `validateSchemaFn`; objects without one also fall back.
 */
function validateObject(configValue, rules, fieldSchema, name, key, validateSchemaFn, logger) {
  if (Array.isArray(configValue) || typeof configValue !== "object") {
    logger.warn(`${name}.${key} is not a valid object. Using default value.`);
    return fieldSchema.default;
  }
  if (!rules.schema) {
    logger.warn(`${name}.${key} is an object with no schema. Using default value.`);
    return fieldSchema.default;
  }
  // Delegate to the recursive schema validator (null becomes an empty object).
  return validateSchemaFn(configValue || {}, rules.schema, `${name}.${key}`);
}
|
||||
|
||||
module.exports = { validateArray, validateSet, validateObject };
|
||||
108
src/helper/validators/curveValidator.js
Normal file
108
src/helper/validators/curveValidator.js
Normal file
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* Curve validation strategies for machine curves and generic curves.
|
||||
* Extracted from validationUtils.js for modularity.
|
||||
*/
|
||||
|
||||
// True when values never decrease; vacuously true for 0/1-element arrays.
function isSorted(arr) {
  for (let i = 1; i < arr.length; i++) {
    if (!(arr[i] >= arr[i - 1])) {
      return false;
    }
  }
  return true;
}
|
||||
|
||||
// A Set collapses duplicates, so equal sizes means every element is distinct.
function isUnique(arr) {
  const distinct = new Set(arr);
  return arr.length === distinct.size;
}
|
||||
|
||||
// Runtime-type check only; note NaN still counts as typeof "number".
function areNumbers(arr) {
  for (const v of arr) {
    if (typeof v !== "number") {
      return false;
    }
  }
  return true;
}
|
||||
|
||||
/**
 * Validates (and repairs in place) the {key: {x: [...], y: [...]}} structure
 * of a curve dimension. Repairs performed: object-shaped x/y are converted to
 * arrays, unsorted x values are sorted (y reordered alongside), and duplicate
 * x values are dropped (first y wins). Returns the validated dimension object,
 * or false to signal "use the default curve".
 * NOTE: mutates the caller's `dimension` values in place (pre-existing behavior).
 */
function validateDimensionStructure(dimension, name, logger) {
  const validatedDimension = {};

  for (const [key, value] of Object.entries(dimension)) {
    // BUG FIX: `typeof null === "object"`, so null used to crash further down.
    if (value === null || typeof value !== "object") {
      logger.warn(`Dimension '${name}' key '${key}' is not valid. Returning to default.`);
      return false;
    }
    if (!Array.isArray(value.x) || !Array.isArray(value.y)) {
      logger.warn(`Dimension '${name}' key '${key}' is missing x or y arrays. Converting to arrays.`);
      // BUG FIX: Object.values(undefined) throws; treat a missing series as empty.
      value.x = Object.values(value.x ?? {});
      value.y = Object.values(value.y ?? {});
      if (!Array.isArray(value.x) || !Array.isArray(value.y)) {
        logger.warn(`Dimension '${name}' key '${key}' is not valid. Returning to default.`);
        return false;
      }
    }
    // BUG FIX: the length and sort checks below were `else if` branches, so a
    // key whose x/y had just been converted from objects skipped them entirely.
    if (value.x.length !== value.y.length) {
      logger.warn(`Dimension '${name}' key '${key}' has mismatched x and y lengths. Ignoring this key.`);
      return false;
    }
    if (!isSorted(value.x)) {
      logger.warn(`Dimension '${name}' key '${key}' has unsorted x values. Sorting...`);
      // Reorder both series by ascending x.
      const indices = value.x.map((_v, i) => i);
      indices.sort((a, b) => value.x[a] - value.x[b]);
      value.x = indices.map(i => value.x[i]);
      value.y = indices.map(i => value.y[i]);
    }
    if (!isUnique(value.x)) {
      logger.warn(`Dimension '${name}' key '${key}' has duplicate x values. Removing duplicates...`);
      // Keep the first y for each distinct x.
      const seen = new Set();
      const uniqueX = [];
      const uniqueY = [];
      for (let i = 0; i < value.x.length; i++) {
        if (!seen.has(value.x[i])) {
          seen.add(value.x[i]);
          uniqueX.push(value.x[i]);
          uniqueY.push(value.y[i]);
        }
      }
      value.x = uniqueX;
      value.y = uniqueY;
    }
    if (!areNumbers(value.y)) {
      logger.warn(`Dimension '${name}' key '${key}' has non-numeric y values. Ignoring this key.`);
      return false;
    }

    validatedDimension[key] = value;
  }
  return validatedDimension;
}
|
||||
|
||||
/**
 * Validates a generic curve object; anything unusable yields `defaultCurve`.
 */
function validateCurve(configValue, defaultCurve, logger) {
  // A curve must be a non-empty object.
  const usable = configValue && typeof configValue === "object" && Object.keys(configValue).length > 0;
  if (!usable) {
    logger.warn("Curve is missing or invalid. Defaulting to basic curve.");
    return defaultCurve;
  }
  // Validate/repair the dimension layout; a falsy result means it was unusable.
  const validatedCurve = validateDimensionStructure(configValue, "curve", logger);
  return validatedCurve || defaultCurve;
}
|
||||
|
||||
/**
 * Validates a machine curve, which must carry 'nq' and 'np' dimension objects;
 * anything unusable yields `defaultCurve`.
 */
function validateMachineCurve(configValue, defaultCurve, logger) {
  // Must be a non-empty object.
  if (!configValue || typeof configValue !== "object" || Object.keys(configValue).length === 0) {
    logger.warn("Curve is missing or invalid. Defaulting to basic curve.");
    return defaultCurve;
  }

  const { nq, np } = configValue;
  const bothPresent = nq && typeof nq === "object" && np && typeof np === "object";
  if (!bothPresent) {
    logger.warn("Curve must contain valid 'nq' and 'np' objects. Defaulting to basic curve.");
    return defaultCurve;
  }

  // Each dimension must validate independently; one failure rejects the curve.
  const validatedNq = validateDimensionStructure(nq, "nq", logger);
  const validatedNp = validateDimensionStructure(np, "np", logger);
  if (!validatedNq || !validatedNp) {
    return defaultCurve;
  }
  return { nq: validatedNq, np: validatedNp };
}
|
||||
|
||||
module.exports = {
|
||||
validateCurve,
|
||||
validateMachineCurve,
|
||||
validateDimensionStructure,
|
||||
isSorted,
|
||||
isUnique,
|
||||
areNumbers
|
||||
};
|
||||
158
src/helper/validators/typeValidators.js
Normal file
158
src/helper/validators/typeValidators.js
Normal file
@@ -0,0 +1,158 @@
|
||||
/**
|
||||
* Standalone type validation functions extracted from validationUtils.js.
|
||||
*/
|
||||
|
||||
/**
 * Validates a numeric config value against optional rules.min / rules.max.
 * Numeric strings ("3.5") are coerced via parseFloat; invalid values fall
 * back to fieldSchema.default.
 */
function validateNumber(configValue, rules, fieldSchema, name, key, logger) {
  if (typeof configValue !== "number") {
    const parsedValue = parseFloat(configValue);
    if (!isNaN(parsedValue)) {
      logger.warn(`${name}.${key} was parsed to a number: ${configValue} -> ${parsedValue}`);
      configValue = parsedValue;
    }
  }
  // BUG FIX: an unparseable value (or NaN) previously slipped through because
  // NaN compares false against both bounds, and was then logged as "valid".
  if (typeof configValue !== "number" || Number.isNaN(configValue)) {
    logger.warn(`${name}.${key} is not a valid number. Using default value.`);
    return fieldSchema.default;
  }
  if (rules.min !== undefined && configValue < rules.min) {
    logger.warn(`${name}.${key} is below the minimum (${rules.min}). Using default value.`);
    return fieldSchema.default;
  }
  if (rules.max !== undefined && configValue > rules.max) {
    logger.warn(`${name}.${key} exceeds the maximum (${rules.max}). Using default value.`);
    return fieldSchema.default;
  }
  logger.debug(`${name}.${key} is a valid number: ${configValue}`);
  return configValue;
}
|
||||
|
||||
/**
 * Validates an integer config value against optional rules.min / rules.max.
 * Non-integers get one parseInt attempt; failures fall back to the default.
 */
function validateInteger(configValue, rules, fieldSchema, name, key, logger) {
  const needsParse = typeof configValue !== "number" || !Number.isInteger(configValue);
  if (needsParse) {
    const parsedValue = parseInt(configValue, 10);
    if (isNaN(parsedValue) || !Number.isInteger(parsedValue)) {
      logger.warn(`${name}.${key} is not a valid integer. Using default value.`);
      return fieldSchema.default;
    }
    logger.warn(`${name}.${key} was parsed to an integer: ${configValue} -> ${parsedValue}`);
    configValue = parsedValue;
  }

  // Enforce the optional inclusive bounds.
  if (rules.min !== undefined && configValue < rules.min) {
    logger.warn(`${name}.${key} is below the minimum integer value (${rules.min}). Using default value.`);
    return fieldSchema.default;
  }
  if (rules.max !== undefined && configValue > rules.max) {
    logger.warn(`${name}.${key} exceeds the maximum integer value (${rules.max}). Using default value.`);
    return fieldSchema.default;
  }

  logger.debug(`${name}.${key} is a valid integer: ${configValue}`);
  return configValue;
}
|
||||
|
||||
/**
 * Coerces the literal strings "true"/"false" to booleans; every other value —
 * including other non-booleans — passes through unchanged (no default here).
 */
function validateBoolean(configValue, name, key, logger) {
  if (configValue === "true" || configValue === "false") {
    const parsedValue = configValue === "true";
    logger.debug(`${name}.${key} was parsed to a boolean: ${configValue} -> ${parsedValue}`);
    return parsedValue;
  }
  return configValue;
}
|
||||
|
||||
/**
 * Heuristic: does this dot-delimited config path name a unit field?
 * Matches segments like "unit", "flowunit", "units", plus anything nested
 * under a ".curveunits." segment.
 */
function _isUnitLikeField(path) {
  const normalized = String(path || "").toLowerCase();
  if (normalized === "") {
    return false;
  }
  if (normalized.includes(".curveunits.")) {
    return true;
  }
  return /(^|\.)([a-z0-9]*unit|units)(\.|$)/.test(normalized);
}
|
||||
|
||||
/**
 * Decides whether a string field should be lowercased ("lowercase") or kept
 * as-is ("none"). Identity-like fields (.name, .model, .supplier, .role,
 * .description) and unit-like fields keep their casing; everything else —
 * including .softwaretype/.type/.category — is lowercased.
 */
function _resolveStringNormalizeMode(path) {
  const normalized = String(path || "").toLowerCase();
  if (!normalized) {
    return "none";
  }

  const preserveSuffixes = [".name", ".model", ".supplier", ".role", ".description"];
  const preserve = _isUnitLikeField(normalized)
    || preserveSuffixes.some((suffix) => normalized.endsWith(suffix));

  return preserve ? "none" : "lowercase";
}
|
||||
|
||||
/**
 * Validates a string config value. Nullable fields may hold null; other
 * non-strings are coerced via String(). Casing policy: an explicit
 * rules.normalize wins, otherwise the field path decides.
 */
function validateString(configValue, rules, fieldSchema, name, key, logger) {
  let value = configValue;

  if (typeof value !== "string") {
    // Nullable fields may legitimately hold null.
    if (rules.nullable && value === null) {
      return null;
    }
    logger.warn(`${name}.${key} is not a string. Trying to convert to string.`);
    value = String(value); // Coerce to string if not already
  }

  // Guard against an impossible coercion result.
  if (typeof value !== "string") {
    logger.warn(`${name}.${key} is not a valid string. Using default value.`);
    return fieldSchema.default;
  }

  const keyString = `${name}.${key}`;
  const normalizeMode = rules.normalize || _resolveStringNormalizeMode(keyString);

  // Lowercase only when the policy asks for it.
  if (normalizeMode === "lowercase") {
    const lowered = value.toLowerCase();
    if (lowered !== value) {
      logger.info(
        `${name}.${key} normalized to lowercase: ${value} -> ${lowered}`
      );
      value = lowered;
    }
  }

  return value;
}
|
||||
|
||||
/**
 * Validates an enum config value against rules.values (array of {value}).
 * Comparison is case-insensitive; invalid input falls back to the default.
 */
function validateEnum(configValue, rules, fieldSchema, name, key, logger) {
  // A usable enum rule must declare its allowed values.
  if (!Array.isArray(rules.values)) {
    logger.warn(
      `${name}.${key} is an enum with no 'values' array. Using default value.`
    );
    return fieldSchema.default;
  }

  if (configValue === null) {
    logger.warn(`${name}.${key} is null. Using default value.`);
    return fieldSchema.default;
  }
  if (typeof configValue !== "string") {
    logger.warn(`${name}.${key} is not a valid enum string. Using default value.`);
    return fieldSchema.default;
  }

  // Lowercase both sides for a case-insensitive membership check.
  const validValues = rules.values.map(e => e.value.toLowerCase());
  const normalized = configValue.toLowerCase();

  if (!validValues.includes(normalized)) {
    logger.warn(
      `${name}.${key} has an invalid value : ${normalized}. Allowed values: [${validValues.join(", ")}]. Using default value.`
    );
    return fieldSchema.default;
  }
  return normalized;
}
|
||||
|
||||
module.exports = {
|
||||
validateNumber,
|
||||
validateInteger,
|
||||
validateBoolean,
|
||||
validateString,
|
||||
validateEnum,
|
||||
};
|
||||
@@ -115,7 +115,6 @@ class Measurement {
|
||||
|
||||
// Create a new measurement that is the difference between two positions
|
||||
static createDifference(upstreamMeasurement, downstreamMeasurement) {
|
||||
|
||||
if (upstreamMeasurement.type !== downstreamMeasurement.type ||
|
||||
upstreamMeasurement.variant !== downstreamMeasurement.variant) {
|
||||
throw new Error('Cannot calculate difference between different measurement types or variants');
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const MeasurementBuilder = require('./MeasurementBuilder');
|
||||
const EventEmitter = require('events');
|
||||
const convertModule = require('../convert/index');
|
||||
const { POSITIONS } = require('../constants/positions');
|
||||
|
||||
class MeasurementContainer {
|
||||
constructor(options = {},logger) {
|
||||
@@ -17,7 +18,7 @@ class MeasurementContainer {
|
||||
this._currentDistance = null;
|
||||
this._unit = null;
|
||||
|
||||
// Default units for each measurement type
|
||||
// Default units for each measurement type (ingress/preferred)
|
||||
this.defaultUnits = {
|
||||
pressure: 'mbar',
|
||||
flow: 'm3/h',
|
||||
@@ -28,9 +29,47 @@ class MeasurementContainer {
|
||||
...options.defaultUnits // Allow override
|
||||
};
|
||||
|
||||
// Canonical storage unit map (single conversion anchor per measurement type)
|
||||
this.canonicalUnits = {
|
||||
pressure: 'Pa',
|
||||
atmPressure: 'Pa',
|
||||
flow: 'm3/s',
|
||||
power: 'W',
|
||||
hydraulicPower: 'W',
|
||||
temperature: 'K',
|
||||
volume: 'm3',
|
||||
length: 'm',
|
||||
mass: 'kg',
|
||||
energy: 'J',
|
||||
...options.canonicalUnits,
|
||||
};
|
||||
|
||||
// Auto-conversion settings
|
||||
this.autoConvert = options.autoConvert !== false; // Default to true
|
||||
this.preferredUnits = options.preferredUnits || {}; // Per-measurement overrides
|
||||
this.storeCanonical = options.storeCanonical === true;
|
||||
this.strictUnitValidation = options.strictUnitValidation === true;
|
||||
this.throwOnInvalidUnit = options.throwOnInvalidUnit === true;
|
||||
this.requireUnitForTypes = new Set(
|
||||
(options.requireUnitForTypes || []).map((t) => String(t).trim().toLowerCase())
|
||||
);
|
||||
|
||||
// Map EVOLV measurement types to convert-module measure families
|
||||
this.measureMap = {
|
||||
pressure: 'pressure',
|
||||
atmpressure: 'pressure',
|
||||
flow: 'volumeFlowRate',
|
||||
power: 'power',
|
||||
hydraulicpower: 'power',
|
||||
reactivepower: 'reactivePower',
|
||||
apparentpower: 'apparentPower',
|
||||
temperature: 'temperature',
|
||||
volume: 'volume',
|
||||
length: 'length',
|
||||
mass: 'mass',
|
||||
energy: 'energy',
|
||||
reactiveenergy: 'reactiveEnergy',
|
||||
};
|
||||
|
||||
// For chaining context
|
||||
this._currentType = null;
|
||||
@@ -72,6 +111,11 @@ class MeasurementContainer {
|
||||
return this;
|
||||
}
|
||||
|
||||
setCanonicalUnit(measurementType, unit) {
|
||||
this.canonicalUnits[measurementType] = unit;
|
||||
return this;
|
||||
}
|
||||
|
||||
// Get the target unit for a measurement type
|
||||
_getTargetUnit(measurementType) {
|
||||
return this.preferredUnits[measurementType] ||
|
||||
@@ -79,6 +123,77 @@ class MeasurementContainer {
|
||||
null;
|
||||
}
|
||||
|
||||
_getCanonicalUnit(measurementType) {
|
||||
return this.canonicalUnits[measurementType] || null;
|
||||
}
|
||||
|
||||
_normalizeType(measurementType) {
|
||||
return String(measurementType || '').trim().toLowerCase();
|
||||
}
|
||||
|
||||
_describeUnit(unit) {
|
||||
if (typeof unit !== 'string' || unit.trim() === '') return null;
|
||||
try {
|
||||
return convertModule().describe(unit.trim());
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
isUnitCompatible(measurementType, unit) {
|
||||
const desc = this._describeUnit(unit);
|
||||
if (!desc) return false;
|
||||
const normalizedType = this._normalizeType(measurementType);
|
||||
const expectedMeasure = this.measureMap[normalizedType];
|
||||
if (!expectedMeasure) return true;
|
||||
return desc.measure === expectedMeasure;
|
||||
}
|
||||
|
||||
_handleUnitViolation(message) {
|
||||
if (this.throwOnInvalidUnit) {
|
||||
throw new Error(message);
|
||||
}
|
||||
if (this.logger) {
|
||||
this.logger.warn(message);
|
||||
}
|
||||
}
|
||||
|
||||
_resolveUnitPolicy(measurementType, sourceUnit = null) {
|
||||
const normalizedType = this._normalizeType(measurementType);
|
||||
const rawSourceUnit = typeof sourceUnit === 'string' && sourceUnit.trim()
|
||||
? sourceUnit.trim()
|
||||
: null;
|
||||
const fallbackIngressUnit = this._getTargetUnit(measurementType);
|
||||
const canonicalUnit = this._getCanonicalUnit(measurementType);
|
||||
const resolvedSourceUnit = rawSourceUnit || fallbackIngressUnit || canonicalUnit || null;
|
||||
|
||||
if (this.requireUnitForTypes.has(normalizedType) && !rawSourceUnit) {
|
||||
this._handleUnitViolation(`Missing source unit for required measurement type '${measurementType}'.`);
|
||||
return { valid: false };
|
||||
}
|
||||
|
||||
if (resolvedSourceUnit && !this.isUnitCompatible(measurementType, resolvedSourceUnit)) {
|
||||
this._handleUnitViolation(`Incompatible or unknown source unit '${resolvedSourceUnit}' for measurement type '${measurementType}'.`);
|
||||
return { valid: false };
|
||||
}
|
||||
|
||||
const resolvedStorageUnit = this.storeCanonical
|
||||
? (canonicalUnit || fallbackIngressUnit || resolvedSourceUnit)
|
||||
: (fallbackIngressUnit || canonicalUnit || resolvedSourceUnit);
|
||||
|
||||
if (resolvedStorageUnit && !this.isUnitCompatible(measurementType, resolvedStorageUnit)) {
|
||||
this._handleUnitViolation(`Incompatible storage unit '${resolvedStorageUnit}' for measurement type '${measurementType}'.`);
|
||||
return { valid: false };
|
||||
}
|
||||
|
||||
return {
|
||||
valid: true,
|
||||
sourceUnit: resolvedSourceUnit,
|
||||
storageUnit: resolvedStorageUnit || null,
|
||||
strictValidation: this.strictUnitValidation,
|
||||
};
|
||||
}
|
||||
|
||||
getUnit(type) {
|
||||
if (!type) return null;
|
||||
if (this.preferredUnits && this.preferredUnits[type]) return this.preferredUnits[type];
|
||||
@@ -136,33 +251,39 @@ class MeasurementContainer {
|
||||
value(val, timestamp = Date.now(), sourceUnit = null) {
|
||||
if (!this._ensureChainIsValid()) return this;
|
||||
|
||||
const unitPolicy = this._resolveUnitPolicy(this._currentType, sourceUnit);
|
||||
if (!unitPolicy.valid) return this;
|
||||
|
||||
const measurement = this._getOrCreateMeasurement();
|
||||
const targetUnit = this._getTargetUnit(this._currentType);
|
||||
const targetUnit = unitPolicy.storageUnit;
|
||||
|
||||
let convertedValue = val;
|
||||
let finalUnit = sourceUnit || targetUnit;
|
||||
let finalUnit = targetUnit || unitPolicy.sourceUnit;
|
||||
|
||||
// Auto-convert if enabled and units are specified
|
||||
if (this.autoConvert && sourceUnit && targetUnit && sourceUnit !== targetUnit) {
|
||||
if (this.autoConvert && unitPolicy.sourceUnit && targetUnit && unitPolicy.sourceUnit !== targetUnit) {
|
||||
try {
|
||||
convertedValue = convertModule(val).from(sourceUnit).to(targetUnit);
|
||||
convertedValue = convertModule(val).from(unitPolicy.sourceUnit).to(targetUnit);
|
||||
finalUnit = targetUnit;
|
||||
|
||||
if (this.logger) {
|
||||
this.logger.debug(`Auto-converted ${val} ${sourceUnit} to ${convertedValue} ${targetUnit}`);
|
||||
this.logger.debug(`Auto-converted ${val} ${unitPolicy.sourceUnit} to ${convertedValue} ${targetUnit}`);
|
||||
}
|
||||
} catch (error) {
|
||||
if (this.logger) {
|
||||
this.logger.warn(`Auto-conversion failed from ${sourceUnit} to ${targetUnit}: ${error.message}`);
|
||||
const message = `Auto-conversion failed from ${unitPolicy.sourceUnit} to ${targetUnit}: ${error.message}`;
|
||||
if (this.strictUnitValidation) {
|
||||
this._handleUnitViolation(message);
|
||||
return this;
|
||||
}
|
||||
if (this.logger) this.logger.warn(message);
|
||||
convertedValue = val;
|
||||
finalUnit = sourceUnit;
|
||||
finalUnit = unitPolicy.sourceUnit;
|
||||
}
|
||||
}
|
||||
|
||||
measurement.setValue(convertedValue, timestamp);
|
||||
|
||||
if (finalUnit && !measurement.unit) {
|
||||
if (finalUnit) {
|
||||
measurement.setUnit(finalUnit);
|
||||
}
|
||||
|
||||
@@ -171,7 +292,7 @@ class MeasurementContainer {
|
||||
value: convertedValue,
|
||||
originalValue: val,
|
||||
unit: finalUnit,
|
||||
sourceUnit: sourceUnit,
|
||||
sourceUnit: unitPolicy.sourceUnit,
|
||||
timestamp,
|
||||
position: this._currentPosition,
|
||||
distance: this._currentDistance,
|
||||
@@ -408,21 +529,22 @@ class MeasurementContainer {
|
||||
.reduce((acc, v) => acc + v, 0);
|
||||
}
|
||||
|
||||
getFlattenedOutput() {
|
||||
getFlattenedOutput(options = {}) {
|
||||
const requestedUnits = options.requestedUnits || (options.usePreferredUnits ? this.preferredUnits : null);
|
||||
const out = {};
|
||||
Object.entries(this.measurements).forEach(([type, variants]) => {
|
||||
Object.entries(variants).forEach(([variant, positions]) => {
|
||||
Object.entries(positions).forEach(([position, entry]) => {
|
||||
// Legacy single series
|
||||
if (entry?.getCurrentValue) {
|
||||
out[`${type}.${variant}.${position}`] = entry.getCurrentValue();
|
||||
out[`${type}.${variant}.${position}`] = this._resolveOutputValue(type, entry, requestedUnits);
|
||||
return;
|
||||
}
|
||||
// Child-bucketed series
|
||||
if (entry && typeof entry === 'object') {
|
||||
Object.entries(entry).forEach(([childId, m]) => {
|
||||
if (m?.getCurrentValue) {
|
||||
out[`${type}.${variant}.${position}.${childId}`] = m.getCurrentValue();
|
||||
out[`${type}.${variant}.${position}.${childId}`] = this._resolveOutputValue(type, m, requestedUnits);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -433,7 +555,7 @@ class MeasurementContainer {
|
||||
}
|
||||
|
||||
// Difference calculations between positions
|
||||
difference({ from = "downstream", to = "upstream", unit: requestedUnit } = {}) {
|
||||
difference({ from = POSITIONS.DOWNSTREAM, to = POSITIONS.UPSTREAM, unit: requestedUnit } = {}) {
|
||||
if (!this._currentType || !this._currentVariant) {
|
||||
if (this.logger) {
|
||||
this.logger.warn('difference() ignored: type and variant must be specified');
|
||||
@@ -528,6 +650,18 @@ class MeasurementContainer {
|
||||
Object.keys(this.measurements[this._currentType]) : [];
|
||||
}
|
||||
|
||||
_resolveOutputValue(type, measurement, requestedUnits = null) {
|
||||
const value = measurement.getCurrentValue();
|
||||
if (!requestedUnits || value === null || typeof value === 'undefined') {
|
||||
return value;
|
||||
}
|
||||
const targetUnit = requestedUnits[type];
|
||||
if (!targetUnit) {
|
||||
return value;
|
||||
}
|
||||
return this._convertValueToUnit(value, measurement.unit, targetUnit);
|
||||
}
|
||||
|
||||
getPositions() {
|
||||
if (!this._currentType || !this._currentVariant) {
|
||||
if (this.logger) {
|
||||
@@ -549,11 +683,13 @@ class MeasurementContainer {
|
||||
this._currentType = null;
|
||||
this._currentVariant = null;
|
||||
this._currentPosition = null;
|
||||
this._currentDistance = null;
|
||||
this._unit = null;
|
||||
}
|
||||
|
||||
// Helper method for value conversion
|
||||
_convertValueToUnit(value, fromUnit, toUnit) {
|
||||
if (!value || !fromUnit || !toUnit || fromUnit === toUnit) {
|
||||
if ((value === null || typeof value === 'undefined') || !fromUnit || !toUnit || fromUnit === toUnit) {
|
||||
return value;
|
||||
}
|
||||
|
||||
@@ -572,19 +708,7 @@ class MeasurementContainer {
|
||||
const type = measurementType || this._currentType;
|
||||
if (!type) return [];
|
||||
|
||||
// Map measurement types to convert module measures
|
||||
const measureMap = {
|
||||
pressure: 'pressure',
|
||||
flow: 'volumeFlowRate',
|
||||
power: 'power',
|
||||
temperature: 'temperature',
|
||||
volume: 'volume',
|
||||
length: 'length',
|
||||
mass: 'mass',
|
||||
energy: 'energy'
|
||||
};
|
||||
|
||||
const convertMeasure = measureMap[type];
|
||||
const convertMeasure = this.measureMap[this._normalizeType(type)];
|
||||
if (!convertMeasure) return [];
|
||||
|
||||
try {
|
||||
@@ -618,11 +742,11 @@ class MeasurementContainer {
|
||||
|
||||
_convertPositionStr2Num(positionString) {
|
||||
switch(positionString) {
|
||||
case "atEquipment":
|
||||
case POSITIONS.AT_EQUIPMENT:
|
||||
return 0;
|
||||
case "upstream":
|
||||
case POSITIONS.UPSTREAM:
|
||||
return Number.POSITIVE_INFINITY;
|
||||
case "downstream":
|
||||
case POSITIONS.DOWNSTREAM:
|
||||
return Number.NEGATIVE_INFINITY;
|
||||
|
||||
default:
|
||||
@@ -635,13 +759,13 @@ class MeasurementContainer {
|
||||
|
||||
_convertPositionNum2Str(positionValue) {
|
||||
if (positionValue === 0) {
|
||||
return "atEquipment";
|
||||
return POSITIONS.AT_EQUIPMENT;
|
||||
}
|
||||
if (positionValue < 0) {
|
||||
return "upstream";
|
||||
return POSITIONS.UPSTREAM;
|
||||
}
|
||||
if (positionValue > 0) {
|
||||
return "downstream";
|
||||
return POSITIONS.DOWNSTREAM;
|
||||
}
|
||||
if (this.logger) {
|
||||
this.logger.warn(`Invalid position provided: ${positionValue}`);
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
const { MeasurementContainer } = require('./index');
|
||||
const { POSITIONS } = require('../constants/positions');
|
||||
|
||||
const measurements = new MeasurementContainer();
|
||||
|
||||
console.log('=== MEASUREMENT CONTAINER EXAMPLES ===\n');
|
||||
console.log('This guide shows how to use the MeasurementContainer for storing,');
|
||||
@@ -27,7 +30,7 @@ console.log('\nSetting pressure values with distances:');
|
||||
basicContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.distance(1.5)
|
||||
.value(100)
|
||||
.unit('psi');
|
||||
@@ -35,7 +38,7 @@ basicContainer
|
||||
basicContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('downstream')
|
||||
.position(POSITIONS.DOWNSTREAM)
|
||||
.distance(5.2)
|
||||
.value(95)
|
||||
.unit('psi');
|
||||
@@ -44,7 +47,7 @@ basicContainer
|
||||
basicContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('downstream')
|
||||
.position(POSITIONS.DOWNSTREAM)
|
||||
.value(90); // distance 5.2 is automatically reused
|
||||
|
||||
console.log('✅ Basic setup complete\n');
|
||||
@@ -53,7 +56,7 @@ console.log('✅ Basic setup complete\n');
|
||||
const upstreamPressure = basicContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.get();
|
||||
|
||||
console.log(`Retrieved upstream pressure: ${upstreamPressure.getCurrentValue()} ${upstreamPressure.unit}`);
|
||||
@@ -83,7 +86,7 @@ console.log('Adding pressure with auto-conversion:');
|
||||
autoContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.distance(0.5)
|
||||
.value(1.5, Date.now(), 'bar'); // Input: 1.5 bar → Auto-stored as ~21.76 psi
|
||||
|
||||
@@ -91,7 +94,7 @@ autoContainer
|
||||
const converted = autoContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.get();
|
||||
|
||||
console.log(`Stored as: ${converted.getCurrentValue()} ${converted.unit} (distance=${converted.distance}m)`);
|
||||
@@ -105,14 +108,14 @@ console.log('--- Example 3: Unit Conversion on Retrieval ---');
|
||||
autoContainer
|
||||
.type('flow')
|
||||
.variant('predicted')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.distance(2.4)
|
||||
.value(100, Date.now(), 'l/min');
|
||||
|
||||
const flowMeasurement = autoContainer
|
||||
.type('flow')
|
||||
.variant('predicted')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.get();
|
||||
|
||||
console.log(`Flow in l/min: ${flowMeasurement.getCurrentValue('l/min')}`);
|
||||
@@ -153,13 +156,13 @@ console.log('--- Example 5: Basic Value Retrieval ---');
|
||||
const upstreamVal = basicContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.getCurrentValue();
|
||||
|
||||
const upstreamData = basicContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.get();
|
||||
|
||||
console.log(`Upstream: ${upstreamVal} ${upstreamData.unit} at ${upstreamData.distance}m`);
|
||||
@@ -167,31 +170,31 @@ console.log(`Upstream: ${upstreamVal} ${upstreamData.unit} at ${upstreamData.dis
|
||||
const downstreamVal = basicContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('downstream')
|
||||
.position(POSITIONS.DOWNSTREAM)
|
||||
.getCurrentValue();
|
||||
|
||||
const downstreamData = basicContainer
|
||||
.type('pressure')
|
||||
.variant('measured')
|
||||
.position('downstream')
|
||||
.position(POSITIONS.DOWNSTREAM)
|
||||
.get();
|
||||
|
||||
//check wether a serie exists
|
||||
const hasSeries = basicContainer
|
||||
const hasSeries = basicContainer // eslint-disable-line no-unused-vars
|
||||
.type("flow")
|
||||
.variant("measured")
|
||||
.exists(); // true if any position exists
|
||||
|
||||
const hasUpstreamValues = basicContainer
|
||||
const hasUpstreamValues = basicContainer // eslint-disable-line no-unused-vars
|
||||
.type("flow")
|
||||
.variant("measured")
|
||||
.exists({ position: "upstream", requireValues: true });
|
||||
.exists({ position: POSITIONS.UPSTREAM, requireValues: true });
|
||||
|
||||
// Passing everything explicitly
|
||||
const hasPercent = basicContainer.exists({
|
||||
const hasPercent = basicContainer.exists({ // eslint-disable-line no-unused-vars
|
||||
type: "volume",
|
||||
variant: "percent",
|
||||
position: "atEquipment",
|
||||
position: POSITIONS.AT_EQUIPMENT,
|
||||
});
|
||||
|
||||
|
||||
@@ -205,7 +208,7 @@ console.log('--- Example 6: Calculations & Statistics ---');
|
||||
basicContainer
|
||||
.type('flow')
|
||||
.variant('predicted')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.distance(3.0)
|
||||
.value(200)
|
||||
.unit('gpm');
|
||||
@@ -213,7 +216,7 @@ basicContainer
|
||||
basicContainer
|
||||
.type('flow')
|
||||
.variant('predicted')
|
||||
.position('downstream')
|
||||
.position(POSITIONS.DOWNSTREAM)
|
||||
.distance(8.5)
|
||||
.value(195)
|
||||
.unit('gpm');
|
||||
@@ -221,7 +224,7 @@ basicContainer
|
||||
const flowAvg = basicContainer
|
||||
.type('flow')
|
||||
.variant('predicted')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.getAverage();
|
||||
|
||||
console.log(`Average upstream flow: ${flowAvg.toFixed(1)} gpm`);
|
||||
@@ -234,8 +237,8 @@ const pressureDiff = basicContainer
|
||||
console.log(`Pressure difference: ${pressureDiff.value} ${pressureDiff.unit}\n`);
|
||||
|
||||
//reversable difference
|
||||
const deltaP = basicContainer.type("pressure").variant("measured").difference(); // defaults to downstream - upstream
|
||||
const netFlow = basicContainer.type("flow").variant("measured").difference({ from: "upstream", to: "downstream" });
|
||||
const deltaP = basicContainer.type("pressure").variant("measured").difference(); // eslint-disable-line no-unused-vars -- defaults to downstream - upstream
|
||||
const netFlow = basicContainer.type("flow").variant("measured").difference({ from: POSITIONS.UPSTREAM, to: POSITIONS.DOWNSTREAM }); // eslint-disable-line no-unused-vars
|
||||
|
||||
// ====================================
|
||||
// ADVANCED STATISTICS & HISTORY
|
||||
@@ -245,7 +248,7 @@ console.log('--- Example 7: Advanced Statistics & History ---');
|
||||
basicContainer
|
||||
.type('flow')
|
||||
.variant('measured')
|
||||
.position('upstream')
|
||||
.position(POSITIONS.UPSTREAM)
|
||||
.distance(3.0)
|
||||
.value(210)
|
||||
.value(215)
|
||||
@@ -257,7 +260,7 @@ basicContainer
|
||||
const stats = basicContainer
|
||||
.type('flow')
|
||||
.variant('measured')
|
||||
.position('upstream');
|
||||
.position(POSITIONS.UPSTREAM);
|
||||
|
||||
const statsData = stats.get();
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const AssetMenu = require('./asset.js');
|
||||
const { TagcodeApp, DynamicAssetMenu } = require('./tagcodeApp.js');
|
||||
// TagcodeApp and DynamicAssetMenu available via ./tagcodeApp.js
|
||||
const LoggerMenu = require('./logger.js');
|
||||
const PhysicalPositionMenu = require('./physicalPosition.js');
|
||||
const AquonSamplesMenu = require('./aquonSamples.js');
|
||||
|
||||
@@ -1,125 +1,126 @@
|
||||
//load local dependencies
|
||||
const EventEmitter = require('events');
|
||||
|
||||
//load all config modules
|
||||
const defaultConfig = require('./nrmseConfig.json');
|
||||
const ConfigUtils = require('../helper/configUtils');
|
||||
|
||||
class ErrorMetrics {
|
||||
constructor(config = {}, logger) {
|
||||
|
||||
this.emitter = new EventEmitter(); // Own EventEmitter
|
||||
this.emitter = new EventEmitter();
|
||||
this.configUtils = new ConfigUtils(defaultConfig);
|
||||
this.config = this.configUtils.initConfig(config);
|
||||
|
||||
// Init after config is set
|
||||
this.logger = logger;
|
||||
|
||||
// For long-term NRMSD accumulation
|
||||
this.metricState = new Map();
|
||||
this.legacyMetricId = 'default';
|
||||
|
||||
// Backward-compatible fields retained for existing callers/tests.
|
||||
this.cumNRMSD = 0;
|
||||
this.cumCount = 0;
|
||||
}
|
||||
|
||||
//INCLUDE timestamps in the next update OLIFANT
|
||||
meanSquaredError(predicted, measured) {
|
||||
if (predicted.length !== measured.length) {
|
||||
this.logger.error("Comparing MSE Arrays must have the same length.");
|
||||
return 0;
|
||||
registerMetric(metricId, profile = {}) {
|
||||
const key = String(metricId || this.legacyMetricId);
|
||||
const state = this._ensureMetricState(key);
|
||||
state.profile = { ...state.profile, ...profile };
|
||||
return state.profile;
|
||||
}
|
||||
|
||||
resetMetric(metricId = this.legacyMetricId) {
|
||||
this.metricState.delete(String(metricId));
|
||||
if (metricId === this.legacyMetricId) {
|
||||
this.cumNRMSD = 0;
|
||||
this.cumCount = 0;
|
||||
}
|
||||
}
|
||||
|
||||
getMetricState(metricId = this.legacyMetricId) {
|
||||
return this.metricState.get(String(metricId)) || null;
|
||||
}
|
||||
|
||||
meanSquaredError(predicted, measured, options = {}) {
|
||||
const { p, m } = this._validateSeries(predicted, measured, options);
|
||||
let sumSqError = 0;
|
||||
for (let i = 0; i < predicted.length; i++) {
|
||||
const err = predicted[i] - measured[i];
|
||||
for (let i = 0; i < p.length; i += 1) {
|
||||
const err = p[i] - m[i];
|
||||
sumSqError += err * err;
|
||||
}
|
||||
return sumSqError / predicted.length;
|
||||
return sumSqError / p.length;
|
||||
}
|
||||
|
||||
rootMeanSquaredError(predicted, measured) {
|
||||
return Math.sqrt(this.meanSquaredError(predicted, measured));
|
||||
rootMeanSquaredError(predicted, measured, options = {}) {
|
||||
return Math.sqrt(this.meanSquaredError(predicted, measured, options));
|
||||
}
|
||||
|
||||
normalizedRootMeanSquaredError(predicted, measured, processMin, processMax) {
|
||||
const range = processMax - processMin;
|
||||
if (range <= 0) {
|
||||
this.logger.error("Invalid process range: processMax must be greater than processMin.");
|
||||
normalizedRootMeanSquaredError(predicted, measured, processMin, processMax, options = {}) {
|
||||
const range = Number(processMax) - Number(processMin);
|
||||
if (!Number.isFinite(range) || range <= 0) {
|
||||
this._failOrLog(
|
||||
`Invalid process range: processMax (${processMax}) must be greater than processMin (${processMin}).`,
|
||||
options
|
||||
);
|
||||
return NaN;
|
||||
}
|
||||
const rmse = this.rootMeanSquaredError(predicted, measured);
|
||||
const rmse = this.rootMeanSquaredError(predicted, measured, options);
|
||||
return rmse / range;
|
||||
}
|
||||
|
||||
longTermNRMSD(input) {
|
||||
|
||||
const storedNRMSD = this.cumNRMSD;
|
||||
const storedCount = this.cumCount;
|
||||
const newCount = storedCount + 1;
|
||||
|
||||
// Update cumulative values
|
||||
this.cumCount = newCount;
|
||||
|
||||
// Calculate new running average
|
||||
if (storedCount === 0) {
|
||||
this.cumNRMSD = input; // First value
|
||||
} else {
|
||||
// Running average formula: newAvg = oldAvg + (newValue - oldAvg) / newCount
|
||||
this.cumNRMSD = storedNRMSD + (input - storedNRMSD) / newCount;
|
||||
normalizeUsingRealtime(predicted, measured, options = {}) {
|
||||
const { p, m } = this._validateSeries(predicted, measured, options);
|
||||
const realtimeMin = Math.min(Math.min(...p), Math.min(...m));
|
||||
const realtimeMax = Math.max(Math.max(...p), Math.max(...m));
|
||||
const range = realtimeMax - realtimeMin;
|
||||
if (!Number.isFinite(range) || range <= 0) {
|
||||
throw new Error('Invalid process range: processMax must be greater than processMin.');
|
||||
}
|
||||
const rmse = this.rootMeanSquaredError(p, m, options);
|
||||
return rmse / range;
|
||||
}
|
||||
|
||||
if(newCount >= 100) {
|
||||
// Return the current NRMSD value, not just the contribution from this sample
|
||||
return this.cumNRMSD;
|
||||
}
|
||||
longTermNRMSD(input, metricId = this.legacyMetricId, options = {}) {
|
||||
const metricKey = String(metricId || this.legacyMetricId);
|
||||
const state = this._ensureMetricState(metricKey);
|
||||
const profile = this._resolveProfile(metricKey, options);
|
||||
const value = Number(input);
|
||||
if (!Number.isFinite(value)) {
|
||||
this._failOrLog(`longTermNRMSD input must be finite. Received: ${input}`, options);
|
||||
return 0;
|
||||
}
|
||||
|
||||
normalizeUsingRealtime(predicted, measured) {
|
||||
const realtimeMin = Math.min(Math.min(...predicted), Math.min(...measured));
|
||||
const realtimeMax = Math.max(Math.max(...predicted), Math.max(...measured));
|
||||
const range = realtimeMax - realtimeMin;
|
||||
if (range <= 0) {
|
||||
throw new Error("Invalid process range: processMax must be greater than processMin.");
|
||||
// Keep backward compatibility if callers manipulate cumCount/cumNRMSD directly.
|
||||
if (metricKey === this.legacyMetricId && (state.sampleCount !== this.cumCount || state.longTermEwma !== this.cumNRMSD)) {
|
||||
state.sampleCount = Number(this.cumCount) || 0;
|
||||
state.longTermEwma = Number(this.cumNRMSD) || 0;
|
||||
}
|
||||
const rmse = this.rootMeanSquaredError(predicted, measured);
|
||||
return rmse / range;
|
||||
|
||||
state.sampleCount += 1;
|
||||
const alpha = profile.ewmaAlpha;
|
||||
state.longTermEwma = state.sampleCount === 1 ? value : (alpha * value) + ((1 - alpha) * state.longTermEwma);
|
||||
|
||||
if (metricKey === this.legacyMetricId) {
|
||||
this.cumCount = state.sampleCount;
|
||||
this.cumNRMSD = state.longTermEwma;
|
||||
}
|
||||
|
||||
if (state.sampleCount < profile.minSamplesForLongTerm) {
|
||||
return 0;
|
||||
}
|
||||
return state.longTermEwma;
|
||||
}
|
||||
|
||||
detectImmediateDrift(nrmse) {
|
||||
let ImmDrift = {};
|
||||
this.logger.debug(`checking immediate drift with thresholds : ${this.config.thresholds.NRMSE_HIGH} ${this.config.thresholds.NRMSE_MEDIUM} ${this.config.thresholds.NRMSE_LOW}`);
|
||||
switch (true) {
|
||||
case( nrmse > this.config.thresholds.NRMSE_HIGH ) :
|
||||
ImmDrift = {level : 3 , feedback : "High immediate drift detected"};
|
||||
break;
|
||||
case( nrmse > this.config.thresholds.NRMSE_MEDIUM ) :
|
||||
ImmDrift = {level : 2 , feedback : "Medium immediate drift detected"};
|
||||
break;
|
||||
case(nrmse > this.config.thresholds.NRMSE_LOW ):
|
||||
ImmDrift = {level : 1 , feedback : "Low immediate drift detected"};
|
||||
break;
|
||||
default:
|
||||
ImmDrift = {level : 0 , feedback : "No drift detected"};
|
||||
}
|
||||
return ImmDrift;
|
||||
const thresholds = this.config.thresholds;
|
||||
if (nrmse > thresholds.NRMSE_HIGH) return { level: 3, feedback: 'High immediate drift detected' };
|
||||
if (nrmse > thresholds.NRMSE_MEDIUM) return { level: 2, feedback: 'Medium immediate drift detected' };
|
||||
if (nrmse > thresholds.NRMSE_LOW) return { level: 1, feedback: 'Low immediate drift detected' };
|
||||
return { level: 0, feedback: 'No drift detected' };
|
||||
}
|
||||
|
||||
detectLongTermDrift(longTermNRMSD) {
|
||||
let LongTermDrift = {};
|
||||
this.logger.debug(`checking longterm drift with thresholds : ${this.config.thresholds.LONG_TERM_HIGH} ${this.config.thresholds.LONG_TERM_MEDIUM} ${this.config.thresholds.LONG_TERM_LOW}`);
|
||||
switch (true) {
|
||||
case(Math.abs(longTermNRMSD) > this.config.thresholds.LONG_TERM_HIGH) :
|
||||
LongTermDrift = {level : 3 , feedback : "High long-term drift detected"};
|
||||
break;
|
||||
case (Math.abs(longTermNRMSD) > this.config.thresholds.LONG_TERM_MEDIUM) :
|
||||
LongTermDrift = {level : 2 , feedback : "Medium long-term drift detected"};
|
||||
break;
|
||||
case ( Math.abs(longTermNRMSD) > this.config.thresholds.LONG_TERM_LOW ) :
|
||||
LongTermDrift = {level : 1 , feedback : "Low long-term drift detected"};
|
||||
break;
|
||||
default:
|
||||
LongTermDrift = {level : 0 , feedback : "No drift detected"};
|
||||
}
|
||||
return LongTermDrift;
|
||||
const thresholds = this.config.thresholds;
|
||||
const absValue = Math.abs(longTermNRMSD);
|
||||
if (absValue > thresholds.LONG_TERM_HIGH) return { level: 3, feedback: 'High long-term drift detected' };
|
||||
if (absValue > thresholds.LONG_TERM_MEDIUM) return { level: 2, feedback: 'Medium long-term drift detected' };
|
||||
if (absValue > thresholds.LONG_TERM_LOW) return { level: 1, feedback: 'Low long-term drift detected' };
|
||||
return { level: 0, feedback: 'No drift detected' };
|
||||
}
|
||||
|
||||
detectDrift(nrmse, longTermNRMSD) {
|
||||
@@ -128,27 +129,272 @@ class ErrorMetrics {
|
||||
return { ImmDrift, LongTermDrift };
|
||||
}
|
||||
|
||||
// asses the drift
|
||||
assessDrift(predicted, measured, processMin, processMax) {
|
||||
// Compute NRMSE and check for immediate drift
|
||||
const nrmse = this.normalizedRootMeanSquaredError(predicted, measured, processMin, processMax);
|
||||
this.logger.debug(`NRMSE: ${nrmse}`);
|
||||
// cmopute long-term NRMSD and add result to cumalitve NRMSD
|
||||
const longTermNRMSD = this.longTermNRMSD(nrmse);
|
||||
// return the drift
|
||||
// Return the drift assessment object
|
||||
assessDrift(predicted, measured, processMin, processMax, options = {}) {
|
||||
const metricKey = String(options.metricId || this.legacyMetricId);
|
||||
const profile = this._resolveProfile(metricKey, options);
|
||||
const strict = this._resolveStrict(options, profile);
|
||||
|
||||
const aligned = this._alignSeriesByTimestamp(predicted, measured, options, profile);
|
||||
if (!aligned.valid) {
|
||||
if (strict) {
|
||||
throw new Error(aligned.reason);
|
||||
}
|
||||
return this._invalidAssessment(metricKey, aligned.reason);
|
||||
}
|
||||
|
||||
const nrmse = this.normalizedRootMeanSquaredError(
|
||||
aligned.predicted,
|
||||
aligned.measured,
|
||||
processMin,
|
||||
processMax,
|
||||
{ ...options, strictValidation: strict }
|
||||
);
|
||||
if (!Number.isFinite(nrmse)) {
|
||||
if (strict) {
|
||||
throw new Error('NRMSE calculation returned a non-finite value.');
|
||||
}
|
||||
return this._invalidAssessment(metricKey, 'non_finite_nrmse');
|
||||
}
|
||||
|
||||
const longTermNRMSD = this.longTermNRMSD(nrmse, metricKey, { ...options, strictValidation: strict });
|
||||
const driftAssessment = this.detectDrift(nrmse, longTermNRMSD);
|
||||
return {
|
||||
const state = this._ensureMetricState(metricKey);
|
||||
state.lastResult = {
|
||||
nrmse,
|
||||
longTermNRMSD,
|
||||
immediateLevel: driftAssessment.ImmDrift.level,
|
||||
immediateFeedback: driftAssessment.ImmDrift.feedback,
|
||||
longTermLevel: driftAssessment.LongTermDrift.level,
|
||||
longTermFeedback: driftAssessment.LongTermDrift.feedback
|
||||
longTermFeedback: driftAssessment.LongTermDrift.feedback,
|
||||
valid: true,
|
||||
metricId: metricKey,
|
||||
sampleCount: state.sampleCount,
|
||||
longTermReady: state.sampleCount >= profile.minSamplesForLongTerm,
|
||||
flags: [],
|
||||
};
|
||||
return state.lastResult;
|
||||
}
|
||||
|
||||
assessPoint(metricId, predictedValue, measuredValue, options = {}) {
|
||||
const metricKey = String(metricId || this.legacyMetricId);
|
||||
const profile = this._resolveProfile(metricKey, options);
|
||||
const state = this._ensureMetricState(metricKey);
|
||||
const strict = this._resolveStrict(options, profile);
|
||||
|
||||
const p = Number(predictedValue);
|
||||
const m = Number(measuredValue);
|
||||
if (!Number.isFinite(p) || !Number.isFinite(m)) {
|
||||
const reason = `assessPoint requires finite numbers. predicted=${predictedValue}, measured=${measuredValue}`;
|
||||
if (strict) {
|
||||
throw new Error(reason);
|
||||
}
|
||||
return this._invalidAssessment(metricKey, reason);
|
||||
}
|
||||
|
||||
const predictedTimestamp = Number(options.predictedTimestamp ?? options.timestamp ?? Date.now());
|
||||
const measuredTimestamp = Number(options.measuredTimestamp ?? options.timestamp ?? Date.now());
|
||||
const delta = Math.abs(predictedTimestamp - measuredTimestamp);
|
||||
if (delta > profile.alignmentToleranceMs) {
|
||||
const reason = `Sample timestamp delta (${delta} ms) exceeds tolerance (${profile.alignmentToleranceMs} ms)`;
|
||||
if (strict) {
|
||||
throw new Error(reason);
|
||||
}
|
||||
return this._invalidAssessment(metricKey, reason);
|
||||
}
|
||||
|
||||
state.predicted.push(p);
|
||||
state.measured.push(m);
|
||||
state.predictedTimestamps.push(predictedTimestamp);
|
||||
state.measuredTimestamps.push(measuredTimestamp);
|
||||
|
||||
while (state.predicted.length > profile.windowSize) state.predicted.shift();
|
||||
while (state.measured.length > profile.windowSize) state.measured.shift();
|
||||
while (state.predictedTimestamps.length > profile.windowSize) state.predictedTimestamps.shift();
|
||||
while (state.measuredTimestamps.length > profile.windowSize) state.measuredTimestamps.shift();
|
||||
|
||||
if (state.predicted.length < 2 || state.measured.length < 2) {
|
||||
return this._invalidAssessment(metricKey, 'insufficient_samples');
|
||||
}
|
||||
|
||||
let processMin = Number(options.processMin);
|
||||
let processMax = Number(options.processMax);
|
||||
if (!Number.isFinite(processMin) || !Number.isFinite(processMax) || processMax <= processMin) {
|
||||
processMin = Math.min(...state.predicted, ...state.measured);
|
||||
processMax = Math.max(...state.predicted, ...state.measured);
|
||||
if (!Number.isFinite(processMin) || !Number.isFinite(processMax) || processMax <= processMin) {
|
||||
processMin = 0;
|
||||
processMax = 1;
|
||||
}
|
||||
}
|
||||
|
||||
return this.assessDrift(state.predicted, state.measured, processMin, processMax, {
|
||||
...options,
|
||||
metricId: metricKey,
|
||||
strictValidation: strict,
|
||||
predictedTimestamps: state.predictedTimestamps,
|
||||
measuredTimestamps: state.measuredTimestamps,
|
||||
});
|
||||
}
|
||||
|
||||
_ensureMetricState(metricId) {
|
||||
const key = String(metricId || this.legacyMetricId);
|
||||
if (!this.metricState.has(key)) {
|
||||
this.metricState.set(key, {
|
||||
predicted: [],
|
||||
measured: [],
|
||||
predictedTimestamps: [],
|
||||
measuredTimestamps: [],
|
||||
sampleCount: 0,
|
||||
longTermEwma: 0,
|
||||
profile: {},
|
||||
lastResult: null,
|
||||
});
|
||||
}
|
||||
return this.metricState.get(key);
|
||||
}
|
||||
|
||||
_resolveProfile(metricId, options = {}) {
|
||||
const state = this._ensureMetricState(metricId);
|
||||
const base = this.config.processing || {};
|
||||
return {
|
||||
windowSize: Number(options.windowSize ?? state.profile.windowSize ?? base.windowSize ?? 50),
|
||||
minSamplesForLongTerm: Number(options.minSamplesForLongTerm ?? state.profile.minSamplesForLongTerm ?? base.minSamplesForLongTerm ?? 100),
|
||||
ewmaAlpha: Number(options.ewmaAlpha ?? state.profile.ewmaAlpha ?? base.ewmaAlpha ?? 0.1),
|
||||
alignmentToleranceMs: Number(options.alignmentToleranceMs ?? state.profile.alignmentToleranceMs ?? base.alignmentToleranceMs ?? 2000),
|
||||
strictValidation: Boolean(options.strictValidation ?? state.profile.strictValidation ?? base.strictValidation ?? true),
|
||||
};
|
||||
}
|
||||
|
||||
_resolveStrict(options = {}, profile = null) {
|
||||
if (Object.prototype.hasOwnProperty.call(options, 'strictValidation')) {
|
||||
return Boolean(options.strictValidation);
|
||||
}
|
||||
if (profile && Object.prototype.hasOwnProperty.call(profile, 'strictValidation')) {
|
||||
return Boolean(profile.strictValidation);
|
||||
}
|
||||
return Boolean(this.config.processing?.strictValidation ?? true);
|
||||
}
|
||||
|
||||
_validateSeries(predicted, measured, options = {}) {
|
||||
if (!Array.isArray(predicted) || !Array.isArray(measured)) {
|
||||
this._failOrLog('predicted and measured must be arrays.', options);
|
||||
return { p: [], m: [] };
|
||||
}
|
||||
if (!predicted.length || !measured.length) {
|
||||
this._failOrLog('predicted and measured arrays must not be empty.', options);
|
||||
return { p: [], m: [] };
|
||||
}
|
||||
if (predicted.length !== measured.length) {
|
||||
this._failOrLog('predicted and measured arrays must have the same length.', options);
|
||||
return { p: [], m: [] };
|
||||
}
|
||||
|
||||
const p = predicted.map(Number);
|
||||
const m = measured.map(Number);
|
||||
const hasBad = p.some((v) => !Number.isFinite(v)) || m.some((v) => !Number.isFinite(v));
|
||||
if (hasBad) {
|
||||
this._failOrLog('predicted and measured arrays must contain finite numeric values.', options);
|
||||
return { p: [], m: [] };
|
||||
}
|
||||
return { p, m };
|
||||
}
|
||||
|
||||
_alignSeriesByTimestamp(predicted, measured, options = {}, profile = null) {
|
||||
const strict = this._resolveStrict(options, profile);
|
||||
const tolerance = Number(options.alignmentToleranceMs ?? profile?.alignmentToleranceMs ?? 2000);
|
||||
const predictedTimestamps = Array.isArray(options.predictedTimestamps) ? options.predictedTimestamps.map(Number) : null;
|
||||
const measuredTimestamps = Array.isArray(options.measuredTimestamps) ? options.measuredTimestamps.map(Number) : null;
|
||||
|
||||
if (!predictedTimestamps || !measuredTimestamps) {
|
||||
if (!Array.isArray(predicted) || !Array.isArray(measured)) {
|
||||
return { valid: false, reason: 'predicted and measured must be arrays.' };
|
||||
}
|
||||
if (predicted.length !== measured.length) {
|
||||
const reason = `Series length mismatch without timestamps: predicted=${predicted.length}, measured=${measured.length}`;
|
||||
if (strict) return { valid: false, reason };
|
||||
const n = Math.min(predicted.length, measured.length);
|
||||
if (n < 2) return { valid: false, reason };
|
||||
return {
|
||||
valid: true,
|
||||
predicted: predicted.slice(-n).map(Number),
|
||||
measured: measured.slice(-n).map(Number),
|
||||
flags: ['length_mismatch_realigned'],
|
||||
};
|
||||
}
|
||||
try {
|
||||
const { p, m } = this._validateSeries(predicted, measured, { ...options, strictValidation: true });
|
||||
return { valid: true, predicted: p, measured: m, flags: [] };
|
||||
} catch (error) {
|
||||
return { valid: false, reason: error.message };
|
||||
}
|
||||
}
|
||||
|
||||
if (!Array.isArray(predicted) || !Array.isArray(measured)) {
|
||||
return { valid: false, reason: 'predicted and measured must be arrays.' };
|
||||
}
|
||||
if (predicted.length !== predictedTimestamps.length || measured.length !== measuredTimestamps.length) {
|
||||
return { valid: false, reason: 'timestamp arrays must match value-array lengths.' };
|
||||
}
|
||||
|
||||
const predictedSamples = predicted
|
||||
.map((v, i) => ({ value: Number(v), ts: predictedTimestamps[i] }))
|
||||
.filter((s) => Number.isFinite(s.value) && Number.isFinite(s.ts))
|
||||
.sort((a, b) => a.ts - b.ts);
|
||||
const measuredSamples = measured
|
||||
.map((v, i) => ({ value: Number(v), ts: measuredTimestamps[i] }))
|
||||
.filter((s) => Number.isFinite(s.value) && Number.isFinite(s.ts))
|
||||
.sort((a, b) => a.ts - b.ts);
|
||||
|
||||
const alignedPredicted = [];
|
||||
const alignedMeasured = [];
|
||||
let i = 0;
|
||||
let j = 0;
|
||||
while (i < predictedSamples.length && j < measuredSamples.length) {
|
||||
const p = predictedSamples[i];
|
||||
const m = measuredSamples[j];
|
||||
const delta = p.ts - m.ts;
|
||||
if (Math.abs(delta) <= tolerance) {
|
||||
alignedPredicted.push(p.value);
|
||||
alignedMeasured.push(m.value);
|
||||
i += 1;
|
||||
j += 1;
|
||||
} else if (delta < 0) {
|
||||
i += 1;
|
||||
} else {
|
||||
j += 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (alignedPredicted.length < 2 || alignedMeasured.length < 2) {
|
||||
return { valid: false, reason: 'insufficient aligned samples after timestamp matching.' };
|
||||
}
|
||||
|
||||
return { valid: true, predicted: alignedPredicted, measured: alignedMeasured, flags: [] };
|
||||
}
|
||||
|
||||
_invalidAssessment(metricId, reason) {
|
||||
return {
|
||||
nrmse: NaN,
|
||||
longTermNRMSD: 0,
|
||||
immediateLevel: 0,
|
||||
immediateFeedback: 'Drift assessment unavailable',
|
||||
longTermLevel: 0,
|
||||
longTermFeedback: 'Drift assessment unavailable',
|
||||
valid: false,
|
||||
metricId: String(metricId || this.legacyMetricId),
|
||||
sampleCount: this._ensureMetricState(metricId).sampleCount,
|
||||
longTermReady: false,
|
||||
flags: [reason],
|
||||
};
|
||||
}
|
||||
|
||||
_failOrLog(message, options = {}) {
|
||||
const strict = this._resolveStrict(options);
|
||||
if (strict) {
|
||||
throw new Error(message);
|
||||
}
|
||||
this.logger?.warn?.(message);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ErrorMetrics;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"general": {
|
||||
"name": {
|
||||
"default": "ErrorMetrics",
|
||||
"default": "errormetrics",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "A human-readable name for the configuration."
|
||||
@@ -58,7 +58,7 @@
|
||||
},
|
||||
"functionality": {
|
||||
"softwareType": {
|
||||
"default": "errorMetrics",
|
||||
"default": "errormetrics",
|
||||
"rules": {
|
||||
"type": "string",
|
||||
"description": "Logical name identifying the software type."
|
||||
@@ -134,5 +134,47 @@
|
||||
"description": "High threshold for long-term normalized root mean squared deviation."
|
||||
}
|
||||
}
|
||||
},
|
||||
"processing": {
|
||||
"windowSize": {
|
||||
"default": 50,
|
||||
"rules": {
|
||||
"type": "integer",
|
||||
"min": 2,
|
||||
"description": "Rolling sample window size used for drift evaluation."
|
||||
}
|
||||
},
|
||||
"minSamplesForLongTerm": {
|
||||
"default": 100,
|
||||
"rules": {
|
||||
"type": "integer",
|
||||
"min": 1,
|
||||
"description": "Minimum sample count before long-term drift is considered mature."
|
||||
}
|
||||
},
|
||||
"ewmaAlpha": {
|
||||
"default": 0.1,
|
||||
"rules": {
|
||||
"type": "number",
|
||||
"min": 0.001,
|
||||
"max": 1,
|
||||
"description": "EWMA smoothing factor for long-term drift trend."
|
||||
}
|
||||
},
|
||||
"alignmentToleranceMs": {
|
||||
"default": 2000,
|
||||
"rules": {
|
||||
"type": "integer",
|
||||
"min": 0,
|
||||
"description": "Maximum timestamp delta allowed between predicted and measured sample pairs."
|
||||
}
|
||||
},
|
||||
"strictValidation": {
|
||||
"default": true,
|
||||
"rules": {
|
||||
"type": "boolean",
|
||||
"description": "When true, invalid inputs raise errors instead of producing silent outputs."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,16 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Discrete PID controller with optional derivative filtering and integral limits.
|
||||
* Sample times are expressed in milliseconds to align with Node.js timestamps.
|
||||
* Production-focused discrete PID controller with modern control features:
|
||||
* - auto/manual and bumpless transfer
|
||||
* - freeze/unfreeze (hold output while optionally tracking process)
|
||||
* - derivative filtering and derivative-on-measurement/error
|
||||
* - anti-windup (clamp or back-calculation)
|
||||
* - output and integral limits
|
||||
* - output rate limiting
|
||||
* - deadband
|
||||
* - gain scheduling (array/function)
|
||||
* - feedforward and dynamic tunings at runtime
|
||||
*/
|
||||
class PIDController {
|
||||
constructor(options = {}) {
|
||||
@@ -17,7 +25,19 @@ class PIDController {
|
||||
integralMin = null,
|
||||
integralMax = null,
|
||||
derivativeOnMeasurement = true,
|
||||
autoMode = true
|
||||
setpointWeight = 1,
|
||||
derivativeWeight = 0,
|
||||
deadband = 0,
|
||||
outputRateLimitUp = Number.POSITIVE_INFINITY,
|
||||
outputRateLimitDown = Number.POSITIVE_INFINITY,
|
||||
antiWindupMode = 'clamp',
|
||||
backCalculationGain = 0,
|
||||
gainSchedule = null,
|
||||
autoMode = true,
|
||||
trackOnManual = true,
|
||||
frozen = false,
|
||||
freezeTrackMeasurement = true,
|
||||
freezeTrackError = false,
|
||||
} = options;
|
||||
|
||||
this.kp = 0;
|
||||
@@ -29,17 +49,23 @@ class PIDController {
|
||||
this.setOutputLimits(outputMin, outputMax);
|
||||
this.setIntegralLimits(integralMin, integralMax);
|
||||
this.setDerivativeFilter(derivativeFilter);
|
||||
this.setSetpointWeights({ beta: setpointWeight, gamma: derivativeWeight });
|
||||
this.setDeadband(deadband);
|
||||
this.setOutputRateLimits(outputRateLimitUp, outputRateLimitDown);
|
||||
this.setAntiWindup({ mode: antiWindupMode, backCalculationGain });
|
||||
this.setGainSchedule(gainSchedule);
|
||||
|
||||
this.derivativeOnMeasurement = Boolean(derivativeOnMeasurement);
|
||||
this.autoMode = Boolean(autoMode);
|
||||
this.trackOnManual = Boolean(trackOnManual);
|
||||
|
||||
this.frozen = Boolean(frozen);
|
||||
this.freezeTrackMeasurement = Boolean(freezeTrackMeasurement);
|
||||
this.freezeTrackError = Boolean(freezeTrackError);
|
||||
|
||||
this.reset();
|
||||
}
|
||||
|
||||
/**
|
||||
* Update controller gains at runtime.
|
||||
* Accepts partial objects, e.g. setTunings({ kp: 2.0 }).
|
||||
*/
|
||||
setTunings({ kp = this.kp, ki = this.ki, kd = this.kd } = {}) {
|
||||
[kp, ki, kd].forEach((gain, index) => {
|
||||
if (!Number.isFinite(gain)) {
|
||||
@@ -54,9 +80,6 @@ class PIDController {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the controller execution interval in milliseconds.
|
||||
*/
|
||||
setSampleTime(sampleTimeMs = this.sampleTime) {
|
||||
if (!Number.isFinite(sampleTimeMs) || sampleTimeMs <= 0) {
|
||||
throw new RangeError('sampleTime must be a positive number of milliseconds');
|
||||
@@ -66,9 +89,6 @@ class PIDController {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constrain controller output.
|
||||
*/
|
||||
setOutputLimits(min = this.outputMin, max = this.outputMax) {
|
||||
if (!Number.isFinite(min) && min !== Number.NEGATIVE_INFINITY) {
|
||||
throw new TypeError('outputMin must be finite or -Infinity');
|
||||
@@ -86,9 +106,6 @@ class PIDController {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constrain the accumulated integral term.
|
||||
*/
|
||||
setIntegralLimits(min = this.integralMin ?? null, max = this.integralMax ?? null) {
|
||||
if (min !== null && !Number.isFinite(min)) {
|
||||
throw new TypeError('integralMin must be null or a finite number');
|
||||
@@ -106,10 +123,6 @@ class PIDController {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure exponential filter applied to the derivative term.
|
||||
* Value 0 disables filtering, 1 keeps the previous derivative entirely.
|
||||
*/
|
||||
setDerivativeFilter(value = this.derivativeFilter ?? 0) {
|
||||
if (!Number.isFinite(value) || value < 0 || value > 1) {
|
||||
throw new RangeError('derivativeFilter must be between 0 and 1');
|
||||
@@ -119,94 +132,294 @@ class PIDController {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Switch between automatic (closed-loop) and manual mode.
|
||||
*/
|
||||
setMode(mode) {
|
||||
if (mode !== 'automatic' && mode !== 'manual') {
|
||||
throw new Error('mode must be either "automatic" or "manual"');
|
||||
setSetpointWeights({ beta = this.setpointWeight ?? 1, gamma = this.derivativeWeight ?? 0 } = {}) {
|
||||
if (!Number.isFinite(beta) || !Number.isFinite(gamma)) {
|
||||
throw new TypeError('setpoint and derivative weights must be finite numbers');
|
||||
}
|
||||
|
||||
this.autoMode = mode === 'automatic';
|
||||
this.setpointWeight = beta;
|
||||
this.derivativeWeight = gamma;
|
||||
return this;
|
||||
}
|
||||
|
||||
setDeadband(value = this.deadband ?? 0) {
|
||||
if (!Number.isFinite(value) || value < 0) {
|
||||
throw new RangeError('deadband must be a non-negative finite number');
|
||||
}
|
||||
|
||||
this.deadband = value;
|
||||
return this;
|
||||
}
|
||||
|
||||
setOutputRateLimits(up = this.outputRateLimitUp, down = this.outputRateLimitDown) {
|
||||
if (!Number.isFinite(up) && up !== Number.POSITIVE_INFINITY) {
|
||||
throw new TypeError('outputRateLimitUp must be finite or Infinity');
|
||||
}
|
||||
if (!Number.isFinite(down) && down !== Number.POSITIVE_INFINITY) {
|
||||
throw new TypeError('outputRateLimitDown must be finite or Infinity');
|
||||
}
|
||||
if (up <= 0 || down <= 0) {
|
||||
throw new RangeError('output rate limits must be positive values');
|
||||
}
|
||||
|
||||
this.outputRateLimitUp = up;
|
||||
this.outputRateLimitDown = down;
|
||||
return this;
|
||||
}
|
||||
|
||||
setAntiWindup({ mode = this.antiWindupMode ?? 'clamp', backCalculationGain = this.backCalculationGain ?? 0 } = {}) {
|
||||
const normalized = String(mode || 'clamp').trim().toLowerCase();
|
||||
if (normalized !== 'clamp' && normalized !== 'backcalc') {
|
||||
throw new RangeError('anti windup mode must be "clamp" or "backcalc"');
|
||||
}
|
||||
if (!Number.isFinite(backCalculationGain) || backCalculationGain < 0) {
|
||||
throw new RangeError('backCalculationGain must be a non-negative finite number');
|
||||
}
|
||||
|
||||
this.antiWindupMode = normalized;
|
||||
this.backCalculationGain = backCalculationGain;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Force a manual output (typically when in manual mode).
|
||||
* Gain schedule options:
|
||||
* - null: disabled
|
||||
* - function(input, state) => { kp, ki, kd }
|
||||
* - array: [{ min, max, kp, ki, kd }, ...]
|
||||
*/
|
||||
setGainSchedule(schedule = null) {
|
||||
if (schedule == null) {
|
||||
this.gainSchedule = null;
|
||||
return this;
|
||||
}
|
||||
|
||||
if (typeof schedule === 'function') {
|
||||
this.gainSchedule = schedule;
|
||||
return this;
|
||||
}
|
||||
|
||||
if (!Array.isArray(schedule)) {
|
||||
throw new TypeError('gainSchedule must be null, a function, or an array');
|
||||
}
|
||||
|
||||
schedule.forEach((entry, index) => {
|
||||
if (!entry || typeof entry !== 'object') {
|
||||
throw new TypeError(`gainSchedule[${index}] must be an object`);
|
||||
}
|
||||
const { min = Number.NEGATIVE_INFINITY, max = Number.POSITIVE_INFINITY, kp, ki, kd } = entry;
|
||||
if (!Number.isFinite(min) && min !== Number.NEGATIVE_INFINITY) {
|
||||
throw new TypeError(`gainSchedule[${index}].min must be finite or -Infinity`);
|
||||
}
|
||||
if (!Number.isFinite(max) && max !== Number.POSITIVE_INFINITY) {
|
||||
throw new TypeError(`gainSchedule[${index}].max must be finite or Infinity`);
|
||||
}
|
||||
if (min >= max) {
|
||||
throw new RangeError(`gainSchedule[${index}] min must be smaller than max`);
|
||||
}
|
||||
[kp, ki, kd].forEach((value, gainIndex) => {
|
||||
const label = ['kp', 'ki', 'kd'][gainIndex];
|
||||
if (!Number.isFinite(value)) {
|
||||
throw new TypeError(`gainSchedule[${index}].${label} must be finite`);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
this.gainSchedule = schedule;
|
||||
return this;
|
||||
}
|
||||
|
||||
setMode(mode, options = {}) {
|
||||
if (mode !== 'automatic' && mode !== 'manual') {
|
||||
throw new Error('mode must be either "automatic" or "manual"');
|
||||
}
|
||||
|
||||
const nextAuto = mode === 'automatic';
|
||||
const previousAuto = this.autoMode;
|
||||
this.autoMode = nextAuto;
|
||||
|
||||
if (options && Number.isFinite(options.manualOutput)) {
|
||||
this.setManualOutput(options.manualOutput);
|
||||
}
|
||||
|
||||
if (!previousAuto && nextAuto) {
|
||||
this._initializeForAuto(options);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
freeze(options = {}) {
|
||||
this.frozen = true;
|
||||
this.freezeTrackMeasurement = options.trackMeasurement !== false;
|
||||
this.freezeTrackError = Boolean(options.trackError);
|
||||
|
||||
if (Number.isFinite(options.output)) {
|
||||
this.setManualOutput(options.output);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
unfreeze() {
|
||||
this.frozen = false;
|
||||
return this;
|
||||
}
|
||||
|
||||
isFrozen() {
|
||||
return this.frozen;
|
||||
}
|
||||
|
||||
setManualOutput(value) {
|
||||
this._assertNumeric('manual output', value);
|
||||
this.lastOutput = this._clamp(value, this.outputMin, this.outputMax);
|
||||
return this.lastOutput;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset dynamic state (integral, derivative memory, timestamps).
|
||||
*/
|
||||
reset(state = {}) {
|
||||
const {
|
||||
integral = 0,
|
||||
lastOutput = 0,
|
||||
timestamp = null
|
||||
timestamp = null,
|
||||
prevMeasurement = null,
|
||||
prevError = null,
|
||||
prevDerivativeInput = null,
|
||||
derivativeState = 0,
|
||||
} = state;
|
||||
|
||||
this.integral = this._applyIntegralLimits(Number.isFinite(integral) ? integral : 0);
|
||||
this.prevError = null;
|
||||
this.prevMeasurement = null;
|
||||
this.prevError = Number.isFinite(prevError) ? prevError : null;
|
||||
this.prevMeasurement = Number.isFinite(prevMeasurement) ? prevMeasurement : null;
|
||||
this.prevDerivativeInput = Number.isFinite(prevDerivativeInput) ? prevDerivativeInput : null;
|
||||
this.lastOutput = this._clamp(
|
||||
Number.isFinite(lastOutput) ? lastOutput : 0,
|
||||
this.outputMin ?? Number.NEGATIVE_INFINITY,
|
||||
this.outputMax ?? Number.POSITIVE_INFINITY
|
||||
);
|
||||
this.lastTimestamp = Number.isFinite(timestamp) ? timestamp : null;
|
||||
this.derivativeState = 0;
|
||||
this.derivativeState = Number.isFinite(derivativeState) ? derivativeState : 0;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute one control loop iteration.
|
||||
*/
|
||||
update(setpoint, measurement, timestamp = Date.now()) {
|
||||
update(setpoint, measurement, timestamp = Date.now(), options = {}) {
|
||||
if (timestamp && typeof timestamp === 'object' && options && Object.keys(options).length === 0) {
|
||||
options = timestamp;
|
||||
timestamp = Date.now();
|
||||
}
|
||||
|
||||
this._assertNumeric('setpoint', setpoint);
|
||||
this._assertNumeric('measurement', measurement);
|
||||
this._assertNumeric('timestamp', timestamp);
|
||||
|
||||
const opts = options || {};
|
||||
|
||||
if (opts.tunings && typeof opts.tunings === 'object') {
|
||||
this.setTunings(opts.tunings);
|
||||
}
|
||||
|
||||
if (Number.isFinite(opts.gainInput)) {
|
||||
this._applyGainSchedule(opts.gainInput, { setpoint, measurement, timestamp });
|
||||
}
|
||||
|
||||
if (typeof opts.setMode === 'string') {
|
||||
this.setMode(opts.setMode, opts);
|
||||
}
|
||||
|
||||
if (opts.freeze === true) this.freeze(opts);
|
||||
if (opts.unfreeze === true) this.unfreeze();
|
||||
|
||||
if (Number.isFinite(opts.manualOutput)) {
|
||||
this.setManualOutput(opts.manualOutput);
|
||||
}
|
||||
|
||||
const feedForward = Number.isFinite(opts.feedForward) ? opts.feedForward : 0;
|
||||
const force = Boolean(opts.force);
|
||||
|
||||
const error = setpoint - measurement;
|
||||
|
||||
if (!this.autoMode) {
|
||||
this.prevError = setpoint - measurement;
|
||||
this.prevMeasurement = measurement;
|
||||
this.lastTimestamp = timestamp;
|
||||
if (this.trackOnManual) {
|
||||
this._trackProcessState(setpoint, measurement, error, timestamp);
|
||||
}
|
||||
return this.lastOutput;
|
||||
}
|
||||
|
||||
if (this.lastTimestamp !== null && (timestamp - this.lastTimestamp) < this.sampleTime) {
|
||||
if (this.frozen) {
|
||||
if (this.freezeTrackMeasurement || this.freezeTrackError) {
|
||||
this._trackProcessState(setpoint, measurement, error, timestamp, {
|
||||
trackMeasurement: this.freezeTrackMeasurement,
|
||||
trackError: this.freezeTrackError,
|
||||
});
|
||||
}
|
||||
return this.lastOutput;
|
||||
}
|
||||
|
||||
if (!force && this.lastTimestamp !== null && (timestamp - this.lastTimestamp) < this.sampleTime) {
|
||||
return this.lastOutput;
|
||||
}
|
||||
|
||||
const elapsedMs = this.lastTimestamp === null ? this.sampleTime : (timestamp - this.lastTimestamp);
|
||||
const dtSeconds = Math.max(elapsedMs / 1000, Number.EPSILON);
|
||||
|
||||
const error = setpoint - measurement;
|
||||
this.integral = this._applyIntegralLimits(this.integral + error * dtSeconds);
|
||||
|
||||
const derivative = this._computeDerivative({ error, measurement, dtSeconds });
|
||||
this.derivativeState = this.derivativeFilter === 0
|
||||
? derivative
|
||||
: this.derivativeState + (derivative - this.derivativeState) * (1 - this.derivativeFilter);
|
||||
|
||||
const output = (this.kp * error) + (this.ki * this.integral) + (this.kd * this.derivativeState);
|
||||
this.lastOutput = this._clamp(output, this.outputMin, this.outputMax);
|
||||
|
||||
const inDeadband = Math.abs(error) <= this.deadband;
|
||||
if (inDeadband) {
|
||||
this.prevError = error;
|
||||
this.prevMeasurement = measurement;
|
||||
this.prevDerivativeInput = this.derivativeOnMeasurement
|
||||
? measurement
|
||||
: ((this.derivativeWeight * setpoint) - measurement);
|
||||
this.lastTimestamp = timestamp;
|
||||
return this.lastOutput;
|
||||
}
|
||||
|
||||
const effectiveError = error;
|
||||
|
||||
const pInput = (this.setpointWeight * setpoint) - measurement;
|
||||
const pTerm = this.kp * pInput;
|
||||
|
||||
const derivativeRaw = this._computeDerivative({ setpoint, measurement, error, dtSeconds });
|
||||
this.derivativeState = this.derivativeFilter === 0
|
||||
? derivativeRaw
|
||||
: this.derivativeState + (derivativeRaw - this.derivativeState) * (1 - this.derivativeFilter);
|
||||
|
||||
const dTerm = this.kd * this.derivativeState;
|
||||
|
||||
const nextIntegral = this._applyIntegralLimits(this.integral + (effectiveError * dtSeconds));
|
||||
let unclampedOutput = pTerm + (this.ki * nextIntegral) + dTerm + feedForward;
|
||||
let clampedOutput = this._clamp(unclampedOutput, this.outputMin, this.outputMax);
|
||||
|
||||
if (this.antiWindupMode === 'backcalc' && this.ki !== 0 && this.backCalculationGain > 0) {
|
||||
const correctedIntegral = nextIntegral + ((clampedOutput - unclampedOutput) * this.backCalculationGain * dtSeconds);
|
||||
this.integral = this._applyIntegralLimits(correctedIntegral);
|
||||
} else {
|
||||
const saturatingHigh = clampedOutput >= this.outputMax && effectiveError > 0;
|
||||
const saturatingLow = clampedOutput <= this.outputMin && effectiveError < 0;
|
||||
this.integral = (saturatingHigh || saturatingLow) ? this.integral : nextIntegral;
|
||||
}
|
||||
|
||||
let output = pTerm + (this.ki * this.integral) + dTerm + feedForward;
|
||||
output = this._clamp(output, this.outputMin, this.outputMax);
|
||||
|
||||
if (this.lastTimestamp !== null) {
|
||||
output = this._applyRateLimit(output, this.lastOutput, dtSeconds);
|
||||
}
|
||||
|
||||
if (Number.isFinite(opts.trackingOutput)) {
|
||||
this._trackIntegralToOutput(opts.trackingOutput, { pTerm, dTerm, feedForward });
|
||||
output = this._clamp(opts.trackingOutput, this.outputMin, this.outputMax);
|
||||
}
|
||||
|
||||
this.lastOutput = output;
|
||||
this.prevError = error;
|
||||
this.prevMeasurement = measurement;
|
||||
this.prevDerivativeInput = this.derivativeOnMeasurement
|
||||
? measurement
|
||||
: ((this.derivativeWeight * setpoint) - measurement);
|
||||
this.lastTimestamp = timestamp;
|
||||
|
||||
return this.lastOutput;
|
||||
}
|
||||
|
||||
/**
|
||||
* Inspect controller state for diagnostics or persistence.
|
||||
*/
|
||||
getState() {
|
||||
return {
|
||||
kp: this.kp,
|
||||
@@ -217,10 +430,18 @@ class PIDController {
|
||||
integralLimits: { min: this.integralMin, max: this.integralMax },
|
||||
derivativeFilter: this.derivativeFilter,
|
||||
derivativeOnMeasurement: this.derivativeOnMeasurement,
|
||||
setpointWeight: this.setpointWeight,
|
||||
derivativeWeight: this.derivativeWeight,
|
||||
deadband: this.deadband,
|
||||
outputRateLimits: { up: this.outputRateLimitUp, down: this.outputRateLimitDown },
|
||||
antiWindupMode: this.antiWindupMode,
|
||||
backCalculationGain: this.backCalculationGain,
|
||||
autoMode: this.autoMode,
|
||||
frozen: this.frozen,
|
||||
integral: this.integral,
|
||||
derivativeState: this.derivativeState,
|
||||
lastOutput: this.lastOutput,
|
||||
lastTimestamp: this.lastTimestamp
|
||||
lastTimestamp: this.lastTimestamp,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -228,22 +449,110 @@ class PIDController {
|
||||
return this.lastOutput;
|
||||
}
|
||||
|
||||
_computeDerivative({ error, measurement, dtSeconds }) {
|
||||
_initializeForAuto(options = {}) {
|
||||
const setpoint = Number.isFinite(options.setpoint) ? options.setpoint : null;
|
||||
const measurement = Number.isFinite(options.measurement) ? options.measurement : null;
|
||||
const timestamp = Number.isFinite(options.timestamp) ? options.timestamp : Date.now();
|
||||
|
||||
if (measurement !== null) {
|
||||
this.prevMeasurement = measurement;
|
||||
}
|
||||
if (setpoint !== null && measurement !== null) {
|
||||
this.prevError = setpoint - measurement;
|
||||
this.prevDerivativeInput = this.derivativeOnMeasurement
|
||||
? measurement
|
||||
: ((this.derivativeWeight * setpoint) - measurement);
|
||||
}
|
||||
|
||||
this.lastTimestamp = timestamp;
|
||||
|
||||
if (this.ki !== 0 && setpoint !== null && measurement !== null) {
|
||||
const pTerm = this.kp * ((this.setpointWeight * setpoint) - measurement);
|
||||
const dTerm = this.kd * this.derivativeState;
|
||||
const trackedIntegral = (this.lastOutput - pTerm - dTerm) / this.ki;
|
||||
this.integral = this._applyIntegralLimits(Number.isFinite(trackedIntegral) ? trackedIntegral : this.integral);
|
||||
}
|
||||
}
|
||||
|
||||
_trackProcessState(setpoint, measurement, error, timestamp, tracking = {}) {
|
||||
const trackMeasurement = tracking.trackMeasurement !== false;
|
||||
const trackError = Boolean(tracking.trackError);
|
||||
|
||||
if (trackMeasurement) {
|
||||
this.prevMeasurement = measurement;
|
||||
this.prevDerivativeInput = this.derivativeOnMeasurement
|
||||
? measurement
|
||||
: ((this.derivativeWeight * setpoint) - measurement);
|
||||
}
|
||||
|
||||
if (trackError) {
|
||||
this.prevError = error;
|
||||
}
|
||||
|
||||
this.lastTimestamp = timestamp;
|
||||
}
|
||||
|
||||
_trackIntegralToOutput(trackingOutput, terms) {
|
||||
if (this.ki === 0) return;
|
||||
const { pTerm, dTerm, feedForward } = terms;
|
||||
const targetIntegral = (trackingOutput - pTerm - dTerm - feedForward) / this.ki;
|
||||
if (Number.isFinite(targetIntegral)) {
|
||||
this.integral = this._applyIntegralLimits(targetIntegral);
|
||||
}
|
||||
}
|
||||
|
||||
_applyGainSchedule(input, state) {
|
||||
if (!this.gainSchedule) return;
|
||||
|
||||
if (typeof this.gainSchedule === 'function') {
|
||||
const tunings = this.gainSchedule(input, this.getState(), state);
|
||||
if (tunings && typeof tunings === 'object') {
|
||||
this.setTunings(tunings);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const matched = this.gainSchedule.find((entry) => input >= entry.min && input < entry.max);
|
||||
if (matched) {
|
||||
this.setTunings({ kp: matched.kp, ki: matched.ki, kd: matched.kd });
|
||||
}
|
||||
}
|
||||
|
||||
_computeDerivative({ setpoint, measurement, error, dtSeconds }) {
|
||||
if (!(dtSeconds > 0) || !Number.isFinite(dtSeconds)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (this.derivativeOnMeasurement && this.prevMeasurement !== null) {
|
||||
if (this.derivativeOnMeasurement) {
|
||||
if (this.prevMeasurement === null) return 0;
|
||||
return -(measurement - this.prevMeasurement) / dtSeconds;
|
||||
}
|
||||
|
||||
if (this.prevError === null) {
|
||||
return 0;
|
||||
const derivativeInput = (this.derivativeWeight * setpoint) - measurement;
|
||||
if (this.prevDerivativeInput === null) return 0;
|
||||
const derivativeFromInput = (derivativeInput - this.prevDerivativeInput) / dtSeconds;
|
||||
|
||||
if (Number.isFinite(derivativeFromInput)) {
|
||||
return derivativeFromInput;
|
||||
}
|
||||
|
||||
if (this.prevError === null) return 0;
|
||||
return (error - this.prevError) / dtSeconds;
|
||||
}
|
||||
|
||||
_applyRateLimit(nextOutput, previousOutput, dtSeconds) {
|
||||
const maxRise = Number.isFinite(this.outputRateLimitUp)
|
||||
? this.outputRateLimitUp * dtSeconds
|
||||
: Number.POSITIVE_INFINITY;
|
||||
const maxFall = Number.isFinite(this.outputRateLimitDown)
|
||||
? this.outputRateLimitDown * dtSeconds
|
||||
: Number.POSITIVE_INFINITY;
|
||||
|
||||
const lower = previousOutput - maxFall;
|
||||
const upper = previousOutput + maxRise;
|
||||
return this._clamp(nextOutput, lower, upper);
|
||||
}
|
||||
|
||||
_applyIntegralLimits(value) {
|
||||
if (!Number.isFinite(value)) {
|
||||
return 0;
|
||||
@@ -266,14 +575,89 @@ class PIDController {
|
||||
}
|
||||
|
||||
_clamp(value, min, max) {
|
||||
if (value < min) {
|
||||
return min;
|
||||
}
|
||||
if (value > max) {
|
||||
return max;
|
||||
}
|
||||
if (value < min) return min;
|
||||
if (value > max) return max;
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = PIDController;
|
||||
/**
|
||||
* Cascade PID utility:
|
||||
* - primary PID controls the outer variable
|
||||
* - primary output becomes setpoint for secondary PID
|
||||
*/
|
||||
class CascadePIDController {
|
||||
constructor(options = {}) {
|
||||
const {
|
||||
primary = {},
|
||||
secondary = {},
|
||||
} = options;
|
||||
|
||||
this.primary = primary instanceof PIDController ? primary : new PIDController(primary);
|
||||
this.secondary = secondary instanceof PIDController ? secondary : new PIDController(secondary);
|
||||
}
|
||||
|
||||
update({
|
||||
setpoint,
|
||||
primaryMeasurement,
|
||||
secondaryMeasurement,
|
||||
timestamp = Date.now(),
|
||||
primaryOptions = {},
|
||||
secondaryOptions = {},
|
||||
} = {}) {
|
||||
if (!Number.isFinite(setpoint)) {
|
||||
throw new TypeError('setpoint must be a finite number');
|
||||
}
|
||||
if (!Number.isFinite(primaryMeasurement)) {
|
||||
throw new TypeError('primaryMeasurement must be a finite number');
|
||||
}
|
||||
if (!Number.isFinite(secondaryMeasurement)) {
|
||||
throw new TypeError('secondaryMeasurement must be a finite number');
|
||||
}
|
||||
|
||||
const secondarySetpoint = this.primary.update(setpoint, primaryMeasurement, timestamp, primaryOptions);
|
||||
const controlOutput = this.secondary.update(secondarySetpoint, secondaryMeasurement, timestamp, secondaryOptions);
|
||||
|
||||
return {
|
||||
primaryOutput: secondarySetpoint,
|
||||
secondaryOutput: controlOutput,
|
||||
state: this.getState(),
|
||||
};
|
||||
}
|
||||
|
||||
setMode(mode, options = {}) {
|
||||
this.primary.setMode(mode, options.primary || options);
|
||||
this.secondary.setMode(mode, options.secondary || options);
|
||||
return this;
|
||||
}
|
||||
|
||||
freeze(options = {}) {
|
||||
this.primary.freeze(options.primary || options);
|
||||
this.secondary.freeze(options.secondary || options);
|
||||
return this;
|
||||
}
|
||||
|
||||
unfreeze() {
|
||||
this.primary.unfreeze();
|
||||
this.secondary.unfreeze();
|
||||
return this;
|
||||
}
|
||||
|
||||
reset(state = {}) {
|
||||
this.primary.reset(state.primary || {});
|
||||
this.secondary.reset(state.secondary || {});
|
||||
return this;
|
||||
}
|
||||
|
||||
getState() {
|
||||
return {
|
||||
primary: this.primary.getState(),
|
||||
secondary: this.secondary.getState(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
PIDController,
|
||||
CascadePIDController,
|
||||
};
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
const PIDController = require('./PIDController');
|
||||
const { PIDController, CascadePIDController } = require('./PIDController');
|
||||
|
||||
/**
|
||||
* Convenience factory for one-line instantiation.
|
||||
* Convenience factories.
|
||||
*/
|
||||
const createPidController = (options) => new PIDController(options);
|
||||
const createCascadePidController = (options) => new CascadePIDController(options);
|
||||
|
||||
module.exports = {
|
||||
PIDController,
|
||||
createPidController
|
||||
CascadePIDController,
|
||||
createPidController,
|
||||
createCascadePidController,
|
||||
};
|
||||
|
||||
@@ -88,7 +88,7 @@ class Interpolation {
|
||||
array_values(obj) {
|
||||
const new_array = [];
|
||||
for (let i in obj) {
|
||||
if (obj.hasOwnProperty(i)) {
|
||||
if (Object.prototype.hasOwnProperty.call(obj, i)) {
|
||||
new_array.push(obj[i]);
|
||||
}
|
||||
}
|
||||
@@ -101,6 +101,7 @@ class Interpolation {
|
||||
} else if (type == "monotone_cubic_spline") {
|
||||
this.monotonic_cubic_spline();
|
||||
} else if (type == "linear") {
|
||||
/* intentionally empty */
|
||||
} else {
|
||||
this.error = 1000;
|
||||
}
|
||||
@@ -230,7 +231,6 @@ class Interpolation {
|
||||
let xdata = this.input_xdata;
|
||||
let ydata = this.input_ydata;
|
||||
|
||||
let interpolationtype = this.interpolationtype;
|
||||
let tension = this.tension;
|
||||
|
||||
let n = ydata.length;
|
||||
@@ -266,6 +266,7 @@ class Interpolation {
|
||||
let k = 0;
|
||||
|
||||
if (xpoint < xdata[0] || xpoint > xdata[n - 1]) {
|
||||
/* intentionally empty */
|
||||
}
|
||||
|
||||
while (k < n - 1 && xpoint > xdata[k + 1] && !(xpoint < xdata[0] || xpoint > xdata[n - 1])) {
|
||||
|
||||
@@ -161,6 +161,11 @@ class Predict {
|
||||
//find index of y peak
|
||||
const { peak , peakIndex } = this.getLocalPeak(curve.y);
|
||||
|
||||
// Guard against invalid peakIndex (e.g. empty array returns -1)
|
||||
if (peakIndex < 0 || peakIndex >= curve.x.length) {
|
||||
return { yPeak: null, x: null, xProcent: null };
|
||||
}
|
||||
|
||||
// scale the x value to procentual value
|
||||
const yPeak = peak;
|
||||
const x = curve.x[peakIndex];
|
||||
|
||||
@@ -13,12 +13,12 @@ class movementManager {
|
||||
|
||||
this.speed = speed;
|
||||
this.maxSpeed = maxSpeed;
|
||||
console.log(`MovementManager: Initial speed=${this.speed}, maxSpeed=${maxSpeed}`);
|
||||
this.interval = interval;
|
||||
this.timeleft = 0; // timeleft of current movement
|
||||
|
||||
this.logger = logger;
|
||||
this.movementMode = config.movement.mode;
|
||||
this.logger?.debug?.(`MovementManager initialized: speed=${this.speed}, maxSpeed=${this.maxSpeed}`);
|
||||
}
|
||||
|
||||
getCurrentPosition() {
|
||||
@@ -49,15 +49,17 @@ class movementManager {
|
||||
try {
|
||||
// Execute the movement logic based on the mode
|
||||
switch (this.movementMode) {
|
||||
case "staticspeed":
|
||||
case "staticspeed": {
|
||||
const movelinFeedback = await this.moveLinear(targetPosition,signal);
|
||||
this.logger.info(`Linear move: ${movelinFeedback} `);
|
||||
break;
|
||||
}
|
||||
|
||||
case "dynspeed":
|
||||
case "dynspeed": {
|
||||
const moveDynFeedback = await this.moveEaseInOut(targetPosition,signal);
|
||||
this.logger.info(`Dynamic move : ${moveDynFeedback}`);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unsupported movement mode: ${this.movementMode}`);
|
||||
@@ -211,7 +213,6 @@ class movementManager {
|
||||
return reject(new Error("Movement aborted"));
|
||||
}
|
||||
|
||||
const direction = targetPosition > this.currentPosition ? 1 : -1;
|
||||
const totalDistance = Math.abs(targetPosition - this.currentPosition);
|
||||
const startPosition = this.currentPosition;
|
||||
const velocity = this.getVelocity();
|
||||
|
||||
@@ -20,6 +20,10 @@ test('barrel exports expected public members', () => {
|
||||
'coolprop',
|
||||
'convert',
|
||||
'MenuManager',
|
||||
'PIDController',
|
||||
'CascadePIDController',
|
||||
'createPidController',
|
||||
'createCascadePidController',
|
||||
'childRegistrationUtils',
|
||||
'loadCurve',
|
||||
'loadModel',
|
||||
@@ -38,5 +42,9 @@ test('barrel types are callable where expected', () => {
|
||||
assert.equal(typeof barrel.outputUtils, 'function');
|
||||
assert.equal(typeof barrel.MeasurementContainer, 'function');
|
||||
assert.equal(typeof barrel.convert, 'function');
|
||||
assert.equal(typeof barrel.PIDController, 'function');
|
||||
assert.equal(typeof barrel.CascadePIDController, 'function');
|
||||
assert.equal(typeof barrel.createPidController, 'function');
|
||||
assert.equal(typeof barrel.createCascadePidController, 'function');
|
||||
assert.equal(typeof barrel.gravity.getStandardGravity, 'function');
|
||||
});
|
||||
|
||||
360
test/childRegistration.test.js
Normal file
360
test/childRegistration.test.js
Normal file
@@ -0,0 +1,360 @@
|
||||
const ChildRegistrationUtils = require('../src/helper/childRegistrationUtils');
|
||||
const { POSITIONS } = require('../src/constants/positions');
|
||||
|
||||
// ── Helpers ──────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Build a minimal stand-in for the mainClass that ChildRegistrationUtils
 * expects: an empty `child` tree, a jest-mocked logger, and an optional
 * `registerChild` delegation hook. Any extra keys in `opts` are merged on top.
 */
function createMockParent(opts = {}) {
  const logger = {
    debug: jest.fn(),
    info: jest.fn(),
    warn: jest.fn(),
    error: jest.fn(),
  };
  const parent = {
    child: {},
    logger,
    // stays undefined unless the caller supplies a delegation callback
    registerChild: opts.registerChild || undefined,
  };
  // caller-supplied keys win over the defaults above
  return { ...parent, ...opts };
}
|
||||
|
||||
/**
 * Build a minimal mock child node.
 * `overrides` can tweak individual config fields (id, name, softwareType,
 * position, category, assetType, measurements); `overrides.extra` merges
 * additional top-level properties onto the result.
 */
function createMockChild(overrides = {}) {
  const {
    id,
    name,
    softwareType,
    position,
    category,
    assetType,
    measurements,
    extra,
  } = overrides;

  const base = {
    config: {
      general: {
        id: id || 'child-1',
        name: name || 'TestChild',
      },
      functionality: {
        // explicit !== undefined check: an empty-string softwareType is a
        // deliberate test input and must NOT fall back to the default
        softwareType: softwareType !== undefined ? softwareType : 'measurement',
        positionVsParent: position || POSITIONS.UPSTREAM,
      },
      asset: {
        category: category || 'sensor',
        type: assetType || 'pressure',
      },
    },
    measurements: measurements || null,
  };

  // extra top-level props from the caller win over the defaults
  return { ...base, ...(extra || {}) };
}
|
||||
|
||||
// ── Tests ────────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('ChildRegistrationUtils', () => {
|
||||
let parent;
|
||||
let utils;
|
||||
|
||||
beforeEach(() => {
|
||||
parent = createMockParent();
|
||||
utils = new ChildRegistrationUtils(parent);
|
||||
});
|
||||
|
||||
// ── Construction ─────────────────────────────────────────────────────────
|
||||
describe('constructor', () => {
|
||||
it('should store a reference to the mainClass', () => {
|
||||
expect(utils.mainClass).toBe(parent);
|
||||
});
|
||||
|
||||
it('should initialise with an empty registeredChildren map', () => {
|
||||
expect(utils.registeredChildren.size).toBe(0);
|
||||
});
|
||||
|
||||
it('should use the parent logger', () => {
|
||||
expect(utils.logger).toBe(parent.logger);
|
||||
});
|
||||
});
|
||||
|
||||
// ── registerChild ────────────────────────────────────────────────────────
|
||||
describe('registerChild()', () => {
|
||||
it('should register a child and store it in the internal map', async () => {
|
||||
const child = createMockChild();
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
expect(utils.registeredChildren.size).toBe(1);
|
||||
expect(utils.registeredChildren.has('child-1')).toBe(true);
|
||||
});
|
||||
|
||||
it('should store softwareType, position and timestamp in the registry entry', async () => {
|
||||
const child = createMockChild({ softwareType: 'machine' });
|
||||
const before = Date.now();
|
||||
await utils.registerChild(child, POSITIONS.DOWNSTREAM);
|
||||
const after = Date.now();
|
||||
|
||||
const entry = utils.registeredChildren.get('child-1');
|
||||
expect(entry.softwareType).toBe('machine');
|
||||
expect(entry.position).toBe(POSITIONS.DOWNSTREAM);
|
||||
expect(entry.registeredAt).toBeGreaterThanOrEqual(before);
|
||||
expect(entry.registeredAt).toBeLessThanOrEqual(after);
|
||||
});
|
||||
|
||||
it('should store the child in mainClass.child[softwareType][category]', async () => {
|
||||
const child = createMockChild({ softwareType: 'measurement', category: 'sensor' });
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
expect(parent.child.measurement).toBeDefined();
|
||||
expect(parent.child.measurement.sensor).toBeInstanceOf(Array);
|
||||
expect(parent.child.measurement.sensor).toContain(child);
|
||||
});
|
||||
|
||||
it('should set the parent reference on the child', async () => {
|
||||
const child = createMockChild();
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
expect(child.parent).toEqual([parent]);
|
||||
});
|
||||
|
||||
it('should set positionVsParent on the child', async () => {
|
||||
const child = createMockChild();
|
||||
await utils.registerChild(child, POSITIONS.DOWNSTREAM);
|
||||
|
||||
expect(child.positionVsParent).toBe(POSITIONS.DOWNSTREAM);
|
||||
});
|
||||
|
||||
it('should lowercase the softwareType before storing', async () => {
|
||||
const child = createMockChild({ softwareType: 'Measurement' });
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
const entry = utils.registeredChildren.get('child-1');
|
||||
expect(entry.softwareType).toBe('measurement');
|
||||
expect(parent.child.measurement).toBeDefined();
|
||||
});
|
||||
|
||||
it('should delegate to mainClass.registerChild when it is a function', async () => {
|
||||
const registerSpy = jest.fn();
|
||||
parent.registerChild = registerSpy;
|
||||
const child = createMockChild({ softwareType: 'measurement' });
|
||||
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
expect(registerSpy).toHaveBeenCalledWith(child, 'measurement');
|
||||
});
|
||||
|
||||
it('should NOT throw when mainClass has no registerChild method', async () => {
|
||||
delete parent.registerChild;
|
||||
const child = createMockChild();
|
||||
|
||||
await expect(utils.registerChild(child, POSITIONS.UPSTREAM)).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
it('should log a debug message on registration', async () => {
|
||||
const child = createMockChild({ name: 'Pump1', id: 'p1' });
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
expect(parent.logger.debug).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Registering child: Pump1')
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle empty softwareType gracefully', async () => {
|
||||
const child = createMockChild({ softwareType: '' });
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
const entry = utils.registeredChildren.get('child-1');
|
||||
expect(entry.softwareType).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
// ── Multiple children ────────────────────────────────────────────────────
|
||||
describe('multiple children registration', () => {
|
||||
it('should register multiple children of the same softwareType', async () => {
|
||||
const c1 = createMockChild({ id: 'c1', name: 'Sensor1', softwareType: 'measurement' });
|
||||
const c2 = createMockChild({ id: 'c2', name: 'Sensor2', softwareType: 'measurement' });
|
||||
|
||||
await utils.registerChild(c1, POSITIONS.UPSTREAM);
|
||||
await utils.registerChild(c2, POSITIONS.DOWNSTREAM);
|
||||
|
||||
expect(utils.registeredChildren.size).toBe(2);
|
||||
expect(parent.child.measurement.sensor).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should register children of different softwareTypes', async () => {
|
||||
const sensor = createMockChild({ id: 's1', softwareType: 'measurement' });
|
||||
const machine = createMockChild({ id: 'm1', softwareType: 'machine', category: 'pump' });
|
||||
|
||||
await utils.registerChild(sensor, POSITIONS.UPSTREAM);
|
||||
await utils.registerChild(machine, POSITIONS.AT_EQUIPMENT);
|
||||
|
||||
expect(parent.child.measurement).toBeDefined();
|
||||
expect(parent.child.machine).toBeDefined();
|
||||
expect(parent.child.machine.pump).toContain(machine);
|
||||
});
|
||||
|
||||
it('should register children of different categories under the same softwareType', async () => {
|
||||
const sensor = createMockChild({ id: 's1', softwareType: 'measurement', category: 'sensor' });
|
||||
const analyser = createMockChild({ id: 'a1', softwareType: 'measurement', category: 'analyser' });
|
||||
|
||||
await utils.registerChild(sensor, POSITIONS.UPSTREAM);
|
||||
await utils.registerChild(analyser, POSITIONS.DOWNSTREAM);
|
||||
|
||||
expect(parent.child.measurement.sensor).toHaveLength(1);
|
||||
expect(parent.child.measurement.analyser).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should support multiple parents on a child (array append)', async () => {
|
||||
const parent2 = createMockParent();
|
||||
const utils2 = new ChildRegistrationUtils(parent2);
|
||||
const child = createMockChild();
|
||||
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
await utils2.registerChild(child, POSITIONS.DOWNSTREAM);
|
||||
|
||||
expect(child.parent).toEqual([parent, parent2]);
|
||||
});
|
||||
});
|
||||
|
||||
// ── Duplicate registration ───────────────────────────────────────────────
|
||||
describe('duplicate registration', () => {
|
||||
it('should overwrite the registry entry when the same child id is registered twice', async () => {
|
||||
const child = createMockChild({ id: 'dup-1' });
|
||||
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
await utils.registerChild(child, POSITIONS.DOWNSTREAM);
|
||||
|
||||
// Map.set overwrites, so still size 1
|
||||
expect(utils.registeredChildren.size).toBe(1);
|
||||
const entry = utils.registeredChildren.get('dup-1');
|
||||
expect(entry.position).toBe(POSITIONS.DOWNSTREAM);
|
||||
});
|
||||
|
||||
it('should push the child into the category array again on duplicate registration', async () => {
|
||||
const child = createMockChild({ id: 'dup-1' });
|
||||
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
// _storeChild does a push each time
|
||||
expect(parent.child.measurement.sensor).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
// ── Measurement context setup ────────────────────────────────────────────
|
||||
describe('measurement context on child', () => {
|
||||
it('should call setChildId, setChildName, setParentRef when child has measurements', async () => {
|
||||
const measurements = {
|
||||
setChildId: jest.fn(),
|
||||
setChildName: jest.fn(),
|
||||
setParentRef: jest.fn(),
|
||||
};
|
||||
const child = createMockChild({ id: 'mc-1', name: 'Sensor1', measurements });
|
||||
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
expect(measurements.setChildId).toHaveBeenCalledWith('mc-1');
|
||||
expect(measurements.setChildName).toHaveBeenCalledWith('Sensor1');
|
||||
expect(measurements.setParentRef).toHaveBeenCalledWith(parent);
|
||||
});
|
||||
|
||||
it('should skip measurement setup when child has no measurements object', async () => {
|
||||
const child = createMockChild({ measurements: null });
|
||||
|
||||
// Should not throw
|
||||
await expect(utils.registerChild(child, POSITIONS.UPSTREAM)).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
// ── getChildrenOfType ────────────────────────────────────────────────────
|
||||
describe('getChildrenOfType()', () => {
|
||||
beforeEach(async () => {
|
||||
const s1 = createMockChild({ id: 's1', softwareType: 'measurement', category: 'sensor' });
|
||||
const s2 = createMockChild({ id: 's2', softwareType: 'measurement', category: 'sensor' });
|
||||
const a1 = createMockChild({ id: 'a1', softwareType: 'measurement', category: 'analyser' });
|
||||
const m1 = createMockChild({ id: 'm1', softwareType: 'machine', category: 'pump' });
|
||||
|
||||
await utils.registerChild(s1, POSITIONS.UPSTREAM);
|
||||
await utils.registerChild(s2, POSITIONS.DOWNSTREAM);
|
||||
await utils.registerChild(a1, POSITIONS.UPSTREAM);
|
||||
await utils.registerChild(m1, POSITIONS.AT_EQUIPMENT);
|
||||
});
|
||||
|
||||
it('should return all children of a given softwareType', () => {
|
||||
const measurements = utils.getChildrenOfType('measurement');
|
||||
expect(measurements).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should return children filtered by category', () => {
|
||||
const sensors = utils.getChildrenOfType('measurement', 'sensor');
|
||||
expect(sensors).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should return empty array for unknown softwareType', () => {
|
||||
expect(utils.getChildrenOfType('nonexistent')).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array for unknown category', () => {
|
||||
expect(utils.getChildrenOfType('measurement', 'nonexistent')).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
// ── getChildById ─────────────────────────────────────────────────────────
|
||||
describe('getChildById()', () => {
|
||||
it('should return the child by its id', async () => {
|
||||
const child = createMockChild({ id: 'find-me' });
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
expect(utils.getChildById('find-me')).toBe(child);
|
||||
});
|
||||
|
||||
it('should return null for unknown id', () => {
|
||||
expect(utils.getChildById('does-not-exist')).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// ── getAllChildren ───────────────────────────────────────────────────────
|
||||
describe('getAllChildren()', () => {
|
||||
it('should return an empty array when no children registered', () => {
|
||||
expect(utils.getAllChildren()).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return all registered child objects', async () => {
|
||||
const c1 = createMockChild({ id: 'c1' });
|
||||
const c2 = createMockChild({ id: 'c2' });
|
||||
await utils.registerChild(c1, POSITIONS.UPSTREAM);
|
||||
await utils.registerChild(c2, POSITIONS.DOWNSTREAM);
|
||||
|
||||
const all = utils.getAllChildren();
|
||||
expect(all).toHaveLength(2);
|
||||
expect(all).toContain(c1);
|
||||
expect(all).toContain(c2);
|
||||
});
|
||||
});
|
||||
|
||||
// ── logChildStructure ───────────────────────────────────────────────────
|
||||
describe('logChildStructure()', () => {
|
||||
it('should log the child structure via debug', async () => {
|
||||
const child = createMockChild({ id: 'log-1', name: 'LogChild' });
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
utils.logChildStructure();
|
||||
|
||||
expect(parent.logger.debug).toHaveBeenCalledWith(
|
||||
'Current child structure:',
|
||||
expect.any(String)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// ── _storeChild (internal) ──────────────────────────────────────────────
|
||||
describe('_storeChild() internal behaviour', () => {
|
||||
it('should create the child object on parent if it does not exist', async () => {
|
||||
delete parent.child;
|
||||
const child = createMockChild();
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
expect(parent.child).toBeDefined();
|
||||
expect(parent.child.measurement.sensor).toContain(child);
|
||||
});
|
||||
|
||||
it('should use "sensor" as default category when asset.category is absent', async () => {
|
||||
const child = createMockChild();
|
||||
// remove asset.category to trigger default
|
||||
delete child.config.asset.category;
|
||||
await utils.registerChild(child, POSITIONS.UPSTREAM);
|
||||
|
||||
expect(parent.child.measurement.sensor).toContain(child);
|
||||
});
|
||||
});
|
||||
});
|
||||
217
test/configManager.test.js
Normal file
217
test/configManager.test.js
Normal file
@@ -0,0 +1,217 @@
|
||||
const path = require('path');
|
||||
const ConfigManager = require('../src/configs/index');
|
||||
|
||||
describe('ConfigManager', () => {
|
||||
const configDir = path.resolve(__dirname, '../src/configs');
|
||||
let cm;
|
||||
|
||||
beforeEach(() => {
|
||||
cm = new ConfigManager(configDir);
|
||||
});
|
||||
|
||||
// ── getConfig() ──────────────────────────────────────────────────────
|
||||
describe('getConfig()', () => {
|
||||
it('should load and parse a known JSON config file', () => {
|
||||
const config = cm.getConfig('baseConfig');
|
||||
expect(config).toBeDefined();
|
||||
expect(typeof config).toBe('object');
|
||||
});
|
||||
|
||||
it('should return the same content on successive calls', () => {
|
||||
const a = cm.getConfig('baseConfig');
|
||||
const b = cm.getConfig('baseConfig');
|
||||
expect(a).toEqual(b);
|
||||
});
|
||||
|
||||
it('should throw when the config file does not exist', () => {
|
||||
expect(() => cm.getConfig('nonExistentConfig_xyz'))
|
||||
.toThrow(/Failed to load config/);
|
||||
});
|
||||
|
||||
it('should throw a descriptive message including the config name', () => {
|
||||
expect(() => cm.getConfig('missing'))
|
||||
.toThrow("Failed to load config 'missing'");
|
||||
});
|
||||
});
|
||||
|
||||
// ── hasConfig() ──────────────────────────────────────────────────────
|
||||
describe('hasConfig()', () => {
|
||||
it('should return true for a config that exists', () => {
|
||||
expect(cm.hasConfig('baseConfig')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for a config that does not exist', () => {
|
||||
expect(cm.hasConfig('doesNotExist_abc')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// ── getAvailableConfigs() ────────────────────────────────────────────
|
||||
describe('getAvailableConfigs()', () => {
|
||||
it('should return an array of strings', () => {
|
||||
const configs = cm.getAvailableConfigs();
|
||||
expect(Array.isArray(configs)).toBe(true);
|
||||
configs.forEach(name => expect(typeof name).toBe('string'));
|
||||
});
|
||||
|
||||
it('should include known config names without .json extension', () => {
|
||||
const configs = cm.getAvailableConfigs();
|
||||
expect(configs).toContain('baseConfig');
|
||||
expect(configs).toContain('diffuser');
|
||||
expect(configs).toContain('measurement');
|
||||
});
|
||||
|
||||
it('should not include .json extension in returned names', () => {
|
||||
const configs = cm.getAvailableConfigs();
|
||||
configs.forEach(name => {
|
||||
expect(name).not.toMatch(/\.json$/);
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw when pointed at a non-existent directory', () => {
|
||||
const bad = new ConfigManager('/tmp/nonexistent_dir_xyz_123');
|
||||
expect(() => bad.getAvailableConfigs()).toThrow(/Failed to read config directory/);
|
||||
});
|
||||
});
|
||||
|
||||
// ── buildConfig() ────────────────────────────────────────────────────
|
||||
describe('buildConfig()', () => {
|
||||
it('should return an object with general and functionality sections', () => {
|
||||
const uiConfig = { name: 'TestNode', unit: 'bar', enableLog: true, logLevel: 'debug' };
|
||||
const result = cm.buildConfig('measurement', uiConfig, 'node-id-1');
|
||||
expect(result).toHaveProperty('general');
|
||||
expect(result).toHaveProperty('functionality');
|
||||
expect(result).toHaveProperty('output');
|
||||
});
|
||||
|
||||
it('should populate general.name from uiConfig.name', () => {
|
||||
const uiConfig = { name: 'MySensor' };
|
||||
const result = cm.buildConfig('measurement', uiConfig, 'id-1');
|
||||
expect(result.general.name).toBe('MySensor');
|
||||
});
|
||||
|
||||
it('should default general.name to nodeName when uiConfig.name is empty', () => {
|
||||
const result = cm.buildConfig('measurement', {}, 'id-1');
|
||||
expect(result.general.name).toBe('measurement');
|
||||
});
|
||||
|
||||
it('should set general.id from the nodeId argument', () => {
|
||||
const result = cm.buildConfig('valve', {}, 'node-42');
|
||||
expect(result.general.id).toBe('node-42');
|
||||
});
|
||||
|
||||
it('should default unit to unitless', () => {
|
||||
const result = cm.buildConfig('valve', {}, 'id-1');
|
||||
expect(result.general.unit).toBe('unitless');
|
||||
});
|
||||
|
||||
it('should default logging.enabled to true when enableLog is undefined', () => {
|
||||
const result = cm.buildConfig('valve', {}, 'id-1');
|
||||
expect(result.general.logging.enabled).toBe(true);
|
||||
});
|
||||
|
||||
it('should respect enableLog = false', () => {
|
||||
const result = cm.buildConfig('valve', { enableLog: false }, 'id-1');
|
||||
expect(result.general.logging.enabled).toBe(false);
|
||||
});
|
||||
|
||||
it('should default logLevel to info', () => {
|
||||
const result = cm.buildConfig('valve', {}, 'id-1');
|
||||
expect(result.general.logging.logLevel).toBe('info');
|
||||
});
|
||||
|
||||
it('should set functionality.softwareType to lowercase nodeName', () => {
|
||||
const result = cm.buildConfig('Valve', {}, 'id-1');
|
||||
expect(result.functionality.softwareType).toBe('valve');
|
||||
});
|
||||
|
||||
it('should default positionVsParent to atEquipment', () => {
|
||||
const result = cm.buildConfig('valve', {}, 'id-1');
|
||||
expect(result.functionality.positionVsParent).toBe('atEquipment');
|
||||
});
|
||||
|
||||
it('should set distance when hasDistance is true', () => {
|
||||
const result = cm.buildConfig('valve', { hasDistance: true, distance: 5.5 }, 'id-1');
|
||||
expect(result.functionality.distance).toBe(5.5);
|
||||
});
|
||||
|
||||
it('should set distance to undefined when hasDistance is false', () => {
|
||||
const result = cm.buildConfig('valve', { hasDistance: false, distance: 5.5 }, 'id-1');
|
||||
expect(result.functionality.distance).toBeUndefined();
|
||||
});
|
||||
|
||||
// ── asset section ──────────────────────────────────────────────────
|
||||
it('should not include asset section when no asset fields provided', () => {
|
||||
const result = cm.buildConfig('valve', {}, 'id-1');
|
||||
expect(result.asset).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should include asset section when supplier is provided', () => {
|
||||
const result = cm.buildConfig('valve', { supplier: 'Siemens' }, 'id-1');
|
||||
expect(result.asset).toBeDefined();
|
||||
expect(result.asset.supplier).toBe('Siemens');
|
||||
});
|
||||
|
||||
it('should populate asset defaults for missing optional fields', () => {
|
||||
const result = cm.buildConfig('valve', { supplier: 'ABB' }, 'id-1');
|
||||
expect(result.asset.category).toBe('sensor');
|
||||
expect(result.asset.type).toBe('Unknown');
|
||||
expect(result.asset.model).toBe('Unknown');
|
||||
});
|
||||
|
||||
// ── domainConfig merge ─────────────────────────────────────────────
|
||||
it('should merge domainConfig sections into the result', () => {
|
||||
const domain = { scaling: { enabled: true, factor: 2 } };
|
||||
const result = cm.buildConfig('measurement', {}, 'id-1', domain);
|
||||
expect(result.scaling).toEqual({ enabled: true, factor: 2 });
|
||||
});
|
||||
|
||||
it('should handle empty domainConfig gracefully', () => {
|
||||
const result = cm.buildConfig('measurement', {}, 'id-1', {});
|
||||
expect(result).toHaveProperty('general');
|
||||
expect(result).toHaveProperty('functionality');
|
||||
});
|
||||
|
||||
it('should default output formats to process and influxdb', () => {
|
||||
const result = cm.buildConfig('measurement', {}, 'id-1');
|
||||
expect(result.output).toEqual({
|
||||
process: 'process',
|
||||
dbase: 'influxdb',
|
||||
});
|
||||
});
|
||||
|
||||
it('should allow output format overrides from ui config', () => {
|
||||
const result = cm.buildConfig('measurement', {
|
||||
processOutputFormat: 'json',
|
||||
dbaseOutputFormat: 'csv',
|
||||
}, 'id-1');
|
||||
expect(result.output).toEqual({
|
||||
process: 'json',
|
||||
dbase: 'csv',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// ── createEndpoint() ─────────────────────────────────────────────────
|
||||
describe('createEndpoint()', () => {
|
||||
it('should return a JavaScript string containing the node name', () => {
|
||||
const script = cm.createEndpoint('baseConfig');
|
||||
expect(typeof script).toBe('string');
|
||||
expect(script).toContain('baseConfig');
|
||||
expect(script).toContain('window.EVOLV');
|
||||
});
|
||||
|
||||
it('should throw for a non-existent config', () => {
|
||||
expect(() => cm.createEndpoint('doesNotExist_xyz'))
|
||||
.toThrow(/Failed to create endpoint/);
|
||||
});
|
||||
});
|
||||
|
||||
// ── getBaseConfig() ──────────────────────────────────────────────────
|
||||
describe('getBaseConfig()', () => {
|
||||
it('should load the baseConfig.json file', () => {
|
||||
const base = cm.getBaseConfig();
|
||||
expect(base).toBeDefined();
|
||||
expect(typeof base).toBe('object');
|
||||
});
|
||||
});
|
||||
});
|
||||
13
test/curve-loader.test.js
Normal file
13
test/curve-loader.test.js
Normal file
@@ -0,0 +1,13 @@
|
||||
// Regression test: curve/asset ids must resolve regardless of letter case
// (the loader normalizes the cache key to lowercase).
const test = require('node:test');
const assert = require('node:assert/strict');

const { loadCurve } = require('../index.js');

test('loadCurve resolves curve ids case-insensitively', () => {
  // Same curve requested under two different casings of the id.
  const canonical = loadCurve('hidrostal-H05K-S03R');
  const lowercase = loadCurve('hidrostal-h05k-s03r');

  assert.ok(canonical);
  assert.ok(lowercase);
  // strictEqual (not deepEqual): both lookups should hit the same cache
  // entry and therefore return the very same object instance.
  assert.strictEqual(canonical, lowercase);
});
|
||||
@@ -59,3 +59,39 @@ test('_convertPositionNum2Str maps signs to labels', () => {
|
||||
assert.equal(c._convertPositionNum2Str(1), 'downstream');
|
||||
assert.equal(c._convertPositionNum2Str(-1), 'upstream');
|
||||
});
|
||||
|
||||
test('storeCanonical stores anchor unit internally and can emit preferred output units', () => {
|
||||
const c = new MeasurementContainer({
|
||||
windowSize: 10,
|
||||
autoConvert: true,
|
||||
defaultUnits: { flow: 'm3/h' },
|
||||
preferredUnits: { flow: 'm3/h' },
|
||||
canonicalUnits: { flow: 'm3/s' },
|
||||
storeCanonical: true,
|
||||
});
|
||||
|
||||
c.type('flow').variant('measured').position('upstream').value(3.6, 1, 'm3/h');
|
||||
|
||||
const internal = c.type('flow').variant('measured').position('upstream').getCurrentValue();
|
||||
assert.ok(Math.abs(internal - 0.001) < 1e-9);
|
||||
|
||||
const flat = c.getFlattenedOutput({ requestedUnits: { flow: 'm3/h' } });
|
||||
assert.ok(Math.abs(flat['flow.measured.upstream.default'] - 3.6) < 1e-9);
|
||||
});
|
||||
|
||||
test('strict unit validation rejects missing required unit and incompatible units', () => {
|
||||
const c = new MeasurementContainer({
|
||||
windowSize: 10,
|
||||
strictUnitValidation: true,
|
||||
throwOnInvalidUnit: true,
|
||||
requireUnitForTypes: ['flow'],
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
c.type('flow').variant('measured').position('upstream').value(10, 1);
|
||||
}, /Missing source unit/i);
|
||||
|
||||
assert.throws(() => {
|
||||
c.type('flow').variant('measured').position('upstream').value(10, 1, 'mbar');
|
||||
}, /Incompatible|unknown source unit/i);
|
||||
});
|
||||
|
||||
336
test/measurementContainer.test.js
Normal file
336
test/measurementContainer.test.js
Normal file
@@ -0,0 +1,336 @@
|
||||
// Unit tests for MeasurementContainer: a chainable store of rolling-window
// measurement samples keyed by type -> variant -> position.
const MeasurementContainer = require('../src/measurements/MeasurementContainer');

describe('MeasurementContainer', () => {
  let mc;

  beforeEach(() => {
    // Small window (5) so rolling-window eviction is easy to exercise;
    // autoConvert disabled so unit conversion cannot alter stored values.
    mc = new MeasurementContainer({ windowSize: 5, autoConvert: false });
  });

  // ── Construction ─────────────────────────────────────────────────────
  describe('constructor', () => {
    it('should initialise with default windowSize when none provided', () => {
      const m = new MeasurementContainer();
      expect(m.windowSize).toBe(10);
    });

    it('should accept a custom windowSize', () => {
      expect(mc.windowSize).toBe(5);
    });

    it('should start with an empty measurements map', () => {
      expect(mc.measurements).toEqual({});
    });

    it('should populate default units', () => {
      expect(mc.defaultUnits.pressure).toBe('mbar');
      expect(mc.defaultUnits.flow).toBe('m3/h');
    });

    it('should allow overriding default units', () => {
      const m = new MeasurementContainer({ defaultUnits: { pressure: 'Pa' } });
      expect(m.defaultUnits.pressure).toBe('Pa');
    });
  });

  // ── Chainable setters ───────────────────────────────────────────────
  describe('chaining API — type / variant / position', () => {
    it('should set type and return this for chaining', () => {
      const ret = mc.type('pressure');
      expect(ret).toBe(mc);
      expect(mc._currentType).toBe('pressure');
    });

    it('should reset variant and position when type is called', () => {
      mc.type('pressure').variant('measured').position('upstream');
      mc.type('flow');
      expect(mc._currentVariant).toBeNull();
      expect(mc._currentPosition).toBeNull();
    });

    it('should set variant and return this', () => {
      mc.type('pressure');
      const ret = mc.variant('measured');
      expect(ret).toBe(mc);
      expect(mc._currentVariant).toBe('measured');
    });

    it('should throw if variant is called without type', () => {
      expect(() => mc.variant('measured')).toThrow(/Type must be specified/);
    });

    it('should set position (lowercased) and return this', () => {
      mc.type('pressure').variant('measured');
      const ret = mc.position('Upstream');
      expect(ret).toBe(mc);
      expect(mc._currentPosition).toBe('upstream');
    });

    it('should throw if position is called without variant', () => {
      mc.type('pressure');
      expect(() => mc.position('upstream')).toThrow(/Variant must be specified/);
    });
  });

  // ── Storing and retrieving values ───────────────────────────────────
  describe('value() and retrieval methods', () => {
    beforeEach(() => {
      mc.type('pressure').variant('measured').position('upstream');
    });

    it('should store a value and retrieve it with getCurrentValue()', () => {
      mc.value(42, 1000);
      expect(mc.getCurrentValue()).toBe(42);
    });

    it('should return this for chaining from value()', () => {
      const ret = mc.value(1, 1000);
      expect(ret).toBe(mc);
    });

    it('should store multiple values and keep the latest', () => {
      mc.value(10, 1).value(20, 2).value(30, 3);
      expect(mc.getCurrentValue()).toBe(30);
    });

    it('should respect the windowSize (rolling window)', () => {
      for (let i = 1; i <= 8; i++) {
        mc.value(i, i);
      }
      const all = mc.getAllValues();
      // windowSize is 5, so only the last 5 values should remain
      expect(all.values.length).toBe(5);
      expect(all.values).toEqual([4, 5, 6, 7, 8]);
    });

    it('should compute getAverage() correctly', () => {
      mc.value(10, 1).value(20, 2).value(30, 3);
      expect(mc.getAverage()).toBe(20);
    });

    it('should compute getMin()', () => {
      mc.value(10, 1).value(5, 2).value(20, 3);
      expect(mc.getMin()).toBe(5);
    });

    it('should compute getMax()', () => {
      mc.value(10, 1).value(5, 2).value(20, 3);
      expect(mc.getMax()).toBe(20);
    });

    // Empty-series behavior: all aggregates return null, not 0/NaN.
    it('should return null for getCurrentValue() when no values exist', () => {
      expect(mc.getCurrentValue()).toBeNull();
    });

    it('should return null for getAverage() when no values exist', () => {
      expect(mc.getAverage()).toBeNull();
    });

    it('should return null for getMin() when no values exist', () => {
      expect(mc.getMin()).toBeNull();
    });

    it('should return null for getMax() when no values exist', () => {
      expect(mc.getMax()).toBeNull();
    });
  });

  // ── getAllValues() ──────────────────────────────────────────────────
  describe('getAllValues()', () => {
    it('should return values, timestamps, and unit', () => {
      mc.type('pressure').variant('measured').position('upstream');
      mc.unit('bar');
      mc.value(10, 100).value(20, 200);
      const all = mc.getAllValues();
      expect(all.values).toEqual([10, 20]);
      expect(all.timestamps).toEqual([100, 200]);
      expect(all.unit).toBe('bar');
    });

    it('should return null when chain is incomplete', () => {
      mc.type('pressure');
      expect(mc.getAllValues()).toBeNull();
    });
  });

  // ── unit() ──────────────────────────────────────────────────────────
  describe('unit()', () => {
    it('should set unit on the underlying measurement', () => {
      mc.type('pressure').variant('measured').position('upstream');
      mc.unit('bar');
      const measurement = mc.get();
      expect(measurement.unit).toBe('bar');
    });
  });

  // ── get() ───────────────────────────────────────────────────────────
  describe('get()', () => {
    it('should return the Measurement instance for a complete chain', () => {
      mc.type('pressure').variant('measured').position('upstream');
      mc.value(1, 1);
      const m = mc.get();
      expect(m).toBeDefined();
      expect(m.type).toBe('pressure');
      expect(m.variant).toBe('measured');
      expect(m.position).toBe('upstream');
    });

    it('should return null when chain is incomplete', () => {
      mc.type('pressure');
      expect(mc.get()).toBeNull();
    });
  });

  // ── exists() ────────────────────────────────────────────────────────
  describe('exists()', () => {
    it('should return false for a non-existent measurement', () => {
      mc.type('pressure').variant('measured').position('upstream');
      expect(mc.exists()).toBe(false);
    });

    it('should return true after a value has been stored', () => {
      mc.type('pressure').variant('measured').position('upstream').value(1, 1);
      expect(mc.exists()).toBe(true);
    });

    it('should support requireValues option', () => {
      mc.type('pressure').variant('measured').position('upstream');
      // Force creation of measurement without values
      mc.get();
      expect(mc.exists({ requireValues: false })).toBe(true);
      expect(mc.exists({ requireValues: true })).toBe(false);
    });

    it('should support explicit type/variant/position overrides', () => {
      mc.type('pressure').variant('measured').position('upstream').value(1, 1);
      // Reset chain, then query by explicit keys
      mc.type('flow');
      expect(mc.exists({ type: 'pressure', variant: 'measured', position: 'upstream' })).toBe(true);
      expect(mc.exists({ type: 'flow', variant: 'measured', position: 'upstream' })).toBe(false);
    });

    it('should return false when type is not set and not provided', () => {
      const fresh = new MeasurementContainer({ autoConvert: false });
      expect(fresh.exists()).toBe(false);
    });
  });

  // ── getLaggedValue() / getLaggedSample() ─────────────────────────────
  describe('getLaggedValue() and getLaggedSample()', () => {
    beforeEach(() => {
      mc.type('pressure').variant('measured').position('upstream');
      mc.value(10, 100).value(20, 200).value(30, 300);
    });

    // lag counts backwards from the newest sample: lag=0 is the latest.
    it('should return the value at lag=1 (previous value)', () => {
      expect(mc.getLaggedValue(1)).toBe(20);
    });

    it('should return null when lag exceeds stored values', () => {
      expect(mc.getLaggedValue(10)).toBeNull();
    });

    it('should return a sample object from getLaggedSample()', () => {
      const sample = mc.getLaggedSample(0);
      expect(sample).toHaveProperty('value', 30);
      expect(sample).toHaveProperty('timestamp', 300);
    });

    it('should return null from getLaggedSample when not enough values', () => {
      expect(mc.getLaggedSample(10)).toBeNull();
    });
  });

  // ── Listing helpers ─────────────────────────────────────────────────
  describe('getTypes() / getVariants() / getPositions()', () => {
    beforeEach(() => {
      mc.type('pressure').variant('measured').position('upstream').value(1, 1);
      mc.type('flow').variant('predicted').position('downstream').value(2, 2);
    });

    it('should list all stored types', () => {
      const types = mc.getTypes();
      expect(types).toContain('pressure');
      expect(types).toContain('flow');
    });

    it('should list variants for a given type', () => {
      mc.type('pressure');
      expect(mc.getVariants()).toContain('measured');
    });

    it('should return empty array for type with no variants', () => {
      mc.type('temperature');
      expect(mc.getVariants()).toEqual([]);
    });

    it('should throw if getVariants() called without type', () => {
      const fresh = new MeasurementContainer({ autoConvert: false });
      expect(() => fresh.getVariants()).toThrow(/Type must be specified/);
    });

    it('should list positions for type+variant', () => {
      mc.type('pressure').variant('measured');
      expect(mc.getPositions()).toContain('upstream');
    });

    it('should throw if getPositions() called without type and variant', () => {
      const fresh = new MeasurementContainer({ autoConvert: false });
      expect(() => fresh.getPositions()).toThrow(/Type and variant must be specified/);
    });
  });

  // ── clear() ─────────────────────────────────────────────────────────
  describe('clear()', () => {
    it('should reset all measurements and chain state', () => {
      mc.type('pressure').variant('measured').position('upstream').value(1, 1);
      mc.clear();
      expect(mc.measurements).toEqual({});
      expect(mc._currentType).toBeNull();
      expect(mc._currentVariant).toBeNull();
      expect(mc._currentPosition).toBeNull();
    });
  });

  // ── Child context setters ───────────────────────────────────────────
  describe('child context', () => {
    it('should set childId and return this', () => {
      expect(mc.setChildId('c1')).toBe(mc);
      expect(mc.childId).toBe('c1');
    });

    it('should set childName and return this', () => {
      expect(mc.setChildName('pump1')).toBe(mc);
      expect(mc.childName).toBe('pump1');
    });

    it('should set parentRef and return this', () => {
      const parent = { id: 'p1' };
      expect(mc.setParentRef(parent)).toBe(mc);
      expect(mc.parentRef).toBe(parent);
    });
  });

  // ── Event emission ──────────────────────────────────────────────────
  describe('event emission', () => {
    it('should emit an event when a value is set', (done) => {
      // Subscribe before writing: value() triggers a
      // '<type>.<variant>.<position>' event carrying the sample payload.
      mc.emitter.on('pressure.measured.upstream', (data) => {
        expect(data.value).toBe(42);
        expect(data.type).toBe('pressure');
        expect(data.variant).toBe('measured');
        expect(data.position).toBe('upstream');
        done();
      });
      mc.type('pressure').variant('measured').position('upstream').value(42, 1);
    });
  });

  // ── setPreferredUnit ────────────────────────────────────────────────
  describe('setPreferredUnit()', () => {
    it('should store preferred unit and return this', () => {
      const ret = mc.setPreferredUnit('pressure', 'Pa');
      expect(ret).toBe(mc);
      expect(mc.preferredUnits.pressure).toBe('Pa');
    });
  });
});
|
||||
@@ -13,6 +13,11 @@ test('MSE and RMSE calculations are correct', () => {
|
||||
assert.ok(Math.abs(m.rootMeanSquaredError(predicted, measured) - Math.sqrt(5 / 3)) < 1e-9);
|
||||
});
|
||||
|
||||
// Strict mode refuses to compare series of different lengths.
test('MSE throws for mismatched series lengths in strict mode', () => {
  const metrics = new ErrorMetrics({}, makeLogger());
  const predicted = [1, 2];
  const measured = [1];
  assert.throws(() => metrics.meanSquaredError(predicted, measured), /same length/);
});
|
||||
|
||||
test('normalizeUsingRealtime throws when range is zero', () => {
|
||||
const m = new ErrorMetrics({}, makeLogger());
|
||||
assert.throws(() => m.normalizeUsingRealtime([1, 1, 1], [1, 1, 1]), /Invalid process range/);
|
||||
@@ -35,3 +40,17 @@ test('assessDrift returns expected result envelope', () => {
|
||||
assert.ok('immediateLevel' in out);
|
||||
assert.ok('longTermLevel' in out);
|
||||
});
|
||||
|
||||
// Streaming assessment: feed three chronological samples for one registered
// metric and check the final result envelope identifies the metric.
test('assessPoint keeps per-metric state and returns metric id', () => {
  const metrics = new ErrorMetrics({}, makeLogger());
  metrics.registerMetric('flow', { windowSize: 5, minSamplesForLongTerm: 3, strictValidation: true });

  // [predicted, measured, ageMs] — oldest first; only the last result is asserted.
  const samples = [
    [100, 99, 2000],
    [101, 100, 1000],
    [102, 101, 0],
  ];
  let out;
  for (const [predicted, measured, ageMs] of samples) {
    out = metrics.assessPoint('flow', predicted, measured, {
      processMin: 0,
      processMax: 200,
      timestamp: Date.now() - ageMs,
    });
  }

  assert.equal(out.metricId, 'flow');
  assert.equal(out.valid, true);
  assert.equal(typeof out.nrmse, 'number');
  assert.equal(typeof out.sampleCount, 'number');
});
|
||||
|
||||
New file: test/outputUtils.test.js (69 lines, @@ -0,0 +1,69 @@)
|
||||
// Tests for OutputUtils.formatMsg: legacy 'process' / 'influxdb' payload
// shapes must stay stable, while config.output can opt into json/csv output.
const OutputUtils = require('../src/helper/outputUtils');

describe('OutputUtils', () => {
  let outputUtils;
  let config;

  beforeEach(() => {
    outputUtils = new OutputUtils();
    // Minimal node config consumed by formatMsg
    // (general / functionality / asset / output sections).
    config = {
      general: {
        name: 'Pump-1',
        id: 'node-1',
        unit: 'm3/h',
      },
      functionality: {
        softwareType: 'pump',
        role: 'test-role',
      },
      asset: {
        supplier: 'EVOLV',
        type: 'sensor',
      },
      output: {
        process: 'process',
        dbase: 'influxdb',
      },
    };
  });

  it('keeps legacy process output by default', () => {
    const msg = outputUtils.formatMsg({ flow: 12.5 }, config, 'process');
    expect(msg).toEqual({
      topic: 'Pump-1',
      payload: { flow: 12.5 },
    });
  });

  it('keeps legacy influxdb output by default', () => {
    const msg = outputUtils.formatMsg({ flow: 12.5 }, config, 'influxdb');
    expect(msg.topic).toBe('Pump-1');
    // Influx line-protocol style object: measurement + fields + identifying tags.
    expect(msg.payload).toEqual(expect.objectContaining({
      measurement: 'Pump-1',
      fields: { flow: 12.5 },
      tags: expect.objectContaining({
        id: 'node-1',
        name: 'Pump-1',
        softwareType: 'pump',
      }),
    }));
  });

  it('supports config-driven json formatting on the process channel', () => {
    config.output.process = 'json';
    const msg = outputUtils.formatMsg({ flow: 12.5 }, config, 'process');
    expect(msg.topic).toBe('Pump-1');
    // Payload becomes a serialized JSON string instead of an object.
    expect(typeof msg.payload).toBe('string');
    expect(msg.payload).toContain('"measurement":"Pump-1"');
    expect(msg.payload).toContain('"flow":12.5');
  });

  it('supports config-driven csv formatting on the database channel', () => {
    config.output.dbase = 'csv';
    const msg = outputUtils.formatMsg({ flow: 12.5 }, config, 'influxdb');
    expect(msg.topic).toBe('Pump-1');
    expect(typeof msg.payload).toBe('string');
    expect(msg.payload).toContain('Pump-1');
    expect(msg.payload).toContain('flow=12.5');
  });
});
|
||||
New file: test/pid-controller.test.js (105 lines, @@ -0,0 +1,105 @@)
|
||||
// Tests for PIDController / CascadePIDController (node:test runner).
const test = require('node:test');
const assert = require('node:assert/strict');

const { PIDController, CascadePIDController } = require('../src/pid/index.js');

test('pid supports freeze/unfreeze with held output', () => {
  const pid = new PIDController({
    kp: 2,
    ki: 0.5,
    kd: 0.1,
    sampleTime: 100,
    outputMin: 0,
    outputMax: 100,
  });

  const t0 = Date.now();
  const first = pid.update(10, 2, t0 + 100);
  // While frozen, update() must keep returning the held output even though
  // the measurement (and hence the error) has changed.
  pid.freeze({ output: first, trackMeasurement: true });
  const frozen = pid.update(10, 4, t0 + 200);
  assert.equal(frozen, first);

  pid.unfreeze();
  const resumed = pid.update(10, 4, t0 + 300);
  assert.equal(Number.isFinite(resumed), true);
});

test('pid supports dynamic tunings and gain scheduling', () => {
  const pid = new PIDController({
    kp: 1,
    ki: 0,
    kd: 0,
    sampleTime: 100,
    outputMin: -100,
    outputMax: 100,
    // Schedule selects kp=1 below gainInput 5 and kp=3 above it.
    gainSchedule: [
      { min: Number.NEGATIVE_INFINITY, max: 5, kp: 1, ki: 0, kd: 0 },
      { min: 5, max: Number.POSITIVE_INFINITY, kp: 3, ki: 0, kd: 0 },
    ],
  });

  const t0 = Date.now();
  const low = pid.update(10, 9, t0 + 100, { gainInput: 4 });
  const high = pid.update(10, 9, t0 + 200, { gainInput: 6 });

  // Same error, higher scheduled kp -> larger output.
  assert.equal(high > low, true);

  // Per-call tunings (kp=10) should produce a still larger output.
  const tuned = pid.update(10, 9, t0 + 300, { tunings: { kp: 10, ki: 0, kd: 0 } });
  assert.equal(tuned > high, true);
});

test('pid applies deadband and output rate limits', () => {
  const pid = new PIDController({
    kp: 10,
    ki: 0,
    kd: 0,
    deadband: 0.5,
    sampleTime: 100,
    outputMin: 0,
    outputMax: 100,
    outputRateLimitUp: 5, // units per second
    outputRateLimitDown: 5, // units per second
  });

  const t0 = Date.now();
  const out1 = pid.update(10, 10, t0 + 100); // inside deadband -> no action
  const out2 = pid.update(20, 0, t0 + 200); // strong error but limited by rate

  assert.equal(out1, 0);
  // 5 units/sec * 0.1 sec = max 0.5 rise per cycle
  assert.equal(out2 <= 0.5 + 1e-9, true);
});

test('cascade pid computes primary and secondary outputs', () => {
  const cascade = new CascadePIDController({
    primary: {
      kp: 2,
      ki: 0,
      kd: 0,
      sampleTime: 100,
      outputMin: 0,
      outputMax: 100,
    },
    secondary: {
      kp: 1,
      ki: 0,
      kd: 0,
      sampleTime: 100,
      outputMin: 0,
      outputMax: 100,
    },
  });

  const t0 = Date.now();
  const result = cascade.update({
    setpoint: 10,
    primaryMeasurement: 5,
    secondaryMeasurement: 2,
    timestamp: t0 + 100,
  });

  // Positive errors on both loops -> both outputs positive numbers.
  assert.equal(typeof result.primaryOutput, 'number');
  assert.equal(typeof result.secondaryOutput, 'number');
  assert.equal(result.primaryOutput > 0, true);
  assert.equal(result.secondaryOutput > 0, true);
});
||||
@@ -25,6 +25,30 @@ const schema = {
|
||||
default: 'sensor',
|
||||
rules: { type: 'string' },
|
||||
},
|
||||
asset: {
|
||||
default: {},
|
||||
rules: {
|
||||
type: 'object',
|
||||
schema: {
|
||||
unit: {
|
||||
default: 'm3/h',
|
||||
rules: { type: 'string' },
|
||||
},
|
||||
curveUnits: {
|
||||
default: {},
|
||||
rules: {
|
||||
type: 'object',
|
||||
schema: {
|
||||
power: {
|
||||
default: 'kW',
|
||||
rules: { type: 'string' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test('validateSchema applies defaults and type coercion where supported', () => {
|
||||
@@ -32,7 +56,7 @@ test('validateSchema applies defaults and type coercion where supported', () =>
|
||||
const result = validation.validateSchema({ enabled: 'true', name: 'SENSOR' }, schema, 'test');
|
||||
|
||||
assert.equal(result.enabled, true);
|
||||
assert.equal(result.name, 'sensor');
|
||||
assert.equal(result.name, 'SENSOR');
|
||||
assert.equal(result.mode, 'auto');
|
||||
assert.equal(result.functionality.softwareType, 'measurement');
|
||||
});
|
||||
@@ -60,3 +84,58 @@ test('removeUnwantedKeys handles primitive values without throwing', () => {
|
||||
};
|
||||
assert.doesNotThrow(() => validation.removeUnwantedKeys(input));
|
||||
});
|
||||
|
||||
// Regression guard for the case-preservation fix: unit strings such as 'kW'
// keep their casing even though generic strings are normalized elsewhere.
test('unit-like fields preserve case while regular strings are normalized', () => {
  const validation = new ValidationUtils(false, 'error');

  const input = {
    name: 'RotatingMachine',
    asset: {
      unit: 'kW',
      curveUnits: { power: 'kW' },
    },
  };
  const result = validation.validateSchema(input, schema, 'machine');

  // Every case-sensitive field must come back byte-identical.
  const expectations = [
    [result.name, 'RotatingMachine'],
    [result.asset.unit, 'kW'],
    [result.asset.curveUnits.power, 'kW'],
  ];
  for (const [actual, expected] of expectations) {
    assert.equal(actual, expected);
  }
});
|
||||
|
||||
// An explicitly empty array is legal when the schema allows minLength 0;
// it must not be replaced by the non-empty default via the fallback path.
test('array with minLength 0 accepts empty arrays without fallback warning path', () => {
  const validation = new ValidationUtils(false, 'error');

  const childAssetsRules = {
    type: 'array',
    itemType: 'string',
    minLength: 0,
  };
  const localSchema = {
    functionality: {
      softwareType: {
        default: 'measurement',
        rules: { type: 'string' },
      },
    },
    assetRegistration: {
      default: { childAssets: ['default'] },
      rules: {
        type: 'object',
        schema: {
          childAssets: {
            default: ['default'],
            rules: childAssetsRules,
          },
        },
      },
    },
  };

  const input = { assetRegistration: { childAssets: [] } };
  const result = validation.validateSchema(input, localSchema, 'measurement');

  // The empty array survives validation untouched.
  assert.deepEqual(result.assetRegistration.childAssets, []);
});
|
||||
|
||||
New file: test/validationUtils.test.js (554 lines, @@ -0,0 +1,554 @@)
|
||||
const ValidationUtils = require('../src/helper/validationUtils');
|
||||
const { validateNumber, validateInteger, validateBoolean, validateString, validateEnum } = require('../src/helper/validators/typeValidators');
|
||||
const { validateArray, validateSet, validateObject } = require('../src/helper/validators/collectionValidators');
|
||||
const { validateCurve, validateMachineCurve, isSorted, isUnique, areNumbers } = require('../src/helper/validators/curveValidator');
|
||||
|
||||
/** Build a fresh logger stub whose four level methods are jest mock functions. */
function mockLogger() {
  const logger = {};
  for (const level of ['debug', 'info', 'warn', 'error']) {
    logger[level] = jest.fn();
  }
  return logger;
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Type validators
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Scalar validator tests. Shared call shape (from usage below):
// (value, rules, { default }, 'n', 'k', logger) — the 'n'/'k' strings are
// presumably node-name and key labels used in log messages; TODO(review):
// confirm against the validator implementations.
describe('typeValidators', () => {
  let logger;
  beforeEach(() => { logger = mockLogger(); });

  // ── validateNumber ──────────────────────────────────────────────────
  describe('validateNumber()', () => {
    it('should accept a valid number', () => {
      expect(validateNumber(42, {}, { default: 0 }, 'n', 'k', logger)).toBe(42);
    });

    it('should parse a string to a number', () => {
      // Coercion succeeds but is reported via logger.warn.
      expect(validateNumber('3.14', {}, { default: 0 }, 'n', 'k', logger)).toBe(3.14);
      expect(logger.warn).toHaveBeenCalled();
    });

    it('should return default when below min', () => {
      expect(validateNumber(1, { min: 5 }, { default: 5 }, 'n', 'k', logger)).toBe(5);
    });

    it('should return default when above max', () => {
      expect(validateNumber(100, { max: 50 }, { default: 50 }, 'n', 'k', logger)).toBe(50);
    });

    // min/max bounds are inclusive.
    it('should accept boundary value equal to min', () => {
      expect(validateNumber(5, { min: 5 }, { default: 0 }, 'n', 'k', logger)).toBe(5);
    });

    it('should accept boundary value equal to max', () => {
      expect(validateNumber(50, { max: 50 }, { default: 0 }, 'n', 'k', logger)).toBe(50);
    });
  });

  // ── validateInteger ─────────────────────────────────────────────────
  describe('validateInteger()', () => {
    it('should accept a valid integer', () => {
      expect(validateInteger(7, {}, { default: 0 }, 'n', 'k', logger)).toBe(7);
    });

    it('should parse a string to an integer', () => {
      expect(validateInteger('10', {}, { default: 0 }, 'n', 'k', logger)).toBe(10);
    });

    it('should return default for a non-parseable value', () => {
      expect(validateInteger('abc', {}, { default: -1 }, 'n', 'k', logger)).toBe(-1);
    });

    it('should return default when below min', () => {
      expect(validateInteger(2, { min: 5 }, { default: 5 }, 'n', 'k', logger)).toBe(5);
    });

    it('should return default when above max', () => {
      expect(validateInteger(100, { max: 50 }, { default: 50 }, 'n', 'k', logger)).toBe(50);
    });

    // Truncation (not rounding): '7.9' -> 7.
    it('should parse a float string and truncate to integer', () => {
      expect(validateInteger('7.9', {}, { default: 0 }, 'n', 'k', logger)).toBe(7);
    });
  });

  // ── validateBoolean ─────────────────────────────────────────────────
  describe('validateBoolean()', () => {
    it('should pass through a true boolean', () => {
      expect(validateBoolean(true, 'n', 'k', logger)).toBe(true);
    });

    it('should pass through a false boolean', () => {
      expect(validateBoolean(false, 'n', 'k', logger)).toBe(false);
    });

    it('should parse string "true" to boolean true', () => {
      expect(validateBoolean('true', 'n', 'k', logger)).toBe(true);
    });

    it('should parse string "false" to boolean false', () => {
      expect(validateBoolean('false', 'n', 'k', logger)).toBe(false);
    });

    // No coercion for other types: value is returned as-is.
    it('should pass through non-boolean non-string values unchanged', () => {
      expect(validateBoolean(42, 'n', 'k', logger)).toBe(42);
    });
  });

  // ── validateString ──────────────────────────────────────────────────
  describe('validateString()', () => {
    it('should accept a lowercase string', () => {
      expect(validateString('hello', {}, { default: '' }, 'n', 'k', logger)).toBe('hello');
    });

    // Generic strings are normalized to lowercase.
    it('should convert uppercase to lowercase', () => {
      expect(validateString('Hello', {}, { default: '' }, 'n', 'k', logger)).toBe('hello');
    });

    it('should convert a number to a string', () => {
      expect(validateString(42, {}, { default: '' }, 'n', 'k', logger)).toBe('42');
    });

    it('should return null when nullable and value is null', () => {
      expect(validateString(null, { nullable: true }, { default: '' }, 'n', 'k', logger)).toBeNull();
    });
  });

  // ── validateEnum ────────────────────────────────────────────────────
  describe('validateEnum()', () => {
    // Enum options are objects with a `value` field.
    const rules = { values: [{ value: 'open' }, { value: 'closed' }, { value: 'partial' }] };

    it('should accept a valid enum value', () => {
      expect(validateEnum('open', rules, { default: 'closed' }, 'n', 'k', logger)).toBe('open');
    });

    // Matching is case-insensitive and returns the canonical casing.
    it('should be case-insensitive', () => {
      expect(validateEnum('OPEN', rules, { default: 'closed' }, 'n', 'k', logger)).toBe('open');
    });

    it('should return default for an invalid value', () => {
      expect(validateEnum('invalid', rules, { default: 'closed' }, 'n', 'k', logger)).toBe('closed');
    });

    it('should return default when value is null', () => {
      expect(validateEnum(null, rules, { default: 'closed' }, 'n', 'k', logger)).toBe('closed');
    });

    it('should return default when rules.values is not an array', () => {
      expect(validateEnum('open', {}, { default: 'closed' }, 'n', 'k', logger)).toBe('closed');
    });
  });
});
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Collection validators
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Collection validator tests: arrays and Sets are filtered by itemType and
// bounded by min/maxLength; objects are delegated to a schema-validation
// callback when a nested schema is present.
describe('collectionValidators', () => {
  let logger;
  beforeEach(() => { logger = mockLogger(); });

  // ── validateArray ───────────────────────────────────────────────────
  describe('validateArray()', () => {
    it('should return default when value is not an array', () => {
      expect(validateArray('not-array', { itemType: 'number' }, { default: [1] }, 'n', 'k', logger))
        .toEqual([1]);
    });

    // Items of the wrong type are dropped, not rejected wholesale.
    it('should filter items by itemType', () => {
      const result = validateArray([1, 'a', 2], { itemType: 'number', minLength: 1 }, { default: [] }, 'n', 'k', logger);
      expect(result).toEqual([1, 2]);
    });

    it('should respect maxLength', () => {
      const result = validateArray([1, 2, 3, 4, 5], { itemType: 'number', maxLength: 3, minLength: 1 }, { default: [] }, 'n', 'k', logger);
      expect(result).toEqual([1, 2, 3]);
    });

    // Falling below minLength AFTER filtering triggers the default fallback.
    it('should return default when fewer items than minLength after filtering', () => {
      const result = validateArray(['a'], { itemType: 'number', minLength: 1 }, { default: [0] }, 'n', 'k', logger);
      expect(result).toEqual([0]);
    });

    it('should pass all items through when itemType is null', () => {
      const result = validateArray([1, 'a', true], { itemType: 'null', minLength: 1 }, { default: [] }, 'n', 'k', logger);
      expect(result).toEqual([1, 'a', true]);
    });
  });

  // ── validateSet ─────────────────────────────────────────────────────
  describe('validateSet()', () => {
    // The default is given as an array but always comes back as a Set.
    it('should convert default to Set when value is not a Set', () => {
      const result = validateSet('not-a-set', { itemType: 'number' }, { default: [1, 2] }, 'n', 'k', logger);
      expect(result).toBeInstanceOf(Set);
      expect([...result]).toEqual([1, 2]);
    });

    it('should filter Set items by type', () => {
      const input = new Set([1, 'a', 2]);
      const result = validateSet(input, { itemType: 'number', minLength: 1 }, { default: [] }, 'n', 'k', logger);
      expect([...result]).toEqual([1, 2]);
    });

    it('should return default Set when too few items remain', () => {
      const input = new Set(['a']);
      const result = validateSet(input, { itemType: 'number', minLength: 1 }, { default: [0] }, 'n', 'k', logger);
      expect([...result]).toEqual([0]);
    });
  });

  // ── validateObject ──────────────────────────────────────────────────
  describe('validateObject()', () => {
    it('should return default when value is not an object', () => {
      expect(validateObject('str', {}, { default: { a: 1 } }, 'n', 'k', jest.fn(), logger))
        .toEqual({ a: 1 });
    });

    // Arrays are explicitly not treated as plain objects.
    it('should return default when value is an array', () => {
      expect(validateObject([1, 2], {}, { default: {} }, 'n', 'k', jest.fn(), logger))
        .toEqual({});
    });

    it('should return default when no schema is provided', () => {
      expect(validateObject({ a: 1 }, {}, { default: { b: 2 } }, 'n', 'k', jest.fn(), logger))
        .toEqual({ b: 2 });
    });

    // With a nested schema the callback does the work; the path is 'n.k'.
    it('should call validateSchemaFn when schema is provided', () => {
      const mockFn = jest.fn().mockReturnValue({ validated: true });
      const rules = { schema: { x: { default: 1 } } };
      const result = validateObject({ x: 2 }, rules, {}, 'n', 'k', mockFn, logger);
      expect(mockFn).toHaveBeenCalledWith({ x: 2 }, rules.schema, 'n.k');
      expect(result).toEqual({ validated: true });
    });
  });
});
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
// Curve validators
// ═══════════════════════════════════════════════════════════════════════
describe('curveValidator', () => {
  let logger;

  beforeEach(() => { logger = mockLogger(); });

  // ── Helper utilities ────────────────────────────────────────────────
  describe('isSorted()', () => {
    it('should return true for a sorted array', () => {
      expect(isSorted([1, 2, 3, 4])).toBe(true);
    });

    it('should return false for an unsorted array', () => {
      expect(isSorted([3, 1, 2])).toBe(false);
    });

    it('should return true for an empty array', () => {
      expect(isSorted([])).toBe(true);
    });

    it('should return true for equal adjacent values', () => {
      // Non-strict ordering: duplicates do not break sortedness.
      expect(isSorted([1, 1, 2])).toBe(true);
    });
  });

  describe('isUnique()', () => {
    it('should return true when all values are unique', () => {
      expect(isUnique([1, 2, 3])).toBe(true);
    });

    it('should return false when duplicates exist', () => {
      expect(isUnique([1, 2, 2])).toBe(false);
    });
  });

  describe('areNumbers()', () => {
    it('should return true for all numbers', () => {
      expect(areNumbers([1, 2.5, -3])).toBe(true);
    });

    it('should return false when a non-number is present', () => {
      expect(areNumbers([1, 'a', 3])).toBe(false);
    });
  });

  // ── validateCurve ───────────────────────────────────────────────────
  describe('validateCurve()', () => {
    const defaultCurve = { line1: { x: [0, 1], y: [0, 1] } };

    it('should return default when input is null', () => {
      expect(validateCurve(null, defaultCurve, logger)).toEqual(defaultCurve);
    });

    it('should return default for an empty object', () => {
      expect(validateCurve({}, defaultCurve, logger)).toEqual(defaultCurve);
    });

    it('should validate a correct curve', () => {
      const curve = { line1: { x: [1, 2, 3], y: [10, 20, 30] } };
      const result = validateCurve(curve, defaultCurve, logger);
      expect(result.line1.x).toEqual([1, 2, 3]);
      expect(result.line1.y).toEqual([10, 20, 30]);
    });

    it('should sort unsorted x values and reorder y accordingly', () => {
      // x/y pairs must stay associated after the sort repair.
      const curve = { line1: { x: [3, 1, 2], y: [30, 10, 20] } };
      const result = validateCurve(curve, defaultCurve, logger);
      expect(result.line1.x).toEqual([1, 2, 3]);
      expect(result.line1.y).toEqual([10, 20, 30]);
    });

    it('should remove duplicate x values', () => {
      const curve = { line1: { x: [1, 1, 2], y: [10, 11, 20] } };
      const result = validateCurve(curve, defaultCurve, logger);
      expect(result.line1.x).toEqual([1, 2]);
      expect(result.line1.y.length).toBe(2);
    });

    it('should return default when y contains non-numbers', () => {
      const curve = { line1: { x: [1, 2], y: ['a', 'b'] } };
      expect(validateCurve(curve, defaultCurve, logger)).toEqual(defaultCurve);
    });
  });

  // ── validateMachineCurve ────────────────────────────────────────────
  describe('validateMachineCurve()', () => {
    const defaultMC = {
      nq: { line1: { x: [0, 1], y: [0, 1] } },
      np: { line1: { x: [0, 1], y: [0, 1] } },
    };

    it('should return default when input is null', () => {
      expect(validateMachineCurve(null, defaultMC, logger)).toEqual(defaultMC);
    });

    it('should return default when nq or np is missing', () => {
      // Both nq and np sub-curves are required for a machine curve.
      expect(validateMachineCurve({ nq: {} }, defaultMC, logger)).toEqual(defaultMC);
    });

    it('should validate a correct machine curve', () => {
      const input = {
        nq: { line1: { x: [1, 2], y: [10, 20] } },
        np: { line1: { x: [1, 2], y: [5, 10] } },
      };
      const result = validateMachineCurve(input, defaultMC, logger);
      expect(result.nq.line1.x).toEqual([1, 2]);
      expect(result.np.line1.y).toEqual([5, 10]);
    });
  });
});
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
// ValidationUtils class
// ═══════════════════════════════════════════════════════════════════════
describe('ValidationUtils', () => {
  let vu;

  beforeEach(() => {
    vu = new ValidationUtils(true, 'error'); // suppress most logging noise
  });

  // ── constrain() ─────────────────────────────────────────────────────
  describe('constrain()', () => {
    it('should return value when within range', () => {
      expect(vu.constrain(5, 0, 10)).toBe(5);
    });

    it('should clamp to min when value is below range', () => {
      expect(vu.constrain(-5, 0, 10)).toBe(0);
    });

    it('should clamp to max when value is above range', () => {
      expect(vu.constrain(15, 0, 10)).toBe(10);
    });

    it('should return min for boundary value equal to min', () => {
      expect(vu.constrain(0, 0, 10)).toBe(0);
    });

    it('should return max for boundary value equal to max', () => {
      expect(vu.constrain(10, 0, 10)).toBe(10);
    });

    it('should return min when value is not a number', () => {
      expect(vu.constrain('abc', 0, 10)).toBe(0);
    });

    it('should return min when value is null', () => {
      expect(vu.constrain(null, 0, 10)).toBe(0);
    });

    it('should return min when value is undefined', () => {
      expect(vu.constrain(undefined, 0, 10)).toBe(0);
    });
  });

  // ── validateSchema() ────────────────────────────────────────────────
  describe('validateSchema()', () => {
    it('should use default value when config key is missing', () => {
      const schema = {
        speed: { default: 100, rules: { type: 'number' } },
      };
      const result = vu.validateSchema({}, schema, 'test');
      expect(result.speed).toBe(100);
    });

    it('should use provided value over default', () => {
      const schema = {
        speed: { default: 100, rules: { type: 'number' } },
      };
      const result = vu.validateSchema({ speed: 200 }, schema, 'test');
      expect(result.speed).toBe(200);
    });

    it('should strip unknown keys from config', () => {
      const schema = {
        speed: { default: 100, rules: { type: 'number' } },
      };
      const config = { speed: 50, unknownKey: 'bad' };
      const result = vu.validateSchema(config, schema, 'test');
      expect(result.unknownKey).toBeUndefined();
      expect(result.speed).toBe(50);
    });

    it('should validate number type with min/max', () => {
      const schema = {
        speed: { default: 10, rules: { type: 'number', min: 0, max: 100 } },
      };
      // within range
      expect(vu.validateSchema({ speed: 50 }, schema, 'test').speed).toBe(50);
      // below min -> default
      expect(vu.validateSchema({ speed: -1 }, schema, 'test').speed).toBe(10);
      // above max -> default
      expect(vu.validateSchema({ speed: 101 }, schema, 'test').speed).toBe(10);
    });

    it('should validate boolean type', () => {
      const schema = {
        enabled: { default: true, rules: { type: 'boolean' } },
      };
      expect(vu.validateSchema({ enabled: false }, schema, 'test').enabled).toBe(false);
      expect(vu.validateSchema({ enabled: 'true' }, schema, 'test').enabled).toBe(true);
    });

    it('should validate string type (lowercased)', () => {
      const schema = {
        mode: { default: 'auto', rules: { type: 'string' } },
      };
      expect(vu.validateSchema({ mode: 'Manual' }, schema, 'test').mode).toBe('manual');
    });

    it('should validate enum type', () => {
      const schema = {
        state: {
          default: 'open',
          rules: { type: 'enum', values: [{ value: 'open' }, { value: 'closed' }] },
        },
      };
      expect(vu.validateSchema({ state: 'closed' }, schema, 'test').state).toBe('closed');
      expect(vu.validateSchema({ state: 'invalid' }, schema, 'test').state).toBe('open');
    });

    it('should validate integer type', () => {
      const schema = {
        count: { default: 5, rules: { type: 'integer', min: 1, max: 100 } },
      };
      expect(vu.validateSchema({ count: 10 }, schema, 'test').count).toBe(10);
      // Numeric strings are coerced to integers.
      expect(vu.validateSchema({ count: '42' }, schema, 'test').count).toBe(42);
    });

    it('should validate array type', () => {
      const schema = {
        items: { default: [1, 2], rules: { type: 'array', itemType: 'number', minLength: 1 } },
      };
      expect(vu.validateSchema({ items: [3, 4, 5] }, schema, 'test').items).toEqual([3, 4, 5]);
      expect(vu.validateSchema({ items: 'not-array' }, schema, 'test').items).toEqual([1, 2]);
    });

    it('should handle nested object with schema recursively', () => {
      const schema = {
        logging: {
          rules: { type: 'object', schema: {
            enabled: { default: true, rules: { type: 'boolean' } },
            level: { default: 'info', rules: { type: 'string' } },
          }},
        },
      };
      const result = vu.validateSchema(
        { logging: { enabled: false, level: 'Debug' } },
        schema,
        'test'
      );
      expect(result.logging.enabled).toBe(false);
      expect(result.logging.level).toBe('debug');
    });

    it('should skip reserved keys (rules, description, schema)', () => {
      const schema = {
        rules: 'should be skipped',
        description: 'should be skipped',
        schema: 'should be skipped',
        speed: { default: 10, rules: { type: 'number' } },
      };
      const result = vu.validateSchema({}, schema, 'test');
      expect(result).not.toHaveProperty('rules');
      expect(result).not.toHaveProperty('description');
      expect(result).not.toHaveProperty('schema');
      expect(result.speed).toBe(10);
    });

    it('should use default for unknown validation type', () => {
      const schema = {
        weird: { default: 'fallback', rules: { type: 'unknownType' } },
      };
      const result = vu.validateSchema({ weird: 'value' }, schema, 'test');
      expect(result.weird).toBe('fallback');
    });

    it('should handle curve type', () => {
      const schema = {
        curve: {
          default: { line1: { x: [0, 1], y: [0, 1] } },
          rules: { type: 'curve' },
        },
      };
      const validCurve = { line1: { x: [1, 2], y: [10, 20] } };
      const result = vu.validateSchema({ curve: validCurve }, schema, 'test');
      expect(result.curve.line1.x).toEqual([1, 2]);
    });
  });

  // ── removeUnwantedKeys() ────────────────────────────────────────────
  describe('removeUnwantedKeys()', () => {
    it('should remove rules and description keys', () => {
      // Schema entries collapse to their default values once metadata is stripped.
      const input = {
        speed: { default: 10, rules: { type: 'number' }, description: 'Speed setting' },
      };
      const result = vu.removeUnwantedKeys(input);
      expect(result.speed).toBe(10);
    });

    it('should recurse into nested objects', () => {
      const input = {
        logging: {
          enabled: { default: true, rules: {} },
          level: { default: 'info', description: 'Log level' },
        },
      };
      const result = vu.removeUnwantedKeys(input);
      expect(result.logging.enabled).toBe(true);
      expect(result.logging.level).toBe('info');
    });

    it('should handle arrays', () => {
      const input = [
        { a: { default: 1, rules: {} } },
        { b: { default: 2, description: 'x' } },
      ];
      const result = vu.removeUnwantedKeys(input);
      expect(result[0].a).toBe(1);
      expect(result[1].b).toBe(2);
    });

    it('should return primitives as-is', () => {
      expect(vu.removeUnwantedKeys(42)).toBe(42);
      expect(vu.removeUnwantedKeys('hello')).toBe('hello');
      expect(vu.removeUnwantedKeys(null)).toBeNull();
    });
  });
});
|
||||
Reference in New Issue
Block a user