feat: digital (MQTT) mode + fix silent dispatcher bug for camelCase methods
Runtime:
- Fix silent no-op when user selected any camelCase smoothing or outlier
method from the editor. validateEnum in generalFunctions lowercases enum
values (zScore -> zscore, lowPass -> lowpass, ...) but the dispatcher
compared against camelCase keys. Effect: 5 of 11 smoothing methods
(lowPass, highPass, weightedMovingAverage, bandPass, savitzkyGolay) and
2 of 3 outlier methods (zScore, modifiedZScore) silently fell through.
Users got the raw last value or no outlier filtering with no error log.
Review any pre-2026-04-13 flows that relied on these methods.
Fix: normalize method names to lowercase on both sides of the lookup.
- New Channel class (src/channel.js) — self-contained per-channel pipeline:
outlier -> offset -> scaling -> smoothing -> min/max -> constrain -> emit.
Pure domain logic, no Node-RED deps, reusable by future nodes that need
the same signal-conditioning chain.
Digital mode:
- config.mode.current = 'digital' opts in. config.channels declares one
entry per expected JSON key; each channel has its own type, position,
unit, distance, and optional scaling/smoothing/outlierDetection blocks
that override the top-level analog-mode fields. One MQTT-shaped payload
({t:22.5, h:45, p:1013}) dispatches N independent pipelines and emits N
MeasurementContainer slots from a single input message.
- Backward compatible: absent mode config = analog = pre-digital behaviour.
Every existing measurement flow keeps working unchanged.
UI:
- HTML editor: new Mode dropdown and Channels JSON textarea. The Node-RED
help panel is rewritten end-to-end with topic reference, port contracts,
per-mode configuration, smoothing/outlier method tables, and a note
about the pre-fix behaviour.
- README.md rewritten (was a one-line stub).
Tests (12 -> 71, all green):
- test/basic/smoothing-methods.basic.test.js (+16): every smoothing method
including the formerly-broken camelCase ones.
- test/basic/outlier-detection.basic.test.js (+10): every outlier method,
fall-through, toggle.
- test/basic/scaling-and-interpolation.basic.test.js (+10): offset,
interpolateLinear, constrain, handleScaling edge cases, min/max
tracking, updateOutputPercent fallback, updateOutputAbs emit dedup.
- test/basic/calibration-and-stability.basic.test.js (+11): calibrate
(stable and unstable), isStable, evaluateRepeatability refusals,
toggleSimulation, tick simulation on/off.
- test/integration/digital-mode.integration.test.js (+12): channel build
(including malformed entries), payload dispatch, multi-channel emit,
unknown keys, per-channel scaling/smoothing/outlier, empty channels,
non-numeric value rejection, getDigitalOutput shape, analog-default
back-compat.
E2E verified on Dockerized Node-RED: analog regression unchanged; digital
mode deploys with three channels, dispatches MQTT-style payload, emits
per-channel events, accumulates per-channel smoothing, ignores unknown
keys.
Depends on generalFunctions commit e50be2e (permissive unit check +
mode/channels schema).
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
121
test/basic/calibration-and-stability.basic.test.js
Normal file
121
test/basic/calibration-and-stability.basic.test.js
Normal file
@@ -0,0 +1,121 @@
|
||||
const test = require('node:test');
const assert = require('node:assert/strict');

const { makeMeasurementInstance } = require('../helpers/factories');

/**
 * Calibration / stability / repeatability coverage. All of these methods
 * read the smoothing window (storedValues), so every test either seeds
 * that array directly or drives it through calculateInput.
 */

test("isStable returns false with fewer than 2 samples", () => {
  const inst = makeMeasurementInstance();
  inst.storedValues = [];
  // With <2 samples the implementation returns the bare boolean false
  // rather than an { isStable, stdDev } object — pinned here on purpose.
  assert.equal(inst.isStable(), false);
});

test("isStable reports stability and stdDev for a flat window", () => {
  const inst = makeMeasurementInstance();
  inst.storedValues = [10, 10, 10, 10, 10];
  const result = inst.isStable();
  assert.equal(result.isStable, true);
  assert.equal(result.stdDev, 0);
});

test("evaluateRepeatability returns stdDev when conditions are met", () => {
  const inst = makeMeasurementInstance({
    smoothing: { smoothWindow: 5, smoothMethod: 'mean' },
  });
  inst.storedValues = [10, 10, 10, 10, 10];
  // A perfectly flat window has zero spread.
  assert.equal(inst.evaluateRepeatability(), 0);
});

test("evaluateRepeatability refuses when smoothing is disabled", () => {
  const inst = makeMeasurementInstance({
    smoothing: { smoothWindow: 5, smoothMethod: 'none' },
  });
  inst.storedValues = [10, 10, 10, 10, 10];
  assert.equal(inst.evaluateRepeatability(), null);
});

test("evaluateRepeatability refuses with insufficient samples", () => {
  const inst = makeMeasurementInstance({
    smoothing: { smoothWindow: 5, smoothMethod: 'mean' },
  });
  inst.storedValues = [10];
  assert.equal(inst.evaluateRepeatability(), null);
});

test("calibrate sets offset when input is stable and scaling enabled", () => {
  const inst = makeMeasurementInstance({
    scaling: { enabled: true, inputMin: 4, inputMax: 20, absMin: 0, absMax: 100, offset: 0 },
    smoothing: { smoothWindow: 5, smoothMethod: 'mean' },
  });
  // Drive a stable window through the real pipeline so outputAbs reflects
  // the value calibrate() bases its delta on.
  for (const sample of [3, 3, 3, 3, 3]) {
    inst.calculateInput(sample);
  }
  const priorOutput = inst.outputAbs;
  inst.calibrate();
  // With scaling on, the new offset is inputMin minus the pre-calibration output.
  assert.equal(inst.config.scaling.offset, 4 - priorOutput);
});

test("calibrate aborts when input is not stable", () => {
  const inst = makeMeasurementInstance({
    scaling: { enabled: true, inputMin: 0, inputMax: 100, absMin: 0, absMax: 10, offset: 0 },
    smoothing: { smoothWindow: 5, smoothMethod: 'mean' },
  });
  // Seed an obviously unstable window: calibrate() consults isStable(),
  // sees stdDev above the threshold, warns, and leaves the offset alone.
  inst.storedValues = [0, 100, 0, 100, 0];
  const priorOffset = inst.config.scaling.offset;
  inst.calibrate();
  assert.equal(inst.config.scaling.offset, priorOffset);
});

test("calibrate uses absMin when scaling is disabled", () => {
  const inst = makeMeasurementInstance({
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 5, absMax: 10, offset: 0 },
    smoothing: { smoothWindow: 5, smoothMethod: 'mean' },
  });
  for (const sample of [5, 5, 5, 5, 5]) {
    inst.calculateInput(sample);
  }
  const priorOutput = inst.outputAbs;
  inst.calibrate();
  // Scaling off -> the calibration reference is absMin (5), not inputMin.
  assert.equal(inst.config.scaling.offset, 5 - priorOutput);
});

test("toggleSimulation flips the simulation flag", () => {
  const inst = makeMeasurementInstance({ simulation: { enabled: false } });
  inst.toggleSimulation();
  assert.equal(inst.config.simulation.enabled, true);
  inst.toggleSimulation();
  assert.equal(inst.config.simulation.enabled, false);
});

test("tick runs simulateInput when simulation is enabled", async () => {
  const inst = makeMeasurementInstance({
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
    smoothing: { smoothWindow: 1, smoothMethod: 'none' },
    simulation: { enabled: true },
  });
  const initialInput = inst.inputValue;
  await inst.tick();
  await inst.tick();
  await inst.tick();
  // With simulation on, tick() must perturb the input away from its start.
  assert.notEqual(inst.inputValue, initialInput);
});

test("tick is a no-op on inputValue when simulation is disabled", async () => {
  const inst = makeMeasurementInstance({
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
    smoothing: { smoothWindow: 1, smoothMethod: 'none' },
    simulation: { enabled: false },
  });
  inst.inputValue = 42;
  await inst.tick();
  await inst.tick();
  assert.equal(inst.inputValue, 42);
});
|
||||
98
test/basic/outlier-detection.basic.test.js
Normal file
98
test/basic/outlier-detection.basic.test.js
Normal file
@@ -0,0 +1,98 @@
|
||||
const test = require('node:test');
const assert = require('node:assert/strict');

const { makeMeasurementInstance } = require('../helpers/factories');

/**
 * Unit coverage for the three outlier detection strategies shipped by the
 * measurement node. The stored window is seeded directly in each test so
 * assertions target the classifier itself rather than the full
 * calculateInput pipeline.
 */

function detectorFor(method, threshold) {
  return makeMeasurementInstance({
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: -1000, absMax: 1000, offset: 0 },
    smoothing: { smoothWindow: 20, smoothMethod: 'none' },
    outlierDetection: { enabled: true, method, threshold },
  });
}

// Write storedValues directly: going through calculateInput would run the
// outlier filter while seeding and distort the baseline.
function seedWindow(detector, samples) {
  detector.storedValues = samples.slice();
}

test("zScore flags a value far above the mean as an outlier", () => {
  const det = detectorFor('zScore', 3);
  seedWindow(det, [10, 11, 10, 9, 10, 11, 10, 11, 9, 10]);
  assert.equal(det.outlierDetection(100), true);
});

test("zScore does not flag a value inside the distribution", () => {
  const det = detectorFor('zScore', 3);
  seedWindow(det, [10, 11, 10, 9, 10, 11, 10, 11, 9, 10]);
  assert.equal(det.outlierDetection(11), false);
});

test("iqr flags a value outside Q1/Q3 fences", () => {
  const det = detectorFor('iqr');
  seedWindow(det, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
  assert.equal(det.outlierDetection(100), true);
});

test("iqr does not flag a value inside Q1/Q3 fences", () => {
  const det = detectorFor('iqr');
  seedWindow(det, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
  assert.equal(det.outlierDetection(5), false);
});

test("modifiedZScore flags heavy-tailed outliers", () => {
  const det = detectorFor('modifiedZScore', 3.5);
  seedWindow(det, [10, 11, 10, 9, 10, 11, 10, 11, 9, 10]);
  assert.equal(det.outlierDetection(1000), true);
});

test("modifiedZScore accepts normal data", () => {
  const det = detectorFor('modifiedZScore', 3.5);
  seedWindow(det, [10, 11, 10, 9, 10, 11, 10, 11, 9, 10]);
  assert.equal(det.outlierDetection(11), false);
});

test("unknown outlier method falls back to schema default (zScore) and still runs", () => {
  // validateEnum swaps unknown method names for the schema default
  // ("zScore"), which the dispatcher lowercases and routes to the z-score
  // detector. Against this tight window, 100 is a clear outlier, so
  // detection must still report true.
  const det = detectorFor('bogus', 3);
  seedWindow(det, [1, 2, 3, 4, 5]);
  assert.equal(det.outlierDetection(100), true);
});

test("outlier detection returns false when window has < 2 samples", () => {
  const det = detectorFor('zScore', 3);
  det.storedValues = [];
  assert.equal(det.outlierDetection(500), false);
});

test("calculateInput ignores a value flagged as outlier", () => {
  const det = detectorFor('zScore', 3);
  // Establish a tight baseline through the real pipeline, then spike it.
  for (const sample of [10, 10, 10, 10, 10]) {
    det.calculateInput(sample);
  }
  const baseline = det.outputAbs;
  det.calculateInput(9999);
  // The rejected spike must leave the output untouched.
  assert.equal(det.outputAbs, baseline);
});

test("toggleOutlierDetection flips the flag without corrupting config", () => {
  const det = detectorFor('zScore', 3);
  const startState = det.config.outlierDetection.enabled;
  det.toggleOutlierDetection();
  assert.equal(det.config.outlierDetection.enabled, !startState);
  // A second toggle restores the original state.
  det.toggleOutlierDetection();
  assert.equal(det.config.outlierDetection.enabled, startState);
  // The method survives both toggles (validateEnum lowercases enum values).
  assert.equal(det.config.outlierDetection.method.toLowerCase(), 'zscore');
});
|
||||
122
test/basic/scaling-and-interpolation.basic.test.js
Normal file
122
test/basic/scaling-and-interpolation.basic.test.js
Normal file
@@ -0,0 +1,122 @@
|
||||
const test = require('node:test');
const assert = require('node:assert/strict');

const { makeMeasurementInstance } = require('../helpers/factories');

/**
 * Covers the scaling / offset / interpolation primitives and the min/max
 * tracking side effects that are not exercised by the existing
 * scaling-and-output test.
 */

// applyOffset is the first pipeline stage: a plain additive shift.
test("applyOffset adds configured offset to the input", () => {
  const m = makeMeasurementInstance({
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 7 },
  });
  assert.equal(m.applyOffset(10), 17);
  assert.equal(m.applyOffset(-3), 4);
});

// Straight-line mapping at the low edge, midpoint, and high edge.
test("interpolateLinear maps within range", () => {
  const m = makeMeasurementInstance();
  assert.equal(m.interpolateLinear(50, 0, 100, 0, 10), 5);
  assert.equal(m.interpolateLinear(0, 0, 100, 0, 10), 0);
  assert.equal(m.interpolateLinear(100, 0, 100, 0, 10), 10);
});

// Degenerate ranges must not divide by zero or invert the mapping; the
// implementation passes the input through unchanged instead.
test("interpolateLinear warns and returns input when ranges collapse", () => {
  const m = makeMeasurementInstance();
  // iMin == iMax -> invalid
  assert.equal(m.interpolateLinear(42, 0, 0, 0, 10), 42);
  // oMin > oMax -> invalid
  assert.equal(m.interpolateLinear(42, 0, 100, 10, 0), 42);
});

test("constrain clamps below, inside, and above range", () => {
  const m = makeMeasurementInstance();
  assert.equal(m.constrain(-5, 0, 10), 0);
  assert.equal(m.constrain(5, 0, 10), 5);
  assert.equal(m.constrain(15, 0, 10), 10);
});

test("handleScaling falls back when inputRange is invalid", () => {
  const m = makeMeasurementInstance({
    scaling: { enabled: true, inputMin: 5, inputMax: 5, absMin: 0, absMax: 10, offset: 0 },
  });
  // Before the call, inputRange is 0 (5-5). handleScaling should reset
  // inputMin/inputMax to defaults [0, 1] and still return a finite number.
  const result = m.handleScaling(0.5);
  assert.ok(Number.isFinite(result), `expected finite result, got ${result}`);
  assert.equal(m.config.scaling.inputMin, 0);
  assert.equal(m.config.scaling.inputMax, 1);
});

test("handleScaling constrains out-of-range inputs before interpolating", () => {
  const m = makeMeasurementInstance({
    scaling: { enabled: true, inputMin: 0, inputMax: 100, absMin: 0, absMax: 10, offset: 0 },
  });
  // Input above inputMax is constrained to inputMax then mapped to absMax.
  assert.equal(m.handleScaling(150), 10);
  // Input below inputMin is constrained to inputMin then mapped to absMin.
  assert.equal(m.handleScaling(-20), 0);
});

// totalMinValue/totalMaxValue track the raw (pre-smoothing) extremes.
test("calculateInput updates raw min/max from the unfiltered input", () => {
  const m = makeMeasurementInstance({
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 1000, offset: 0 },
    smoothing: { smoothWindow: 1, smoothMethod: 'none' },
  });
  m.calculateInput(10);
  m.calculateInput(30);
  m.calculateInput(5);
  assert.equal(m.totalMinValue, 5);
  assert.equal(m.totalMaxValue, 30);
});

test("updateOutputPercent falls back to observed min/max when processRange <= 0", () => {
  const m = makeMeasurementInstance({
    // absMin == absMax makes the configured processRange zero.
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 5, absMax: 5, offset: 0 },
    smoothing: { smoothWindow: 1, smoothMethod: 'none' },
  });
  // processRange starts at 0 so updateOutputPercent uses totalMinValue/Max.
  m.totalMinValue = 0;
  m.totalMaxValue = 100;
  const pct = m.updateOutputPercent(50);
  // Linear interp: (50 - 0) / (100 - 0) * 100 = 50.
  assert.ok(Math.abs(pct - 50) < 0.01, `expected ~50, got ${pct}`);
});

test("updateOutputAbs only emits MeasurementContainer update when value changes", async () => {
  const m = makeMeasurementInstance({
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
    smoothing: { smoothWindow: 1, smoothMethod: 'none' },
  });
  let emitCount = 0;
  // MeasurementContainer normalizes positions to lowercase, so the
  // event name uses 'atequipment' not the camelCase config value.
  m.measurements.emitter.on('pressure.measured.atequipment', () => { emitCount += 1; });

  // setImmediate after each input lets the emitter drain before the next
  // value goes in, so the emit count is deterministic.
  m.calculateInput(10);
  await new Promise((r) => setImmediate(r));
  m.calculateInput(10); // same value -> no emit
  await new Promise((r) => setImmediate(r));
  m.calculateInput(20); // new value -> emit
  await new Promise((r) => setImmediate(r));

  assert.equal(emitCount, 2, `expected 2 emits (two distinct values), got ${emitCount}`);
});

// getOutput must expose the complete tracked state in one object; shape is
// asserted field-by-field rather than by value since the exact numbers
// depend on the pipeline.
test("getOutput returns the full tracked state object", () => {
  const m = makeMeasurementInstance({
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
    smoothing: { smoothWindow: 1, smoothMethod: 'none' },
  });
  m.calculateInput(15);
  const out = m.getOutput();
  assert.equal(typeof out.mAbs, 'number');
  assert.equal(typeof out.mPercent, 'number');
  assert.equal(typeof out.totalMinValue, 'number');
  assert.equal(typeof out.totalMaxValue, 'number');
  assert.equal(typeof out.totalMinSmooth, 'number');
  assert.equal(typeof out.totalMaxSmooth, 'number');
});
|
||||
132
test/basic/smoothing-methods.basic.test.js
Normal file
132
test/basic/smoothing-methods.basic.test.js
Normal file
@@ -0,0 +1,132 @@
|
||||
const test = require('node:test');
const assert = require('node:assert/strict');

const { makeMeasurementInstance } = require('../helpers/factories');

/**
 * Baseline coverage for every smoothing method the measurement node
 * exposes. Scaling and outlier detection stay off in every fixture so the
 * assertions see the raw smoothing arithmetic.
 */

function smootherFor(method, windowSize = 5) {
  return makeMeasurementInstance({
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 1000, offset: 0 },
    smoothing: { smoothWindow: windowSize, smoothMethod: method },
  });
}

// Push a sequence of samples through the full input pipeline.
function push(instance, samples) {
  for (const sample of samples) {
    instance.calculateInput(sample);
  }
}

test("smoothing 'none' returns the latest value", () => {
  const s = smootherFor('none');
  push(s, [10, 20, 30, 40, 50]);
  assert.equal(s.outputAbs, 50);
});

test("smoothing 'mean' returns arithmetic mean over window", () => {
  const s = smootherFor('mean');
  push(s, [10, 20, 30, 40, 50]);
  assert.equal(s.outputAbs, 30);
});

test("smoothing 'min' returns minimum of window", () => {
  const s = smootherFor('min');
  push(s, [10, 20, 5, 40, 50]);
  assert.equal(s.outputAbs, 5);
});

test("smoothing 'max' returns maximum of window", () => {
  const s = smootherFor('max');
  push(s, [10, 20, 5, 40, 50]);
  assert.equal(s.outputAbs, 50);
});

test("smoothing 'sd' returns standard deviation of window", () => {
  const s = smootherFor('sd');
  push(s, [2, 4, 4, 4, 5]);
  // Sample sd of [2,4,4,4,5] is 1.0954...; the outputAbs pipeline rounds to 1.1.
  assert.ok(Math.abs(s.outputAbs - 1.1) < 0.01, `expected ~1.1, got ${s.outputAbs}`);
});

test("smoothing 'median' returns median (odd window)", () => {
  const s = smootherFor('median');
  push(s, [10, 50, 20, 40, 30]);
  assert.equal(s.outputAbs, 30);
});

test("smoothing 'median' returns average of middle pair (even window)", () => {
  const s = smootherFor('median', 4);
  push(s, [10, 20, 30, 40]);
  assert.equal(s.outputAbs, 25);
});

test("smoothing 'weightedMovingAverage' weights later samples more", () => {
  const s = smootherFor('weightedMovingAverage');
  push(s, [10, 10, 10, 10, 50]);
  // Weights [1,2,3,4,5] sum to 15; weighted sum = 10+20+30+40+250 = 350,
  // so the output is 350/15 = 23.333... (rounded to 23.33).
  assert.ok(Math.abs(s.outputAbs - 23.33) < 0.02, `expected ~23.33, got ${s.outputAbs}`);
});

test("smoothing 'lowPass' attenuates transients", () => {
  const s = smootherFor('lowPass');
  push(s, [0, 0, 0, 0, 100]);
  // An EMA (alpha=0.2) over 0,0,0,0,100 must land well below the 100 step
  // while still moving off zero.
  assert.ok(s.outputAbs < 100 * 0.3, `lowPass should attenuate step: ${s.outputAbs}`);
  assert.ok(s.outputAbs > 0, `lowPass should still react: ${s.outputAbs}`);
});

test("smoothing 'highPass' emphasises differences", () => {
  const s = smootherFor('highPass');
  push(s, [0, 0, 0, 0, 100]);
  // A high-pass over a step yields a positive transient; the exact value
  // is recursive, so only require a clearly positive response.
  assert.ok(s.outputAbs > 10, `highPass should emphasise step: ${s.outputAbs}`);
});

test("smoothing 'bandPass' produces a finite number", () => {
  const s = smootherFor('bandPass');
  push(s, [1, 2, 3, 4, 5]);
  assert.ok(Number.isFinite(s.outputAbs));
});

test("smoothing 'kalman' converges toward steady values", () => {
  const s = smootherFor('kalman');
  push(s, [100, 100, 100, 100, 100]);
  // Constant input -> the filter converges to that value, within a small
  // tolerance left by its gain smoothing.
  assert.ok(Math.abs(s.outputAbs - 100) < 5, `kalman should approach steady value: ${s.outputAbs}`);
});

test("smoothing 'savitzkyGolay' returns last sample when window < 5", () => {
  const s = smootherFor('savitzkyGolay', 3);
  push(s, [7, 8, 9]);
  assert.equal(s.outputAbs, 9);
});

test("smoothing 'savitzkyGolay' smooths across a 5-point window", () => {
  const s = smootherFor('savitzkyGolay', 5);
  push(s, [1, 2, 3, 4, 5]);
  // With SG coefficients [-3,12,17,12,-3]/35, linear data passes through
  // unchanged: the middle value 3 comes back out.
  assert.ok(Math.abs(s.outputAbs - 3) < 0.01, `SG on linear data should return middle ~3, got ${s.outputAbs}`);
});

test("unknown smoothing method falls through to raw value with an error", () => {
  const s = smootherFor('bogus-method');
  // The unknown key misses every entry in the applySmoothing map, hits the
  // default branch, logs an error, and passes the raw value through — this
  // test pins that fall-through behaviour.
  push(s, [42]);
  assert.equal(s.outputAbs, 42);
});

test("smoothing window shifts oldest value when exceeded", () => {
  const s = smootherFor('mean', 3);
  push(s, [100, 100, 100, 10, 10, 10]);
  // Only the last three samples [10,10,10] remain; their mean is 10.
  assert.equal(s.outputAbs, 10);
});
|
||||
222
test/integration/digital-mode.integration.test.js
Normal file
222
test/integration/digital-mode.integration.test.js
Normal file
@@ -0,0 +1,222 @@
|
||||
const test = require('node:test');
const assert = require('node:assert/strict');

const Measurement = require('../../src/specificClass');

/**
 * Integration tests for digital mode.
 *
 * Digital mode accepts an object payload where each key maps to its own
 * independently-configured Channel (scaling / smoothing / outlier / unit /
 * position). A single inbound message can therefore emit N measurements
 * into the MeasurementContainer in one go — the MQTT / JSON IoT pattern
 * the analog-centric node previously did not support.
 */

// Builds a full digital-mode config around the supplied channel list.
// `overrides` is spread last so a test can replace any top-level section.
function makeDigitalConfig(channels, overrides = {}) {
  return {
    general: { id: 'm-dig-1', name: 'weather-station', unit: 'unitless', logging: { enabled: false, logLevel: 'error' } },
    asset: { type: 'pressure', unit: 'mbar', category: 'sensor', supplier: 'vendor', model: 'BME280' },
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 1, offset: 0 },
    smoothing: { smoothWindow: 5, smoothMethod: 'none' },
    simulation: { enabled: false },
    functionality: { positionVsParent: 'atEquipment', distance: null },
    mode: { current: 'digital' },
    channels,
    ...overrides,
  };
}

test('analog-mode default: no channels built, handleDigitalPayload is a no-op', () => {
  // Factory without mode config — defaults must stay analog.
  const m = new Measurement({
    general: { id: 'a', name: 'a', unit: 'bar', logging: { enabled: false, logLevel: 'error' } },
    asset: { type: 'pressure', unit: 'bar', category: 'sensor', supplier: 'v', model: 'M' },
    scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 1, offset: 0 },
    smoothing: { smoothWindow: 5, smoothMethod: 'none' },
    simulation: { enabled: false },
    functionality: { positionVsParent: 'atEquipment' },
  });
  assert.equal(m.mode, 'analog');
  assert.equal(m.channels.size, 0);
  // In analog mode, handleDigitalPayload must refuse and not mutate state.
  const res = m.handleDigitalPayload({ temperature: 21 });
  assert.deepEqual(res, {});
});

test('digital mode builds one Channel per config.channels entry', () => {
  const m = new Measurement(makeDigitalConfig([
    { key: 'temperature', type: 'temperature', position: 'atEquipment', unit: 'C',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: -50, absMax: 150, offset: 0 },
      smoothing: { smoothWindow: 3, smoothMethod: 'mean' } },
    { key: 'humidity', type: 'humidity', position: 'atEquipment', unit: '%',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
      smoothing: { smoothWindow: 3, smoothMethod: 'mean' } },
    { key: 'pressure', type: 'pressure', position: 'atEquipment', unit: 'mbar',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 800, absMax: 1200, offset: 0 },
      smoothing: { smoothWindow: 3, smoothMethod: 'mean' } },
  ]));
  assert.equal(m.mode, 'digital');
  assert.equal(m.channels.size, 3);
  assert.ok(m.channels.has('temperature'));
  assert.ok(m.channels.has('humidity'));
  assert.ok(m.channels.has('pressure'));
});

test('digital payload routes each key to its own channel', () => {
  const m = new Measurement(makeDigitalConfig([
    { key: 'temperature', type: 'temperature', position: 'atEquipment', unit: 'C',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: -50, absMax: 150, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
    { key: 'humidity', type: 'humidity', position: 'atEquipment', unit: '%',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
  ]));

  m.handleDigitalPayload({ temperature: 21.5, humidity: 65 });

  // With scaling off and window 1, each channel's output is its raw input.
  const tempOut = m.channels.get('temperature').outputAbs;
  const humidOut = m.channels.get('humidity').outputAbs;
  assert.equal(tempOut, 21.5);
  assert.equal(humidOut, 65);
});

test('digital payload emits on the MeasurementContainer per channel', async () => {
  const m = new Measurement(makeDigitalConfig([
    { key: 't', type: 'temperature', position: 'atEquipment', unit: 'C',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: -50, absMax: 150, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
    { key: 'h', type: 'humidity', position: 'atEquipment', unit: '%',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
  ]));

  // Event names are '<type>.measured.<position>' with position lowercased
  // by the MeasurementContainer.
  const events = [];
  m.measurements.emitter.on('temperature.measured.atequipment', (e) => events.push({ on: 't', value: e.value }));
  m.measurements.emitter.on('humidity.measured.atequipment', (e) => events.push({ on: 'h', value: e.value }));

  m.handleDigitalPayload({ t: 22, h: 50 });
  // Let the emitter flush before counting events.
  await new Promise((r) => setImmediate(r));

  assert.equal(events.filter((e) => e.on === 't').length, 1);
  assert.equal(events.filter((e) => e.on === 'h').length, 1);
  assert.equal(events.find((e) => e.on === 't').value, 22);
  assert.equal(events.find((e) => e.on === 'h').value, 50);
});

test('digital payload with unmapped keys silently ignores them', () => {
  const m = new Measurement(makeDigitalConfig([
    { key: 't', type: 'temperature', position: 'atEquipment', unit: 'C',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: -50, absMax: 150, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
  ]));

  // Unknown payload keys must neither throw nor appear in the summary.
  const res = m.handleDigitalPayload({ t: 20, unknown: 999, extra: 'x' });
  assert.equal(m.channels.get('t').outputAbs, 20);
  assert.equal(res.t.ok, true);
  assert.equal(res.unknown, undefined);
  assert.equal(res.extra, undefined);
});

test('digital channel with scaling enabled maps input to abs range', () => {
  const m = new Measurement(makeDigitalConfig([
    { key: 'pt', type: 'pressure', position: 'atEquipment', unit: 'mbar',
      scaling: { enabled: true, inputMin: 0, inputMax: 100, absMin: 0, absMax: 1000, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
  ]));

  m.handleDigitalPayload({ pt: 50 });
  // 50% of [0..100] -> 50% of [0..1000] = 500
  assert.equal(m.channels.get('pt').outputAbs, 500);
});

test('digital channel smoothing accumulates per-channel, independent of siblings', () => {
  const m = new Measurement(makeDigitalConfig([
    { key: 't', type: 'temperature', position: 'atEquipment', unit: 'C',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: -50, absMax: 150, offset: 0 },
      smoothing: { smoothWindow: 3, smoothMethod: 'mean' } },
    { key: 'h', type: 'humidity', position: 'atEquipment', unit: '%',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
      smoothing: { smoothWindow: 3, smoothMethod: 'mean' } },
  ]));

  // Feed only temperature across 3 pushes; humidity never receives a value.
  m.handleDigitalPayload({ t: 10 });
  m.handleDigitalPayload({ t: 20 });
  m.handleDigitalPayload({ t: 30 });

  assert.equal(m.channels.get('t').outputAbs, 20); // mean(10,20,30)=20
  assert.equal(m.channels.get('t').storedValues.length, 3);
  // Humidity channel must be untouched.
  assert.equal(m.channels.get('h').storedValues.length, 0);
  assert.equal(m.channels.get('h').outputAbs, 0);
});

test('digital channel rejects non-numeric values in summary', () => {
  const m = new Measurement(makeDigitalConfig([
    { key: 't', type: 'temperature', position: 'atEquipment', unit: 'C',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: -50, absMax: 150, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
  ]));

  // A non-numeric value is reported in the summary, not thrown, and the
  // channel output stays at its initial 0.
  const res = m.handleDigitalPayload({ t: 'banana' });
  assert.equal(res.t.ok, false);
  assert.equal(res.t.reason, 'non-numeric');
  assert.equal(m.channels.get('t').outputAbs, 0);
});

test('digital channel supports per-channel outlier detection', () => {
  const m = new Measurement(makeDigitalConfig([
    { key: 't', type: 'temperature', position: 'atEquipment', unit: 'C',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: -50, absMax: 150, offset: 0 },
      smoothing: { smoothWindow: 10, smoothMethod: 'none' },
      outlierDetection: { enabled: true, method: 'zscore', threshold: 3 } },
  ]));

  // Seed a tight baseline then lob an obvious spike.
  for (const v of [20, 20, 20, 20, 20, 20]) m.handleDigitalPayload({ t: v });
  const baselineOut = m.channels.get('t').outputAbs;
  m.handleDigitalPayload({ t: 1e6 });
  assert.equal(m.channels.get('t').outputAbs, baselineOut, 'spike must be rejected as outlier');
});

test('getDigitalOutput produces one entry per channel', () => {
  const m = new Measurement(makeDigitalConfig([
    { key: 't', type: 'temperature', position: 'atEquipment', unit: 'C',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: -50, absMax: 150, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
    { key: 'h', type: 'humidity', position: 'atEquipment', unit: '%',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
  ]));

  // The snapshot keys by channel key and carries value + metadata.
  m.handleDigitalPayload({ t: 25, h: 40 });
  const out = m.getDigitalOutput();
  assert.ok(out.channels.t);
  assert.ok(out.channels.h);
  assert.equal(out.channels.t.mAbs, 25);
  assert.equal(out.channels.h.mAbs, 40);
  assert.equal(out.channels.t.type, 'temperature');
  assert.equal(out.channels.h.unit, '%');
});

test('digital mode with empty channels array still constructs cleanly', () => {
  const m = new Measurement(makeDigitalConfig([]));
  assert.equal(m.mode, 'digital');
  assert.equal(m.channels.size, 0);
  // No throw on empty payload.
  assert.deepEqual(m.handleDigitalPayload({ anything: 1 }), {});
});

test('digital mode ignores malformed channel entries in config', () => {
  // Only the fully-formed entry should survive channel construction.
  const m = new Measurement(makeDigitalConfig([
    { key: 'valid', type: 'temperature', position: 'atEquipment', unit: 'C',
      scaling: { enabled: false, inputMin: 0, inputMax: 1, absMin: 0, absMax: 100, offset: 0 },
      smoothing: { smoothWindow: 1, smoothMethod: 'none' } },
    null, // malformed
    { key: 'no_type' }, // missing type
    { type: 'pressure' }, // missing key
  ]));
  assert.equal(m.channels.size, 1);
  assert.ok(m.channels.has('valid'));
});
|
||||
Reference in New Issue
Block a user