before functional changes by codex
This commit is contained in:
12
test/README.md
Normal file
12
test/README.md
Normal file
@@ -0,0 +1,12 @@
# monster Test Suite Layout

Required EVOLV layout:

- basic/
- integration/
- edge/
- helpers/

Baseline structure tests:

- basic/structure-module-load.basic.test.js
- integration/structure-examples.integration.test.js
- edge/structure-examples-node-type.edge.test.js
0
test/basic/.gitkeep
Normal file
0
test/basic/.gitkeep
Normal file
27
test/basic/constructor.basic.test.js
Normal file
27
test/basic/constructor.basic.test.js
Normal file
@@ -0,0 +1,27 @@
|
||||
// Basic tests for the Monster constructor and its public output contract.
const test = require('node:test');
const assert = require('node:assert/strict');

const Monster = require('../../src/specificClass');
const { makeMonsterConfig } = require('../helpers/factories');

// Keys that getOutput() must always expose for the report tooling.
const REPORT_FIELDS = [
  'm3PerPuls',
  'm3PerPulse',
  'm3Total',
  'pulse',
  'pulsesRemaining',
  'targetDeltaM3',
  'predictedRateM3h',
];

test('constructor initializes sampling boundaries and target values', () => {
  const monster = new Monster(makeMonsterConfig());

  assert.equal(monster.maxVolume, 20);
  // Pulse boundaries are derived from the volume limits and the per-pulse volume.
  assert.equal(monster.minPuls, Math.round(monster.minVolume / monster.volume_pulse));
  assert.equal(monster.absMaxPuls, Math.round(monster.cap_volume / monster.volume_pulse));
  assert.ok(monster.targetPuls > 0);
});

test('output contract contains report tooling fields', () => {
  const output = new Monster(makeMonsterConfig()).getOutput();

  for (const field of REPORT_FIELDS) {
    assert.ok(Object.prototype.hasOwnProperty.call(output, field));
  }
});
|
||||
8
test/basic/structure-module-load.basic.test.js
Normal file
8
test/basic/structure-module-load.basic.test.js
Normal file
@@ -0,0 +1,8 @@
|
||||
// Smoke test: the packaged entry point must be loadable without throwing.
const test = require('node:test');
const assert = require('node:assert/strict');

test('monster module load smoke', () => {
  const loadEntryPoint = () => require('../../monster.js');
  assert.doesNotThrow(loadEntryPoint);
});
|
||||
0
test/edge/.gitkeep
Normal file
0
test/edge/.gitkeep
Normal file
58
test/edge/sampling-guards.edge.test.js
Normal file
58
test/edge/sampling-guards.edge.test.js
Normal file
@@ -0,0 +1,58 @@
|
||||
// Edge tests for the sampling guard rails: impossible flow bounds must block
// sampling, and the per-sample cooldown must throttle pulses at high flow.
const test = require('node:test');
const assert = require('node:assert/strict');

const Monster = require('../../src/specificClass');
const { makeMonsterConfig, withMockedDate } = require('../helpers/factories');

// Builds a Monster whose constraints section is fully replaced by `constraints`.
function buildMonster(constraints) {
  return new Monster(makeMonsterConfig({ constraints }));
}

test('invalid flow bounds prevent sampling start', () => {
  // nominalFlowMin (10) above flowMax (5) is an impossible operating window.
  const monster = buildMonster({
    samplingtime: 1,
    minVolume: 5,
    maxWeight: 23,
    nominalFlowMin: 10,
    flowMax: 5,
    minSampleIntervalSec: 60,
  });

  monster.handleInput('i_start', true);
  monster.sampling_program();

  assert.equal(monster.invalidFlowBounds, true);
  assert.equal(monster.running, false);
  assert.equal(monster.i_start, false);
});

test('cooldown guard blocks pulses when flow implies oversampling', () => {
  withMockedDate('2024-10-15T00:00:00Z', ({ advance }) => {
    const monster = buildMonster({
      samplingtime: 1,
      minVolume: 5,
      maxWeight: 23,
      nominalFlowMin: 0,
      flowMax: 6000,
      maxRainRef: 10,
      minSampleIntervalSec: 60,
    });

    monster.handleInput('input_q', { value: 200, unit: 'm3/h' });
    monster.handleInput('i_start', true);

    // Simulate 80 seconds of wall-clock time, one tick per mocked second.
    for (let second = 0; second < 80; second += 1) {
      advance(1000);
      monster.tick();
    }

    assert.ok(monster.sumPuls > 0);
    assert.ok(monster.bucketVol > 0);
    assert.ok(monster.missedSamples > 0);
    assert.ok(monster.getSampleCooldownMs() > 0);
  });
});
|
||||
21
test/edge/structure-examples-node-type.edge.test.js
Normal file
21
test/edge/structure-examples-node-type.edge.test.js
Normal file
@@ -0,0 +1,21 @@
|
||||
// Edge test: every shipped example flow must contain at least one node
// whose type is "monster".
const test = require('node:test');
const assert = require('node:assert/strict');
const fs = require('node:fs');
const path = require('node:path');

const examplesDir = path.resolve(__dirname, '../../examples');
const exampleFlows = [
  'basic.flow.json',
  'integration.flow.json',
  'edge.flow.json',
  'monster-dashboard.flow.json',
  'monster-api-dashboard.flow.json'
];

// Counts the monster-typed nodes in a parsed flow array.
const countMonsterNodes = (flow) =>
  flow.filter((node) => node && node.type === 'monster').length;

test('all example flows include node type monster', () => {
  for (const file of exampleFlows) {
    const raw = fs.readFileSync(path.join(examplesDir, file), 'utf8');
    const count = countMonsterNodes(JSON.parse(raw));
    assert.equal(count >= 1, true, file + ' missing monster node');
  }
});
|
||||
0
test/helpers/.gitkeep
Normal file
0
test/helpers/.gitkeep
Normal file
128
test/helpers/factories.js
Normal file
128
test/helpers/factories.js
Normal file
@@ -0,0 +1,128 @@
|
||||
const fs = require('node:fs');
|
||||
const path = require('node:path');
|
||||
const { MeasurementContainer } = require('generalFunctions');
|
||||
|
||||
// Returns a Monster configuration seeded with safe test defaults.
// Top-level sections passed in `overrides` replace the matching default
// sections wholesale (shallow merge, later keys win).
function makeMonsterConfig(overrides = {}) {
  const defaults = {
    general: {
      name: 'Monster Test',
      logging: { enabled: false, logLevel: 'error' },
    },
    asset: {
      emptyWeightBucket: 3,
    },
    constraints: {
      samplingtime: 1,
      minVolume: 5,
      maxWeight: 23,
      nominalFlowMin: 1000,
      flowMax: 6000,
      maxRainRef: 10,
      minSampleIntervalSec: 60,
    },
  };
  return Object.assign(defaults, overrides);
}
|
||||
|
||||
// Runs `fn` with the global Date frozen at `iso`. The callback receives a
// controls object whose advance(ms) moves the mocked clock forward. The real
// Date is always restored (even if `fn` throws) and fn's return value is
// passed through to the caller.
function withMockedDate(iso, fn) {
  const RealDate = Date;
  let now = new RealDate(iso).getTime();

  class MockDate extends RealDate {
    constructor(...args) {
      if (args.length > 0) {
        super(...args);
      } else {
        // Zero-argument construction means "current time": use the mocked clock.
        super(now);
      }
    }

    static now() {
      return now;
    }
  }

  global.Date = MockDate;
  try {
    const controls = {
      advance: (ms) => {
        now += ms;
      },
    };
    return fn(controls);
  } finally {
    global.Date = RealDate;
  }
}
|
||||
|
||||
// Parses the "monsternametijden" schedule CSV into one object per data row,
// keyed by the header columns. Quoted fields may contain commas; the quote
// characters themselves are stripped. Dashed separator lines and empty lines
// are skipped. NOTE(review): escaped quotes ("") inside a field are not
// supported — the quotes simply toggle and disappear.
function parseMonsternametijdenCsv(filePath) {
  const raw = fs.readFileSync(filePath, 'utf8').trim();
  const lines = raw.split(/\r?\n/);
  const columns = lines.shift().split(',');

  // Splits one CSV line on commas that are outside double quotes.
  const splitCsvLine = (line) => {
    const fields = [];
    let field = '';
    let quoted = false;
    for (const ch of line) {
      if (ch === '"') {
        quoted = !quoted;
      } else if (ch === ',' && !quoted) {
        fields.push(field);
        field = '';
      } else {
        field += ch;
      }
    }
    fields.push(field);
    return fields;
  };

  return lines
    .filter((line) => line && !line.startsWith('-----------'))
    .map((line) => {
      const values = splitCsvLine(line);
      return Object.fromEntries(columns.map((col, idx) => [col, values[idx]]));
    });
}
|
||||
|
||||
// Builds a minimal child-node stand-in exposing a flow MeasurementContainer
// plus the config fields that Monster.registerChild() inspects.
// All options are optional; defaults model a downstream m3/h flow sensor.
function makeFlowMeasurementChild(options = {}) {
  const {
    id = 'flow-child-1',
    name = 'FlowSensor',
    positionVsParent = 'downstream',
    unit = 'm3/h',
  } = options;

  const measurements = new MeasurementContainer({
    autoConvert: true,
    defaultUnits: { flow: 'm3/h' },
  });

  const config = {
    general: { id, name, unit },
    functionality: { positionVsParent },
    asset: { type: 'flow', unit },
  };

  return { config, measurements };
}
|
||||
|
||||
// Loads the rain fixture from test/seed_data/raindataFormat.json.
function loadRainSeed() {
  return JSON.parse(
    fs.readFileSync(path.join(__dirname, '..', 'seed_data', 'raindataFormat.json'), 'utf8')
  );
}
|
||||
|
||||
// Loads and parses the sampling-schedule fixture
// from test/seed_data/monsternametijden.csv.
function loadScheduleSeed() {
  return parseMonsternametijdenCsv(
    path.join(__dirname, '..', 'seed_data', 'monsternametijden.csv')
  );
}
|
||||
|
||||
module.exports = {
|
||||
makeMonsterConfig,
|
||||
withMockedDate,
|
||||
makeFlowMeasurementChild,
|
||||
loadRainSeed,
|
||||
loadScheduleSeed,
|
||||
};
|
||||
0
test/integration/.gitkeep
Normal file
0
test/integration/.gitkeep
Normal file
49
test/integration/flow-and-schedule.integration.test.js
Normal file
49
test/integration/flow-and-schedule.integration.test.js
Normal file
@@ -0,0 +1,49 @@
|
||||
// Integration tests: measured + manual flow averaging, and rain/schedule
// payload handling driving the prediction context and the next sample date.
const test = require('node:test');
const assert = require('node:assert/strict');

const Monster = require('../../src/specificClass');
const {
  makeMonsterConfig,
  withMockedDate,
  makeFlowMeasurementChild,
  loadRainSeed,
  loadScheduleSeed,
} = require('../helpers/factories');

// Normalizes a Date or numeric timestamp to epoch milliseconds.
const toEpochMs = (value) => (value instanceof Date ? value.getTime() : Number(value));

test('effective flow uses average of measured and manual flow', () => {
  withMockedDate('2024-10-15T00:00:00Z', ({ advance }) => {
    const monster = new Monster(makeMonsterConfig());
    const child = makeFlowMeasurementChild({ positionVsParent: 'downstream' });
    monster.registerChild(child, 'measurement');

    // Measured flow of 60 m3/h on the downstream child...
    child.measurements
      .type('flow')
      .variant('measured')
      .position('downstream')
      .value(60, Date.now(), 'm3/h');

    // ...averaged with a manual flow of 20 m3/h gives 40 m3/h.
    monster.handleInput('input_q', { value: 20, unit: 'm3/h' });
    advance(1000);
    monster.tick();

    assert.equal(monster.q, 40);
  });
});

test('rain and schedule payloads update prediction context and next date', () => {
  withMockedDate('2024-10-15T00:00:00Z', () => {
    const monster = new Monster(makeMonsterConfig());

    monster.aquonSampleName = '112100';
    monster.handleInput('rain_data', loadRainSeed());
    monster.handleInput('monsternametijden', loadScheduleSeed());

    assert.ok(monster.avgRain >= 0);
    assert.ok(monster.sumRain >= 0);
    const nextDate = toEpochMs(monster.nextDate);
    assert.ok(Number.isFinite(nextDate));
    assert.ok(nextDate > Date.now());
  });
});
|
||||
32
test/integration/structure-examples.integration.test.js
Normal file
32
test/integration/structure-examples.integration.test.js
Normal file
@@ -0,0 +1,32 @@
|
||||
// Integration tests for the examples package: all required files exist and
// every .flow.json parses to a Node-RED flow array.
const test = require('node:test');
const assert = require('node:assert/strict');
const fs = require('node:fs');
const path = require('node:path');

const examplesDir = path.resolve(__dirname, '../../examples');
const requiredFiles = [
  'README.md',
  'basic.flow.json',
  'integration.flow.json',
  'edge.flow.json',
  'monster-dashboard.flow.json',
  'monster-api-dashboard.flow.json'
];
// Only the flow exports need to be JSON-parseable.
const flowFiles = requiredFiles.filter((file) => file.endsWith('.flow.json'));

// Reads and parses one example file relative to the examples directory.
function loadJson(file) {
  const raw = fs.readFileSync(path.join(examplesDir, file), 'utf8');
  return JSON.parse(raw);
}

test('examples package exists for monster', () => {
  for (const file of requiredFiles) {
    assert.equal(fs.existsSync(path.join(examplesDir, file)), true, file + ' missing');
  }
});

test('example flows are parseable arrays for monster', () => {
  for (const file of flowFiles) {
    assert.equal(Array.isArray(loadJson(file)), true);
  }
});
|
||||
Reference in New Issue
Block a user