sebseb7
2025-12-23 06:46:12 +01:00
parent ecaf8ab2a5
commit 1b56e2cc42
12 changed files with 475 additions and 599 deletions

server.js (184 lines changed)

@@ -486,8 +486,15 @@ function evaluateRules(readings) {
-        // Let's insert every poll to have history graph? Or just changes?
-        // Graphs need continuous data. Let's insert every poll for now (small scale).
-        db.prepare('INSERT INTO output_log (dev_name, port, state, level, rule_id, rule_name) VALUES (?, ?, ?, ?, ?, ?)')
-            .run(val.devName || 'Unknown', val.port || 0, val.state, val.level, val.ruleId, val.ruleName);
+        // Log only on change: first sample, on/off flip, or level change while on
+        let shouldLog = false;
+        if (!prev) shouldLog = true;
+        else if (prev.state !== val.state || (val.state === 1 && prev.level !== val.level)) shouldLog = true;
+        if (shouldLog) {
+            db.prepare('INSERT INTO output_log (dev_name, port, state, level, rule_id, rule_name) VALUES (?, ?, ?, ?, ?, ?)')
+                .run(val.devName || 'Unknown', val.port || 0, val.state, val.level, val.ruleId, val.ruleName);
+        }

         // Detect Change for Alarms
         if (prev) {
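The new logging rule writes a row only when something observable changed: the first sample seen for an output, an on/off flip, or a level change while the output is on. A minimal standalone sketch of the same predicate, assuming the prev/val shape inferred from context (shouldLogOutput is a hypothetical name, not part of this commit):

// Hypothetical extraction of the change-only logging predicate.
// Assumed shape for prev/val: { state: 0|1, level: number }
function shouldLogOutput(prev, val) {
    if (!prev) return true;                                        // first sample for this output
    if (prev.state !== val.state) return true;                     // on/off flip
    if (val.state === 1 && prev.level !== val.level) return true;  // level change while on
    return false;                                                  // level changes while off are ignored
}

// Example sequence of [state, level]: [1,5] [1,5] [1,7] [0,0] [0,3]
// logs [1,5], skips the repeat, logs [1,7], logs [0,0], skips [0,3].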
@@ -1193,42 +1200,187 @@ app.get('/api/devices', (req, res) => {
 // API: History
 app.get('/api/history', (req, res) => {
     try {
-        const { devName, port, range } = req.query;
+        const { devName, port, range, offset = 0 } = req.query;
         if (!devName || !port) return res.status(400).json({ error: 'Missing devName or port' });

-        let timeFilter;
+        const off = parseInt(offset, 10) || 0;
+        let bucketSize; // seconds
         switch (range) {
-            case 'week': timeFilter = "-7 days"; break;
-            case 'month': timeFilter = "-30 days"; break;
-            case 'day': default: timeFilter = "-24 hours"; break;
+            case 'week':
+                bucketSize = 15 * 60;
+                break;
+            case 'month':
+                bucketSize = 60 * 60;
+                break;
+            case 'day': default:
+                bucketSize = 3 * 60;
+                break;
         }

+        // Window boundaries: offset pages back in whole ranges (offset 0 = most recent window)
+        let durationSec;
+        if (range === 'week') durationSec = 7 * 24 * 3600;
+        else if (range === 'month') durationSec = 30 * 24 * 3600;
+        else durationSec = 24 * 3600; // day
+        const endOffsetSec = off * durationSec;
+        const startOffsetSec = (off + 1) * durationSec;
+        const endMod = `-${endOffsetSec} seconds`;
+        const startMod = `-${startOffsetSec} seconds`;
         // Select raw data
         const stmt = db.prepare(`
-            SELECT timestamp || 'Z' as timestamp, temp_c, humidity, vpd, fan_speed, on_speed
+            SELECT strftime('%s', timestamp) as ts, temp_c, humidity, fan_speed
             FROM readings
-            WHERE dev_name = ? AND port = ? AND timestamp >= datetime('now', ?)
+            WHERE dev_name = ? AND port = ?
+              AND timestamp >= datetime('now', ?)
+              AND timestamp < datetime('now', ?)
             ORDER BY timestamp ASC
         `);
-        const rows = stmt.all(devName, parseInt(port, 10), timeFilter);
-        res.json(rows);
+        const rows = stmt.all(devName, parseInt(port, 10), startMod, endMod);
+        if (rows.length === 0) return res.json({ start: 0, step: bucketSize, temps: [], hums: [], levels: [] });
+        // Aggregate into fixed-size buckets aligned to bucketSize
+        const startTs = parseInt(rows[0].ts, 10);
+        const roundedStart = Math.floor(startTs / bucketSize) * bucketSize;
+        const buckets = new Map();
+        rows.forEach(r => {
+            const ts = parseInt(r.ts, 10);
+            const bucketKey = Math.floor(ts / bucketSize) * bucketSize;
+            if (!buckets.has(bucketKey)) {
+                buckets.set(bucketKey, { count: 0, tempSum: 0, humSum: 0, levelSum: 0 });
+            }
+            const b = buckets.get(bucketKey);
+            b.count++;
+            if (r.temp_c !== null) b.tempSum += r.temp_c;
+            if (r.humidity !== null) b.humSum += r.humidity;
+            if (r.fan_speed !== null) b.levelSum += r.fan_speed;
+        });

+        const temps = [];
+        const hums = [];
+        const levels = [];
+        // Emit a dense series from the first to the last bucket; empty buckets become null gaps
+        const lastRow = rows[rows.length - 1];
+        const endTs = parseInt(lastRow.ts, 10);
+        const roundedEnd = Math.floor(endTs / bucketSize) * bucketSize;
+        const numBuckets = (roundedEnd - roundedStart) / bucketSize + 1;
+        for (let i = 0; i < numBuckets; i++) {
+            const currentTs = roundedStart + (i * bucketSize);
+            const b = buckets.get(currentTs);
+            if (b && b.count > 0) {
+                // Averages rounded to 1 decimal place; charts handle fractional levels fine
+                temps.push(parseFloat((b.tempSum / b.count).toFixed(1)));
+                hums.push(parseFloat((b.humSum / b.count).toFixed(1)));
+                levels.push(parseFloat((b.levelSum / b.count).toFixed(1)));
+            } else {
+                // Gap -> null
+                temps.push(null);
+                hums.push(null);
+                levels.push(null);
+            }
+        }
+        res.json({
+            start: roundedStart,
+            step: bucketSize,
+            temps,
+            hums,
+            levels
+        });
     } catch (error) {
         console.error(error);
         res.status(500).json({ error: error.message });
     }
 });
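Instead of raw rows, the endpoint now returns a dense series { start, step, temps, hums, levels } where the i-th sample sits at epoch second start + i * step and gaps are null. A minimal sketch of a client-side decoder, assuming the server is on the same origin (the function name and point shape are illustrative, not part of this commit):

// Hypothetical consumer of the dense history format.
async function loadHistory(devName, port, range = 'day', offset = 0) {
    const qs = new URLSearchParams({ devName, port, range, offset });
    const res = await fetch(`/api/history?${qs}`);
    const { start, step, temps, hums, levels } = await res.json();
    // Rebuild [x, y] points in epoch ms; null values stay null so charts draw gaps
    const toPoints = arr => arr.map((v, i) => [(start + i * step) * 1000, v]);
    return { temps: toPoints(temps), hums: toPoints(hums), levels: toPoints(levels) };
}

// offset pages back in whole windows: offset=0 is the most recent 24 h / 7 d / 30 d,
// offset=1 the window before that, and so on.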
-// API: Output History (New)
+// API: Output History (Compressed)
 app.get('/api/outputs/history', (req, res) => {
     try {
+        const { range, offset = 0 } = req.query;
+        const off = parseInt(offset, 10) || 0;

+        // Same windowing as /api/history: offset pages back in whole ranges
+        let durationSec;
+        if (range === 'week') durationSec = 7 * 24 * 3600;
+        else if (range === 'month') durationSec = 30 * 24 * 3600;
+        else durationSec = 24 * 3600; // day
+        const endOffsetSec = off * durationSec;
+        const startOffsetSec = (off + 1) * durationSec;
+        const endMod = `-${endOffsetSec} seconds`;
+        const startMod = `-${startOffsetSec} seconds`;

         const stmt = db.prepare(`
-            SELECT * FROM output_log
-            WHERE timestamp > datetime('now', '-24 hours')
+            SELECT timestamp, dev_name, port, state, level
+            FROM output_log
+            WHERE timestamp >= datetime('now', ?)
+              AND timestamp < datetime('now', ?)
             ORDER BY timestamp ASC
         `);
-        const rows = stmt.all();
-        res.json(rows);
+        const rows = stmt.all(startMod, endMod);
+        // Compress: group by "dev:port" -> [[ts, state, level], ...], keeping only change points
+        const compressed = {};
+        rows.forEach(r => {
+            const key = `${r.dev_name}:${r.port}`;
+            if (!compressed[key]) compressed[key] = [];
+            // SQLite stores timestamps as "YYYY-MM-DD HH:MM:SS" strings in UTC; append 'Z'
+            // so Date parses them as UTC, and hand the client epoch milliseconds
+            const ts = new Date(r.timestamp + 'Z').getTime();
+            const lvl = r.level === null ? 0 : r.level;
+            const lastEntry = compressed[key][compressed[key].length - 1];
+            if (!lastEntry) {
+                // First entry for this output, always add
+                compressed[key].push([ts, r.state, lvl]);
+            } else if (r.state !== lastEntry[1] || lvl !== lastEntry[2]) {
+                // State or level changed, add a new point
+                compressed[key].push([ts, r.state, lvl]);
+            }
+        });
+        res.json(compressed);
     } catch (error) {
         console.error(error);
         res.status(500).json({ error: error.message });
     }
 });
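Each "dev:port" series now carries only change points, so a consumer must hold each value until the next entry, i.e. treat the series as a step function. A minimal sketch of expanding one series back into segments (the function name and segment shape are illustrative, not part of this commit):

// Hypothetical expansion of one compressed series [[ts, state, level], ...]
// into step segments { from, to, state, level }, with the last run ending "now"
function expandRuns(points, nowMs = Date.now()) {
    const segments = [];
    for (let i = 0; i < points.length; i++) {
        const [ts, state, level] = points[i];
        const next = points[i + 1];
        segments.push({ from: ts, to: next ? next[0] : nowMs, state, level });
    }
    return segments;
}

// e.g. [[t0, 1, 5], [t1, 0, 0]] -> on at level 5 from t0 to t1, then off from t1 until now.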