This commit is contained in:
sebseb7
2025-12-24 23:11:24 +01:00
parent 23a6f900ec
commit 0d08a4d924
5 changed files with 202 additions and 186 deletions

View File

@@ -35,14 +35,15 @@ export class ACInfinityClient {
async login() { async login() {
try { try {
// AC Infinity API does not accept passwords greater than 25 characters // AC Infinity API does not accept passwords greater than 25 characters - UPDATE: Reference impl uses full password?
const normalizedPassword = this.password.substring(0, 25); // const normalizedPassword = this.password.substring(0, 25);
const normalizedPassword = this.password;
const response = await fetch(`${this.host}${API_URL_LOGIN}`, { const response = await fetch(`${this.host}${API_URL_LOGIN}`, {
method: 'POST', method: 'POST',
headers: { headers: {
'User-Agent': 'ACController/1.9.7 (com.acinfinity.humiture; build:533; iOS 18.5.0) Alamofire/5.10.2', 'User-Agent': 'ACController/1.9.7 (com.acinfinity.humiture; build:533; iOS 18.5.0) Alamofire/5.10.2',
'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8', 'Content-Type': 'application/x-www-form-urlencoded',
}, },
body: new URLSearchParams({ body: new URLSearchParams({
appEmail: this.email, appEmail: this.email,
@@ -63,6 +64,7 @@ export class ACInfinityClient {
console.log('[AC] Successfully logged in to AC Infinity API'); console.log('[AC] Successfully logged in to AC Infinity API');
return this.userId; return this.userId;
} catch (error) { } catch (error) {
console.error('[AC] Login error details:', error); // Added detailed logging
if (error instanceof ACInfinityClientError) { if (error instanceof ACInfinityClientError) {
throw error; throw error;
} }
@@ -129,6 +131,9 @@ export class ACInfinityClient {
for (const device of devices) { for (const device of devices) {
const devId = device.devId; const devId = device.devId;
const devName = device.devName || `device-${devId}`; const devName = device.devName || `device-${devId}`;
// Use deviceInfo if available (newer API structure); otherwise fall back to the root object / devSettings
const info = device.deviceInfo || device;
const settings = device.devSettings || info;
// Normalize device name for use as identifier // Normalize device name for use as identifier
const deviceId = devName const deviceId = devName
@@ -136,52 +141,65 @@ export class ACInfinityClient {
.replace(/[^a-z0-9]+/g, '-') .replace(/[^a-z0-9]+/g, '-')
.replace(/^-|-$/g, ''); .replace(/^-|-$/g, '');
// Extract sensor data from device settings or sensor fields // --- Device Level Sensors ---
// Temperature is stored as Celsius * 100
if (device.devSettings?.temperature !== undefined) { // Temperature (Celsius * 100)
if (info.temperature !== undefined) {
readings.push({ readings.push({
device: deviceId, device: deviceId,
channel: 'temperature', channel: 'temperature',
value: device.devSettings.temperature / 100, value: info.temperature / 100,
}); });
} else if (device.temperature !== undefined) { } else if (settings.temperature !== undefined) {
readings.push({ readings.push({
device: deviceId, device: deviceId,
channel: 'temperature', channel: 'temperature',
value: device.temperature / 100, value: settings.temperature / 100,
}); });
} }
// Humidity is stored as % * 100 // Humidity (% * 100)
if (device.devSettings?.humidity !== undefined) { if (info.humidity !== undefined) {
readings.push({ readings.push({
device: deviceId, device: deviceId,
channel: 'humidity', channel: 'humidity',
value: device.devSettings.humidity / 100, value: info.humidity / 100,
}); });
} else if (device.humidity !== undefined) { } else if (settings.humidity !== undefined) {
readings.push({ readings.push({
device: deviceId, device: deviceId,
channel: 'humidity', channel: 'humidity',
value: device.humidity / 100, value: settings.humidity / 100,
}); });
} }
// VPD if available // VPD
if (device.devSettings?.vpdnums !== undefined) { if (info.vpdnums !== undefined) {
readings.push({ readings.push({
device: deviceId, device: deviceId,
channel: 'vpd', channel: 'vpd',
value: device.devSettings.vpdnums / 100, value: info.vpdnums / 100,
});
} else if (settings.vpdnums !== undefined) {
readings.push({
device: deviceId,
channel: 'vpd',
value: settings.vpdnums / 100,
}); });
} }
// Check for port-level sensors (some controllers have multiple ports) // --- Port Level Sensors/State ---
if (device.devPortList && Array.isArray(device.devPortList)) { const ports = info.ports || device.devPortList;
for (const port of device.devPortList) { if (ports && Array.isArray(ports)) {
const portId = port.portId || port.port; for (const port of ports) {
const portDeviceId = `${deviceId}-port${portId}`; const portId = port.port || port.portId;
const portName = port.portName || `port${portId}`;
// Create a descriptive suffix for the port device, e.g. "wall-fan" or "wall-port1"
// If portName is generic "Port X", use number. If it's specific "Fan", use that.
const suffix = portName.toLowerCase().replace(/[^a-z0-9]+/g, '-');
const portDeviceId = `${deviceId}-${suffix}`;
// Port-specific sensors (if any — temperature is usually reported at the device level)
if (port.temperature !== undefined) { if (port.temperature !== undefined) {
readings.push({ readings.push({
device: portDeviceId, device: portDeviceId,
@@ -189,7 +207,6 @@ export class ACInfinityClient {
value: port.temperature / 100, value: port.temperature / 100,
}); });
} }
if (port.humidity !== undefined) { if (port.humidity !== undefined) {
readings.push({ readings.push({
device: portDeviceId, device: portDeviceId,
@@ -197,6 +214,15 @@ export class ACInfinityClient {
value: port.humidity / 100, value: port.humidity / 100,
}); });
} }
// Level / Speed (speak)
if (port.speak !== undefined) {
readings.push({
device: portDeviceId,
channel: 'level',
value: port.speak,
});
}
} }
} }
} }

View File

@@ -21,5 +21,5 @@ export default {
pollIntervalMs: parseInt(process.env.POLL_INTERVAL_MS || '60000', 10), pollIntervalMs: parseInt(process.env.POLL_INTERVAL_MS || '60000', 10),
// AC Infinity API // AC Infinity API
acApiHost: process.env.AC_API_HOST || 'https://www.acinfinity.com', acApiHost: process.env.AC_API_HOST || 'http://www.acinfinityserver.com',
}; };

View File

@@ -47,86 +47,92 @@ export function generateApiKey(db, name, devicePrefix) {
} }
/** /**
* Insert sensor readings into the database * Insert sensor readings with RLE (Run-Length Encoding) logic
* @param {Database} db - SQLite database instance * @param {Database} db - SQLite database instance
* @param {string} devicePrefix - Prefix to prepend to device names * @param {string} devicePrefix - Prefix to prepend to device names
* @param {Array} readings - Array of {device, channel, value} objects * @param {Array} readings - Array of readings
* @param {Date} timestamp - Timestamp for all readings (defaults to now) * @param {Date} timestamp - Timestamp for all readings
*/ */
export function insertReadings(db, devicePrefix, readings, timestamp = new Date()) { export function insertReadingsSmart(db, devicePrefix, readings, timestamp = new Date()) {
const isoTimestamp = timestamp.toISOString(); const isoTimestamp = timestamp.toISOString();
const stmt = db.prepare(` const stmtLast = db.prepare(`
INSERT INTO sensor_data (timestamp, device, channel, value) SELECT id, value, data, data_type
VALUES (?, ?, ?, ?) FROM sensor_events
WHERE device = ? AND channel = ?
ORDER BY timestamp DESC
LIMIT 1
`); `);
const insertMany = db.transaction((items) => { const stmtUpdate = db.prepare(`
UPDATE sensor_events SET until = ? WHERE id = ?
`);
const stmtInsert = db.prepare(`
INSERT INTO sensor_events (timestamp, until, device, channel, value, data, data_type)
VALUES (?, NULL, ?, ?, ?, ?, ?)
`);
const transaction = db.transaction((items) => {
let inserted = 0;
let updated = 0;
for (const reading of items) { for (const reading of items) {
const fullDevice = `${devicePrefix}${reading.device}`; const fullDevice = `${devicePrefix}${reading.device}`;
stmt.run(isoTimestamp, fullDevice, reading.channel, reading.value); const channel = reading.channel;
// Determine type and values
let dataType = 'number';
let value = null;
let data = null;
if (reading.value !== undefined && reading.value !== null) {
dataType = 'number';
value = reading.value;
} else if (reading.data !== undefined) {
dataType = 'json';
data = typeof reading.data === 'string' ? reading.data : JSON.stringify(reading.data);
} else {
continue; // Skip invalid
} }
// Check last reading for RLE
const last = stmtLast.get(fullDevice, channel);
let isDuplicate = false;
if (last && last.data_type === dataType) {
if (dataType === 'number') {
// Compare numbers using a tiny epsilon (Number.EPSILON) so floating-point
// representation noise does not break a run; RLE treats effectively identical readings as duplicates.
if (Math.abs(last.value - value) < Number.EPSILON) {
isDuplicate = true;
}
} else {
// Compare JSON strings
if (last.data === data) {
isDuplicate = true;
}
}
}
if (isDuplicate) {
stmtUpdate.run(isoTimestamp, last.id);
updated++;
} else {
stmtInsert.run(isoTimestamp, fullDevice, channel, value, data, dataType);
inserted++;
}
}
return { inserted, updated };
}); });
insertMany(readings); return transaction(readings);
return readings.length;
} }
/** // Temporary stubs for aggregators until they are redesigned for the new schema
* Aggregate raw data into 10-minute buckets export function aggregate10Minutes(db) { return 0; }
* @param {Database} db - SQLite database instance export function aggregate1Hour(db) { return 0; }
* @returns {number} - Number of aggregated records created
*/
export function aggregate10Minutes(db) {
// Get the cutoff time (10 minutes ago, rounded down to 10-min boundary)
const now = new Date();
const cutoff = new Date(Math.floor(now.getTime() / 600000) * 600000 - 600000);
const cutoffISO = cutoff.toISOString();
const result = db.prepare(`
INSERT OR REPLACE INTO sensor_data_10m (timestamp, device, channel, value, sample_count)
SELECT
datetime(strftime('%s', timestamp) / 600 * 600, 'unixepoch') as bucket,
device,
channel,
AVG(value) as avg_value,
COUNT(*) as sample_count
FROM sensor_data
WHERE timestamp < ?
AND timestamp >= datetime(?, '-1 hour')
GROUP BY bucket, device, channel
`).run(cutoffISO, cutoffISO);
return result.changes;
}
/**
* Aggregate 10-minute data into 1-hour buckets
* @param {Database} db - SQLite database instance
* @returns {number} - Number of aggregated records created
*/
export function aggregate1Hour(db) {
// Get the cutoff time (1 hour ago, rounded down to hour boundary)
const now = new Date();
const cutoff = new Date(Math.floor(now.getTime() / 3600000) * 3600000 - 3600000);
const cutoffISO = cutoff.toISOString();
const result = db.prepare(`
INSERT OR REPLACE INTO sensor_data_1h (timestamp, device, channel, value, sample_count)
SELECT
datetime(strftime('%s', timestamp) / 3600 * 3600, 'unixepoch') as bucket,
device,
channel,
SUM(value * sample_count) / SUM(sample_count) as weighted_avg,
SUM(sample_count) as total_samples
FROM sensor_data_10m
WHERE timestamp < ?
AND timestamp >= datetime(?, '-1 day')
GROUP BY bucket, device, channel
`).run(cutoffISO, cutoffISO);
return result.changes;
}
/** /**
* Clean up old data according to retention policy * Clean up old data according to retention policy
@@ -136,21 +142,14 @@ export function aggregate1Hour(db) {
export function cleanupOldData(db) { export function cleanupOldData(db) {
const now = new Date(); const now = new Date();
// Delete raw data older than 7 days // Delete events older than 30 days
const weekAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
const rawDeleted = db.prepare(`
DELETE FROM sensor_data WHERE timestamp < ?
`).run(weekAgo.toISOString());
// Delete 10-minute data older than 30 days
const monthAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000); const monthAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
const aggDeleted = db.prepare(` const eventsDeleted = db.prepare(`
DELETE FROM sensor_data_10m WHERE timestamp < ? DELETE FROM sensor_events WHERE timestamp < ?
`).run(monthAgo.toISOString()); `).run(monthAgo.toISOString());
return { return {
rawDeleted: rawDeleted.changes, eventsDeleted: eventsDeleted.changes
aggregatedDeleted: aggDeleted.changes
}; };
} }
@@ -171,7 +170,7 @@ export function listApiKeys(db) {
export default { export default {
validateApiKey, validateApiKey,
generateApiKey, generateApiKey,
insertReadings, insertReadingsSmart,
aggregate10Minutes, aggregate10Minutes,
aggregate1Hour, aggregate1Hour,
cleanupOldData, cleanupOldData,

View File

@@ -24,8 +24,8 @@ export function initDatabase(dbPath) {
db.pragma('journal_mode = WAL'); db.pragma('journal_mode = WAL');
// Create tables // Create tables
// API keys for agent authentication
db.exec(` db.exec(`
-- API keys for agent authentication
CREATE TABLE IF NOT EXISTS api_keys ( CREATE TABLE IF NOT EXISTS api_keys (
id INTEGER PRIMARY KEY AUTOINCREMENT, id INTEGER PRIMARY KEY AUTOINCREMENT,
key TEXT UNIQUE NOT NULL, key TEXT UNIQUE NOT NULL,
@@ -34,49 +34,31 @@ export function initDatabase(dbPath) {
created_at DATETIME DEFAULT CURRENT_TIMESTAMP, created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
last_used_at DATETIME last_used_at DATETIME
); );
`);
-- Raw sensor data (1-minute resolution, kept for 1 week) // --- MIGRATION: Drop old tables if they exist ---
CREATE TABLE IF NOT EXISTS sensor_data ( // User requested deleting old sensor data but keeping keys.
db.exec(`
DROP TABLE IF EXISTS sensor_data;
DROP TABLE IF EXISTS sensor_data_10m;
DROP TABLE IF EXISTS sensor_data_1h;
`);
// --- NEW SCHEMA: Sensor Events with RLE support ---
db.exec(`
CREATE TABLE IF NOT EXISTS sensor_events (
id INTEGER PRIMARY KEY AUTOINCREMENT, id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp DATETIME NOT NULL, timestamp DATETIME NOT NULL,
until DATETIME, -- NULL for a point event; set to the end time of the run when duplicate readings are coalesced
device TEXT NOT NULL, device TEXT NOT NULL,
channel TEXT NOT NULL, channel TEXT NOT NULL,
value REAL NOT NULL value REAL, -- Nullable
data TEXT, -- Nullable (JSON)
data_type TEXT NOT NULL -- 'number' or 'json'
); );
-- Index for time-based queries and cleanup CREATE INDEX IF NOT EXISTS idx_sensor_events_search
CREATE INDEX IF NOT EXISTS idx_sensor_data_time ON sensor_events(device, channel, timestamp);
ON sensor_data(timestamp);
-- Index for device/channel queries
CREATE INDEX IF NOT EXISTS idx_sensor_data_device
ON sensor_data(device, channel, timestamp);
-- 10-minute aggregated data (kept for 1 month)
CREATE TABLE IF NOT EXISTS sensor_data_10m (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp DATETIME NOT NULL,
device TEXT NOT NULL,
channel TEXT NOT NULL,
value REAL NOT NULL,
sample_count INTEGER NOT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS idx_sensor_data_10m_unique
ON sensor_data_10m(timestamp, device, channel);
-- 1-hour aggregated data (kept forever)
CREATE TABLE IF NOT EXISTS sensor_data_1h (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp DATETIME NOT NULL,
device TEXT NOT NULL,
channel TEXT NOT NULL,
value REAL NOT NULL,
sample_count INTEGER NOT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS idx_sensor_data_1h_unique
ON sensor_data_1h(timestamp, device, channel);
`); `);
console.log('[DB] Database initialized successfully'); console.log('[DB] Database initialized successfully');

View File

@@ -1,5 +1,5 @@
import { WebSocketServer } from 'ws'; import { WebSocketServer } from 'ws';
import { validateApiKey, insertReadings } from '../db/queries.js'; import { validateApiKey, insertReadingsSmart } from '../db/queries.js';
/** /**
* Create and configure the WebSocket server * Create and configure the WebSocket server
@@ -153,30 +153,39 @@ function handleData(ws, message, clientState, db) {
// Validate readings format // Validate readings format
for (const reading of readings) { for (const reading of readings) {
// We strictly require device, channel, and value. // We require device, channel, and EITHER value (number) OR data (json)
// If value is missing (undefined), we skip it, even if 'data' is present. if (!reading.device || !reading.channel) {
if (!reading.device || !reading.channel || reading.value === undefined) { console.warn(`[WS] Skipped invalid reading (missing device/channel) from ${clientState.name}:`, JSON.stringify(reading));
console.warn(`[WS] Skipped invalid reading from ${clientState.name || 'unknown'}:`, JSON.stringify(reading));
skippedCount++; skippedCount++;
continue; continue;
} }
const hasValue = reading.value !== undefined && reading.value !== null;
const hasData = reading.data !== undefined;
if (!hasValue && !hasData) {
console.warn(`[WS] Skipped invalid reading (no value/data) from ${clientState.name}:`, JSON.stringify(reading));
skippedCount++;
continue;
}
validReadings.push(reading); validReadings.push(reading);
} }
if (validReadings.length === 0) { if (validReadings.length === 0) {
if (skippedCount > 0) { if (skippedCount > 0) {
// Acknowledge receipt even if all were skipped, so the agent doesn't retry endlessly if it thinks it's a temp error. console.log(`[WS] Received ${skippedCount} readings, but all were invalid.`);
// But here the agent probably doesn't handle acks effectively anyway.
console.log(`[WS] Received ${skippedCount} readings, but all were valid (non-numeric data dropped).`);
return send(ws, { type: 'ack', count: 0 }); return send(ws, { type: 'ack', count: 0 });
} }
return sendError(ws, 'No valid readings found in batch'); return sendError(ws, 'No valid readings found in batch');
} }
try { try {
const count = insertReadings(db, clientState.devicePrefix, validReadings); const result = insertReadingsSmart(db, clientState.devicePrefix, validReadings);
const count = result.inserted + result.updated;
if (skippedCount > 0) { if (skippedCount > 0) {
console.log(`[WS] Inserted ${count} valid readings (skipped ${skippedCount} invalid/non-numeric readings).`); console.log(`[WS] Processed ${count} readings (inserted: ${result.inserted}, updated: ${result.updated}, skipped: ${skippedCount}).`);
} }
send(ws, { type: 'ack', count }); send(ws, { type: 'ack', count });
} catch (err) { } catch (err) {