Compare commits: ad7a0d1768...main

4 Commits

| SHA1 |
|------|
| 27f52b0c3d |
| 86bea2fa6d |
| e9a66cd1f4 |
| 758684c598 |

README.md (139 lines deleted)
@@ -1,139 +0,0 @@

# TischlerCtrl - Sensor Data Collection System

A Node.js server that collects sensor data from multiple agents via WebSocket, stores it in SQLite with automatic data summarization and retention policies.

## Architecture

```
┌─────────────────────────────────────────────────────────────┐
│                  Central Server (Node.js)                    │
│  ┌─────────────┐   ┌──────────────┐   ┌──────────────────┐  │
│  │  WebSocket  │   │  SQLite DB   │   │  Aggregation &   │  │
│  │   Server    │───│ sensor_data  │   │  Cleanup Jobs    │  │
│  │    :8080    │   │ sensor_10m   │   │    (10m, 1h)     │  │
│  └─────────────┘   │ sensor_1h    │   └──────────────────┘  │
│                    └──────────────┘                         │
└────────┬─────────────────────────────────────────────────────┘
         │
    ┌────┴─────┬─────────────┐
    │          │             │
┌───▼────┐ ┌───▼───┐   ┌─────▼─────┐
│   AC   │ │ Tapo  │   │    CLI    │
│Infinity│ │ Agent │   │   Agent   │
│ Agent  │ │(Rust) │   │  (bash)   │
└────────┘ └───────┘   └───────────┘
```

## Quick Start

### 1. Start the Server

```bash
cd server
cp .env.example .env
npm install
npm start
```

### 2. Generate API Keys

```bash
cd server
node src/cli/generate-key.js "ac-infinity-agent" "ac:"
node src/cli/generate-key.js "tapo-agent" "tapo:"
node src/cli/generate-key.js "custom" "custom:"
```

### 3. Configure and Start AC Infinity Agent

```bash
cd agents/ac-infinity
cp .env.example .env
# Edit .env with your AC Infinity credentials and API key
npm install
npm start
```

### 4. Build and Deploy Tapo Agent (Rust)

```bash
cd agents/tapo
cp config.toml.example config.toml
# Edit config.toml with your Tapo devices and API key

# Build for local machine
cargo build --release

# Or cross-compile for Raspberry Pi (requires cross)
# cargo install cross
# cross build --release --target armv7-unknown-linux-gnueabihf

# Run
./target/release/tapo-agent
# Or: RUST_LOG=info ./target/release/tapo-agent
```

### 5. Use CLI Agent

```bash
# Install websocat (one-time)
cargo install websocat
# Or: sudo apt install websocat

# Send data
export SENSOR_API_KEY="your-custom-api-key"
export SENSOR_SERVER="ws://localhost:8080"
./agents/cli/sensor-send mydevice temperature 24.5
```

## Data Retention Policy

| Resolution | Retention | Source |
|------------|-----------|--------|
| Raw (1 min) | 7 days | `sensor_data` |
| 10 minutes | 30 days | `sensor_data_10m` |
| 1 hour | Forever | `sensor_data_1h` |

Data is averaged when aggregating to higher resolutions.

## WebSocket Protocol

### Authentication
```json
→ {"type": "auth", "apiKey": "your-api-key"}
← {"type": "auth", "success": true, "devicePrefix": "ac:"}
```

### Send Data
```json
→ {"type": "data", "readings": [
    {"device": "ctrl1", "channel": "temperature", "value": 24.5},
    {"device": "ctrl1", "channel": "humidity", "value": 65.0}
  ]}
← {"type": "ack", "count": 2}
```
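
A minimal Node.js agent that speaks this protocol could look like the sketch below. It assumes the `ws` npm package and uses the `SENSOR_SERVER`/`SENSOR_API_KEY` environment variables from the CLI agent example; treat it as illustrative, not the reference implementation.

```javascript
// Minimal agent sketch: authenticate, send one reading, wait for the ack.
// Assumes the `ws` npm package and the message shapes documented above.
const WebSocket = require('ws');

const ws = new WebSocket(process.env.SENSOR_SERVER || 'ws://localhost:8080');

ws.on('open', () => {
  // Authenticate first; data is only accepted after a successful auth reply.
  ws.send(JSON.stringify({ type: 'auth', apiKey: process.env.SENSOR_API_KEY }));
});

ws.on('message', (raw) => {
  const msg = JSON.parse(raw);
  if (msg.type === 'auth' && msg.success) {
    ws.send(JSON.stringify({
      type: 'data',
      readings: [{ device: 'mydevice', channel: 'temperature', value: 24.5 }]
    }));
  } else if (msg.type === 'ack') {
    console.log(`Server stored ${msg.count} reading(s)`);
    ws.close();
  }
});
```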

## Project Structure

```
tischlerctrl/
├── server/              # Central data collection server
│   ├── src/
│   │   ├── index.js     # Entry point
│   │   ├── config.js    # Configuration
│   │   ├── db/          # Database schema & queries
│   │   ├── websocket/   # WebSocket server
│   │   ├── jobs/        # Aggregation & cleanup jobs
│   │   └── cli/         # CLI tools (generate-key)
│   └── data/            # SQLite database files
│
├── agents/
│   ├── ac-infinity/     # Node.js AC Infinity agent
│   ├── tapo/            # Rust Tapo smart plug agent
│   └── cli/             # Bash CLI tool
│
└── README.md
```

## License

MIT

debug_db.js (17 lines deleted)
@@ -1,17 +0,0 @@

const Database = require('better-sqlite3');
const path = require('path');

const dbPath = path.resolve(__dirname, 'server/data/sensors.db');
const db = new Database(dbPath, { readonly: true });

console.log('--- RULES ---');
const rules = db.prepare('SELECT * FROM rules').all();
console.log(JSON.stringify(rules, null, 2));

console.log('\n--- OUTPUT CHANNELS ---');
const outputs = db.prepare('SELECT * FROM output_events WHERE channel = "CircFanLevel" ORDER BY timestamp DESC LIMIT 10').all();
console.table(outputs);

console.log('\n--- SENSOR DATA (ac:tent:temperature) ---');
const sensors = db.prepare('SELECT * FROM sensor_events WHERE device = "ac" AND channel = "tent:temperature" ORDER BY timestamp DESC LIMIT 5').all();
console.table(sensors);

@@ -1,335 +0,0 @@

# Sensor Data Collection System

A Node.js server that collects sensor data from multiple agents via WebSocket, stores it in SQLite with automatic data summarization and retention policies.

## Architecture Overview

```mermaid
graph TB
    subgraph "Central Server (Node.js)"
        WS[WebSocket Server :8080]
        DB[(SQLite Database)]
        AGG[Aggregation Job]
        WS --> DB
        AGG --> DB
    end

    subgraph "AC Infinity Agent (Node.js)"
        AC[AC Infinity Client]
        AC -->|polls every 60s| ACAPI[AC Infinity Cloud API]
        AC -->|WebSocket| WS
    end

    subgraph "Tapo Agent (Rust)"
        TAPO[Tapo Client]
        TAPO -->|polls every 60s| PLUG[Tapo P100/P110]
        TAPO -->|WebSocket| WS
    end

    subgraph "Custom CLI Agent"
        CLI[Shell Script]
        CLI -->|WebSocket| WS
    end
```

---

## User Review Required

> [!IMPORTANT]
> **Tapo Agent Language Choice**: I recommend **Rust** for the Tapo agent because:
> - Compiles to a single ~2MB static binary
> - Uses ~5-10MB RAM at runtime
> - Excellent [tapo crate](https://crates.io/crates/tapo) already exists
> - Easy cross-compilation for Raspberry Pi
>
> Alternatively, I could write it in **Go** (would need to implement protocol from scratch) or as a **Node.js** agent (but you mentioned wanting it lightweight).

> [!IMPORTANT]
> **AC Infinity Credentials**: The AC Infinity API requires email/password authentication to their cloud service. These will need to be stored in configuration.

---

## Project Structure

```
tischlerctrl/
├── server/
│   ├── package.json
│   ├── src/
│   │   ├── index.js              # Entry point
│   │   ├── config.js             # Configuration loader
│   │   ├── db/
│   │   │   ├── schema.js         # SQLite schema + migrations
│   │   │   └── queries.js        # Database operations
│   │   ├── websocket/
│   │   │   ├── server.js         # WebSocket server
│   │   │   └── handlers.js       # Message handlers
│   │   └── jobs/
│   │       ├── aggregator.js     # Data summarization job
│   │       └── cleanup.js        # Data retention cleanup
│   └── data/
│       └── sensors.db            # SQLite database file
│
├── agents/
│   ├── ac-infinity/
│   │   ├── package.json
│   │   └── src/
│   │       ├── index.js          # Entry point
│   │       ├── config.js         # Configuration
│   │       ├── ac-client.js      # AC Infinity API client
│   │       └── ws-client.js      # WebSocket client with reconnect
│   │
│   ├── tapo/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       └── main.rs           # Rust Tapo agent
│   │
│   └── cli/
│       └── sensor-send           # Shell script CLI tool
│
├── .env.example                  # Example environment variables
└── README.md
```

---

## Proposed Changes

### Server - Database Schema

#### [NEW] [schema.js](file:///home/seb/src/tischlerctrl/server/src/db/schema.js)

SQLite tables:

```sql
-- API keys for agent authentication
CREATE TABLE api_keys (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  key TEXT UNIQUE NOT NULL,
  name TEXT NOT NULL,            -- e.g., "ac-infinity-agent"
  device_prefix TEXT NOT NULL,   -- e.g., "ac:" or "tapo:"
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  last_used_at DATETIME
);

-- Raw sensor data (1-minute resolution, kept for 1 week)
CREATE TABLE sensor_data (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  timestamp DATETIME NOT NULL,
  device TEXT NOT NULL,          -- e.g., "ac:controller-69-grow"
  channel TEXT NOT NULL,         -- e.g., "temperature", "humidity", "power"
  value REAL NOT NULL
);
-- SQLite does not allow inline INDEX clauses; indexes are created separately
CREATE INDEX idx_sensor_data_time ON sensor_data (timestamp);
CREATE INDEX idx_sensor_data_device ON sensor_data (device, channel);

-- 10-minute aggregated data (kept for 1 month)
CREATE TABLE sensor_data_10m (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  timestamp DATETIME NOT NULL,   -- Rounded to 10-min boundary
  device TEXT NOT NULL,
  channel TEXT NOT NULL,
  value REAL NOT NULL,           -- Averaged value
  sample_count INTEGER NOT NULL,
  UNIQUE(timestamp, device, channel)
);

-- 1-hour aggregated data (kept forever)
CREATE TABLE sensor_data_1h (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  timestamp DATETIME NOT NULL,   -- Rounded to 1-hour boundary
  device TEXT NOT NULL,
  channel TEXT NOT NULL,
  value REAL NOT NULL,           -- Averaged value
  sample_count INTEGER NOT NULL,
  UNIQUE(timestamp, device, channel)
);
```

---

### Server - WebSocket Protocol

#### [NEW] [server.js](file:///home/seb/src/tischlerctrl/server/src/websocket/server.js)

**Authentication Flow:**

1. Client connects to `ws://server:8080`
2. Client sends: `{ "type": "auth", "apiKey": "xxx" }`
3. Server validates API key, responds: `{ "type": "auth", "success": true, "devicePrefix": "ac:" }`
4. Client is now authenticated and can send data

**Data Ingestion Message:**
```json
{
  "type": "data",
  "readings": [
    { "device": "controller-69-grow", "channel": "temperature", "value": 24.5 },
    { "device": "controller-69-grow", "channel": "humidity", "value": 65.2 }
  ]
}
```

Server prepends `devicePrefix` to device names and adds a timestamp.

**Keepalive:**
- Server sends `ping` every 30 seconds
- Client responds with `pong`
- Connection closed after 90 seconds of no response

---

### Server - Aggregation Jobs

#### [NEW] [aggregator.js](file:///home/seb/src/tischlerctrl/server/src/jobs/aggregator.js)

Runs every 10 minutes (see the sketch after this list):

1. **10-minute aggregation**:
   - Select data from `sensor_data` older than 10 minutes
   - Group by device, channel, and 10-minute bucket
   - Calculate average, insert into `sensor_data_10m`

2. **1-hour aggregation**:
   - Select data from `sensor_data_10m` older than 1 hour
   - Group by device, channel, and 1-hour bucket
   - Calculate weighted average, insert into `sensor_data_1h`
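
For illustration, the 10-minute pass could be a single grouped insert. This is a sketch only: it assumes `better-sqlite3` (as used elsewhere in this plan), ISO-8601 timestamps, and epoch-based bucketing; the real job may differ.

```javascript
// Illustrative 10-minute aggregation pass (assumes better-sqlite3 and the
// sensor_data / sensor_data_10m tables defined in the schema above).
const Database = require('better-sqlite3');
const db = new Database('./data/sensors.db');

db.prepare(`
  INSERT OR REPLACE INTO sensor_data_10m (timestamp, device, channel, value, sample_count)
  SELECT
    datetime((CAST(strftime('%s', timestamp) AS INTEGER) / 600) * 600, 'unixepoch') AS bucket,
    device,
    channel,
    AVG(value),        -- averaged value for the bucket
    COUNT(*)           -- how many raw samples were folded in
  FROM sensor_data
  WHERE timestamp < datetime('now', '-10 minutes')
  GROUP BY bucket, device, channel
`).run();
```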

#### [NEW] [cleanup.js](file:///home/seb/src/tischlerctrl/server/src/jobs/cleanup.js)

Runs every hour (see the sketch below):
- Delete from `sensor_data` where timestamp < NOW - 7 days
- Delete from `sensor_data_10m` where timestamp < NOW - 30 days
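
A minimal sketch of this job, assuming `better-sqlite3` and the retention windows listed above:

```javascript
// Illustrative retention cleanup: two deletes matching the policy table.
function cleanup(db) {
  db.prepare("DELETE FROM sensor_data WHERE timestamp < datetime('now', '-7 days')").run();
  db.prepare("DELETE FROM sensor_data_10m WHERE timestamp < datetime('now', '-30 days')").run();
}
```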

---

### AC Infinity Agent

#### [NEW] [ac-client.js](file:///home/seb/src/tischlerctrl/agents/ac-infinity/src/ac-client.js)

Port of the TypeScript AC Infinity client to JavaScript ES modules:

- `login(email, password)` → Returns userId token
- `getDevicesListAll()` → Returns all controllers with sensor readings
- Polling interval: 60 seconds
- Extracts: temperature, humidity, VPD (if available) per controller

**Data extraction from API response:**
```javascript
// Each device in response has:
// - devId, devName
// - devSettings.temperature (°C * 100)
// - devSettings.humidity (% * 100)
// We normalize and send to server
```

#### [NEW] [ws-client.js](file:///home/seb/src/tischlerctrl/agents/ac-infinity/src/ws-client.js)

WebSocket client with:
- Auto-reconnect with exponential backoff (1s → 2s → 4s → ... → 60s max)
- Authentication on connect
- Heartbeat response
- Message queue during disconnection

---

### Tapo Agent (Rust)

#### [NEW] [main.rs](file:///home/seb/src/tischlerctrl/agents/tapo/src/main.rs)

Uses the [tapo crate](https://crates.io/crates/tapo) for P100/P110 communication.

**Features:**
- Configuration via environment variables or TOML file
- WebSocket client with the tungstenite crate
- Auto-reconnect with backoff
- Polls devices every 60 seconds

**Data collected:**

| Device | Channel | Description |
|--------|---------|-------------|
| P100 | `state` | 0 = off, 1 = on |
| P110 | `state` | 0 = off, 1 = on |
| P110 | `power` | Current power in watts |
| P110 | `energy_today` | Energy used today in Wh |

**Build for Raspberry Pi:**
```bash
# Cross-compile for ARM
cross build --release --target armv7-unknown-linux-gnueabihf
# Binary: ~2MB, runs with ~8MB RAM
```

---

### Custom CLI Agent

#### [NEW] [sensor-send](file:///home/seb/src/tischlerctrl/agents/cli/sensor-send)

A shell script using `websocat` (lightweight WebSocket CLI tool):

```bash
#!/bin/bash
# Usage: sensor-send <device> <channel> <value>

API_KEY="${SENSOR_API_KEY:-}"
SERVER="${SENSOR_SERVER:-ws://localhost:8080}"

# Example: sensor-send mydevice temperature 23.5
```

Requires: `websocat` (single binary, ~3MB, available via cargo or apt)

---

## Configuration Examples

### Server `.env`
```bash
PORT=8080
DB_PATH=./data/sensors.db
# Generate API keys via CLI: node src/cli/generate-key.js "ac-infinity" "ac:"
```

### AC Infinity Agent `.env`
```bash
SERVER_URL=ws://192.168.1.100:8080
API_KEY=your-api-key-here
AC_EMAIL=your@email.com
AC_PASSWORD=your-password
POLL_INTERVAL_MS=60000
```

### Tapo Agent `config.toml`
```toml
server_url = "ws://192.168.1.100:8080"
api_key = "your-api-key-here"
poll_interval_secs = 60

[[devices]]
ip = "192.168.1.50"
name = "grow-light-plug"
type = "P110"  # or "P100"
tapo_email = "your@email.com"
tapo_password = "your-tapo-password"
```

---

## Verification Plan

### Automated Tests
1. **Server unit tests**: Database operations, aggregation logic
2. **Integration test**: Start server, connect mock agent, verify data flow
3. **Run commands**:
   ```bash
   cd server && npm test
   cd agents/ac-infinity && npm test
   ```

### Manual Verification
1. Start server, verify WebSocket accepts connections
2. Send test data via CLI agent, verify it appears in database
3. Wait 10+ minutes, verify aggregation runs and data appears in `sensor_data_10m`
4. Connect AC Infinity agent with real credentials, verify sensor readings
5. Deploy Tapo agent to Raspberry Pi, verify plug data collection

nginx_proxy.md (120 lines deleted)
@@ -1,120 +0,0 @@

# Setting up Nginx as a Reverse Proxy

This guide explains how to configure Nginx to act as a reverse proxy for the TischlerCtrl server. This allows you to host the application on standard HTTP/HTTPS ports (80/443) and adds a layer of security.

## Prerequisites

- A Linux server (Debian/Ubuntu/Raspberry Pi OS).
- Root or sudo access.
- TischlerCtrl server running on localhost (default port: `8080`).

## 1. Install Nginx

If Nginx is not already installed:

```bash
sudo apt update
sudo apt install nginx
```

## 2. Create Configuration File

Create a new configuration file for the site in `/etc/nginx/sites-available/`. We'll name it `tischlerctrl`.

```bash
sudo nano /etc/nginx/sites-available/tischlerctrl
```

Paste the following configuration using your actual domain name or IP address:

```nginx
server {
    listen 80;
    server_name your-domain.com;  # Replace with your domain or IP address

    # Access logs
    access_log /var/log/nginx/tischlerctrl.access.log;
    error_log /var/log/nginx/tischlerctrl.error.log;

    location /agentapi/ {
        proxy_pass http://localhost:8080/;  # Trailing slash strips /agentapi/
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;

        # Forwarding real client IP
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
```

### Key Configuration Explained

- **proxy_pass**: Forwards requests to your Node.js application running on port 8080.
- **WebSocket Support**: These lines are **critical** for TischlerCtrl as it relies on WebSockets for real-time sensor data:
  ```nginx
  proxy_set_header Upgrade $http_upgrade;
  proxy_set_header Connection 'upgrade';
  ```
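
To confirm that WebSocket upgrades actually pass through the proxy, a quick client check can be run from any machine. This is a sketch assuming the `ws` npm package and the `/agentapi/` location configured above; substitute your real domain.

```javascript
// Quick proxy check: open a WebSocket through Nginx and report success/failure.
const WebSocket = require('ws');

const ws = new WebSocket('ws://your-domain.com/agentapi/'); // use wss:// once SSL is set up

ws.on('open', () => {
  console.log('WebSocket upgrade succeeded through Nginx');
  ws.close();
});
ws.on('error', (err) => console.error('Proxy/WebSocket error:', err.message));
```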

## 3. Enable the Site

Create a symbolic link to the `sites-enabled` directory to activate the configuration:

```bash
sudo ln -s /etc/nginx/sites-available/tischlerctrl /etc/nginx/sites-enabled/
```

## 4. Test and Reload Nginx

Test the configuration for syntax errors:

```bash
sudo nginx -t
```

If the test is successful (returns `syntax is ok`), reload Nginx:

```bash
sudo systemctl reload nginx
```

## 5. SSL Configuration (Recommended)

To secure your connection with HTTPS (especially important for authentication), use Certbot to automatically configure a free Let's Encrypt SSL certificate.

```bash
sudo apt install certbot python3-certbot-nginx
sudo certbot --nginx -d your-domain.com
```

Certbot will automatically modify your Nginx configuration to force HTTPS redirection and manage the SSL certificates.

## 6. Update Client Configurations

Since you are serving the API under `/agentapi/`, you must update your agents' configuration to point to the new URL path.

### WebSocket URL Format

- **Old (Direct):** `ws://server-ip:8080`
- **New (Proxy):** `ws://your-domain.com/agentapi/` (or `wss://` if using SSL)

### Example for Tapo Agent (`config.toml`)

```toml
server_url = "ws://your-domain.com/agentapi/"
# Or with SSL:
# server_url = "wss://your-domain.com/agentapi/"
```

### Example for Environment Variables

For agents using `.env` files:

```bash
SENSOR_SERVER="ws://your-domain.com/agentapi/"
```

promptlog.txt (258 lines deleted)
@@ -1,258 +0,0 @@

uiserver/.env.example (new file, 14 lines)
@@ -0,0 +1,14 @@

# Database path (default: ../server/data/sensors.db)
DB_PATH=

# JWT secret for authentication (CHANGE IN PRODUCTION!)
JWT_SECRET=your-secret-key-here

# WebSocket port for agent connections (default: 3962)
WS_PORT=3962

# Webpack dev server port (default: 3905)
DEV_SERVER_PORT=3905

# Rule runner interval in milliseconds (default: 10000 = 10s)
RULE_RUNNER_INTERVAL=10000

uiserver/api/auth.js (new file, 28 lines)
@@ -0,0 +1,28 @@

/**
 * Auth API - Login endpoint
 */

module.exports = function setupAuthApi(app, { db, bcrypt, jwt, JWT_SECRET }) {
  // POST /api/login
  app.post('/api/login', (req, res) => {
    const { username, password } = req.body;
    try {
      const stmt = db.prepare('SELECT * FROM users WHERE username = ?');
      const user = stmt.get(username);

      if (!user || !bcrypt.compareSync(password, user.password_hash)) {
        return res.status(401).json({ error: 'Invalid credentials' });
      }

      const token = jwt.sign({
        id: user.id,
        username: user.username,
        role: user.role
      }, JWT_SECRET, { expiresIn: '24h' });

      res.json({ token, role: user.role, username: user.username });
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });
};

uiserver/api/devices.js (new file, 26 lines)
@@ -0,0 +1,26 @@

/**
 * Devices API - List unique device/channel pairs
 */

module.exports = function setupDevicesApi(app, { db, getOutputChannels }) {
  // GET /api/devices - Returns list of unique device/channel pairs (sensors + outputs)
  app.get('/api/devices', (req, res) => {
    try {
      if (!db) throw new Error('Database not connected');
      // Get sensor channels
      const sensorStmt = db.prepare("SELECT DISTINCT device, channel FROM sensor_events WHERE data_type = 'number' ORDER BY device, channel");
      const sensorRows = sensorStmt.all();

      // Add output channels with 'output' as device
      const outputChannels = getOutputChannels();
      const outputRows = outputChannels.map(ch => ({
        device: 'output',
        channel: ch.channel
      }));

      res.json([...sensorRows, ...outputRows]);
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });
};

uiserver/api/index.js (new file, 45 lines)
@@ -0,0 +1,45 @@

/**
 * API Routes Index - Sets up all API endpoints
 */

const setupAuthApi = require('./auth');
const setupViewsApi = require('./views');
const setupRulesApi = require('./rules');
const setupOutputsApi = require('./outputs');
const setupOutputConfigApi = require('./output-config');
const setupDevicesApi = require('./devices');
const setupReadingsApi = require('./readings');

module.exports = function setupAllApis(app, context) {
  const { db, bcrypt, jwt, JWT_SECRET, getOutputChannels, getOutputBindings, runRules, activeRuleIds } = context;

  // Auth middleware helpers
  const checkAuth = (req, res, next) => {
    const authHeader = req.headers.authorization;
    if (authHeader) {
      const token = authHeader.split(' ')[1];
      jwt.verify(token, JWT_SECRET, (err, user) => {
        if (user) req.user = user;
        next();
      });
    } else {
      next();
    }
  };

  const requireAdmin = (req, res, next) => {
    if (!req.user || req.user.role !== 'admin') {
      return res.status(403).json({ error: 'Admin access required' });
    }
    next();
  };

  // Setup all API routes
  setupAuthApi(app, { db, bcrypt, jwt, JWT_SECRET });
  setupViewsApi(app, { db, checkAuth, requireAdmin });
  setupRulesApi(app, { db, checkAuth, requireAdmin, runRules, activeRuleIds });
  setupOutputConfigApi(app, { db, checkAuth, requireAdmin });
  setupOutputsApi(app, { db, getOutputChannels, getOutputBindings });
  setupDevicesApi(app, { db, getOutputChannels });
  setupReadingsApi(app, { db });
};

uiserver/api/output-config.js (new file, 162 lines)
@@ -0,0 +1,162 @@

/**
 * Output Config API - CRUD for output channel configurations
 */

module.exports = function setupOutputConfigApi(app, { db, checkAuth, requireAdmin }) {
  // Apply checkAuth middleware to output config routes
  app.use('/api/output-configs', checkAuth);

  // GET /api/output-configs - List all output configs
  app.get('/api/output-configs', (req, res) => {
    try {
      if (!db) throw new Error('Database not connected');
      const stmt = db.prepare('SELECT * FROM output_configs ORDER BY position ASC');
      const rows = stmt.all();
      res.json(rows);
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // POST /api/output-configs - Create new output config (admin only)
  app.post('/api/output-configs', requireAdmin, (req, res) => {
    const { channel, description, value_type, min_value, max_value, device, device_channel } = req.body;

    if (!channel || !value_type) {
      return res.status(400).json({ error: 'Missing required fields: channel, value_type' });
    }

    try {
      // Get max position
      const maxPos = db.prepare('SELECT MAX(position) as max FROM output_configs').get();
      const position = (maxPos.max ?? -1) + 1;

      const stmt = db.prepare(`
        INSERT INTO output_configs (channel, description, value_type, min_value, max_value, device, device_channel, position)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?)
      `);
      const info = stmt.run(
        channel,
        description || '',
        value_type,
        min_value ?? 0,
        max_value ?? 1,
        device || null,
        device_channel || null,
        position
      );

      global.insertChangelog(req.user?.username || 'admin', `Created output config "${channel}"`);

      res.json({
        id: info.lastInsertRowid,
        channel,
        description,
        value_type,
        min_value: min_value ?? 0,
        max_value: max_value ?? 1,
        device,
        device_channel,
        position
      });
    } catch (err) {
      if (err.message.includes('UNIQUE constraint')) {
        return res.status(400).json({ error: 'Channel name already exists' });
      }
      res.status(500).json({ error: err.message });
    }
  });

  // PUT /api/output-configs/:id - Update output config (admin only)
  app.put('/api/output-configs/:id', requireAdmin, (req, res) => {
    const { channel, description, value_type, min_value, max_value, device, device_channel } = req.body;

    try {
      const oldConfig = db.prepare('SELECT * FROM output_configs WHERE id = ?').get(req.params.id);
      if (!oldConfig) {
        return res.status(404).json({ error: 'Output config not found' });
      }

      const stmt = db.prepare(`
        UPDATE output_configs
        SET channel = ?, description = ?, value_type = ?, min_value = ?, max_value = ?, device = ?, device_channel = ?
        WHERE id = ?
      `);
      const info = stmt.run(
        channel ?? oldConfig.channel,
        description ?? oldConfig.description,
        value_type ?? oldConfig.value_type,
        min_value ?? oldConfig.min_value,
        max_value ?? oldConfig.max_value,
        device ?? oldConfig.device,
        device_channel ?? oldConfig.device_channel,
        req.params.id
      );

      if (info.changes > 0) {
        const changes = [];
        if (oldConfig.channel !== channel) changes.push(`channel: ${oldConfig.channel} → ${channel}`);
        if (oldConfig.device !== device) changes.push(`device: ${oldConfig.device || 'none'} → ${device || 'none'}`);
        if (oldConfig.device_channel !== device_channel) changes.push(`device_channel: ${oldConfig.device_channel || 'none'} → ${device_channel || 'none'}`);

        const changeText = changes.length > 0
          ? `Updated output config "${channel}": ${changes.join(', ')}`
          : `Updated output config "${channel}"`;
        global.insertChangelog(req.user?.username || 'admin', changeText);

        res.json({ success: true, id: req.params.id });
      } else {
        res.status(404).json({ error: 'Output config not found' });
      }
    } catch (err) {
      if (err.message.includes('UNIQUE constraint')) {
        return res.status(400).json({ error: 'Channel name already exists' });
      }
      res.status(500).json({ error: err.message });
    }
  });

  // DELETE /api/output-configs/:id - Delete output config (admin only)
  app.delete('/api/output-configs/:id', requireAdmin, (req, res) => {
    try {
      const config = db.prepare('SELECT channel FROM output_configs WHERE id = ?').get(req.params.id);
      if (!config) {
        return res.status(404).json({ error: 'Output config not found' });
      }

      const stmt = db.prepare('DELETE FROM output_configs WHERE id = ?');
      const info = stmt.run(req.params.id);

      if (info.changes > 0) {
        global.insertChangelog(req.user?.username || 'admin', `Deleted output config "${config.channel}"`);
        res.json({ success: true });
      } else {
        res.status(404).json({ error: 'Output config not found' });
      }
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // POST /api/output-configs/reorder - Reorder output configs (admin only)
  app.post('/api/output-configs/reorder', requireAdmin, (req, res) => {
    const { order } = req.body;
    if (!Array.isArray(order)) {
      return res.status(400).json({ error: 'Invalid format' });
    }

    const updateStmt = db.prepare('UPDATE output_configs SET position = ? WHERE id = ?');
    const updateMany = db.transaction((items) => {
      for (const item of items) {
        updateStmt.run(item.position, item.id);
      }
    });

    try {
      updateMany(order);
      res.json({ success: true });
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });
};

uiserver/api/outputs.js (new file, 75 lines)
@@ -0,0 +1,75 @@

/**
 * Outputs API - Output channel definitions and values
 */

module.exports = function setupOutputsApi(app, { db, getOutputChannels, getOutputBindings }) {
  // GET /api/outputs - List output channel definitions
  app.get('/api/outputs', (req, res) => {
    res.json(getOutputChannels());
  });

  // GET /api/outputs/values - Get current output values
  app.get('/api/outputs/values', (req, res) => {
    try {
      if (!db) throw new Error('Database not connected');
      const result = {};
      const stmt = db.prepare(`
        SELECT channel, value FROM output_events
        WHERE id IN (
          SELECT MAX(id) FROM output_events GROUP BY channel
        )
      `);
      const rows = stmt.all();
      rows.forEach(row => {
        result[row.channel] = row.value;
      });
      // Fill in defaults for missing channels
      const outputChannels = getOutputChannels();
      outputChannels.forEach(ch => {
        if (result[ch.channel] === undefined) {
          result[ch.channel] = 0;
        }
      });
      res.json(result);
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // GET /api/outputs/commands - Get desired states for bound devices
  // Agents poll this to get commands. Returns { "device:channel": { state: 0|1 } }
  app.get('/api/outputs/commands', (req, res) => {
    try {
      if (!db) throw new Error('Database not connected');

      // Get current output values
      const stmt = db.prepare(`
        SELECT channel, value FROM output_events
        WHERE id IN (
          SELECT MAX(id) FROM output_events GROUP BY channel
        )
      `);
      const rows = stmt.all();
      const outputValues = {};
      rows.forEach(row => {
        outputValues[row.channel] = row.value;
      });

      // Map to device commands
      const bindings = getOutputBindings();
      const commands = {};
      for (const [outputChannel, binding] of Object.entries(bindings)) {
        const value = outputValues[outputChannel] ?? 0;
        const deviceKey = `${binding.device}:${binding.channel}`;
        commands[deviceKey] = {
          state: value > 0 ? 1 : 0,
          source: outputChannel
        };
      }

      res.json(commands);
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });
};

uiserver/api/readings.js (new file, 124 lines)
@@ -0,0 +1,124 @@

/**
 * Readings API - Sensor and output data for charts
 */

module.exports = function setupReadingsApi(app, { db }) {
  // GET /api/readings
  // Query params: since, until, selection (comma-separated device:channel pairs)
  app.get('/api/readings', (req, res) => {
    try {
      if (!db) throw new Error('Database not connected');
      const { since, until } = req.query;
      const startTime = since || new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString();
      const endTime = until || new Date().toISOString();

      const requestedSensorChannels = []; // [{device, channel}]
      const requestedOutputChannels = []; // [channel]

      if (req.query.selection) {
        const selections = req.query.selection.split(',');
        selections.forEach(s => {
          const lastColonIndex = s.lastIndexOf(':');
          if (lastColonIndex !== -1) {
            const d = s.substring(0, lastColonIndex);
            const c = s.substring(lastColonIndex + 1);
            if (d === 'output') {
              requestedOutputChannels.push(c);
            } else {
              requestedSensorChannels.push({ device: d, channel: c });
            }
          }
        });
      }

      const result = {};

      // 1. Fetch sensor data
      if (requestedSensorChannels.length > 0) {
        let sql = 'SELECT * FROM sensor_events WHERE timestamp > ? AND timestamp <= ? ';
        const params = [startTime, endTime];

        const placeholders = [];
        requestedSensorChannels.forEach(ch => {
          placeholders.push('(device = ? AND channel = ?)');
          params.push(ch.device, ch.channel);
        });
        if (placeholders.length > 0) {
          sql += `AND (${placeholders.join(' OR ')}) `;
        }
        sql += 'ORDER BY timestamp ASC';

        const rows = db.prepare(sql).all(...params);

        // Backfill for sensors
        const backfillStmt = db.prepare(`
          SELECT * FROM sensor_events
          WHERE device = ? AND channel = ?
            AND timestamp <= ?
            AND (until >= ? OR until IS NULL)
          ORDER BY timestamp DESC LIMIT 1
        `);

        const backfillRows = [];
        requestedSensorChannels.forEach(ch => {
          const prev = backfillStmt.get(ch.device, ch.channel, startTime, startTime);
          if (prev) backfillRows.push(prev);
        });

        [...backfillRows, ...rows].forEach(row => {
          const key = `${row.device}:${row.channel}`;
          if (!result[key]) result[key] = [];
          const pt = [row.timestamp, row.value];
          if (row.until) pt.push(row.until);
          result[key].push(pt);
        });
      }

      // 2. Fetch output data
      if (requestedOutputChannels.length > 0) {
        let sql = 'SELECT * FROM output_events WHERE timestamp > ? AND timestamp <= ? ';
        const params = [startTime, endTime];

        const placeholders = requestedOutputChannels.map(() => 'channel = ?');
        sql += `AND (${placeholders.join(' OR ')}) `;
        params.push(...requestedOutputChannels);
        sql += 'ORDER BY timestamp ASC';

        const rows = db.prepare(sql).all(...params);

        // Backfill for outputs
        const backfillStmt = db.prepare(`
          SELECT * FROM output_events
          WHERE channel = ?
            AND timestamp <= ?
            AND (until >= ? OR until IS NULL)
          ORDER BY timestamp DESC LIMIT 1
        `);

        const backfillRows = [];
        requestedOutputChannels.forEach(ch => {
          const prev = backfillStmt.get(ch, startTime, startTime);
          if (prev) {
            backfillRows.push(prev);
          } else {
            // No data at all - add default 0 value at startTime
            backfillRows.push({ channel: ch, timestamp: startTime, value: 0, until: null });
          }
        });

        [...backfillRows, ...rows].forEach(row => {
          const key = `output:${row.channel}`;
          if (!result[key]) result[key] = [];
          const pt = [row.timestamp, row.value];
          if (row.until) pt.push(row.until);
          result[key].push(pt);
        });
      }

      res.json(result);
    } catch (err) {
      console.error(err);
      res.status(500).json({ error: err.message });
    }
  });
};
166
uiserver/api/rules.js
Normal file
166
uiserver/api/rules.js
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
/**
|
||||||
|
* Rules API - CRUD for automation rules
|
||||||
|
*/
|
||||||
|
|
||||||
|
module.exports = function setupRulesApi(app, { db, checkAuth, requireAdmin, runRules, activeRuleIds }) {
|
||||||
|
// Apply checkAuth middleware to rules routes
|
||||||
|
app.use('/api/rules', checkAuth);
|
||||||
|
|
||||||
|
// GET /api/rules/status - Get currently active rule IDs
  app.get('/api/rules/status', (req, res) => {
    res.json({ activeIds: Array.from(activeRuleIds) });
  });

  // GET /api/rules - List all rules
  app.get('/api/rules', (req, res) => {
    try {
      if (!db) throw new Error('Database not connected');
      const stmt = db.prepare('SELECT * FROM rules ORDER BY position ASC, id ASC');
      const rows = stmt.all();
      const rules = rows.map(row => ({
        ...row,
        conditions: JSON.parse(row.conditions || '{}'),
        action: JSON.parse(row.action || '{}')
      }));
      res.json(rules);
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // POST /api/rules - Create rule (admin only)
  app.post('/api/rules', requireAdmin, (req, res) => {
    const { name, type = 'static', enabled = 1, conditions, action } = req.body;
    if (!name || !conditions || !action) {
      return res.status(400).json({ error: 'Missing required fields: name, conditions, action' });
    }
    try {
      const stmt = db.prepare(`
        INSERT INTO rules (name, type, enabled, conditions, action, created_by)
        VALUES (?, ?, ?, ?, ?, ?)
      `);
      const info = stmt.run(
        name,
        type,
        enabled ? 1 : 0,
        JSON.stringify(conditions),
        JSON.stringify(action),
        req.user?.id || null
      );
      runRules(); // Trigger rules immediately
      global.insertChangelog(req.user?.username || 'admin', `Created rule "${name}"`);
      res.json({ id: info.lastInsertRowid, name, type, enabled, conditions, action });
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // PUT /api/rules/:id - Update rule (admin only)
  app.put('/api/rules/:id', requireAdmin, (req, res) => {
    const { name, type, enabled, conditions, action } = req.body;
    try {
      // Get old rule for comparison
      const oldRule = db.prepare('SELECT * FROM rules WHERE id = ?').get(req.params.id);
      if (!oldRule) {
        return res.status(404).json({ error: 'Rule not found' });
      }

      const stmt = db.prepare(`
        UPDATE rules SET name = ?, type = ?, enabled = ?, conditions = ?, action = ?, updated_at = datetime('now')
        WHERE id = ?
      `);
      const info = stmt.run(
        name,
        type || 'static',
        enabled ? 1 : 0,
        JSON.stringify(conditions),
        JSON.stringify(action),
        req.params.id
      );

      if (info.changes > 0) {
        runRules(); // Trigger rules immediately

        // Build detailed changelog
        const changes = [];
        if (oldRule.name !== name) {
          changes.push(`name: "${oldRule.name}" → "${name}"`);
        }
        if (!!oldRule.enabled !== !!enabled) {
          changes.push(`enabled: ${oldRule.enabled ? 'on' : 'off'} → ${enabled ? 'on' : 'off'}`);
        }
        const oldConditions = oldRule.conditions || '{}';
        const newConditions = JSON.stringify(conditions);
        if (oldConditions !== newConditions) {
          changes.push('conditions changed');
        }
        const oldAction = oldRule.action || '{}';
        const newAction = JSON.stringify(action);
        if (oldAction !== newAction) {
          try {
            const oldA = JSON.parse(oldAction);
            const newA = action;
            if (oldA.channel !== newA.channel) {
              changes.push(`action channel: ${oldA.channel} → ${newA.channel}`);
            }
            if (JSON.stringify(oldA.value) !== JSON.stringify(newA.value)) {
              changes.push(`action value: ${JSON.stringify(oldA.value)} → ${JSON.stringify(newA.value)}`);
            }
          } catch (e) {
            changes.push('action changed');
          }
        }

        const changeText = changes.length > 0
          ? `Updated rule "${name}": ${changes.join(', ')}`
          : `Updated rule "${name}" (no changes)`;
        global.insertChangelog(req.user?.username || 'admin', changeText);

        res.json({ id: req.params.id, name, type, enabled, conditions, action });
      } else {
        res.status(404).json({ error: 'Rule not found' });
      }
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // DELETE /api/rules/:id - Delete rule (admin only)
  app.delete('/api/rules/:id', requireAdmin, (req, res) => {
    try {
      const stmt = db.prepare('DELETE FROM rules WHERE id = ?');
      const ruleName = db.prepare('SELECT name FROM rules WHERE id = ?').get(req.params.id)?.name || 'Unknown Rule';
      const info = stmt.run(req.params.id);
      if (info.changes > 0) {
        runRules(); // Trigger rules immediately
        global.insertChangelog(req.user?.username || 'admin', `Deleted rule "${ruleName}" (ID: ${req.params.id})`);
        res.json({ success: true });
      } else {
        res.status(404).json({ error: 'Rule not found' });
      }
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // POST /api/rules/reorder - Reorder rules (admin only)
  app.post('/api/rules/reorder', requireAdmin, (req, res) => {
    const { order } = req.body;
    if (!Array.isArray(order)) return res.status(400).json({ error: 'Invalid format' });

    const updateStmt = db.prepare('UPDATE rules SET position = ? WHERE id = ?');
    const updateMany = db.transaction((items) => {
      for (const item of items) {
        updateStmt.run(item.position, item.id);
      }
    });

    try {
      updateMany(order);
      runRules(); // Trigger rules immediately
      res.json({ success: true });
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });
};
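
For reference, a minimal client-side sketch (not part of this diff) of how an admin tool might call the reorder and status endpoints above. The base URL is an assumption (the dev server defaults to port 3905 elsewhere in this change), and the token is the JWT issued by the login endpoint; payload shapes follow the handlers above.

```js
const BASE = 'http://localhost:3905'; // assumed dev-server address

async function reorderRules(token, ruleIds) {
  // POST /api/rules/reorder expects { order: [{ id, position }, ...] }
  const order = ruleIds.map((id, position) => ({ id, position }));
  const res = await fetch(`${BASE}/api/rules/reorder`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
    body: JSON.stringify({ order })
  });
  return res.json(); // { success: true } on success
}

async function getActiveRuleIds() {
  // GET /api/rules/status returns { activeIds: [...] }
  const res = await fetch(`${BASE}/api/rules/status`);
  return (await res.json()).activeIds;
}
```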
186 uiserver/api/views.js (new file)
@@ -0,0 +1,186 @@
/**
 * Views API - CRUD for dashboard views
 */

module.exports = function setupViewsApi(app, { db, checkAuth, requireAdmin }) {
  // Apply checkAuth middleware to views routes
  app.use('/api/views', checkAuth);

  // POST /api/views - Create view (admin only)
  app.post('/api/views', requireAdmin, (req, res) => {
    const { name, config } = req.body;
    try {
      const stmt = db.prepare('INSERT INTO views (name, config, created_by) VALUES (?, ?, ?)');
      const info = stmt.run(name, JSON.stringify(config), req.user.id);
      global.insertChangelog(req.user.username, `Created view "${name}"`);
      res.json({ id: info.lastInsertRowid, name, config });
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // GET /api/views - List all views (public)
  app.get('/api/views', (req, res) => {
    try {
      const stmt = db.prepare('SELECT * FROM views ORDER BY position ASC, id ASC');
      const rows = stmt.all();
      const views = rows.map(row => {
        try {
          return { ...row, config: JSON.parse(row.config) };
        } catch (e) {
          return row;
        }
      });
      res.json(views);
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // GET /api/views/:id - Get single view
  app.get('/api/views/:id', (req, res) => {
    try {
      const stmt = db.prepare('SELECT * FROM views WHERE id = ?');
      const view = stmt.get(req.params.id);
      if (view) {
        view.config = JSON.parse(view.config);
        res.json(view);
      } else {
        res.status(404).json({ error: 'View not found' });
      }
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // DELETE /api/views/:id - Delete view (admin only)
  app.delete('/api/views/:id', requireAdmin, (req, res) => {
    try {
      const stmt = db.prepare('DELETE FROM views WHERE id = ?');
      const viewName = db.prepare('SELECT name FROM views WHERE id = ?').get(req.params.id)?.name || 'Unknown View';
      const info = stmt.run(req.params.id);
      if (info.changes > 0) {
        global.insertChangelog(req.user.username, `Deleted view "${viewName}" (ID: ${req.params.id})`);
        res.json({ success: true });
      } else {
        res.status(404).json({ error: 'View not found' });
      }
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // PUT /api/views/:id - Update view (admin only)
  app.put('/api/views/:id', requireAdmin, (req, res) => {
    const { name, config } = req.body;
    try {
      // Get old view for comparison
      const oldView = db.prepare('SELECT * FROM views WHERE id = ?').get(req.params.id);
      if (!oldView) {
        return res.status(404).json({ error: 'View not found' });
      }

      const stmt = db.prepare('UPDATE views SET name = ?, config = ? WHERE id = ?');
      const info = stmt.run(name, JSON.stringify(config), req.params.id);
      if (info.changes > 0) {
        // Build detailed changelog
        const changes = [];

        // Check name change
        if (oldView.name !== name) {
          changes.push(`renamed: "${oldView.name}" → "${name}"`);
        }

        // Parse configs for comparison
        let oldConfig = {};
        try { oldConfig = JSON.parse(oldView.config || '{}'); } catch (e) { }
        const newConfig = config || {};

        // Compare channels
        const oldChannels = (oldConfig.channels || []).map(ch =>
          typeof ch === 'string' ? ch : ch.channel
        );
        const newChannels = (newConfig.channels || []).map(ch =>
          typeof ch === 'string' ? ch : ch.channel
        );

        const added = newChannels.filter(ch => !oldChannels.includes(ch));
        const removed = oldChannels.filter(ch => !newChannels.includes(ch));

        if (added.length > 0) {
          changes.push(`added channels: ${added.join(', ')}`);
        }
        if (removed.length > 0) {
          changes.push(`removed channels: ${removed.join(', ')}`);
        }

        // Check for color/fill changes
        const oldChannelConfigs = {};
        (oldConfig.channels || []).forEach(ch => {
          if (typeof ch === 'object') {
            oldChannelConfigs[ch.channel] = ch;
          }
        });
        const newChannelConfigs = {};
        (newConfig.channels || []).forEach(ch => {
          if (typeof ch === 'object') {
            newChannelConfigs[ch.channel] = ch;
          }
        });

        const colorChanges = [];
        for (const ch of newChannels) {
          const oldCh = oldChannelConfigs[ch] || {};
          const newCh = newChannelConfigs[ch] || {};
          if (oldCh.color !== newCh.color || oldCh.fillColor !== newCh.fillColor) {
            colorChanges.push(ch.split(':').pop());
          }
        }
        if (colorChanges.length > 0) {
          changes.push(`colors changed for: ${colorChanges.join(', ')}`);
        }

        // Check order change
        if (added.length === 0 && removed.length === 0 &&
            JSON.stringify(oldChannels) !== JSON.stringify(newChannels)) {
          changes.push('channel order changed');
        }

        const changeText = changes.length > 0
          ? `Updated view "${name}": ${changes.join('; ')}`
          : `Updated view "${name}" (no significant changes)`;
        global.insertChangelog(req.user.username, changeText);

        res.json({ id: req.params.id, name, config });
      } else {
        res.status(404).json({ error: 'View not found' });
      }
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // POST /api/views/reorder - Reorder views (admin only)
  app.post('/api/views/reorder', requireAdmin, (req, res) => {
    const { order } = req.body;
    console.log('[API] Reorder request:', order);
    if (!Array.isArray(order)) return res.status(400).json({ error: 'Invalid format' });

    const updateStmt = db.prepare('UPDATE views SET position = ? WHERE id = ?');
    const updateMany = db.transaction((items) => {
      for (const item of items) {
        console.log('[API] Updating view', item.id, 'to position', item.position);
        updateStmt.run(item.position, item.id);
      }
    });

    try {
      updateMany(order);
      console.log('[API] Reorder successful');
      res.json({ success: true });
    } catch (err) {
      console.error('[API] Reorder error:', err);
      res.status(500).json({ error: err.message });
    }
  });
};
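
As an illustration (not part of the diff), this is the `config` shape the changelog logic in the PUT handler above inspects: `channels` entries are either plain channel strings or objects with per-channel styling. The channel names and colors here are made up.

```js
const exampleViewConfig = {
  channels: [
    'ac:controller:temperature',                          // plain string form
    { channel: 'ac:controller:co2',                       // styled object form
      color: '#b8bb26', fillColor: 'rgba(184,187,38,0.2)' }
  ]
};

// Creating a view with it (requires an admin JWT):
// await fetch('/api/views', {
//   method: 'POST',
//   headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
//   body: JSON.stringify({ name: 'Tent Climate', config: exampleViewConfig })
// });
```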
@@ -1,18 +1,16 @@
 import React, { Component } from 'react';
 import { BrowserRouter, Routes, Route, Link, Navigate } from 'react-router-dom';
-import { AppBar, Toolbar, Typography, Button, Box, IconButton, CssBaseline } from '@mui/material';
+import { AppBar, Toolbar, Typography, Button, Box, CssBaseline } from '@mui/material';
 import { ThemeProvider, createTheme } from '@mui/material/styles';
-import SettingsIcon from '@mui/icons-material/Settings';
-import ShowChartIcon from '@mui/icons-material/ShowChart';
 import DashboardIcon from '@mui/icons-material/Dashboard';
 import RuleIcon from '@mui/icons-material/Rule';
+import SettingsInputComponentIcon from '@mui/icons-material/SettingsInputComponent';

-import Settings from './components/Settings';
-import Chart from './components/Chart';
 import Login from './components/Login';
 import ViewManager from './components/ViewManager';
 import ViewDisplay from './components/ViewDisplay';
 import RuleEditor from './components/RuleEditor';
+import OutputConfigEditor from './components/OutputConfigEditor';

 const darkTheme = createTheme({
   palette: {
@@ -34,23 +32,12 @@ export default class App extends Component {
   constructor(props) {
     super(props);
     this.state = {
-      selectedChannels: [],
       user: null, // { username, role, token }
       loading: true
     };
   }

   componentDidMount() {
-    // Load selection from local storage
-    const saved = localStorage.getItem('selectedChannels');
-    if (saved) {
-      try {
-        this.setState({ selectedChannels: JSON.parse(saved) });
-      } catch (e) {
-        console.error("Failed to parse saved channels");
-      }
-    }
-
     // Check for existing token
     const token = localStorage.getItem('authToken');
     const username = localStorage.getItem('authUser');
@@ -63,11 +50,6 @@ export default class App extends Component {
     this.setState({ loading: false });
   }

-  handleSelectionChange = (newSelection) => {
-    this.setState({ selectedChannels: newSelection });
-    localStorage.setItem('selectedChannels', JSON.stringify(newSelection));
-  };
-
   handleLogin = (userData) => {
     this.setState({ user: userData });
     localStorage.setItem('authToken', userData.token);
@@ -83,7 +65,7 @@ export default class App extends Component {
   };

   render() {
-    const { selectedChannels, user, loading } = this.state;
+    const { user } = this.state;

     // While checking auth, we could show loader, but it's sync here mostly.

@@ -99,16 +81,11 @@ export default class App extends Component {
             </Typography>

             <Button color="inherit" component={Link} to="/" startIcon={<DashboardIcon />}>Views</Button>
-            {user && (
-              <>
-                <Button color="inherit" component={Link} to="/live" startIcon={<ShowChartIcon />}>Live</Button>
-              </>
-            )}
             {user && user.role === 'admin' && (
+              <>
               <Button color="inherit" component={Link} to="/rules" startIcon={<RuleIcon />}>Rules</Button>
-            )}
-            {user && (
-              <Button color="inherit" component={Link} to="/settings" startIcon={<SettingsIcon />}>Settings</Button>
+              <Button color="inherit" component={Link} to="/outputs" startIcon={<SettingsInputComponentIcon />}>Outputs</Button>
+              </>
             )}

             {user ? (
@@ -123,17 +100,7 @@ export default class App extends Component {
             <Route path="/" element={<ViewManager user={user} />} />
             <Route path="/views/:id" element={<ViewDisplay />} />
             <Route path="/rules" element={<RuleEditor user={user} />} />
-            <Route path="/live" element={
-              <Chart
-                selectedChannels={selectedChannels}
-              />
-            } />
-            <Route path="/settings" element={
-              <Settings
-                selectedChannels={selectedChannels}
-                onSelectionChange={this.handleSelectionChange}
-              />
-            } />
+            <Route path="/outputs" element={<OutputConfigEditor user={user} />} />
             <Route path="/login" element={<Login onLogin={this.handleLogin} />} />
             <Route path="*" element={<Navigate to="/" replace />} />
           </Routes>
371 uiserver/src/components/OutputConfigEditor.js (new file)
@@ -0,0 +1,371 @@
import React, { Component } from 'react';
import {
  Container, Typography, Paper, List, ListItem, ListItemText,
  Button, TextField, Dialog, DialogTitle, DialogContent, DialogActions,
  FormControl, InputLabel, Select, MenuItem, Box, IconButton,
  Chip, Switch, FormControlLabel
} from '@mui/material';
import SettingsInputComponentIcon from '@mui/icons-material/SettingsInputComponent';
import AddIcon from '@mui/icons-material/Add';
import DeleteIcon from '@mui/icons-material/Delete';
import EditIcon from '@mui/icons-material/Edit';
import ArrowUpwardIcon from '@mui/icons-material/ArrowUpward';
import ArrowDownwardIcon from '@mui/icons-material/ArrowDownward';
import LinkIcon from '@mui/icons-material/Link';
import LinkOffIcon from '@mui/icons-material/LinkOff';

class OutputConfigEditor extends Component {
  constructor(props) {
    super(props);
    this.state = {
      configs: [],
      loading: true,
      error: null,

      // Dialog state
      open: false,
      editingId: null,
      channel: '',
      description: '',
      value_type: 'boolean',
      min_value: 0,
      max_value: 1,
      device: '',
      device_channel: ''
    };
  }

  componentDidMount() {
    this.loadConfigs();
  }

  isAdmin() {
    const { user } = this.props;
    return user && user.role === 'admin';
  }

  loadConfigs = async () => {
    try {
      const res = await fetch('/api/output-configs');
      const configs = await res.json();
      this.setState({ configs, loading: false });
    } catch (err) {
      this.setState({ error: err.message, loading: false });
    }
  };

  handleOpenCreate = () => {
    this.setState({
      open: true,
      editingId: null,
      channel: '',
      description: '',
      value_type: 'boolean',
      min_value: 0,
      max_value: 1,
      device: '',
      device_channel: ''
    });
  };

  handleOpenEdit = (config, e) => {
    e.stopPropagation();
    this.setState({
      open: true,
      editingId: config.id,
      channel: config.channel,
      description: config.description || '',
      value_type: config.value_type,
      min_value: config.min_value,
      max_value: config.max_value,
      device: config.device || '',
      device_channel: config.device_channel || ''
    });
  };

  handleSave = async () => {
    const { editingId, channel, description, value_type, min_value, max_value, device, device_channel } = this.state;
    const { user } = this.props;

    if (!channel) {
      alert('Channel name is required');
      return;
    }

    const url = editingId ? `/api/output-configs/${editingId}` : '/api/output-configs';
    const method = editingId ? 'PUT' : 'POST';

    try {
      const res = await fetch(url, {
        method,
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${user.token}`
        },
        body: JSON.stringify({
          channel,
          description,
          value_type,
          min_value: parseFloat(min_value),
          max_value: parseFloat(max_value),
          device: device || null,
          device_channel: device_channel || null
        })
      });

      if (res.ok) {
        this.setState({ open: false });
        this.loadConfigs();
      } else {
        const err = await res.json();
        alert('Failed: ' + err.error);
      }
    } catch (err) {
      alert('Failed: ' + err.message);
    }
  };

  handleDelete = async (id, e) => {
    e.stopPropagation();
    if (!window.confirm('Delete this output config?')) return;

    const { user } = this.props;
    try {
      await fetch(`/api/output-configs/${id}`, {
        method: 'DELETE',
        headers: { 'Authorization': `Bearer ${user.token}` }
      });
      this.loadConfigs();
    } catch (err) {
      alert('Failed to delete: ' + err.message);
    }
  };

  moveConfig = async (idx, dir) => {
    const newConfigs = [...this.state.configs];
    const target = idx + dir;
    if (target < 0 || target >= newConfigs.length) return;

    [newConfigs[idx], newConfigs[target]] = [newConfigs[target], newConfigs[idx]];
    this.setState({ configs: newConfigs });

    const order = newConfigs.map((c, i) => ({ id: c.id, position: i }));
    const { user } = this.props;

    try {
      await fetch('/api/output-configs/reorder', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${user.token}`
        },
        body: JSON.stringify({ order })
      });
    } catch (err) {
      console.error('Failed to save order', err);
    }
  };

  render() {
    const { configs, loading, error, open, editingId, channel, description, value_type, min_value, max_value, device, device_channel } = this.state;
    const isAdmin = this.isAdmin();

    if (loading) return <Container sx={{ mt: 4 }}><Typography>Loading...</Typography></Container>;
    if (error) return <Container sx={{ mt: 4 }}><Typography color="error">{error}</Typography></Container>;

    return (
      <Container maxWidth="lg" sx={{ mt: 4 }}>
        <Paper sx={{ p: 2, mb: 4, display: 'flex', alignItems: 'center', justifyContent: 'space-between' }}>
          <Typography variant="h5">
            <SettingsInputComponentIcon sx={{ mr: 1, verticalAlign: 'middle' }} />
            Output Configuration
          </Typography>
          {isAdmin && (
            <Button variant="contained" startIcon={<AddIcon />} onClick={this.handleOpenCreate}>
              Add Output
            </Button>
          )}
        </Paper>

        <Paper sx={{ p: 2 }}>
          <Typography variant="h6" gutterBottom>Output Channels</Typography>
          <List>
            {configs.map((config, idx) => (
              <ListItem
                key={config.id}
                sx={{
                  borderRadius: 1,
                  mb: 1,
                  border: '1px solid #504945',
                  bgcolor: config.device ? 'rgba(131, 165, 152, 0.1)' : 'transparent'
                }}
              >
                <ListItemText
                  primary={
                    <Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
                      <Typography variant="subtitle1" sx={{ fontWeight: 'bold' }}>
                        {config.channel}
                      </Typography>
                      <Chip
                        size="small"
                        label={config.value_type}
                        color={config.value_type === 'boolean' ? 'default' : 'info'}
                      />
                      {config.device ? (
                        <Chip
                          size="small"
                          icon={<LinkIcon />}
                          label={`${config.device}:${config.device_channel}`}
                          color="success"
                          variant="outlined"
                        />
                      ) : (
                        <Chip
                          size="small"
                          icon={<LinkOffIcon />}
                          label="unbound"
                          color="warning"
                          variant="outlined"
                        />
                      )}
                    </Box>
                  }
                  secondary={
                    <Box>
                      <Typography variant="body2" color="text.secondary">
                        {config.description || 'No description'}
                      </Typography>
                      {config.value_type === 'number' && (
                        <Typography variant="body2" color="text.secondary">
                          Range: {config.min_value} - {config.max_value}
                        </Typography>
                      )}
                    </Box>
                  }
                />
                {isAdmin && (
                  <Box sx={{ display: 'flex', alignItems: 'center' }}>
                    <IconButton size="small" onClick={() => this.moveConfig(idx, -1)} disabled={idx === 0}>
                      <ArrowUpwardIcon />
                    </IconButton>
                    <IconButton size="small" onClick={() => this.moveConfig(idx, 1)} disabled={idx === configs.length - 1}>
                      <ArrowDownwardIcon />
                    </IconButton>
                    <IconButton onClick={(e) => this.handleOpenEdit(config, e)}>
                      <EditIcon />
                    </IconButton>
                    <IconButton color="error" onClick={(e) => this.handleDelete(config.id, e)}>
                      <DeleteIcon />
                    </IconButton>
                  </Box>
                )}
              </ListItem>
            ))}
            {configs.length === 0 && (
              <Typography color="text.secondary" sx={{ p: 2 }}>
                No output channels defined. {isAdmin ? 'Click "Add Output" to create one.' : ''}
              </Typography>
            )}
          </List>
        </Paper>

        {/* Edit/Create Dialog */}
        <Dialog open={open} onClose={() => this.setState({ open: false })} maxWidth="sm" fullWidth>
          <DialogTitle>{editingId ? 'Edit Output Config' : 'Add Output Config'}</DialogTitle>
          <DialogContent>
            <Box sx={{ display: 'flex', flexDirection: 'column', gap: 2, mt: 1 }}>
              <TextField
                label="Channel Name"
                value={channel}
                onChange={e => this.setState({ channel: e.target.value })}
                fullWidth
                placeholder="e.g., CircFanLevel"
              />
              <TextField
                label="Description"
                value={description}
                onChange={e => this.setState({ description: e.target.value })}
                fullWidth
                placeholder="e.g., Circulation Fan Level"
              />
              <FormControl fullWidth>
                <InputLabel>Value Type</InputLabel>
                <Select
                  value={value_type}
                  label="Value Type"
                  onChange={e => {
                    const newType = e.target.value;
                    // Auto-select compatible device: number->ac, boolean->tapo
                    const newDevice = device ? (newType === 'number' ? 'ac' : 'tapo') : '';
                    this.setState({
                      value_type: newType,
                      min_value: 0,
                      max_value: newType === 'boolean' ? 1 : 10,
                      device: newDevice
                    });
                  }}
                >
                  <MenuItem value="boolean">Boolean (on/off)</MenuItem>
                  <MenuItem value="number">Number (0-10 range)</MenuItem>
                </Select>
              </FormControl>
              {value_type === 'number' && (
                <Box sx={{ display: 'flex', gap: 2 }}>
                  <TextField
                    label="Min Value"
                    type="number"
                    value={min_value}
                    onChange={e => this.setState({ min_value: e.target.value })}
                    sx={{ flex: 1 }}
                  />
                  <TextField
                    label="Max Value"
                    type="number"
                    value={max_value}
                    onChange={e => this.setState({ max_value: e.target.value })}
                    sx={{ flex: 1 }}
                  />
                </Box>
              )}

              <Typography variant="subtitle2" sx={{ mt: 2 }}>Device Binding (Optional)</Typography>
              <Box sx={{ display: 'flex', gap: 2 }}>
                <FormControl sx={{ flex: 1 }}>
                  <InputLabel>Device</InputLabel>
                  <Select
                    value={device}
                    label="Device"
                    onChange={e => this.setState({ device: e.target.value })}
                  >
                    <MenuItem value=""><em>Not bound</em></MenuItem>
                    {value_type === 'boolean' && <MenuItem value="tapo">tapo (Switch)</MenuItem>}
                    {value_type === 'number' && <MenuItem value="ac">ac (Level)</MenuItem>}
                  </Select>
                </FormControl>
                <TextField
                  label="Device Channel"
                  value={device_channel}
                  onChange={e => this.setState({ device_channel: e.target.value })}
                  sx={{ flex: 1 }}
                  placeholder={value_type === 'number' ? 'e.g., tent:fan' : 'e.g., r0, c'}
                  disabled={!device}
                />
              </Box>
              {device && (
                <Typography variant="caption" color="text.secondary">
                  Binding type: {device === 'ac' ? 'Level (0-10)' : 'Switch (on/off)'}
                </Typography>
              )}
            </Box>
          </DialogContent>
          <DialogActions>
            <Button onClick={() => this.setState({ open: false })}>Cancel</Button>
            <Button variant="contained" onClick={this.handleSave}>Save</Button>
          </DialogActions>
        </Dialog>
      </Container>
    );
  }
}

export default OutputConfigEditor;
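
As a usage note (not part of the diff): the request body `handleSave` sends for a bound level-type output looks roughly like this. The concrete values are illustrative and mirror one of the bindings that used to be hardcoded.

```js
// Illustrative body for POST /api/output-configs (or PUT /api/output-configs/:id)
const exampleOutputConfig = {
  channel: 'CircFanLevel',             // virtual output channel name
  description: 'Circulation Fan Level',
  value_type: 'number',                // 'boolean' or 'number'
  min_value: 0,
  max_value: 10,
  device: 'ac',                        // 'ac' (level) or 'tapo' (switch), or null when unbound
  device_channel: 'tent:fan'           // physical channel on that device
};
```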
@@ -178,15 +178,7 @@ class ViewManager extends Component {

   // Emoji for rule based on action channel
   getRuleEmoji = (rule) => {
-    const channel = rule.action?.channel || '';
-    const emojis = {
-      'CircFanLevel': '🌀',
-      'CO2Valve': '🫧',
-      'BigDehumid': '💧',
-      'TentExhaust': '💨',
-      'RoomExhaust': '🌬️'
-    };
-    return emojis[channel] || '⚡';
+    return '⚡';
   };

   // Format conditions for display - returns React components with visual grouping
@@ -304,14 +296,7 @@ class ViewManager extends Component {
   // Format action for display
   formatRuleAction = (action) => {
     if (!action?.channel) return '?';
-    const channelNames = {
-      'CircFanLevel': '🌀 Circ Fan',
-      'CO2Valve': '🫧 CO2',
-      'BigDehumid': '💧 Big Dehumid',
-      'TentExhaust': '💨 Tent Exhaust Fan',
-      'RoomExhaust': '🌬️ Room Exhaust'
-    };
-    const name = channelNames[action.channel] || action.channel;
+    const name = action.channel;

     if (action.value && action.value.type === 'calculated') {
       return `${name} = (${action.value.sensorA} - ${action.value.sensorB || '0'}) * ${action.value.factor} + ${action.value.offset}`;
@@ -12,7 +12,9 @@ config();
 // Database connection for Dev Server API
 const dbPath = process.env.DB_PATH || path.resolve(__dirname, '../server/data/sensors.db');
 const JWT_SECRET = process.env.JWT_SECRET || 'dev-secret-key-change-me';
-const WS_PORT = process.env.WS_PORT || 3962;
+const WS_PORT = parseInt(process.env.WS_PORT || '3962', 10);
+const DEV_SERVER_PORT = parseInt(process.env.DEV_SERVER_PORT || '3905', 10);
+const RULE_RUNNER_INTERVAL = parseInt(process.env.RULE_RUNNER_INTERVAL || '10000', 10);
 let db;

 try {
@@ -29,6 +31,22 @@ try {
     )
   `);

+  // Create output_configs table (unified channels + bindings)
+  // Note: binding_type derived from device (ac=level, tapo=switch)
+  db.exec(`
+    CREATE TABLE IF NOT EXISTS output_configs (
+      id INTEGER PRIMARY KEY AUTOINCREMENT,
+      channel TEXT UNIQUE NOT NULL,
+      description TEXT,
+      value_type TEXT NOT NULL,
+      min_value REAL DEFAULT 0,
+      max_value REAL DEFAULT 1,
+      device TEXT,
+      device_channel TEXT,
+      position INTEGER DEFAULT 0
+    )
+  `);
+
   // Helper to insert changelog entry
   global.insertChangelog = (user, text) => {
     try {
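
A minimal seeding sketch (not part of the diff), assuming the same better-sqlite3 `db` handle; the row values are illustrative and mirror one of the previously hardcoded bindings.

```js
db.prepare(`
  INSERT OR IGNORE INTO output_configs
    (channel, description, value_type, min_value, max_value, device, device_channel, position)
  VALUES (?, ?, ?, ?, ?, ?, ?, ?)
`).run('CircFanLevel', 'Circulation Fan Level', 'number', 0, 10, 'ac', 'tent:fan', 0);
```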
@@ -45,15 +63,36 @@ try {
   console.error(`[UI Server] Failed to connect to database at ${dbPath}:`, err.message);
 }

-// Output bindings: map virtual outputs to physical devices
-// Format: outputChannel -> { device, channel, type }
-const OUTPUT_BINDINGS = {
-  'BigDehumid': { device: 'tapo', channel: 'r0', type: 'switch' },
-  'CO2Valve': { device: 'tapo', channel: 'c', type: 'switch' },
-  'TentExhaust': { device: 'tapo', channel: 'fantent', type: 'switch' },
-  'CircFanLevel': { device: 'ac', channel: 'tent:fan', type: 'level' },
-  'RoomExhaust': { device: 'ac', channel: 'wall-fan', type: 'level' },
-};
+// Load output channels from database (replaces hardcoded OUTPUT_CHANNELS)
+function getOutputChannels() {
+  if (!db) return [];
+  const rows = db.prepare('SELECT * FROM output_configs ORDER BY position ASC').all();
+  return rows.map(r => ({
+    channel: r.channel,
+    type: r.value_type,
+    min: r.min_value,
+    max: r.max_value,
+    description: r.description
+  }));
+}
+
+// Load output bindings from database (replaces hardcoded OUTPUT_BINDINGS)
+// Binding type derived: ac=level, tapo=switch
+function getOutputBindings() {
+  if (!db) return {};
+  const rows = db.prepare('SELECT * FROM output_configs WHERE device IS NOT NULL').all();
+  const bindings = {};
+  for (const r of rows) {
+    if (r.device && r.device_channel) {
+      bindings[r.channel] = {
+        device: r.device,
+        channel: r.device_channel,
+        type: r.device === 'ac' ? 'level' : 'switch'
+      };
+    }
+  }
+  return bindings;
+}

 // =============================================
 // WebSocket Server for Agents (port 3962)
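
For orientation (illustrative, based on the seed row sketched earlier), the lookup structures these helpers return would look like:

```js
// getOutputChannels() -> [{ channel: 'CircFanLevel', type: 'number', min: 0, max: 10, description: 'Circulation Fan Level' }]
// getOutputBindings() -> { CircFanLevel: { device: 'ac', channel: 'tent:fan', type: 'level' } }
```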
@@ -313,6 +352,7 @@ function syncOutputStates() {
   if (!db) return;

   try {
+    const bindings = getOutputBindings();
     // Get current output values
     const stmt = db.prepare(`
       SELECT channel, value FROM output_events
@@ -323,7 +363,7 @@ function syncOutputStates() {
     for (const row of rows) {
       // Only sync non-zero values
       if (row.value > 0) {
-        const binding = OUTPUT_BINDINGS[row.channel];
+        const binding = bindings[row.channel];
         if (binding) {
           let commandValue = row.value;
           if (binding.type === 'switch') {
@@ -354,15 +394,6 @@ setInterval(syncOutputStates, 60000);
 // RULE ENGINE (Global Scope)
 // =============================================

-// Virtual output channel definitions
-const OUTPUT_CHANNELS = [
-  { channel: 'CircFanLevel', type: 'number', min: 0, max: 10, description: 'Circulation Fan Level' },
-  { channel: 'CO2Valve', type: 'boolean', min: 0, max: 1, description: 'CO2 Valve' },
-  { channel: 'BigDehumid', type: 'boolean', min: 0, max: 1, description: 'Big Dehumidifier' },
-  { channel: 'TentExhaust', type: 'boolean', min: 0, max: 1, description: 'Tent Exhaust Fan' },
-  { channel: 'RoomExhaust', type: 'number', min: 0, max: 10, description: 'Room Exhaust Fan' },
-];
-
 // Get current sensor value
 function getSensorValue(channel) {
   // channel format: "device:channel" e.g. "ac:controller:co2"
@@ -418,7 +449,8 @@ function writeOutputValue(channel, value) {
   console.log(`[RuleRunner] Output changed: ${channel} = ${value}`);

   // Send command to bound physical device
-  const binding = OUTPUT_BINDINGS[channel];
+  const bindings = getOutputBindings();
+  const binding = bindings[channel];
   if (binding) {
     let commandValue = value;
     if (binding.type === 'switch') {
@@ -531,7 +563,8 @@ function runRules() {

   // Default all outputs to OFF (0) - if no rule sets them, they stay off
   const desiredOutputs = {};
-  for (const ch of OUTPUT_CHANNELS) {
+  const outputChannels = getOutputChannels();
+  for (const ch of outputChannels) {
     desiredOutputs[ch.channel] = 0;
   }

@@ -572,12 +605,16 @@ function runRules() {
     console.error('[RuleRunner] Error running rules:', err.message);
   }
 }

 // Also sync immediately on startup after a short delay
 setTimeout(syncOutputStates, 5000);

 // Start the WebSocket server
 const agentWss = createAgentWebSocketServer();

+// Import API setup
+const setupAllApis = require('./api');
+
 module.exports = {
   entry: './src/index.js',
   output: {
@@ -621,7 +658,7 @@ module.exports = {
     }),
   ],
   devServer: {
-    port: 3905,
+    port: DEV_SERVER_PORT,
     historyApiFallback: true,
     hot: true,
     allowedHosts: 'all',
@@ -634,484 +671,26 @@ module.exports = {
|
|||||||
throw new Error('webpack-dev-server is not defined');
|
throw new Error('webpack-dev-server is not defined');
|
||||||
}
|
}
|
||||||
|
|
||||||
// API Endpoints
|
// Setup body parser
|
||||||
const app = devServer.app;
|
const app = devServer.app;
|
||||||
const bodyParser = require('body-parser');
|
const bodyParser = require('body-parser');
|
||||||
app.use(bodyParser.json());
|
app.use(bodyParser.json());
|
||||||
|
|
||||||
// --- Auth API ---
|
// Setup all API routes from extracted modules
|
||||||
app.post('/api/login', (req, res) => {
|
setupAllApis(app, {
|
||||||
const { username, password } = req.body;
|
db,
|
||||||
try {
|
bcrypt,
|
||||||
const stmt = db.prepare('SELECT * FROM users WHERE username = ?');
|
jwt,
|
||||||
const user = stmt.get(username);
|
JWT_SECRET,
|
||||||
|
getOutputChannels,
|
||||||
if (!user || !bcrypt.compareSync(password, user.password_hash)) {
|
getOutputBindings,
|
||||||
return res.status(401).json({ error: 'Invalid credentials' });
|
runRules,
|
||||||
}
|
activeRuleIds
|
||||||
|
|
||||||
const token = jwt.sign({
|
|
||||||
id: user.id,
|
|
||||||
username: user.username,
|
|
||||||
role: user.role
|
|
||||||
}, JWT_SECRET, { expiresIn: '24h' });
|
|
||||||
|
|
||||||
res.json({ token, role: user.role, username: user.username });
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Middleware to check auth (Optional for read, required for write)
|
// Start rule runner
|
||||||
const checkAuth = (req, res, next) => {
|
const ruleRunnerInterval = setInterval(runRules, RULE_RUNNER_INTERVAL);
|
||||||
const authHeader = req.headers.authorization;
|
console.log(`[RuleRunner] Started background job (${RULE_RUNNER_INTERVAL / 1000}s interval)`);
|
||||||
if (authHeader) {
|
|
||||||
const token = authHeader.split(' ')[1];
|
|
||||||
jwt.verify(token, JWT_SECRET, (err, user) => {
|
|
||||||
if (user) req.user = user;
|
|
||||||
next();
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
next();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const requireAdmin = (req, res, next) => {
|
|
||||||
if (!req.user || req.user.role !== 'admin') {
|
|
||||||
return res.status(403).json({ error: 'Admin access required' });
|
|
||||||
}
|
|
||||||
next();
|
|
||||||
};
|
|
||||||
|
|
||||||
app.use('/api/views', checkAuth);
|
|
||||||
|
|
||||||
// --- Views API ---
|
|
||||||
app.post('/api/views', requireAdmin, (req, res) => {
|
|
||||||
const { name, config } = req.body;
|
|
||||||
try {
|
|
||||||
const stmt = db.prepare('INSERT INTO views (name, config, created_by) VALUES (?, ?, ?)');
|
|
||||||
const info = stmt.run(name, JSON.stringify(config), req.user.id);
|
|
||||||
global.insertChangelog(req.user.username, `Created view "${name}"`);
|
|
||||||
res.json({ id: info.lastInsertRowid, name, config });
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Publicly list views
|
|
||||||
app.get('/api/views', (req, res) => {
|
|
||||||
try {
|
|
||||||
const stmt = db.prepare('SELECT * FROM views ORDER BY position ASC, id ASC');
|
|
||||||
const rows = stmt.all();
|
|
||||||
const views = rows.map(row => {
|
|
||||||
try {
|
|
||||||
return { ...row, config: JSON.parse(row.config) };
|
|
||||||
} catch (e) {
|
|
||||||
return row;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
res.json(views);
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
app.get('/api/views/:id', (req, res) => {
|
|
||||||
try {
|
|
||||||
const stmt = db.prepare('SELECT * FROM views WHERE id = ?');
|
|
||||||
const view = stmt.get(req.params.id);
|
|
||||||
if (view) {
|
|
||||||
view.config = JSON.parse(view.config);
|
|
||||||
res.json(view);
|
|
||||||
} else {
|
|
||||||
res.status(404).json({ error: 'View not found' });
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Delete View
|
|
||||||
app.delete('/api/views/:id', requireAdmin, (req, res) => {
|
|
||||||
try {
|
|
||||||
const stmt = db.prepare('DELETE FROM views WHERE id = ?');
|
|
||||||
// Get name before delete for logging
|
|
||||||
const viewName = db.prepare('SELECT name FROM views WHERE id = ?').get(req.params.id)?.name || 'Unknown View';
|
|
||||||
const info = stmt.run(req.params.id);
|
|
||||||
if (info.changes > 0) {
|
|
||||||
global.insertChangelog(req.user.username, `Deleted view "${viewName}" (ID: ${req.params.id})`);
|
|
||||||
res.json({ success: true });
|
|
||||||
} else {
|
|
||||||
res.status(404).json({ error: 'View not found' });
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Update View
|
|
||||||
app.put('/api/views/:id', requireAdmin, (req, res) => {
|
|
||||||
const { name, config } = req.body;
|
|
||||||
try {
|
|
||||||
// Get old view for comparison
|
|
||||||
const oldView = db.prepare('SELECT * FROM views WHERE id = ?').get(req.params.id);
|
|
||||||
if (!oldView) {
|
|
||||||
return res.status(404).json({ error: 'View not found' });
|
|
||||||
}
|
|
||||||
|
|
||||||
const stmt = db.prepare('UPDATE views SET name = ?, config = ? WHERE id = ?');
|
|
||||||
const info = stmt.run(name, JSON.stringify(config), req.params.id);
|
|
||||||
if (info.changes > 0) {
|
|
||||||
// Build detailed changelog
|
|
||||||
const changes = [];
|
|
||||||
|
|
||||||
// Check name change
|
|
||||||
if (oldView.name !== name) {
|
|
||||||
changes.push(`renamed: "${oldView.name}" → "${name}"`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse configs for comparison
|
|
||||||
let oldConfig = {};
|
|
||||||
try { oldConfig = JSON.parse(oldView.config || '{}'); } catch (e) { }
|
|
||||||
const newConfig = config || {};
|
|
||||||
|
|
||||||
// Compare channels
|
|
||||||
const oldChannels = (oldConfig.channels || []).map(ch =>
|
|
||||||
typeof ch === 'string' ? ch : ch.channel
|
|
||||||
);
|
|
||||||
const newChannels = (newConfig.channels || []).map(ch =>
|
|
||||||
typeof ch === 'string' ? ch : ch.channel
|
|
||||||
);
|
|
||||||
|
|
||||||
const added = newChannels.filter(ch => !oldChannels.includes(ch));
|
|
||||||
const removed = oldChannels.filter(ch => !newChannels.includes(ch));
|
|
||||||
|
|
||||||
if (added.length > 0) {
|
|
||||||
changes.push(`added channels: ${added.join(', ')}`);
|
|
||||||
}
|
|
||||||
if (removed.length > 0) {
|
|
||||||
changes.push(`removed channels: ${removed.join(', ')}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for color/fill changes
|
|
||||||
const oldChannelConfigs = {};
|
|
||||||
(oldConfig.channels || []).forEach(ch => {
|
|
||||||
if (typeof ch === 'object') {
|
|
||||||
oldChannelConfigs[ch.channel] = ch;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
const newChannelConfigs = {};
|
|
||||||
(newConfig.channels || []).forEach(ch => {
|
|
||||||
if (typeof ch === 'object') {
|
|
||||||
newChannelConfigs[ch.channel] = ch;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
const colorChanges = [];
|
|
||||||
for (const ch of newChannels) {
|
|
||||||
const oldCh = oldChannelConfigs[ch] || {};
|
|
||||||
const newCh = newChannelConfigs[ch] || {};
|
|
||||||
if (oldCh.color !== newCh.color || oldCh.fillColor !== newCh.fillColor) {
|
|
||||||
colorChanges.push(ch.split(':').pop()); // Just the channel name
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (colorChanges.length > 0) {
|
|
||||||
changes.push(`colors changed for: ${colorChanges.join(', ')}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check order change
|
|
||||||
if (added.length === 0 && removed.length === 0 &&
|
|
||||||
JSON.stringify(oldChannels) !== JSON.stringify(newChannels)) {
|
|
||||||
changes.push('channel order changed');
|
|
||||||
}
|
|
||||||
|
|
||||||
const changeText = changes.length > 0
|
|
||||||
? `Updated view "${name}": ${changes.join('; ')}`
|
|
||||||
: `Updated view "${name}" (no significant changes)`;
|
|
||||||
global.insertChangelog(req.user.username, changeText);
|
|
||||||
|
|
||||||
res.json({ id: req.params.id, name, config });
|
|
||||||
} else {
|
|
||||||
res.status(404).json({ error: 'View not found' });
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Reorder Views
|
|
||||||
app.post('/api/views/reorder', requireAdmin, (req, res) => {
|
|
||||||
const { order } = req.body;
|
|
||||||
console.log('[API] Reorder request:', order);
|
|
||||||
if (!Array.isArray(order)) return res.status(400).json({ error: 'Invalid format' });
|
|
||||||
|
|
||||||
const updateStmt = db.prepare('UPDATE views SET position = ? WHERE id = ?');
|
|
||||||
const updateMany = db.transaction((items) => {
|
|
||||||
for (const item of items) {
|
|
||||||
console.log('[API] Updating view', item.id, 'to position', item.position);
|
|
||||||
updateStmt.run(item.position, item.id);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
updateMany(order);
|
|
||||||
console.log('[API] Reorder successful');
|
|
||||||
res.json({ success: true });
|
|
||||||
} catch (err) {
|
|
||||||
console.error('[API] Reorder error:', err);
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// =============================================
|
|
||||||
// RULES API
|
|
||||||
// =============================================
|
|
||||||
|
|
||||||
// Apply checkAuth middleware to rules API routes
|
|
||||||
app.use('/api/rules', checkAuth);
|
|
||||||
|
|
||||||
// Virtual output channel definitions
|
|
||||||
// Virtual output channel definitions - MOVED TO GLOBAL SCOPE
|
|
||||||
|
|
||||||
// GET /api/outputs - List output channel definitions
|
|
||||||
app.get('/api/outputs', (req, res) => {
|
|
||||||
res.json(OUTPUT_CHANNELS);
|
|
||||||
});
|
|
||||||
|
|
||||||
// GET /api/outputs/values - Get current output values
|
|
||||||
app.get('/api/outputs/values', (req, res) => {
|
|
||||||
try {
|
|
||||||
if (!db) throw new Error('Database not connected');
|
|
||||||
const result = {};
|
|
||||||
const stmt = db.prepare(`
|
|
||||||
SELECT channel, value FROM output_events
|
|
||||||
WHERE id IN (
|
|
||||||
SELECT MAX(id) FROM output_events GROUP BY channel
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
const rows = stmt.all();
|
|
||||||
rows.forEach(row => {
|
|
||||||
result[row.channel] = row.value;
|
|
||||||
});
|
|
||||||
// Fill in defaults for missing channels
|
|
||||||
OUTPUT_CHANNELS.forEach(ch => {
|
|
||||||
if (result[ch.channel] === undefined) {
|
|
||||||
result[ch.channel] = 0;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
res.json(result);
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// GET /api/outputs/commands - Get desired states for bound devices
|
|
||||||
// Agents poll this to get commands. Returns { "device:channel": { state: 0|1 } }
|
|
||||||
app.get('/api/outputs/commands', (req, res) => {
|
|
||||||
try {
|
|
||||||
if (!db) throw new Error('Database not connected');
|
|
||||||
|
|
||||||
// Get current output values
|
|
||||||
const stmt = db.prepare(`
|
|
||||||
SELECT channel, value FROM output_events
|
|
||||||
WHERE id IN (
|
|
||||||
SELECT MAX(id) FROM output_events GROUP BY channel
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
const rows = stmt.all();
|
|
||||||
const outputValues = {};
|
|
||||||
rows.forEach(row => {
|
|
||||||
outputValues[row.channel] = row.value;
|
|
||||||
});
|
|
||||||
|
|
||||||
// Map to device commands
|
|
||||||
const commands = {};
|
|
||||||
for (const [outputChannel, binding] of Object.entries(OUTPUT_BINDINGS)) {
|
|
||||||
const value = outputValues[outputChannel] ?? 0;
|
|
||||||
const deviceKey = `${binding.device}:${binding.channel}`;
|
|
||||||
commands[deviceKey] = {
|
|
||||||
state: value > 0 ? 1 : 0,
|
|
||||||
source: outputChannel
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
res.json(commands);
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// GET /api/rules/status - Get currently active rule IDs
|
|
||||||
app.get('/api/rules/status', (req, res) => {
|
|
||||||
res.json({ activeIds: Array.from(activeRuleIds) });
|
|
||||||
});
|
|
||||||
|
|
||||||
// GET /api/rules - List all rules
|
|
||||||
app.get('/api/rules', (req, res) => {
|
|
||||||
try {
|
|
||||||
if (!db) throw new Error('Database not connected');
|
|
||||||
const stmt = db.prepare('SELECT * FROM rules ORDER BY position ASC, id ASC');
|
|
||||||
const rows = stmt.all();
|
|
||||||
const rules = rows.map(row => ({
|
|
||||||
...row,
|
|
||||||
conditions: JSON.parse(row.conditions || '{}'),
|
|
||||||
action: JSON.parse(row.action || '{}')
|
|
||||||
}));
|
|
||||||
res.json(rules);
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
// POST /api/rules - Create rule (admin only)
|
|
||||||
app.post('/api/rules', requireAdmin, (req, res) => {
|
|
||||||
const { name, type = 'static', enabled = 1, conditions, action } = req.body;
|
|
||||||
if (!name || !conditions || !action) {
|
|
||||||
return res.status(400).json({ error: 'Missing required fields: name, conditions, action' });
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
const stmt = db.prepare(`
|
|
||||||
INSERT INTO rules (name, type, enabled, conditions, action, created_by)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?)
|
|
||||||
`);
|
|
||||||
const info = stmt.run(
|
|
||||||
name,
|
|
||||||
type,
|
|
||||||
enabled ? 1 : 0,
|
|
||||||
JSON.stringify(conditions),
|
|
||||||
JSON.stringify(action),
|
|
||||||
req.user?.id || null
|
|
||||||
);
|
|
||||||
runRules(); // Trigger rules immediately
|
|
||||||
global.insertChangelog(req.user?.username || 'admin', `Created rule "${name}"`);
|
|
||||||
res.json({ id: info.lastInsertRowid, name, type, enabled, conditions, action });
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500).json({ error: err.message });
|
|
||||||
}
|
|
||||||
});

// PUT /api/rules/:id - Update rule (admin only)
app.put('/api/rules/:id', requireAdmin, (req, res) => {
  const { name, type, enabled, conditions, action } = req.body;
  try {
    // Get old rule for comparison
    const oldRule = db.prepare('SELECT * FROM rules WHERE id = ?').get(req.params.id);
    if (!oldRule) {
      return res.status(404).json({ error: 'Rule not found' });
    }

    const stmt = db.prepare(`
      UPDATE rules SET name = ?, type = ?, enabled = ?, conditions = ?, action = ?, updated_at = datetime('now')
      WHERE id = ?
    `);
    const info = stmt.run(
      name,
      type || 'static',
      enabled ? 1 : 0,
      JSON.stringify(conditions),
      JSON.stringify(action),
      req.params.id
    );

    if (info.changes > 0) {
      runRules(); // Trigger rules immediately

      // Build detailed changelog
      const changes = [];
      if (oldRule.name !== name) {
        changes.push(`name: "${oldRule.name}" → "${name}"`);
      }
      if (!!oldRule.enabled !== !!enabled) {
        changes.push(`enabled: ${oldRule.enabled ? 'on' : 'off'} → ${enabled ? 'on' : 'off'}`);
      }
      const oldConditions = oldRule.conditions || '{}';
      const newConditions = JSON.stringify(conditions);
      if (oldConditions !== newConditions) {
        changes.push('conditions changed');
      }
      const oldAction = oldRule.action || '{}';
      const newAction = JSON.stringify(action);
      if (oldAction !== newAction) {
        // Parse to show what changed in action
        try {
          const oldA = JSON.parse(oldAction);
          const newA = action;
          if (oldA.channel !== newA.channel) {
            changes.push(`action channel: ${oldA.channel} → ${newA.channel}`);
          }
          if (JSON.stringify(oldA.value) !== JSON.stringify(newA.value)) {
            changes.push(`action value: ${JSON.stringify(oldA.value)} → ${JSON.stringify(newA.value)}`);
          }
        } catch (e) {
          changes.push('action changed');
        }
      }

      const changeText = changes.length > 0
        ? `Updated rule "${name}": ${changes.join(', ')}`
        : `Updated rule "${name}" (no changes)`;
      global.insertChangelog(req.user?.username || 'admin', changeText);

      res.json({ id: req.params.id, name, type, enabled, conditions, action });
    } else {
      res.status(404).json({ error: 'Rule not found' });
    }
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
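
// Note (illustrative, not part of the diff): the PUT handler above overwrites
// every column, so clients should send the complete rule object rather than a
// partial patch. A minimal sketch, assuming the same host and Node 18+ fetch:
async function updateRuleExample(id, fullRule) {
  const res = await fetch(`http://localhost:8080/api/rules/${id}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(fullRule), // { name, type, enabled, conditions, action }
  });
  if (res.status === 404) throw new Error('Rule not found');
  return res.json();
}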

// DELETE /api/rules/:id - Delete rule (admin only)
app.delete('/api/rules/:id', requireAdmin, (req, res) => {
  try {
    const stmt = db.prepare('DELETE FROM rules WHERE id = ?');
    // Get name before delete
    const ruleName = db.prepare('SELECT name FROM rules WHERE id = ?').get(req.params.id)?.name || 'Unknown Rule';
    const info = stmt.run(req.params.id);
    if (info.changes > 0) {
      runRules(); // Trigger rules immediately
      global.insertChangelog(req.user?.username || 'admin', `Deleted rule "${ruleName}" (ID: ${req.params.id})`);
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'Rule not found' });
    }
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});

// POST /api/rules/reorder - Reorder rules (admin only)
app.post('/api/rules/reorder', requireAdmin, (req, res) => {
  const { order } = req.body;
  if (!Array.isArray(order)) return res.status(400).json({ error: 'Invalid format' });

  const updateStmt = db.prepare('UPDATE rules SET position = ? WHERE id = ?');
  const updateMany = db.transaction((items) => {
    for (const item of items) {
      updateStmt.run(item.position, item.id);
    }
  });

  try {
    updateMany(order);
    runRules(); // Trigger rules immediately
    res.json({ success: true });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
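
// Illustrative payload for POST /api/rules/reorder above (host assumed): the
// handler expects `order` to be an array of { id, position } objects and
// applies all updates in a single better-sqlite3 transaction.
async function reorderRulesExample() {
  const order = [
    { id: 3, position: 0 },
    { id: 1, position: 1 },
    { id: 2, position: 2 },
  ];
  const res = await fetch('http://localhost:8080/api/rules/reorder', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ order }),
  });
  return res.json(); // { success: true }
}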

// =============================================
// RULE RUNNER (Background Job)
// =============================================

// Rule Engine functions moved to global scope

// Start rule runner (every 10 seconds)
const ruleRunnerInterval = setInterval(runRules, 10000);
console.log('[RuleRunner] Started background job (10s interval)');

// Clean up on server close
devServer.server?.on('close', () => {
@@ -1119,155 +698,6 @@ module.exports = {
  console.log('[RuleRunner] Stopped background job');
});

// GET /api/devices
// Returns list of unique device/channel pairs (sensors + outputs)
app.get('/api/devices', (req, res) => {
  try {
    if (!db) throw new Error('Database not connected');
    // Get sensor channels
    const sensorStmt = db.prepare("SELECT DISTINCT device, channel FROM sensor_events WHERE data_type = 'number' ORDER BY device, channel");
    const sensorRows = sensorStmt.all();

    // Add output channels with 'output' as device
    const outputRows = OUTPUT_CHANNELS.map(ch => ({
      device: 'output',
      channel: ch.channel
    }));

    res.json([...sensorRows, ...outputRows]);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
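
// Illustrative response shape for GET /api/devices — the device/channel names
// below are made up; real values come from sensor_events and OUTPUT_CHANNELS.
const exampleDevicesResponse = [
  { device: 'ac-infinity', channel: 'temperature' },
  { device: 'tapo', channel: 'power' },
  { device: 'output', channel: 'fan' },
];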

// GET /api/readings
// Intended for the dashboard chart, which refreshes once a minute and shows
// the last 24 hours for the devices/channels the user selected.
// Query params:
//   selection - comma-separated "device:channel" pairs; entries whose device
//               is "output" are read from output_events, everything else from
//               sensor_events
//   since     - ISO timestamp, defaults to 24 hours ago
//   until     - ISO timestamp, defaults to now
// Filtering is done in the database (timestamp range plus device/channel
// predicates) rather than client-side, and each series is backfilled with the
// last reading before `since` so charts start from a known value.
// An illustrative client call follows the handler below.
app.get('/api/readings', (req, res) => {
  try {
    if (!db) throw new Error('Database not connected');
    const { since, until } = req.query;
    const startTime = since || new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString();
    const endTime = until || new Date().toISOString();

    const requestedSensorChannels = []; // [{device, channel}]
    const requestedOutputChannels = []; // [channel]

    if (req.query.selection) {
      const selections = req.query.selection.split(',');
      selections.forEach(s => {
        const lastColonIndex = s.lastIndexOf(':');
        if (lastColonIndex !== -1) {
          const d = s.substring(0, lastColonIndex);
          const c = s.substring(lastColonIndex + 1);
          if (d === 'output') {
            requestedOutputChannels.push(c);
          } else {
            requestedSensorChannels.push({ device: d, channel: c });
          }
        }
      });
    }

    const result = {};

    // 1. Fetch sensor data
    if (requestedSensorChannels.length > 0) {
      let sql = 'SELECT * FROM sensor_events WHERE timestamp > ? AND timestamp <= ? ';
      const params = [startTime, endTime];

      const placeholders = [];
      requestedSensorChannels.forEach(ch => {
        placeholders.push('(device = ? AND channel = ?)');
        params.push(ch.device, ch.channel);
      });
      if (placeholders.length > 0) {
        sql += `AND (${placeholders.join(' OR ')}) `;
      }
      sql += 'ORDER BY timestamp ASC';

      const rows = db.prepare(sql).all(...params);

      // Backfill for sensors
      const backfillStmt = db.prepare(`
        SELECT * FROM sensor_events
        WHERE device = ? AND channel = ?
          AND timestamp <= ?
          AND (until >= ? OR until IS NULL)
        ORDER BY timestamp DESC LIMIT 1
      `);

      const backfillRows = [];
      requestedSensorChannels.forEach(ch => {
        const prev = backfillStmt.get(ch.device, ch.channel, startTime, startTime);
        if (prev) backfillRows.push(prev);
      });

      [...backfillRows, ...rows].forEach(row => {
        const key = `${row.device}:${row.channel}`;
        if (!result[key]) result[key] = [];
        const pt = [row.timestamp, row.value];
        if (row.until) pt.push(row.until);
        result[key].push(pt);
      });
    }

    // 2. Fetch output data
    if (requestedOutputChannels.length > 0) {
      let sql = 'SELECT * FROM output_events WHERE timestamp > ? AND timestamp <= ? ';
      const params = [startTime, endTime];

      const placeholders = requestedOutputChannels.map(() => 'channel = ?');
      sql += `AND (${placeholders.join(' OR ')}) `;
      params.push(...requestedOutputChannels);
      sql += 'ORDER BY timestamp ASC';

      const rows = db.prepare(sql).all(...params);

      // Backfill for outputs
      const backfillStmt = db.prepare(`
        SELECT * FROM output_events
        WHERE channel = ?
          AND timestamp <= ?
          AND (until >= ? OR until IS NULL)
        ORDER BY timestamp DESC LIMIT 1
      `);

      const backfillRows = [];
      requestedOutputChannels.forEach(ch => {
        const prev = backfillStmt.get(ch, startTime, startTime);
        if (prev) {
          backfillRows.push(prev);
        } else {
          // No data at all - add default 0 value at startTime
          backfillRows.push({ channel: ch, timestamp: startTime, value: 0, until: null });
        }
      });

      [...backfillRows, ...rows].forEach(row => {
        const key = `output:${row.channel}`;
        if (!result[key]) result[key] = [];
        const pt = [row.timestamp, row.value];
        if (row.until) pt.push(row.until);
        result[key].push(pt);
      });
    }

    res.json(result);
  } catch (err) {
    console.error(err);
    res.status(500).json({ error: err.message });
  }
});
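
// Illustrative only (host/port and channel names are placeholders; assumes
// Node 18+ global fetch): how a dashboard might poll GET /api/readings once a
// minute for the last 24 hours. The "device:channel" selection format and the
// [timestamp, value, until?] point shape follow the handler above.
async function fetchReadingsExample() {
  const since = new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString();
  const params = new URLSearchParams({
    selection: 'tapo:power,output:fan',
    since,
  });
  const res = await fetch(`http://localhost:8080/api/readings?${params}`);
  const series = await res.json();
  // e.g. { "tapo:power": [[ts, value], ...], "output:fan": [[ts, value, until], ...] }
  return series;
}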

    return middlewares;
  },
},

@@ -1,48 +0,0 @@
const Database = require('better-sqlite3');
const path = require('path');

const dbPath = path.resolve(__dirname, 'server/data/sensors.db');
console.log(`Connecting to database at ${dbPath}`);
const db = new Database(dbPath);

// 1. Verify Table Creation
console.log('Creating changelog table...');
try {
  db.exec(`
    CREATE TABLE IF NOT EXISTS changelog (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      date TEXT NOT NULL,
      user TEXT,
      text TEXT NOT NULL
    )
  `);
  console.log('PASS: Table creation successful (or already exists)');
} catch (err) {
  console.error('FAIL: Table creation failed:', err.message);
  process.exit(1);
}

// 2. Verify Insert
console.log('Inserting test entry...');
try {
  const stmt = db.prepare('INSERT INTO changelog (date, user, text) VALUES (?, ?, ?)');
  const info = stmt.run(new Date().toISOString(), 'test_user', 'Test changelog entry');
  console.log(`PASS: Insert successful, ID: ${info.lastInsertRowid}`);
} catch (err) {
  console.error('FAIL: Insert failed:', err.message);
  process.exit(1);
}

// 3. Verify Read
console.log('Reading entries...');
try {
  const rows = db.prepare('SELECT * FROM changelog ORDER BY id DESC LIMIT 5').all();
  console.table(rows);
  if (rows.length > 0 && rows[0].user === 'test_user') {
    console.log('PASS: Read verification successful');
  } else {
    console.error('FAIL: Read verification failed or data mismatch');
  }
} catch (err) {
  console.error('FAIL: Read failed:', err.message);
}