add Telegram notification configuration and broadcast functionality

sebseb7
2025-08-02 12:01:56 +02:00
parent 1d6eb27a91
commit 2d020b47b5
2 changed files with 117 additions and 9 deletions


@@ -21,3 +21,7 @@ SMB_PASSWORD=your_password
SMB_DOMAIN=your_domain
SMB_DOWNLOAD_FILE=ez.bak
SMB_LOCAL_DOWNLOAD_FILE=ez.bak
# Telegram Notification Configuration
BASE_URL=https://your-api.example.com
ADMIN_API_KEY=replace-with-admin-api-key
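
Both values are read straight from process.env in index.js below. A minimal sketch of loading and sanity-checking them at startup, assuming the project pulls in this file via the dotenv package (the loading mechanism is not shown in this diff):

// Sketch only: assumes dotenv; the service may load its environment differently.
require('dotenv').config();

if (!process.env.BASE_URL || !process.env.ADMIN_API_KEY) {
  // sendTelegramBroadcast in index.js also degrades gracefully; this just surfaces it early.
  console.warn('Telegram broadcasts disabled: BASE_URL and/or ADMIN_API_KEY not set');
}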

index.js

@@ -41,12 +41,84 @@ const downloadFile = process.env.SMB_DOWNLOAD_FILE;
const localDownloadFile = process.env.SMB_LOCAL_DOWNLOAD_FILE;

// Admin Telegram Broadcast (env-configured)
const BASE_URL = process.env.BASE_URL;
const ADMIN_API_KEY = process.env.ADMIN_API_KEY;

async function sendTelegramBroadcast(target, message) {
  try {
    if (!BASE_URL || !ADMIN_API_KEY) {
      console.warn('Skipping admin broadcast: missing BASE_URL or ADMIN_API_KEY');
      return null;
    }
    const endpoint = `${BASE_URL.replace(/\/$/, '')}/api/admin/telegram/broadcast`;
    const res = await fetch(endpoint, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'X-API-Key': ADMIN_API_KEY,
      },
      body: JSON.stringify({ target, message }),
    });
    if (!res.ok) {
      const err = await res.json().catch(() => ({}));
      throw new Error(`Broadcast failed: ${res.status} ${res.statusText} ${err.error || ''}`);
    }
    return res.json();
  } catch (e) {
    console.error('Admin broadcast error:', e.message);
    return null;
  }
}
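
The helper never throws: it resolves to the endpoint's parsed JSON response on success and to null when configuration is missing or the request fails, so callers can fire and forget. It also relies on the global fetch, available in Node.js 18 and newer. A minimal usage sketch:

// Usage sketch: 'admins' mirrors the target used later in runBackupProcess.
(async () => {
  const result = await sendTelegramBroadcast('admins', 'Test broadcast from the backup service');
  if (result === null) {
    console.log('Broadcast skipped or failed (missing config or API error)');
  }
})();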
// Helper to format bytes to human-readable
function formatBytes(bytes) {
  if (bytes === 0 || bytes == null) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  const val = bytes / Math.pow(k, i);
  return `${val.toFixed(val >= 100 ? 0 : val >= 10 ? 1 : 2)} ${sizes[i]}`;
}
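
The ternary in the return statement scales precision with magnitude. A few hand-checked examples cover the branches:

// Hand-checked sample outputs of formatBytes:
console.log(formatBytes(0));          // "0 B"
console.log(formatBytes(1536));       // "1.50 KB" (value < 10, two decimals)
console.log(formatBytes(52428800));   // "50.0 MB" (10 <= value < 100, one decimal)
console.log(formatBytes(104857600));  // "100 MB"  (value >= 100, no decimals)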
// Query DB sizes (data + log) in bytes
async function getDbSizeBytes() {
  try {
    // Assumes active connection exists
    const dbName = process.env.MSSQL_DATABASE;
    const sizeQuery = `
      ;WITH s AS (
        SELECT
          type_desc,
          size_mb = SUM(size) * 8.0 / 1024.0
        FROM sys.master_files
        WHERE database_id = DB_ID(@db)
        GROUP BY type_desc
      )
      SELECT
        data_bytes = CAST(COALESCE((SELECT size_mb FROM s WHERE type_desc = 'ROWS'), 0) * 1024 * 1024 AS BIGINT),
        log_bytes = CAST(COALESCE((SELECT size_mb FROM s WHERE type_desc = 'LOG'), 0) * 1024 * 1024 AS BIGINT);
    `;
    const request = new sql.Request();
    request.input('db', sql.NVarChar, dbName);
    const r = await request.query(sizeQuery);
    const row = r.recordset && r.recordset[0];
    return {
      dataBytes: row ? Number(row.data_bytes) : null,
      logBytes: row ? Number(row.log_bytes) : null,
    };
  } catch (e) {
    console.warn('Could not query DB size:', e.message);
    return { dataBytes: null, logBytes: null };
  }
}
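
The size_mb expression works because sys.master_files reports file size as a count of 8 KB pages. A worked conversion with a hypothetical 12,800-page data file shows how the query's arithmetic lines up with the byte values handed back to JavaScript:

// sys.master_files.size is a page count (8 KB pages), hence SUM(size) * 8.0 / 1024.0 for MB.
// Hypothetical example: a data file of 12,800 pages.
const pages = 12800;
const sizeMb = pages * 8.0 / 1024.0;    // 100 MB: what the CTE's size_mb column would hold
const dataBytes = sizeMb * 1024 * 1024; // 104857600: what data_bytes comes back as
console.log(formatBytes(dataBytes));    // "100 MB"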
// Function to create database backup
async function createDatabaseBackup() {
  try {
    console.log('Connecting to database...');
    await sql.connect(config);

    console.log('Creating database backup...');
    const backupQuery = `
      BACKUP DATABASE [${process.env.MSSQL_DATABASE}]
@@ -54,15 +126,13 @@ async function createDatabaseBackup() {
      WITH NOFORMAT, NOINIT, NAME = N'${process.env.MSSQL_DATABASE}-Vollständig Datenbank Sichern',
      SKIP, NOREWIND, NOUNLOAD, STATS = 10
    `;
    const result = await sql.query(backupQuery);
    console.log('Database backup created successfully');
    return backupFilePath;
  } catch (err) {
    console.error('Error creating database backup:', err);
    throw err;
  } finally {
    await sql.close();
  }
}
@@ -120,20 +190,41 @@ async function uploadBackupToS3(filePath) {
async function runBackupProcess() {
  try {
    console.log('Starting backup process at', new Date().toISOString());
    // Create database backup (opens connection)
    await createDatabaseBackup();

    // While connected, get DB size info
    const sizes = await getDbSizeBytes();

    // Close connection once done with SQL-related work
    await sql.close().catch(() => {});

    // Download backup file from SMB share
    const localBackupFile = await downloadBackupFile();

    // Upload backup to S3
    const s3Url = await uploadBackupToS3(localBackupFile);

    console.log('Backup process completed successfully at', new Date().toISOString());
    console.log('Backup available at:', s3Url);

    // Notify admins via Telegram broadcast with size
    const fileName = path.basename(localBackupFile || 'backup.bak');
    const when = new Date().toISOString();
    const sizeLine = sizes && (sizes.dataBytes != null || sizes.logBytes != null)
      ? `\nDB Size: ${sizes.dataBytes != null ? formatBytes(sizes.dataBytes) : '?'} (data), ${sizes.logBytes != null ? formatBytes(sizes.logBytes) : '?'} (log)`
      : '';
    const msg = `Backup completed ✅\nDB: ${process.env.MSSQL_DATABASE}\nFile: ${fileName}\nS3: ${s3Url}${sizeLine}\nTime: ${when}`;
    await sendTelegramBroadcast('admins', msg);
  } catch (err) {
    console.error('Backup process failed:', err);

    // Ensure connection closed on failure
    try { await sql.close(); } catch {}

    // Optional: notify error channel on failure (best-effort)
    const when = new Date().toISOString();
    const msg = `Backup failed 🔴\nDB: ${process.env.MSSQL_DATABASE}\nTime: ${when}\nError: ${err && err.message ? err.message : err}`;
    await sendTelegramBroadcast('errors', msg);
  }
}
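
One caveat: createDatabaseBackup closes the global mssql pool in its finally block, so by the time getDbSizeBytes runs, its "active connection" assumption may no longer hold and the size query falls back to nulls (the broadcast then simply omits the size line). A defensive variant could reconnect before querying; a sketch reusing the same config object, with getDbSizeBytesWithReconnect as a hypothetical wrapper:

// Sketch: reopen the global pool (createDatabaseBackup's finally block has closed it)
// before asking for sizes; runBackupProcess then closes it again as it already does.
async function getDbSizeBytesWithReconnect() {
  try {
    await sql.connect(config);      // re-establish the global pool
    return await getDbSizeBytes();  // still falls back to nulls on any query error
  } catch (e) {
    console.warn('Could not reconnect for DB size query:', e.message);
    return { dataBytes: null, logBytes: null };
  }
}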
@@ -145,3 +236,16 @@ runBackupProcess();
setInterval(runBackupProcess, 86400000);
console.log('Database backup service started. Running backups every 24 hours.');

// Startup health notification
(async () => {
  try {
    const when = new Date().toISOString();
    const host = require('os').hostname();
    const region = process.env.AWS_REGION || 'n/a';
    const bucket = process.env.S3_BUCKET_NAME || 'n/a';
    await sendTelegramBroadcast('admins', `Backup service started ✅\nDB: ${process.env.MSSQL_DATABASE}\nHost: ${host}\nAWS: ${region}/${bucket}\nTime: ${when}`);
  } catch (e) {
    console.warn('Startup broadcast failed:', e.message);
  }
})();