require('dotenv').config();
const sql = require('mssql');
const AWS = require('aws-sdk');
const fs = require('fs');
const os = require('os');
const path = require('path');
const SambaClient = require('samba-client');

// AWS S3 Configuration
const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  region: process.env.AWS_REGION
});

// MSSQL Configuration
const config = {
  user: process.env.MSSQL_USER,
  password: process.env.MSSQL_PASSWORD,
  server: process.env.MSSQL_SERVER,
  port: parseInt(process.env.MSSQL_PORT, 10) || 1433,
  database: process.env.MSSQL_DATABASE,
  options: {
    encrypt: true,                // required for Azure SQL
    trustServerCertificate: true  // needed for local dev / self-signed certs
  }
};

// Backup file path (as seen by the SQL Server host)
const backupFilePath = process.env.BACKUP_FILE_PATH;

// SMB Configuration
const smbConfig = {
  address: process.env.SMB_ADDRESS,
  username: process.env.SMB_USERNAME,
  password: process.env.SMB_PASSWORD,
  domain: process.env.SMB_DOMAIN || '', // optional
};

// SMB download paths: remote file on the share and local destination
const downloadFile = process.env.SMB_DOWNLOAD_FILE;
const localDownloadFile = process.env.SMB_LOCAL_DOWNLOAD_FILE;

// Admin Telegram Broadcast (env-configured)
const BASE_URL = process.env.BASE_URL;
const ADMIN_API_KEY = process.env.ADMIN_API_KEY;

async function sendTelegramBroadcast(target, message) {
  try {
    if (!BASE_URL || !ADMIN_API_KEY) {
      console.warn('Skipping admin broadcast: missing BASE_URL or ADMIN_API_KEY');
      return null;
    }
    const endpoint = `${BASE_URL.replace(/\/$/, '')}/api/admin/telegram/broadcast`;
    const res = await fetch(endpoint, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'X-API-Key': ADMIN_API_KEY,
      },
      body: JSON.stringify({ target, message }),
    });
    if (!res.ok) {
      const err = await res.json().catch(() => ({}));
      throw new Error(`Broadcast failed: ${res.status} ${res.statusText} ${err.error || ''}`);
    }
    return res.json();
  } catch (e) {
    console.error('Admin broadcast error:', e.message);
    return null;
  }
}

// Helper to format bytes as a human-readable string
function formatBytes(bytes) {
  if (bytes === 0 || bytes == null) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  const val = bytes / Math.pow(k, i);
  return `${val.toFixed(val >= 100 ? 0 : val >= 10 ? 1 : 2)} ${sizes[i]}`;
}

// Query DB sizes (data + log) in bytes
async function getDbSizeBytes() {
  try {
    // Assumes an active connection (global pool) already exists
    const dbName = process.env.MSSQL_DATABASE;
    const sizeQuery = `
      ;WITH s AS (
        SELECT type_desc, size_mb = SUM(size) * 8.0 / 1024.0
        FROM sys.master_files
        WHERE database_id = DB_ID(@db)
        GROUP BY type_desc
      )
      SELECT
        data_bytes = CAST(COALESCE((SELECT size_mb FROM s WHERE type_desc = 'ROWS'), 0) * 1024 * 1024 AS BIGINT),
        log_bytes  = CAST(COALESCE((SELECT size_mb FROM s WHERE type_desc = 'LOG'), 0) * 1024 * 1024 AS BIGINT);
    `;
    const request = new sql.Request();
    request.input('db', sql.NVarChar, dbName);
    const r = await request.query(sizeQuery);
    const row = r.recordset && r.recordset[0];
    return {
      dataBytes: row ? Number(row.data_bytes) : null,
      logBytes: row ? Number(row.log_bytes) : null,
    };
  } catch (e) {
    console.warn('Could not query DB size:', e.message);
    return { dataBytes: null, logBytes: null };
  }
}

// Function to create database backup
async function createDatabaseBackup() {
  try {
    console.log('Connecting to database...');
    await sql.connect(config);

    console.log('Creating database backup...');
    const backupQuery = `
      BACKUP DATABASE [${process.env.MSSQL_DATABASE}]
      TO DISK = N'${backupFilePath}'
      WITH NOFORMAT, NOINIT,
      NAME = N'${process.env.MSSQL_DATABASE}-Full Database Backup',
      SKIP, NOREWIND, NOUNLOAD, STATS = 10
    `;
    await sql.query(backupQuery);
    console.log('Database backup created successfully');
    return backupFilePath;
  } catch (err) {
    console.error('Error creating database backup:', err);
    throw err;
  }
}

// Function to download backup file from SMB share
async function downloadBackupFile() {
  try {
    console.log('Downloading backup file from SMB share...');

    // Create SMB client
    const client = new SambaClient(smbConfig);

    // Download file from SMB share
    await client.getFile(downloadFile, localDownloadFile);

    console.log('Backup file downloaded successfully to:', localDownloadFile);
    return localDownloadFile;
  } catch (err) {
    console.error('Error downloading backup file from SMB share:', err);
    throw err;
  }
}

// Function to upload backup to S3
async function uploadBackupToS3(filePath) {
  try {
    console.log('Uploading backup to S3...');

    // Create a read stream for the file
    const fileStream = fs.createReadStream(filePath);

    // Log stream errors; throwing inside this callback would not be caught
    // by the surrounding try/catch
    fileStream.on('error', (err) => {
      console.error('File stream error:', err);
    });

    // Create S3 upload parameters using the stream
    const params = {
      Bucket: process.env.S3_BUCKET_NAME,
      Key: `backups/${process.env.MSSQL_DATABASE}_${new Date().toISOString().replace(/[:.]/g, '-')}.bak`,
      Body: fileStream
    };

    // Upload file to S3 using the stream
    const data = await s3.upload(params).promise();
    console.log('Backup uploaded successfully to S3:', data.Location);
    return data.Location;
  } catch (err) {
    console.error('Error uploading backup to S3:', err);
    throw err;
  }
}

// Function to run the backup process
async function runBackupProcess() {
  try {
    console.log('Starting backup process at', new Date().toISOString());

    // Create database backup (opens connection)
    await createDatabaseBackup();

    // While connected, get DB size info
    const sizes = await getDbSizeBytes();

    // Close connection once done with SQL-related work
    await sql.close().catch(() => {});

    // Download backup file from SMB share
    const localBackupFile = await downloadBackupFile();

    // Upload backup to S3
    const s3Url = await uploadBackupToS3(localBackupFile);

    console.log('Backup process completed successfully at', new Date().toISOString());
    console.log('Backup available at:', s3Url);

    // Notify admins via Telegram broadcast, including DB size
    const fileName = path.basename(localBackupFile || 'backup.bak');
    const when = new Date().toISOString();
    const sizeLine = sizes && (sizes.dataBytes != null || sizes.logBytes != null)
      ? `\nDB Size: ${sizes.dataBytes != null ? formatBytes(sizes.dataBytes) : '?'} (data), ${sizes.logBytes != null ? formatBytes(sizes.logBytes) : '?'} (log)`
      : '';
    const msg = `Backup completed ✅\nDB: ${process.env.MSSQL_DATABASE}\nFile: ${fileName}\nS3: ${s3Url}${sizeLine}\nTime: ${when}`;
    await sendTelegramBroadcast('admins', msg);
  } catch (err) {
    console.error('Backup process failed:', err);

    // Ensure connection is closed on failure
    try { await sql.close(); } catch {}

    // Optional: notify error channel on failure (best-effort)
    const when = new Date().toISOString();
    const msg = `Backup failed 🔴\nDB: ${process.env.MSSQL_DATABASE}\nTime: ${when}\nError: ${err && err.message ? err.message : err}`;
    await sendTelegramBroadcast('errors', msg);
  }
}

// Run backup immediately when starting
runBackupProcess();

// Schedule backup to run every 24 hours (86400000 milliseconds)
// Adjust this interval as needed
setInterval(runBackupProcess, 86400000);

console.log('Database backup service started. Running backups every 24 hours.');

// Startup health notification
(async () => {
  try {
    const when = new Date().toISOString();
    const host = os.hostname();
    const region = process.env.AWS_REGION || 'n/a';
    const bucket = process.env.S3_BUCKET_NAME || 'n/a';
    await sendTelegramBroadcast(
      'admins',
      `Backup service started ✅\nDB: ${process.env.MSSQL_DATABASE}\nHost: ${host}\nAWS: ${region}/${bucket}\nTime: ${when}`
    );
  } catch (e) {
    console.warn('Startup broadcast failed:', e.message);
  }
})();
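
// ---------------------------------------------------------------------------
// Example .env for this service. Values are illustrative placeholders only;
// the variable names are the ones read by the code above. Runtime assumptions:
// Node 18+ (for the global fetch used in sendTelegramBroadcast) and, for
// samba-client, the smbclient binary typically needs to be on the PATH.
//
//   AWS_ACCESS_KEY_ID=AKIA...
//   AWS_SECRET_ACCESS_KEY=...
//   AWS_REGION=eu-central-1
//   S3_BUCKET_NAME=my-backup-bucket
//
//   MSSQL_USER=sa
//   MSSQL_PASSWORD=...
//   MSSQL_SERVER=db.example.internal
//   MSSQL_PORT=1433
//   MSSQL_DATABASE=MyDatabase
//
//   # Path where SQL Server writes the .bak file (as seen by the DB host)
//   BACKUP_FILE_PATH=D:\Backups\MyDatabase.bak
//
//   # SMB share holding the .bak file, and the local destination for the copy
//   SMB_ADDRESS=//fileserver/backups
//   SMB_USERNAME=backup-user
//   SMB_PASSWORD=...
//   SMB_DOMAIN=WORKGROUP
//   SMB_DOWNLOAD_FILE=MyDatabase.bak
//   SMB_LOCAL_DOWNLOAD_FILE=./downloads/MyDatabase.bak
//
//   # Admin Telegram broadcast endpoint (optional; broadcasts are skipped if unset)
//   BASE_URL=https://example.com
//   ADMIN_API_KEY=...
// ---------------------------------------------------------------------------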