- Added explicit connection cleanup in createDatabaseBackup()
- Resolves 'BACKUP DATABASE is terminating abnormally' error in runBackupProcess()
- Prevents connection pool conflicts when running scheduled backups

require('dotenv').config();
const sql = require('mssql');
// Replace aws-sdk v2 with modular v3 S3 client
const { S3Client, PutObjectCommand, ListObjectsV2Command } = require('@aws-sdk/client-s3');
const fs = require('fs');
const path = require('path');
const os = require('os');
const zlib = require('zlib');
const { pipeline } = require('stream/promises');
const SambaClient = require('samba-client');
const { Client } = require('ssh2');
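
// Note: sendTelegramBroadcast() below relies on the global fetch API, which is
// built into Node.js 18+; on older runtimes a fetch polyfill would be required.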

// AWS S3 Configuration (v3 client)
const s3 = new S3Client({
  region: process.env.AWS_REGION,
  // Use explicit credentials if provided; otherwise let default provider chain resolve
  credentials: (process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY)
    ? {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
      }
    : undefined,
});

// MSSQL Configuration
const config = {
  user: process.env.MSSQL_USER,
  password: process.env.MSSQL_PASSWORD,
  server: process.env.MSSQL_SERVER,
  port: parseInt(process.env.MSSQL_PORT, 10) || 1433,
  database: process.env.MSSQL_DATABASE,
  options: {
    encrypt: false, // Set to true for Azure SQL or any TLS-encrypted connection
    trustServerCertificate: true // Needed for local dev / self-signed certs
  }
};

// Backup file path
const backupFilePath = process.env.BACKUP_FILE_PATH;

// SCP Configuration (SSH key authentication)
const scpConfig = {
  host: process.env.SCP_HOST,
  port: parseInt(process.env.SCP_PORT, 10) || 22,
  username: process.env.SCP_USERNAME,
  privateKey: fs.readFileSync(path.resolve(process.env.SCP_KEY_PATH.replace(/^~/, os.homedir())))
};

// Legacy SMB Configuration (deprecated)
const smbConfig = {
  address: process.env.SMB_ADDRESS,
  username: process.env.SMB_USERNAME,
  password: process.env.SMB_PASSWORD,
  domain: process.env.SMB_DOMAIN || '', // optional
};

// Resolved download paths (SCP preferred, legacy SMB values as fallback)
const downloadFile = process.env.SCP_REMOTE_PATH || process.env.SMB_DOWNLOAD_FILE;
const localDownloadFile = process.env.SCP_LOCAL_PATH || process.env.SMB_LOCAL_DOWNLOAD_FILE;
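
// Illustrative .env for this service (all values below are placeholders, not
// real defaults; only variables referenced in this file are listed):
//   MSSQL_USER=sa                     MSSQL_PASSWORD=...
//   MSSQL_SERVER=db.example.internal  MSSQL_PORT=1433
//   MSSQL_DATABASE=ez                 BACKUP_FILE_PATH=/sharemnt/ez.bak
//   AWS_REGION=eu-central-1           S3_BUCKET_NAME=my-backups
//   SCP_HOST=db.example.internal      SCP_PORT=22
//   SCP_USERNAME=backup               SCP_KEY_PATH=~/.ssh/id_ed25519
//   SCP_REMOTE_PATH=/sharemnt/ez.bak  SCP_LOCAL_PATH=ez.bak
//   BASE_URL=https://app.example.com  ADMIN_API_KEY=...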

// Admin Telegram Broadcast (env-configured)
const BASE_URL = (process.env.BASE_URL || '').trim();
const ADMIN_API_KEY = (process.env.ADMIN_API_KEY || '').trim();

async function sendTelegramBroadcast(target, message) {
  try {
    if (!BASE_URL || !ADMIN_API_KEY) {
      console.warn('Skipping admin broadcast: missing BASE_URL or ADMIN_API_KEY', { hasBaseUrl: !!BASE_URL, hasKey: !!ADMIN_API_KEY });
      return null;
    }
    const endpoint = `${BASE_URL.replace(/\/$/, '')}/api/admin/telegram/broadcast`;
    console.log(`Broadcast → ${endpoint} target=${target}`);

    const res = await fetch(endpoint, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'X-API-Key': ADMIN_API_KEY,
      },
      body: JSON.stringify({ target, message }),
    });

    let bodyText = '';
    try { bodyText = await res.text(); } catch {}

    if (!res.ok) {
      let parsed;
      try { parsed = JSON.parse(bodyText); } catch {}
      const detail = parsed && parsed.error ? parsed.error : (bodyText ? bodyText.slice(0, 300) : '');
      throw new Error(`Broadcast failed: ${res.status} ${res.statusText} - ${detail}`);
    }

    return bodyText ? JSON.parse(bodyText) : {};
  } catch (e) {
    console.error('Admin broadcast error:', e.message);
    return null;
  }
}
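
// Usage sketch: 'all' and 'errors' are the only targets this file uses; any
// other group name would be an assumption about the admin API.
//   await sendTelegramBroadcast('all', 'Maintenance window starts in 10 minutes');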

// Helper to format bytes to human-readable
function formatBytes(bytes) {
  if (bytes === 0 || bytes == null) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  const val = bytes / Math.pow(k, i);
  return `${val.toFixed(val >= 100 ? 0 : val >= 10 ? 1 : 2)} ${sizes[i]}`;
}
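
// Examples: formatBytes(0) === '0 B', formatBytes(1536) === '1.50 KB',
// formatBytes(5 * 1024 ** 3) === '5.00 GB'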

// Query DB sizes (data + log) in bytes
async function getDbSizeBytes() {
  try {
    // Assumes an active connection exists
    const dbName = process.env.MSSQL_DATABASE;
    const sizeQuery = `
      ;WITH s AS (
        SELECT
          type_desc,
          size_mb = SUM(size) * 8.0 / 1024.0 -- size is in 8 KB pages
        FROM sys.master_files
        WHERE database_id = DB_ID(@db)
        GROUP BY type_desc
      )
      SELECT
        data_bytes = CAST(COALESCE((SELECT size_mb FROM s WHERE type_desc = 'ROWS'), 0) * 1024 * 1024 AS BIGINT),
        log_bytes = CAST(COALESCE((SELECT size_mb FROM s WHERE type_desc = 'LOG'), 0) * 1024 * 1024 AS BIGINT);
    `;
    const request = new sql.Request();
    request.input('db', sql.NVarChar, dbName);
    const r = await request.query(sizeQuery);
    const row = r.recordset && r.recordset[0];
    return {
      dataBytes: row ? Number(row.data_bytes) : null,
      logBytes: row ? Number(row.log_bytes) : null,
    };
  } catch (e) {
    console.warn('Could not query DB size:', e.message);
    return { dataBytes: null, logBytes: null };
  }
}

// Function to create database backup
async function createDatabaseBackup() {
  try {
    console.log('Connecting to database...');

    // Close any existing connection to avoid conflicts
    try {
      await sql.close();
    } catch (closeErr) {
      // Ignore error if no connection exists
    }

    await sql.connect(config);

    console.log('Creating database backup...');
    const backupQuery = `
      BACKUP DATABASE [${process.env.MSSQL_DATABASE}]
      TO DISK = N'${backupFilePath}'
      WITH FORMAT, INIT, COMPRESSION, NAME = N'${process.env.MSSQL_DATABASE}-Full Database Backup',
      SKIP, NOREWIND, NOUNLOAD, STATS = 10
    `;

    await sql.query(backupQuery);
    console.log('Database backup created successfully');
    return backupFilePath;
  } catch (err) {
    console.error('Error creating database backup:', err);
    throw err;
  }
}
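
// Note: BACKUP DATABASE writes BACKUP_FILE_PATH on the SQL Server host's
// filesystem, not on this machine, hence the separate SCP download step below.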

// Function to download backup file via SCP
async function downloadBackupFile() {
  try {
    console.log('Downloading backup file via SCP...');

    const remotePath = process.env.SCP_REMOTE_PATH || '/sharemnt/ez.bak';
    const localPath = process.env.SCP_LOCAL_PATH || 'ez.bak';

    console.log(`From: ${scpConfig.username}@${scpConfig.host}:${remotePath}`);
    console.log(`To: ${localPath}`);

    return new Promise((resolve, reject) => {
      const conn = new Client();

      conn.on('ready', () => {
        console.log('SSH connection established');

        conn.sftp((err, sftp) => {
          if (err) {
            console.error('SFTP error:', err);
            conn.end();
            reject(err);
            return;
          }

          const readStream = sftp.createReadStream(remotePath);
          const writeStream = fs.createWriteStream(localPath);

          readStream.on('error', (err) => {
            console.error('Read stream error:', err);
            conn.end();
            reject(err);
          });

          writeStream.on('error', (err) => {
            console.error('Write stream error:', err);
            conn.end();
            reject(err);
          });

          writeStream.on('finish', () => {
            console.log('Backup file downloaded successfully to:', localPath);
            conn.end();
            resolve(localPath);
          });

          readStream.pipe(writeStream);
        });
      });

      conn.on('error', (err) => {
        console.error('SSH connection error:', err);
        reject(err);
      });

      conn.connect(scpConfig);
    });

  } catch (err) {
    console.error('Error downloading backup file via SCP:', err);
    throw err;
  }
}

// Legacy function to download backup file from SMB share (deprecated)
async function downloadBackupFileSMB() {
  try {
    console.log('Downloading backup file from SMB share...');

    // Create SMB client
    const client = new SambaClient(smbConfig);

    // Download file from SMB share
    await client.getFile(process.env.SMB_DOWNLOAD_FILE, process.env.SMB_LOCAL_DOWNLOAD_FILE);

    console.log('Backup file downloaded successfully to:', process.env.SMB_LOCAL_DOWNLOAD_FILE);
    return process.env.SMB_LOCAL_DOWNLOAD_FILE;
  } catch (err) {
    console.error('Error downloading backup file from SMB share:', err);
    throw err;
  }
}
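
// Note: the samba-client package shells out to the `smbclient` binary, so that
// tool must be installed on the host for this legacy path to work.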

// Function to compress backup file with gzip
async function compressBackupFile(inputPath) {
  try {
    console.log('Compressing backup file...');

    const outputPath = inputPath + '.gz';
    const readStream = fs.createReadStream(inputPath);
    const writeStream = fs.createWriteStream(outputPath);
    const gzipStream = zlib.createGzip();

    // Use pipeline for better error handling and cleanup
    await pipeline(readStream, gzipStream, writeStream);

    console.log('Backup file compressed successfully:', outputPath);
    return outputPath;
  } catch (err) {
    console.error('Error compressing backup file:', err);
    throw err;
  }
}
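
// Restore sketch (illustrative, assumes a shell with gunzip and sqlcmd; the
// server name and target path are placeholders):
//   gunzip ez.bak.gz
//   sqlcmd -S <server> -Q "RESTORE DATABASE [ez] FROM DISK = N'/var/opt/mssql/ez.bak' WITH REPLACE"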

// Function to upload backup to S3
async function uploadBackupToS3(filePath) {
  try {
    console.log('Uploading backup to S3...');

    // Create a read stream for the file
    const fileStream = fs.createReadStream(filePath);

    // Log stream errors; throwing inside an event handler would escape this
    // try/catch, so we rely on the SDK call below to reject on a failed Body
    fileStream.on('error', (err) => {
      console.error('File stream error:', err);
    });

    // Create S3 upload parameters using stream - add .gz extension to key
    const key = `backups/${process.env.MSSQL_DATABASE}_${new Date().toISOString().replace(/[:.]/g, '-')}.bak.gz`;
    const params = {
      Bucket: process.env.S3_BUCKET_NAME,
      Key: key,
      Body: fileStream,
      // S3 PutObject needs a known Content-Length for stream bodies; the SDK
      // can often infer it for file streams, but being explicit is safer
      ContentLength: fs.statSync(filePath).size,
      ContentEncoding: 'gzip',
      ContentType: 'application/gzip'
    };

    // Upload file to S3 using v3 client
    await s3.send(new PutObjectCommand(params));
    const location = `s3://${process.env.S3_BUCKET_NAME}/${key}`;
    console.log('Backup uploaded successfully to S3:', location);
    return location;
  } catch (err) {
    console.error('Error uploading backup to S3:', err);
    throw err;
  }
}
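
// For very large backups, multipart upload via @aws-sdk/lib-storage may be a
// better fit than a single PutObject; minimal sketch, assuming that package
// is installed:
//   const { Upload } = require('@aws-sdk/lib-storage');
//   await new Upload({ client: s3, params }).done();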

// Function to get latest backup timestamp from S3
async function getLatestBackupTime() {
  try {
    const params = {
      Bucket: process.env.S3_BUCKET_NAME,
      Prefix: 'backups/',
    };
    const command = new ListObjectsV2Command(params);
    const response = await s3.send(command);
    if (!response.Contents || response.Contents.length === 0) {
      return null;
    }
    // Sort by LastModified descending
    const sorted = response.Contents.sort((a, b) => b.LastModified - a.LastModified);
    return sorted[0].LastModified;
  } catch (err) {
    console.error('Error checking latest backup:', err);
    throw err;
  }
}
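
// Caveat: ListObjectsV2 returns at most 1000 keys per call, so once backups/
// accumulates more objects than that, this check would need to paginate with
// ContinuationToken to reliably find the newest backup.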

// Function to run backup process
async function runBackupProcess() {
  try {
    console.log('Starting backup process at', new Date().toISOString());

    // Create database backup (opens connection)
    await createDatabaseBackup();

    // While connected, get DB size info
    const sizes = await getDbSizeBytes();

    // Close connection once done with SQL-related work
    await sql.close().catch(() => {});

    // Download backup file from the database server via SCP
    const localBackupFile = await downloadBackupFile();

    // Compress backup file with gzip
    const compressedBackupFile = await compressBackupFile(localBackupFile);

    // Upload compressed backup to S3
    const s3Url = await uploadBackupToS3(compressedBackupFile);

    // Clean up temporary files
    try {
      if (fs.existsSync(localBackupFile)) {
        fs.unlinkSync(localBackupFile);
        console.log('Cleaned up original backup file:', localBackupFile);
      }
      if (fs.existsSync(compressedBackupFile)) {
        fs.unlinkSync(compressedBackupFile);
        console.log('Cleaned up compressed backup file:', compressedBackupFile);
      }
    } catch (cleanupErr) {
      console.warn('Error cleaning up temporary files:', cleanupErr.message);
    }

    console.log('Backup process completed successfully at', new Date().toISOString());
    console.log('Backup available at:', s3Url);

    // Notify admins via Telegram broadcast with size
    const fileName = path.basename(compressedBackupFile || 'backup.bak.gz');
    const when = new Date().toISOString();
    const sizeLine = sizes && (sizes.dataBytes != null || sizes.logBytes != null)
      ? `\nDB Size: ${sizes.dataBytes != null ? formatBytes(sizes.dataBytes) : '?'} (data), ${sizes.logBytes != null ? formatBytes(sizes.logBytes) : '?'} (log)`
      : '';
    const msg = `Backup completed ✅\nDB: ${process.env.MSSQL_DATABASE}\nFile: ${fileName}\nS3: ${s3Url}${sizeLine}\nTime: ${when}`;
    await sendTelegramBroadcast('all', msg);
  } catch (err) {
    console.error('Backup process failed:', err);
    // Ensure connection closed on failure
    try { await sql.close(); } catch {}
    // Optional: notify error channel on failure (best-effort)
    const when = new Date().toISOString();
    const msg = `Backup failed 🔴\nDB: ${process.env.MSSQL_DATABASE}\nTime: ${when}\nError: ${err && err.message ? err.message : err}`;
    await sendTelegramBroadcast('errors', msg);
  } finally {
    // 86400000 ms = 24 h; rescheduling in finally keeps the daily cycle
    // running even after a failed attempt
    setTimeout(runBackupProcess, 86400000);
  }
}

// Only run startup logic if this file is executed directly (not imported)
if (require.main === module) {
  // Run backup immediately when starting
  (async () => {
    try {
      const latestBackupTime = await getLatestBackupTime();
      const now = new Date();
      let delay = 0;

      if (latestBackupTime) {
        const nextBackupTime = new Date(latestBackupTime.getTime() + 86400000);
        if (nextBackupTime > now) {
          delay = nextBackupTime - now;
          console.log(`Scheduling first backup in ${Math.floor(delay / 3600000)} hours.`);
        }
      }

      if (delay > 0) {
        setTimeout(runBackupProcess, delay);
      } else {
        runBackupProcess();
      }
    } catch (err) {
      console.error('Error during startup check, proceeding with backup:', err);
      runBackupProcess();
    }
  })();

  console.log('Database backup service started. Running backups every 24 hours.');

  // Startup health notification
  (async () => {
    try {
      const when = new Date().toISOString();
      const host = os.hostname();
      const region = process.env.AWS_REGION || 'n/a';
      const bucket = process.env.S3_BUCKET_NAME || 'n/a';
      await sendTelegramBroadcast('all', `Backup service started ✅\nDB: ${process.env.MSSQL_DATABASE}\nHost: ${host}\nAWS: ${region}/${bucket}\nTime: ${when}`);
    } catch (e) {
      console.warn('Startup broadcast failed:', e.message);
    }
  })();
}

// Export functions for reuse in other scripts
module.exports = {
  createDatabaseBackup,
  downloadBackupFile,
  downloadBackupFileSMB, // Legacy SMB function
  compressBackupFile,
  formatBytes,
  sendTelegramBroadcast,
  getDbSizeBytes,
  // Export configurations that might be needed
  scpConfig,
  smbConfig, // Legacy SMB config
  config, // MSSQL config
  s3 // S3 client
};
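
// Reuse sketch (hypothetical consumer script; the relative path './backup' is
// an assumption about where this file lives):
//   const { compressBackupFile, formatBytes } = require('./backup');
//   const gz = await compressBackupFile('/tmp/ez.bak'); // file must already exist
//   console.log('compressed size:', formatBytes(require('fs').statSync(gz).size));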