feat: add gzip compression for database backup files and update S3 upload process

Author: sebseb7
Date: 2025-09-04 07:20:05 +02:00
parent c59d55ea8d
commit 840fceee86


@@ -4,6 +4,8 @@ const sql = require('mssql');
 const { S3Client, PutObjectCommand, ListObjectsV2Command } = require('@aws-sdk/client-s3');
 const fs = require('fs');
 const path = require('path');
+const zlib = require('zlib');
+const { pipeline } = require('stream/promises');
 const SambaClient = require('samba-client');
 
 // AWS S3 Configuration (v3 client)
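
Note on the new imports: zlib is Node's built-in compression module, and pipeline from 'stream/promises' awaits stream completion and propagates errors from any stage, which is why it is preferred over chained .pipe() calls. A minimal, self-contained sketch of that behavior (the file names are placeholders, not part of the commit):

const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('stream/promises');

// Compress a file and then decompress it again. If any stage fails
// (missing file, disk full, corrupt gzip data), pipeline() rejects and
// destroys the remaining streams instead of leaving them dangling.
async function gzipRoundTrip(src) {
  await pipeline(fs.createReadStream(src), zlib.createGzip(), fs.createWriteStream(src + '.gz'));
  await pipeline(fs.createReadStream(src + '.gz'), zlib.createGunzip(), fs.createWriteStream(src + '.roundtrip'));
}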
@@ -138,7 +140,7 @@ async function createDatabaseBackup() {
   const backupQuery = `
     BACKUP DATABASE [${process.env.MSSQL_DATABASE}]
     TO DISK = N'${backupFilePath}'
-    WITH NOFORMAT, INIT, NAME = N'${process.env.MSSQL_DATABASE}-Vollständig Datenbank Sichern',
+    WITH FORMAT, INIT, NAME = N'${process.env.MSSQL_DATABASE}-Vollständig Datenbank Sichern',
     SKIP, NOREWIND, NOUNLOAD, STATS = 10
   `;
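
The switch from NOFORMAT to FORMAT rewrites the media header on the target file, so together with INIT the .bak always holds only the fresh backup set instead of being appended to an existing media set. A hypothetical follow-up check, not part of this commit, could verify the backup before it is compressed and uploaded, assuming the same mssql connection used by createDatabaseBackup():

// Hypothetical verification step (not in this commit): RESTORE VERIFYONLY
// reads the backup set and throws if the file is incomplete or unreadable.
async function verifyBackup(pool, backupFilePath) {
  await pool.request().query(`RESTORE VERIFYONLY FROM DISK = N'${backupFilePath}'`);
  console.log('Backup verified:', backupFilePath);
}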
@@ -170,6 +172,27 @@ async function downloadBackupFile() {
   }
 }
 
+// Function to compress backup file with gzip
+async function compressBackupFile(inputPath) {
+  try {
+    console.log('Compressing backup file...');
+    const outputPath = inputPath + '.gz';
+
+    const readStream = fs.createReadStream(inputPath);
+    const writeStream = fs.createWriteStream(outputPath);
+    const gzipStream = zlib.createGzip();
+
+    // Use pipeline for better error handling and cleanup
+    await pipeline(readStream, gzipStream, writeStream);
+
+    console.log('Backup file compressed successfully:', outputPath);
+    return outputPath;
+  } catch (err) {
+    console.error('Error compressing backup file:', err);
+    throw err;
+  }
+}
+
 // Function to upload backup to S3
 async function uploadBackupToS3(filePath) {
   try {
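
compressBackupFile() writes the .gz next to the original, so peak disk usage is roughly the backup size plus its compressed size until cleanup runs. An alternative sketch, assuming the @aws-sdk/lib-storage package (not a dependency of this repo), would stream the gzip output straight into a multipart upload and skip the temporary file:

const fs = require('fs');
const zlib = require('zlib');
const { Upload } = require('@aws-sdk/lib-storage'); // assumed extra dependency

// Sketch only: gzip the backup on the fly and upload the stream without
// writing a temporary .gz file to disk.
async function streamCompressedToS3(s3Client, bucket, key, inputPath) {
  const body = fs.createReadStream(inputPath).pipe(zlib.createGzip());
  const upload = new Upload({
    client: s3Client,
    params: { Bucket: bucket, Key: key, Body: body, ContentEncoding: 'gzip', ContentType: 'application/gzip' },
  });
  await upload.done();
  return `s3://${bucket}/${key}`;
}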
@@ -184,12 +207,14 @@ async function uploadBackupToS3(filePath) {
       throw err;
     });
 
-    // Create S3 upload parameters using stream
-    const key = `backups/${process.env.MSSQL_DATABASE}_${new Date().toISOString().replace(/[:.]/g, '-')}.bak`;
+    // Create S3 upload parameters using stream - add .gz extension to key
+    const key = `backups/${process.env.MSSQL_DATABASE}_${new Date().toISOString().replace(/[:.]/g, '-')}.bak.gz`;
     const params = {
       Bucket: process.env.S3_BUCKET_NAME,
       Key: key,
-      Body: fileStream
+      Body: fileStream,
+      ContentEncoding: 'gzip',
+      ContentType: 'application/gzip'
     };
 
     // Upload file to S3 using v3 client
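
Storing ContentEncoding: 'gzip' and ContentType: 'application/gzip' as object metadata tells consumers the payload must be decompressed after download. A hypothetical restore-side helper (not in this commit) could look like the sketch below; GetObjectCommand would need to be added to the existing @aws-sdk/client-s3 import, and in Node the v3 SDK returns Body as a readable stream:

const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('stream/promises');
const { GetObjectCommand } = require('@aws-sdk/client-s3');

// Download a backup uploaded by this script and gunzip it back to a .bak file.
async function downloadAndDecompress(s3Client, bucket, key, outputPath) {
  const { Body } = await s3Client.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
  await pipeline(Body, zlib.createGunzip(), fs.createWriteStream(outputPath));
  return outputPath;
}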
@@ -241,20 +266,37 @@ async function runBackupProcess() {
     // Download backup file from SMB share
     const localBackupFile = await downloadBackupFile();
 
-    // Upload backup to S3
-    const s3Url = await uploadBackupToS3(localBackupFile);
+    // Compress backup file with gzip
+    const compressedBackupFile = await compressBackupFile(localBackupFile);
+
+    // Upload compressed backup to S3
+    const s3Url = await uploadBackupToS3(compressedBackupFile);
+
+    // Clean up temporary files
+    try {
+      if (fs.existsSync(localBackupFile)) {
+        fs.unlinkSync(localBackupFile);
+        console.log('Cleaned up original backup file:', localBackupFile);
+      }
+      if (fs.existsSync(compressedBackupFile)) {
+        fs.unlinkSync(compressedBackupFile);
+        console.log('Cleaned up compressed backup file:', compressedBackupFile);
+      }
+    } catch (cleanupErr) {
+      console.warn('Error cleaning up temporary files:', cleanupErr.message);
+    }
+
     console.log('Backup process completed successfully at', new Date().toISOString());
     console.log('Backup available at:', s3Url);
 
     // Notify admins via Telegram broadcast with size
-    const fileName = path.basename(localBackupFile || 'backup.bak');
+    const fileName = path.basename(compressedBackupFile || 'backup.bak.gz');
     const when = new Date().toISOString();
     const sizeLine = sizes && (sizes.dataBytes != null || sizes.logBytes != null)
       ? `\nDB Size: ${sizes.dataBytes != null ? formatBytes(sizes.dataBytes) : '?'} (data), ${sizes.logBytes != null ? formatBytes(sizes.logBytes) : '?'} (log)`
       : '';
     const msg = `Backup completed ✅\nDB: ${process.env.MSSQL_DATABASE}\nFile: ${fileName}\nS3: ${s3Url}${sizeLine}\nTime: ${when}`;
-    await sendTelegramBroadcast('admins', msg);
+    await sendTelegramBroadcast('all', msg);
   } catch (err) {
     console.error('Backup process failed:', err);
     // Ensure connection closed on failure
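
The new cleanup block deletes both the original .bak and the .gz once the upload has succeeded. Before deletion, the achieved compression ratio could also be logged (or added to the Telegram message); a small sketch reusing the existing formatBytes() helper, purely illustrative:

// Optional sketch (not in this commit): report how much gzip saved before
// the temporary files are removed.
function logCompressionRatio(originalPath, compressedPath) {
  const original = fs.statSync(originalPath).size;
  const compressed = fs.statSync(compressedPath).size;
  const saved = original > 0 ? ((1 - compressed / original) * 100).toFixed(1) : '0.0';
  console.log(`Compressed ${formatBytes(original)} -> ${formatBytes(compressed)} (${saved}% smaller)`);
}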
@@ -304,7 +346,7 @@ console.log('Database backup service started. Running backups every 24 hours.');
     const host = require('os').hostname();
     const region = process.env.AWS_REGION || 'n/a';
     const bucket = process.env.S3_BUCKET_NAME || 'n/a';
-    await sendTelegramBroadcast('admins', `Backup service started ✅\nDB: ${process.env.MSSQL_DATABASE}\nHost: ${host}\nAWS: ${region}/${bucket}\nTime: ${when}`);
+    await sendTelegramBroadcast('all', `Backup service started ✅\nDB: ${process.env.MSSQL_DATABASE}\nHost: ${host}\nAWS: ${region}/${bucket}\nTime: ${when}`);
   } catch (e) {
     console.warn('Startup broadcast failed:', e.message);
   }