chore: replace aws-sdk with @aws-sdk/client-s3 in dependencies
index.js (85 changed lines)
@@ -1,15 +1,21 @@
 require('dotenv').config();
 const sql = require('mssql');
-const AWS = require('aws-sdk');
+// Replace aws-sdk v2 with modular v3 S3 client
+const { S3Client, PutObjectCommand, ListObjectsV2Command } = require('@aws-sdk/client-s3');
 const fs = require('fs');
 const path = require('path');
 const SambaClient = require('samba-client');
 
-// AWS S3 Configuration
-const s3 = new AWS.S3({
-  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
-  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
-  region: process.env.AWS_REGION
+// AWS S3 Configuration (v3 client)
+const s3 = new S3Client({
+  region: process.env.AWS_REGION,
+  // Use explicit credentials if provided; otherwise let default provider chain resolve
+  credentials: (process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY)
+    ? {
+        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
+        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
+      }
+    : undefined,
 });
 
 // MSSQL Configuration
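Note: when the env vars are unset, passing credentials: undefined lets the v3 client fall back to the default provider chain (shared config files, container or instance metadata, and so on). A minimal sketch of making that fallback explicit with the @aws-sdk/credential-providers package (an extra dependency, not part of this commit):

// Sketch, not part of this commit: resolve credentials explicitly instead of
// relying on the implicit default chain.
const { S3Client } = require('@aws-sdk/client-s3');
const { fromEnv, fromIni } = require('@aws-sdk/credential-providers');

const s3Explicit = new S3Client({
  region: process.env.AWS_REGION,
  // fromEnv() reads AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY;
  // fromIni() reads profiles from ~/.aws/credentials.
  credentials: process.env.AWS_ACCESS_KEY_ID ? fromEnv() : fromIni(),
});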
@@ -20,7 +26,7 @@ const config = {
   port: parseInt(process.env.MSSQL_PORT) || 1433,
   database: process.env.MSSQL_DATABASE,
   options: {
-    encrypt: true, // Use this if you're on Windows Azure
+    encrypt: false, // Set to true if you're on Windows Azure
     trustServerCertificate: true // Change to true for local dev / self-signed certs
   }
 };
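Note: flipping encrypt to false disables TLS for the SQL connection in every environment, not only local dev. A sketch of driving it from configuration instead (MSSQL_ENCRYPT is a hypothetical variable, not part of this commit):

// Sketch, not part of this commit: default to encrypted transport and allow an
// explicit opt-out per environment via a hypothetical MSSQL_ENCRYPT variable.
const options = {
  encrypt: process.env.MSSQL_ENCRYPT !== 'false', // encrypted unless opted out
  trustServerCertificate: true // accept self-signed certs in local dev
};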
@@ -132,7 +138,7 @@ async function createDatabaseBackup() {
     const backupQuery = `
       BACKUP DATABASE [${process.env.MSSQL_DATABASE}]
       TO DISK = N'${backupFilePath}'
-      WITH NOFORMAT, NOINIT, NAME = N'${process.env.MSSQL_DATABASE}-Vollständig Datenbank Sichern',
+      WITH NOFORMAT, INIT, NAME = N'${process.env.MSSQL_DATABASE}-Vollständig Datenbank Sichern',
       SKIP, NOREWIND, NOUNLOAD, STATS = 10
     `;
 
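Note: NOINIT appends each backup as a new backup set in the same file, so the local .bak grows on every run; INIT overwrites the existing backup sets instead, which fits here because each upload gets a fresh timestamped S3 key. An optional sketch for checking the file before uploading it (verifyBackup is a hypothetical helper, not part of this commit; RESTORE VERIFYONLY checks readability without restoring):

// Sketch, not part of this commit: verify the backup file after BACKUP DATABASE.
async function verifyBackup(pool, backupFilePath) {
  // Throws if the backup set is unreadable or incomplete.
  await pool.request().query(`RESTORE VERIFYONLY FROM DISK = N'${backupFilePath}'`);
}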
@@ -179,22 +185,45 @@ async function uploadBackupToS3(filePath) {
     });
 
     // Create S3 upload parameters using stream
+    const key = `backups/${process.env.MSSQL_DATABASE}_${new Date().toISOString().replace(/[:.]/g, '-')}.bak`;
     const params = {
       Bucket: process.env.S3_BUCKET_NAME,
-      Key: `backups/${process.env.MSSQL_DATABASE}_${new Date().toISOString().replace(/[:.]/g, '-')}.bak`,
+      Key: key,
       Body: fileStream
     };
 
-    // Upload file to S3 using stream
-    const data = await s3.upload(params).promise();
-    console.log('Backup uploaded successfully to S3:', data.Location);
-    return data.Location;
+    // Upload file to S3 using v3 client
+    await s3.send(new PutObjectCommand(params));
+    const location = `s3://${process.env.S3_BUCKET_NAME}/${key}`;
+    console.log('Backup uploaded successfully to S3:', location);
+    return location;
   } catch (err) {
     console.error('Error uploading backup to S3:', err);
     throw err;
   }
 }
 
+// Function to get latest backup timestamp from S3
+async function getLatestBackupTime() {
+  try {
+    const params = {
+      Bucket: process.env.S3_BUCKET_NAME,
+      Prefix: 'backups/',
+    };
+    const command = new ListObjectsV2Command(params);
+    const response = await s3.send(command);
+    if (!response.Contents || response.Contents.length === 0) {
+      return null;
+    }
+    // Sort by LastModified descending
+    const sorted = response.Contents.sort((a, b) => b.LastModified - a.LastModified);
+    return sorted[0].LastModified;
+  } catch (err) {
+    console.error('Error checking latest backup:', err);
+    throw err;
+  }
+}
+
 // Function to run backup process
 async function runBackupProcess() {
   try {
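Note: v2's s3.upload() transparently switched to multipart uploads for large bodies; the v3 PutObjectCommand does not, and streaming a large .bak through it can fail without a known content length. The direct v3 replacement is Upload from @aws-sdk/lib-storage (an extra dependency, not part of this commit):

// Sketch, not part of this commit: multipart streaming upload via lib-storage.
const { Upload } = require('@aws-sdk/lib-storage');

async function uploadLargeBackup(fileStream, key) {
  const upload = new Upload({
    client: s3, // the S3Client configured above
    params: { Bucket: process.env.S3_BUCKET_NAME, Key: key, Body: fileStream },
  });
  upload.on('httpUploadProgress', (p) => console.log('Uploaded bytes:', p.loaded));
  return upload.done();
}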
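Note: ListObjectsV2 returns at most 1,000 keys per call, so once backups/ accumulates more objects than that, getLatestBackupTime only sorts the first page. A sketch using the paginator that @aws-sdk/client-s3 already exports (getLatestBackupTimePaginated is a hypothetical rename, not part of this commit):

// Sketch, not part of this commit: scan every page for the newest object.
const { paginateListObjectsV2 } = require('@aws-sdk/client-s3');

async function getLatestBackupTimePaginated() {
  let latest = null;
  const pages = paginateListObjectsV2(
    { client: s3 },
    { Bucket: process.env.S3_BUCKET_NAME, Prefix: 'backups/' }
  );
  for await (const page of pages) {
    for (const obj of page.Contents ?? []) {
      if (!latest || obj.LastModified > latest) latest = obj.LastModified;
    }
  }
  return latest; // null when no backups exist yet
}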
@@ -234,15 +263,37 @@ async function runBackupProcess() {
     const when = new Date().toISOString();
     const msg = `Backup failed 🔴\nDB: ${process.env.MSSQL_DATABASE}\nTime: ${when}\nError: ${err && err.message ? err.message : err}`;
     await sendTelegramBroadcast('errors', msg);
+  } finally {
+    setTimeout(runBackupProcess, 86400000);
   }
 }
 
 // Run backup immediately when starting
-runBackupProcess();
+(async () => {
+  try {
+    const latestBackupTime = await getLatestBackupTime();
+    const now = new Date();
+    let delay = 0;
+
+    if (latestBackupTime) {
+      const nextBackupTime = new Date(latestBackupTime.getTime() + 86400000);
+      if (nextBackupTime > now) {
+        delay = nextBackupTime - now;
+        console.log(`Scheduling first backup in ${Math.floor(delay / 3600000)} hours.`);
+      }
+    }
+
+    if (delay > 0) {
+      setTimeout(runBackupProcess, delay);
+    } else {
+      runBackupProcess();
+    }
+  } catch (err) {
+    console.error('Error during startup check, proceeding with backup:', err);
+    runBackupProcess();
+  }
+})();
 
-// Schedule backup to run every 24 hours (86400000 milliseconds)
-// You can adjust this interval as needed
-setInterval(runBackupProcess, 86400000);
 
 console.log('Database backup service started. Running backups every 24 hours.');
 
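Note: subtracting two Dates coerces both to milliseconds, so delay is already in ms and feeds setTimeout directly; the finally block then reschedules 24 hours after each run completes, whether it succeeded or failed. The same arithmetic with a named constant (a sketch, not part of this commit):

// Sketch, not part of this commit: the startup-delay computation factored out.
const BACKUP_INTERVAL_MS = 24 * 60 * 60 * 1000; // 86400000, as in the diff

function computeStartupDelay(latestBackupTime, now = new Date()) {
  if (!latestBackupTime) return 0; // no previous backup: run immediately
  const nextBackupTime = new Date(latestBackupTime.getTime() + BACKUP_INTERVAL_MS);
  return Math.max(0, nextBackupTime - now);
}

// Example: a backup finished 20 hours ago -> delay = 4 hours = 14400000 ms.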
package-lock.json (generated, 1776 changed lines)
File diff suppressed because it is too large
package.json
@@ -8,7 +8,7 @@
     "dev": "nodemon index.js"
   },
   "dependencies": {
-    "aws-sdk": "^2.1490.0",
+    "@aws-sdk/client-s3": "^3.859.0",
    "dotenv": "^16.0.3",
     "mssql": "^9.1.1",
     "samba-client": "^7.2.0"