feat: Implement dedicated data fetchers and utilities for categories, products, and images, and add server compression.
185 index.html
@@ -5,6 +5,11 @@
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Category Tree Viewer</title>

  <!-- Resource hints to optimize loading -->
  <link rel="preload" href="/socket.io/socket.io.js" as="script">
  <link rel="preconnect" href="/api">

  <style>
    * {
      margin: 0;
@@ -421,11 +426,103 @@
  </div>
  <button id="version-reload-btn" onclick="location.reload()">New version - [reload]</button>

  <script src="/socket.io/socket.io.js"></script>
  <script src="/socket.io/socket.io.js" async></script>
  <script>
    const socket = io({
      transports: ['websocket']
    });
    // Initialize socket when io is available (async)
    function initSocket() {
      if (typeof io === 'undefined') {
        // Socket.io not loaded yet, try again soon
        setTimeout(initSocket, 50);
        return;
      }

      const socket = io({
        transports: ['websocket']
      });

      // Version checking
      const clientEtag = document.querySelector('meta[name="app-version"]')?.content;
      const reloadBtn = document.getElementById('version-reload-btn');

      // Socket Events
      socket.on('connect', () => {
        console.log('🔌 Connected to server via WebSocket');
        // Check version on connect and reconnect
        if (clientEtag) {
          socket.emit('checkVersion', clientEtag);
        }
      });

      socket.on('versionMismatch', ({ serverEtag }) => {
        console.log('⚠️ New version available on server');
        if (reloadBtn) {
          reloadBtn.classList.add('show');
        }
      });

      socket.on('categoriesUpdated', () => {
        console.log('🔄 Categories updated, reloading tree...');
        loadCategories();
      });

      socket.on('categoryProductsUpdated', ({ id }) => {
        console.log(`🔄 Products for category ${id} updated, reloading...`);
        updateCategoryProducts(id);
      });

      // Debounce function
      function debounce(func, wait) {
        let timeout;
        return function (...args) {
          clearTimeout(timeout);
          timeout = setTimeout(() => func.apply(this, args), wait);
        };
      }

      const debouncedSearch = debounce((value) => {
        socket.emit('search', value);
      }, 300);

      // Event Listeners
      filterInput.addEventListener('input', (e) => {
        const value = e.target.value;
        state.filter = value;

        // Toggle clear button visibility
        if (value) {
          clearBtn.classList.add('visible');
        } else {
          clearBtn.classList.remove('visible');
        }

        if (value.trim()) {
          debouncedSearch(value);
        } else {
          // Clear matches and collapse all categories
          resetMatches(state.categories);
          resetExpansion(state.categories);
          collapseAllProducts(state.categories);
          render();
        }
      });

      socket.on('searchResults', ({ query, matches }) => {
        if (query !== state.filter) return;

        const matchSet = new Set(matches);

        // Reset expansion and matches
        resetExpansion(state.categories);

        // Mark matches and expand
        markMatches(state.categories, matchSet);

        render();
      });
    }

    // Start socket initialization (async, non-blocking)
    initSocket();

    // State management
    const state = {
@@ -444,72 +541,6 @@
      filterInput.focus();
    });

    // Version checking
    const clientEtag = document.querySelector('meta[name="app-version"]')?.content;
    const reloadBtn = document.getElementById('version-reload-btn');

    // Socket Events
    socket.on('connect', () => {
      console.log('🔌 Connected to server via WebSocket');
      // Check version on connect and reconnect
      if (clientEtag) {
        socket.emit('checkVersion', clientEtag);
      }
    });

    socket.on('versionMismatch', ({ serverEtag }) => {
      console.log('⚠️ New version available on server');
      if (reloadBtn) {
        reloadBtn.classList.add('show');
      }
    });

    socket.on('categoriesUpdated', () => {
      console.log('🔄 Categories updated, reloading tree...');
      loadCategories();
    });

    socket.on('categoryProductsUpdated', ({ id }) => {
      console.log(`🔄 Products for category ${id} updated, reloading...`);
      updateCategoryProducts(id);
    });

    // Debounce function
    function debounce(func, wait) {
      let timeout;
      return function (...args) {
        clearTimeout(timeout);
        timeout = setTimeout(() => func.apply(this, args), wait);
      };
    }

    const debouncedSearch = debounce((value) => {
      socket.emit('search', value);
    }, 300);

    // Event Listeners
    filterInput.addEventListener('input', (e) => {
      const value = e.target.value;
      state.filter = value;

      // Toggle clear button visibility
      if (value) {
        clearBtn.classList.add('visible');
      } else {
        clearBtn.classList.remove('visible');
      }

      if (value.trim()) {
        debouncedSearch(value);
      } else {
        // Clear matches and collapse all categories
        resetMatches(state.categories);
        resetExpansion(state.categories);
        collapseAllProducts(state.categories);
        render();
      }
    });

    // Clear button functionality
    clearBtn.addEventListener('click', () => {
      filterInput.value = '';
@@ -526,20 +557,6 @@
      filterInput.focus();
    });

    socket.on('searchResults', ({ query, matches }) => {
      if (query !== state.filter) return;

      const matchSet = new Set(matches);

      // Reset expansion and matches
      resetExpansion(state.categories);

      // Mark matches and expand
      markMatches(state.categories, matchSet);

      render();
    });

    function resetMatches(nodes) {
      nodes.forEach(node => {
        node._hasMatch = false;
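Note: the debounce helper above coalesces rapid keystrokes into a single 'search' emit. A minimal standalone sketch of the same pattern (names are illustrative, not from this commit):

// Fires once, ~300 ms after the last call, instead of once per call.
function debounce(func, wait) {
  let timeout;
  return function (...args) {
    clearTimeout(timeout);
    timeout = setTimeout(() => func.apply(this, args), wait);
  };
}
const logSearch = debounce((q) => console.log('search:', q), 300);
['s', 'sh', 'sho', 'shoe'].forEach(q => logSearch(q)); // logs only 'search: shoe'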
64 package-lock.json (generated)
@@ -8,6 +8,7 @@
      "name": "category-syncer",
      "version": "1.0.0",
      "dependencies": {
        "compression": "^1.8.1",
        "dotenv": "^17.2.3",
        "express": "^5.1.0",
        "mssql": "^12.1.0",
@@ -1093,6 +1094,60 @@
        "node": ">=16"
      }
    },
    "node_modules/compressible": {
      "version": "2.0.18",
      "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz",
      "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==",
      "license": "MIT",
      "dependencies": {
        "mime-db": ">= 1.43.0 < 2"
      },
      "engines": {
        "node": ">= 0.6"
      }
    },
    "node_modules/compression": {
      "version": "1.8.1",
      "resolved": "https://registry.npmjs.org/compression/-/compression-1.8.1.tgz",
      "integrity": "sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==",
      "license": "MIT",
      "dependencies": {
        "bytes": "3.1.2",
        "compressible": "~2.0.18",
        "debug": "2.6.9",
        "negotiator": "~0.6.4",
        "on-headers": "~1.1.0",
        "safe-buffer": "5.2.1",
        "vary": "~1.1.2"
      },
      "engines": {
        "node": ">= 0.8.0"
      }
    },
    "node_modules/compression/node_modules/debug": {
      "version": "2.6.9",
      "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
      "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
      "license": "MIT",
      "dependencies": {
        "ms": "2.0.0"
      }
    },
    "node_modules/compression/node_modules/ms": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
      "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
      "license": "MIT"
    },
    "node_modules/compression/node_modules/negotiator": {
      "version": "0.6.4",
      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz",
      "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==",
      "license": "MIT",
      "engines": {
        "node": ">= 0.6"
      }
    },
    "node_modules/concat-map": {
      "version": "0.0.1",
      "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -2136,6 +2191,15 @@
        "node": ">= 0.8"
      }
    },
    "node_modules/on-headers": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz",
      "integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==",
      "license": "MIT",
      "engines": {
        "node": ">= 0.8"
      }
    },
    "node_modules/once": {
      "version": "1.4.0",
      "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -10,6 +10,7 @@
    "test": "node index.js"
  },
  "dependencies": {
    "compression": "^1.8.1",
    "dotenv": "^17.2.3",
    "express": "^5.1.0",
    "mssql": "^12.1.0",
@@ -20,4 +21,4 @@
  "devDependencies": {
    "nodemon": "^3.1.11"
  }
}
}
@@ -4,6 +4,7 @@ import { Server } from 'socket.io';
import path from 'path';
import { fileURLToPath } from 'url';
import fs from 'fs/promises';
import compression from 'compression';
import { registerCategories } from './routes/categories.js';
import { registerProducts } from './routes/products.js';
import { registerImages } from './routes/images.js';
@@ -18,6 +19,9 @@ export function startServer(categorySyncer, categoryProductsSyncer) {
  const httpServer = createServer(app);
  const io = new Server(httpServer);

  // Enable gzip compression for all responses
  app.use(compression());

  const PORT = process.env.SERVER_PORT || 3000;
  const HOST = process.env.SERVER_HOST || '0.0.0.0';
  const CACHE_DIR = process.env.CACHE_LOCATION || './cache';
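Note: compression() must be registered before the routes that should pass through it, which is why the diff places it right after the server is created. A minimal standalone sketch; the threshold and level values are illustrative assumptions, not part of this commit:

import express from 'express';
import compression from 'compression';

const app = express();
// Skip payloads under 1 KB (gzip overhead outweighs the gain there) and use a
// mid-range zlib level; both options are supported by the compression package.
app.use(compression({ threshold: 1024, level: 6 }));
app.get('/api/tree', (req, res) => res.json({ categories: [] })); // large JSON benefits most
app.listen(3000);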
141 src/services/category-data-fetcher.js (new file)
@@ -0,0 +1,141 @@
import { createConnection } from '../utils/database.js';

/**
 * CategoryDataFetcher - Handles all category-related database queries
 */
export class CategoryDataFetcher {
  /**
   * Fetch all categories with hierarchy information
   * @returns {Promise<Array>} - Array of category records
   */
  async fetchCategories() {
    let pool;
    try {
      pool = await createConnection();
      const result = await pool.request().query(`
        SELECT kKategorie, kOberKategorie, nSort
        FROM tkategorie
        ORDER BY nSort, kKategorie
      `);
      return result.recordset;
    } finally {
      if (pool) await pool.close();
    }
  }

  /**
   * Fetch category names for a specific language and shop
   * @returns {Promise<Array>} - Array of name records
   */
  async fetchCategoryNames() {
    let pool;
    try {
      pool = await createConnection();
      const result = await pool.request().query(`
        SELECT kKategorie, cName
        FROM tKategorieSprache
        WHERE kSprache = ${process.env.JTL_SPRACHE_ID}
          AND kShop = ${process.env.JTL_SHOP_ID}
      `);
      return result.recordset;
    } finally {
      if (pool) await pool.close();
    }
  }

  /**
   * Fetch article counts per category
   * @returns {Promise<Array>} - Array of count records
   */
  async fetchArticleCounts() {
    let pool;
    try {
      pool = await createConnection();
      const result = await pool.request().query(`
        SELECT ka.kKategorie, COUNT(a.kArtikel) as count
        FROM tkategorieartikel ka
        JOIN tArtikel a ON ka.kArtikel = a.kArtikel
        WHERE a.cAktiv = 'Y'
        GROUP BY ka.kKategorie
      `);
      return result.recordset;
    } finally {
      if (pool) await pool.close();
    }
  }

  /**
   * Fetch category images (first image per category)
   * @returns {Promise<Array>} - Array of image records
   */
  async fetchCategoryImages() {
    let pool;
    try {
      pool = await createConnection();
      const result = await pool.request().query(`
        SELECT kKategorie, kBild
        FROM (
          SELECT kKategorie, kBild, ROW_NUMBER() OVER (PARTITION BY kKategorie ORDER BY nNr ASC) as rn
          FROM tKategoriebildPlattform
          WHERE kShop = ${process.env.JTL_SHOP_ID}
            AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
        ) t
        WHERE rn = 1
      `);
      return result.recordset;
    } finally {
      if (pool) await pool.close();
    }
  }

  /**
   * Fetch all category data at once
   * @returns {Promise<Object>} - Object with categories, names, articleCounts, images
   */
  async fetchAllCategoryData() {
    let pool;
    try {
      pool = await createConnection();

      const [categoriesResult, namesResult, articleCountsResult, imagesResult] = await Promise.all([
        pool.request().query(`
          SELECT kKategorie, kOberKategorie, nSort
          FROM tkategorie
          ORDER BY nSort, kKategorie
        `),
        pool.request().query(`
          SELECT kKategorie, cName
          FROM tKategorieSprache
          WHERE kSprache = ${process.env.JTL_SPRACHE_ID}
            AND kShop = ${process.env.JTL_SHOP_ID}
        `),
        pool.request().query(`
          SELECT ka.kKategorie, COUNT(a.kArtikel) as count
          FROM tkategorieartikel ka
          JOIN tArtikel a ON ka.kArtikel = a.kArtikel
          WHERE a.cAktiv = 'Y'
          GROUP BY ka.kKategorie
        `),
        pool.request().query(`
          SELECT kKategorie, kBild
          FROM (
            SELECT kKategorie, kBild, ROW_NUMBER() OVER (PARTITION BY kKategorie ORDER BY nNr ASC) as rn
            FROM tKategoriebildPlattform
            WHERE kShop = ${process.env.JTL_SHOP_ID}
              AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
          ) t
          WHERE rn = 1
        `)
      ]);

      return {
        categories: categoriesResult.recordset,
        names: namesResult.recordset,
        articleCounts: articleCountsResult.recordset,
        images: imagesResult.recordset
      };
    } finally {
      if (pool) await pool.close();
    }
  }
}
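Note: a hypothetical caller of the fetcher above; fetchAllCategoryData() runs the four queries concurrently on one pooled connection via Promise.all:

import { CategoryDataFetcher } from './src/services/category-data-fetcher.js';

const fetcher = new CategoryDataFetcher();
const { categories, names, articleCounts, images } = await fetcher.fetchAllCategoryData();
console.log(`${categories.length} categories, ${names.length} names, ${articleCounts.length} with articles, ${images.length} with images`);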
40 src/services/image-data-fetcher.js (new file)
@@ -0,0 +1,40 @@
import { createConnection } from '../utils/database.js';
import { processInChunks, createInClause } from '../utils/database-utils.js';

/**
 * ImageDataFetcher - Handles all image-related database queries
 */
export class ImageDataFetcher {
  /**
   * Fetch image binary data for given image IDs
   * @param {Array<number>} imageIds - Image IDs to fetch
   * @param {Function} imageCallback - Callback for each image (receives {kBild, bBild})
   * @param {number} chunkSize - Size of each chunk (default: 50)
   * @returns {Promise<void>}
   */
  async fetchImagesInChunks(imageIds, imageCallback, chunkSize = 50) {
    let pool;
    try {
      pool = await createConnection();

      await processInChunks(imageIds, chunkSize, async (chunk) => {
        const list = createInClause(chunk);

        const result = await pool.request().query(`
          SELECT kBild, bBild
          FROM tBild
          WHERE kBild IN (${list})
        `);

        for (const record of result.recordset) {
          if (record.bBild) {
            await imageCallback(record);
          }
        }
      }, { showProgress: true, itemName: 'images' });

    } finally {
      if (pool) await pool.close();
    }
  }
}
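Note: an illustrative caller; the IDs and target path are made up. picture-syncer (further down) uses the same callback to pipe each blob through sharp instead of writing it raw:

import fs from 'fs/promises';
import path from 'path';
import { ImageDataFetcher } from './src/services/image-data-fetcher.js';

const fetcher = new ImageDataFetcher();
await fetcher.fetchImagesInChunks([101, 102, 103], async ({ kBild, bBild }) => {
  await fs.writeFile(path.join('cache', 'img', `${kBild}.bin`), bBild); // raw blob
});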
109 src/services/product-data-fetcher.js (new file)
@@ -0,0 +1,109 @@
import { createConnection } from '../utils/database.js';
import { processInChunks, createInClause } from '../utils/database-utils.js';

/**
 * ProductDataFetcher - Handles all product-related database queries
 */
export class ProductDataFetcher {
  /**
   * Fetch products for given category IDs
   * @param {Array<number>} categoryIds - Category IDs to fetch products for
   * @param {Object} pool - Database connection pool
   * @returns {Promise<Array>} - Array of product records with kKategorie, kArtikel, cName
   */
  async fetchProductsForCategories(categoryIds, pool) {
    const list = createInClause(categoryIds);

    const result = await pool.request().query(`
      SELECT
        ka.kKategorie,
        ka.kArtikel,
        ab.cName
      FROM tkategorieartikel ka
      JOIN tArtikelBeschreibung ab ON ka.kArtikel = ab.kArtikel
      JOIN tArtikel a ON ka.kArtikel = a.kArtikel
      WHERE ab.kSprache = ${process.env.JTL_SPRACHE_ID}
        AND a.cAktiv = 'Y'
        AND ab.kPlattform = ${process.env.JTL_PLATTFORM_ID}
        AND ab.kShop = ${process.env.JTL_SHOP_ID}
        AND ka.kKategorie IN (${list})
      ORDER BY (
        CASE
          WHEN a.bRowversion >= ab.bRowversion AND a.bRowversion >= ka.bRowversion THEN a.bRowversion
          WHEN ab.bRowversion >= a.bRowversion AND ab.bRowversion >= ka.bRowversion THEN ab.bRowversion
          ELSE ka.bRowversion
        END
      ) DESC
    `);

    return result.recordset;
  }

  /**
   * Fetch product images for given article IDs
   * @param {Array<number>} articleIds - Article IDs to fetch images for
   * @param {Object} pool - Database connection pool
   * @returns {Promise<Map>} - Map of kArtikel -> array of kBild
   */
  async fetchProductImages(articleIds, pool) {
    const list = createInClause(articleIds);

    const result = await pool.request().query(`
      SELECT kArtikel, kBild
      FROM tArtikelbildPlattform
      WHERE kShop = ${process.env.JTL_SHOP_ID}
        AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
        AND kArtikel IN (${list})
      ORDER BY nNr ASC
    `);

    const productImages = new Map();
    result.recordset.forEach(r => {
      if (!productImages.has(r.kArtikel)) {
        productImages.set(r.kArtikel, []);
      }
      productImages.get(r.kArtikel).push(r.kBild);
    });

    return productImages;
  }

  /**
   * Fetch products with images for categories in chunks
   * @param {Array<number>} categoryIds - All category IDs to process
   * @param {Function} chunkCallback - Callback for each chunk (receives chunk data)
   * @param {number} chunkSize - Size of each chunk (default: 50)
   * @returns {Promise<void>}
   */
  async fetchProductsInChunks(categoryIds, chunkCallback, chunkSize = 50) {
    let pool;
    try {
      pool = await createConnection();

      await processInChunks(categoryIds, chunkSize, async (chunk) => {
        // Fetch products for this chunk
        const products = await this.fetchProductsForCategories(chunk, pool);

        // Collect all article IDs
        const articleIds = new Set();
        products.forEach(p => articleIds.add(p.kArtikel));

        // Fetch images for these articles
        let productImages = new Map();
        if (articleIds.size > 0) {
          productImages = await this.fetchProductImages(Array.from(articleIds), pool);
        }

        // Call the callback with chunk data
        await chunkCallback({
          categoryIds: chunk,
          products,
          productImages
        });
      }, { showProgress: false, itemName: 'categories' });

    } finally {
      if (pool) await pool.close();
    }
  }
}
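Note: an illustrative caller of fetchProductsInChunks; each callback invocation receives one chunk of category IDs, their products, and a kArtikel -> kBild[] map:

import { ProductDataFetcher } from './src/services/product-data-fetcher.js';

const fetcher = new ProductDataFetcher();
await fetcher.fetchProductsInChunks([1, 2, 3], async ({ categoryIds, products, productImages }) => {
  for (const p of products) {
    const images = productImages.get(p.kArtikel) || [];
    console.log(`category ${p.kKategorie}: ${p.cName} (${images.length} images)`);
  }
});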
@@ -1,7 +1,9 @@
import fs from 'fs/promises';
import path from 'path';
import { EventEmitter } from 'events';
import { createConnection } from '../utils/database.js';
import { SyncQueueManager } from '../utils/sync-queue-manager.js';
import { ProductDataFetcher } from '../services/product-data-fetcher.js';
import { getExistingIds, deleteObsoleteFiles, writeJsonIfChanged, ensureDir } from '../utils/file-sync-utils.js';
import pictureSyncer from './picture-syncer.js';

class CategoryProductsSyncer extends EventEmitter {
@@ -10,38 +12,18 @@ class CategoryProductsSyncer extends EventEmitter {
    if (CategoryProductsSyncer.instance) {
      return CategoryProductsSyncer.instance;
    }
    this.cacheBaseDir = process.env.CACHE_LOCATION || '.';

    // Track syncing state
    this.isSyncing = false;
    this.queuedCategoryIds = null;
    this.syncQueue = new SyncQueueManager();
    this.dataFetcher = new ProductDataFetcher();
    this.cacheBaseDir = process.env.CACHE_LOCATION || '.';

    CategoryProductsSyncer.instance = this;
  }

  async syncProducts(categoryIds) {
    // Check if already syncing
    if (this.isSyncing) {
      console.log('⏳ CategoryProductsSyncer is busy. Queuing sync...');
      this.queuedCategoryIds = categoryIds;
      return;
    }

    this.isSyncing = true;
    try {
    await this.syncQueue.executeSync('category-products-sync', async () => {
      await this._performSync(categoryIds);
    } catch (err) {
      console.error('❌ Error syncing products:', err);
    } finally {
      this.isSyncing = false;
      // Process queued sync if exists
      if (this.queuedCategoryIds) {
        const nextIds = this.queuedCategoryIds;
        this.queuedCategoryIds = null;
        // Use setTimeout to allow event loop to breathe
        setTimeout(() => this.syncProducts(nextIds), 0);
      }
    }
    }, categoryIds);
  }

  async _performSync(categoryIds) {
@@ -49,37 +31,26 @@ class CategoryProductsSyncer extends EventEmitter {
    const productsDir = path.join(this.cacheBaseDir, 'products');

    // Ensure directory exists
    await fs.mkdir(productsDir, { recursive: true });
    await ensureDir(productsDir);

    // Get existing files
    let existingFiles = [];
    try {
      existingFiles = await fs.readdir(productsDir);
    } catch (err) {
      // Directory might be empty or new
    }

    // Filter for category json files (assuming we save as category_{id}.json)
    const existingIds = existingFiles
      .filter(f => f.startsWith('category_') && f.endsWith('.json'))
      .map(f => parseInt(f.replace('category_', '').replace('.json', '')));
    const existingIds = await getExistingIds(productsDir, {
      prefix: 'category_',
      suffix: '.json'
    });

    const validIds = new Set(categoryIds.filter(id => id !== null && id !== undefined));

    // 1. Delete obsolete category files
    const toDelete = existingIds.filter(id => !validIds.has(id));
    for (const id of toDelete) {
      const filePath = path.join(productsDir, `category_${id}.json`);
      await fs.unlink(filePath);
    }
    if (toDelete.length > 0) {
      console.log(`🗑️ Deleted ${toDelete.length} obsolete product lists.`);
    }
    // Delete obsolete category files
    await deleteObsoleteFiles(
      productsDir,
      existingIds,
      validIds,
      (id) => `category_${id}.json`
    );

    // 2. Update/Create product lists for all valid categories
    // We update all because product assignments might have changed even if category exists
    // Update/Create product lists for all valid categories
    if (validIds.size > 0) {
      //console.log(`📦 Syncing products for ${validIds.size} categories...`);
      await this._fetchAndWriteProducts([...validIds], productsDir);
    } else {
      console.log(`✅ No categories to sync products for.`);
@@ -90,128 +61,49 @@ class CategoryProductsSyncer extends EventEmitter {
  }

  async _fetchAndWriteProducts(ids, dir) {
    let pool;
    const globalImageIds = new Set();

    try {
      pool = await createConnection();
    await this.dataFetcher.fetchProductsInChunks(ids, async (chunkData) => {
      const { categoryIds, products, productImages } = chunkData;

      // Process in chunks to avoid huge queries
      const chunkSize = 50;
      for (let i = 0; i < ids.length; i += chunkSize) {
        const chunk = ids.slice(i, i + chunkSize);
        const list = chunk.join(',');
      // Group results by kKategorie
      const productsByCategory = {};

        // Fetch products for this chunk of categories
        // We need kArtikel and cName, ordered by bRowversion descending
        const result = await pool.request().query(`
          SELECT
            ka.kKategorie,
            ka.kArtikel,
            ab.cName
          FROM tkategorieartikel ka
          JOIN tArtikelBeschreibung ab ON ka.kArtikel = ab.kArtikel
          JOIN tArtikel a ON ka.kArtikel = a.kArtikel
          WHERE ab.kSprache = ${process.env.JTL_SPRACHE_ID}
            AND a.cAktiv = 'Y'
            AND ab.kPlattform = ${process.env.JTL_PLATTFORM_ID}
            AND ab.kShop = ${process.env.JTL_SHOP_ID}
            AND ka.kKategorie IN (${list})
          ORDER BY (
            CASE
              WHEN a.bRowversion >= ab.bRowversion AND a.bRowversion >= ka.bRowversion THEN a.bRowversion
              WHEN ab.bRowversion >= a.bRowversion AND ab.bRowversion >= ka.bRowversion THEN ab.bRowversion
              ELSE ka.bRowversion
            END
          ) DESC
        `);
      // Initialize arrays for all requested IDs (so we create empty files for empty categories)
      categoryIds.forEach(id => {
        productsByCategory[id] = [];
      });

        // Collect all kArtikel IDs to fetch images
        const artikelIds = new Set();
        result.recordset.forEach(r => artikelIds.add(r.kArtikel));
      for (const record of products) {
        if (productsByCategory[record.kKategorie]) {
          const images = productImages.get(record.kArtikel) || [];
          images.forEach(imgId => globalImageIds.add(imgId));

        // Fetch images for these articles
        let productImages = new Map(); // kArtikel -> kBild[]
        if (artikelIds.size > 0) {
          const artikelList = Array.from(artikelIds).join(',');
          const imagesResult = await pool.request().query(`
            SELECT kArtikel, kBild
            FROM tArtikelbildPlattform
            WHERE kShop = ${process.env.JTL_SHOP_ID}
              AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
              AND kArtikel IN (${artikelList})
            ORDER BY nNr ASC
          `);

          imagesResult.recordset.forEach(r => {
            if (!productImages.has(r.kArtikel)) {
              productImages.set(r.kArtikel, []);
            }
            productImages.get(r.kArtikel).push(r.kBild);
          productsByCategory[record.kKategorie].push({
            kArtikel: record.kArtikel,
            cName: record.cName,
            images: images
          });
        }

        // Group results by kKategorie
        const productsByCategory = {};

        // Initialize arrays for all requested IDs (so we create empty files for empty categories)
        chunk.forEach(id => {
          productsByCategory[id] = [];
        });

        for (const record of result.recordset) {
          if (productsByCategory[record.kKategorie]) {
            const images = productImages.get(record.kArtikel) || [];
            images.forEach(imgId => globalImageIds.add(imgId));

            productsByCategory[record.kKategorie].push({
              kArtikel: record.kArtikel,
              cName: record.cName,
              images: images
            });
          }
        }

        // Write files
        for (const catId of chunk) {
          const filePath = path.join(dir, `category_${catId}.json`);
          const products = productsByCategory[catId] || [];
          const newContent = JSON.stringify(products, null, 2);

          // Check for changes
          let oldContent = '';
          try {
            oldContent = await fs.readFile(filePath, 'utf-8');
          } catch (e) {
            // File doesn't exist yet
          }

          if (oldContent !== newContent) {
            await fs.writeFile(filePath, newContent);
            this.emit('categoryUpdated', { id: catId, products });
          }
        }

        const processed = Math.min(i + chunkSize, ids.length);
        if (processed === ids.length) {
          //console.log(`✅ Processed products for ${processed}/${ids.length} categories.`);
        } else {
          //console.log(`⏳ Processed products for ${processed}/${ids.length} categories...`);
        }
      }

      // Sync all collected images at once
      if (globalImageIds.size > 0) {
        //console.log(`🖼️ Syncing ${globalImageIds.size} product images...`);
        await pictureSyncer.syncImages(Array.from(globalImageIds), 'products');
      }
      // Write files
      for (const catId of categoryIds) {
        const filePath = path.join(dir, `category_${catId}.json`);
        const categoryProducts = productsByCategory[catId] || [];

    } catch (err) {
      console.error('❌ Error fetching products:', err);
    } finally {
      if (pool) {
        await pool.close();
        const changed = await writeJsonIfChanged(filePath, categoryProducts);

        if (changed) {
          this.emit('categoryUpdated', { id: catId, products: categoryProducts });
        }
      }
    });

    // Sync all collected images at once
    if (globalImageIds.size > 0) {
      //console.log(`🖼️ Syncing ${globalImageIds.size} product images...`);
      await pictureSyncer.syncImages(Array.from(globalImageIds), 'products');
    }
  }
}
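Note: SyncQueueManager itself is not part of this diff. Judging from the call sites, executeSync(key, fn, payload) with busy runs queued and replayed, it serializes syncs per key and keeps at most one pending run. A sketch under those assumptions; the real src/utils/sync-queue-manager.js may differ:

export class SyncQueueManager {
  constructor() {
    this.running = new Set(); // keys with an active run
    this.queued = new Map();  // key -> latest { fn, payload }
  }

  async executeSync(key, fn, payload) {
    if (this.running.has(key)) {
      this.queued.set(key, { fn, payload }); // keep only the newest request
      return;
    }
    this.running.add(key);
    try {
      await fn(payload);
    } finally {
      this.running.delete(key);
      const next = this.queued.get(key);
      if (next) {
        this.queued.delete(key);
        setImmediate(() => this.executeSync(key, next.fn, next.payload));
      }
    }
  }
}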
@@ -1,7 +1,10 @@
import { EventEmitter } from 'events';
import fs from 'fs/promises';
import path from 'path';
import { createConnection } from '../utils/database.js';
import { SyncQueueManager } from '../utils/sync-queue-manager.js';
import { CategoryDataFetcher } from '../services/category-data-fetcher.js';
import { buildTree, pruneTree, buildTranslationTemplate, formatTranslationTemplate } from '../utils/category-tree-utils.js';
import { readTextFile } from '../utils/file-sync-utils.js';

class CategorySyncer extends EventEmitter {
  constructor() {
@@ -10,8 +13,8 @@ class CategorySyncer extends EventEmitter {
      return CategorySyncer.instance;
    }

    this.isSyncing = false;
    this.queuedSync = false;
    this.syncQueue = new SyncQueueManager();
    this.dataFetcher = new CategoryDataFetcher();
    this.cacheDir = process.env.CACHE_LOCATION || '.';
    this.lastTreeString = null;
    this.lastTemplateString = null;
@@ -23,40 +26,21 @@ class CategorySyncer extends EventEmitter {
  }

  async _loadExistingTemplate() {
    try {
      const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
      this.lastTemplateString = await fs.readFile(templatePath, 'utf-8');
    } catch (err) {
      // File doesn't exist yet, that's fine
    }
    const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
    this.lastTemplateString = await readTextFile(templatePath);

    try {
      const treePath = path.join(this.cacheDir, 'category_tree.json');
      const treeContent = await fs.readFile(treePath, 'utf-8');
      this.lastTreeString = treeContent;
    } catch (err) {
      // File doesn't exist yet, that's fine
    }
    const treePath = path.join(this.cacheDir, 'category_tree.json');
    this.lastTreeString = await readTextFile(treePath);
  }

  async triggerSync() {
    if (this.isSyncing) {
      if (this.queuedSync) {
        console.log('🚫 Sync already in progress and next sync already queued. Ignoring.');
        return;
      }
      console.log('⏳ Sync already in progress. Queuing next sync.');
      this.queuedSync = true;
      return;
    }

    await this._doSync();
    await this.syncQueue.executeSync('category-sync', async () => {
      await this._doSync();
    });
  }

  async _doSync() {
    this.isSyncing = true;
    const startTime = Date.now();
    //console.log('🚀 Starting sync...');

    try {
      await this._syncFromDb();
@@ -68,247 +52,53 @@ class CategorySyncer extends EventEmitter {
      //console.log(`✅ Sync completed in ${duration}ms. Next sync in ${minutes} minute${minutes !== 1 ? 's' : ''}`);
    } catch (err) {
      console.error('❌ Sync failed:', err);
    } finally {
      this.isSyncing = false;
      if (this.queuedSync) {
        console.log('🔄 Processing queued sync...');
        this.queuedSync = false;
        // Use setImmediate to allow stack to clear/event loop to tick
        setImmediate(() => this.triggerSync());
      }
    }
  }

  async _syncFromDb() {
    let pool;
    try {
      pool = await createConnection();
    // Fetch all category data
    const { categories, names, articleCounts, images } = await this.dataFetcher.fetchAllCategoryData();

      // Fetch categories
      const categoriesResult = await pool.request().query(`
        SELECT kKategorie, kOberKategorie, nSort
        FROM tkategorie
        ORDER BY nSort, kKategorie
      `);
    // Build tree with ROOT_CATEGORY_ID filter (if set)
    let tree = buildTree(categories, names, articleCounts, images, true);

      // Fetch names
      const namesResult = await pool.request().query(`
        SELECT kKategorie, cName
        FROM tKategorieSprache
        WHERE kSprache = ${process.env.JTL_SPRACHE_ID} AND kShop = ${process.env.JTL_SHOP_ID}
      `);
    // Deep copy tree for unpruned version (before pruning modifies it)
    const unprunedTree = JSON.parse(JSON.stringify(tree));

      // Fetch article counts
      const articleCountsResult = await pool.request().query(`
        SELECT ka.kKategorie, COUNT(a.kArtikel) as count
        FROM tkategorieartikel ka
        JOIN tArtikel a ON ka.kArtikel = a.kArtikel
        WHERE a.cAktiv = 'Y'
        GROUP BY ka.kKategorie
      `);
    // Generate translation template BEFORE pruning (to include all categories)
    const translationTemplate = buildTranslationTemplate(tree);
    const templateString = formatTranslationTemplate(translationTemplate);

      // Fetch images (kBild)
      const imagesResult = await pool.request().query(`
        SELECT kKategorie, kBild
        FROM (
          SELECT kKategorie, kBild, ROW_NUMBER() OVER (PARTITION BY kKategorie ORDER BY nNr ASC) as rn
          FROM tKategoriebildPlattform
          WHERE kShop = ${process.env.JTL_SHOP_ID} AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
        ) t
        WHERE rn = 1
      `);
    // Now prune for the main tree
    tree = pruneTree(tree);

      const categories = categoriesResult.recordset;
      const names = namesResult.recordset;
      const articleCounts = articleCountsResult.recordset;
      const images = imagesResult.recordset;
    // Ensure directory exists
    await fs.mkdir(this.cacheDir, { recursive: true });

      // Build tree with ROOT_CATEGORY_ID filter (if set)
      // This gives us the subtree we're interested in
      let tree = this._buildTree(categories, names, articleCounts, images, true);
    // Compare pruned tree
    const treeString = JSON.stringify(tree, null, 2);
    const changed = this.lastTreeString !== treeString;

      // Deep copy tree for unpruned version (before pruning modifies it)
      const unprunedTree = JSON.parse(JSON.stringify(tree));

      // Generate translation template BEFORE pruning (to include all categories)
      const translationTemplate = this._buildTranslationTemplate(tree);
      const templateString = this._formatTranslationTemplate(translationTemplate);

      // Now prune for the main tree
      tree = this._pruneTree(tree);

      // Ensure directory exists
      await fs.mkdir(this.cacheDir, { recursive: true });

      // Compare pruned tree
      const treeString = JSON.stringify(tree, null, 2);
      const changed = this.lastTreeString !== treeString;

      if (changed) {
        // Save template if it changed
        if (this.lastTemplateString !== templateString) {
          const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
          await fs.writeFile(templatePath, templateString);
          console.log(`💾 Translation template saved to ${templatePath}`);
          this.lastTemplateString = templateString;
        }

        const filePath = path.join(this.cacheDir, 'category_tree.json');
        await fs.writeFile(filePath, treeString);
        console.log(`💾 Category tree saved to ${filePath}`);

        this.lastTreeString = treeString;
        console.log('📢 Tree updated.');
      } else {
        //console.log('🤷 No changes detected in category tree.');
    if (changed) {
      // Save template if it changed
      if (this.lastTemplateString !== templateString) {
        const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
        await fs.writeFile(templatePath, templateString);
        console.log(`💾 Translation template saved to ${templatePath}`);
        this.lastTemplateString = templateString;
      }

      this.emit('synced', { tree, unprunedTree, changed });
      const filePath = path.join(this.cacheDir, 'category_tree.json');
      await fs.writeFile(filePath, treeString);
      console.log(`💾 Category tree saved to ${filePath}`);

    } finally {
      if (pool) {
        await pool.close();
      }
    }
  }

  _buildTree(categories, names, articleCounts, images, applyRootFilter = true) {
    // Create a map for quick lookup of names
    const nameMap = new Map();
    names.forEach(n => nameMap.set(n.kKategorie, n.cName));

    // Create a map for article counts
    const countMap = new Map();
    articleCounts.forEach(c => countMap.set(c.kKategorie, c.count));

    // Create a map for images
    const imageMap = new Map();
    images.forEach(i => imageMap.set(i.kKategorie, i.kBild));

    // Create a map for category nodes
    const categoryMap = new Map();

    // Initialize all nodes
    categories.forEach(cat => {
      categoryMap.set(cat.kKategorie, {
        kKategorie: cat.kKategorie,
        cName: nameMap.get(cat.kKategorie) || `Unknown (${cat.kKategorie})`, // Fallback if name missing
        articleCount: countMap.get(cat.kKategorie) || 0,
        kBild: imageMap.get(cat.kKategorie) || null,
        children: [],
        nSort: cat.nSort || 0 // Store nSort temporarily
      });
    });

    const rootNodes = [];

    // Parse excluded IDs
    const excludedIds = new Set(
      (process.env.EXCLUDE_CATEGORY_IDS || '')
        .split(',')
        .map(id => parseInt(id.trim()))
        .filter(id => !isNaN(id))
    );

    // Build hierarchy
    categories.forEach(cat => {
      // Skip if excluded
      if (excludedIds.has(cat.kKategorie)) return;

      const node = categoryMap.get(cat.kKategorie);
      if (cat.kOberKategorie === 0) {
        rootNodes.push(node);
      } else {
        const parent = categoryMap.get(cat.kOberKategorie);
        if (parent) {
          parent.children.push(node);
        } else {
          // Handle orphan nodes if necessary, or ignore
          // console.warn(`Orphan category found: ${cat.kKategorie}`);
        }
      }
    });

    const rootId = process.env.ROOT_CATEGORY_ID ? parseInt(process.env.ROOT_CATEGORY_ID) : null;
    let resultNodes = rootNodes;

    if (rootId && applyRootFilter) {
      if (excludedIds.has(rootId)) {
        resultNodes = [];
      } else {
        const specificRoot = categoryMap.get(rootId);
        // Return the children of the specified root, not the root itself
        resultNodes = specificRoot ? specificRoot.children : [];
      }
      this.lastTreeString = treeString;
      console.log('📢 Tree updated.');
    } else {
      //console.log('🤷 No changes detected in category tree.');
    }

    // Sort children and remove nSort
    for (const node of categoryMap.values()) {
      node.children.sort((a, b) => a.nSort - b.nSort || a.kKategorie - b.kKategorie);
    }

    // Sort root nodes if returning multiple
    resultNodes.sort((a, b) => a.nSort - b.nSort || a.kKategorie - b.kKategorie);

    // Remove nSort property from all nodes
    for (const node of categoryMap.values()) {
      delete node.nSort;
    }

    return resultNodes;
  }

  _pruneTree(nodes) {
    // Filter out nodes that are empty (no articles) and have no valid children
    return nodes.filter(node => {
      // Recursively prune children
      if (node.children && node.children.length > 0) {
        node.children = this._pruneTree(node.children);
      }

      // Keep node if it has articles OR has remaining children
      const hasArticles = node.articleCount > 0;
      const hasChildren = node.children && node.children.length > 0;

      return hasArticles || hasChildren;
    });
  }

  _buildTranslationTemplate(nodes) {
    return nodes.map(node => {
      const result = { name: node.cName };
      if (node.children && node.children.length > 0) {
        result.children = this._buildTranslationTemplate(node.children);
      }
      return result;
    });
  }

  _formatTranslationTemplate(nodes, indent = 0) {
    const spaces = ' '.repeat(indent);
    const innerSpaces = ' '.repeat(indent + 1);

    if (nodes.length === 0) return '[]';

    const lines = ['['];

    nodes.forEach((node, index) => {
      const isLast = index === nodes.length - 1;

      if (node.children && node.children.length > 0) {
        // Node with children - multi-line format
        lines.push(`${innerSpaces}{`);
        lines.push(`${innerSpaces} "name": "${node.name}",`);
        lines.push(`${innerSpaces} "children": ${this._formatTranslationTemplate(node.children, indent + 2)}`);
        lines.push(`${innerSpaces}}${isLast ? '' : ','}`);
      } else {
        // Leaf node - single line format
        lines.push(`${innerSpaces}{ "name": "${node.name}" }${isLast ? '' : ','}`);
      }
    });

    lines.push(`${spaces}]`);
    return lines.join('\n');
    this.emit('synced', { tree, unprunedTree, changed });
  }
}
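Note: readTextFile replaces the repeated try/readFile/ignore blocks in _loadExistingTemplate, but its implementation sits outside the shown part of file-sync-utils.js. From the call sites it plausibly returns the file content, or null when the file cannot be read yet; an assumed sketch:

import fs from 'fs/promises';

export async function readTextFile(filePath) {
  try {
    return await fs.readFile(filePath, 'utf-8');
  } catch (err) {
    return null; // file doesn't exist yet, which is fine on first run
  }
}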
@@ -1,87 +1,51 @@
import fs from 'fs/promises';
import path from 'path';
import sharp from 'sharp';
import { createConnection } from '../utils/database.js';
import { SyncQueueManager } from '../utils/sync-queue-manager.js';
import { ImageDataFetcher } from '../services/image-data-fetcher.js';
import { getExistingIds, deleteObsoleteFiles, ensureDir } from '../utils/file-sync-utils.js';

class PictureSyncer {
  constructor() {
    if (PictureSyncer.instance) {
      return PictureSyncer.instance;
    }
    this.cacheBaseDir = process.env.CACHE_LOCATION || '.';

    // Track syncing state per group
    this.isSyncing = new Map(); // groupName -> boolean
    this.queuedSyncs = new Map(); // groupName -> { imageIds, groupName }
    this.syncQueue = new SyncQueueManager();
    this.dataFetcher = new ImageDataFetcher();
    this.cacheBaseDir = process.env.CACHE_LOCATION || '.';

    PictureSyncer.instance = this;
  }

  async syncImages(imageIds, groupName) {
    // Check if already syncing this group
    if (this.isSyncing.get(groupName)) {
      if (this.queuedSyncs.has(groupName)) {
        console.log(`🚫 Image sync for '${groupName}' already in progress and queued. Ignoring.`);
        return;
      }
      console.log(`⏳ Image sync for '${groupName}' already in progress. Queuing.`);
      this.queuedSyncs.set(groupName, { imageIds, groupName });
      return;
    }

    await this._doSync(imageIds, groupName);
  }

  async _doSync(imageIds, groupName) {
    this.isSyncing.set(groupName, true);

    try {
    await this.syncQueue.executeSync(groupName, async () => {
      await this._performSync(imageIds, groupName);
    } finally {
      this.isSyncing.set(groupName, false);

      // Process queued sync for this group if any
      if (this.queuedSyncs.has(groupName)) {
        console.log(`🔄 Processing queued image sync for '${groupName}'...`);
        const queued = this.queuedSyncs.get(groupName);
        this.queuedSyncs.delete(groupName);
        setImmediate(() => this.syncImages(queued.imageIds, queued.groupName));
      }
    }
    }, { imageIds, groupName });
  }

  async _performSync(imageIds, groupName) {
    const groupDir = path.join(this.cacheBaseDir, 'img', groupName);

    // Ensure directory exists
    await fs.mkdir(groupDir, { recursive: true });
    await ensureDir(groupDir);

    // Get existing files
    let existingFiles = [];
    try {
      existingFiles = await fs.readdir(groupDir);
    } catch (err) {
      // Directory might be empty or new
    }

    // Filter for image files (assuming we save as {id}.avif)
    const existingIds = existingFiles
      .filter(f => f.endsWith('.avif'))
      .map(f => parseInt(f.replace('.avif', '')));
    const existingIds = await getExistingIds(groupDir, {
      suffix: '.avif'
    });

    const validIds = new Set(imageIds.filter(id => id !== null && id !== undefined));

    // 1. Delete obsolete images
    const toDelete = existingIds.filter(id => !validIds.has(id));
    for (const id of toDelete) {
      const filePath = path.join(groupDir, `${id}.avif`);
      await fs.unlink(filePath);
    }
    if (toDelete.length > 0) {
      console.log(`🗑️ Deleted ${toDelete.length} obsolete images.`);
    }
    // Delete obsolete images
    await deleteObsoleteFiles(
      groupDir,
      existingIds,
      validIds,
      (id) => `${id}.avif`
    );

    // 2. Download missing images
    // Download missing images
    const toDownload = imageIds.filter(id => id !== null && id !== undefined && !existingIds.includes(id));

    if (toDownload.length > 0) {
@@ -93,49 +57,17 @@ class PictureSyncer {
  }

  async _downloadImages(ids, dir) {
    let pool;
    try {
      pool = await createConnection();

      // Process in chunks to avoid huge queries
      const chunkSize = 50;
      for (let i = 0; i < ids.length; i += chunkSize) {
        const chunk = ids.slice(i, i + chunkSize);
        const list = chunk.join(',');

        const result = await pool.request().query(`
          SELECT kBild, bBild
          FROM tBild
          WHERE kBild IN (${list})
        `);

        for (const record of result.recordset) {
          if (record.bBild) {
            const filePath = path.join(dir, `${record.kBild}.avif`);
            // Resize to 130x130 and convert to AVIF using sharp
            await sharp(record.bBild)
              .resize(130, 130, {
                fit: 'cover',
                position: 'center'
              })
              .avif({ quality: 80 })
              .toFile(filePath);
          }
        }
        const processed = Math.min(i + chunkSize, ids.length);
        if (processed === ids.length) {
          console.log(`✅ Processed ${processed}/${ids.length} images.`);
        } else {
          console.log(`⏳ Processed ${processed}/${ids.length} images...`);
        }
      }
    } catch (err) {
      console.error('❌ Error downloading images:', err);
    } finally {
      if (pool) {
        await pool.close();
      }
    }
    await this.dataFetcher.fetchImagesInChunks(ids, async (record) => {
      const filePath = path.join(dir, `${record.kBild}.avif`);
      // Resize to 130x130 and convert to AVIF using sharp
      await sharp(record.bBild)
        .resize(130, 130, {
          fit: 'cover',
          position: 'center'
        })
        .avif({ quality: 80 })
        .toFile(filePath);
    });
  }
}
166 src/utils/category-tree-utils.js (new file)
@@ -0,0 +1,166 @@
/**
 * Category tree manipulation utilities
 */

/**
 * Parse excluded category IDs from environment variable
 * @returns {Set<number>} - Set of excluded category IDs
 */
export function parseExcludedIds() {
  return new Set(
    (process.env.EXCLUDE_CATEGORY_IDS || '')
      .split(',')
      .map(id => parseInt(id.trim()))
      .filter(id => !isNaN(id))
  );
}

/**
 * Build a hierarchical category tree from flat data
 * @param {Array} categories - Category records with kKategorie, kOberKategorie, nSort
 * @param {Array} names - Name records with kKategorie, cName
 * @param {Array} articleCounts - Article count records with kKategorie, count
 * @param {Array} images - Image records with kKategorie, kBild
 * @param {boolean} applyRootFilter - Whether to apply ROOT_CATEGORY_ID filter
 * @returns {Array} - Array of root category nodes
 */
export function buildTree(categories, names, articleCounts, images, applyRootFilter = true) {
  // Create maps for quick lookup
  const nameMap = new Map();
  names.forEach(n => nameMap.set(n.kKategorie, n.cName));

  const countMap = new Map();
  articleCounts.forEach(c => countMap.set(c.kKategorie, c.count));

  const imageMap = new Map();
  images.forEach(i => imageMap.set(i.kKategorie, i.kBild));

  const categoryMap = new Map();

  // Initialize all nodes
  categories.forEach(cat => {
    categoryMap.set(cat.kKategorie, {
      kKategorie: cat.kKategorie,
      cName: nameMap.get(cat.kKategorie) || `Unknown (${cat.kKategorie})`,
      articleCount: countMap.get(cat.kKategorie) || 0,
      kBild: imageMap.get(cat.kKategorie) || null,
      children: [],
      nSort: cat.nSort || 0
    });
  });

  const rootNodes = [];
  const excludedIds = parseExcludedIds();

  // Build hierarchy
  categories.forEach(cat => {
    // Skip if excluded
    if (excludedIds.has(cat.kKategorie)) return;

    const node = categoryMap.get(cat.kKategorie);
    if (cat.kOberKategorie === 0) {
      rootNodes.push(node);
    } else {
      const parent = categoryMap.get(cat.kOberKategorie);
      if (parent) {
        parent.children.push(node);
      }
    }
  });

  const rootId = process.env.ROOT_CATEGORY_ID ? parseInt(process.env.ROOT_CATEGORY_ID) : null;
  let resultNodes = rootNodes;

  if (rootId && applyRootFilter) {
    if (excludedIds.has(rootId)) {
      resultNodes = [];
    } else {
      const specificRoot = categoryMap.get(rootId);
      // Return the children of the specified root, not the root itself
      resultNodes = specificRoot ? specificRoot.children : [];
    }
  }

  // Sort children and remove nSort
  for (const node of categoryMap.values()) {
    node.children.sort((a, b) => a.nSort - b.nSort || a.kKategorie - b.kKategorie);
  }

  // Sort root nodes
  resultNodes.sort((a, b) => a.nSort - b.nSort || a.kKategorie - b.kKategorie);

  // Remove nSort property from all nodes
  for (const node of categoryMap.values()) {
    delete node.nSort;
  }

  return resultNodes;
}

/**
 * Prune tree to remove empty categories with no children
 * @param {Array} nodes - Array of category nodes
 * @returns {Array} - Filtered array of nodes
 */
export function pruneTree(nodes) {
  return nodes.filter(node => {
    // Recursively prune children
    if (node.children && node.children.length > 0) {
      node.children = pruneTree(node.children);
    }

    // Keep node if it has articles OR has remaining children
    const hasArticles = node.articleCount > 0;
    const hasChildren = node.children && node.children.length > 0;

    return hasArticles || hasChildren;
  });
}

/**
 * Build translation template structure from tree
 * @param {Array} nodes - Array of category nodes
 * @returns {Array} - Simplified structure with only names and children
 */
export function buildTranslationTemplate(nodes) {
  return nodes.map(node => {
    const result = { name: node.cName };
    if (node.children && node.children.length > 0) {
      result.children = buildTranslationTemplate(node.children);
    }
    return result;
  });
}

/**
 * Format translation template as formatted JSON string
 * @param {Array} nodes - Translation template nodes
 * @param {number} indent - Current indentation level
 * @returns {string} - Formatted JSON string
 */
export function formatTranslationTemplate(nodes, indent = 0) {
  const spaces = ' '.repeat(indent);
  const innerSpaces = ' '.repeat(indent + 1);

  if (nodes.length === 0) return '[]';

  const lines = ['['];

  nodes.forEach((node, index) => {
    const isLast = index === nodes.length - 1;

    if (node.children && node.children.length > 0) {
      // Node with children - multi-line format
      lines.push(`${innerSpaces}{`);
      lines.push(`${innerSpaces} "name": "${node.name}",`);
      lines.push(`${innerSpaces} "children": ${formatTranslationTemplate(node.children, indent + 2)}`);
      lines.push(`${innerSpaces}}${isLast ? '' : ','}`);
    } else {
      // Leaf node - single line format
      lines.push(`${innerSpaces}{ "name": "${node.name}" }${isLast ? '' : ','}`);
    }
  });

  lines.push(`${spaces}]`);
  return lines.join('\n');
}
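Note: a small worked example of the pipeline these utilities form (data invented for illustration). Category 2 has no articles and no children, so pruneTree drops it:

import { buildTree, pruneTree, buildTranslationTemplate, formatTranslationTemplate } from './src/utils/category-tree-utils.js';

const categories = [
  { kKategorie: 1, kOberKategorie: 0, nSort: 1 },
  { kKategorie: 2, kOberKategorie: 0, nSort: 2 },
  { kKategorie: 3, kOberKategorie: 1, nSort: 1 }
];
const names = [
  { kKategorie: 1, cName: 'Tools' },
  { kKategorie: 2, cName: 'Empty' },
  { kKategorie: 3, cName: 'Hammers' }
];
const articleCounts = [{ kKategorie: 3, count: 5 }];

const tree = pruneTree(buildTree(categories, names, articleCounts, [], false));
console.log(formatTranslationTemplate(buildTranslationTemplate(tree)));
// Prints a nested template: "Tools" with children [ { "name": "Hammers" } ]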
58 src/utils/database-utils.js (new file)
@@ -0,0 +1,58 @@
|
||||
/**
|
||||
* Database utility functions for common operations
|
||||
*/
|
||||
|
||||
/**
|
||||
* Process items in chunks with a callback function
|
||||
* @param {Array} items - Items to process
|
||||
* @param {number} chunkSize - Size of each chunk
|
||||
* @param {Function} processFn - Async function to process each chunk (receives chunk array)
|
||||
* @param {Object} options - Optional configuration
|
||||
* @param {boolean} options.showProgress - Whether to log progress (default: true)
|
||||
* @param {string} options.itemName - Name of items for logging (default: 'items')
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
export async function processInChunks(items, chunkSize, processFn, options = {}) {
|
||||
const { showProgress = true, itemName = 'items' } = options;
|
||||
|
||||
for (let i = 0; i < items.length; i += chunkSize) {
|
||||
const chunk = items.slice(i, i + chunkSize);
|
||||
await processFn(chunk, i);
|
||||
|
||||
if (showProgress) {
|
||||
const processed = Math.min(i + chunkSize, items.length);
|
||||
if (processed === items.length) {
|
||||
console.log(`✅ Processed ${processed}/${items.length} ${itemName}.`);
|
||||
} else {
|
||||
console.log(`⏳ Processed ${processed}/${items.length} ${itemName}...`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
/**
 * Create a SQL IN clause from an array of IDs
 * Note: values are joined verbatim, so callers must pass trusted numeric IDs only;
 * anything else would open the query to SQL injection.
 * @param {Array<number>} ids - Array of numeric IDs
 * @returns {string} - Comma-separated string of IDs
 */
export function createInClause(ids) {
  return ids.join(',');
}
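// Example: createInClause([3, 17, 42]) -> '3,17,42', typically interpolated as
// `WHERE id IN (${createInClause(ids)})` (sketch; assumes IDs come from trusted numeric data).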
/**
 * Execute a database query with automatic connection management
 * @param {Function} queryFn - Async function that receives pool and executes queries
 * @param {Function} createConnection - Function to create database connection
 * @returns {Promise<*>} - Result from queryFn
 */
export async function withConnection(queryFn, createConnection) {
  let pool;
  try {
    pool = await createConnection();
    return await queryFn(pool);
  } finally {
    if (pool) {
      await pool.close();
    }
  }
}
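// Example (illustrative sketch; `connectToDb` is a hypothetical factory returning a
// pool object exposing query() and close()):
//
//   const rows = await withConnection(
//     (pool) => pool.query('SELECT * FROM Categories'),
//     connectToDb
//   ); // pool is closed even if the query throws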
134
src/utils/file-sync-utils.js
Normal file
@@ -0,0 +1,134 @@
import fs from 'fs/promises';
import path from 'path';

/**
 * File synchronization utility functions
 */

/**
 * Get existing IDs from files in a directory
 * @param {string} dir - Directory path
 * @param {Object} options - Configuration options
 * @param {string} options.prefix - File prefix to filter (e.g., 'category_')
 * @param {string} options.suffix - File suffix to filter (e.g., '.json')
 * @param {RegExp} options.pattern - Custom regex pattern to extract ID (first capture group)
 * @returns {Promise<number[]>} - Array of numeric IDs
 */
export async function getExistingIds(dir, options = {}) {
  const { prefix = '', suffix = '', pattern = null } = options;

  let existingFiles = [];
  try {
    existingFiles = await fs.readdir(dir);
  } catch (err) {
    // Directory might not exist yet
    return [];
  }

  if (pattern) {
    return existingFiles
      .map(f => {
        const match = f.match(pattern);
        return match ? parseInt(match[1], 10) : null;
      })
      .filter(id => id !== null && !isNaN(id));
  }

  return existingFiles
    .filter(f => {
      if (prefix && !f.startsWith(prefix)) return false;
      if (suffix && !f.endsWith(suffix)) return false;
      return true;
    })
    .map(f => {
      // Slice off the exact prefix/suffix rather than replace(), which could
      // match an earlier occurrence inside the filename
      let id = f;
      if (prefix) id = id.slice(prefix.length);
      if (suffix) id = id.slice(0, -suffix.length);
      return parseInt(id, 10);
    })
    .filter(id => !isNaN(id));
}
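// Example (illustrative sketch; the directory contents are hypothetical):
//
//   // ./cache contains category_1.json, category_7.json, notes.txt
//   const ids = await getExistingIds('./cache', { prefix: 'category_', suffix: '.json' });
//   // -> [1, 7]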
/**
 * Delete obsolete files based on valid IDs
 * @param {string} dir - Directory path
 * @param {number[]} existingIds - IDs of existing files
 * @param {Set<number>} validIds - Set of valid IDs to keep
 * @param {Function} filenameFn - Function to generate filename from ID
 * @returns {Promise<number>} - Number of files deleted
 */
export async function deleteObsoleteFiles(dir, existingIds, validIds, filenameFn) {
  const toDelete = existingIds.filter(id => !validIds.has(id));

  for (const id of toDelete) {
    const filePath = path.join(dir, filenameFn(id));
    await fs.unlink(filePath);
  }

  if (toDelete.length > 0) {
    console.log(`🗑️ Deleted ${toDelete.length} obsolete files.`);
  }

  return toDelete.length;
}
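// Example (illustrative sketch, pairing with getExistingIds above;
// `currentCategoryIds` is a hypothetical list of IDs still present in the source):
//
//   const existing = await getExistingIds('./cache', { prefix: 'category_', suffix: '.json' });
//   await deleteObsoleteFiles('./cache', existing, new Set(currentCategoryIds),
//     (id) => `category_${id}.json`);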
/**
 * Write JSON to file only if content has changed
 * @param {string} filePath - Full path to file
 * @param {*} data - Data to write (will be JSON.stringify'd)
 * @param {number} indent - JSON indentation (default: 2)
 * @returns {Promise<boolean>} - True if file was written, false if unchanged
 */
export async function writeJsonIfChanged(filePath, data, indent = 2) {
  const newContent = JSON.stringify(data, null, indent);

  let oldContent = '';
  try {
    oldContent = await fs.readFile(filePath, 'utf-8');
  } catch (e) {
    // File doesn't exist yet
  }

  if (oldContent !== newContent) {
    await fs.writeFile(filePath, newContent);
    return true;
  }

  return false;
}
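// Example (illustrative sketch): skipping unchanged writes leaves file mtimes alone,
// so downstream watchers fire only on real changes.
//
//   const changed = await writeJsonIfChanged('./cache/category_7.json', categoryData);
//   if (changed) console.log('cache updated');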
/**
 * Ensure directory exists, create if it doesn't
 * @param {string} dir - Directory path
 * @returns {Promise<void>}
 */
export async function ensureDir(dir) {
  await fs.mkdir(dir, { recursive: true });
}

/**
 * Read JSON file safely
 * @param {string} filePath - Full path to file
 * @returns {Promise<*|null>} - Parsed JSON, or null if the file is missing or not valid JSON
 */
export async function readJsonFile(filePath) {
  try {
    const content = await fs.readFile(filePath, 'utf-8');
    return JSON.parse(content);
  } catch (err) {
    return null;
  }
}

/**
 * Read text file safely
 * @param {string} filePath - Full path to file
 * @returns {Promise<string|null>} - File content or null if file doesn't exist
 */
export async function readTextFile(filePath) {
  try {
    return await fs.readFile(filePath, 'utf-8');
  } catch (err) {
    return null;
  }
}
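// Example (illustrative sketch; './cache' and the filename are hypothetical):
//
//   await ensureDir('./cache');
//   const cached = await readJsonFile('./cache/categories.json');
//   if (cached === null) { /* fall back to a fresh fetch */ }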
76
src/utils/sync-queue-manager.js
Normal file
@@ -0,0 +1,76 @@
/**
 * SyncQueueManager - Manages sync operations with queuing support
 *
 * Prevents concurrent syncs for the same key and queues subsequent requests.
 * Supports both global syncing (single key) and per-group syncing (multiple keys).
 */
export class SyncQueueManager {
  constructor() {
    this.isSyncing = new Map(); // key -> boolean
    this.queuedSyncs = new Map(); // key -> queuedData
  }

  /**
   * Execute a sync operation with automatic queuing
   * @param {string} key - Unique identifier for this sync operation
   * @param {Function} syncFn - Async function to execute
   * @param {*} queuedData - Data associated with a queued request; used for queue
   *   bookkeeping and logging (the queued run re-invokes syncFn unchanged)
   * @returns {Promise<void>}
   */
  async executeSync(key, syncFn, queuedData = null) {
    // Check if already syncing this key
    if (this.isSyncing.get(key)) {
      if (this.queuedSyncs.has(key)) {
        console.log(`🚫 Sync for '${key}' already in progress and queued. Ignoring.`);
        return;
      }
      console.log(`⏳ Sync for '${key}' already in progress. Queuing.`);
      this.queuedSyncs.set(key, queuedData);
      return;
    }

    await this._doSync(key, syncFn);
  }

  async _doSync(key, syncFn) {
    this.isSyncing.set(key, true);

    try {
      await syncFn();
    } finally {
      this.isSyncing.set(key, false);

      // Process queued sync for this key if any
      if (this.queuedSyncs.has(key)) {
        const queuedData = this.queuedSyncs.get(key);
        this.queuedSyncs.delete(key);

        // Log only if we have meaningful data to show
        if (queuedData !== null && queuedData !== undefined) {
          console.log(`🔄 Processing queued sync for '${key}'...`);
        }

        // Use setImmediate to let the stack clear before re-running the same syncFn
        setImmediate(() => this.executeSync(key, syncFn, queuedData));
      }
    }
  }

  /**
   * Check if a sync is currently in progress for a key
   * @param {string} key - Unique identifier
   * @returns {boolean}
   */
  isSyncInProgress(key) {
    return this.isSyncing.get(key) || false;
  }

  /**
   * Check if a sync is queued for a key
   * @param {string} key - Unique identifier
   * @returns {boolean}
   */
  isSyncQueued(key) {
    return this.queuedSyncs.has(key);
  }
}
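// Example (illustrative sketch; `syncCategories` is a hypothetical sync job):
//
//   const queue = new SyncQueueManager();
//   queue.executeSync('categories', syncCategories); // runs immediately
//   queue.executeSync('categories', syncCategories); // queued: re-runs once the first finishes
//   queue.executeSync('categories', syncCategories); // ignored: one run is already queued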