refactor: extract syncers and server routes into modules
This commit is contained in:
57
src/index.js
Normal file
57
src/index.js
Normal file
@@ -0,0 +1,57 @@
|
||||
import categorySyncer from './syncers/category-syncer.js';
|
||||
import pictureSyncer from './syncers/picture-syncer.js';
|
||||
import categoryProductsSyncer from './syncers/category-products-syncer.js';
|
||||
import { startServer } from './server/server.js';
|
||||
|
||||
// After every completed category sync (changed or not), refresh the image
// cache and the per-category product lists derived from the unpruned tree.
categorySyncer.on('synced', async ({ tree, unprunedTree, changed }) => {
  if (changed) {
    console.log('🎉 Event received: Category tree updated! Root nodes:', tree.length);
  } else {
    console.log('🎉 Event received: Sync finished (no changes). Checking images and products...');
  }

  // Extract all kBild IDs and kKategorie IDs from unpruned tree
  const imageIds = [];
  const categoryIds = [];
  const traverse = (nodes) => {
    for (const node of nodes) {
      if (node.kBild) {
        imageIds.push(node.kBild);
      }
      if (node.kKategorie) {
        categoryIds.push(node.kKategorie);
      }
      if (node.children && node.children.length > 0) {
        traverse(node.children);
      }
    }
  };
  traverse(unprunedTree);

  console.log(`🔍 Found ${imageIds.length} images and ${categoryIds.length} categories.`);

  // Run sequentially: both syncers open their own DB connection and pace
  // themselves; keeping them serial avoids doubling DB load per sync pass.
  await pictureSyncer.syncImages(imageIds, 'categories');
  await categoryProductsSyncer.syncProducts(categoryIds);
});

// Progress logging for individual category product updates.
categoryProductsSyncer.on('categoryUpdated', ({ id, products }) => {
  console.log(`📝 Category ${id} updated. Products count: ${products.length}`);
});

// Trigger immediate sync
categorySyncer.triggerSync();

// Schedule periodic sync. Explicit radix; a missing or malformed
// SYNC_INTERVAL_MS falls back to 60s (NaN is falsy under ||).
const syncInterval = Number.parseInt(process.env.SYNC_INTERVAL_MS, 10) || 60000;
setInterval(() => {
  categorySyncer.triggerSync();
}, syncInterval);

// Handle graceful shutdown
process.on('SIGINT', () => {
  console.log('\n👋 Bye!');
  process.exit(0);
});

// Start Express server
startServer(categorySyncer, categoryProductsSyncer);
|
||||
18
src/server/routes/categories.js
Normal file
18
src/server/routes/categories.js
Normal file
@@ -0,0 +1,18 @@
|
||||
/**
 * Register GET /api/categories: serves the cached category tree JSON with
 * ETag-based revalidation (304 when the client's copy is current).
 * @param {object} app - Express application
 * @param {object} cache - shared cache holding { categories: { etag, data } }
 */
export function registerCategories(app, cache) {
  const handler = async (req, res) => {
    try {
      const current = cache.categories;
      const clientTag = req.headers['if-none-match'];

      // Client already holds the current version — skip the body entirely.
      if (clientTag === current.etag) {
        res.status(304).end();
        return;
      }

      res.set('Cache-Control', 'public, max-age=60, must-revalidate');
      res.set('ETag', current.etag);

      res.json(JSON.parse(current.data));
    } catch (err) {
      res.status(500).json({ error: 'Failed to load category tree' });
    }
  };

  app.get('/api/categories', handler);
}
|
||||
17
src/server/routes/images.js
Normal file
17
src/server/routes/images.js
Normal file
@@ -0,0 +1,17 @@
|
||||
import path from 'path';
|
||||
|
||||
/**
 * Register GET /img/cat/:id.avif: serves a cached category image from disk.
 * Image content is addressed by numeric kBild id and never changes, so it is
 * served with a one-year immutable cache lifetime.
 * @param {object} app - Express application
 * @param {string} cacheDir - base cache directory containing img/categories/
 */
export function registerImages(app, cacheDir) {
  app.get('/img/cat/:id.avif', (req, res) => {
    const { id } = req.params;

    // Route params are URL-decoded and may contain path separators (%2F) or
    // dot segments; validate strictly before building a filesystem path so a
    // crafted id cannot traverse outside the cache directory. Ids written by
    // the picture syncer are always plain integers.
    if (!/^\d+$/.test(id)) {
      return res.status(404).send('Image not found');
    }

    const imagePath = path.join(cacheDir, 'img', 'categories', `${id}.avif`);

    // Cache images for 1 year (immutable content)
    res.set('Cache-Control', 'public, max-age=31536000, immutable');

    res.sendFile(path.resolve(imagePath), (err) => {
      if (err) {
        res.status(404).send('Image not found');
      }
    });
  });
}
|
||||
20
src/server/routes/index.js
Normal file
20
src/server/routes/index.js
Normal file
@@ -0,0 +1,20 @@
|
||||
/**
 * Register GET /: serves the cached index.html shell with ETag-based
 * revalidation (304 when the client's copy is current).
 * @param {object} app - Express application
 * @param {object} cache - shared cache holding { html: { etag, data } }
 */
export function registerIndex(app, cache) {
  const handler = async (req, res) => {
    try {
      const current = cache.html;
      const clientTag = req.headers['if-none-match'];

      // Short-circuit when the client already has this exact version.
      if (clientTag === current.etag) {
        res.status(304).end();
        return;
      }

      res.set({
        'Cache-Control': 'public, max-age=300, must-revalidate',
        'ETag': current.etag,
        'Content-Type': 'text/html',
      });

      res.send(current.data);
    } catch (err) {
      console.error('Error serving index.html:', err);
      res.status(500).send('Error loading page');
    }
  };

  app.get('/', handler);
}
|
||||
32
src/server/routes/products.js
Normal file
32
src/server/routes/products.js
Normal file
@@ -0,0 +1,32 @@
|
||||
/**
 * Register GET /api/categories/:id/products: serves the cached product list
 * for a category, lazily populating the cache on first access and honoring
 * ETag revalidation.
 * @param {object} app - Express application
 * @param {object} cache - shared cache holding { products: Map<id, {etag, data}> }
 * @param {(id: number) => Promise<void>} updateProductCache - loads a
 *   category's product file from disk into the cache (or evicts it).
 */
export function registerProducts(app, cache, updateProductCache) {
  app.get('/api/categories/:id/products', async (req, res) => {
    try {
      const id = Number.parseInt(req.params.id, 10);

      // Reject non-numeric ids up front; previously a NaN id fell through to
      // a pointless disk lookup for "category_NaN.json".
      if (Number.isNaN(id)) {
        return res.status(404).json({ error: 'Category products not found' });
      }

      // Lazy load if not in cache
      if (!cache.products.has(id)) {
        await updateProductCache(id);
      }

      const cached = cache.products.get(id);

      if (!cached) {
        return res.status(404).json({ error: 'Category products not found' });
      }

      // Check if client has cached version
      if (req.headers['if-none-match'] === cached.etag) {
        return res.status(304).end(); // Not Modified
      }

      // Set cache headers with ETag
      res.set('Cache-Control', 'public, max-age=60, must-revalidate');
      res.set('ETag', cached.etag);

      res.json(JSON.parse(cached.data));
    } catch (err) {
      console.error(`Error serving products for category ${req.params.id}:`, err);
      res.status(500).json({ error: 'Failed to load products' });
    }
  });
}
|
||||
113
src/server/server.js
Normal file
113
src/server/server.js
Normal file
@@ -0,0 +1,113 @@
|
||||
import express from 'express';
|
||||
import { createServer } from 'http';
|
||||
import { Server } from 'socket.io';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import fs from 'fs/promises';
|
||||
import { registerCategories } from './routes/categories.js';
|
||||
import { registerProducts } from './routes/products.js';
|
||||
import { registerImages } from './routes/images.js';
|
||||
import { registerIndex } from './routes/index.js';
|
||||
import { registerConnection } from './socket/connection.js';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
/**
 * Build and start the HTTP + Socket.IO server.
 *
 * Maintains an in-memory cache of response payloads (category tree, per-
 * category product lists, index.html) together with md5 ETags, refreshes
 * those caches when the syncers report changes, and broadcasts change
 * notifications to connected socket clients.
 *
 * @param {object} categorySyncer - emits 'synced' ({ changed }) after a pass
 * @param {object} categoryProductsSyncer - emits 'categoryUpdated' ({ id })
 */
export function startServer(categorySyncer, categoryProductsSyncer) {
  const app = express();
  const httpServer = createServer(app);
  const io = new Server(httpServer);

  const PORT = process.env.SERVER_PORT || 3000;
  const HOST = process.env.SERVER_HOST || '0.0.0.0';
  const CACHE_DIR = process.env.CACHE_LOCATION || './cache';

  // Cache for ETags and data
  const cache = {
    categories: { etag: null, data: null },
    html: { etag: null, data: null },
    products: new Map() // id -> { etag, data }
  };

  // Compute the md5 hex digest used as an ETag for a payload. Shared by all
  // three cache updaters (previously duplicated, each with its own dynamic
  // crypto import).
  async function md5(data) {
    const { createHash } = await import('crypto');
    return createHash('md5').update(data).digest('hex');
  }

  // Refresh the categories cache from the synced category_tree.json.
  async function updateCategoriesCache() {
    try {
      const treePath = path.join(CACHE_DIR, 'category_tree.json');
      const data = await fs.readFile(treePath, 'utf-8');
      cache.categories.etag = await md5(data);
      cache.categories.data = data;
    } catch (err) {
      // Silently skip if file doesn't exist yet (will be created on first sync)
      if (err.code !== 'ENOENT') {
        console.error('Error updating categories cache:', err);
      }
    }
  }

  // Refresh (or evict, when the file disappeared) the product cache entry
  // for a single category.
  async function updateProductCache(id) {
    try {
      const productPath = path.join(CACHE_DIR, 'products', `category_${id}.json`);
      const data = await fs.readFile(productPath, 'utf-8');
      const etag = await md5(data);
      cache.products.set(id, { etag, data });
    } catch (err) {
      // If file missing, remove from cache
      if (err.code === 'ENOENT') {
        cache.products.delete(id);
      } else {
        console.error(`Error updating product cache for category ${id}:`, err);
      }
    }
  }

  // Refresh the cached index.html shell.
  async function updateHtmlCache() {
    try {
      const htmlPath = path.join(__dirname, '../../index.html');
      const data = await fs.readFile(htmlPath, 'utf-8');
      cache.html.etag = await md5(data);
      cache.html.data = data;
    } catch (err) {
      console.error('Error updating HTML cache:', err);
    }
  }

  // Initialize caches on startup. Fire-and-forget is safe here: both helpers
  // catch their own errors and never reject.
  updateHtmlCache();
  updateCategoriesCache();

  // Update categories cache when sync completes
  if (categorySyncer) {
    categorySyncer.on('synced', ({ changed }) => {
      if (changed) {
        updateCategoriesCache();
        io.emit('categoriesUpdated');
      }
    });
  }

  // Update product cache when category products update
  if (categoryProductsSyncer) {
    categoryProductsSyncer.on('categoryUpdated', ({ id }) => {
      updateProductCache(id);
      io.emit('categoryProductsUpdated', { id });
    });
  }

  // Register socket connection handler
  registerConnection(io);

  // Register routes
  registerCategories(app, cache);
  registerProducts(app, cache, updateProductCache);
  registerImages(app, CACHE_DIR);
  registerIndex(app, cache);

  httpServer.listen(PORT, HOST, () => {
    console.log(`🌐 Server running on http://${HOST}:${PORT}`);
  });
}
|
||||
8
src/server/socket/connection.js
Normal file
8
src/server/socket/connection.js
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
 * Register Socket.IO connection handling: logs the lifecycle of every client.
 * @param {object} io - Socket.IO server instance
 */
export function registerConnection(io) {
  const onDisconnect = () => {
    console.log('🔌 Client disconnected');
  };

  io.on('connection', (socket) => {
    console.log('🔌 Client connected');
    socket.on('disconnect', onDisconnect);
  });
}
|
||||
174
src/syncers/category-products-syncer.js
Normal file
174
src/syncers/category-products-syncer.js
Normal file
@@ -0,0 +1,174 @@
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import { EventEmitter } from 'events';
|
||||
import { createConnection } from '../utils/database.js';
|
||||
|
||||
/**
 * Singleton syncer that materializes per-category product lists from the JTL
 * database into JSON files under `<CACHE_LOCATION>/products/category_{id}.json`.
 *
 * Emits:
 *  - 'categoryUpdated' ({ id, products }) for each category whose product
 *    file content actually changed on disk.
 */
class CategoryProductsSyncer extends EventEmitter {
  constructor() {
    super();
    // Singleton guard: later `new` calls return the first instance.
    if (CategoryProductsSyncer.instance) {
      return CategoryProductsSyncer.instance;
    }
    this.cacheBaseDir = process.env.CACHE_LOCATION || '.';

    // Track syncing state
    this.isSyncing = false; // true while a sync pass is running
    this.queuedCategoryIds = null; // ids requested while busy; only the newest request is kept

    CategoryProductsSyncer.instance = this;
  }

  /**
   * Sync product lists for the given category ids. If a sync is already in
   * flight, the request is queued (replacing any previously queued ids) and
   * re-run after the current pass finishes.
   * @param {number[]} categoryIds - kKategorie ids to sync
   */
  async syncProducts(categoryIds) {
    // Check if already syncing
    if (this.isSyncing) {
      console.log('⏳ CategoryProductsSyncer is busy. Queuing sync...');
      this.queuedCategoryIds = categoryIds;
      return;
    }

    this.isSyncing = true;
    try {
      await this._performSync(categoryIds);
    } catch (err) {
      console.error('❌ Error syncing products:', err);
    } finally {
      this.isSyncing = false;
      // Process queued sync if exists
      if (this.queuedCategoryIds) {
        const nextIds = this.queuedCategoryIds;
        this.queuedCategoryIds = null;
        // Use setTimeout to allow event loop to breathe
        setTimeout(() => this.syncProducts(nextIds), 0);
      }
    }
  }

  /**
   * One sync pass: delete product files for categories no longer requested,
   * then (re)fetch and write product lists for all requested categories.
   * All requested categories are refetched because product assignments may
   * have changed even when the category set did not.
   * @param {number[]} categoryIds
   */
  async _performSync(categoryIds) {
    const startTime = Date.now();
    const productsDir = path.join(this.cacheBaseDir, 'products');

    // Ensure directory exists
    await fs.mkdir(productsDir, { recursive: true });

    // Get existing files
    let existingFiles = [];
    try {
      existingFiles = await fs.readdir(productsDir);
    } catch (err) {
      // Directory might be empty or new
    }

    // Filter for category json files (assuming we save as category_{id}.json)
    const existingIds = existingFiles
      .filter(f => f.startsWith('category_') && f.endsWith('.json'))
      .map(f => parseInt(f.replace('category_', '').replace('.json', '')));

    // Drop null/undefined ids; Set also deduplicates repeated ids.
    const validIds = new Set(categoryIds.filter(id => id !== null && id !== undefined));

    // 1. Delete obsolete category files
    const toDelete = existingIds.filter(id => !validIds.has(id));
    for (const id of toDelete) {
      const filePath = path.join(productsDir, `category_${id}.json`);
      await fs.unlink(filePath);
    }
    if (toDelete.length > 0) {
      console.log(`🗑️ Deleted ${toDelete.length} obsolete product lists.`);
    }

    // 2. Update/Create product lists for all valid categories
    // We update all because product assignments might have changed even if category exists
    if (validIds.size > 0) {
      console.log(`📦 Syncing products for ${validIds.size} categories...`);
      await this._fetchAndWriteProducts([...validIds], productsDir);
    } else {
      console.log(`✅ No categories to sync products for.`);
    }

    const duration = Date.now() - startTime;
    console.log(`✅ Product sync completed in ${duration}ms.`);
  }

  /**
   * Fetch product rows for the given category ids (in chunks of 50) and write
   * one JSON file per category. A file is only rewritten — and
   * 'categoryUpdated' only emitted — when its content actually changed.
   * @param {number[]} ids - category ids to fetch
   * @param {string} dir - target directory for category_{id}.json files
   */
  async _fetchAndWriteProducts(ids, dir) {
    let pool;
    try {
      pool = await createConnection();

      // Process in chunks to avoid huge queries
      const chunkSize = 50;
      for (let i = 0; i < ids.length; i += chunkSize) {
        const chunk = ids.slice(i, i + chunkSize);
        const list = chunk.join(',');

        // Fetch products for this chunk of categories
        // We need kArtikel and cName, ordered by bRowversion descending
        // NOTE(review): env values and ids are interpolated directly into the
        // SQL; ids are numeric here, but parameterized queries would be safer
        // if the inputs ever change — confirm.
        const result = await pool.request().query(`
          SELECT
            ka.kKategorie,
            ka.kArtikel,
            ab.cName
          FROM tkategorieartikel ka
          JOIN tArtikelBeschreibung ab ON ka.kArtikel = ab.kArtikel
          JOIN tArtikel a ON ka.kArtikel = a.kArtikel
          WHERE ab.kSprache = ${process.env.JTL_SPRACHE_ID}
          AND ab.kPlattform = ${process.env.JTL_PLATTFORM_ID}
          AND ab.kShop = ${process.env.JTL_SHOP_ID}
          AND ka.kKategorie IN (${list})
          ORDER BY a.bRowversion DESC, ab.bRowversion DESC
        `);

        // Group results by kKategorie
        const productsByCategory = {};

        // Initialize arrays for all requested IDs (so we create empty files for empty categories)
        chunk.forEach(id => {
          productsByCategory[id] = [];
        });

        for (const record of result.recordset) {
          if (productsByCategory[record.kKategorie]) {
            productsByCategory[record.kKategorie].push({
              kArtikel: record.kArtikel,
              cName: record.cName
            });
          }
        }

        // Write files
        for (const catId of chunk) {
          const filePath = path.join(dir, `category_${catId}.json`);
          const products = productsByCategory[catId] || [];
          const newContent = JSON.stringify(products, null, 2);

          // Check for changes
          let oldContent = '';
          try {
            oldContent = await fs.readFile(filePath, 'utf-8');
          } catch (e) {
            // File doesn't exist yet
          }

          if (oldContent !== newContent) {
            await fs.writeFile(filePath, newContent);
            this.emit('categoryUpdated', { id: catId, products });
          }
        }

        const processed = Math.min(i + chunkSize, ids.length);
        if (processed === ids.length) {
          console.log(`✅ Processed products for ${processed}/${ids.length} categories.`);
        } else {
          console.log(`⏳ Processed products for ${processed}/${ids.length} categories...`);
        }
      }
    } catch (err) {
      console.error('❌ Error fetching products:', err);
    } finally {
      if (pool) {
        await pool.close();
      }
    }
  }
}

// Module-level singleton shared by all importers.
const instance = new CategoryProductsSyncer();
export default instance;
|
||||
300
src/syncers/category-syncer.js
Normal file
300
src/syncers/category-syncer.js
Normal file
@@ -0,0 +1,300 @@
|
||||
import { EventEmitter } from 'events';
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import { createConnection } from '../utils/database.js';
|
||||
|
||||
/**
 * Singleton syncer that mirrors the JTL category hierarchy into
 * `<CACHE_LOCATION>/category_tree.json` (pruned of empty categories) and a
 * human-editable translation template (unpruned). Concurrent triggers while a
 * sync is running are coalesced into a single queued follow-up sync.
 *
 * Emits:
 *  - 'synced' ({ tree, unprunedTree, changed }) after every sync pass,
 *    whether or not the tree changed.
 */
class CategorySyncer extends EventEmitter {
  constructor() {
    super();
    // Singleton guard: later `new` calls return the first instance.
    if (CategorySyncer.instance) {
      return CategorySyncer.instance;
    }

    this.isSyncing = false; // true while a sync pass is running
    this.queuedSync = false; // a follow-up sync was requested while busy
    this.cacheDir = process.env.CACHE_LOCATION || '.';
    this.lastTreeString = null; // last JSON written to category_tree.json
    this.lastTemplateString = null; // last translation template written

    // Load existing template if it exists
    // NOTE(review): fire-and-forget — an immediate first sync could race this
    // load and rewrite unchanged files once; confirm acceptable.
    this._loadExistingTemplate();

    CategorySyncer.instance = this;
  }

  /**
   * Seed lastTemplateString / lastTreeString from files left by a previous
   * run, so an unchanged tree is not rewritten after a restart. Missing files
   * are expected on first run and silently ignored.
   */
  async _loadExistingTemplate() {
    try {
      const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
      this.lastTemplateString = await fs.readFile(templatePath, 'utf-8');
    } catch (err) {
      // File doesn't exist yet, that's fine
    }

    try {
      const treePath = path.join(this.cacheDir, 'category_tree.json');
      const treeContent = await fs.readFile(treePath, 'utf-8');
      this.lastTreeString = treeContent;
    } catch (err) {
      // File doesn't exist yet, that's fine
    }
  }

  /**
   * Request a sync. Runs immediately when idle; otherwise queues exactly one
   * follow-up sync (further requests while one is queued are ignored).
   */
  async triggerSync() {
    if (this.isSyncing) {
      if (this.queuedSync) {
        console.log('🚫 Sync already in progress and next sync already queued. Ignoring.');
        return;
      }
      console.log('⏳ Sync already in progress. Queuing next sync.');
      this.queuedSync = true;
      return;
    }

    await this._doSync();
  }

  /**
   * Run one sync pass with timing/logging, then drain the queued follow-up
   * sync (if any). Errors are logged, never rethrown.
   */
  async _doSync() {
    this.isSyncing = true;
    const startTime = Date.now();
    console.log('🚀 Starting sync...');

    try {
      await this._syncFromDb();
      const duration = Date.now() - startTime;

      // Log completion and next sync time
      const syncInterval = parseInt(process.env.SYNC_INTERVAL_MS) || 60000;
      const minutes = Math.round(syncInterval / 60000);
      console.log(`✅ Sync completed in ${duration}ms. Next sync in ${minutes} minute${minutes !== 1 ? 's' : ''}`);
    } catch (err) {
      console.error('❌ Sync failed:', err);
    } finally {
      this.isSyncing = false;
      if (this.queuedSync) {
        console.log('🔄 Processing queued sync...');
        this.queuedSync = false;
        // Use setImmediate to allow stack to clear/event loop to tick
        setImmediate(() => this.triggerSync());
      }
    }
  }

  /**
   * Core sync: query categories, names, article counts and images from the
   * DB, build the tree, write the translation template (unpruned) and the
   * pruned tree to disk when changed, and emit 'synced'.
   * The DB connection pool is always closed, even on error.
   */
  async _syncFromDb() {
    let pool;
    try {
      pool = await createConnection();

      // Fetch categories
      const categoriesResult = await pool.request().query(`
        SELECT kKategorie, kOberKategorie, nSort
        FROM tkategorie
      `);

      // Fetch names
      const namesResult = await pool.request().query(`
        SELECT kKategorie, cName
        FROM tKategorieSprache
        WHERE kSprache = ${process.env.JTL_SPRACHE_ID} AND kShop = ${process.env.JTL_SHOP_ID}
      `);

      // Fetch article counts
      const articleCountsResult = await pool.request().query(`
        SELECT ka.kKategorie, COUNT(a.kArtikel) as count
        FROM tkategorieartikel ka
        JOIN tArtikel a ON ka.kArtikel = a.kArtikel
        WHERE a.cAktiv = 'Y'
        GROUP BY ka.kKategorie
      `);

      // Fetch images (kBild) — first image per category by nNr.
      const imagesResult = await pool.request().query(`
        SELECT kKategorie, kBild
        FROM (
          SELECT kKategorie, kBild, ROW_NUMBER() OVER (PARTITION BY kKategorie ORDER BY nNr ASC) as rn
          FROM tKategoriebildPlattform
          WHERE kShop = ${process.env.JTL_SHOP_ID} AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
        ) t
        WHERE rn = 1
      `);

      const categories = categoriesResult.recordset;
      const names = namesResult.recordset;
      const articleCounts = articleCountsResult.recordset;
      const images = imagesResult.recordset;

      // Build tree with ROOT_CATEGORY_ID filter (if set)
      // This gives us the subtree we're interested in
      let tree = this._buildTree(categories, names, articleCounts, images, true);

      // Deep copy tree for unpruned version (before pruning modifies it)
      const unprunedTree = JSON.parse(JSON.stringify(tree));

      // Generate translation template BEFORE pruning (to include all categories)
      const translationTemplate = this._buildTranslationTemplate(tree);
      const templateString = this._formatTranslationTemplate(translationTemplate);

      // Now prune for the main tree
      tree = this._pruneTree(tree);

      // Ensure directory exists
      await fs.mkdir(this.cacheDir, { recursive: true });

      // Compare pruned tree
      const treeString = JSON.stringify(tree, null, 2);
      const changed = this.lastTreeString !== treeString;

      if (changed) {
        // Save template if it changed
        if (this.lastTemplateString !== templateString) {
          const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
          await fs.writeFile(templatePath, templateString);
          console.log(`💾 Translation template saved to ${templatePath}`);
          this.lastTemplateString = templateString;
        }

        const filePath = path.join(this.cacheDir, 'category_tree.json');
        await fs.writeFile(filePath, treeString);
        console.log(`💾 Category tree saved to ${filePath}`);

        this.lastTreeString = treeString;
        console.log('📢 Tree updated.');
      } else {
        console.log('🤷 No changes detected in category tree.');
      }

      this.emit('synced', { tree, unprunedTree, changed });

    } finally {
      if (pool) {
        await pool.close();
      }
    }
  }

  /**
   * Assemble flat DB rows into a sorted tree of nodes shaped
   * { kKategorie, cName, articleCount, kBild, children }.
   * @param {Array} categories - rows with kKategorie, kOberKategorie, nSort
   * @param {Array} names - rows with kKategorie, cName
   * @param {Array} articleCounts - rows with kKategorie, count
   * @param {Array} images - rows with kKategorie, kBild
   * @param {boolean} applyRootFilter - when true and ROOT_CATEGORY_ID is set,
   *   return only the children of that root category instead of all roots
   * @returns {Array} root-level nodes, children sorted by nSort (which is
   *   stripped from the output)
   */
  _buildTree(categories, names, articleCounts, images, applyRootFilter = true) {
    // Create a map for quick lookup of names
    const nameMap = new Map();
    names.forEach(n => nameMap.set(n.kKategorie, n.cName));

    // Create a map for article counts
    const countMap = new Map();
    articleCounts.forEach(c => countMap.set(c.kKategorie, c.count));

    // Create a map for images
    const imageMap = new Map();
    images.forEach(i => imageMap.set(i.kKategorie, i.kBild));

    // Create a map for category nodes
    const categoryMap = new Map();

    // Initialize all nodes
    categories.forEach(cat => {
      categoryMap.set(cat.kKategorie, {
        kKategorie: cat.kKategorie,
        cName: nameMap.get(cat.kKategorie) || `Unknown (${cat.kKategorie})`, // Fallback if name missing
        articleCount: countMap.get(cat.kKategorie) || 0,
        kBild: imageMap.get(cat.kKategorie) || null,
        children: [],
        nSort: cat.nSort || 0 // Store nSort temporarily
      });
    });

    const rootNodes = [];

    // Build hierarchy
    categories.forEach(cat => {
      const node = categoryMap.get(cat.kKategorie);
      if (cat.kOberKategorie === 0) {
        rootNodes.push(node);
      } else {
        const parent = categoryMap.get(cat.kOberKategorie);
        if (parent) {
          parent.children.push(node);
        } else {
          // Handle orphan nodes if necessary, or ignore
          // console.warn(`Orphan category found: ${cat.kKategorie}`);
        }
      }
    });

    const rootId = process.env.ROOT_CATEGORY_ID ? parseInt(process.env.ROOT_CATEGORY_ID) : null;
    let resultNodes = rootNodes;

    if (rootId && applyRootFilter) {
      const specificRoot = categoryMap.get(rootId);
      // Return the children of the specified root, not the root itself
      resultNodes = specificRoot ? specificRoot.children : [];
    }

    // Sort children and remove nSort
    for (const node of categoryMap.values()) {
      node.children.sort((a, b) => a.nSort - b.nSort);
    }

    // Sort root nodes if returning multiple
    resultNodes.sort((a, b) => a.nSort - b.nSort);

    // Remove nSort property from all nodes
    // (must happen after all sorting above, which still reads nSort)
    for (const node of categoryMap.values()) {
      delete node.nSort;
    }

    return resultNodes;
  }

  /**
   * Remove categories that have no active articles and no surviving children.
   * Mutates each node's `children` array in place while filtering.
   * @param {Array} nodes - tree nodes to prune
   * @returns {Array} the surviving nodes
   */
  _pruneTree(nodes) {
    // Filter out nodes that are empty (no articles) and have no valid children
    return nodes.filter(node => {
      // Recursively prune children
      if (node.children && node.children.length > 0) {
        node.children = this._pruneTree(node.children);
      }

      // Keep node if it has articles OR has remaining children
      const hasArticles = node.articleCount > 0;
      const hasChildren = node.children && node.children.length > 0;

      return hasArticles || hasChildren;
    });
  }

  /**
   * Reduce a category tree to a translation skeleton: only `name` (and
   * nested `children`, when present) survive.
   * @param {Array} nodes - tree nodes
   * @returns {Array} objects shaped { name, children? }
   */
  _buildTranslationTemplate(nodes) {
    return nodes.map(node => {
      const result = { name: node.cName };
      if (node.children && node.children.length > 0) {
        result.children = this._buildTranslationTemplate(node.children);
      }
      return result;
    });
  }

  /**
   * Hand-format the translation template as compact JSON-like text: leaf
   * nodes on one line, parents expanded, one space per indent level.
   * @param {Array} nodes - output of _buildTranslationTemplate
   * @param {number} indent - current indent depth (used by recursion)
   * @returns {string} formatted template text
   */
  _formatTranslationTemplate(nodes, indent = 0) {
    const spaces = ' '.repeat(indent);
    const innerSpaces = ' '.repeat(indent + 1);

    if (nodes.length === 0) return '[]';

    const lines = ['['];

    nodes.forEach((node, index) => {
      const isLast = index === nodes.length - 1;

      if (node.children && node.children.length > 0) {
        // Node with children - multi-line format
        lines.push(`${innerSpaces}{`);
        lines.push(`${innerSpaces} "name": "${node.name}",`);
        lines.push(`${innerSpaces} "children": ${this._formatTranslationTemplate(node.children, indent + 2)}`);
        lines.push(`${innerSpaces}}${isLast ? '' : ','}`);
      } else {
        // Leaf node - single line format
        lines.push(`${innerSpaces}{ "name": "${node.name}" }${isLast ? '' : ','}`);
      }
    });

    lines.push(`${spaces}]`);
    return lines.join('\n');
  }
}

// Module-level singleton shared by all importers.
const instance = new CategorySyncer();
export default instance;
|
||||
143
src/syncers/picture-syncer.js
Normal file
143
src/syncers/picture-syncer.js
Normal file
@@ -0,0 +1,143 @@
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import sharp from 'sharp';
|
||||
import { createConnection } from '../utils/database.js';
|
||||
|
||||
/**
 * Singleton syncer that mirrors image blobs from the JTL database into
 * `<CACHE_LOCATION>/img/<group>/<kBild>.avif`, resized to 130x130 AVIF.
 * Sync state is tracked per group, so different groups can sync while one
 * group's follow-up request is queued.
 */
class PictureSyncer {
  constructor() {
    // Singleton guard: later `new` calls return the first instance.
    if (PictureSyncer.instance) {
      return PictureSyncer.instance;
    }
    this.cacheBaseDir = process.env.CACHE_LOCATION || '.';

    // Track syncing state per group
    this.isSyncing = new Map(); // groupName -> boolean
    this.queuedSyncs = new Map(); // groupName -> { imageIds, groupName }

    PictureSyncer.instance = this;
  }

  /**
   * Sync the image set for one group. If that group is already syncing, at
   * most one follow-up request is queued; further requests are ignored.
   * @param {Array<number|null|undefined>} imageIds - kBild ids that should exist
   * @param {string} groupName - subdirectory under img/ (e.g. 'categories')
   */
  async syncImages(imageIds, groupName) {
    // Check if already syncing this group
    if (this.isSyncing.get(groupName)) {
      if (this.queuedSyncs.has(groupName)) {
        console.log(`🚫 Image sync for '${groupName}' already in progress and queued. Ignoring.`);
        return;
      }
      console.log(`⏳ Image sync for '${groupName}' already in progress. Queuing.`);
      this.queuedSyncs.set(groupName, { imageIds, groupName });
      return;
    }

    await this._doSync(imageIds, groupName);
  }

  /**
   * Run one sync pass for a group under the busy flag, then drain the queued
   * follow-up request for that group (if any).
   * @param {Array} imageIds
   * @param {string} groupName
   */
  async _doSync(imageIds, groupName) {
    this.isSyncing.set(groupName, true);

    try {
      await this._performSync(imageIds, groupName);
    } finally {
      this.isSyncing.set(groupName, false);

      // Process queued sync for this group if any
      if (this.queuedSyncs.has(groupName)) {
        console.log(`🔄 Processing queued image sync for '${groupName}'...`);
        const queued = this.queuedSyncs.get(groupName);
        this.queuedSyncs.delete(groupName);
        setImmediate(() => this.syncImages(queued.imageIds, queued.groupName));
      }
    }
  }

  /**
   * Reconcile the group's directory with the requested id set: delete .avif
   * files whose id is no longer requested, then download ids with no file.
   * Existing files are never re-downloaded.
   * @param {Array} imageIds
   * @param {string} groupName
   */
  async _performSync(imageIds, groupName) {
    const groupDir = path.join(this.cacheBaseDir, 'img', groupName);

    // Ensure directory exists
    await fs.mkdir(groupDir, { recursive: true });

    // Get existing files
    let existingFiles = [];
    try {
      existingFiles = await fs.readdir(groupDir);
    } catch (err) {
      // Directory might be empty or new
    }

    // Filter for image files (assuming we save as {id}.avif)
    const existingIds = existingFiles
      .filter(f => f.endsWith('.avif'))
      .map(f => parseInt(f.replace('.avif', '')));

    const validIds = new Set(imageIds.filter(id => id !== null && id !== undefined));

    // 1. Delete obsolete images
    const toDelete = existingIds.filter(id => !validIds.has(id));
    for (const id of toDelete) {
      const filePath = path.join(groupDir, `${id}.avif`);
      await fs.unlink(filePath);
    }
    if (toDelete.length > 0) {
      console.log(`🗑️ Deleted ${toDelete.length} obsolete images.`);
    }

    // 2. Download missing images
    const toDownload = imageIds.filter(id => id !== null && id !== undefined && !existingIds.includes(id));

    if (toDownload.length > 0) {
      console.log(`📥 Downloading ${toDownload.length} new images for group '${groupName}'...`);
      await this._downloadImages(toDownload, groupDir);
    } else {
      console.log(`✅ No new images to download for group '${groupName}'.`);
    }
  }

  /**
   * Fetch image blobs from the DB (in chunks of 50) and write each one as a
   * 130x130 center-cropped AVIF (quality 80). Rows with an empty blob are
   * skipped. Errors are logged, not rethrown; the pool is always closed.
   * @param {number[]} ids - kBild ids to download
   * @param {string} dir - target directory for {id}.avif files
   */
  async _downloadImages(ids, dir) {
    let pool;
    try {
      pool = await createConnection();

      // Process in chunks to avoid huge queries
      const chunkSize = 50;
      for (let i = 0; i < ids.length; i += chunkSize) {
        const chunk = ids.slice(i, i + chunkSize);
        const list = chunk.join(',');

        const result = await pool.request().query(`
          SELECT kBild, bBild
          FROM tBild
          WHERE kBild IN (${list})
        `);

        for (const record of result.recordset) {
          if (record.bBild) {
            const filePath = path.join(dir, `${record.kBild}.avif`);
            // Resize to 130x130 and convert to AVIF using sharp
            await sharp(record.bBild)
              .resize(130, 130, {
                fit: 'cover',
                position: 'center'
              })
              .avif({ quality: 80 })
              .toFile(filePath);
          }
        }
        const processed = Math.min(i + chunkSize, ids.length);
        if (processed === ids.length) {
          console.log(`✅ Processed ${processed}/${ids.length} images.`);
        } else {
          console.log(`⏳ Processed ${processed}/${ids.length} images...`);
        }
      }
    } catch (err) {
      console.error('❌ Error downloading images:', err);
    } finally {
      if (pool) {
        await pool.close();
      }
    }
  }
}

// Module-level singleton shared by all importers.
const instance = new PictureSyncer();
export default instance;
|
||||
20
src/utils/database.js
Normal file
20
src/utils/database.js
Normal file
@@ -0,0 +1,20 @@
|
||||
import sql from 'mssql';
|
||||
import dotenv from 'dotenv';
|
||||
|
||||
// Load environment variables before any connection settings are read.
dotenv.config({ quiet: true });

// Connection settings are sourced entirely from the environment.
const { DB_USER, DB_PASSWORD, DB_HOST, DB_DATABASE } = process.env;

const config = {
  user: DB_USER,
  password: DB_PASSWORD,
  server: DB_HOST,
  database: DB_DATABASE,
  options: {
    encrypt: false, // Adjust based on server config
    trustServerCertificate: true
  }
};

/**
 * Open a fresh connection pool against the configured MSSQL database.
 * Callers own the returned pool and are responsible for closing it.
 * @returns {Promise<object>} a connected mssql ConnectionPool
 */
export async function createConnection() {
  return new sql.ConnectionPool(config).connect();
}
|
||||
Reference in New Issue
Block a user