feat: Implement product detail syncing with rowversion-based change detection and integrate it into category product synchronization.

sebseb7
2025-11-24 14:50:40 +01:00
parent 6dbac0d3c1
commit b4d202bb23
5 changed files with 281 additions and 6 deletions
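The core idea of the rowversion-based change detection added here: persist the highest bRowversion value seen during the last successful sync and, on the next run, ask SQL Server only for rows with a greater value. A minimal standalone sketch of that pattern (table and column names mirror the diff below; the function itself is illustrative and not part of the commit):

// Illustrative only: incremental fetch driven by a rowversion high-water mark.
// Assumes an mssql connection pool like the one createConnection() returns below.
async function fetchChangedDescriptions(pool, lastSyncRowversion) {
    // rowversion is an 8-byte counter that SQL Server bumps on every row write,
    // so "bRowversion > <last>" selects exactly the rows touched since the last sync.
    const last = lastSyncRowversion || '0x0000000000000000';
    const result = await pool.request().query(`
        SELECT kArtikel, cBeschreibung, bRowversion
        FROM tArtikelBeschreibung
        WHERE bRowversion > ${last}
    `);
    return result.recordset;
}

The two new modules below split this into a data fetcher (SQL side) and a syncer (cache and state side).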

View File

@@ -480,7 +480,15 @@
 }
 const debouncedSearch = debounce((value) => {
-    socket.emit('search', value);
+    if (value.trim().length >= 3) {
+        socket.emit('search', value);
+    } else {
+        // Clear matches and collapse all categories if less than 3 chars
+        resetMatches(state.categories);
+        resetExpansion(state.categories);
+        collapseAllProducts(state.categories);
+        render();
+    }
 }, 300);

 // Event Listeners
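The debounce helper called above is not part of this diff. A minimal trailing-edge version consistent with how debouncedSearch is constructed and invoked here would look like this (illustrative; the project's actual implementation may differ):

// Minimal trailing-edge debounce: only the last call within `delayMs` fires.
function debounce(fn, delayMs) {
    let timer = null;
    return (...args) => {
        clearTimeout(timer);
        timer = setTimeout(() => fn(...args), delayMs);
    };
}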
@@ -495,7 +503,7 @@
     clearBtn.classList.remove('visible');
 }
-if (value.trim()) {
+if (value.trim().length >= 3) {
     debouncedSearch(value);
 } else {
     // Clear matches and collapse all categories
@@ -607,6 +615,12 @@
     });
 }
+function collapseAllProducts(nodes) {
+    nodes.forEach(node => {
+        node.isExpanded = false;
+        if (node.children) collapseAllProducts(node.children);
+    });
+}
 // Initial Load
@@ -674,7 +688,7 @@
 // Filtering Logic
 function filterTree(nodes, query) {
-    if (!query.trim()) return nodes; // Return original structure if no filter
+    if (!query.trim() || query.trim().length < 3) return nodes; // Return original structure if no filter or short query
     return nodes.map(node => {
         // Only keep if marked as having a match
@@ -690,6 +704,10 @@
             const name = p.cName.toLowerCase();
             return words.every(w => name.includes(w));
         });
+        // Limit product results
+        if (matchingProducts.length > 21) {
+            matchingProducts = matchingProducts.slice(0, 21);
+        }
     }
     return {
@@ -845,9 +863,10 @@
     const ul = document.createElement('ul');
     ul.style.listStyle = 'none';
-    const limit = realNode.isExpanded ? category.products.length : 3;
-    category.products.slice(0, limit).forEach(p => {
+    const limit = realNode.isExpanded ? 20 : 3;
+    const displayProducts = category.products.slice(0, limit);
+    displayProducts.forEach(p => {
         const li = document.createElement('li');
         li.className = 'product-item';
@@ -867,7 +886,16 @@
         ul.appendChild(li);
     });
-    if (!realNode.isExpanded && category.products.length > 3) {
+    // Show "more" if expanded and there are more than 20, OR if collapsed and there are more than 3
+    if (realNode.isExpanded && category.products.length > 20) {
+        const more = document.createElement('li');
+        more.className = 'product-item more';
+        more.style.fontStyle = 'italic';
+        more.textContent = `(more)`;
+        // Prevent click from collapsing
+        more.onclick = (e) => e.stopPropagation();
+        ul.appendChild(more);
+    } else if (!realNode.isExpanded && category.products.length > 3) {
         const more = document.createElement('li');
         more.className = 'product-item more';
         more.style.fontStyle = 'italic';

View File

@@ -67,6 +67,10 @@ export async function findMatches(query, cacheDir) {
             matchingCategoryIds.add(node.kKategorie);
         }
     }));
+    if (matchingCategoryIds.size >= 20) {
+        break;
+    }
 }
 return Array.from(matchingCategoryIds);

View File

@@ -0,0 +1,115 @@
import { createConnection } from '../utils/database.js';
import { processInChunks, createInClause } from '../utils/database-utils.js';

/**
 * ProductDetailDataFetcher - Handles fetching product descriptions
 */
export class ProductDetailDataFetcher {
    /**
     * Fetch product descriptions for given article IDs
     * @param {Array<number>} articleIds - Article IDs to fetch details for
     * @param {Function} detailCallback - Callback for each detail (receives {kArtikel, cBeschreibung})
     * @param {number} chunkSize - Size of each chunk (default: 50)
     * @returns {Promise<void>}
     */
    async fetchDetailsInChunks(articleIds, detailCallback, chunkSize = 50) {
        let pool;
        try {
            pool = await createConnection();
            await processInChunks(articleIds, chunkSize, async (chunk) => {
                const list = createInClause(chunk);
                const result = await pool.request().query(`
                    SELECT kArtikel, cBeschreibung, bRowversion
                    FROM tArtikelBeschreibung
                    WHERE kArtikel IN (${list})
                    AND kSprache = ${process.env.JTL_SPRACHE_ID}
                    AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
                    AND kShop = ${process.env.JTL_SHOP_ID}
                `);
                const foundIds = new Set();
                for (const record of result.recordset) {
                    foundIds.add(record.kArtikel);
                    // Convert Buffer or binary string to hex string if needed
                    if (Buffer.isBuffer(record.bRowversion)) {
                        record.bRowversion = '0x' + record.bRowversion.toString('hex').toUpperCase();
                    } else if (typeof record.bRowversion === 'string' && !record.bRowversion.startsWith('0x')) {
                        // Assume binary string
                        record.bRowversion = '0x' + Buffer.from(record.bRowversion, 'binary').toString('hex').toUpperCase();
                    }
                    if (!record.cBeschreibung) {
                        console.log(`⚠️ Item ${record.kArtikel} has no description, writing empty file.`);
                    }
                    await detailCallback(record);
                }
                // Check for missing items in this chunk
                chunk.forEach(id => {
                    if (!foundIds.has(id)) {
                        // console.log(`⚠️ Item ${id} not found in tArtikelBeschreibung (or filtered out).`);
                    }
                });
            }, { showProgress: true, itemName: 'details' });
        } finally {
            if (pool) await pool.close();
        }
    }

    /**
     * Fetch IDs of articles that have changed since a given version
     * @param {Array<number>} articleIds - Candidate article IDs
     * @param {string} minRowversion - Minimum rowversion (hex string)
     * @returns {Promise<Set<number>>} - Set of changed article IDs
     */
    async fetchChangedArticleIds(articleIds, minRowversion) {
        //console.log(`🔍 Checking changes for ${articleIds ? articleIds.length : 0} articles against version ${minRowversion}`);
        if (!articleIds || articleIds.length === 0) return new Set();

        // If no minRowversion, all are considered changed
        if (!minRowversion) {
            console.log('⚠️ No minRowversion provided, fetching all.');
            return new Set(articleIds);
        }

        let pool;
        const changedIds = new Set();
        try {
            pool = await createConnection();
            await processInChunks(articleIds, 2000, async (chunk) => {
                const list = createInClause(chunk);
                // Convert hex string back to buffer for comparison if needed,
                // but MSSQL driver usually handles 0x strings as binary.
                // Let's assume minRowversion is passed as '0x...' string.
                const query = `
                    SELECT kArtikel, bRowversion
                    FROM tArtikelBeschreibung
                    WHERE kArtikel IN (${list})
                    AND kSprache = ${process.env.JTL_SPRACHE_ID}
                    AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
                    AND kShop = ${process.env.JTL_SHOP_ID}
                    AND bRowversion > ${minRowversion}
                `;
                // console.log('Executing query:', query);
                const result = await pool.request().query(query);
                result.recordset.forEach(r => {
                    // console.log(`Changed item: ${r.kArtikel}, version: 0x${r.bRowversion.toString('hex').toUpperCase()}`);
                    changedIds.add(r.kArtikel);
                });
            }, { showProgress: false });

            if (changedIds.size > 0) console.log(`🔍 Found ${changedIds.size} changed articles.`);
            return changedIds;
        } finally {
            if (pool) await pool.close();
        }
    }
}
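The fetcher normalizes bRowversion to an uppercase '0x…' hex string so that the syncer further below can compare versions as plain strings. That works because rowversion values are fixed-width 8-byte binaries, so every encoded value has the same length and lexicographic order matches numeric order. A quick standalone check of that assumption (not part of the commit):

// Sanity check: fixed-width uppercase hex strings compare in the same order as the bytes.
const older = Buffer.from([0, 0, 0, 0, 0, 0, 0x12, 0x34]);
const newer = Buffer.from([0, 0, 0, 0, 0, 0, 0x12, 0x35]);
const toHex = (buf) => '0x' + buf.toString('hex').toUpperCase();
console.log(toHex(newer));              // '0x0000000000001235'
console.log(toHex(newer) > toHex(older)); // true: string order matches numeric order

This only holds while both strings share the same length and case, which the normalization above guarantees by always emitting 16 uppercase hex digits.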

View File

@@ -5,6 +5,7 @@ import { SyncQueueManager } from '../utils/sync-queue-manager.js';
 import { ProductDataFetcher } from '../services/product-data-fetcher.js';
 import { getExistingIds, deleteObsoleteFiles, writeJsonIfChanged, ensureDir } from '../utils/file-sync-utils.js';
 import pictureSyncer from './picture-syncer.js';
+import productDetailSyncer from './product-detail-syncer.js';

 class CategoryProductsSyncer extends EventEmitter {
     constructor() {
@@ -62,6 +63,7 @@ class CategoryProductsSyncer extends EventEmitter {
     async _fetchAndWriteProducts(ids, dir) {
         const globalImageIds = new Set();
+        const globalArticleIds = new Set();

         await this.dataFetcher.fetchProductsInChunks(ids, async (chunkData) => {
             const { categoryIds, products, productImages } = chunkData;
@@ -78,6 +80,7 @@ class CategoryProductsSyncer extends EventEmitter {
                 if (productsByCategory[record.kKategorie]) {
                     const images = productImages.get(record.kArtikel) || [];
                     images.forEach(imgId => globalImageIds.add(imgId));
+                    globalArticleIds.add(record.kArtikel);
                     productsByCategory[record.kKategorie].push({
                         kArtikel: record.kArtikel,
@@ -105,6 +108,11 @@ class CategoryProductsSyncer extends EventEmitter {
             //console.log(`🖼️ Syncing ${globalImageIds.size} product images...`);
             await pictureSyncer.syncImages(Array.from(globalImageIds), 'products');
         }
+
+        // Sync product details for all articles found
+        if (globalArticleIds.size > 0) {
+            await productDetailSyncer.syncDetails(Array.from(globalArticleIds));
+        }
     }
 }

View File

@@ -0,0 +1,120 @@
import fs from 'fs/promises';
import path from 'path';
import { SyncQueueManager } from '../utils/sync-queue-manager.js';
import { ProductDetailDataFetcher } from '../services/product-detail-data-fetcher.js';
import { getExistingIds, deleteObsoleteFiles, ensureDir, writeJsonIfChanged } from '../utils/file-sync-utils.js';

class ProductDetailSyncer {
    constructor() {
        if (ProductDetailSyncer.instance) {
            return ProductDetailSyncer.instance;
        }
        this.syncQueue = new SyncQueueManager();
        this.dataFetcher = new ProductDetailDataFetcher();
        this.cacheBaseDir = process.env.CACHE_LOCATION || '.';
        ProductDetailSyncer.instance = this;
    }

    async syncDetails(articleIds) {
        await this.syncQueue.executeSync('product-details', async () => {
            await this._performSync(articleIds);
        }, articleIds);
    }

    async _performSync(articleIds) {
        const detailsDir = path.join(this.cacheBaseDir, 'details');
        const stateFile = path.join(this.cacheBaseDir, 'product-details-state.json');

        // Ensure directory exists
        await ensureDir(detailsDir);

        // Load state
        let lastSyncRowversion = null;
        try {
            const state = JSON.parse(await fs.readFile(stateFile, 'utf-8'));
            lastSyncRowversion = state.lastSyncRowversion;
        } catch (err) {
            // State file might not exist yet
        }

        // Get existing files
        const existingIds = await getExistingIds(detailsDir, {
            suffix: '.json'
        });
        const validIds = new Set(articleIds.filter(id => id !== null && id !== undefined));

        // Delete obsolete files
        await deleteObsoleteFiles(
            detailsDir,
            existingIds,
            validIds,
            (id) => `${id}.json`
        );

        // Split into missing and present
        const missingIds = [];
        const presentIds = [];
        for (const id of validIds) {
            if (existingIds.includes(id)) {
                presentIds.push(id);
            } else {
                missingIds.push(id);
            }
        }

        // Determine what to fetch
        const toFetch = new Set(missingIds);
        if (presentIds.length > 0) {
            // Check which present files need update based on rowversion
            //console.log(`Checking changes for ${presentIds.length} present items with lastSyncRowversion: ${lastSyncRowversion}`);
            const changedIds = await this.dataFetcher.fetchChangedArticleIds(presentIds, lastSyncRowversion);
            //console.log(`Got ${changedIds.size} changed items from fetcher`);
            changedIds.forEach(id => toFetch.add(id));
        }

        if (toFetch.size > 0) {
            console.log(`📝 Syncing ${toFetch.size} product details (Missing: ${missingIds.length}, Changed: ${toFetch.size - missingIds.length})...`);
            await this._fetchAndWriteDetails([...toFetch], detailsDir, stateFile, lastSyncRowversion);
        } else {
            //console.log(`✅ No product details to sync.`);
        }
    }

    async _fetchAndWriteDetails(ids, dir, stateFile, currentMaxRowversion) {
        let maxRowversion = currentMaxRowversion;

        await this.dataFetcher.fetchDetailsInChunks(ids, async (record) => {
            const filePath = path.join(dir, `${record.kArtikel}.json`);

            // Update max rowversion
            if (record.bRowversion) {
                // Simple string comparison for hex strings works for sorting/max if length is same.
                // MSSQL rowversions are fixed length (8 bytes), so hex string length should be constant.
                if (!maxRowversion || record.bRowversion > maxRowversion) {
                    maxRowversion = record.bRowversion;
                }
            }

            // Use writeJsonIfChanged which handles reading and comparing
            // It will compare the new object with the existing JSON content
            await writeJsonIfChanged(filePath, {
                kArtikel: record.kArtikel,
                cBeschreibung: record.cBeschreibung || null, // Ensure null is written if missing
                bRowversion: record.bRowversion || null
            });
        });

        // Save new state
        if (maxRowversion && maxRowversion !== currentMaxRowversion) {
            await fs.writeFile(stateFile, JSON.stringify({ lastSyncRowversion: maxRowversion }, null, 2));
        }
    }
}

const instance = new ProductDetailSyncer();
export default instance;
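For orientation, the on-disk artifacts this syncer maintains are one JSON file per article under <cache>/details plus a single state file holding the rowversion high-water mark. A hypothetical consumer would read them roughly like this (the article ID 12345 is made up; an ES module context with top-level await is assumed):

// Hypothetical consumer of the cache written above.
import fs from 'fs/promises';
import path from 'path';

const cacheBaseDir = process.env.CACHE_LOCATION || '.';

// Per-article detail file: { kArtikel, cBeschreibung, bRowversion }
const detail = JSON.parse(
    await fs.readFile(path.join(cacheBaseDir, 'details', '12345.json'), 'utf-8')
);
console.log(detail.kArtikel, detail.bRowversion, (detail.cBeschreibung || '').length);

// Sync state: the rowversion high-water mark used on the next run
const state = JSON.parse(
    await fs.readFile(path.join(cacheBaseDir, 'product-details-state.json'), 'utf-8')
);
console.log('last synced rowversion:', state.lastSyncRowversion);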