import fs from 'fs/promises';
import path from 'path';
import { SyncQueueManager } from '../utils/sync-queue-manager.js';
import { ProductDetailDataFetcher } from '../services/product-detail-data-fetcher.js';
import { getExistingIds, deleteObsoleteFiles, ensureDir, writeJsonIfChanged } from '../utils/file-sync-utils.js';

class ProductDetailSyncer {
  constructor() {
    if (ProductDetailSyncer.instance) {
      return ProductDetailSyncer.instance;
    }
    this.syncQueue = new SyncQueueManager();
    this.dataFetcher = new ProductDetailDataFetcher();
    this.cacheBaseDir = process.env.CACHE_LOCATION || '.';
    ProductDetailSyncer.instance = this;
  }

  async syncDetails(articleIds) {
    await this.syncQueue.executeSync('product-details', async () => {
      await this._performSync(articleIds);
    }, articleIds);
  }

  async _performSync(articleIds) {
    const detailsDir = path.join(this.cacheBaseDir, 'details');
    const stateFile = path.join(this.cacheBaseDir, 'product-details-state.json');

    // Ensure directory exists
    await ensureDir(detailsDir);

    // Load state
    let lastSyncRowversion = null;
    try {
      const state = JSON.parse(await fs.readFile(stateFile, 'utf-8'));
      lastSyncRowversion = state.lastSyncRowversion;
    } catch (err) {
      // State file might not exist yet
    }

    // Get existing files
    const existingIds = await getExistingIds(detailsDir, { suffix: '.json' });
    const validIds = new Set(articleIds.filter(id => id !== null && id !== undefined));

    // Delete obsolete files
    await deleteObsoleteFiles(
      detailsDir,
      existingIds,
      validIds,
      (id) => `${id}.json`
    );

    // Split into missing and present
    const missingIds = [];
    const presentIds = [];
    for (const id of validIds) {
      if (existingIds.includes(id)) {
        presentIds.push(id);
      } else {
        missingIds.push(id);
      }
    }

    // Determine what to fetch
    const toFetch = new Set(missingIds);
    if (presentIds.length > 0) {
      // Check which present files need an update based on rowversion
      //console.log(`Checking changes for ${presentIds.length} present items with lastSyncRowversion: ${lastSyncRowversion}`);
      const changedIds = await this.dataFetcher.fetchChangedArticleIds(presentIds, lastSyncRowversion);
      //console.log(`Got ${changedIds.size} changed items from fetcher`);
      changedIds.forEach(id => toFetch.add(id));
    }

    if (toFetch.size > 0) {
      console.log(`📝 Syncing ${toFetch.size} product details (Missing: ${missingIds.length}, Changed: ${toFetch.size - missingIds.length})...`);
      await this._fetchAndWriteDetails([...toFetch], detailsDir, stateFile, lastSyncRowversion);
    } else {
      //console.log(`✅ No product details to sync.`);
    }
  }

  async _fetchAndWriteDetails(ids, dir, stateFile, currentMaxRowversion) {
    let maxRowversion = currentMaxRowversion;

    await this.dataFetcher.fetchDetailsInChunks(ids, async (record) => {
      const filePath = path.join(dir, `${record.kArtikel}.json`);

      // Update max rowversion
      if (record.bRowversion) {
        // Simple string comparison for hex strings works for sorting/max if the length is the same.
        // MSSQL rowversions are fixed length (8 bytes), so the hex string length should be constant.
        if (!maxRowversion || record.bRowversion > maxRowversion) {
          maxRowversion = record.bRowversion;
        }
      }

      // Use writeJsonIfChanged, which handles reading and comparing:
      // it compares the new object with the existing JSON content and only writes on change.
      await writeJsonIfChanged(filePath, {
        kArtikel: record.kArtikel,
        cBeschreibung: record.cBeschreibung || null, // Ensure null is written if missing
        bRowversion: record.bRowversion || null
      });
    });

    // Save new state
    if (maxRowversion && maxRowversion !== currentMaxRowversion) {
      await fs.writeFile(stateFile, JSON.stringify({ lastSyncRowversion: maxRowversion }, null, 2));
    }
  }
}

const instance = new ProductDetailSyncer();
export default instance;
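// Usage sketch (hypothetical caller, not part of this module; the real call sites
// and the exact SyncQueueManager semantics are assumptions):
//
//   import productDetailSyncer from './product-detail-syncer.js';
//   await productDetailSyncer.syncDetails([101, 102, 103]);
//
// Because the module exports a pre-constructed singleton, every importer shares the
// same instance and queue, so overlapping syncDetails() calls are presumably
// coordinated under the 'product-details' queue key rather than running concurrently.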