Compare commits

..

11 Commits

25 changed files with 2552 additions and 740 deletions

View File

@@ -1,4 +1,5 @@
DB_HOST=10.10.10.34
DB_PORT=1433
DB_USER=sa
DB_PASSWORD=
DB_DATABASE=eazybusiness
@@ -8,6 +9,9 @@ ROOT_CATEGORY_ID=0
JTL_SHOP_ID=0
JTL_SPRACHE_ID=1
JTL_PLATTFORM_ID=1
JTL_STEUERZONE_ID=1
SERVER_PORT=3991
SERVER_HOST=127.0.0.1
SYNC_INTERVAL_MS=600000
EXCLUDE_CATEGORY_IDS=
INCLUDE_CATEGORY_IDS=
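Entries such as SYNC_INTERVAL_MS and EXCLUDE_CATEGORY_IDS/INCLUDE_CATEGORY_IDS are read through process.env in the syncers and server code further down in this compare. A minimal sketch of how such values are typically parsed (variable names taken from this .env; the helper itself is illustrative, not code from the diff):

import 'dotenv/config';

// Illustrative only: the repository parses these inline where needed (see the syncers below).
const syncIntervalMs = parseInt(process.env.SYNC_INTERVAL_MS, 10) || 60000;

// Comma-separated ID lists become a Set of integers; an empty value yields an empty Set.
const parseIdList = (value) =>
  new Set(
    (value || '')
      .split(',')
      .map((id) => parseInt(id.trim(), 10))
      .filter((id) => !isNaN(id))
  );

const excludedIds = parseIdList(process.env.EXCLUDE_CATEGORY_IDS);
const includedIds = parseIdList(process.env.INCLUDE_CATEGORY_IDS);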

File diff suppressed because it is too large.

nodemon.json (new file, 11 lines)
View File

@@ -0,0 +1,11 @@
{
"watch": [
"src",
"index.html"
],
"ext": "js,json,html",
"ignore": [
"node_modules",
"cache"
]
}

package-lock.json (generated, 402 lines changed)
View File

@@ -8,12 +8,16 @@
"name": "category-syncer",
"version": "1.0.0",
"dependencies": {
"compression": "^1.8.1",
"dotenv": "^17.2.3",
"express": "^5.1.0",
"mssql": "^12.1.0",
"openai": "^6.9.1",
"sharp": "^0.34.5",
"socket.io": "^4.8.1"
},
"devDependencies": {
"nodemon": "^3.1.11"
}
},
"node_modules/@azure-rest/core-client": {
@@ -842,6 +846,27 @@
"node": ">= 14"
}
},
"node_modules/anymatch": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
"integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
"dev": true,
"license": "ISC",
"dependencies": {
"normalize-path": "^3.0.0",
"picomatch": "^2.0.4"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"dev": true,
"license": "MIT"
},
"node_modules/base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
@@ -871,6 +896,19 @@
"node": "^4.5.0 || >= 5.9"
}
},
"node_modules/binary-extensions": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
"integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/bl": {
"version": "6.1.4",
"resolved": "https://registry.npmjs.org/bl/-/bl-6.1.4.tgz",
@@ -915,6 +953,30 @@
"node": ">=0.10.0"
}
},
"node_modules/brace-expansion": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/braces": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/buffer": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz",
@@ -998,6 +1060,31 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/chokidar": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
"integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
"dev": true,
"license": "MIT",
"dependencies": {
"anymatch": "~3.1.2",
"braces": "~3.0.2",
"glob-parent": "~5.1.2",
"is-binary-path": "~2.1.0",
"is-glob": "~4.0.1",
"normalize-path": "~3.0.0",
"readdirp": "~3.6.0"
},
"engines": {
"node": ">= 8.10.0"
},
"funding": {
"url": "https://paulmillr.com/funding/"
},
"optionalDependencies": {
"fsevents": "~2.3.2"
}
},
"node_modules/commander": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz",
@@ -1007,6 +1094,67 @@
"node": ">=16"
}
},
"node_modules/compressible": {
"version": "2.0.18",
"resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz",
"integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==",
"license": "MIT",
"dependencies": {
"mime-db": ">= 1.43.0 < 2"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/compression": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/compression/-/compression-1.8.1.tgz",
"integrity": "sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==",
"license": "MIT",
"dependencies": {
"bytes": "3.1.2",
"compressible": "~2.0.18",
"debug": "2.6.9",
"negotiator": "~0.6.4",
"on-headers": "~1.1.0",
"safe-buffer": "5.2.1",
"vary": "~1.1.2"
},
"engines": {
"node": ">= 0.8.0"
}
},
"node_modules/compression/node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"license": "MIT",
"dependencies": {
"ms": "2.0.0"
}
},
"node_modules/compression/node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"license": "MIT"
},
"node_modules/compression/node_modules/negotiator": {
"version": "0.6.4",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz",
"integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
"dev": true,
"license": "MIT"
},
"node_modules/content-disposition": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz",
@@ -1400,6 +1548,19 @@
"url": "https://opencollective.com/express"
}
},
"node_modules/fill-range": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"license": "MIT",
"dependencies": {
"to-regex-range": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/finalhandler": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz",
@@ -1435,6 +1596,21 @@
"node": ">= 0.8"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/function-bind": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
@@ -1481,6 +1657,19 @@
"node": ">= 0.4"
}
},
"node_modules/glob-parent": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dev": true,
"license": "ISC",
"dependencies": {
"is-glob": "^4.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/gopd": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
@@ -1493,6 +1682,16 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/has-symbols": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
@@ -1599,6 +1798,13 @@
],
"license": "BSD-3-Clause"
},
"node_modules/ignore-by-default": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
"integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==",
"dev": true,
"license": "ISC"
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
@@ -1614,6 +1820,19 @@
"node": ">= 0.10"
}
},
"node_modules/is-binary-path": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
"integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
"dev": true,
"license": "MIT",
"dependencies": {
"binary-extensions": "^2.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/is-docker": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz",
@@ -1629,6 +1848,29 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/is-extglob": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-glob": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-extglob": "^2.1.1"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-inside-container": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz",
@@ -1647,6 +1889,16 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/is-number": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.12.0"
}
},
"node_modules/is-promise": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz",
@@ -1814,6 +2066,19 @@
"url": "https://opencollective.com/express"
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
@@ -1854,6 +2119,45 @@
"node": ">= 0.6"
}
},
"node_modules/nodemon": {
"version": "3.1.11",
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.11.tgz",
"integrity": "sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==",
"dev": true,
"license": "MIT",
"dependencies": {
"chokidar": "^3.5.2",
"debug": "^4",
"ignore-by-default": "^1.0.1",
"minimatch": "^3.1.2",
"pstree.remy": "^1.1.8",
"semver": "^7.5.3",
"simple-update-notifier": "^2.0.0",
"supports-color": "^5.5.0",
"touch": "^3.1.0",
"undefsafe": "^2.0.5"
},
"bin": {
"nodemon": "bin/nodemon.js"
},
"engines": {
"node": ">=10"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/nodemon"
}
},
"node_modules/normalize-path": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
@@ -1887,6 +2191,15 @@
"node": ">= 0.8"
}
},
"node_modules/on-headers": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz",
"integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -1954,6 +2267,19 @@
"url": "https://opencollective.com/express"
}
},
"node_modules/picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8.6"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/process": {
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
@@ -1976,6 +2302,13 @@
"node": ">= 0.10"
}
},
"node_modules/pstree.remy": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz",
"integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==",
"dev": true,
"license": "MIT"
},
"node_modules/qs": {
"version": "6.14.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
@@ -2031,6 +2364,19 @@
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}
},
"node_modules/readdirp": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
"integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
"dev": true,
"license": "MIT",
"dependencies": {
"picomatch": "^2.2.1"
},
"engines": {
"node": ">=8.10.0"
}
},
"node_modules/router": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz",
@@ -2256,6 +2602,19 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/simple-update-notifier": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz",
"integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"semver": "^7.5.3"
},
"engines": {
"node": ">=10"
}
},
"node_modules/socket.io": {
"version": "4.8.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz",
@@ -2436,6 +2795,19 @@
"safe-buffer": "~5.2.0"
}
},
"node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/tarn": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz",
@@ -2466,6 +2838,19 @@
"node": ">=18.17"
}
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-number": "^7.0.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/toidentifier": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
@@ -2475,6 +2860,16 @@
"node": ">=0.6"
}
},
"node_modules/touch": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz",
"integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==",
"dev": true,
"license": "ISC",
"bin": {
"nodetouch": "bin/nodetouch.js"
}
},
"node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
@@ -2495,6 +2890,13 @@
"node": ">= 0.6"
}
},
"node_modules/undefsafe": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz",
"integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==",
"dev": true,
"license": "MIT"
},
"node_modules/undici-types": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",

View File

@@ -2,17 +2,23 @@
"name": "category-syncer",
"version": "1.0.0",
"description": "Library to sync JTL categories to local cache",
"main": "category-syncer.js",
"main": "src/index.js",
"type": "module",
"scripts": {
"dev": "nodemon src/index.js",
"start": "node src/index.js",
"test": "node index.js"
},
"dependencies": {
"compression": "^1.8.1",
"dotenv": "^17.2.3",
"express": "^5.1.0",
"mssql": "^12.1.0",
"openai": "^6.9.1",
"sharp": "^0.34.5",
"socket.io": "^4.8.1"
},
"devDependencies": {
"nodemon": "^3.1.11"
}
}

View File

@@ -1,4 +1,4 @@
import categorySyncer from './syncers/category-syncer.js';
import categorySyncer from './syncers/categories-syncer.js';
import pictureSyncer from './syncers/picture-syncer.js';
import categoryProductsSyncer from './syncers/category-products-syncer.js';
import { startServer } from './server/server.js';

View File

@@ -11,7 +11,13 @@ export function registerIndex(app, cache) {
res.set('ETag', cache.html.etag);
res.set('Content-Type', 'text/html');
res.send(cache.html.data);
// Inject ETag into HTML as meta tag
const htmlWithEtag = cache.html.data.replace(
'<head>',
`<head>\n <meta name="app-version" content="${cache.html.etag}">`
);
res.send(htmlWithEtag);
} catch (err) {
console.error('Error serving index.html:', err);
res.status(500).send('Error loading page');

View File

@@ -1,3 +1,7 @@
import fs from 'fs/promises';
import path from 'path';
import crypto from 'crypto';
export function registerProducts(app, cache, updateProductCache) {
app.get('/api/categories/:id/products', async (req, res) => {
try {
@@ -29,4 +33,36 @@ export function registerProducts(app, cache, updateProductCache) {
res.status(500).json({ error: 'Failed to load products' });
}
});
app.get('/api/products/:id/details', async (req, res) => {
try {
const id = parseInt(req.params.id);
const filePath = path.join(process.cwd(), 'cache', 'details', `${id}.json`);
try {
const data = await fs.readFile(filePath, 'utf8');
const parsed = JSON.parse(data);
// Generate ETag from rowversion if available, otherwise use content hash
// Example value: "bRowversion": "0x0000000000470394"
const etag = parsed.bRowversion ? `"${parsed.bRowversion}"` : crypto.createHash('md5').update(data).digest('hex');
if (req.headers['if-none-match'] === etag) {
return res.status(304).end();
}
res.set('Cache-Control', 'public, max-age=60, must-revalidate');
res.set('ETag', etag);
res.json(parsed);
} catch (err) {
if (err.code === 'ENOENT') {
return res.status(404).json({ error: 'Product details not found' });
}
throw err;
}
} catch (err) {
console.error(`Error serving details for product ${req.params.id}:`, err);
res.status(500).json({ error: 'Failed to load product details' });
}
});
}
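A hedged example of a client using the conditional-request support added above; the endpoint path and headers come from the route, the fetch wrapper itself is illustrative:

// Cache details per product id together with the ETag the server returned.
const detailsCache = new Map(); // id -> { etag, data }

async function loadProductDetails(id) {
  const cached = detailsCache.get(id);
  const headers = cached ? { 'If-None-Match': cached.etag } : {};
  const res = await fetch(`/api/products/${id}/details`, { headers });
  if (res.status === 304) return cached.data; // unchanged on the server
  if (!res.ok) throw new Error(`Failed to load details: ${res.status}`);
  const data = await res.json();
  detailsCache.set(id, { etag: res.headers.get('ETag'), data });
  return data;
}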

View File

@@ -4,6 +4,7 @@ import { Server } from 'socket.io';
import path from 'path';
import { fileURLToPath } from 'url';
import fs from 'fs/promises';
import compression from 'compression';
import { registerCategories } from './routes/categories.js';
import { registerProducts } from './routes/products.js';
import { registerImages } from './routes/images.js';
@@ -18,6 +19,9 @@ export function startServer(categorySyncer, categoryProductsSyncer) {
const httpServer = createServer(app);
const io = new Server(httpServer);
// Enable gzip compression for all responses
app.use(compression());
const PORT = process.env.SERVER_PORT || 3000;
const HOST = process.env.SERVER_HOST || '0.0.0.0';
const CACHE_DIR = process.env.CACHE_LOCATION || './cache';
@@ -99,7 +103,7 @@ export function startServer(categorySyncer, categoryProductsSyncer) {
}
// Register socket connection handler
registerConnection(io);
registerConnection(io, CACHE_DIR, cache);
// Register routes
registerCategories(app, cache);

View File

@@ -1,6 +1,28 @@
export function registerConnection(io) {
import { findMatches } from '../utils/search-helper.js';
export function registerConnection(io, cacheDir, cache) {
io.on('connection', (socket) => {
console.log('🔌 Client connected');
socket.on('checkVersion', (clientEtag) => {
const serverEtag = cache.html.etag;
if (clientEtag !== serverEtag) {
console.log(`⚠️ Version mismatch - Client: ${clientEtag}, Server: ${serverEtag}`);
socket.emit('versionMismatch', { serverEtag });
}
});
socket.on('search', async (query) => {
// console.log(`🔍 Search request: "${query}"`);
try {
const matches = await findMatches(query, cacheDir);
socket.emit('searchResults', { query, matches });
} catch (err) {
console.error('Search error:', err);
socket.emit('searchResults', { query, matches: [] });
}
});
socket.on('disconnect', () => {
console.log('🔌 Client disconnected');
});

View File

@@ -0,0 +1,77 @@
import fs from 'fs/promises';
import path from 'path';
export async function findMatches(query, cacheDir) {
if (!query || !query.trim()) return [];
const terms = query.toLowerCase().split(/\s+/).filter(t => t);
// Load category tree
const treePath = path.join(cacheDir, 'category_tree.json');
let tree = [];
try {
const treeData = await fs.readFile(treePath, 'utf-8');
tree = JSON.parse(treeData);
} catch (e) {
console.error("Failed to load category tree for search", e);
return [];
}
const matchingCategoryIds = new Set();
// Helper to check text match
const isMatch = (text) => {
if (!text) return false;
const lower = text.toLowerCase();
return terms.every(t => lower.includes(t));
};
// Flatten tree to linear list for easier iteration
const queue = [...tree];
const nodes = [];
while (queue.length > 0) {
const node = queue.shift();
nodes.push(node);
if (node.children) {
queue.push(...node.children);
}
}
// Process in chunks to avoid too many open files
const CHUNK_SIZE = 50;
for (let i = 0; i < nodes.length; i += CHUNK_SIZE) {
const chunk = nodes.slice(i, i + CHUNK_SIZE);
await Promise.all(chunk.map(async (node) => {
let nodeMatches = false;
// Check category name
if (isMatch(node.cName)) {
nodeMatches = true;
} else {
// Check products
try {
const prodPath = path.join(cacheDir, 'products', `category_${node.kKategorie}.json`);
// readFile throws ENOENT for categories without a product file; handled in the catch below.
const prodData = await fs.readFile(prodPath, 'utf-8');
const products = JSON.parse(prodData);
if (products.some(p => isMatch(p.cName))) {
nodeMatches = true;
}
} catch (e) {
// Ignore missing files
}
}
if (nodeMatches) {
matchingCategoryIds.add(node.kKategorie);
}
}));
if (matchingCategoryIds.size >= 20) {
break;
}
}
return Array.from(matchingCategoryIds);
}
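A usage sketch (the cache path mirrors the server's CACHE_LOCATION fallback; the query string is made up). Note the early exit above: once 20 or more categories have matched, the remaining chunks are skipped, so broad queries return a best-effort subset of category ids:

const cacheDir = process.env.CACHE_LOCATION || './cache';
const matches = await findMatches('garden table', cacheDir);
console.log(`Matched ${matches.length} categories:`, matches);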

View File

@@ -0,0 +1,141 @@
import { createConnection } from '../utils/database.js';
/**
* CategoryDataFetcher - Handles all category-related database queries
*/
export class CategoryDataFetcher {
/**
* Fetch all categories with hierarchy information
* @returns {Promise<Array>} - Array of category records
*/
async fetchCategories() {
let pool;
try {
pool = await createConnection();
const result = await pool.request().query(`
SELECT kKategorie, kOberKategorie, nSort
FROM tkategorie
ORDER BY nSort, kKategorie
`);
return result.recordset;
} finally {
if (pool) await pool.close();
}
}
/**
* Fetch category names for a specific language and shop
* @returns {Promise<Array>} - Array of name records
*/
async fetchCategoryNames() {
let pool;
try {
pool = await createConnection();
const result = await pool.request().query(`
SELECT kKategorie, cName
FROM tKategorieSprache
WHERE kSprache = ${process.env.JTL_SPRACHE_ID}
AND kShop = ${process.env.JTL_SHOP_ID}
`);
return result.recordset;
} finally {
if (pool) await pool.close();
}
}
/**
* Fetch article counts per category
* @returns {Promise<Array>} - Array of count records
*/
async fetchArticleCounts() {
let pool;
try {
pool = await createConnection();
const result = await pool.request().query(`
SELECT ka.kKategorie, COUNT(a.kArtikel) as count
FROM tkategorieartikel ka
JOIN tArtikel a ON ka.kArtikel = a.kArtikel
WHERE a.cAktiv = 'Y'
GROUP BY ka.kKategorie
`);
return result.recordset;
} finally {
if (pool) await pool.close();
}
}
/**
* Fetch category images (first image per category)
* @returns {Promise<Array>} - Array of image records
*/
async fetchCategoryImages() {
let pool;
try {
pool = await createConnection();
const result = await pool.request().query(`
SELECT kKategorie, kBild
FROM (
SELECT kKategorie, kBild, ROW_NUMBER() OVER (PARTITION BY kKategorie ORDER BY nNr ASC) as rn
FROM tKategoriebildPlattform
WHERE kShop = ${process.env.JTL_SHOP_ID}
AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
) t
WHERE rn = 1
`);
return result.recordset;
} finally {
if (pool) await pool.close();
}
}
/**
* Fetch all category data at once
* @returns {Promise<Object>} - Object with categories, names, articleCounts, images
*/
async fetchAllCategoryData() {
let pool;
try {
pool = await createConnection();
const [categoriesResult, namesResult, articleCountsResult, imagesResult] = await Promise.all([
pool.request().query(`
SELECT kKategorie, kOberKategorie, nSort
FROM tkategorie
ORDER BY nSort, kKategorie
`),
pool.request().query(`
SELECT kKategorie, cName
FROM tKategorieSprache
WHERE kSprache = ${process.env.JTL_SPRACHE_ID}
AND kShop = ${process.env.JTL_SHOP_ID}
`),
pool.request().query(`
SELECT ka.kKategorie, COUNT(a.kArtikel) as count
FROM tkategorieartikel ka
JOIN tArtikel a ON ka.kArtikel = a.kArtikel
WHERE a.cAktiv = 'Y'
GROUP BY ka.kKategorie
`),
pool.request().query(`
SELECT kKategorie, kBild
FROM (
SELECT kKategorie, kBild, ROW_NUMBER() OVER (PARTITION BY kKategorie ORDER BY nNr ASC) as rn
FROM tKategoriebildPlattform
WHERE kShop = ${process.env.JTL_SHOP_ID}
AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
) t
WHERE rn = 1
`)
]);
return {
categories: categoriesResult.recordset,
names: namesResult.recordset,
articleCounts: articleCountsResult.recordset,
images: imagesResult.recordset
};
} finally {
if (pool) await pool.close();
}
}
}
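An illustrative caller for the fetcher above (the import path assumes a script living in src/). fetchAllCategoryData opens one pool and runs the four queries in parallel; the individual fetch methods remain available for callers that only need one result set:

import { CategoryDataFetcher } from './services/categories-data-fetcher.js';

const fetcher = new CategoryDataFetcher();
const { categories, names, articleCounts, images } = await fetcher.fetchAllCategoryData();
console.log(`Loaded ${categories.length} categories, ${names.length} names, ${images.length} category images.`);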

View File

@@ -0,0 +1,117 @@
import { createConnection } from '../utils/database.js';
import { processInChunks, createInClause } from '../utils/database-utils.js';
/**
* ProductDataFetcher - Handles all product-related database queries
*/
export class ProductDataFetcher {
/**
* Fetch products for given category IDs
* @param {Array<number>} categoryIds - Category IDs to fetch products for
* @param {Object} pool - Database connection pool
* @returns {Promise<Array>} - Array of product records with kKategorie, kArtikel, cName
*/
async fetchProductsForCategories(categoryIds, pool) {
const list = createInClause(categoryIds);
const result = await pool.request().query(`
SELECT
ka.kKategorie,
ka.kArtikel,
ab.cName,
a.cArtNr,
a.fVKNetto,
a.cBarcode,
a.kSteuerklasse,
sk.cName AS cSteuerklasseName,
ss.fSteuersatz
FROM tkategorieartikel ka
JOIN tArtikelBeschreibung ab ON ka.kArtikel = ab.kArtikel
JOIN tArtikel a ON ka.kArtikel = a.kArtikel
LEFT JOIN tSteuerklasse sk ON a.kSteuerklasse = sk.kSteuerklasse
LEFT JOIN tSteuersatz ss ON sk.kSteuerklasse = ss.kSteuerklasse AND ss.kSteuerzone = ${process.env.JTL_STEUERZONE_ID}
WHERE ab.kSprache = ${process.env.JTL_SPRACHE_ID}
AND a.cAktiv = 'Y'
AND ab.kPlattform = ${process.env.JTL_PLATTFORM_ID}
AND ab.kShop = ${process.env.JTL_SHOP_ID}
AND ka.kKategorie IN (${list})
ORDER BY (
CASE
WHEN a.bRowversion >= ab.bRowversion AND a.bRowversion >= ka.bRowversion THEN a.bRowversion
WHEN ab.bRowversion >= a.bRowversion AND ab.bRowversion >= ka.bRowversion THEN ab.bRowversion
ELSE ka.bRowversion
END
) DESC
`);
return result.recordset;
}
/**
* Fetch product images for given article IDs
* @param {Array<number>} articleIds - Article IDs to fetch images for
* @param {Object} pool - Database connection pool
* @returns {Promise<Map>} - Map of kArtikel -> array of kBild
*/
async fetchProductImages(articleIds, pool) {
const list = createInClause(articleIds);
const result = await pool.request().query(`
SELECT kArtikel, kBild
FROM tArtikelbildPlattform
WHERE kShop = ${process.env.JTL_SHOP_ID}
AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
AND kArtikel IN (${list})
ORDER BY nNr ASC
`);
const productImages = new Map();
result.recordset.forEach(r => {
if (!productImages.has(r.kArtikel)) {
productImages.set(r.kArtikel, []);
}
productImages.get(r.kArtikel).push(r.kBild);
});
return productImages;
}
/**
* Fetch products with images for categories in chunks
* @param {Array<number>} categoryIds - All category IDs to process
* @param {Function} chunkCallback - Callback for each chunk (receives chunk data)
* @param {number} chunkSize - Size of each chunk (default: 50)
* @returns {Promise<void>}
*/
async fetchProductsInChunks(categoryIds, chunkCallback, chunkSize = 50) {
let pool;
try {
pool = await createConnection();
await processInChunks(categoryIds, chunkSize, async (chunk) => {
// Fetch products for this chunk
const products = await this.fetchProductsForCategories(chunk, pool);
// Collect all article IDs
const articleIds = new Set();
products.forEach(p => articleIds.add(p.kArtikel));
// Fetch images for these articles
let productImages = new Map();
if (articleIds.size > 0) {
productImages = await this.fetchProductImages(Array.from(articleIds), pool);
}
// Call the callback with chunk data
await chunkCallback({
categoryIds: chunk,
products,
productImages
});
}, { showProgress: false, itemName: 'categories' });
} finally {
if (pool) await pool.close();
}
}
}
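An illustrative driver for the chunked product fetcher; the category ids are hypothetical and the import path assumes a script in src/, but the callback shape matches fetchProductsInChunks above:

import { ProductDataFetcher } from './services/category-products-data-fetcher.js';

const fetcher = new ProductDataFetcher();
const categoryIds = [12, 34, 56]; // hypothetical ids

await fetcher.fetchProductsInChunks(categoryIds, async ({ products, productImages }) => {
  // Invoked once per chunk of up to 50 categories.
  for (const p of products) {
    const images = productImages.get(p.kArtikel) || [];
    console.log(`Category ${p.kKategorie}: ${p.cName} (${p.cArtNr}), ${images.length} image(s)`);
  }
});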

View File

@@ -0,0 +1,40 @@
import { createConnection } from '../utils/database.js';
import { processInChunks, createInClause } from '../utils/database-utils.js';
/**
* ImageDataFetcher - Handles all image-related database queries
*/
export class ImageDataFetcher {
/**
* Fetch image binary data for given image IDs
* @param {Array<number>} imageIds - Image IDs to fetch
* @param {Function} imageCallback - Callback for each image (receives {kBild, bBild})
* @param {number} chunkSize - Size of each chunk (default: 50)
* @returns {Promise<void>}
*/
async fetchImagesInChunks(imageIds, imageCallback, chunkSize = 50) {
let pool;
try {
pool = await createConnection();
await processInChunks(imageIds, chunkSize, async (chunk) => {
const list = createInClause(chunk);
const result = await pool.request().query(`
SELECT kBild, bBild
FROM tBild
WHERE kBild IN (${list})
`);
for (const record of result.recordset) {
if (record.bBild) {
await imageCallback(record);
}
}
}, { showProgress: true, itemName: 'images' });
} finally {
if (pool) await pool.close();
}
}
}
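For illustration, a minimal consumer of the image fetcher; the real consumer in this compare is picture-syncer.js, which pipes bBild through sharp instead of writing the raw blob:

import fs from 'fs/promises';
import { ImageDataFetcher } from './services/image-data-fetcher.js';

const outDir = './cache/img/raw'; // hypothetical output directory
await fs.mkdir(outDir, { recursive: true });

const fetcher = new ImageDataFetcher();
// Each callback receives { kBild, bBild }; records without binary data are skipped upstream.
await fetcher.fetchImagesInChunks([101, 102], async ({ kBild, bBild }) => {
  await fs.writeFile(`${outDir}/${kBild}.bin`, bBild);
});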

View File

@@ -0,0 +1,115 @@
import { createConnection } from '../utils/database.js';
import { processInChunks, createInClause } from '../utils/database-utils.js';
/**
* ProductDetailDataFetcher - Handles fetching product descriptions
*/
export class ProductDetailDataFetcher {
/**
* Fetch product descriptions for given article IDs
* @param {Array<number>} articleIds - Article IDs to fetch details for
* @param {Function} detailCallback - Callback for each detail (receives {kArtikel, cBeschreibung})
* @param {number} chunkSize - Size of each chunk (default: 50)
* @returns {Promise<void>}
*/
async fetchDetailsInChunks(articleIds, detailCallback, chunkSize = 50) {
let pool;
try {
pool = await createConnection();
await processInChunks(articleIds, chunkSize, async (chunk) => {
const list = createInClause(chunk);
const result = await pool.request().query(`
SELECT kArtikel, cBeschreibung, bRowversion
FROM tArtikelBeschreibung
WHERE kArtikel IN (${list})
AND kSprache = ${process.env.JTL_SPRACHE_ID}
AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
AND kShop = ${process.env.JTL_SHOP_ID}
`);
const foundIds = new Set();
for (const record of result.recordset) {
foundIds.add(record.kArtikel);
// Convert Buffer or binary string to hex string if needed
if (Buffer.isBuffer(record.bRowversion)) {
record.bRowversion = '0x' + record.bRowversion.toString('hex').toUpperCase();
} else if (typeof record.bRowversion === 'string' && !record.bRowversion.startsWith('0x')) {
// Assume binary string
record.bRowversion = '0x' + Buffer.from(record.bRowversion, 'binary').toString('hex').toUpperCase();
}
if (!record.cBeschreibung) {
console.log(`⚠️ Item ${record.kArtikel} has no description, writing empty file.`);
}
await detailCallback(record);
}
// Check for missing items in this chunk
chunk.forEach(id => {
if (!foundIds.has(id)) {
// console.log(`⚠️ Item ${id} not found in tArtikelBeschreibung (or filtered out).`);
}
});
}, { showProgress: true, itemName: 'details' });
} finally {
if (pool) await pool.close();
}
}
/**
* Fetch IDs of articles that have changed since a given version
* @param {Array<number>} articleIds - Candidate article IDs
* @param {string} minRowversion - Minimum rowversion (hex string)
* @returns {Promise<Set<number>>} - Set of changed article IDs
*/
async fetchChangedArticleIds(articleIds, minRowversion) {
//console.log(`🔍 Checking changes for ${articleIds ? articleIds.length : 0} articles against version ${minRowversion}`);
if (!articleIds || articleIds.length === 0) return new Set();
// If no minRowversion, all are considered changed
if (!minRowversion) {
console.log('⚠️ No minRowversion provided, fetching all.');
return new Set(articleIds);
}
let pool;
const changedIds = new Set();
try {
pool = await createConnection();
await processInChunks(articleIds, 2000, async (chunk) => {
const list = createInClause(chunk);
// minRowversion is expected as a '0x...' hex string; SQL Server parses it as a binary literal in the comparison below.
const query = `
SELECT kArtikel, bRowversion
FROM tArtikelBeschreibung
WHERE kArtikel IN (${list})
AND kSprache = ${process.env.JTL_SPRACHE_ID}
AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
AND kShop = ${process.env.JTL_SHOP_ID}
AND bRowversion > ${minRowversion}
`;
// console.log('Executing query:', query);
const result = await pool.request().query(query);
result.recordset.forEach(r => {
// console.log(`Changed item: ${r.kArtikel}, version: 0x${r.bRowversion.toString('hex').toUpperCase()}`);
changedIds.add(r.kArtikel);
});
}, { showProgress: false });
if (changedIds.size > 0) console.log(`🔍 Found ${changedIds.size} changed articles.`);
return changedIds;
} finally {
if (pool) await pool.close();
}
}
}
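An illustrative combination of the two methods above: check which candidate articles moved past the last synced rowversion, then fetch only those descriptions. The ids and state value are made up; the rowversion format matches the example used in the details route:

import { ProductDetailDataFetcher } from './services/product-detail-data-fetcher.js';

const fetcher = new ProductDetailDataFetcher();
const candidateIds = [1001, 1002, 1003];          // hypothetical article ids
const lastSyncRowversion = '0x0000000000470394';  // hex string, as persisted by the detail syncer

const changed = await fetcher.fetchChangedArticleIds(candidateIds, lastSyncRowversion);
await fetcher.fetchDetailsInChunks(Array.from(changed), async ({ kArtikel, cBeschreibung, bRowversion }) => {
  console.log(`Article ${kArtikel} now at ${bRowversion}, description length ${(cBeschreibung || '').length}`);
});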

View File

@@ -0,0 +1,106 @@
import { EventEmitter } from 'events';
import fs from 'fs/promises';
import path from 'path';
import { SyncQueueManager } from '../utils/sync-queue-manager.js';
import { CategoryDataFetcher } from '../services/categories-data-fetcher.js';
import { buildTree, pruneTree, buildTranslationTemplate, formatTranslationTemplate } from '../utils/category-tree-utils.js';
import { readTextFile } from '../utils/file-sync-utils.js';
class CategorySyncer extends EventEmitter {
constructor() {
super();
if (CategorySyncer.instance) {
return CategorySyncer.instance;
}
this.syncQueue = new SyncQueueManager();
this.dataFetcher = new CategoryDataFetcher();
this.cacheDir = process.env.CACHE_LOCATION || '.';
this.lastTreeString = null;
this.lastTemplateString = null;
// Load existing template if it exists
this._loadExistingTemplate();
CategorySyncer.instance = this;
}
async _loadExistingTemplate() {
const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
this.lastTemplateString = await readTextFile(templatePath);
const treePath = path.join(this.cacheDir, 'category_tree.json');
this.lastTreeString = await readTextFile(treePath);
}
async triggerSync() {
await this.syncQueue.executeSync('category-sync', async () => {
await this._doSync();
});
}
async _doSync() {
const startTime = Date.now();
try {
await this._syncFromDb();
const duration = Date.now() - startTime;
// Log completion and next sync time
const syncInterval = parseInt(process.env.SYNC_INTERVAL_MS) || 60000;
const minutes = Math.round(syncInterval / 60000);
//console.log(`✅ Sync completed in ${duration}ms. Next sync in ${minutes} minute${minutes !== 1 ? 's' : ''}`);
} catch (err) {
console.error('❌ Sync failed:', err);
}
}
async _syncFromDb() {
// Fetch all category data
const { categories, names, articleCounts, images } = await this.dataFetcher.fetchAllCategoryData();
// Build tree with ROOT_CATEGORY_ID filter (if set)
let tree = buildTree(categories, names, articleCounts, images, true);
// Deep copy tree for unpruned version (before pruning modifies it)
const unprunedTree = JSON.parse(JSON.stringify(tree));
// Generate translation template BEFORE pruning (to include all categories)
const translationTemplate = buildTranslationTemplate(tree);
const templateString = formatTranslationTemplate(translationTemplate);
// Now prune for the main tree
tree = pruneTree(tree);
// Ensure directory exists
await fs.mkdir(this.cacheDir, { recursive: true });
// Compare pruned tree
const treeString = JSON.stringify(tree, null, 2);
const changed = this.lastTreeString !== treeString;
if (changed) {
// Save template if it changed
if (this.lastTemplateString !== templateString) {
const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
await fs.writeFile(templatePath, templateString);
console.log(`💾 Translation template saved to ${templatePath}`);
this.lastTemplateString = templateString;
}
const filePath = path.join(this.cacheDir, 'category_tree.json');
await fs.writeFile(filePath, treeString);
console.log(`💾 Category tree saved to ${filePath}`);
this.lastTreeString = treeString;
console.log('📢 Tree updated.');
} else {
//console.log('🤷 No changes detected in category tree.');
}
this.emit('synced', { tree, unprunedTree, changed });
}
}
const instance = new CategorySyncer();
export default instance;
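The SyncQueueManager that this and the other refactored syncers delegate to is not part of this excerpt. Judging only from the call sites (executeSync(name, fn, payload?) replacing the old isSyncing/queued flags), it presumably serializes runs per name and coalesces at most one pending run; a rough sketch of that contract, not the repository's actual implementation:

// Hypothetical sketch: serialize syncs per name and coalesce one pending run.
export class SyncQueueManager {
  constructor() {
    this.running = new Map();  // name -> true while a sync is in flight
    this.pending = new Map();  // name -> re-run closure, at most one queued per name
  }

  async executeSync(name, fn, payload) {
    if (this.running.get(name)) {
      // Coalesce: remember only the most recently requested run for this name.
      this.pending.set(name, () => this.executeSync(name, fn, payload));
      return;
    }
    this.running.set(name, true);
    try {
      await fn(payload);
    } catch (err) {
      console.error(`❌ Sync '${name}' failed:`, err);
    } finally {
      this.running.set(name, false);
      const next = this.pending.get(name);
      if (next) {
        this.pending.delete(name);
        setImmediate(next);
      }
    }
  }
}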

View File

@@ -1,8 +1,11 @@
import fs from 'fs/promises';
import path from 'path';
import { EventEmitter } from 'events';
import { createConnection } from '../utils/database.js';
import { SyncQueueManager } from '../utils/sync-queue-manager.js';
import { ProductDataFetcher } from '../services/category-products-data-fetcher.js';
import { getExistingIds, deleteObsoleteFiles, writeJsonIfChanged, ensureDir } from '../utils/file-sync-utils.js';
import pictureSyncer from './picture-syncer.js';
import productDetailSyncer from './product-detail-syncer.js';
class CategoryProductsSyncer extends EventEmitter {
constructor() {
@@ -10,38 +13,18 @@ class CategoryProductsSyncer extends EventEmitter {
if (CategoryProductsSyncer.instance) {
return CategoryProductsSyncer.instance;
}
this.cacheBaseDir = process.env.CACHE_LOCATION || '.';
// Track syncing state
this.isSyncing = false;
this.queuedCategoryIds = null;
this.syncQueue = new SyncQueueManager();
this.dataFetcher = new ProductDataFetcher();
this.cacheBaseDir = process.env.CACHE_LOCATION || '.';
CategoryProductsSyncer.instance = this;
}
async syncProducts(categoryIds) {
// Check if already syncing
if (this.isSyncing) {
console.log('⏳ CategoryProductsSyncer is busy. Queuing sync...');
this.queuedCategoryIds = categoryIds;
return;
}
this.isSyncing = true;
try {
await this.syncQueue.executeSync('category-products-sync', async () => {
await this._performSync(categoryIds);
} catch (err) {
console.error('❌ Error syncing products:', err);
} finally {
this.isSyncing = false;
// Process queued sync if exists
if (this.queuedCategoryIds) {
const nextIds = this.queuedCategoryIds;
this.queuedCategoryIds = null;
// Use setTimeout to allow event loop to breathe
setTimeout(() => this.syncProducts(nextIds), 0);
}
}
}, categoryIds);
}
async _performSync(categoryIds) {
@@ -49,37 +32,26 @@ class CategoryProductsSyncer extends EventEmitter {
const productsDir = path.join(this.cacheBaseDir, 'products');
// Ensure directory exists
await fs.mkdir(productsDir, { recursive: true });
await ensureDir(productsDir);
// Get existing files
let existingFiles = [];
try {
existingFiles = await fs.readdir(productsDir);
} catch (err) {
// Directory might be empty or new
}
// Filter for category json files (assuming we save as category_{id}.json)
const existingIds = existingFiles
.filter(f => f.startsWith('category_') && f.endsWith('.json'))
.map(f => parseInt(f.replace('category_', '').replace('.json', '')));
const existingIds = await getExistingIds(productsDir, {
prefix: 'category_',
suffix: '.json'
});
const validIds = new Set(categoryIds.filter(id => id !== null && id !== undefined));
// 1. Delete obsolete category files
const toDelete = existingIds.filter(id => !validIds.has(id));
for (const id of toDelete) {
const filePath = path.join(productsDir, `category_${id}.json`);
await fs.unlink(filePath);
}
if (toDelete.length > 0) {
console.log(`🗑️ Deleted ${toDelete.length} obsolete product lists.`);
}
// Delete obsolete category files
await deleteObsoleteFiles(
productsDir,
existingIds,
validIds,
(id) => `category_${id}.json`
);
// 2. Update/Create product lists for all valid categories
// We update all because product assignments might have changed even if category exists
// Update/Create product lists for all valid categories
if (validIds.size > 0) {
//console.log(`📦 Syncing products for ${validIds.size} categories...`);
await this._fetchAndWriteProducts([...validIds], productsDir);
} else {
console.log(`✅ No categories to sync products for.`);
@@ -90,115 +62,50 @@ class CategoryProductsSyncer extends EventEmitter {
}
async _fetchAndWriteProducts(ids, dir) {
let pool;
const globalImageIds = new Set();
const globalArticleIds = new Set();
try {
pool = await createConnection();
// Process in chunks to avoid huge queries
const chunkSize = 50;
for (let i = 0; i < ids.length; i += chunkSize) {
const chunk = ids.slice(i, i + chunkSize);
const list = chunk.join(',');
// Fetch products for this chunk of categories
// We need kArtikel and cName, ordered by bRowversion descending
const result = await pool.request().query(`
SELECT
ka.kKategorie,
ka.kArtikel,
ab.cName
FROM tkategorieartikel ka
JOIN tArtikelBeschreibung ab ON ka.kArtikel = ab.kArtikel
JOIN tArtikel a ON ka.kArtikel = a.kArtikel
WHERE ab.kSprache = ${process.env.JTL_SPRACHE_ID}
AND a.cAktiv = 'Y'
AND ab.kPlattform = ${process.env.JTL_PLATTFORM_ID}
AND ab.kShop = ${process.env.JTL_SHOP_ID}
AND ka.kKategorie IN (${list})
ORDER BY (
CASE
WHEN a.bRowversion >= ab.bRowversion AND a.bRowversion >= ka.bRowversion THEN a.bRowversion
WHEN ab.bRowversion >= a.bRowversion AND ab.bRowversion >= ka.bRowversion THEN ab.bRowversion
ELSE ka.bRowversion
END
) DESC
`);
// Collect all kArtikel IDs to fetch images
const artikelIds = new Set();
result.recordset.forEach(r => artikelIds.add(r.kArtikel));
// Fetch images for these articles
let productImages = new Map(); // kArtikel -> kBild[]
if (artikelIds.size > 0) {
const artikelList = Array.from(artikelIds).join(',');
const imagesResult = await pool.request().query(`
SELECT kArtikel, kBild
FROM tArtikelbildPlattform
WHERE kShop = ${process.env.JTL_SHOP_ID}
AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
AND kArtikel IN (${artikelList})
ORDER BY nNr ASC
`);
imagesResult.recordset.forEach(r => {
if (!productImages.has(r.kArtikel)) {
productImages.set(r.kArtikel, []);
}
productImages.get(r.kArtikel).push(r.kBild);
});
}
await this.dataFetcher.fetchProductsInChunks(ids, async (chunkData) => {
const { categoryIds, products, productImages } = chunkData;
// Group results by kKategorie
const productsByCategory = {};
// Initialize arrays for all requested IDs (so we create empty files for empty categories)
chunk.forEach(id => {
categoryIds.forEach(id => {
productsByCategory[id] = [];
});
for (const record of result.recordset) {
for (const record of products) {
if (productsByCategory[record.kKategorie]) {
const images = productImages.get(record.kArtikel) || [];
images.forEach(imgId => globalImageIds.add(imgId));
globalArticleIds.add(record.kArtikel);
productsByCategory[record.kKategorie].push({
kArtikel: record.kArtikel,
cName: record.cName,
cArtNr: record.cArtNr,
fVKNetto: record.fVKNetto,
cBarcode: record.cBarcode,
fSteuersatz: record.fSteuersatz,
images: images
});
}
}
// Write files
for (const catId of chunk) {
for (const catId of categoryIds) {
const filePath = path.join(dir, `category_${catId}.json`);
const products = productsByCategory[catId] || [];
const newContent = JSON.stringify(products, null, 2);
const categoryProducts = productsByCategory[catId] || [];
// Check for changes
let oldContent = '';
try {
oldContent = await fs.readFile(filePath, 'utf-8');
} catch (e) {
// File doesn't exist yet
}
const changed = await writeJsonIfChanged(filePath, categoryProducts);
if (oldContent !== newContent) {
await fs.writeFile(filePath, newContent);
this.emit('categoryUpdated', { id: catId, products });
}
}
const processed = Math.min(i + chunkSize, ids.length);
if (processed === ids.length) {
//console.log(`✅ Processed products for ${processed}/${ids.length} categories.`);
} else {
//console.log(`⏳ Processed products for ${processed}/${ids.length} categories...`);
if (changed) {
this.emit('categoryUpdated', { id: catId, products: categoryProducts });
}
}
});
// Sync all collected images at once
if (globalImageIds.size > 0) {
@@ -206,12 +113,9 @@ class CategoryProductsSyncer extends EventEmitter {
await pictureSyncer.syncImages(Array.from(globalImageIds), 'products');
}
} catch (err) {
console.error('❌ Error fetching products:', err);
} finally {
if (pool) {
await pool.close();
}
// Sync product details for all articles found
if (globalArticleIds.size > 0) {
await productDetailSyncer.syncDetails(Array.from(globalArticleIds));
}
}
}
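Likewise, the file-sync-utils helpers (ensureDir, getExistingIds, deleteObsoleteFiles, writeJsonIfChanged, readTextFile) are imported throughout the refactor but not shown here. Inferring from the inline code they replace, writeJsonIfChanged is expected to behave roughly like this; an assumption, not the repository's code:

import fs from 'fs/promises';

// Hypothetical: write pretty-printed JSON only when the content differs,
// and report whether anything changed so callers can emit update events.
export async function writeJsonIfChanged(filePath, data) {
  const newContent = JSON.stringify(data, null, 2);
  let oldContent = null;
  try {
    oldContent = await fs.readFile(filePath, 'utf-8');
  } catch (err) {
    if (err.code !== 'ENOENT') throw err; // a missing file simply counts as "changed"
  }
  if (oldContent === newContent) return false;
  await fs.writeFile(filePath, newContent);
  return true;
}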

View File

@@ -1,316 +0,0 @@
import { EventEmitter } from 'events';
import fs from 'fs/promises';
import path from 'path';
import { createConnection } from '../utils/database.js';
class CategorySyncer extends EventEmitter {
constructor() {
super();
if (CategorySyncer.instance) {
return CategorySyncer.instance;
}
this.isSyncing = false;
this.queuedSync = false;
this.cacheDir = process.env.CACHE_LOCATION || '.';
this.lastTreeString = null;
this.lastTemplateString = null;
// Load existing template if it exists
this._loadExistingTemplate();
CategorySyncer.instance = this;
}
async _loadExistingTemplate() {
try {
const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
this.lastTemplateString = await fs.readFile(templatePath, 'utf-8');
} catch (err) {
// File doesn't exist yet, that's fine
}
try {
const treePath = path.join(this.cacheDir, 'category_tree.json');
const treeContent = await fs.readFile(treePath, 'utf-8');
this.lastTreeString = treeContent;
} catch (err) {
// File doesn't exist yet, that's fine
}
}
async triggerSync() {
if (this.isSyncing) {
if (this.queuedSync) {
console.log('🚫 Sync already in progress and next sync already queued. Ignoring.');
return;
}
console.log('⏳ Sync already in progress. Queuing next sync.');
this.queuedSync = true;
return;
}
await this._doSync();
}
async _doSync() {
this.isSyncing = true;
const startTime = Date.now();
//console.log('🚀 Starting sync...');
try {
await this._syncFromDb();
const duration = Date.now() - startTime;
// Log completion and next sync time
const syncInterval = parseInt(process.env.SYNC_INTERVAL_MS) || 60000;
const minutes = Math.round(syncInterval / 60000);
//console.log(`✅ Sync completed in ${duration}ms. Next sync in ${minutes} minute${minutes !== 1 ? 's' : ''}`);
} catch (err) {
console.error('❌ Sync failed:', err);
} finally {
this.isSyncing = false;
if (this.queuedSync) {
console.log('🔄 Processing queued sync...');
this.queuedSync = false;
// Use setImmediate to allow stack to clear/event loop to tick
setImmediate(() => this.triggerSync());
}
}
}
async _syncFromDb() {
let pool;
try {
pool = await createConnection();
// Fetch categories
const categoriesResult = await pool.request().query(`
SELECT kKategorie, kOberKategorie, nSort
FROM tkategorie
ORDER BY nSort, kKategorie
`);
// Fetch names
const namesResult = await pool.request().query(`
SELECT kKategorie, cName
FROM tKategorieSprache
WHERE kSprache = ${process.env.JTL_SPRACHE_ID} AND kShop = ${process.env.JTL_SHOP_ID}
`);
// Fetch article counts
const articleCountsResult = await pool.request().query(`
SELECT ka.kKategorie, COUNT(a.kArtikel) as count
FROM tkategorieartikel ka
JOIN tArtikel a ON ka.kArtikel = a.kArtikel
WHERE a.cAktiv = 'Y'
GROUP BY ka.kKategorie
`);
// Fetch images (kBild)
const imagesResult = await pool.request().query(`
SELECT kKategorie, kBild
FROM (
SELECT kKategorie, kBild, ROW_NUMBER() OVER (PARTITION BY kKategorie ORDER BY nNr ASC) as rn
FROM tKategoriebildPlattform
WHERE kShop = ${process.env.JTL_SHOP_ID} AND kPlattform = ${process.env.JTL_PLATTFORM_ID}
) t
WHERE rn = 1
`);
const categories = categoriesResult.recordset;
const names = namesResult.recordset;
const articleCounts = articleCountsResult.recordset;
const images = imagesResult.recordset;
// Build tree with ROOT_CATEGORY_ID filter (if set)
// This gives us the subtree we're interested in
let tree = this._buildTree(categories, names, articleCounts, images, true);
// Deep copy tree for unpruned version (before pruning modifies it)
const unprunedTree = JSON.parse(JSON.stringify(tree));
// Generate translation template BEFORE pruning (to include all categories)
const translationTemplate = this._buildTranslationTemplate(tree);
const templateString = this._formatTranslationTemplate(translationTemplate);
// Now prune for the main tree
tree = this._pruneTree(tree);
// Ensure directory exists
await fs.mkdir(this.cacheDir, { recursive: true });
// Compare pruned tree
const treeString = JSON.stringify(tree, null, 2);
const changed = this.lastTreeString !== treeString;
if (changed) {
// Save template if it changed
if (this.lastTemplateString !== templateString) {
const templatePath = path.join(this.cacheDir, 'categories_translation_template.txt');
await fs.writeFile(templatePath, templateString);
console.log(`💾 Translation template saved to ${templatePath}`);
this.lastTemplateString = templateString;
}
const filePath = path.join(this.cacheDir, 'category_tree.json');
await fs.writeFile(filePath, treeString);
console.log(`💾 Category tree saved to ${filePath}`);
this.lastTreeString = treeString;
console.log('📢 Tree updated.');
} else {
//console.log('🤷 No changes detected in category tree.');
}
this.emit('synced', { tree, unprunedTree, changed });
} finally {
if (pool) {
await pool.close();
}
}
}
_buildTree(categories, names, articleCounts, images, applyRootFilter = true) {
// Create a map for quick lookup of names
const nameMap = new Map();
names.forEach(n => nameMap.set(n.kKategorie, n.cName));
// Create a map for article counts
const countMap = new Map();
articleCounts.forEach(c => countMap.set(c.kKategorie, c.count));
// Create a map for images
const imageMap = new Map();
images.forEach(i => imageMap.set(i.kKategorie, i.kBild));
// Create a map for category nodes
const categoryMap = new Map();
// Initialize all nodes
categories.forEach(cat => {
categoryMap.set(cat.kKategorie, {
kKategorie: cat.kKategorie,
cName: nameMap.get(cat.kKategorie) || `Unknown (${cat.kKategorie})`, // Fallback if name missing
articleCount: countMap.get(cat.kKategorie) || 0,
kBild: imageMap.get(cat.kKategorie) || null,
children: [],
nSort: cat.nSort || 0 // Store nSort temporarily
});
});
const rootNodes = [];
// Parse excluded IDs
const excludedIds = new Set(
(process.env.EXCLUDE_CATEGORY_IDS || '')
.split(',')
.map(id => parseInt(id.trim()))
.filter(id => !isNaN(id))
);
// Build hierarchy
categories.forEach(cat => {
// Skip if excluded
if (excludedIds.has(cat.kKategorie)) return;
const node = categoryMap.get(cat.kKategorie);
if (cat.kOberKategorie === 0) {
rootNodes.push(node);
} else {
const parent = categoryMap.get(cat.kOberKategorie);
if (parent) {
parent.children.push(node);
} else {
// Handle orphan nodes if necessary, or ignore
// console.warn(`Orphan category found: ${cat.kKategorie}`);
}
}
});
const rootId = process.env.ROOT_CATEGORY_ID ? parseInt(process.env.ROOT_CATEGORY_ID) : null;
let resultNodes = rootNodes;
if (rootId && applyRootFilter) {
if (excludedIds.has(rootId)) {
resultNodes = [];
} else {
const specificRoot = categoryMap.get(rootId);
// Return the children of the specified root, not the root itself
resultNodes = specificRoot ? specificRoot.children : [];
}
}
// Sort children and remove nSort
for (const node of categoryMap.values()) {
node.children.sort((a, b) => a.nSort - b.nSort || a.kKategorie - b.kKategorie);
}
// Sort root nodes if returning multiple
resultNodes.sort((a, b) => a.nSort - b.nSort || a.kKategorie - b.kKategorie);
// Remove nSort property from all nodes
for (const node of categoryMap.values()) {
delete node.nSort;
}
return resultNodes;
}
_pruneTree(nodes) {
// Filter out nodes that are empty (no articles) and have no valid children
return nodes.filter(node => {
// Recursively prune children
if (node.children && node.children.length > 0) {
node.children = this._pruneTree(node.children);
}
// Keep node if it has articles OR has remaining children
const hasArticles = node.articleCount > 0;
const hasChildren = node.children && node.children.length > 0;
return hasArticles || hasChildren;
});
}
_buildTranslationTemplate(nodes) {
return nodes.map(node => {
const result = { name: node.cName };
if (node.children && node.children.length > 0) {
result.children = this._buildTranslationTemplate(node.children);
}
return result;
});
}
_formatTranslationTemplate(nodes, indent = 0) {
const spaces = ' '.repeat(indent);
const innerSpaces = ' '.repeat(indent + 1);
if (nodes.length === 0) return '[]';
const lines = ['['];
nodes.forEach((node, index) => {
const isLast = index === nodes.length - 1;
if (node.children && node.children.length > 0) {
// Node with children - multi-line format
lines.push(`${innerSpaces}{`);
lines.push(`${innerSpaces} "name": "${node.name}",`);
lines.push(`${innerSpaces} "children": ${this._formatTranslationTemplate(node.children, indent + 2)}`);
lines.push(`${innerSpaces}}${isLast ? '' : ','}`);
} else {
// Leaf node - single line format
lines.push(`${innerSpaces}{ "name": "${node.name}" }${isLast ? '' : ','}`);
}
});
lines.push(`${spaces}]`);
return lines.join('\n');
}
}
const instance = new CategorySyncer();
export default instance;

View File

@@ -1,87 +1,51 @@
import fs from 'fs/promises';
import path from 'path';
import sharp from 'sharp';
import { createConnection } from '../utils/database.js';
import { SyncQueueManager } from '../utils/sync-queue-manager.js';
import { ImageDataFetcher } from '../services/image-data-fetcher.js';
import { getExistingIds, deleteObsoleteFiles, ensureDir } from '../utils/file-sync-utils.js';
class PictureSyncer {
constructor() {
if (PictureSyncer.instance) {
return PictureSyncer.instance;
}
this.cacheBaseDir = process.env.CACHE_LOCATION || '.';
// Track syncing state per group
this.isSyncing = new Map(); // groupName -> boolean
this.queuedSyncs = new Map(); // groupName -> { imageIds, groupName }
this.syncQueue = new SyncQueueManager();
this.dataFetcher = new ImageDataFetcher();
this.cacheBaseDir = process.env.CACHE_LOCATION || '.';
PictureSyncer.instance = this;
}
async syncImages(imageIds, groupName) {
// Check if already syncing this group
if (this.isSyncing.get(groupName)) {
if (this.queuedSyncs.has(groupName)) {
console.log(`🚫 Image sync for '${groupName}' already in progress and queued. Ignoring.`);
return;
}
console.log(`⏳ Image sync for '${groupName}' already in progress. Queuing.`);
this.queuedSyncs.set(groupName, { imageIds, groupName });
return;
}
await this._doSync(imageIds, groupName);
}
async _doSync(imageIds, groupName) {
this.isSyncing.set(groupName, true);
try {
await this.syncQueue.executeSync(groupName, async () => {
await this._performSync(imageIds, groupName);
} finally {
this.isSyncing.set(groupName, false);
// Process queued sync for this group if any
if (this.queuedSyncs.has(groupName)) {
console.log(`🔄 Processing queued image sync for '${groupName}'...`);
const queued = this.queuedSyncs.get(groupName);
this.queuedSyncs.delete(groupName);
setImmediate(() => this.syncImages(queued.imageIds, queued.groupName));
}
}
}, { imageIds, groupName });
}
async _performSync(imageIds, groupName) {
const groupDir = path.join(this.cacheBaseDir, 'img', groupName);
// Ensure directory exists
await fs.mkdir(groupDir, { recursive: true });
await ensureDir(groupDir);
// Get existing files
let existingFiles = [];
try {
existingFiles = await fs.readdir(groupDir);
} catch (err) {
// Directory might be empty or new
}
// Filter for image files (assuming we save as {id}.avif)
const existingIds = existingFiles
.filter(f => f.endsWith('.avif'))
.map(f => parseInt(f.replace('.avif', '')));
const existingIds = await getExistingIds(groupDir, {
suffix: '.avif'
});
const validIds = new Set(imageIds.filter(id => id !== null && id !== undefined));
// 1. Delete obsolete images
const toDelete = existingIds.filter(id => !validIds.has(id));
for (const id of toDelete) {
const filePath = path.join(groupDir, `${id}.avif`);
await fs.unlink(filePath);
}
if (toDelete.length > 0) {
console.log(`🗑️ Deleted ${toDelete.length} obsolete images.`);
}
// Delete obsolete images
await deleteObsoleteFiles(
groupDir,
existingIds,
validIds,
(id) => `${id}.avif`
);
// 2. Download missing images
// Download missing images
const toDownload = imageIds.filter(id => id !== null && id !== undefined && !existingIds.includes(id));
if (toDownload.length > 0) {
@@ -93,24 +57,7 @@ class PictureSyncer {
}
async _downloadImages(ids, dir) {
let pool;
try {
pool = await createConnection();
// Process in chunks to avoid huge queries
const chunkSize = 50;
for (let i = 0; i < ids.length; i += chunkSize) {
const chunk = ids.slice(i, i + chunkSize);
const list = chunk.join(',');
const result = await pool.request().query(`
SELECT kBild, bBild
FROM tBild
WHERE kBild IN (${list})
`);
for (const record of result.recordset) {
if (record.bBild) {
await this.dataFetcher.fetchImagesInChunks(ids, async (record) => {
const filePath = path.join(dir, `${record.kBild}.avif`);
// Resize to 130x130 and convert to AVIF using sharp
await sharp(record.bBild)
@@ -120,22 +67,7 @@ class PictureSyncer {
})
.avif({ quality: 80 })
.toFile(filePath);
}
}
const processed = Math.min(i + chunkSize, ids.length);
if (processed === ids.length) {
console.log(`✅ Processed ${processed}/${ids.length} images.`);
} else {
console.log(`⏳ Processed ${processed}/${ids.length} images...`);
}
}
} catch (err) {
console.error('❌ Error downloading images:', err);
} finally {
if (pool) {
await pool.close();
}
}
});
}
}
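The diff above elides the middle of the sharp call chain between the two hunks; as a hedged sketch only (the resize options are assumed, not read from this commit), the per-record thumbnail step typically looks like this:

import sharp from 'sharp';

// Assumed sketch: resize an image buffer to fit within 130x130 and write it as AVIF.
// Only the .avif({ quality: 80 }) line is visible in the hunk above; the resize
// options here are an assumption.
async function writeThumbnail(buffer, filePath) {
  await sharp(buffer)
    .resize(130, 130, { fit: 'inside' })
    .avif({ quality: 80 })
    .toFile(filePath);
}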

View File

@@ -0,0 +1,120 @@
import fs from 'fs/promises';
import path from 'path';
import { SyncQueueManager } from '../utils/sync-queue-manager.js';
import { ProductDetailDataFetcher } from '../services/product-detail-data-fetcher.js';
import { getExistingIds, deleteObsoleteFiles, ensureDir, writeJsonIfChanged } from '../utils/file-sync-utils.js';
class ProductDetailSyncer {
constructor() {
if (ProductDetailSyncer.instance) {
return ProductDetailSyncer.instance;
}
this.syncQueue = new SyncQueueManager();
this.dataFetcher = new ProductDetailDataFetcher();
this.cacheBaseDir = process.env.CACHE_LOCATION || '.';
ProductDetailSyncer.instance = this;
}
async syncDetails(articleIds) {
await this.syncQueue.executeSync('product-details', async () => {
await this._performSync(articleIds);
}, articleIds);
}
async _performSync(articleIds) {
const detailsDir = path.join(this.cacheBaseDir, 'details');
const stateFile = path.join(this.cacheBaseDir, 'product-details-state.json');
// Ensure directory exists
await ensureDir(detailsDir);
// Load state
let lastSyncRowversion = null;
try {
const state = JSON.parse(await fs.readFile(stateFile, 'utf-8'));
lastSyncRowversion = state.lastSyncRowversion;
} catch (err) {
// State file might not exist yet
}
// Get existing files
const existingIds = await getExistingIds(detailsDir, {
suffix: '.json'
});
const validIds = new Set(articleIds.filter(id => id !== null && id !== undefined));
// Delete obsolete files
await deleteObsoleteFiles(
detailsDir,
existingIds,
validIds,
(id) => `${id}.json`
);
// Split into missing and present
const missingIds = [];
const presentIds = [];
for (const id of validIds) {
if (existingIds.includes(id)) {
presentIds.push(id);
} else {
missingIds.push(id);
}
}
// Determine what to fetch
const toFetch = new Set(missingIds);
if (presentIds.length > 0) {
// Check which present files need update based on rowversion
//console.log(`Checking changes for ${presentIds.length} present items with lastSyncRowversion: ${lastSyncRowversion}`);
const changedIds = await this.dataFetcher.fetchChangedArticleIds(presentIds, lastSyncRowversion);
//console.log(`Got ${changedIds.size} changed items from fetcher`);
changedIds.forEach(id => toFetch.add(id));
}
if (toFetch.size > 0) {
console.log(`📝 Syncing ${toFetch.size} product details (Missing: ${missingIds.length}, Changed: ${toFetch.size - missingIds.length})...`);
await this._fetchAndWriteDetails([...toFetch], detailsDir, stateFile, lastSyncRowversion);
} else {
//console.log(`✅ No product details to sync.`);
}
}
async _fetchAndWriteDetails(ids, dir, stateFile, currentMaxRowversion) {
let maxRowversion = currentMaxRowversion;
await this.dataFetcher.fetchDetailsInChunks(ids, async (record) => {
const filePath = path.join(dir, `${record.kArtikel}.json`);
// Update max rowversion
if (record.bRowversion) {
// A plain string comparison is enough to find the maximum of hex rowversion strings as long as they are all the same length.
// MSSQL rowversions are fixed length (8 bytes), so the hex string length is constant.
if (!maxRowversion || record.bRowversion > maxRowversion) {
maxRowversion = record.bRowversion;
}
}
// writeJsonIfChanged reads the existing file and only rewrites it when the serialized content differs
await writeJsonIfChanged(filePath, {
kArtikel: record.kArtikel,
cBeschreibung: record.cBeschreibung || null, // Ensure null is written if missing
bRowversion: record.bRowversion || null
});
});
// Save new state
if (maxRowversion && maxRowversion !== currentMaxRowversion) {
await fs.writeFile(stateFile, JSON.stringify({ lastSyncRowversion: maxRowversion }, null, 2));
}
}
}
const instance = new ProductDetailSyncer();
export default instance;
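A brief usage sketch (the import path is assumed from the surrounding file layout): the syncer is a singleton, overlapping calls are queued by SyncQueueManager, and the highest rowversion seen is persisted for the next incremental run.

// Hypothetical usage; the relative path is an assumption.
import productDetailSyncer from './syncers/product-detail-syncer.js';

await productDetailSyncer.syncDetails([101, 102, 103]);

// Afterwards product-details-state.json holds the persisted high-water mark,
// e.g. (value format illustrative only):
// { "lastSyncRowversion": "0x00000000000186A3" }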

View File

@@ -0,0 +1,197 @@
/**
* Category tree manipulation utilities
*/
/**
* Parse excluded category IDs from environment variable
* @returns {Set<number>} - Set of excluded category IDs
*/
export function parseExcludedIds() {
return new Set(
(process.env.EXCLUDE_CATEGORY_IDS || '')
.split(',')
.map(id => parseInt(id.trim()))
.filter(id => !isNaN(id))
);
}
/**
* Parse included category IDs from environment variable
* @returns {Set<number>} - Set of included category IDs
*/
export function parseIncludedIds() {
return new Set(
(process.env.INCLUDE_CATEGORY_IDS || '')
.split(',')
.map(id => parseInt(id.trim()))
.filter(id => !isNaN(id))
);
}
/**
* Build a hierarchical category tree from flat data
* @param {Array} categories - Category records with kKategorie, kOberKategorie, nSort
* @param {Array} names - Name records with kKategorie, cName
* @param {Array} articleCounts - Article count records with kKategorie, count
* @param {Array} images - Image records with kKategorie, kBild
* @param {boolean} applyRootFilter - Whether to apply ROOT_CATEGORY_ID filter
* @returns {Array} - Array of root category nodes
*/
export function buildTree(categories, names, articleCounts, images, applyRootFilter = true) {
// Create maps for quick lookup
const nameMap = new Map();
names.forEach(n => nameMap.set(n.kKategorie, n.cName));
const countMap = new Map();
articleCounts.forEach(c => countMap.set(c.kKategorie, c.count));
const imageMap = new Map();
images.forEach(i => imageMap.set(i.kKategorie, i.kBild));
const categoryMap = new Map();
// Initialize all nodes
categories.forEach(cat => {
categoryMap.set(cat.kKategorie, {
kKategorie: cat.kKategorie,
cName: nameMap.get(cat.kKategorie) || `Unknown (${cat.kKategorie})`,
articleCount: countMap.get(cat.kKategorie) || 0,
kBild: imageMap.get(cat.kKategorie) || null,
children: [],
nSort: cat.nSort || 0
});
});
const rootNodes = [];
const excludedIds = parseExcludedIds();
const includedIds = parseIncludedIds();
// Build hierarchy
categories.forEach(cat => {
// Skip if excluded
if (excludedIds.has(cat.kKategorie)) return;
const node = categoryMap.get(cat.kKategorie);
if (cat.kOberKategorie === 0) {
rootNodes.push(node);
} else {
const parent = categoryMap.get(cat.kOberKategorie);
if (parent) {
parent.children.push(node);
}
}
});
const rootId = process.env.ROOT_CATEGORY_ID ? parseInt(process.env.ROOT_CATEGORY_ID) : null;
let resultNodes = rootNodes;
if (rootId && applyRootFilter) {
if (excludedIds.has(rootId)) {
resultNodes = [];
} else {
const specificRoot = categoryMap.get(rootId);
// Return the children of the specified root, not the root itself
resultNodes = specificRoot ? specificRoot.children : [];
}
}
// Add included categories to the result nodes (promoting them to root level)
includedIds.forEach(id => {
// Skip if excluded (exclusion takes precedence)
if (excludedIds.has(id)) return;
const node = categoryMap.get(id);
if (node) {
// Remove subcategories for included categories as requested
node.children = [];
// Avoid duplicates if the node is already in the result list
if (!resultNodes.includes(node)) {
resultNodes.push(node);
}
}
});
// Sort children and remove nSort
for (const node of categoryMap.values()) {
node.children.sort((a, b) => a.nSort - b.nSort || a.kKategorie - b.kKategorie);
}
// Sort root nodes
resultNodes.sort((a, b) => a.nSort - b.nSort || a.kKategorie - b.kKategorie);
// Remove nSort property from all nodes
for (const node of categoryMap.values()) {
delete node.nSort;
}
return resultNodes;
}
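To make the map wiring concrete, a minimal sketch (all IDs, names and counts invented) of the flat rows buildTree expects and the tree it returns when the root filter is disabled:

const categories = [
  { kKategorie: 1, kOberKategorie: 0, nSort: 0 },
  { kKategorie: 2, kOberKategorie: 1, nSort: 0 }
];
const names = [
  { kKategorie: 1, cName: 'Werkzeug' },
  { kKategorie: 2, cName: 'Bohrer' }
];
const articleCounts = [{ kKategorie: 2, count: 5 }];
const images = [];

const tree = buildTree(categories, names, articleCounts, images, false);
// tree => [{ kKategorie: 1, cName: 'Werkzeug', articleCount: 0, kBild: null,
//            children: [{ kKategorie: 2, cName: 'Bohrer', articleCount: 5, kBild: null, children: [] }] }]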
/**
* Prune tree to remove empty categories with no children
* @param {Array} nodes - Array of category nodes
* @returns {Array} - Filtered array of nodes
*/
export function pruneTree(nodes) {
return nodes.filter(node => {
// Recursively prune children
if (node.children && node.children.length > 0) {
node.children = pruneTree(node.children);
}
// Keep node if it has articles OR has remaining children
const hasArticles = node.articleCount > 0;
const hasChildren = node.children && node.children.length > 0;
return hasArticles || hasChildren;
});
}
/**
* Build translation template structure from tree
* @param {Array} nodes - Array of category nodes
* @returns {Array} - Simplified structure with only names and children
*/
export function buildTranslationTemplate(nodes) {
return nodes.map(node => {
const result = { name: node.cName };
if (node.children && node.children.length > 0) {
result.children = buildTranslationTemplate(node.children);
}
return result;
});
}
/**
* Format translation template as formatted JSON string
* @param {Array} nodes - Translation template nodes
* @param {number} indent - Current indentation level
* @returns {string} - Formatted JSON string
*/
export function formatTranslationTemplate(nodes, indent = 0) {
const spaces = ' '.repeat(indent);
const innerSpaces = ' '.repeat(indent + 1);
if (nodes.length === 0) return '[]';
const lines = ['['];
nodes.forEach((node, index) => {
const isLast = index === nodes.length - 1;
if (node.children && node.children.length > 0) {
// Node with children - multi-line format
lines.push(`${innerSpaces}{`);
lines.push(`${innerSpaces} "name": "${node.name}",`);
lines.push(`${innerSpaces} "children": ${formatTranslationTemplate(node.children, indent + 2)}`);
lines.push(`${innerSpaces}}${isLast ? '' : ','}`);
} else {
// Leaf node - single line format
lines.push(`${innerSpaces}{ "name": "${node.name}" }${isLast ? '' : ','}`);
}
});
lines.push(`${spaces}]`);
return lines.join('\n');
}
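The three helpers above compose into the translation workflow; continuing the invented sample from the buildTree sketch:

const pruned = pruneTree(tree);                    // keeps nodes with articles or non-empty children
const template = buildTranslationTemplate(pruned); // [{ name: 'Werkzeug', children: [{ name: 'Bohrer' }] }]
const json = formatTranslationTemplate(template);  // leaves on one line, parents expanded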

View File

@@ -0,0 +1,58 @@
/**
* Database utility functions for common operations
*/
/**
* Process items in chunks with a callback function
* @param {Array} items - Items to process
* @param {number} chunkSize - Size of each chunk
* @param {Function} processFn - Async function to process each chunk (receives chunk array)
* @param {Object} options - Optional configuration
* @param {boolean} options.showProgress - Whether to log progress (default: true)
* @param {string} options.itemName - Name of items for logging (default: 'items')
* @returns {Promise<void>}
*/
export async function processInChunks(items, chunkSize, processFn, options = {}) {
const { showProgress = true, itemName = 'items' } = options;
for (let i = 0; i < items.length; i += chunkSize) {
const chunk = items.slice(i, i + chunkSize);
await processFn(chunk, i);
if (showProgress) {
const processed = Math.min(i + chunkSize, items.length);
if (processed === items.length) {
console.log(`✅ Processed ${processed}/${items.length} ${itemName}.`);
} else {
console.log(`⏳ Processed ${processed}/${items.length} ${itemName}...`);
}
}
}
}
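A quick usage sketch (IDs invented) of the chunking helper:

// Processes 120 invented IDs in chunks of 50 and logs progress after every chunk.
const ids = Array.from({ length: 120 }, (_, i) => i + 1);
await processInChunks(ids, 50, async (chunk, offset) => {
  // chunk === ids.slice(offset, offset + 50)
  console.log(`handling ${chunk.length} ids starting at index ${offset}`);
}, { itemName: 'ids' });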
/**
* Build the comma-separated ID list placed inside a SQL IN (...) clause
* @param {Array<number>} ids - Array of numeric IDs
* @returns {string} - Comma-separated string of IDs
*/
export function createInClause(ids) {
return ids.join(',');
}
/**
* Execute a database query with automatic connection management
* @param {Function} queryFn - Async function that receives pool and executes queries
* @param {Function} createConnection - Function to create database connection
* @returns {Promise<*>} - Result from queryFn
*/
export async function withConnection(queryFn, createConnection) {
let pool;
try {
pool = await createConnection();
return await queryFn(pool);
} finally {
if (pool) {
await pool.close();
}
}
}
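Together the helpers keep query code free of connection bookkeeping; a sketch reusing the tBild columns seen in the picture syncer hunk above (the import path of this module is an assumption):

import { createConnection } from '../utils/database.js';
import { withConnection, createInClause } from './db-utils.js'; // path assumed

// Fetch a handful of image rows with automatic pool open/close.
const rows = await withConnection(async (pool) => {
  const result = await pool.request().query(`
    SELECT kBild, bBild
    FROM tBild
    WHERE kBild IN (${createInClause([1, 2, 3])})
  `);
  return result.recordset;
}, createConnection);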

View File

@@ -7,6 +7,7 @@ const config = {
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
server: process.env.DB_HOST,
port: parseInt(process.env.DB_PORT, 10),
database: process.env.DB_DATABASE,
options: {
encrypt: false, // Adjust based on server config
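One hedged aside on the new port line (a sketch, not part of this commit): parseInt over an unset DB_PORT yields NaN, so a guarded variant could fall back to the SQL Server default.

// Assumed variant with an explicit fallback to the SQL Server default port.
const port = Number.parseInt(process.env.DB_PORT, 10) || 1433;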

View File

@@ -0,0 +1,134 @@
import fs from 'fs/promises';
import path from 'path';
/**
* File synchronization utility functions
*/
/**
* Get existing IDs from files in a directory
* @param {string} dir - Directory path
* @param {Object} options - Configuration options
* @param {string} options.prefix - File prefix to filter (e.g., 'category_')
* @param {string} options.suffix - File suffix to filter (e.g., '.json')
* @param {RegExp} options.pattern - Custom regex pattern to extract ID
* @returns {Promise<number[]>} - Array of numeric IDs
*/
export async function getExistingIds(dir, options = {}) {
const { prefix = '', suffix = '', pattern = null } = options;
let existingFiles = [];
try {
existingFiles = await fs.readdir(dir);
} catch (err) {
// Directory might not exist yet
return [];
}
if (pattern) {
return existingFiles
.map(f => {
const match = f.match(pattern);
return match ? parseInt(match[1]) : null;
})
.filter(id => id !== null && !isNaN(id));
}
return existingFiles
.filter(f => {
if (prefix && !f.startsWith(prefix)) return false;
if (suffix && !f.endsWith(suffix)) return false;
return true;
})
.map(f => {
let id = f;
if (prefix) id = id.replace(prefix, '');
if (suffix) id = id.replace(suffix, '');
return parseInt(id);
})
.filter(id => !isNaN(id));
}
/**
* Delete obsolete files based on valid IDs
* @param {string} dir - Directory path
* @param {number[]} existingIds - IDs of existing files
* @param {Set<number>} validIds - Set of valid IDs to keep
* @param {Function} filenameFn - Function to generate filename from ID
* @returns {Promise<number>} - Number of files deleted
*/
export async function deleteObsoleteFiles(dir, existingIds, validIds, filenameFn) {
const toDelete = existingIds.filter(id => !validIds.has(id));
for (const id of toDelete) {
const filePath = path.join(dir, filenameFn(id));
await fs.unlink(filePath);
}
if (toDelete.length > 0) {
console.log(`🗑️ Deleted ${toDelete.length} obsolete files.`);
}
return toDelete.length;
}
/**
* Write JSON to file only if content has changed
* @param {string} filePath - Full path to file
* @param {*} data - Data to write (will be JSON.stringify'd)
* @param {number} indent - JSON indentation (default: 2)
* @returns {Promise<boolean>} - True if file was written, false if unchanged
*/
export async function writeJsonIfChanged(filePath, data, indent = 2) {
const newContent = JSON.stringify(data, null, indent);
let oldContent = '';
try {
oldContent = await fs.readFile(filePath, 'utf-8');
} catch (e) {
// File doesn't exist yet
}
if (oldContent !== newContent) {
await fs.writeFile(filePath, newContent);
return true;
}
return false;
}
/**
* Ensure directory exists, create if it doesn't
* @param {string} dir - Directory path
* @returns {Promise<void>}
*/
export async function ensureDir(dir) {
await fs.mkdir(dir, { recursive: true });
}
/**
* Read JSON file safely
* @param {string} filePath - Full path to file
* @returns {Promise<*|null>} - Parsed JSON or null if file doesn't exist
*/
export async function readJsonFile(filePath) {
try {
const content = await fs.readFile(filePath, 'utf-8');
return JSON.parse(content);
} catch (err) {
return null;
}
}
/**
* Read text file safely
* @param {string} filePath - Full path to file
* @returns {Promise<string|null>} - File content or null if file doesn't exist
*/
export async function readTextFile(filePath) {
try {
return await fs.readFile(filePath, 'utf-8');
} catch (err) {
return null;
}
}
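Putting the file helpers together, a sketch (directory and IDs invented) of the read-diff-write cycle the syncers above follow:

const dir = './cache/details';            // invented example directory
await ensureDir(dir);

const existingIds = await getExistingIds(dir, { suffix: '.json' });
const validIds = new Set([1, 2, 3]);

await deleteObsoleteFiles(dir, existingIds, validIds, (id) => `${id}.json`);
const wrote = await writeJsonIfChanged(`${dir}/1.json`, { kArtikel: 1, cBeschreibung: null });
console.log(wrote ? 'updated 1.json' : '1.json unchanged');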

View File

@@ -0,0 +1,76 @@
/**
* SyncQueueManager - Manages sync operations with queuing support
*
* Prevents concurrent syncs for the same key and queues subsequent requests.
* Supports both global syncing (single key) and per-group syncing (multiple keys).
*/
export class SyncQueueManager {
constructor() {
this.isSyncing = new Map(); // key -> boolean
this.queuedSyncs = new Map(); // key -> queuedData
}
/**
* Execute a sync operation with automatic queuing
* @param {string} key - Unique identifier for this sync operation
* @param {Function} syncFn - Async function to execute
* @param {*} queuedData - Data to pass to queued sync (optional)
* @returns {Promise<void>}
*/
async executeSync(key, syncFn, queuedData = null) {
// Check if already syncing this key
if (this.isSyncing.get(key)) {
if (this.queuedSyncs.has(key)) {
console.log(`🚫 Sync for '${key}' already in progress and queued. Ignoring.`);
return;
}
console.log(`⏳ Sync for '${key}' already in progress. Queuing.`);
this.queuedSyncs.set(key, queuedData);
return;
}
await this._doSync(key, syncFn);
}
async _doSync(key, syncFn) {
this.isSyncing.set(key, true);
try {
await syncFn();
} finally {
this.isSyncing.set(key, false);
// Process queued sync for this key if any
if (this.queuedSyncs.has(key)) {
const queuedData = this.queuedSyncs.get(key);
this.queuedSyncs.delete(key);
// Log only if we have meaningful data to show
if (queuedData !== null && queuedData !== undefined) {
console.log(`🔄 Processing queued sync for '${key}'...`);
}
// Use setImmediate so the current call stack unwinds before the queued sync starts
setImmediate(() => this.executeSync(key, syncFn, queuedData));
}
}
}
/**
* Check if a sync is currently in progress for a key
* @param {string} key - Unique identifier
* @returns {boolean}
*/
isSyncInProgress(key) {
return this.isSyncing.get(key) || false;
}
/**
* Check if a sync is queued for a key
* @param {string} key - Unique identifier
* @returns {boolean}
*/
isSyncQueued(key) {
return this.queuedSyncs.has(key);
}
}
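Finally, a small usage sketch of the queue manager in isolation (the work function is invented): the second call is queued while the first runs, and a third call is ignored because a sync is already queued. Note that, per the code above, the queued run re-invokes the same syncFn; queuedData is not passed to syncFn and is only used for the log message and re-queuing.

const queue = new SyncQueueManager();

const sync = () => queue.executeSync('categories', async () => {
  await new Promise((resolve) => setTimeout(resolve, 100)); // simulated work
  console.log('sync finished');
}, { reason: 'demo' });

sync(); // runs immediately
sync(); // queued, starts once the first run finishes
sync(); // ignored: a sync for 'categories' is already queued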