From 74529d8b19b9aef1d328acb047f8b7a15898caff Mon Sep 17 00:00:00 2001
From: sebseb7
Date: Thu, 21 Aug 2025 11:41:17 +0200
Subject: [PATCH] Implement database-backed DATEV export and remove docker-compose.dev.yml

---
 docker-compose.dev.yml   |  56 ---------
 src/routes/data/datev.js | 237 ++++++++++++++++++++++++++++++++++++++-
 2 files changed, 231 insertions(+), 62 deletions(-)
 delete mode 100644 docker-compose.dev.yml

diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
deleted file mode 100644
index b9d239d..0000000
--- a/docker-compose.dev.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-version: '3.8'
-
-services:
-  nginx:
-    image: nginx:alpine
-    ports:
-      - "80:80"
-    volumes:
-      - ./nginx.dev.conf:/etc/nginx/conf.d/default.conf
-      - ./logs/nginx:/var/log/nginx
-    depends_on:
-      - frontend
-      - backend
-    restart: unless-stopped
-    networks:
-      - fibdash-network
-
-  frontend:
-    build:
-      context: .
-      dockerfile: Dockerfile.dev.frontend
-    ports:
-      - "5001:5001"
-    volumes:
-      - ./client:/app/client
-      - /app/node_modules
-    environment:
-      - NODE_ENV=development
-      - CHOKIDAR_USEPOLLING=true
-    networks:
-      - fibdash-network
-    command: npm run dev:frontend
-
-  backend:
-    build:
-      context: .
-      dockerfile: Dockerfile.dev.backend
-    ports:
-      - "5000:5000"
-    volumes:
-      - ./src:/app/src
-      - /app/node_modules
-    environment:
-      - NODE_ENV=development
-    env_file:
-      - .env
-    networks:
-      - fibdash-network
-    command: npm run dev:backend
-
-networks:
-  fibdash-network:
-    driver: bridge
-
-volumes:
-  node_modules:
\ No newline at end of file

diff --git a/src/routes/data/datev.js b/src/routes/data/datev.js
index 3956187..09bf92c 100644
--- a/src/routes/data/datev.js
+++ b/src/routes/data/datev.js
@@ -39,14 +39,33 @@ const formatDatevAmount = (amount) => {
   return Math.abs(amount).toFixed(2).replace('.', ',');
 };
 
-const formatDatevDate = (dateString) => {
-  if (!dateString) return '';
-  const parts = dateString.split('.');
+const formatDatevDate = (date) => {
+  if (!date) return '';
+
+  // Handle Date object
+  if (date instanceof Date) {
+    const day = date.getDate().toString().padStart(2, '0');
+    const month = (date.getMonth() + 1).toString().padStart(2, '0');
+    return day + month;
+  }
+
+  // Handle string date
+  const dateStr = date.toString();
+  const parts = dateStr.split('.');
   if (parts.length === 3) {
     const day = parts[0].padStart(2, '0');
     const month = parts[1].padStart(2, '0');
     return day + month;
   }
+
+  // Try to parse as date string
+  const parsedDate = new Date(dateStr);
+  if (!isNaN(parsedDate)) {
+    const day = parsedDate.getDate().toString().padStart(2, '0');
+    const month = (parsedDate.getMonth() + 1).toString().padStart(2, '0');
+    return day + month;
+  }
+
   return '';
 };
 
@@ -55,13 +74,219 @@ const quote = (str, maxLen = 60) => {
   return '"' + str.slice(0, maxLen).replace(/"/g, '""') + '"';
 };
 
+// Parse konto field which might contain multiple accounts like "5400+5300"
+const parseKonto = (konto) => {
+  if (!konto) return '';
+  // Take the first account number if multiple are present
+  const parts = konto.split('+');
+  return parts[0].trim();
+};
+
 // DATEV export endpoint
 router.get('/datev/:timeRange', authenticateToken, async (req, res) => {
   try {
     const { timeRange } = req.params;
-    // TODO: Update to use database queries instead of CSV file
-    res.status(501).json({ error: 'DATEV export temporarily disabled - use database-based queries' });
-    return;
+    const { executeQuery } = require('../../config/database');
+
+    // Parse the time range to get start and end dates
+    let startDate, endDate;
+
+    if (timeRange.includes('-Q')) {
+      // Quarter format: 2025-Q1
+      const [year, quarterPart] = timeRange.split('-Q');
+      const quarter = parseInt(quarterPart, 10);
+      const startMonth = (quarter - 1) * 3 + 1;
+      const endMonth = startMonth + 2;
+
+      startDate = new Date(year, startMonth - 1, 1);
+      endDate = new Date(year, endMonth - 1, new Date(year, endMonth, 0).getDate());
+    } else if (timeRange.length === 4) {
+      // Year format: 2025
+      startDate = new Date(timeRange, 0, 1);
+      endDate = new Date(timeRange, 11, 31);
+    } else {
+      // Month format: 2025-03
+      const [year, month] = timeRange.split('-');
+      startDate = new Date(year, parseInt(month) - 1, 1);
+      endDate = new Date(year, parseInt(month), 0);
+    }
+
+    // Format dates for SQL query
+    const sqlStartDate = startDate.toISOString().split('T')[0];
+    const sqlEndDate = endDate.toISOString().split('T')[0];
+
+    // Query to get all DATEV data with proper joins
+    // This handles multiple documents per transaction by creating separate rows
+    const query = `
+      WITH DatevDocuments AS (
+        -- Get documents from tUmsatzBeleg
+        SELECT
+          uk.kZahlungsabgleichUmsatz,
+          zu.fBetrag as umsatz_brutto,
+          CASE WHEN zu.fBetrag < 0 THEN 'H' ELSE 'S' END as soll_haben_kz,
+          JSON_VALUE(uk.data, '$.konto1') as konto,
+          '' as gegenkonto, -- No creditorID in tUmsatzBeleg
+          -- BU determination based on amount and konto type
+          CASE
+            WHEN JSON_VALUE(uk.data, '$.konto1') IN ('3720', '3740', '2100', '1460', '1462') THEN ''
+            WHEN zu.fBetrag > 0 THEN ''
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '5%' THEN '9' -- 19% for purchases
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '6%' THEN '9' -- 19% for expenses
+            ELSE ''
+          END as bu,
+          FORMAT(zu.dBuchungsdatum, 'Mdd') as buchungsdatum_mdd,
+          zu.dBuchungsdatum,
+          '' as rechnungsnummer, -- No invoice number in tUmsatzBeleg
+          zu.cVerwendungszweck as buchungstext,
+          ub.datevlink as beleglink,
+          1 as priority -- tUmsatzBeleg has priority
+        FROM tUmsatzKontierung uk
+        INNER JOIN tZahlungsabgleichUmsatz zu ON uk.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        INNER JOIN tUmsatzBeleg ub ON ub.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        WHERE ub.datevlink IS NOT NULL
+          AND zu.dBuchungsdatum >= @startDate
+          AND zu.dBuchungsdatum <= @endDate
+
+        UNION ALL
+
+        -- Get documents from tPdfObjekt via tZahlungsabgleichUmsatzLink
+        SELECT
+          uk.kZahlungsabgleichUmsatz,
+          zu.fBetrag as umsatz_brutto,
+          CASE WHEN zu.fBetrag < 0 THEN 'H' ELSE 'S' END as soll_haben_kz,
+          JSON_VALUE(uk.data, '$.konto1') as konto,
+          COALESCE(JSON_VALUE(po.extraction, '$.creditorID'), '') as gegenkonto,
+          -- BU determination based on amount and konto type
+          CASE
+            WHEN JSON_VALUE(uk.data, '$.konto1') IN ('3720', '3740', '2100', '1460', '1462') THEN ''
+            WHEN zu.fBetrag > 0 THEN ''
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '5%' THEN '9' -- 19% for purchases
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '6%' THEN '9' -- 19% for expenses
+            ELSE ''
+          END as bu,
+          FORMAT(zu.dBuchungsdatum, 'Mdd') as buchungsdatum_mdd,
+          zu.dBuchungsdatum,
+          COALESCE(JSON_VALUE(po.extraction, '$.invoice_number'), '') as rechnungsnummer,
+          zu.cVerwendungszweck as buchungstext,
+          po.datevlink as beleglink,
+          2 as priority -- tPdfObjekt has lower priority
+        FROM tUmsatzKontierung uk
+        INNER JOIN tZahlungsabgleichUmsatz zu ON uk.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        INNER JOIN tZahlungsabgleichUmsatzLink zul ON zu.kZahlungsabgleichUmsatz = zul.kZahlungsabgleichUmsatz
+          AND zul.linktype = 'kLieferantenBestellung'
+        INNER JOIN tPdfObjekt po ON zul.linktarget = po.kLieferantenbestellung
+        WHERE po.datevlink IS NOT NULL
+          AND zu.dBuchungsdatum >= @startDate
+          AND zu.dBuchungsdatum <= @endDate
+
+        UNION ALL
+
+        -- Get transactions without documents
+        SELECT
+          uk.kZahlungsabgleichUmsatz,
+          zu.fBetrag as umsatz_brutto,
+          CASE WHEN zu.fBetrag < 0 THEN 'H' ELSE 'S' END as soll_haben_kz,
+          JSON_VALUE(uk.data, '$.konto1') as konto,
+          '' as gegenkonto,
+          -- BU determination based on amount and konto type
+          CASE
+            WHEN JSON_VALUE(uk.data, '$.konto1') IN ('3720', '3740', '2100', '1460', '1462') THEN ''
+            WHEN zu.fBetrag > 0 THEN ''
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '5%' THEN '9' -- 19% for purchases
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '6%' THEN '9' -- 19% for expenses
+            ELSE ''
+          END as bu,
+          FORMAT(zu.dBuchungsdatum, 'Mdd') as buchungsdatum_mdd,
+          zu.dBuchungsdatum,
+          '' as rechnungsnummer,
+          zu.cVerwendungszweck as buchungstext,
+          '' as beleglink,
+          3 as priority -- No documents has lowest priority
+        FROM tUmsatzKontierung uk
+        INNER JOIN tZahlungsabgleichUmsatz zu ON uk.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        WHERE zu.dBuchungsdatum >= @startDate
+          AND zu.dBuchungsdatum <= @endDate
+          AND NOT EXISTS (
+            SELECT 1 FROM tUmsatzBeleg ub2
+            WHERE ub2.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+              AND ub2.datevlink IS NOT NULL
+          )
+          AND NOT EXISTS (
+            SELECT 1 FROM tZahlungsabgleichUmsatzLink zul2
+            INNER JOIN tPdfObjekt po2 ON zul2.linktarget = po2.kLieferantenbestellung
+            WHERE zul2.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+              AND zul2.linktype = 'kLieferantenBestellung'
+              AND po2.datevlink IS NOT NULL
+          )
+      )
+      SELECT
+        *,
+        ROW_NUMBER() OVER (PARTITION BY kZahlungsabgleichUmsatz, beleglink ORDER BY priority) as rn
+      FROM DatevDocuments
+      ORDER BY dBuchungsdatum DESC, kZahlungsabgleichUmsatz, priority
+    `;
+
+    const result = await executeQuery(query, {
+      startDate: sqlStartDate,
+      endDate: sqlEndDate
+    });
+
+    // Format data for DATEV CSV
+    const datevRows = [];
+
+    // Build header
+    const periodStart = startDate.getFullYear() +
+      ('0' + (startDate.getMonth() + 1)).slice(-2) +
+      ('0' + startDate.getDate()).slice(-2);
+    const periodEnd = endDate.getFullYear() +
+      ('0' + (endDate.getMonth() + 1)).slice(-2) +
+      ('0' + endDate.getDate()).slice(-2);
+
+    datevRows.push(buildDatevHeader(periodStart, periodEnd));
+    datevRows.push(DATEV_COLS);
+
+    // Process each transaction
+    result.recordset.forEach(row => {
+      // Skip duplicate rows (keep only the first occurrence of each transaction+beleglink combination)
+      if (row.rn > 1) return;
+
+      const datevRow = [
+        formatDatevAmount(row.umsatz_brutto), // Umsatz (ohne Soll/Haben-Kz)
+        row.soll_haben_kz,                    // Soll/Haben-Kennzeichen
+        '',                                   // WKZ Umsatz
+        '',                                   // Kurs
+        '',                                   // Basis-Umsatz
+        '',                                   // WKZ Basis-Umsatz
+        parseKonto(row.konto),                // Konto (parsed)
+        row.gegenkonto || '',                 // Gegenkonto (ohne BU-Schlüssel)
+        row.bu || '',                         // BU-Schlüssel
+        row.buchungsdatum_mdd || '',          // Belegdatum (MDD format)
+        quote(row.rechnungsnummer || ''),     // Belegfeld 1 (invoice number)
+        '',                                   // Belegfeld 2
+        '',                                   // Skonto
+        quote(row.buchungstext || ''),        // Buchungstext
+        '',                                   // Postensperre
+        '',                                   // Diverse Adressnummer
+        '',                                   // Geschäftspartnerbank
+        '',                                   // Sachverhalt
+        '',                                   // Zinssperre
+        row.beleglink || ''                   // Beleglink
+      ].join(';');
+
+      datevRows.push(datevRow);
+    });
+
+    // Generate CSV content
+    const csvContent = datevRows.join('\n');
+
+    // Set headers for CSV download
+    const filename = `EXTF_${timeRange.replace('-', '_')}.csv`;
+    res.setHeader('Content-Type', 'text/csv; charset=windows-1252');
+    res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
+
+    // Send CSV content
+    res.send(csvContent);
+
   } catch (error) {
     console.error('Error generating DATEV export:', error);
     res.status(500).json({ error: 'Failed to generate DATEV export' });
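
Note on the response encoding: the Content-Type header declares charset=windows-1252, but res.send(csvContent) writes the string out as UTF-8, so umlauts in Buchungstext or Beleglink values may arrive mis-encoded on the DATEV side. A minimal sketch of one way to make the bytes match the declared charset, assuming the iconv-lite package were added as a dependency (it is not part of this patch):

    const iconv = require('iconv-lite');

    // Transcode the CSV string to a Windows-1252 buffer before sending,
    // so the body matches the charset announced in the Content-Type header.
    const csvBuffer = iconv.encode(csvContent, 'win1252');
    res.setHeader('Content-Type', 'text/csv; charset=windows-1252');
    res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
    res.send(csvBuffer);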
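
A second observation on the date window: sqlStartDate and sqlEndDate are derived with toISOString(), which converts the local Date to UTC first. On a server running in CET/CEST (this commit was authored at +0200), local midnight of the period end becomes the previous day in UTC, so the dBuchungsdatum <= @endDate filter can silently exclude the last day of the exported period. A small sketch of a timezone-safe formatter that could be used instead; the helper name toSqlDate is hypothetical and not part of this patch:

    // Format a local Date as YYYY-MM-DD without converting to UTC.
    const toSqlDate = (d) =>
      `${d.getFullYear()}-${String(d.getMonth() + 1).padStart(2, '0')}-${String(d.getDate()).padStart(2, '0')}`;

    // Possible replacement for the toISOString().split('T')[0] calls:
    const sqlStartDate = toSqlDate(startDate);
    const sqlEndDate = toSqlDate(endDate);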