Compare commits

...

2 Commits

Author SHA1 Message Date
sebseb7
74529d8b19 Enhanced error handling and logging for the DATEV export process. 2025-08-21 11:41:17 +02:00
sebseb7
bd7c6dddbf Enhance CSV import functionality with improved error messaging and logging
- Updated error message in CSVImportPanel to include a period for better readability.
- Added console logs in the CSV import API route to track the import process and precheck status.
- Removed redundant validation for 'Beguenstigter/Zahlungspflichtiger' to streamline error handling during CSV import.
2025-08-21 04:46:30 +02:00
4 changed files with 236 additions and 66 deletions

View File

@@ -457,7 +457,7 @@ class CSVImportPanel extends Component {
           )}
           {importResult.errors > 0 && (
             <Typography variant="body1" color="warning.main">
-              <strong>Fehler:</strong> {importResult.errors} Zeilen konnten nicht verarbeitet werden
+              <strong>Fehler:</strong> {importResult.errors} Zeilen konnten nicht verarbeitet werden.
             </Typography>
           )}
           {importResult.message && (

View File

@@ -1,56 +0,0 @@
-version: '3.8'
-
-services:
-  nginx:
-    image: nginx:alpine
-    ports:
-      - "80:80"
-    volumes:
-      - ./nginx.dev.conf:/etc/nginx/conf.d/default.conf
-      - ./logs/nginx:/var/log/nginx
-    depends_on:
-      - frontend
-      - backend
-    restart: unless-stopped
-    networks:
-      - fibdash-network
-
-  frontend:
-    build:
-      context: .
-      dockerfile: Dockerfile.dev.frontend
-    ports:
-      - "5001:5001"
-    volumes:
-      - ./client:/app/client
-      - /app/node_modules
-    environment:
-      - NODE_ENV=development
-      - CHOKIDAR_USEPOLLING=true
-    networks:
-      - fibdash-network
-    command: npm run dev:frontend
-
-  backend:
-    build:
-      context: .
-      dockerfile: Dockerfile.dev.backend
-    ports:
-      - "5000:5000"
-    volumes:
-      - ./src:/app/src
-      - /app/node_modules
-    environment:
-      - NODE_ENV=development
-    env_file:
-      - .env
-    networks:
-      - fibdash-network
-    command: npm run dev:backend
-
-networks:
-  fibdash-network:
-    driver: bridge
-
-volumes:
-  node_modules:

View File

@@ -129,6 +129,7 @@ router.post('/test-csv-import', async (req, res) => {
 
 // Import CSV transactions to database
 router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
+  console.log('Importing CSV transactions');
   try {
     const { executeQuery } = require('../../config/database');
     const { transactions, filename, batchId, headers } = req.body;
@@ -180,6 +181,7 @@ router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
     let successCount = 0;
     let errorCount = 0;
     const errors = [];
+    console.log('precheck done');
 
     for (let i = 0; i < transactions.length; i++) {
       const transaction = transactions[i];
@@ -195,9 +197,6 @@ router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
         validationErrors.push('Betrag is required');
       }
 
-      if (!transaction['Beguenstigter/Zahlungspflichtiger'] || transaction['Beguenstigter/Zahlungspflichtiger'].trim() === '') {
-        validationErrors.push('Beguenstigter/Zahlungspflichtiger is required');
-      }
 
       if (validationErrors.length > 2) {
         console.log('Skipping invalid row ' + (i + 1) + ':', validationErrors);
@@ -287,6 +286,8 @@ router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
         errorCount++;
       }
     }
+
+    console.log('import done',errors);
 
     res.json({
       success: true,
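
Note on the validation change above: with the 'Beguenstigter/Zahlungspflichtiger' check removed, a row is now skipped only when more than two of the remaining per-field checks fail. A minimal standalone sketch of the resulting behavior (illustrative only; apart from the 'Betrag' check and the threshold, the field checks are assumed, not taken from the diff):

    // Hypothetical sketch; shouldSkipRow is not a function in this codebase.
    const shouldSkipRow = (transaction) => {
      const validationErrors = [];
      if (!transaction['Betrag'] || String(transaction['Betrag']).trim() === '') {
        validationErrors.push('Betrag is required');
      }
      // ...further per-field checks elided...
      // A row is skipped only when MORE THAN TWO checks fail, so rows that
      // are missing only 'Beguenstigter/Zahlungspflichtiger' now import.
      return validationErrors.length > 2;
    };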

View File

@@ -39,14 +39,33 @@ const formatDatevAmount = (amount) => {
   return Math.abs(amount).toFixed(2).replace('.', ',');
 };
 
-const formatDatevDate = (dateString) => {
-  if (!dateString) return '';
+const formatDatevDate = (date) => {
+  if (!date) return '';
 
-  const parts = dateString.split('.');
+  // Handle Date object
+  if (date instanceof Date) {
+    const day = date.getDate().toString().padStart(2, '0');
+    const month = (date.getMonth() + 1).toString().padStart(2, '0');
+    return day + month;
+  }
+
+  // Handle string date
+  const dateStr = date.toString();
+  const parts = dateStr.split('.');
   if (parts.length === 3) {
     const day = parts[0].padStart(2, '0');
     const month = parts[1].padStart(2, '0');
     return day + month;
   }
+
+  // Try to parse as date string
+  const parsedDate = new Date(dateStr);
+  if (!isNaN(parsedDate)) {
+    const day = parsedDate.getDate().toString().padStart(2, '0');
+    const month = (parsedDate.getMonth() + 1).toString().padStart(2, '0');
+    return day + month;
+  }
+
   return '';
 };
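
For reference, a few illustrative calls against the rewritten helper (outputs assume the code above; the format produced is day plus month, no year):

    formatDatevDate(new Date(2025, 2, 5)); // '0503' (Date object, 5 March)
    formatDatevDate('05.03.2025');         // '0503' (German DD.MM.YYYY string)
    formatDatevDate('March 5, 2025');      // '0503' (fallback Date parsing)
    formatDatevDate('');                   // ''    (empty input)

One caveat: the fallback new Date(dateStr) parses ISO strings such as '2025-03-05' as UTC midnight, so getDate()/getMonth() can shift by one day in timezones west of UTC.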
@@ -55,13 +74,219 @@ const quote = (str, maxLen = 60) => {
   return '"' + str.slice(0, maxLen).replace(/"/g, '""') + '"';
 };
 
+// Parse konto field which might contain multiple accounts like "5400+5300"
+const parseKonto = (konto) => {
+  if (!konto) return '';
+  // Take the first account number if multiple are present
+  const parts = konto.split('+');
+  return parts[0].trim();
+};
+
 // DATEV export endpoint
 router.get('/datev/:timeRange', authenticateToken, async (req, res) => {
   try {
     const { timeRange } = req.params;
-    // TODO: Update to use database queries instead of CSV file
-    res.status(501).json({ error: 'DATEV export temporarily disabled - use database-based queries' });
-    return;
+    const { executeQuery } = require('../../config/database');
+
+    // Parse the time range to get start and end dates
+    let startDate, endDate;
+
+    if (timeRange.includes('-Q')) {
+      // Quarter format: 2025-Q1
+      const [year, quarterPart] = timeRange.split('-Q');
+      const quarter = parseInt(quarterPart, 10);
+      const startMonth = (quarter - 1) * 3 + 1;
+      const endMonth = startMonth + 2;
+      startDate = new Date(year, startMonth - 1, 1);
+      endDate = new Date(year, endMonth - 1, new Date(year, endMonth, 0).getDate());
+    } else if (timeRange.length === 4) {
+      // Year format: 2025
+      startDate = new Date(timeRange, 0, 1);
+      endDate = new Date(timeRange, 11, 31);
+    } else {
+      // Month format: 2025-03
+      const [year, month] = timeRange.split('-');
+      startDate = new Date(year, parseInt(month) - 1, 1);
+      endDate = new Date(year, parseInt(month), 0);
+    }
+
+    // Format dates for SQL query
+    const sqlStartDate = startDate.toISOString().split('T')[0];
+    const sqlEndDate = endDate.toISOString().split('T')[0];
+
+    // Query to get all DATEV data with proper joins
+    // This handles multiple documents per transaction by creating separate rows
+    const query = `
+      WITH DatevDocuments AS (
+        -- Get documents from tUmsatzBeleg
+        SELECT
+          uk.kZahlungsabgleichUmsatz,
+          zu.fBetrag as umsatz_brutto,
+          CASE WHEN zu.fBetrag < 0 THEN 'H' ELSE 'S' END as soll_haben_kz,
+          JSON_VALUE(uk.data, '$.konto1') as konto,
+          '' as gegenkonto, -- No creditorID in tUmsatzBeleg
+          -- BU determination based on amount and konto type
+          CASE
+            WHEN JSON_VALUE(uk.data, '$.konto1') IN ('3720', '3740', '2100', '1460', '1462') THEN ''
+            WHEN zu.fBetrag > 0 THEN ''
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '5%' THEN '9' -- 19% for purchases
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '6%' THEN '9' -- 19% for expenses
+            ELSE ''
+          END as bu,
+          FORMAT(zu.dBuchungsdatum, 'Mdd') as buchungsdatum_mdd,
+          zu.dBuchungsdatum,
+          '' as rechnungsnummer, -- No invoice number in tUmsatzBeleg
+          zu.cVerwendungszweck as buchungstext,
+          ub.datevlink as beleglink,
+          1 as priority -- tUmsatzBeleg has priority
+        FROM tUmsatzKontierung uk
+        INNER JOIN tZahlungsabgleichUmsatz zu ON uk.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        INNER JOIN tUmsatzBeleg ub ON ub.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        WHERE ub.datevlink IS NOT NULL
+          AND zu.dBuchungsdatum >= @startDate
+          AND zu.dBuchungsdatum <= @endDate
+
+        UNION ALL
+
+        -- Get documents from tPdfObjekt via tZahlungsabgleichUmsatzLink
+        SELECT
+          uk.kZahlungsabgleichUmsatz,
+          zu.fBetrag as umsatz_brutto,
+          CASE WHEN zu.fBetrag < 0 THEN 'H' ELSE 'S' END as soll_haben_kz,
+          JSON_VALUE(uk.data, '$.konto1') as konto,
+          COALESCE(JSON_VALUE(po.extraction, '$.creditorID'), '') as gegenkonto,
+          -- BU determination based on amount and konto type
+          CASE
+            WHEN JSON_VALUE(uk.data, '$.konto1') IN ('3720', '3740', '2100', '1460', '1462') THEN ''
+            WHEN zu.fBetrag > 0 THEN ''
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '5%' THEN '9' -- 19% for purchases
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '6%' THEN '9' -- 19% for expenses
+            ELSE ''
+          END as bu,
+          FORMAT(zu.dBuchungsdatum, 'Mdd') as buchungsdatum_mdd,
+          zu.dBuchungsdatum,
+          COALESCE(JSON_VALUE(po.extraction, '$.invoice_number'), '') as rechnungsnummer,
+          zu.cVerwendungszweck as buchungstext,
+          po.datevlink as beleglink,
+          2 as priority -- tPdfObjekt has lower priority
+        FROM tUmsatzKontierung uk
+        INNER JOIN tZahlungsabgleichUmsatz zu ON uk.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        INNER JOIN tZahlungsabgleichUmsatzLink zul ON zu.kZahlungsabgleichUmsatz = zul.kZahlungsabgleichUmsatz
+          AND zul.linktype = 'kLieferantenBestellung'
+        INNER JOIN tPdfObjekt po ON zul.linktarget = po.kLieferantenbestellung
+        WHERE po.datevlink IS NOT NULL
+          AND zu.dBuchungsdatum >= @startDate
+          AND zu.dBuchungsdatum <= @endDate
+
+        UNION ALL
+
+        -- Get transactions without documents
+        SELECT
+          uk.kZahlungsabgleichUmsatz,
+          zu.fBetrag as umsatz_brutto,
+          CASE WHEN zu.fBetrag < 0 THEN 'H' ELSE 'S' END as soll_haben_kz,
+          JSON_VALUE(uk.data, '$.konto1') as konto,
+          '' as gegenkonto,
+          -- BU determination based on amount and konto type
+          CASE
+            WHEN JSON_VALUE(uk.data, '$.konto1') IN ('3720', '3740', '2100', '1460', '1462') THEN ''
+            WHEN zu.fBetrag > 0 THEN ''
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '5%' THEN '9' -- 19% for purchases
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '6%' THEN '9' -- 19% for expenses
+            ELSE ''
+          END as bu,
+          FORMAT(zu.dBuchungsdatum, 'Mdd') as buchungsdatum_mdd,
+          zu.dBuchungsdatum,
+          '' as rechnungsnummer,
+          zu.cVerwendungszweck as buchungstext,
+          '' as beleglink,
+          3 as priority -- No documents has lowest priority
+        FROM tUmsatzKontierung uk
+        INNER JOIN tZahlungsabgleichUmsatz zu ON uk.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        WHERE zu.dBuchungsdatum >= @startDate
+          AND zu.dBuchungsdatum <= @endDate
+          AND NOT EXISTS (
+            SELECT 1 FROM tUmsatzBeleg ub2
+            WHERE ub2.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+              AND ub2.datevlink IS NOT NULL
+          )
+          AND NOT EXISTS (
+            SELECT 1 FROM tZahlungsabgleichUmsatzLink zul2
+            INNER JOIN tPdfObjekt po2 ON zul2.linktarget = po2.kLieferantenbestellung
+            WHERE zul2.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+              AND zul2.linktype = 'kLieferantenBestellung'
+              AND po2.datevlink IS NOT NULL
+          )
+      )
+      SELECT
+        *,
+        ROW_NUMBER() OVER (PARTITION BY kZahlungsabgleichUmsatz, beleglink ORDER BY priority) as rn
+      FROM DatevDocuments
+      ORDER BY dBuchungsdatum DESC, kZahlungsabgleichUmsatz, priority
+    `;
+
+    const result = await executeQuery(query, {
+      startDate: sqlStartDate,
+      endDate: sqlEndDate
+    });
+
+    // Format data for DATEV CSV
+    const datevRows = [];
+
+    // Build header
+    const periodStart = startDate.getFullYear() +
+      ('0' + (startDate.getMonth() + 1)).slice(-2) +
+      ('0' + startDate.getDate()).slice(-2);
+    const periodEnd = endDate.getFullYear() +
+      ('0' + (endDate.getMonth() + 1)).slice(-2) +
+      ('0' + endDate.getDate()).slice(-2);
+
+    datevRows.push(buildDatevHeader(periodStart, periodEnd));
+    datevRows.push(DATEV_COLS);
+
+    // Process each transaction
+    result.recordset.forEach(row => {
+      // Skip duplicate rows (keep only the first occurrence of each transaction+beleglink combination)
+      if (row.rn > 1) return;
+
+      const datevRow = [
+        formatDatevAmount(row.umsatz_brutto), // Umsatz (ohne Soll/Haben-Kz)
+        row.soll_haben_kz,                    // Soll/Haben-Kennzeichen
+        '',                                   // WKZ Umsatz
+        '',                                   // Kurs
+        '',                                   // Basis-Umsatz
+        '',                                   // WKZ Basis-Umsatz
+        parseKonto(row.konto),                // Konto (parsed)
+        row.gegenkonto || '',                 // Gegenkonto (ohne BU-Schlüssel)
+        row.bu || '',                         // BU-Schlüssel
+        row.buchungsdatum_mdd || '',          // Belegdatum (MDD format)
+        quote(row.rechnungsnummer || ''),     // Belegfeld 1 (invoice number)
+        '',                                   // Belegfeld 2
+        '',                                   // Skonto
+        quote(row.buchungstext || ''),        // Buchungstext
+        '',                                   // Postensperre
+        '',                                   // Diverse Adressnummer
+        '',                                   // Geschäftspartnerbank
+        '',                                   // Sachverhalt
+        '',                                   // Zinssperre
+        row.beleglink || ''                   // Beleglink
+      ].join(';');
+
+      datevRows.push(datevRow);
+    });
+
+    // Generate CSV content
+    const csvContent = datevRows.join('\n');
+
+    // Set headers for CSV download
+    const filename = `EXTF_${timeRange.replace('-', '_')}.csv`;
+    res.setHeader('Content-Type', 'text/csv; charset=windows-1252');
+    res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
+
+    // Send CSV content
+    res.send(csvContent);
   } catch (error) {
     console.error('Error generating DATEV export:', error);
     res.status(500).json({ error: 'Failed to generate DATEV export' });
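
Taken together, a short sketch of how the new pieces behave (illustrative only; assumes the code above):

    // Time-range parsing in the endpoint (local-time results):
    // '2025-Q1' -> 2025-01-01 .. 2025-03-31 (new Date(year, endMonth, 0) yields the quarter's last day)
    // '2025'    -> 2025-01-01 .. 2025-12-31
    // '2025-03' -> 2025-03-01 .. 2025-03-31 (new Date(year, month, 0) yields the month's last day)

    parseKonto('5400+5300'); // '5400' (first account wins when several are joined with '+')
    parseKonto('4830');      // '4830'
    parseKonto(null);        // ''

    // Deduplication: rows sharing (kZahlungsabgleichUmsatz, beleglink) are ranked by
    // priority (1 = tUmsatzBeleg, 2 = tPdfObjekt, 3 = no document) via ROW_NUMBER(),
    // and the forEach keeps only rows with rn === 1.

Two caveats worth flagging: startDate.toISOString() renders local dates in UTC, so in timezones ahead of UTC the SQL bounds can land on the previous calendar day; and the response is declared charset=windows-1252 while res.send emits UTF-8 by default, so umlauts in Buchungstext may need an explicit transcoding step (e.g. via iconv-lite) before DATEV import.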