Refactor data.js: remove the old inline CSV parsing logic and move CSV import into a dedicated router. Consolidate transaction retrieval, tighten error handling across the affected endpoints, and port the DATEV export to the new database queries, improving overall code organization.
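For context, a minimal sketch of how the new router might be mounted in the application entry point. The '/api/data' prefix, the server file name, and the port are assumptions for illustration, not part of this commit:

// server.js (hypothetical entry point; the mount path '/api/data' is an assumption)
const express = require('express');
const csvImportRouter = require('./src/routes/data/csvImport');

const app = express();
app.use(express.json());              // the routes read req.body, so a JSON body parser is required
app.use('/api/data', csvImportRouter);

app.listen(3000, () => console.log('API listening on :3000'));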
373 src/routes/data/csvImport.js (new file)
@@ -0,0 +1,373 @@
const express = require('express');
const { authenticateToken } = require('../../middleware/auth');
// Hoisted once instead of being re-required inside every handler.
const { executeQuery } = require('../../config/database');

const router = express.Router();

// Test CSV import endpoint (no auth for testing) - ACTUALLY IMPORTS TO DATABASE
router.post('/test-csv-import', async (req, res) => {
  try {
    const { transactions, filename, batchId } = req.body;

    if (!transactions || !Array.isArray(transactions)) {
      return res.status(400).json({ error: 'Transactions array is required' });
    }

    const importBatchId = batchId || 'test_import_' + Date.now();
    let successCount = 0;
    let errorCount = 0;
    const errors = [];

    for (let i = 0; i < transactions.length; i++) {
      const transaction = transactions[i];

      try {
        const validationErrors = [];

        if (!transaction['Buchungstag'] || transaction['Buchungstag'].trim() === '') {
          validationErrors.push('Buchungstag is required');
        }

        if (!transaction['Betrag'] || transaction['Betrag'].toString().trim() === '') {
          validationErrors.push('Betrag is required');
        }

        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Parse German-style dates (DD.MM.YYYY, also tolerating / and - separators).
        // Buchungstag is guaranteed non-empty at this point by the check above.
        let parsedDate = null;
        const dateStr = transaction['Buchungstag'].trim();
        const dateParts = dateStr.split(/[.\/\-]/);
        if (dateParts.length === 3) {
          const day = parseInt(dateParts[0], 10);
          const month = parseInt(dateParts[1], 10) - 1;
          let year = parseInt(dateParts[2], 10);

          // Two-digit years: 00-49 => 2000s, 50-99 => 1900s.
          if (year < 100) {
            year += (year < 50) ? 2000 : 1900;
          }

          parsedDate = new Date(year, month, day);

          if (isNaN(parsedDate.getTime())) {
            parsedDate = null;
            validationErrors.push('Invalid date format: ' + dateStr);
          }
        }

        // Date-parse failures were previously recorded but never acted on; fail the row here.
        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Normalize German number format: '.' is a thousands separator and ',' the
        // decimal mark (e.g. "1.234,56"), so strip the dots before swapping the comma.
        let numericAmount = 0;
        if (transaction['Betrag']) {
          const amountStr = transaction['Betrag'].toString().replace(/[^\d,.-]/g, '');
          const normalizedAmount = amountStr.includes(',')
            ? amountStr.replace(/\./g, '').replace(',', '.')
            : amountStr;
          numericAmount = parseFloat(normalizedAmount) || 0;
        }

        const insertQuery = `
          INSERT INTO fibdash.CSVTransactions
          (buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
           beguenstigter_zahlungspflichtiger, kontonummer_iban, bic, verwendungszweck,
           parsed_date, numeric_amount, import_batch_id, source_filename, source_row_number)
          VALUES
          (@buchungstag, @wertstellung, @umsatzart, @betrag, @betrag_original, @waehrung,
           @beguenstigter_zahlungspflichtiger, @kontonummer_iban, @bic, @verwendungszweck,
           @parsed_date, @numeric_amount, @import_batch_id, @source_filename, @source_row_number)
        `;

        await executeQuery(insertQuery, {
          buchungstag: transaction['Buchungstag'] || null,
          wertstellung: transaction['Valutadatum'] || null,
          umsatzart: transaction['Buchungstext'] || null,
          betrag: numericAmount,
          betrag_original: transaction['Betrag'] || null,
          waehrung: transaction['Waehrung'] || null,
          beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
          kontonummer_iban: transaction['Kontonummer/IBAN'] || null,
          bic: transaction['BIC (SWIFT-Code)'] || null,
          verwendungszweck: transaction['Verwendungszweck'] || null,
          parsed_date: parsedDate,
          numeric_amount: numericAmount,
          import_batch_id: importBatchId,
          source_filename: filename || 'test_import',
          source_row_number: i + 1
        });

        successCount++;
      } catch (error) {
        console.error('Error importing transaction ' + (i + 1) + ':', error);
        errors.push({
          row: i + 1,
          error: error.message,
          transaction: transaction
        });
        errorCount++;
      }
    }

    res.json({
      success: true,
      batchId: importBatchId,
      imported: successCount,
      errors: errorCount,
      details: errors.length > 0 ? errors : undefined,
      paypalTransaction: transactions.find(t => t['Kontonummer/IBAN'] === 'LU89751000135104200E')
    });

  } catch (error) {
    console.error('Test import error:', error);
    res.status(500).json({ error: 'Test import failed' });
  }
});
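For reference, a request the test endpoint would accept might look like the sketch below. The base URL and all field values are illustrative; the IBAN is the PayPal account the handler singles out in its response:

// Hypothetical client call against the unauthenticated test endpoint.
const payload = {
  filename: 'umsaetze_2024.csv', // illustrative file name
  transactions: [{
    'Buchungstag': '02.01.24',
    'Valutadatum': '02.01.24',
    'Buchungstext': 'LASTSCHRIFT',
    'Verwendungszweck': 'PP.1234.PP example',
    'Beguenstigter/Zahlungspflichtiger': 'PayPal Europe',
    'Kontonummer/IBAN': 'LU89751000135104200E',
    'BIC (SWIFT-Code)': 'PPLXLULL',
    'Betrag': '-19,99',
    'Waehrung': 'EUR'
  }]
};

const res = await fetch('http://localhost:3000/api/data/test-csv-import', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(payload)
});
console.log(await res.json()); // e.g. { success: true, batchId: 'test_import_...', imported: 1, errors: 0 }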

// Import CSV transactions to database
router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
  try {
    const { transactions, filename, batchId, headers } = req.body;

    if (!transactions || !Array.isArray(transactions)) {
      return res.status(400).json({ error: 'Transactions array is required' });
    }

    // Header names expected from the bank's CSV export.
    const expectedHeaders = [
      'Auftragskonto',
      'Buchungstag',
      'Valutadatum',
      'Buchungstext',
      'Verwendungszweck',
      'Glaeubiger ID',
      'Mandatsreferenz',
      'Kundenreferenz (End-to-End)',
      'Sammlerreferenz',
      'Lastschrift Ursprungsbetrag',
      'Auslagenersatz Ruecklastschrift',
      'Beguenstigter/Zahlungspflichtiger',
      'Kontonummer/IBAN',
      'BIC (SWIFT-Code)',
      'Betrag',
      'Waehrung',
      'Info'
    ];

    if (headers && Array.isArray(headers)) {
      const missingHeaders = expectedHeaders.filter(expected =>
        !headers.some(header => header.trim() === expected)
      );

      if (missingHeaders.length > 0) {
        return res.status(400).json({
          error: 'Invalid CSV format - missing required headers',
          missing: missingHeaders,
          expected: expectedHeaders,
          received: headers
        });
      }
    }

    if (transactions.length === 0) {
      return res.status(400).json({ error: 'No transaction data found' });
    }

    const importBatchId = batchId || 'import_' + Date.now();
    let successCount = 0;
    let errorCount = 0;
    const errors = [];

    for (let i = 0; i < transactions.length; i++) {
      const transaction = transactions[i];

      try {
        const validationErrors = [];

        if (!transaction['Buchungstag'] || transaction['Buchungstag'].trim() === '') {
          validationErrors.push('Buchungstag is required');
        }

        if (!transaction['Betrag'] || transaction['Betrag'].toString().trim() === '') {
          validationErrors.push('Betrag is required');
        }

        if (!transaction['Beguenstigter/Zahlungspflichtiger'] || transaction['Beguenstigter/Zahlungspflichtiger'].trim() === '') {
          validationErrors.push('Beguenstigter/Zahlungspflichtiger is required');
        }

        // All three required fields missing: treat the row as empty padding and skip it silently.
        if (validationErrors.length > 2) {
          console.log('Skipping empty row ' + (i + 1) + ':', validationErrors);
          continue;
        }

        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Parse German-style dates (DD.MM.YYYY, also tolerating / and - separators)
        // and reject values that do not round-trip (e.g. 31.02.2024).
        let parsedDate = null;
        const dateStr = transaction['Buchungstag'].trim();
        const dateParts = dateStr.split(/[.\/\-]/);
        if (dateParts.length === 3) {
          const day = parseInt(dateParts[0], 10);
          const month = parseInt(dateParts[1], 10) - 1;
          let year = parseInt(dateParts[2], 10);

          // Two-digit years: 00-49 => 2000s, 50-99 => 1900s.
          if (year < 100) {
            year += (year < 50) ? 2000 : 1900;
          }

          parsedDate = new Date(year, month, day);

          if (isNaN(parsedDate.getTime()) ||
              parsedDate.getDate() !== day ||
              parsedDate.getMonth() !== month ||
              parsedDate.getFullYear() !== year) {
            parsedDate = null;
            validationErrors.push('Invalid date format: ' + dateStr);
          }
        } else {
          validationErrors.push('Invalid date format: ' + dateStr);
        }

        // Date-parse failures were previously recorded but never acted on; fail the row here.
        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Normalize German number format ('.' thousands separator, ',' decimal mark).
        let numericAmount = 0;
        if (transaction['Betrag']) {
          const amountStr = transaction['Betrag'].toString().replace(/[^\d,.-]/g, '');
          const normalizedAmount = amountStr.includes(',')
            ? amountStr.replace(/\./g, '').replace(',', '.')
            : amountStr;
          numericAmount = parseFloat(normalizedAmount) || 0;
        }

        const insertQuery = `
          INSERT INTO fibdash.CSVTransactions
          (buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
           beguenstigter_zahlungspflichtiger, kontonummer_iban, bic, verwendungszweck,
           parsed_date, numeric_amount, import_batch_id, source_filename, source_row_number)
          VALUES
          (@buchungstag, @wertstellung, @umsatzart, @betrag, @betrag_original, @waehrung,
           @beguenstigter_zahlungspflichtiger, @kontonummer_iban, @bic, @verwendungszweck,
           @parsed_date, @numeric_amount, @import_batch_id, @source_filename, @source_row_number)
        `;

        await executeQuery(insertQuery, {
          buchungstag: transaction['Buchungstag'] || null,
          wertstellung: transaction['Valutadatum'] || null,
          umsatzart: transaction['Buchungstext'] || null,
          betrag: numericAmount,
          betrag_original: transaction['Betrag'] || null,
          waehrung: transaction['Waehrung'] || null,
          beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
          kontonummer_iban: transaction['Kontonummer/IBAN'] || null,
          bic: transaction['BIC (SWIFT-Code)'] || null,
          verwendungszweck: transaction['Verwendungszweck'] || null,
          parsed_date: parsedDate,
          numeric_amount: numericAmount,
          import_batch_id: importBatchId,
          source_filename: filename || null,
          source_row_number: i + 1
        });

        successCount++;
      } catch (error) {
        console.error('Error importing transaction ' + (i + 1) + ':', error);
        errors.push({
          row: i + 1,
          error: error.message,
          transaction: transaction
        });
        errorCount++;
      }
    }

    res.json({
      success: true,
      batchId: importBatchId,
      imported: successCount,
      errors: errorCount,
      details: errors.length > 0 ? errors : undefined
    });

  } catch (error) {
    console.error('Error importing CSV transactions:', error);
    res.status(500).json({ error: 'Failed to import CSV transactions' });
  }
});
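As a sanity check on the normalization above, the amount rule can be exercised in isolation. This standalone sketch mirrors the handler logic (it is not exported by this module):

// Mirrors the handler's German amount normalization.
function parseGermanAmount(value) {
  const amountStr = value.toString().replace(/[^\d,.-]/g, '');
  const normalized = amountStr.includes(',')
    ? amountStr.replace(/\./g, '').replace(',', '.') // "1.234,56" -> "1234.56"
    : amountStr;
  return parseFloat(normalized) || 0;
}

console.log(parseGermanAmount('-19,99'));   // -19.99
console.log(parseGermanAmount('1.234,56')); // 1234.56
console.log(parseGermanAmount('EUR 42'));   // 42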

// Get imported CSV transactions
router.get('/csv-transactions', authenticateToken, async (req, res) => {
  try {
    const { batchId, limit = 100, offset = 0 } = req.query;

    // Join creditor master data twice: once matched by IBAN, once via manual assignment.
    let query = `
      SELECT
        csv.*,
        k.name as kreditor_name,
        k.kreditorId as kreditor_id,
        k.is_banking as kreditor_is_banking,
        bat.assigned_kreditor_id,
        ak.name as assigned_kreditor_name
      FROM fibdash.CSVTransactions csv
      LEFT JOIN fibdash.Kreditor k ON csv.kontonummer_iban = k.iban
      LEFT JOIN fibdash.BankingAccountTransactions bat ON csv.id = bat.csv_transaction_id
      LEFT JOIN fibdash.Kreditor ak ON bat.assigned_kreditor_id = ak.id
    `;

    const params = {};

    if (batchId) {
      query += ' WHERE csv.import_batch_id = @batchId';
      params.batchId = batchId;
    }

    query += ' ORDER BY csv.parsed_date DESC, csv.id DESC';
    query += ' OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY';

    // Fall back to the defaults when the query string is missing or non-numeric,
    // so a bad value cannot turn into NaN and break the parameterized query.
    params.offset = parseInt(offset, 10) || 0;
    params.limit = parseInt(limit, 10) || 100;

    const result = await executeQuery(query, params);

    res.json(result.recordset);
  } catch (error) {
    console.error('Error fetching CSV transactions:', error);
    res.status(500).json({ error: 'Failed to fetch CSV transactions' });
  }
});
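A hedged example of paging through one batch from a client; the base URL and the token variable are assumptions:

// Fetch the second page of 50 rows for a single import batch.
const params = new URLSearchParams({
  batchId: 'import_1700000000000', // hypothetical id in the 'import_' + Date.now() format
  limit: '50',
  offset: '50'
});
const res = await fetch('http://localhost:3000/api/data/csv-transactions?' + params, {
  headers: { Authorization: 'Bearer ' + token } // token issued by the flow behind authenticateToken
});
const rows = await res.json();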

// Get CSV import batches
router.get('/csv-import-batches', authenticateToken, async (req, res) => {
  try {
    // One row per import run: batch id, source file, first import timestamp,
    // total row count, and how many rows have been processed downstream.
    const query = `
      SELECT
        import_batch_id,
        source_filename,
        MIN(import_date) as import_date,
        COUNT(*) as transaction_count,
        SUM(CASE WHEN is_processed = 1 THEN 1 ELSE 0 END) as processed_count
      FROM fibdash.CSVTransactions
      GROUP BY import_batch_id, source_filename
      ORDER BY MIN(import_date) DESC
    `;

    const result = await executeQuery(query);

    res.json(result.recordset);
  } catch (error) {
    console.error('Error fetching import batches:', error);
    res.status(500).json({ error: 'Failed to fetch import batches' });
  }
});
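The two read endpoints compose naturally: list the batches first, then load the transactions of the newest one. A sketch under the same assumed base URL and token as above:

const base = 'http://localhost:3000/api/data'; // assumed mount point
const auth = { headers: { Authorization: 'Bearer ' + token } };

const batches = await (await fetch(base + '/csv-import-batches', auth)).json();
if (batches.length > 0) {
  const newest = batches[0]; // the query already orders by import_date DESC
  const txs = await (await fetch(
    base + '/csv-transactions?batchId=' + encodeURIComponent(newest.import_batch_id), auth
  )).json();
  console.log(newest.import_batch_id + ': ' + txs.length + ' rows');
}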

module.exports = router;