Implement CSV parsing and import functionality in data.js, replacing old database-based logic. Add endpoints for retrieving available months and importing transactions, with enhanced error handling and validation for CSV data. Update transaction processing to include date and amount parsing.
@@ -1,30 +1,85 @@
const express = require('express');
const fs = require('fs');
const path = require('path');
const { authenticateToken } = require('../middleware/auth');

const router = express.Router();

// Old CSV parsing removed - now using database-based CSV import

// Get available months from database
router.get('/months', authenticateToken, async (req, res) => {
// Parse CSV data
const parseCSV = () => {
  try {
    const { executeQuery } = require('../config/database');
    const csvPath = path.join(__dirname, '../../data.csv');
    const csvData = fs.readFileSync(csvPath, 'utf8');
    const lines = csvData.split('\n');
    const headers = lines[0].split(';').map(h => h.replace(/"/g, ''));

    // Get months from both AccountingItems and CSVTransactions
    const query = `
      SELECT DISTINCT
        FORMAT(COALESCE(ai.buchungstag, csv.parsed_date), 'yyyy-MM') as month_year
      FROM (
        SELECT buchungstag FROM fibdash.AccountingItems
        UNION ALL
        SELECT parsed_date as buchungstag FROM fibdash.CSVTransactions WHERE parsed_date IS NOT NULL
      ) ai
      WHERE ai.buchungstag IS NOT NULL
      ORDER BY month_year DESC
    `;
    const transactions = [];
    for (let i = 1; i < lines.length; i++) {
      const line = lines[i];
      if (!line.trim()) continue;

      // Parse CSV line (handle semicolon-separated values with quotes)
      const values = [];
      let current = '';
      let inQuotes = false;

      for (let j = 0; j < line.length; j++) {
        const char = line[j];
        if (char === '"') {
          inQuotes = !inQuotes;
        } else if (char === ';' && !inQuotes) {
          values.push(current);
          current = '';
        } else {
          current += char;
        }
      }
      values.push(current); // Add last value

      if (values.length >= headers.length) {
        const transaction = {};
        headers.forEach((header, index) => {
          transaction[header] = values[index] || '';
        });

        // Parse date and amount
        if (transaction['Buchungstag']) {
          const dateParts = transaction['Buchungstag'].split('.');
          if (dateParts.length === 3) {
            // Convert DD.MM.YY to proper date
            const day = dateParts[0];
            const month = dateParts[1];
            const year = '20' + dateParts[2]; // Assuming 20xx
            transaction.parsedDate = new Date(year, month - 1, day);
            transaction.monthYear = `${year}-${month.padStart(2, '0')}`;
          }
        }

        // Parse amount
        if (transaction['Betrag']) {
          const amount = transaction['Betrag'].replace(',', '.').replace(/[^-0-9.]/g, '');
          transaction.numericAmount = parseFloat(amount) || 0;
        }

        transactions.push(transaction);
      }
    }

    const result = await executeQuery(query);
    const months = result.recordset.map(row => row.month_year);
    return transactions;
  } catch (error) {
    console.error('Error parsing CSV:', error);
    return [];
  }
};
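The quote-aware splitter inside parseCSV above can be exercised on its own. A minimal sketch; the splitCsvLine name and the sample line are illustrative, not taken from the commit:

const splitCsvLine = (line, delimiter = ';') => {
  const values = [];
  let current = '';
  let inQuotes = false;
  for (const char of line) {
    if (char === '"') {
      inQuotes = !inQuotes;            // toggle quoted state; quote characters are dropped
    } else if (char === delimiter && !inQuotes) {
      values.push(current);            // field boundary only outside quotes
      current = '';
    } else {
      current += char;
    }
  }
  values.push(current);                // last field has no trailing delimiter
  return values;
};

// Example: a semicolon-separated export line with a quoted field containing ';'
// splitCsvLine('"01.02.24";"Lastschrift";"Firma; GmbH";"-12,34";"EUR"')
// -> ['01.02.24', 'Lastschrift', 'Firma; GmbH', '-12,34', 'EUR']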

// Get available months
router.get('/months', authenticateToken, (req, res) => {
  try {
    const transactions = parseCSV();
    const months = [...new Set(transactions
      .filter(t => t.monthYear)
      .map(t => t.monthYear)
    )].sort().reverse(); // Newest first

    res.json({ months });
  } catch (error) {
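For reference, a client-side sketch of calling the months endpoint; the base URL and route prefix are placeholders, and it assumes authenticateToken accepts a Bearer token. The { months } response shape comes from the handler above:

// Node 18+ global fetch; token and URL are placeholders, not from the commit.
const listMonths = async (token) => {
  const res = await fetch('http://localhost:3000/api/data/months', {
    headers: { Authorization: `Bearer ${token}` }
  });
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  const { months } = await res.json();   // e.g. ['2024-03', '2024-02', ...]
  return months;
};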
@@ -99,12 +154,8 @@ const getJTLTransactions = async () => {
// Get transactions for a specific time period (month, quarter, or year)
router.get('/transactions/:timeRange', authenticateToken, async (req, res) => {
  try {
    // TODO: Update to use database queries instead of CSV file
    res.status(501).json({ error: 'Endpoint temporarily disabled - use database-based queries' });
    return;

    const { timeRange } = req.params;
    const transactions = [];
    const transactions = parseCSV();

    let filteredTransactions = [];
    let periodDescription = '';
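The filtering logic itself lies outside this hunk. A sketch of how a month-style timeRange could be matched against the monthYear field that parseCSV sets; the 'YYYY-MM' URL format is an assumption, and quarter/year handling is omitted:

// Hypothetical helper: keep transactions for one month, e.g. timeRange = '2024-03'.
const filterByMonth = (transactions, timeRange) =>
  /^\d{4}-\d{2}$/.test(timeRange)
    ? transactions.filter(t => t.monthYear === timeRange)
    : [];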
@@ -377,14 +428,10 @@ const quote = (str, maxLen = 60) => {
// DATEV export endpoint
router.get('/datev/:timeRange', authenticateToken, async (req, res) => {
  try {
    // TODO: Update to use database queries instead of CSV file
    res.status(501).json({ error: 'DATEV export temporarily disabled - use database-based queries' });
    return;

    const { timeRange } = req.params;

    // Get transactions for the time period
    const transactions = [];
    const transactions = parseCSV();
    let filteredTransactions = [];
    let periodStart, periodEnd, filename;

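The hunk header above references a quote(str, maxLen = 60) helper whose body is not shown. One plausible shape for such a helper, purely as an illustration; the truncation and escaping rules here are assumptions, not taken from the commit:

const quoteField = (str, maxLen = 60) => {
  const text = (str || '').toString().slice(0, maxLen); // cap field length
  return `"${text.replace(/"/g, '""')}"`;               // escape embedded quotes, wrap in quotes
};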
@@ -886,7 +933,132 @@ router.get('/assignable-kreditors', authenticateToken, async (req, res) => {

// CSV Import endpoints

// Test endpoint removed - use the authenticated import-csv-transactions endpoint
// Test CSV import endpoint (no auth for testing) - ACTUALLY IMPORTS TO DATABASE
router.post('/test-csv-import', async (req, res) => {
  try {
    const { executeQuery } = require('../config/database');
    const { transactions, filename, batchId, headers } = req.body;

    if (!transactions || !Array.isArray(transactions)) {
      return res.status(400).json({ error: 'Transactions array is required' });
    }

    const importBatchId = batchId || `test_import_${Date.now()}`;
    let successCount = 0;
    let errorCount = 0;
    const errors = [];

    for (let i = 0; i < transactions.length; i++) {
      const transaction = transactions[i];

      try {
        // Validate required fields
        const validationErrors = [];

        if (!transaction['Buchungstag'] || transaction['Buchungstag'].trim() === '') {
          validationErrors.push('Buchungstag is required');
        }

        if (!transaction['Betrag'] || transaction['Betrag'].toString().trim() === '') {
          validationErrors.push('Betrag is required');
        }

        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: `Validation failed: ${validationErrors.join(', ')}`,
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Parse the date
        let parsedDate = null;
        if (transaction['Buchungstag']) {
          const dateStr = transaction['Buchungstag'].trim();
          const dateParts = dateStr.split(/[.\/\-]/);
          if (dateParts.length === 3) {
            const day = parseInt(dateParts[0]);
            const month = parseInt(dateParts[1]) - 1;
            let year = parseInt(dateParts[2]);

            if (year < 100) {
              year += (year < 50) ? 2000 : 1900;
            }

            parsedDate = new Date(year, month, day);

            if (isNaN(parsedDate.getTime())) {
              parsedDate = null;
              validationErrors.push(`Invalid date format: ${dateStr}`);
            }
          }
        }
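The same day-first parsing with the two-digit-year window (below 50 means 20xx, otherwise 19xx) as a standalone sketch with sample values; the parseGermanDate name is illustrative:

const parseGermanDate = (dateStr) => {
  const parts = dateStr.trim().split(/[.\/\-]/);        // accepts 01.02.24, 01/02/24, 01-02-24
  if (parts.length !== 3) return null;
  const day = parseInt(parts[0], 10);
  const month = parseInt(parts[1], 10) - 1;             // JS months are zero-based
  let year = parseInt(parts[2], 10);
  if (year < 100) year += year < 50 ? 2000 : 1900;      // 24 -> 2024, 99 -> 1999
  const date = new Date(year, month, day);
  return isNaN(date.getTime()) ? null : date;
};

// parseGermanDate('15.03.24') -> 2024-03-15, parseGermanDate('31.12.99') -> 1999-12-31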

        // Parse the amount
        let numericAmount = 0;
        if (transaction['Betrag']) {
          const amountStr = transaction['Betrag'].toString().replace(/[^\d,.-]/g, '');
          const normalizedAmount = amountStr.replace(',', '.');
          numericAmount = parseFloat(normalizedAmount) || 0;
        }
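The comma-to-dot swap above handles plain German decimals such as '-12,34'; a value that also carries a thousands separator, e.g. '1.234,56', would be reduced to '1.234.56' and parse as 1.234. If that format can occur in the source files, a stricter normalizer along these lines could be used (a sketch assuming the export always uses a decimal comma; not part of the commit):

const parseGermanAmount = (value) => {
  const raw = (value ?? '').toString().replace(/[^\d,.\-]/g, '');  // keep digits, separators, sign
  const normalized = raw
    .replace(/\./g, '')      // drop thousands separators: 1.234,56 -> 1234,56
    .replace(',', '.');      // decimal comma -> decimal point
  const amount = parseFloat(normalized);
  return Number.isFinite(amount) ? amount : 0;
};

// parseGermanAmount('1.234,56 EUR') -> 1234.56, parseGermanAmount('-12,34') -> -12.34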

        const insertQuery = `
          INSERT INTO fibdash.CSVTransactions
          (buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
           beguenstigter_zahlungspflichtiger, kontonummer_iban, bic, verwendungszweck,
           parsed_date, numeric_amount, import_batch_id, source_filename, source_row_number)
          VALUES
          (@buchungstag, @wertstellung, @umsatzart, @betrag, @betrag_original, @waehrung,
           @beguenstigter_zahlungspflichtiger, @kontonummer_iban, @bic, @verwendungszweck,
           @parsed_date, @numeric_amount, @import_batch_id, @source_filename, @source_row_number)
        `;

        await executeQuery(insertQuery, {
          buchungstag: transaction['Buchungstag'] || null,
          wertstellung: transaction['Valutadatum'] || null,
          umsatzart: transaction['Buchungstext'] || null,
          betrag: numericAmount,
          betrag_original: transaction['Betrag'] || null,
          waehrung: transaction['Waehrung'] || null,
          beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
          kontonummer_iban: transaction['Kontonummer/IBAN'] || null,
          bic: transaction['BIC (SWIFT-Code)'] || null,
          verwendungszweck: transaction['Verwendungszweck'] || null,
          parsed_date: parsedDate,
          numeric_amount: numericAmount,
          import_batch_id: importBatchId,
          source_filename: filename || 'test_import',
          source_row_number: i + 1
        });

        successCount++;
      } catch (error) {
        console.error(`Error importing transaction ${i + 1}:`, error);
        errors.push({
          row: i + 1,
          error: error.message,
          transaction: transaction
        });
        errorCount++;
      }
    }

    res.json({
      success: true,
      batchId: importBatchId,
      imported: successCount,
      errors: errorCount,
      details: errors.length > 0 ? errors : undefined,
      paypalTransaction: transactions.find(t => t['Kontonummer/IBAN'] === 'LU89751000135104200E')
    });

  } catch (error) {
    console.error('Test import error:', error);
    res.status(500).json({ error: 'Test import failed' });
  }
});
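A request sketch for the test endpoint above; the base URL is a placeholder, while the body shape follows the destructuring and validation in the handler (transactions keyed by the German CSV headers, optional filename and batchId):

// Node 18+ global fetch; the route is unauthenticated by design ("no auth for testing").
const body = {
  filename: 'umsatz-export.csv',                       // placeholder file name
  batchId: `manual_${Date.now()}`,
  transactions: [
    { 'Buchungstag': '15.03.24', 'Betrag': '-12,34', 'Verwendungszweck': 'Testbuchung' }
  ]
};

const res = await fetch('http://localhost:3000/test-csv-import', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(body)
});
console.log(await res.json());   // { success: true, batchId, imported: 1, errors: 0, ... }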

// Import CSV transactions to database
router.post('/import-csv-transactions', authenticateToken, async (req, res) => {