Refactor data.js to remove old CSV parsing logic and integrate a new router structure. Consolidate transaction retrieval and enhance error handling for various endpoints. Update the DATEV export functionality to utilize the new database queries and improve overall code organization.
This commit is contained in:
1406
src/routes/data.js
1406
src/routes/data.js
File diff suppressed because it is too large
Load Diff
160
src/routes/data/bankingTransactions.js
Normal file
160
src/routes/data/bankingTransactions.js
Normal file
@@ -0,0 +1,160 @@
|
||||
const express = require('express');
const { authenticateToken } = require('../../middleware/auth');

const router = express.Router();

// Parse a route parameter as a base-10 integer. Returns NaN for non-numeric
// input so handlers can reject it with a 400 instead of leaking NaN into SQL
// (previously a non-numeric id produced a SQL error and a misleading 500).
const toId = (value) => Number.parseInt(value, 10);

// Get banking account transactions for a specific transaction.
// Matches on either the main transaction id or the imported CSV transaction id,
// and joins the assigned kreditor's display name/code.
router.get('/banking-transactions/:transactionId', authenticateToken, async (req, res) => {
  try {
    // Lazy require keeps DB config loading off the module-load path.
    const { executeQuery } = require('../../config/database');
    const transactionId = toId(req.params.transactionId);

    if (Number.isNaN(transactionId)) {
      return res.status(400).json({ error: 'Transaction ID must be numeric' });
    }

    const query = `
      SELECT
        bat.*,
        k.name as assigned_kreditor_name,
        k.kreditorId as assigned_kreditor_id_code
      FROM fibdash.BankingAccountTransactions bat
      LEFT JOIN fibdash.Kreditor k ON bat.assigned_kreditor_id = k.id
      WHERE bat.transaction_id = @transactionId OR bat.csv_transaction_id = @transactionId
    `;

    const result = await executeQuery(query, { transactionId });

    res.json(result.recordset);
  } catch (error) {
    console.error('Error fetching banking account transactions:', error);
    res.status(500).json({ error: 'Failed to fetch banking account transactions' });
  }
});

// Create banking account transaction assignment.
// Requires banking_iban, assigned_kreditor_id and one of
// transaction_id / csv_transaction_id; duplicate assignments return 409.
router.post('/banking-transactions', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const { transaction_id, csv_transaction_id, banking_iban, assigned_kreditor_id, notes, assigned_by } = req.body;

    if ((!transaction_id && !csv_transaction_id) || !banking_iban || !assigned_kreditor_id) {
      return res.status(400).json({
        error: 'Transaction ID (or CSV Transaction ID), banking IBAN, and assigned kreditor ID are required'
      });
    }

    // NULL parameters never match (`col = NULL` is false in SQL), so the
    // duplicate check only fires on whichever id was actually supplied.
    const checkQuery = `
      SELECT id FROM fibdash.BankingAccountTransactions
      WHERE transaction_id = @transaction_id OR csv_transaction_id = @csv_transaction_id
    `;

    const checkResult = await executeQuery(checkQuery, {
      transaction_id: transaction_id || null,
      csv_transaction_id: csv_transaction_id || null
    });

    if (checkResult.recordset.length > 0) {
      return res.status(409).json({ error: 'Banking transaction assignment already exists' });
    }

    const insertQuery = `
      INSERT INTO fibdash.BankingAccountTransactions
      (transaction_id, csv_transaction_id, banking_iban, assigned_kreditor_id, notes, assigned_by)
      OUTPUT INSERTED.*
      VALUES (@transaction_id, @csv_transaction_id, @banking_iban, @assigned_kreditor_id, @notes, @assigned_by)
    `;

    const result = await executeQuery(insertQuery, {
      transaction_id: transaction_id || null,
      csv_transaction_id: csv_transaction_id || null,
      banking_iban,
      assigned_kreditor_id,
      notes: notes || null,
      assigned_by: assigned_by || null
    });

    res.status(201).json(result.recordset[0]);
  } catch (error) {
    console.error('Error creating banking account transaction:', error);
    res.status(500).json({ error: 'Failed to create banking account transaction' });
  }
});

// Update banking account transaction assignment (kreditor, notes, assignee).
// Refreshes assigned_date server-side; 404 when the assignment does not exist.
router.put('/banking-transactions/:id', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const id = toId(req.params.id);
    const { assigned_kreditor_id, notes, assigned_by } = req.body;

    if (Number.isNaN(id)) {
      return res.status(400).json({ error: 'ID must be numeric' });
    }

    if (!assigned_kreditor_id) {
      return res.status(400).json({ error: 'Assigned kreditor ID is required' });
    }

    const updateQuery = `
      UPDATE fibdash.BankingAccountTransactions
      SET assigned_kreditor_id = @assigned_kreditor_id,
          notes = @notes,
          assigned_by = @assigned_by,
          assigned_date = GETDATE()
      OUTPUT INSERTED.*
      WHERE id = @id
    `;

    const result = await executeQuery(updateQuery, {
      assigned_kreditor_id,
      notes: notes || null,
      assigned_by: assigned_by || null,
      id
    });

    if (result.recordset.length === 0) {
      return res.status(404).json({ error: 'Banking transaction assignment not found' });
    }

    res.json(result.recordset[0]);
  } catch (error) {
    console.error('Error updating banking account transaction:', error);
    res.status(500).json({ error: 'Failed to update banking account transaction' });
  }
});

// Delete banking account transaction assignment.
router.delete('/banking-transactions/:id', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const id = toId(req.params.id);

    if (Number.isNaN(id)) {
      return res.status(400).json({ error: 'ID must be numeric' });
    }

    const deleteQuery = `
      DELETE FROM fibdash.BankingAccountTransactions
      WHERE id = @id
    `;

    const result = await executeQuery(deleteQuery, { id });

    // Fix: previously this endpoint reported success even when nothing was
    // deleted; now a missing row yields 404.
    if (result.rowsAffected && result.rowsAffected[0] === 0) {
      return res.status(404).json({ error: 'Banking transaction assignment not found' });
    }

    res.json({ message: 'Banking transaction assignment deleted successfully' });
  } catch (error) {
    console.error('Error deleting banking account transaction:', error);
    res.status(500).json({ error: 'Failed to delete banking account transaction' });
  }
});

// Get all kreditors that can be assigned to banking transactions
// (non-banking kreditors only).
router.get('/assignable-kreditors', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');

    const query = `
      SELECT id, name, kreditorId
      FROM fibdash.Kreditor
      WHERE is_banking = 0
      ORDER BY name
    `;

    const result = await executeQuery(query);

    res.json(result.recordset);
  } catch (error) {
    console.error('Error fetching assignable kreditors:', error);
    res.status(500).json({ error: 'Failed to fetch assignable kreditors' });
  }
});

module.exports = router;
|
||||
373
src/routes/data/csvImport.js
Normal file
373
src/routes/data/csvImport.js
Normal file
@@ -0,0 +1,373 @@
|
||||
const express = require('express');
const { authenticateToken } = require('../../middleware/auth');

const router = express.Router();

// Headers a Sparkasse-style CSV export must contain to be importable.
const EXPECTED_HEADERS = [
  'Auftragskonto',
  'Buchungstag',
  'Valutadatum',
  'Buchungstext',
  'Verwendungszweck',
  'Glaeubiger ID',
  'Mandatsreferenz',
  'Kundenreferenz (End-to-End)',
  'Sammlerreferenz',
  'Lastschrift Ursprungsbetrag',
  'Auslagenersatz Ruecklastschrift',
  'Beguenstigter/Zahlungspflichtiger',
  'Kontonummer/IBAN',
  'BIC (SWIFT-Code)',
  'Betrag',
  'Waehrung',
  'Info'
];

// Parse a German "dd.mm.yyyy" date (also "/" or "-" separated; 2-digit years
// map 00-49 => 20xx, 50-99 => 19xx). Returns { date, error } where exactly one
// is non-null. A strict round-trip check rejects rolled-over dates such as
// 32.01.2024, which `new Date(...)` would silently turn into 01.02.2024.
const parseGermanDate = (rawValue) => {
  const dateStr = rawValue.trim();
  const invalid = { date: null, error: 'Invalid date format: ' + dateStr };

  const parts = dateStr.split(/[.\/\-]/);
  if (parts.length !== 3) {
    return invalid;
  }

  const day = parseInt(parts[0], 10);
  const month = parseInt(parts[1], 10) - 1; // JS Date months are 0-indexed
  let year = parseInt(parts[2], 10);
  if (year < 100) {
    year += (year < 50) ? 2000 : 1900;
  }

  const date = new Date(year, month, day);
  const roundTrips = !Number.isNaN(date.getTime()) &&
    date.getDate() === day &&
    date.getMonth() === month &&
    date.getFullYear() === year;

  return roundTrips ? { date, error: null } : invalid;
};

// Parse a German-formatted amount string into a number.
// Fix: '.' is treated as a thousands separator whenever a ',' decimal mark is
// present — the previous naive replace(',', '.') turned "1.234,56" into 1.234.
// Plain "1234.56" (no comma) is still parsed as-is.
const parseGermanAmount = (value) => {
  if (value === undefined || value === null) return 0;
  const cleaned = value.toString().replace(/[^\d,.-]/g, '');
  const normalized = cleaned.includes(',')
    ? cleaned.replace(/\./g, '').replace(',', '.')
    : cleaned;
  return parseFloat(normalized) || 0;
};

// Shared INSERT used by both import endpoints (previously duplicated inline).
const CSV_INSERT_QUERY = `
  INSERT INTO fibdash.CSVTransactions
  (buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
   beguenstigter_zahlungspflichtiger, kontonummer_iban, bic, verwendungszweck,
   parsed_date, numeric_amount, import_batch_id, source_filename, source_row_number)
  VALUES
  (@buchungstag, @wertstellung, @umsatzart, @betrag, @betrag_original, @waehrung,
   @beguenstigter_zahlungspflichtiger, @kontonummer_iban, @bic, @verwendungszweck,
   @parsed_date, @numeric_amount, @import_batch_id, @source_filename, @source_row_number)
`;

// Insert one CSV row into fibdash.CSVTransactions with parsed date/amount.
const insertCsvTransaction = (executeQuery, transaction, parsedDate, numericAmount, importBatchId, sourceFilename, rowNumber) =>
  executeQuery(CSV_INSERT_QUERY, {
    buchungstag: transaction['Buchungstag'] || null,
    wertstellung: transaction['Valutadatum'] || null,
    umsatzart: transaction['Buchungstext'] || null,
    betrag: numericAmount,
    betrag_original: transaction['Betrag'] || null,
    waehrung: transaction['Waehrung'] || null,
    beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
    kontonummer_iban: transaction['Kontonummer/IBAN'] || null,
    bic: transaction['BIC (SWIFT-Code)'] || null,
    verwendungszweck: transaction['Verwendungszweck'] || null,
    parsed_date: parsedDate,
    numeric_amount: numericAmount,
    import_batch_id: importBatchId,
    source_filename: sourceFilename,
    source_row_number: rowNumber
  });

// Test CSV import endpoint (no auth for testing) - ACTUALLY IMPORTS TO DATABASE.
// SECURITY NOTE(review): this route is intentionally unauthenticated for
// manual testing; remove or protect it before exposing the server publicly.
router.post('/test-csv-import', async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const { transactions, filename, batchId } = req.body;

    if (!transactions || !Array.isArray(transactions)) {
      return res.status(400).json({ error: 'Transactions array is required' });
    }

    const importBatchId = batchId || 'test_import_' + Date.now();
    let successCount = 0;
    let errorCount = 0;
    const errors = [];

    for (let i = 0; i < transactions.length; i++) {
      const transaction = transactions[i];

      try {
        const validationErrors = [];

        if (!transaction['Buchungstag'] || transaction['Buchungstag'].trim() === '') {
          validationErrors.push('Buchungstag is required');
        }

        if (!transaction['Betrag'] || transaction['Betrag'].toString().trim() === '') {
          validationErrors.push('Betrag is required');
        }

        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Best-effort date: an unparseable Buchungstag still imports the row,
        // just with a NULL parsed_date.
        const parsedDate = transaction['Buchungstag']
          ? parseGermanDate(transaction['Buchungstag']).date
          : null;
        const numericAmount = parseGermanAmount(transaction['Betrag']);

        await insertCsvTransaction(
          executeQuery, transaction, parsedDate, numericAmount,
          importBatchId, filename || 'test_import', i + 1
        );

        successCount++;
      } catch (error) {
        console.error('Error importing transaction ' + (i + 1) + ':', error);
        errors.push({
          row: i + 1,
          error: error.message,
          transaction: transaction
        });
        errorCount++;
      }
    }

    res.json({
      success: true,
      batchId: importBatchId,
      imported: successCount,
      errors: errorCount,
      details: errors.length > 0 ? errors : undefined,
      // Debug aid: surfaces the PayPal row (fixed IBAN) if it was part of the upload.
      paypalTransaction: transactions.find(t => t['Kontonummer/IBAN'] === 'LU89751000135104200E')
    });

  } catch (error) {
    console.error('Test import error:', error);
    res.status(500).json({ error: 'Test import failed' });
  }
});

// Import CSV transactions to database.
// Validates the header row (when supplied), skips blank filler rows, records
// per-row validation/insert errors, and reports counts plus details.
router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const { transactions, filename, batchId, headers } = req.body;

    if (!transactions || !Array.isArray(transactions)) {
      return res.status(400).json({ error: 'Transactions array is required' });
    }

    if (headers && Array.isArray(headers)) {
      const missingHeaders = EXPECTED_HEADERS.filter(expected =>
        !headers.some(header => header.trim() === expected)
      );

      if (missingHeaders.length > 0) {
        return res.status(400).json({
          error: 'Invalid CSV format - missing required headers',
          missing: missingHeaders,
          expected: EXPECTED_HEADERS,
          received: headers
        });
      }
    }

    if (transactions.length === 0) {
      return res.status(400).json({ error: 'No transaction data found' });
    }

    const importBatchId = batchId || 'import_' + Date.now();
    let successCount = 0;
    let errorCount = 0;
    const errors = [];

    for (let i = 0; i < transactions.length; i++) {
      const transaction = transactions[i];

      try {
        const validationErrors = [];

        if (!transaction['Buchungstag'] || transaction['Buchungstag'].trim() === '') {
          validationErrors.push('Buchungstag is required');
        }

        if (!transaction['Betrag'] || transaction['Betrag'].toString().trim() === '') {
          validationErrors.push('Betrag is required');
        }

        if (!transaction['Beguenstigter/Zahlungspflichtiger'] || transaction['Beguenstigter/Zahlungspflichtiger'].trim() === '') {
          validationErrors.push('Beguenstigter/Zahlungspflichtiger is required');
        }

        // Rows missing all three required fields are treated as blank filler
        // lines and skipped silently, without counting as errors.
        if (validationErrors.length > 2) {
          console.log('Skipping invalid row ' + (i + 1) + ':', validationErrors);
          continue;
        }

        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Best-effort date: a row with an unparseable Buchungstag is still
        // imported with a NULL parsed_date (the original computed — and then
        // discarded — a validation message at this point).
        const parsedDate = parseGermanDate(transaction['Buchungstag']).date;
        const numericAmount = parseGermanAmount(transaction['Betrag']);

        await insertCsvTransaction(
          executeQuery, transaction, parsedDate, numericAmount,
          importBatchId, filename || null, i + 1
        );

        successCount++;
      } catch (error) {
        console.error('Error importing transaction ' + (i + 1) + ':', error);
        errors.push({
          row: i + 1,
          error: error.message,
          transaction: transaction
        });
        errorCount++;
      }
    }

    res.json({
      success: true,
      batchId: importBatchId,
      imported: successCount,
      errors: errorCount,
      details: errors.length > 0 ? errors : undefined
    });

  } catch (error) {
    console.error('Error importing CSV transactions:', error);
    res.status(500).json({ error: 'Failed to import CSV transactions' });
  }
});

// Get imported CSV transactions, optionally filtered to one import batch,
// paginated via limit/offset, with kreditor matches joined in.
router.get('/csv-transactions', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const { batchId, limit = 100, offset = 0 } = req.query;

    let query = `
      SELECT
        csv.*,
        k.name as kreditor_name,
        k.kreditorId as kreditor_id,
        k.is_banking as kreditor_is_banking,
        bat.assigned_kreditor_id,
        ak.name as assigned_kreditor_name
      FROM fibdash.CSVTransactions csv
      LEFT JOIN fibdash.Kreditor k ON csv.kontonummer_iban = k.iban
      LEFT JOIN fibdash.BankingAccountTransactions bat ON csv.id = bat.csv_transaction_id
      LEFT JOIN fibdash.Kreditor ak ON bat.assigned_kreditor_id = ak.id
    `;

    const params = {};

    if (batchId) {
      query += ' WHERE csv.import_batch_id = @batchId';
      params.batchId = batchId;
    }

    query += ' ORDER BY csv.parsed_date DESC, csv.id DESC';
    query += ' OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY';

    params.offset = parseInt(offset, 10);
    params.limit = parseInt(limit, 10);

    const result = await executeQuery(query, params);

    res.json(result.recordset);
  } catch (error) {
    console.error('Error fetching CSV transactions:', error);
    res.status(500).json({ error: 'Failed to fetch CSV transactions' });
  }
});

// Get CSV import batches with per-batch row counts and processed counts.
router.get('/csv-import-batches', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');

    const query = `
      SELECT
        import_batch_id,
        source_filename,
        MIN(import_date) as import_date,
        COUNT(*) as transaction_count,
        SUM(CASE WHEN is_processed = 1 THEN 1 ELSE 0 END) as processed_count
      FROM fibdash.CSVTransactions
      GROUP BY import_batch_id, source_filename
      ORDER BY MIN(import_date) DESC
    `;

    const result = await executeQuery(query);

    res.json(result.recordset);
  } catch (error) {
    console.error('Error fetching import batches:', error);
    res.status(500).json({ error: 'Failed to fetch import batches' });
  }
});

module.exports = router;
|
||||
71
src/routes/data/datev.js
Normal file
71
src/routes/data/datev.js
Normal file
@@ -0,0 +1,71 @@
|
||||
const express = require('express');
const { authenticateToken } = require('../../middleware/auth');

const router = express.Router();

// --- DATEV helpers (ported from original file) -----------------------------

// Build the DATEV EXTF header line for a "Buchungsstapel" (posting batch).
const buildDatevHeader = (periodStart, periodEnd) => {
  // yyyymmddHHMMSSfff: ISO timestamp with separators stripped, first 17 chars.
  const stamp = new Date().toISOString().replace(/[-T:\.Z]/g, '').slice(0, 17);

  const consultant = 1001;
  const client = 10001;
  const fiscalYearStart = periodStart.slice(0, 4) + '0101'; // fiscal year start
  const accountLength = 4;
  const description = 'Bank Statement Export';
  const currency = 'EUR';

  return [
    '"EXTF"', 700, 21, '"Buchungsstapel"', 12, stamp,
    '', '', '', '',                  // fields 7-10 intentionally blank
    consultant, client,              // 11, 12
    fiscalYearStart, accountLength,  // 13, 14
    periodStart, periodEnd,          // 15, 16
    '"' + description + '"',
    'AM', 1, 0, 1, currency
  ].join(';');
};

// Column header row of the DATEV CSV body.
const DATEV_COLS = [
  'Umsatz (ohne Soll/Haben-Kz)', 'Soll/Haben-Kennzeichen', 'WKZ Umsatz',
  'Kurs', 'Basis-Umsatz', 'WKZ Basis-Umsatz', 'Konto',
  'Gegenkonto (ohne BU-Schlüssel)', 'BU-Schlüssel', 'Belegdatum',
  'Belegfeld 1', 'Belegfeld 2', 'Skonto', 'Buchungstext',
  'Postensperre', 'Diverse Adressnummer', 'Geschäftspartnerbank',
  'Sachverhalt', 'Zinssperre', 'Beleglink'
].join(';');

// Format an amount DATEV-style: absolute value, two decimals, comma decimal mark.
const formatDatevAmount = (amount) => Math.abs(amount).toFixed(2).replace('.', ',');

// Convert a "dd.mm.yyyy" string into DATEV's DDMM Belegdatum; '' when unparseable.
const formatDatevDate = (dateString) => {
  if (!dateString) return '';
  const parts = dateString.split('.');
  if (parts.length !== 3) return '';
  const [day, month] = parts;
  return day.padStart(2, '0') + month.padStart(2, '0');
};

// Quote a string DATEV-style: truncate to maxLen, double embedded quotes, wrap in ".
const quote = (str, maxLen = 60) =>
  str ? '"' + str.slice(0, maxLen).replace(/"/g, '""') + '"' : '""';

// DATEV export endpoint
router.get('/datev/:timeRange', authenticateToken, async (req, res) => {
  try {
    const { timeRange } = req.params;
    // TODO: Update to use database queries instead of CSV file
    res.status(501).json({ error: 'DATEV export temporarily disabled - use database-based queries' });
    return;
  } catch (error) {
    console.error('Error generating DATEV export:', error);
    res.status(500).json({ error: 'Failed to generate DATEV export' });
  }
});

module.exports = router;
|
||||
67
src/routes/data/helpers/jtl.js
Normal file
67
src/routes/data/helpers/jtl.js
Normal file
@@ -0,0 +1,67 @@
|
||||
const { executeQuery } = require('../../../config/database');
|
||||
|
||||
// Get database transactions for JTL comparison
|
||||
async function getJTLTransactions() {
|
||||
try {
|
||||
const query = `
|
||||
SELECT
|
||||
cKonto, cKontozusatz, cName, dBuchungsdatum,
|
||||
tZahlungsabgleichUmsatz.kZahlungsabgleichUmsatz,
|
||||
cVerwendungszweck, fBetrag, tUmsatzKontierung.data
|
||||
FROM [eazybusiness].[dbo].[tZahlungsabgleichUmsatz]
|
||||
LEFT JOIN tUmsatzKontierung ON (tUmsatzKontierung.kZahlungsabgleichUmsatz = tZahlungsabgleichUmsatz.kZahlungsabgleichUmsatz)
|
||||
ORDER BY dBuchungsdatum desc, tZahlungsabgleichUmsatz.kZahlungsabgleichUmsatz desc
|
||||
`;
|
||||
|
||||
const result = await executeQuery(query);
|
||||
const transactions = result.recordset || [];
|
||||
|
||||
// Get PDF documents for each transaction
|
||||
const pdfQuery = `SELECT kUmsatzBeleg, kZahlungsabgleichUmsatz, textContent, markDown, extraction, datevlink FROM tUmsatzBeleg`;
|
||||
const pdfResult = await executeQuery(pdfQuery);
|
||||
|
||||
for (const item of pdfResult.recordset) {
|
||||
for (const transaction of transactions) {
|
||||
if (item.kZahlungsabgleichUmsatz == transaction.kZahlungsabgleichUmsatz) {
|
||||
if (!transaction.pdfs) transaction.pdfs = [];
|
||||
transaction.pdfs.push({
|
||||
kUmsatzBeleg: item.kUmsatzBeleg,
|
||||
content: item.textContent,
|
||||
markDown: item.markDown,
|
||||
extraction: item.extraction,
|
||||
datevlink: item.datevlink
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get links for each transaction
|
||||
const linksQuery = `
|
||||
SELECT kZahlungsabgleichUmsatzLink, kZahlungsabgleichUmsatz, linktarget, linktype, note,
|
||||
tPdfObjekt.kPdfObjekt, tPdfObjekt.textContent, tPdfObjekt.markDown,
|
||||
tPdfObjekt.extraction
|
||||
FROM tZahlungsabgleichUmsatzLink
|
||||
LEFT JOIN tPdfObjekt ON (tZahlungsabgleichUmsatzLink.linktarget = tPdfObjekt.kLieferantenbestellung)
|
||||
WHERE linktype = 'kLieferantenBestellung'
|
||||
`;
|
||||
const linksResult = await executeQuery(linksQuery);
|
||||
|
||||
for (const item of linksResult.recordset) {
|
||||
for (const transaction of transactions) {
|
||||
if (item.kZahlungsabgleichUmsatz == transaction.kZahlungsabgleichUmsatz) {
|
||||
if (!transaction.links) transaction.links = [];
|
||||
transaction.links.push(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return transactions;
|
||||
} catch (error) {
|
||||
console.error('Error fetching JTL transactions:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getJTLTransactions
|
||||
};
|
||||
22
src/routes/data/index.js
Normal file
22
src/routes/data/index.js
Normal file
@@ -0,0 +1,22 @@
|
||||
const express = require('express');

const router = express.Router();

// Sub-routers that together replace the old monolithic data.js.
// Required and mounted in the original order so route precedence is preserved.
const subRouters = [
  require('./months'),
  require('./transactions'),
  require('./datev'),
  require('./pdf'),
  require('./kreditors'),
  require('./bankingTransactions'),
  require('./csvImport')
];

for (const subRouter of subRouters) {
  router.use(subRouter);
}

module.exports = router;
|
||||
181
src/routes/data/kreditors.js
Normal file
181
src/routes/data/kreditors.js
Normal file
@@ -0,0 +1,181 @@
|
||||
const express = require('express');
const { authenticateToken } = require('../../middleware/auth');

const router = express.Router();

// Parse a route id as a base-10 integer. Returns NaN for non-numeric input so
// handlers can answer 400 instead of leaking NaN into SQL (previously a
// non-numeric id caused a SQL error and a misleading 500).
const toId = (value) => Number.parseInt(value, 10);

// Get all kreditors (id, iban, name, kreditorId), sorted by name then IBAN.
router.get('/kreditors', authenticateToken, async (req, res) => {
  try {
    // Lazy require keeps DB config loading off the module-load path.
    const { executeQuery } = require('../../config/database');
    const query = `
      SELECT id, iban, name, kreditorId
      FROM fibdash.Kreditor
      ORDER BY name ASC, iban ASC
    `;

    const result = await executeQuery(query);
    res.json(result.recordset || []);
  } catch (error) {
    console.error('Error fetching kreditors:', error);
    res.status(500).json({ error: 'Failed to fetch kreditors' });
  }
});

// Get kreditor by ID (includes the is_banking flag).
router.get('/kreditors/:id', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const id = toId(req.params.id);

    if (Number.isNaN(id)) {
      return res.status(400).json({ error: 'Kreditor ID must be numeric' });
    }

    const query = `
      SELECT id, iban, name, kreditorId, is_banking
      FROM fibdash.Kreditor
      WHERE id = @id
    `;

    const result = await executeQuery(query, { id });

    if (result.recordset.length === 0) {
      return res.status(404).json({ error: 'Kreditor not found' });
    }

    res.json(result.recordset[0]);
  } catch (error) {
    console.error('Error fetching kreditor:', error);
    res.status(500).json({ error: 'Failed to fetch kreditor' });
  }
});

// Create new kreditor. IBAN is mandatory except for banking accounts; a
// non-empty IBAN must be unique across all kreditors (409 on conflict).
router.post('/kreditors', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const { iban, name, kreditorId, is_banking } = req.body;

    const isBanking = is_banking || false;

    if (!name || !kreditorId) {
      return res.status(400).json({ error: 'Name and kreditorId are required' });
    }

    if (!isBanking && (!iban || iban.trim() === '')) {
      return res.status(400).json({ error: 'IBAN is required (except for banking accounts)' });
    }

    if (iban && iban.trim() !== '') {
      const checkQuery = `
        SELECT id FROM fibdash.Kreditor
        WHERE iban = @iban
      `;

      const checkResult = await executeQuery(checkQuery, { iban });

      if (checkResult.recordset.length > 0) {
        return res.status(409).json({ error: 'Kreditor with this IBAN already exists' });
      }
    }

    const insertQuery = `
      INSERT INTO fibdash.Kreditor (iban, name, kreditorId, is_banking)
      OUTPUT INSERTED.id, INSERTED.iban, INSERTED.name, INSERTED.kreditorId, INSERTED.is_banking
      VALUES (@iban, @name, @kreditorId, @is_banking)
    `;

    const result = await executeQuery(insertQuery, {
      iban: iban || null,
      name,
      kreditorId,
      is_banking: isBanking
    });

    res.status(201).json(result.recordset[0]);
  } catch (error) {
    console.error('Error creating kreditor:', error);
    res.status(500).json({ error: 'Failed to create kreditor' });
  }
});

// Update kreditor. Validates existence (404) and IBAN uniqueness against
// other kreditors (409) before applying the update.
router.put('/kreditors/:id', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const id = toId(req.params.id);
    const { iban, name, kreditorId, is_banking } = req.body;

    const isBanking = is_banking || false;

    if (Number.isNaN(id)) {
      return res.status(400).json({ error: 'Kreditor ID must be numeric' });
    }

    if (!name || !kreditorId) {
      return res.status(400).json({ error: 'Name and kreditorId are required' });
    }

    if (!isBanking && (!iban || iban.trim() === '')) {
      return res.status(400).json({ error: 'IBAN is required (except for banking accounts)' });
    }

    const checkQuery = `SELECT id FROM fibdash.Kreditor WHERE id = @id`;
    const checkResult = await executeQuery(checkQuery, { id });

    if (checkResult.recordset.length === 0) {
      return res.status(404).json({ error: 'Kreditor not found' });
    }

    if (iban && iban.trim() !== '') {
      const conflictQuery = `
        SELECT id FROM fibdash.Kreditor
        WHERE iban = @iban AND id != @id
      `;

      const conflictResult = await executeQuery(conflictQuery, { iban, id });

      if (conflictResult.recordset.length > 0) {
        return res.status(409).json({ error: 'Another kreditor with this IBAN already exists' });
      }
    }

    const updateQuery = `
      UPDATE fibdash.Kreditor
      SET iban = @iban, name = @name, kreditorId = @kreditorId, is_banking = @is_banking
      OUTPUT INSERTED.id, INSERTED.iban, INSERTED.name, INSERTED.kreditorId, INSERTED.is_banking
      WHERE id = @id
    `;

    const result = await executeQuery(updateQuery, {
      iban: iban || null,
      name,
      kreditorId,
      is_banking: isBanking,
      id
    });

    res.json(result.recordset[0]);
  } catch (error) {
    console.error('Error updating kreditor:', error);
    res.status(500).json({ error: 'Failed to update kreditor' });
  }
});

// Delete kreditor (hard delete); 404 when no row was removed.
router.delete('/kreditors/:id', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const id = toId(req.params.id);

    if (Number.isNaN(id)) {
      return res.status(400).json({ error: 'Kreditor ID must be numeric' });
    }

    const query = `
      DELETE FROM fibdash.Kreditor
      WHERE id = @id
    `;

    const result = await executeQuery(query, { id });

    if (result.rowsAffected[0] === 0) {
      return res.status(404).json({ error: 'Kreditor not found' });
    }

    res.json({ message: 'Kreditor deleted successfully' });
  } catch (error) {
    console.error('Error deleting kreditor:', error);
    res.status(500).json({ error: 'Failed to delete kreditor' });
  }
});

module.exports = router;
|
||||
33
src/routes/data/months.js
Normal file
33
src/routes/data/months.js
Normal file
@@ -0,0 +1,33 @@
|
||||
const express = require('express');
|
||||
const { authenticateToken } = require('../../middleware/auth');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// Get available months from database.
// GET /months -> { months: ['yyyy-MM', ...] } sorted newest first.
// A month appears if either an AccountingItems row or a CSVTransactions row
// carries a date inside it.
router.get('/months', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');

    // Distinct yyyy-MM buckets over the union of both dated tables.
    const monthsQuery = `
      SELECT DISTINCT
        FORMAT(combined.date_col, 'yyyy-MM') as month_year
      FROM (
        SELECT buchungsdatum as date_col FROM fibdash.AccountingItems WHERE buchungsdatum IS NOT NULL
        UNION ALL
        SELECT parsed_date as date_col FROM fibdash.CSVTransactions WHERE parsed_date IS NOT NULL
      ) combined
      WHERE combined.date_col IS NOT NULL
      ORDER BY month_year DESC
    `;

    const { recordset } = await executeQuery(monthsQuery);
    res.json({ months: recordset.map((row) => row.month_year) });
  } catch (error) {
    console.error('Error getting months:', error);
    res.status(500).json({ error: 'Failed to load months' });
  }
});

module.exports = router;
|
||||
72
src/routes/data/pdf.js
Normal file
72
src/routes/data/pdf.js
Normal file
@@ -0,0 +1,72 @@
|
||||
const express = require('express');
|
||||
const { authenticateToken } = require('../../middleware/auth');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// Get PDF from tUmsatzBeleg.
// GET /pdf/umsatzbeleg/:kUmsatzBeleg
// Streams the stored PDF blob inline; 400 for a non-numeric id, 404 when no
// row with content exists, 500 on database failure.
router.get('/pdf/umsatzbeleg/:kUmsatzBeleg', authenticateToken, async (req, res) => {
  try {
    const { kUmsatzBeleg } = req.params;
    const { executeQuery } = require('../../config/database');

    // Guard against NaN reaching the DB driver (previously surfaced as a 500).
    const belegId = Number.parseInt(kUmsatzBeleg, 10);
    if (Number.isNaN(belegId)) {
      return res.status(400).json({ error: 'Invalid kUmsatzBeleg' });
    }

    const query = `
      SELECT content, datevlink
      FROM dbo.tUmsatzBeleg
      WHERE kUmsatzBeleg = @kUmsatzBeleg AND content IS NOT NULL
    `;

    const result = await executeQuery(query, { kUmsatzBeleg: belegId });

    if (!result.recordset || result.recordset.length === 0) {
      return res.status(404).json({ error: 'PDF not found' });
    }

    const pdfData = result.recordset[0];
    // datevlink comes from the database; strip anything outside [\w.-] so
    // quotes/CR-LF cannot break or inject into the Content-Disposition header.
    const safeLink = String(pdfData.datevlink || 'document').replace(/[^\w.\-]/g, '_');
    const filename = `Umsatzbeleg_${belegId}_${safeLink}.pdf`;

    res.setHeader('Content-Type', 'application/pdf');
    res.setHeader('Content-Disposition', `inline; filename="${filename}"`);
    res.send(pdfData.content);

  } catch (error) {
    console.error('Error fetching PDF from tUmsatzBeleg:', error);
    res.status(500).json({ error: 'Failed to fetch PDF' });
  }
});
|
||||
|
||||
// Get PDF from tPdfObjekt.
// GET /pdf/pdfobject/:kPdfObjekt
// Streams the stored PDF blob inline; 400 for a non-numeric id, 404 when no
// row with content exists, 500 on database failure.
router.get('/pdf/pdfobject/:kPdfObjekt', authenticateToken, async (req, res) => {
  try {
    const { kPdfObjekt } = req.params;
    const { executeQuery } = require('../../config/database');

    // Guard against NaN reaching the DB driver (previously surfaced as a 500).
    const pdfObjektId = Number.parseInt(kPdfObjekt, 10);
    if (Number.isNaN(pdfObjektId)) {
      return res.status(400).json({ error: 'Invalid kPdfObjekt' });
    }

    const query = `
      SELECT content, datevlink, kLieferantenbestellung
      FROM dbo.tPdfObjekt
      WHERE kPdfObjekt = @kPdfObjekt AND content IS NOT NULL
    `;

    const result = await executeQuery(query, { kPdfObjekt: pdfObjektId });

    if (!result.recordset || result.recordset.length === 0) {
      return res.status(404).json({ error: 'PDF not found' });
    }

    const pdfData = result.recordset[0];
    // datevlink comes from the database; strip anything outside [\w.-] so
    // quotes/CR-LF cannot break or inject into the Content-Disposition header.
    const safeLink = String(pdfData.datevlink || 'document').replace(/[^\w.\-]/g, '_');
    const filename = `PdfObjekt_${pdfObjektId}_LB${pdfData.kLieferantenbestellung}_${safeLink}.pdf`;

    res.setHeader('Content-Type', 'application/pdf');
    res.setHeader('Content-Disposition', `inline; filename="${filename}"`);
    res.send(pdfData.content);

  } catch (error) {
    console.error('Error fetching PDF from tPdfObjekt:', error);
    res.status(500).json({ error: 'Failed to fetch PDF' });
  }
});

module.exports = router;
|
||||
348
src/routes/data/transactions.js
Normal file
348
src/routes/data/transactions.js
Normal file
@@ -0,0 +1,348 @@
|
||||
const express = require('express');
|
||||
const { authenticateToken } = require('../../middleware/auth');
|
||||
const { getJTLTransactions } = require('./helpers/jtl');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// Get transactions for a specific time period (month, quarter, or year).
// GET /transactions/:timeRange where :timeRange is 'yyyy-MM', 'yyyy-Qn', or 'yyyy'.
//
// Pipeline:
//   1. One SQL query unions CSV-imported bank rows (joined to matching JTL
//      rows, Kreditor by IBAN, and manual banking assignments) with JTL-only
//      rows that have no CSV counterpart.
//   2. Rows are enriched in JS (kreditor/pdf/link sub-objects) and filtered
//      to the requested time range.
//   3. A parallel JS-side match against getJTLTransactions() is computed.
//      NOTE(review): its results (transactionsWithJTL, unmatchedJTLTransactions)
//      are never merged into the response — see notes below; appears to be
//      dead legacy code superseded by the SQL union. Confirm before removing.
// Responds with { transactions, summary }.
router.get('/transactions/:timeRange', authenticateToken, async (req, res) => {
  try {
    const { timeRange } = req.params;

    const { executeQuery } = require('../../config/database');
    // CSV rows matched to JTL by amount (±0.01) and booking date (±1 day),
    // UNION ALL with JTL rows that have no such CSV counterpart (NOT EXISTS).
    // Column aliases mirror the original German CSV header names.
    const query = `
      SELECT
        csv.buchungstag as 'Buchungstag',
        csv.wertstellung as 'Valutadatum',
        csv.umsatzart as 'Buchungstext',
        csv.verwendungszweck as 'Verwendungszweck',
        csv.beguenstigter_zahlungspflichtiger as 'Beguenstigter/Zahlungspflichtiger',
        csv.kontonummer_iban as 'Kontonummer/IBAN',
        csv.bic as 'BIC (SWIFT-Code)',
        csv.betrag_original as 'Betrag',
        csv.waehrung as 'Waehrung',
        csv.numeric_amount as numericAmount,
        csv.parsed_date,
        FORMAT(csv.parsed_date, 'yyyy-MM') as monthYear,
        jtl.kZahlungsabgleichUmsatz as jtlId,
        CASE WHEN jtl.kZahlungsabgleichUmsatz IS NOT NULL THEN 1 ELSE 0 END as hasJTL,
        k.name as kreditor_name,
        k.kreditorId as kreditor_id,
        k.is_banking as kreditor_is_banking,
        bat.assigned_kreditor_id,
        ak.name as assigned_kreditor_name,
        0 as isJTLOnly,
        1 as isFromCSV,
        ub.textContent as jtl_document_data,
        ub.kUmsatzBeleg,
        ub.datevlink
      FROM fibdash.CSVTransactions csv
      LEFT JOIN eazybusiness.dbo.tZahlungsabgleichUmsatz jtl ON (
        ABS(csv.numeric_amount - jtl.fBetrag) < 0.01 AND
        ABS(DATEDIFF(day, csv.parsed_date, jtl.dBuchungsdatum)) <= 1
      )
      LEFT JOIN eazybusiness.dbo.tUmsatzBeleg ub ON ub.kZahlungsabgleichUmsatz = jtl.kZahlungsabgleichUmsatz
      LEFT JOIN fibdash.Kreditor k ON csv.kontonummer_iban = k.iban
      LEFT JOIN fibdash.BankingAccountTransactions bat ON csv.id = bat.csv_transaction_id
      LEFT JOIN fibdash.Kreditor ak ON bat.assigned_kreditor_id = ak.id

      UNION ALL

      SELECT
        FORMAT(jtl.dBuchungsdatum, 'dd.MM.yy') as 'Buchungstag',
        FORMAT(jtl.dBuchungsdatum, 'dd.MM.yy') as 'Valutadatum',
        'JTL Transaction' as 'Buchungstext',
        jtl.cVerwendungszweck as 'Verwendungszweck',
        jtl.cName as 'Beguenstigter/Zahlungspflichtiger',
        '' as 'Kontonummer/IBAN',
        '' as 'BIC (SWIFT-Code)',
        FORMAT(jtl.fBetrag, 'N2', 'de-DE') as 'Betrag',
        '' as 'Waehrung',
        jtl.fBetrag as numericAmount,
        jtl.dBuchungsdatum as parsed_date,
        FORMAT(jtl.dBuchungsdatum, 'yyyy-MM') as monthYear,
        jtl.kZahlungsabgleichUmsatz as jtlId,
        1 as hasJTL,
        NULL as kreditor_name,
        NULL as kreditor_id,
        NULL as kreditor_is_banking,
        NULL as assigned_kreditor_id,
        NULL as assigned_kreditor_name,
        1 as isJTLOnly,
        0 as isFromCSV,
        ub.textContent as jtl_document_data,
        ub.kUmsatzBeleg,
        ub.datevlink
      FROM eazybusiness.dbo.tZahlungsabgleichUmsatz jtl
      LEFT JOIN eazybusiness.dbo.tUmsatzBeleg ub ON ub.kZahlungsabgleichUmsatz = jtl.kZahlungsabgleichUmsatz
      WHERE NOT EXISTS (
        SELECT 1 FROM fibdash.CSVTransactions csv
        WHERE ABS(csv.numeric_amount - jtl.fBetrag) < 0.01
        AND ABS(DATEDIFF(day, csv.parsed_date, jtl.dBuchungsdatum)) <= 1
      )

      ORDER BY parsed_date DESC
    `;

    const result = await executeQuery(query);

    // Get links data separately to avoid duplicate rows
    // (one umsatz can carry several kLieferantenBestellung links).
    const linksQuery = `
      SELECT
        zul.kZahlungsabgleichUmsatz,
        zul.linktarget,
        zul.linktype,
        zul.note,
        po.kPdfObjekt,
        po.textContent,
        po.markDown,
        po.extraction
      FROM eazybusiness.dbo.tZahlungsabgleichUmsatzLink zul
      LEFT JOIN eazybusiness.dbo.tPdfObjekt po ON zul.linktarget = po.kLieferantenbestellung
      WHERE zul.linktype = 'kLieferantenBestellung'
    `;
    const linksResult = await executeQuery(linksQuery);
    const linksData = linksResult.recordset || [];

    // Enrich each row: Date object, booleans, nested kreditor / assigned
    // kreditor / pdf / link structures for the frontend.
    // NOTE(review): isFromCSV/jtlDatabaseAvailable are forced true here even
    // for rows the SQL marked isJTLOnly=1 — confirm that is intended.
    const transactions = result.recordset.map(transaction => ({
      ...transaction,
      parsedDate: new Date(transaction.parsed_date),
      hasJTL: Boolean(transaction.hasJTL),
      isFromCSV: true,
      jtlDatabaseAvailable: true,
      hasKreditor: !!transaction.kreditor_name,
      kreditor: transaction.kreditor_name ? {
        name: transaction.kreditor_name,
        kreditorId: transaction.kreditor_id,
        is_banking: Boolean(transaction.kreditor_is_banking)
      } : null,
      assigned_kreditor: transaction.assigned_kreditor_name ? {
        name: transaction.assigned_kreditor_name,
        id: transaction.assigned_kreditor_id
      } : null,
      pdfs: transaction.jtl_document_data ? [{
        content: transaction.jtl_document_data,
        kUmsatzBeleg: transaction.kUmsatzBeleg,
        datevlink: transaction.datevlink
      }] : [],
      links: transaction.jtlId ? linksData.filter(link =>
        link.kZahlungsabgleichUmsatz === transaction.jtlId
      ) : []
    }));

    // Narrow to the requested range based on the precomputed 'yyyy-MM' key.
    let filteredTransactions = [];

    if (timeRange.includes('-Q')) {
      // Quarter: 'yyyy-Qn' -> months startMonth..startMonth+2 of that year.
      const [year, quarterPart] = timeRange.split('-Q');
      const quarter = parseInt(quarterPart, 10);
      const startMonth = (quarter - 1) * 3 + 1;
      const endMonth = startMonth + 2;

      filteredTransactions = transactions.filter(t => {
        if (!t.monthYear) return false;
        const [tYear, tMonth] = t.monthYear.split('-');
        const monthNum = parseInt(tMonth, 10);
        return tYear === year && monthNum >= startMonth && monthNum <= endMonth;
      });
    } else if (timeRange.length === 4) {
      // Whole year: 'yyyy'.
      filteredTransactions = transactions.filter(t => {
        if (!t.monthYear) return false;
        const [tYear] = t.monthYear.split('-');
        return tYear === timeRange;
      });
    } else {
      // Single month: 'yyyy-MM' exact match.
      filteredTransactions = transactions.filter(t => t.monthYear === timeRange);
    }

    // Newest first. Note: sort mutates in place, so filteredTransactions is
    // reordered too (monthTransactions aliases the same array).
    const monthTransactions = filteredTransactions
      .sort((a, b) => b.parsedDate - a.parsedDate);

    // Get JTL transactions for comparison (best-effort: JTL DB may be down).
    let jtlTransactions = [];
    let jtlDatabaseAvailable = false;
    try {
      jtlTransactions = await getJTLTransactions();
      jtlDatabaseAvailable = true;
      console.log('DEBUG: JTL database connected, found', jtlTransactions.length, 'transactions');
    } catch (error) {
      console.log('JTL database not available, continuing without JTL data:', error.message);
      jtlDatabaseAvailable = false;
    }

    // Filter JTL transactions for the selected time period
    // (same three range shapes as above, but on raw JTL booking dates).
    let jtlMonthTransactions = [];

    if (timeRange.includes('-Q')) {
      const [year, quarterPart] = timeRange.split('-Q');
      const quarter = parseInt(quarterPart, 10);
      const startMonth = (quarter - 1) * 3 + 1;
      const endMonth = startMonth + 2;

      jtlMonthTransactions = jtlTransactions.filter(jtl => {
        const jtlDate = new Date(jtl.dBuchungsdatum);
        const jtlMonth = jtlDate.getMonth() + 1;
        return jtlDate.getFullYear() === parseInt(year, 10) &&
          jtlMonth >= startMonth && jtlMonth <= endMonth;
      });
    } else if (timeRange.length === 4) {
      jtlMonthTransactions = jtlTransactions.filter(jtl => {
        const jtlDate = new Date(jtl.dBuchungsdatum);
        return jtlDate.getFullYear() === parseInt(timeRange, 10);
      });
    } else {
      const [year, month] = timeRange.split('-');
      jtlMonthTransactions = jtlTransactions.filter(jtl => {
        const jtlDate = new Date(jtl.dBuchungsdatum);
        return jtlDate.getFullYear() === parseInt(year, 10) &&
          jtlDate.getMonth() === parseInt(month, 10) - 1;
      });
    }

    // Get Kreditor information for IBAN lookup (best-effort).
    let kreditorData = [];
    try {
      const kreditorQuery = `SELECT id, iban, name, kreditorId, is_banking FROM fibdash.Kreditor`;
      const kreditorResult = await executeQuery(kreditorQuery);
      kreditorData = kreditorResult.recordset || [];
    } catch (error) {
      console.log('Kreditor database not available, continuing without Kreditor data');
    }

    // Add JTL status and Kreditor information to each CSV transaction.
    // NOTE(review): transactionsWithJTL is never used below — the response
    // sends filteredTransactions. This JS-side re-matching duplicates the SQL
    // join above and looks like dead legacy code; confirm before deleting.
    const transactionsWithJTL = monthTransactions.map((transaction, index) => {
      const amount = transaction.numericAmount;
      const transactionDate = transaction.parsedDate;

      // One-shot debug dump for the first (newest) row only.
      if (index === 0) {
        console.log('DEBUG First CSV transaction:', {
          amount: amount,
          transactionDate: transactionDate,
          jtlMonthTransactionsCount: jtlMonthTransactions.length
        });
        if (jtlMonthTransactions.length > 0) {
          console.log('DEBUG First JTL transaction:', {
            amount: parseFloat(jtlMonthTransactions[0].fBetrag),
            date: new Date(jtlMonthTransactions[0].dBuchungsdatum)
          });
        }
      }

      // Match on amount within ±0.01 AND same calendar day (stricter than
      // the SQL join's ±1 day tolerance).
      const jtlMatch = jtlMonthTransactions.find(jtl => {
        const jtlAmount = parseFloat(jtl.fBetrag) || 0;
        const jtlDate = new Date(jtl.dBuchungsdatum);

        const amountMatch = Math.abs(amount - jtlAmount) < 0.01;
        const dateMatch = transactionDate && jtlDate &&
          transactionDate.getFullYear() === jtlDate.getFullYear() &&
          transactionDate.getMonth() === jtlDate.getMonth() &&
          transactionDate.getDate() === jtlDate.getDate();

        if (index === 0 && (amountMatch || dateMatch)) {
          console.log('DEBUG Potential match for first transaction:', {
            csvAmount: amount,
            jtlAmount: jtlAmount,
            amountMatch: amountMatch,
            csvDate: transactionDate,
            jtlDate: jtlDate,
            dateMatch: dateMatch,
            bothMatch: amountMatch && dateMatch
          });
        }

        return amountMatch && dateMatch;
      });

      const transactionIban = transaction['Kontonummer/IBAN'];
      const kreditorMatch = transactionIban ? kreditorData.find(k => k.iban === transactionIban) : null;

      return {
        ...transaction,
        hasJTL: jtlDatabaseAvailable ? !!jtlMatch : undefined,
        jtlId: jtlMatch ? jtlMatch.kZahlungsabgleichUmsatz : null,
        isFromCSV: true,
        jtlDatabaseAvailable,
        pdfs: jtlMatch ? jtlMatch.pdfs || [] : [],
        links: jtlMatch ? jtlMatch.links || [] : [],
        kreditor: kreditorMatch ? {
          id: kreditorMatch.id,
          name: kreditorMatch.name,
          kreditorId: kreditorMatch.kreditorId,
          iban: kreditorMatch.iban,
          is_banking: Boolean(kreditorMatch.is_banking)
        } : null,
        hasKreditor: !!kreditorMatch
      };
    });

    // JTL rows with no same-day/same-amount CSV counterpart, reshaped to the
    // CSV row format (German locale date/amount strings).
    // NOTE(review): unmatchedJTLTransactions is also never used — same dead
    // legacy path as transactionsWithJTL above; confirm before deleting.
    const unmatchedJTLTransactions = jtlMonthTransactions
      .filter(jtl => {
        const jtlAmount = parseFloat(jtl.fBetrag) || 0;
        const jtlDate = new Date(jtl.dBuchungsdatum);

        const hasCSVMatch = monthTransactions.some(transaction => {
          const amount = transaction.numericAmount;
          const transactionDate = transaction.parsedDate;

          const amountMatch = Math.abs(amount - jtlAmount) < 0.01;
          const dateMatch = transactionDate && jtlDate &&
            transactionDate.getFullYear() === jtlDate.getFullYear() &&
            transactionDate.getMonth() === jtlDate.getMonth() &&
            transactionDate.getDate() === jtlDate.getDate();

          return amountMatch && dateMatch;
        });

        return !hasCSVMatch;
      })
      .map(jtl => ({
        'Buchungstag': new Date(jtl.dBuchungsdatum).toLocaleDateString('de-DE', {
          day: '2-digit',
          month: '2-digit',
          year: '2-digit'
        }),
        'Verwendungszweck': jtl.cVerwendungszweck || '',
        'Buchungstext': 'JTL Transaction',
        'Beguenstigter/Zahlungspflichtiger': jtl.cName || '',
        'Kontonummer/IBAN': '',
        'Betrag': jtl.fBetrag ? jtl.fBetrag.toString().replace('.', ',') : '0,00',
        numericAmount: parseFloat(jtl.fBetrag) || 0,
        parsedDate: new Date(jtl.dBuchungsdatum),
        monthYear: timeRange,
        hasJTL: true,
        jtlId: jtl.kZahlungsabgleichUmsatz,
        isFromCSV: false,
        isJTLOnly: true,
        pdfs: jtl.pdfs || [],
        links: jtl.links || [],
        kreditor: null,
        hasKreditor: false
      }));

    // Aggregates over the SQL-derived rows (not the JS-matched variants).
    // NOTE(review): jtlDatabaseAvailable is hard-coded true here although the
    // real flag computed above may be false — confirm intended.
    const summary = {
      totalTransactions: filteredTransactions.length,
      totalIncome: filteredTransactions
        .filter(t => t.numericAmount > 0)
        .reduce((sum, t) => sum + t.numericAmount, 0),
      totalExpenses: filteredTransactions
        .filter(t => t.numericAmount < 0)
        .reduce((sum, t) => sum + Math.abs(t.numericAmount), 0),
      netAmount: filteredTransactions.reduce((sum, t) => sum + t.numericAmount, 0),
      timeRange: timeRange,
      jtlDatabaseAvailable: true,
      jtlMatches: filteredTransactions.filter(t => t.hasJTL === true && t.isFromCSV).length,
      jtlMissing: filteredTransactions.filter(t => t.hasJTL === false && t.isFromCSV).length,
      jtlOnly: filteredTransactions.filter(t => t.isJTLOnly === true).length,
      csvOnly: filteredTransactions.filter(t => t.hasJTL === false && t.isFromCSV).length
    };

    res.json({
      transactions: filteredTransactions,
      summary
    });
  } catch (error) {
    console.error('Error getting transactions:', error);
    res.status(500).json({ error: 'Failed to load transactions' });
  }
});

module.exports = router;
|
||||
Reference in New Issue
Block a user