Compare commits
5 Commits
8e8d93e4a6...master

| Author | SHA1 | Date |
|---|---|---|
|  | adfcd90dcf |  |
|  | bb610e0480 |  |
|  | 44d6cf6352 |  |
|  | 74529d8b19 |  |
|  | bd7c6dddbf |  |
@@ -457,7 +457,7 @@ class CSVImportPanel extends Component {
               )}
               {importResult.errors > 0 && (
                 <Typography variant="body1" color="warning.main">
-                  <strong>Fehler:</strong> {importResult.errors} Zeilen konnten nicht verarbeitet werden
+                  <strong>Fehler:</strong> {importResult.errors} Zeilen konnten nicht verarbeitet werden.
                 </Typography>
               )}
               {importResult.message && (
@@ -1,56 +0,0 @@
-version: '3.8'
-
-services:
-  nginx:
-    image: nginx:alpine
-    ports:
-      - "80:80"
-    volumes:
-      - ./nginx.dev.conf:/etc/nginx/conf.d/default.conf
-      - ./logs/nginx:/var/log/nginx
-    depends_on:
-      - frontend
-      - backend
-    restart: unless-stopped
-    networks:
-      - fibdash-network
-
-  frontend:
-    build:
-      context: .
-      dockerfile: Dockerfile.dev.frontend
-    ports:
-      - "5001:5001"
-    volumes:
-      - ./client:/app/client
-      - /app/node_modules
-    environment:
-      - NODE_ENV=development
-      - CHOKIDAR_USEPOLLING=true
-    networks:
-      - fibdash-network
-    command: npm run dev:frontend
-
-  backend:
-    build:
-      context: .
-      dockerfile: Dockerfile.dev.backend
-    ports:
-      - "5000:5000"
-    volumes:
-      - ./src:/app/src
-      - /app/node_modules
-    environment:
-      - NODE_ENV=development
-    env_file:
-      - .env
-    networks:
-      - fibdash-network
-    command: npm run dev:backend
-
-networks:
-  fibdash-network:
-    driver: bridge
-
-volumes:
-  node_modules:
@@ -71,6 +71,37 @@ router.post('/test-csv-import', async (req, res) => {
         numericAmount = parseFloat(normalizedAmount) || 0;
       }

+      // Check for existing transaction to prevent duplicates
+      const duplicateCheckQuery = `
+        SELECT COUNT(*) as count FROM fibdash.CSVTransactions
+        WHERE buchungstag = @buchungstag
+        AND wertstellung = @wertstellung
+        AND umsatzart = @umsatzart
+        AND betrag = @betrag
+        AND beguenstigter_zahlungspflichtiger = @beguenstigter_zahlungspflichtiger
+        AND verwendungszweck = @verwendungszweck
+      `;
+
+      const duplicateCheckResult = await executeQuery(duplicateCheckQuery, {
+        buchungstag: transaction['Buchungstag'] || null,
+        wertstellung: transaction['Valutadatum'] || null,
+        umsatzart: transaction['Buchungstext'] || null,
+        betrag: numericAmount,
+        beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
+        verwendungszweck: transaction['Verwendungszweck'] || null
+      });
+
+      if (duplicateCheckResult.recordset[0].count > 0) {
+        console.log(`Skipping duplicate transaction at row ${i + 1}: ${transaction['Buchungstag']} - ${numericAmount}`);
+        errors.push({
+          row: i + 1,
+          error: 'Duplicate transaction (already exists in database)',
+          transaction: transaction
+        });
+        errorCount++;
+        continue;
+      }
+
       const insertQuery = `
         INSERT INTO fibdash.CSVTransactions
         (buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
@@ -129,6 +160,7 @@ router.post('/test-csv-import', async (req, res) => {

 // Import CSV transactions to database
 router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
+  console.log('Importing CSV transactions');
   try {
     const { executeQuery } = require('../../config/database');
     const { transactions, filename, batchId, headers } = req.body;
@@ -180,6 +212,7 @@ router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
     let successCount = 0;
     let errorCount = 0;
     const errors = [];
+    console.log('precheck done');

     for (let i = 0; i < transactions.length; i++) {
       const transaction = transactions[i];
@@ -195,9 +228,6 @@ router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
         validationErrors.push('Betrag is required');
       }

-      if (!transaction['Beguenstigter/Zahlungspflichtiger'] || transaction['Beguenstigter/Zahlungspflichtiger'].trim() === '') {
-        validationErrors.push('Beguenstigter/Zahlungspflichtiger is required');
-      }

       if (validationErrors.length > 2) {
         console.log('Skipping invalid row ' + (i + 1) + ':', validationErrors);
@@ -247,6 +277,37 @@ router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
         numericAmount = parseFloat(normalizedAmount) || 0;
       }

+      // Check for existing transaction to prevent duplicates
+      const duplicateCheckQuery = `
+        SELECT COUNT(*) as count FROM fibdash.CSVTransactions
+        WHERE buchungstag = @buchungstag
+        AND wertstellung = @wertstellung
+        AND umsatzart = @umsatzart
+        AND betrag = @betrag
+        AND beguenstigter_zahlungspflichtiger = @beguenstigter_zahlungspflichtiger
+        AND verwendungszweck = @verwendungszweck
+      `;
+
+      const duplicateCheckResult = await executeQuery(duplicateCheckQuery, {
+        buchungstag: transaction['Buchungstag'] || null,
+        wertstellung: transaction['Valutadatum'] || null,
+        umsatzart: transaction['Buchungstext'] || null,
+        betrag: numericAmount,
+        beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
+        verwendungszweck: transaction['Verwendungszweck'] || null
+      });
+
+      if (duplicateCheckResult.recordset[0].count > 0) {
+        console.log(`Skipping duplicate transaction at row ${i + 1}: ${transaction['Buchungstag']} - ${numericAmount}`);
+        errors.push({
+          row: i + 1,
+          error: 'Duplicate transaction (already exists in database)',
+          transaction: transaction
+        });
+        errorCount++;
+        continue;
+      }
+
       const insertQuery = `
         INSERT INTO fibdash.CSVTransactions
         (buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
@@ -287,6 +348,8 @@ router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
         errorCount++;
       }
     }

+    console.log('import done',errors);
+
     res.json({
       success: true,
@@ -488,7 +551,7 @@ router.post('/import-datev-beleglinks', authenticateToken, async (req, res) => {
         const updateQuery = `
           UPDATE eazybusiness.dbo.tUmsatzBeleg
           SET datevlink = @datevlink
-          WHERE kUmsatzBeleg = @kUmsatzBeleg AND (datevlink IS NULL OR datevlink = '')
+          WHERE kUmsatzBeleg = @kUmsatzBeleg AND (datevlink IS NULL OR datevlink = '' OR datevlink = 'pending')
         `;

         const updateResult = await executeQuery(updateQuery, {
@@ -514,7 +577,7 @@ router.post('/import-datev-beleglinks', authenticateToken, async (req, res) => {
         const updateQuery = `
           UPDATE eazybusiness.dbo.tPdfObjekt
           SET datevlink = @datevlink
-          WHERE kPdfObjekt = @kPdfObjekt AND (datevlink IS NULL OR datevlink = '')
+          WHERE kPdfObjekt = @kPdfObjekt AND (datevlink IS NULL OR datevlink = '' OR datevlink = 'pending')
         `;

         const updateResult = await executeQuery(updateQuery, {
@@ -39,14 +39,33 @@ const formatDatevAmount = (amount) => {
   return Math.abs(amount).toFixed(2).replace('.', ',');
 };

-const formatDatevDate = (dateString) => {
-  if (!dateString) return '';
-  const parts = dateString.split('.');
+const formatDatevDate = (date) => {
+  if (!date) return '';
+
+  // Handle Date object
+  if (date instanceof Date) {
+    const day = date.getDate().toString().padStart(2, '0');
+    const month = (date.getMonth() + 1).toString().padStart(2, '0');
+    return day + month;
+  }
+
+  // Handle string date
+  const dateStr = date.toString();
+  const parts = dateStr.split('.');
   if (parts.length === 3) {
     const day = parts[0].padStart(2, '0');
     const month = parts[1].padStart(2, '0');
     return day + month;
   }
+
+  // Try to parse as date string
+  const parsedDate = new Date(dateStr);
+  if (!isNaN(parsedDate)) {
+    const day = parsedDate.getDate().toString().padStart(2, '0');
+    const month = (parsedDate.getMonth() + 1).toString().padStart(2, '0');
+    return day + month;
+  }
+
   return '';
 };

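Note (reviewer sketch, not part of the diff): with the change above, `formatDatevDate` accepts either a `Date` object or a string and returns the DATEV day-month pair `DDMM`. Illustrative calls, assuming the function exactly as shown:

```js
formatDatevDate(new Date(2025, 2, 7)); // '0703' (Date object branch)
formatDatevDate('07.03.2025');         // '0703' (dd.mm.yyyy string branch)
formatDatevDate('2025-03-07');         // '0703' via the new Date(...) fallback
                                       // (local time; UTC-parsed strings can shift by a day)
formatDatevDate(null);                 // ''
```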
@@ -55,13 +74,219 @@ const quote = (str, maxLen = 60) => {
   return '"' + str.slice(0, maxLen).replace(/"/g, '""') + '"';
 };

+// Parse konto field which might contain multiple accounts like "5400+5300"
+const parseKonto = (konto) => {
+  if (!konto) return '';
+  // Take the first account number if multiple are present
+  const parts = konto.split('+');
+  return parts[0].trim();
+};
+
 // DATEV export endpoint
 router.get('/datev/:timeRange', authenticateToken, async (req, res) => {
   try {
     const { timeRange } = req.params;
-    // TODO: Update to use database queries instead of CSV file
-    res.status(501).json({ error: 'DATEV export temporarily disabled - use database-based queries' });
-    return;
+    const { executeQuery } = require('../../config/database');
+    // Parse the time range to get start and end dates
+    let startDate, endDate;
+
+    if (timeRange.includes('-Q')) {
+      // Quarter format: 2025-Q1
+      const [year, quarterPart] = timeRange.split('-Q');
+      const quarter = parseInt(quarterPart, 10);
+      const startMonth = (quarter - 1) * 3 + 1;
+      const endMonth = startMonth + 2;
+
+      startDate = new Date(year, startMonth - 1, 1);
+      endDate = new Date(year, endMonth - 1, new Date(year, endMonth, 0).getDate());
+    } else if (timeRange.length === 4) {
+      // Year format: 2025
+      startDate = new Date(timeRange, 0, 1);
+      endDate = new Date(timeRange, 11, 31);
+    } else {
+      // Month format: 2025-03
+      const [year, month] = timeRange.split('-');
+      startDate = new Date(year, parseInt(month) - 1, 1);
+      endDate = new Date(year, parseInt(month), 0);
+    }
+
+    // Format dates for SQL query
+    const sqlStartDate = startDate.toISOString().split('T')[0];
+    const sqlEndDate = endDate.toISOString().split('T')[0];
+
+    // Query to get all DATEV data with proper joins
+    // This handles multiple documents per transaction by creating separate rows
+    const query = `
+      WITH DatevDocuments AS (
+        -- Get documents from tUmsatzBeleg
+        SELECT
+          uk.kZahlungsabgleichUmsatz,
+          zu.fBetrag as umsatz_brutto,
+          CASE WHEN zu.fBetrag < 0 THEN 'H' ELSE 'S' END as soll_haben_kz,
+          JSON_VALUE(uk.data, '$.konto1') as konto,
+          '' as gegenkonto, -- No creditorID in tUmsatzBeleg
+          -- BU determination based on amount and konto type
+          CASE
+            WHEN JSON_VALUE(uk.data, '$.konto1') IN ('3720', '3740', '2100', '1460', '1462') THEN ''
+            WHEN zu.fBetrag > 0 THEN ''
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '5%' THEN '9' -- 19% for purchases
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '6%' THEN '9' -- 19% for expenses
+            ELSE ''
+          END as bu,
+          FORMAT(zu.dBuchungsdatum, 'Mdd') as buchungsdatum_mdd,
+          zu.dBuchungsdatum,
+          '' as rechnungsnummer, -- No invoice number in tUmsatzBeleg
+          zu.cVerwendungszweck as buchungstext,
+          ub.datevlink as beleglink,
+          1 as priority -- tUmsatzBeleg has priority
+        FROM tUmsatzKontierung uk
+        INNER JOIN tZahlungsabgleichUmsatz zu ON uk.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        INNER JOIN tUmsatzBeleg ub ON ub.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        WHERE ub.datevlink IS NOT NULL
+        AND zu.dBuchungsdatum >= @startDate
+        AND zu.dBuchungsdatum <= @endDate
+
+        UNION ALL
+
+        -- Get documents from tPdfObjekt via tZahlungsabgleichUmsatzLink
+        SELECT
+          uk.kZahlungsabgleichUmsatz,
+          zu.fBetrag as umsatz_brutto,
+          CASE WHEN zu.fBetrag < 0 THEN 'H' ELSE 'S' END as soll_haben_kz,
+          JSON_VALUE(uk.data, '$.konto1') as konto,
+          COALESCE(JSON_VALUE(po.extraction, '$.creditorID'), '') as gegenkonto,
+          -- BU determination based on amount and konto type
+          CASE
+            WHEN JSON_VALUE(uk.data, '$.konto1') IN ('3720', '3740', '2100', '1460', '1462') THEN ''
+            WHEN zu.fBetrag > 0 THEN ''
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '5%' THEN '9' -- 19% for purchases
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '6%' THEN '9' -- 19% for expenses
+            ELSE ''
+          END as bu,
+          FORMAT(zu.dBuchungsdatum, 'Mdd') as buchungsdatum_mdd,
+          zu.dBuchungsdatum,
+          COALESCE(JSON_VALUE(po.extraction, '$.invoice_number'), '') as rechnungsnummer,
+          zu.cVerwendungszweck as buchungstext,
+          po.datevlink as beleglink,
+          2 as priority -- tPdfObjekt has lower priority
+        FROM tUmsatzKontierung uk
+        INNER JOIN tZahlungsabgleichUmsatz zu ON uk.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        INNER JOIN tZahlungsabgleichUmsatzLink zul ON zu.kZahlungsabgleichUmsatz = zul.kZahlungsabgleichUmsatz
+        AND zul.linktype = 'kLieferantenBestellung'
+        INNER JOIN tPdfObjekt po ON zul.linktarget = po.kLieferantenbestellung
+        WHERE po.datevlink IS NOT NULL
+        AND zu.dBuchungsdatum >= @startDate
+        AND zu.dBuchungsdatum <= @endDate
+
+        UNION ALL
+
+        -- Get transactions without documents
+        SELECT
+          uk.kZahlungsabgleichUmsatz,
+          zu.fBetrag as umsatz_brutto,
+          CASE WHEN zu.fBetrag < 0 THEN 'H' ELSE 'S' END as soll_haben_kz,
+          JSON_VALUE(uk.data, '$.konto1') as konto,
+          '' as gegenkonto,
+          -- BU determination based on amount and konto type
+          CASE
+            WHEN JSON_VALUE(uk.data, '$.konto1') IN ('3720', '3740', '2100', '1460', '1462') THEN ''
+            WHEN zu.fBetrag > 0 THEN ''
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '5%' THEN '9' -- 19% for purchases
+            WHEN JSON_VALUE(uk.data, '$.konto1') LIKE '6%' THEN '9' -- 19% for expenses
+            ELSE ''
+          END as bu,
+          FORMAT(zu.dBuchungsdatum, 'Mdd') as buchungsdatum_mdd,
+          zu.dBuchungsdatum,
+          '' as rechnungsnummer,
+          zu.cVerwendungszweck as buchungstext,
+          '' as beleglink,
+          3 as priority -- No documents has lowest priority
+        FROM tUmsatzKontierung uk
+        INNER JOIN tZahlungsabgleichUmsatz zu ON uk.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+        WHERE zu.dBuchungsdatum >= @startDate
+        AND zu.dBuchungsdatum <= @endDate
+        AND NOT EXISTS (
+          SELECT 1 FROM tUmsatzBeleg ub2
+          WHERE ub2.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+          AND ub2.datevlink IS NOT NULL
+        )
+        AND NOT EXISTS (
+          SELECT 1 FROM tZahlungsabgleichUmsatzLink zul2
+          INNER JOIN tPdfObjekt po2 ON zul2.linktarget = po2.kLieferantenbestellung
+          WHERE zul2.kZahlungsabgleichUmsatz = zu.kZahlungsabgleichUmsatz
+          AND zul2.linktype = 'kLieferantenBestellung'
+          AND po2.datevlink IS NOT NULL
+        )
+      )
+      SELECT
+        *,
+        ROW_NUMBER() OVER (PARTITION BY kZahlungsabgleichUmsatz, beleglink ORDER BY priority) as rn
+      FROM DatevDocuments
+      ORDER BY dBuchungsdatum DESC, kZahlungsabgleichUmsatz, priority
+    `;
+
+    const result = await executeQuery(query, {
+      startDate: sqlStartDate,
+      endDate: sqlEndDate
+    });
+
+    // Format data for DATEV CSV
+    const datevRows = [];
+
+    // Build header
+    const periodStart = startDate.getFullYear() +
+      ('0' + (startDate.getMonth() + 1)).slice(-2) +
+      ('0' + startDate.getDate()).slice(-2);
+    const periodEnd = endDate.getFullYear() +
+      ('0' + (endDate.getMonth() + 1)).slice(-2) +
+      ('0' + endDate.getDate()).slice(-2);
+
+    datevRows.push(buildDatevHeader(periodStart, periodEnd));
+    datevRows.push(DATEV_COLS);
+
+    // Process each transaction
+    result.recordset.forEach(row => {
+      // Skip duplicate rows (keep only the first occurrence of each transaction+beleglink combination)
+      if (row.rn > 1) return;
+
+      const datevRow = [
+        formatDatevAmount(row.umsatz_brutto), // Umsatz (ohne Soll/Haben-Kz)
+        row.soll_haben_kz, // Soll/Haben-Kennzeichen
+        '', // WKZ Umsatz
+        '', // Kurs
+        '', // Basis-Umsatz
+        '', // WKZ Basis-Umsatz
+        parseKonto(row.konto), // Konto (parsed)
+        row.gegenkonto || '', // Gegenkonto (ohne BU-Schlüssel)
+        row.bu || '', // BU-Schlüssel
+        row.buchungsdatum_mdd || '', // Belegdatum (MDD format)
+        quote(row.rechnungsnummer || ''), // Belegfeld 1 (invoice number)
+        '', // Belegfeld 2
+        '', // Skonto
+        quote(row.buchungstext || ''), // Buchungstext
+        '', // Postensperre
+        '', // Diverse Adressnummer
+        '', // Geschäftspartnerbank
+        '', // Sachverhalt
+        '', // Zinssperre
+        row.beleglink || '' // Beleglink
+      ].join(';');
+
+      datevRows.push(datevRow);
+    });
+
+    // Generate CSV content
+    const csvContent = datevRows.join('\n');
+
+    // Set headers for CSV download
+    const filename = `EXTF_${timeRange.replace('-', '_')}.csv`;
+    res.setHeader('Content-Type', 'text/csv; charset=windows-1252');
+    res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
+
+    // Send CSV content
+    res.send(csvContent);
+
   } catch (error) {
     console.error('Error generating DATEV export:', error);
     res.status(500).json({ error: 'Failed to generate DATEV export' });
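Note (reviewer sketch, not part of the diff): two small pieces of the re-enabled export above, exercised with made-up values and assuming the helpers exactly as shown:

```js
parseKonto('5400+5300'); // '5400' - first account wins when several are joined with '+'
parseKonto('3720');      // '3720'
parseKonto(null);        // ''

// Quarter branch for timeRange '2025-Q1':
//   startMonth = 1, endMonth = 3
//   startDate  = new Date('2025', 0, 1)                               -> Jan 1, 2025 (local)
//   endDate    = new Date('2025', 2, new Date('2025', 3, 0).getDate()) -> Mar 31, 2025 (local)
// sqlStartDate/sqlEndDate come from toISOString(), which is UTC and can therefore
// shift by one day relative to these local dates depending on the server offset.
```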
@@ -10,6 +10,25 @@ router.get('/transactions/:timeRange', authenticateToken, async (req, res) => {
     const { timeRange } = req.params;

     const { executeQuery } = require('../../config/database');
+
+    // Build WHERE clause based on timeRange format
+    let timeWhereClause = '';
+    if (timeRange.includes('-Q')) {
+      // Quarter format: 2025-Q2
+      const [year, quarterPart] = timeRange.split('-Q');
+      const quarter = parseInt(quarterPart, 10);
+      const startMonth = (quarter - 1) * 3 + 1;
+      const endMonth = startMonth + 2;
+      timeWhereClause = `WHERE YEAR(csv.parsed_date) = ${year} AND MONTH(csv.parsed_date) BETWEEN ${startMonth} AND ${endMonth}`;
+    } else if (timeRange.length === 4) {
+      // Year format: 2025
+      timeWhereClause = `WHERE YEAR(csv.parsed_date) = ${timeRange}`;
+    } else {
+      // Month format: 2025-07
+      const [year, month] = timeRange.split('-');
+      timeWhereClause = `WHERE YEAR(csv.parsed_date) = ${year} AND MONTH(csv.parsed_date) = ${parseInt(month, 10)}`;
+    }
+
     const query = `
       SELECT
         csv.id as id,
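Note (reviewer sketch, not part of the diff): the branches above build `timeWhereClause` by interpolating values derived from the `:timeRange` parameter into the SQL string. For a few example values it evaluates to:

```js
const examples = {
  '2025-Q2': "WHERE YEAR(csv.parsed_date) = 2025 AND MONTH(csv.parsed_date) BETWEEN 4 AND 6",
  '2025':    "WHERE YEAR(csv.parsed_date) = 2025",
  '2025-07': "WHERE YEAR(csv.parsed_date) = 2025 AND MONTH(csv.parsed_date) = 7"
};
```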
@@ -47,6 +66,7 @@ router.get('/transactions/:timeRange', authenticateToken, async (req, res) => {
       LEFT JOIN fibdash.Kreditor k ON csv.kontonummer_iban = k.iban
       LEFT JOIN fibdash.BankingAccountTransactions bat ON csv.id = bat.csv_transaction_id
       LEFT JOIN fibdash.Kreditor ak ON bat.assigned_kreditor_id = ak.id
+      ${timeWhereClause}

       UNION ALL

@@ -84,6 +104,12 @@ router.get('/transactions/:timeRange', authenticateToken, async (req, res) => {
         WHERE ABS(csv.numeric_amount - jtl.fBetrag) < 0.01
         AND ABS(DATEDIFF(day, csv.parsed_date, jtl.dBuchungsdatum)) <= 1
       )
+      ${timeRange.includes('-Q') ?
+        `AND YEAR(jtl.dBuchungsdatum) = ${timeRange.split('-Q')[0]} AND MONTH(jtl.dBuchungsdatum) BETWEEN ${(parseInt(timeRange.split('-Q')[1], 10) - 1) * 3 + 1} AND ${(parseInt(timeRange.split('-Q')[1], 10) - 1) * 3 + 3}` :
+        timeRange.length === 4 ?
+        `AND YEAR(jtl.dBuchungsdatum) = ${timeRange}` :
+        `AND YEAR(jtl.dBuchungsdatum) = ${timeRange.split('-')[0]} AND MONTH(jtl.dBuchungsdatum) = ${parseInt(timeRange.split('-')[1], 10)}`
+      }

       ORDER BY parsed_date DESC
     `;
@@ -163,213 +189,32 @@ router.get('/transactions/:timeRange', authenticateToken, async (req, res) => {
       links: [...new Set(transaction.links.map(l => JSON.stringify(l)))].map(l => JSON.parse(l))
     }));

-    let filteredTransactions = [];
-    if (timeRange.includes('-Q')) {
-      const [year, quarterPart] = timeRange.split('-Q');
-      const quarter = parseInt(quarterPart, 10);
-      const startMonth = (quarter - 1) * 3 + 1;
-      const endMonth = startMonth + 2;
-
-      filteredTransactions = transactions.filter(t => {
-        if (!t.monthYear) return false;
-        const [tYear, tMonth] = t.monthYear.split('-');
-        const monthNum = parseInt(tMonth, 10);
-        return tYear === year && monthNum >= startMonth && monthNum <= endMonth;
-      });
-    } else if (timeRange.length === 4) {
-      filteredTransactions = transactions.filter(t => {
-        if (!t.monthYear) return false;
-        const [tYear] = t.monthYear.split('-');
-        return tYear === timeRange;
-      });
-    } else {
-      filteredTransactions = transactions.filter(t => t.monthYear === timeRange);
-    }
-
-    const monthTransactions = filteredTransactions
+    // Transactions are already filtered by the SQL query, so we just need to sort them
+    const monthTransactions = transactions
       .sort((a, b) => b.parsedDate - a.parsedDate);

-    // Get JTL transactions for comparison
-    let jtlTransactions = [];
-    let jtlDatabaseAvailable = false;
-    try {
-      jtlTransactions = await getJTLTransactions();
-      jtlDatabaseAvailable = true;
-      console.log('DEBUG: JTL database connected, found', jtlTransactions.length, 'transactions');
-    } catch (error) {
-      console.log('JTL database not available, continuing without JTL data:', error.message);
-      jtlDatabaseAvailable = false;
-    }
-
-    // Filter JTL transactions for the selected time period
-    let jtlMonthTransactions = [];
-
-    if (timeRange.includes('-Q')) {
-      const [year, quarterPart] = timeRange.split('-Q');
-      const quarter = parseInt(quarterPart, 10);
-      const startMonth = (quarter - 1) * 3 + 1;
-      const endMonth = startMonth + 2;
-
-      jtlMonthTransactions = jtlTransactions.filter(jtl => {
-        const jtlDate = new Date(jtl.dBuchungsdatum);
-        const jtlMonth = jtlDate.getMonth() + 1;
-        return jtlDate.getFullYear() === parseInt(year, 10) &&
-          jtlMonth >= startMonth && jtlMonth <= endMonth;
-      });
-    } else if (timeRange.length === 4) {
-      jtlMonthTransactions = jtlTransactions.filter(jtl => {
-        const jtlDate = new Date(jtl.dBuchungsdatum);
-        return jtlDate.getFullYear() === parseInt(timeRange, 10);
-      });
-    } else {
-      const [year, month] = timeRange.split('-');
-      jtlMonthTransactions = jtlTransactions.filter(jtl => {
-        const jtlDate = new Date(jtl.dBuchungsdatum);
-        return jtlDate.getFullYear() === parseInt(year, 10) &&
-          jtlDate.getMonth() === parseInt(month, 10) - 1;
-      });
-    }
-
-    // Get Kreditor information for IBAN lookup
-    let kreditorData = [];
-    try {
-      const kreditorQuery = `SELECT id, iban, name, kreditorId, is_banking FROM fibdash.Kreditor`;
-      const kreditorResult = await executeQuery(kreditorQuery);
-      kreditorData = kreditorResult.recordset || [];
-    } catch (error) {
-      console.log('Kreditor database not available, continuing without Kreditor data');
-    }
-
-    // Add JTL status and Kreditor information to each CSV transaction
-    const transactionsWithJTL = monthTransactions.map((transaction, index) => {
-      const amount = transaction.numericAmount;
-      const transactionDate = transaction.parsedDate;
-
-      if (index === 0) {
-        console.log('DEBUG First CSV transaction:', {
-          amount: amount,
-          transactionDate: transactionDate,
-          jtlMonthTransactionsCount: jtlMonthTransactions.length
-        });
-        if (jtlMonthTransactions.length > 0) {
-          console.log('DEBUG First JTL transaction:', {
-            amount: parseFloat(jtlMonthTransactions[0].fBetrag),
-            date: new Date(jtlMonthTransactions[0].dBuchungsdatum)
-          });
-        }
-      }
-
-      const jtlMatch = jtlMonthTransactions.find(jtl => {
-        const jtlAmount = parseFloat(jtl.fBetrag) || 0;
-        const jtlDate = new Date(jtl.dBuchungsdatum);
-
-        const amountMatch = Math.abs(amount - jtlAmount) < 0.01;
-        const dateMatch = transactionDate && jtlDate &&
-          transactionDate.getFullYear() === jtlDate.getFullYear() &&
-          transactionDate.getMonth() === jtlDate.getMonth() &&
-          transactionDate.getDate() === jtlDate.getDate();
-
-        if (index === 0 && (amountMatch || dateMatch)) {
-          console.log('DEBUG Potential match for first transaction:', {
-            csvAmount: amount,
-            jtlAmount: jtlAmount,
-            amountMatch: amountMatch,
-            csvDate: transactionDate,
-            jtlDate: jtlDate,
-            dateMatch: dateMatch,
-            bothMatch: amountMatch && dateMatch
-          });
-        }
-
-        return amountMatch && dateMatch;
-      });
-
-      const transactionIban = transaction['Kontonummer/IBAN'];
-      const kreditorMatch = transactionIban ? kreditorData.find(k => k.iban === transactionIban) : null;
-
-      return {
-        ...transaction,
-        hasJTL: jtlDatabaseAvailable ? !!jtlMatch : undefined,
-        jtlId: jtlMatch ? jtlMatch.kZahlungsabgleichUmsatz : null,
-        isFromCSV: true,
-        jtlDatabaseAvailable,
-        pdfs: jtlMatch ? jtlMatch.pdfs || [] : [],
-        links: jtlMatch ? jtlMatch.links || [] : [],
-        kreditor: kreditorMatch ? {
-          id: kreditorMatch.id,
-          name: kreditorMatch.name,
-          kreditorId: kreditorMatch.kreditorId,
-          iban: kreditorMatch.iban,
-          is_banking: Boolean(kreditorMatch.is_banking)
-        } : null,
-        hasKreditor: !!kreditorMatch
-      };
-    });
-
-    const unmatchedJTLTransactions = jtlMonthTransactions
-      .filter(jtl => {
-        const jtlAmount = parseFloat(jtl.fBetrag) || 0;
-        const jtlDate = new Date(jtl.dBuchungsdatum);
-
-        const hasCSVMatch = monthTransactions.some(transaction => {
-          const amount = transaction.numericAmount;
-          const transactionDate = transaction.parsedDate;
-
-          const amountMatch = Math.abs(amount - jtlAmount) < 0.01;
-          const dateMatch = transactionDate && jtlDate &&
-            transactionDate.getFullYear() === jtlDate.getFullYear() &&
-            transactionDate.getMonth() === jtlDate.getMonth() &&
-            transactionDate.getDate() === jtlDate.getDate();
-
-          return amountMatch && dateMatch;
-        });
-
-        return !hasCSVMatch;
-      })
-      .map(jtl => ({
-        'Buchungstag': new Date(jtl.dBuchungsdatum).toLocaleDateString('de-DE', {
-          day: '2-digit',
-          month: '2-digit',
-          year: '2-digit'
-        }),
-        'Verwendungszweck': jtl.cVerwendungszweck || '',
-        'Buchungstext': 'JTL Transaction',
-        'Beguenstigter/Zahlungspflichtiger': jtl.cName || '',
-        'Kontonummer/IBAN': '',
-        'Betrag': jtl.fBetrag ? jtl.fBetrag.toString().replace('.', ',') : '0,00',
-        numericAmount: parseFloat(jtl.fBetrag) || 0,
-        parsedDate: new Date(jtl.dBuchungsdatum),
-        monthYear: timeRange,
-        hasJTL: true,
-        jtlId: jtl.kZahlungsabgleichUmsatz,
-        isFromCSV: false,
-        isJTLOnly: true,
-        pdfs: jtl.pdfs || [],
-        links: jtl.links || [],
-        kreditor: null,
-        hasKreditor: false
-      }));
-
+    // Since transactions are already filtered and joined with JTL data in SQL,
+    // we don't need the complex post-processing logic anymore

     const summary = {
-      totalTransactions: filteredTransactions.length,
-      totalIncome: filteredTransactions
+      totalTransactions: transactions.length,
+      totalIncome: transactions
        .filter(t => t.numericAmount > 0)
        .reduce((sum, t) => sum + t.numericAmount, 0),
-      totalExpenses: filteredTransactions
+      totalExpenses: transactions
        .filter(t => t.numericAmount < 0)
        .reduce((sum, t) => sum + Math.abs(t.numericAmount), 0),
-      netAmount: filteredTransactions.reduce((sum, t) => sum + t.numericAmount, 0),
+      netAmount: transactions.reduce((sum, t) => sum + t.numericAmount, 0),
       timeRange: timeRange,
       jtlDatabaseAvailable: true,
-      jtlMatches: filteredTransactions.filter(t => t.hasJTL === true && t.isFromCSV).length,
-      jtlMissing: filteredTransactions.filter(t => t.hasJTL === false && t.isFromCSV).length,
-      jtlOnly: filteredTransactions.filter(t => t.isJTLOnly === true).length,
-      csvOnly: filteredTransactions.filter(t => t.hasJTL === false && t.isFromCSV).length
+      jtlMatches: transactions.filter(t => t.hasJTL === true && t.isFromCSV).length,
+      jtlMissing: transactions.filter(t => t.hasJTL === false && t.isFromCSV).length,
+      jtlOnly: transactions.filter(t => t.isJTLOnly === true).length,
+      csvOnly: transactions.filter(t => t.hasJTL === false && t.isFromCSV).length
     };

     res.json({
-      transactions: filteredTransactions,
+      transactions: transactions,
       summary
     });
   } catch (error) {