- Updated error message in CSVImportPanel to include a period for better readability.
- Added console logs in the CSV import API route to track the import process and precheck status.
- Removed redundant validation for 'Beguenstigter/Zahlungspflichtiger' to streamline error handling during CSV import.

const express = require('express');
const { authenticateToken } = require('../../middleware/auth');

const router = express.Router();
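
// Illustrative request body for the CSV import endpoints below (values are
// made up; the column names are the German bank-export columns this router
// expects):
//
//   {
//     "filename": "umsaetze.csv",
//     "batchId": "import_1700000000000",   // optional; generated when absent
//     "transactions": [{
//       "Buchungstag": "02.01.24",
//       "Valutadatum": "03.01.24",
//       "Buchungstext": "LASTSCHRIFT",
//       "Verwendungszweck": "Rechnung 4711",
//       "Beguenstigter/Zahlungspflichtiger": "ACME GmbH",
//       "Kontonummer/IBAN": "DE02120300000000202051",
//       "BIC (SWIFT-Code)": "BYLADEM1001",
//       "Betrag": "-1.234,56",
//       "Waehrung": "EUR"
//     }]
//   }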

// Test CSV import endpoint (no auth for testing) - ACTUALLY IMPORTS TO DATABASE
router.post('/test-csv-import', async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const { transactions, filename, batchId } = req.body;

    if (!transactions || !Array.isArray(transactions)) {
      return res.status(400).json({ error: 'Transactions array is required' });
    }

    const importBatchId = batchId || 'test_import_' + Date.now();
    let successCount = 0;
    let errorCount = 0;
    const errors = [];

    for (let i = 0; i < transactions.length; i++) {
      const transaction = transactions[i];

      try {
        const validationErrors = [];

        if (!transaction['Buchungstag'] || transaction['Buchungstag'].trim() === '') {
          validationErrors.push('Buchungstag is required');
        }

        if (!transaction['Betrag'] || transaction['Betrag'].toString().trim() === '') {
          validationErrors.push('Betrag is required');
        }

        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }
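
        // Buchungstag arrives as a German-style date ("02.01.24" or
        // "02.01.2024"); split on ".", "/" or "-" and build a local Date.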
        let parsedDate = null;
        if (transaction['Buchungstag']) {
          const dateStr = transaction['Buchungstag'].trim();
          const dateParts = dateStr.split(/[.\/\-]/);
          if (dateParts.length === 3) {
            const day = parseInt(dateParts[0], 10);
            const month = parseInt(dateParts[1], 10) - 1;
            let year = parseInt(dateParts[2], 10);

            // Two-digit years: 00-49 -> 2000s, 50-99 -> 1900s
            if (year < 100) {
              year += (year < 50) ? 2000 : 1900;
            }

            parsedDate = new Date(year, month, day);

            if (isNaN(parsedDate.getTime())) {
              parsedDate = null;
              validationErrors.push('Invalid date format: ' + dateStr);
            }
          }
        }

        // Re-check: date errors are only detected after the required-field
        // check above, so they have to be reported here to take effect.
        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Normalize German-format amounts (e.g. "1.234,56"): keep digits and
        // separators, drop thousands dots, then convert the decimal comma.
        let numericAmount = 0;
        if (transaction['Betrag']) {
          const amountStr = transaction['Betrag'].toString().replace(/[^\d,.-]/g, '');
          const normalizedAmount = amountStr.replace(/\./g, '').replace(',', '.');
          numericAmount = parseFloat(normalizedAmount) || 0;
        }
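
        // The INSERT below uses named @parameters bound through executeQuery,
        // so raw CSV values are never concatenated into the SQL string.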
        const insertQuery = `
          INSERT INTO fibdash.CSVTransactions
            (buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
             beguenstigter_zahlungspflichtiger, kontonummer_iban, bic, verwendungszweck,
             parsed_date, numeric_amount, import_batch_id, source_filename, source_row_number)
          VALUES
            (@buchungstag, @wertstellung, @umsatzart, @betrag, @betrag_original, @waehrung,
             @beguenstigter_zahlungspflichtiger, @kontonummer_iban, @bic, @verwendungszweck,
             @parsed_date, @numeric_amount, @import_batch_id, @source_filename, @source_row_number)
        `;

        await executeQuery(insertQuery, {
          buchungstag: transaction['Buchungstag'] || null,
          wertstellung: transaction['Valutadatum'] || null,
          umsatzart: transaction['Buchungstext'] || null,
          betrag: numericAmount,
          betrag_original: transaction['Betrag'] || null,
          waehrung: transaction['Waehrung'] || null,
          beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
          kontonummer_iban: transaction['Kontonummer/IBAN'] || null,
          bic: transaction['BIC (SWIFT-Code)'] || null,
          verwendungszweck: transaction['Verwendungszweck'] || null,
          parsed_date: parsedDate,
          numeric_amount: numericAmount,
          import_batch_id: importBatchId,
          source_filename: filename || 'test_import',
          source_row_number: i + 1
        });

        successCount++;
      } catch (error) {
        console.error('Error importing transaction ' + (i + 1) + ':', error);
        errors.push({
          row: i + 1,
          error: error.message,
          transaction: transaction
        });
        errorCount++;
      }
    }

    res.json({
      success: true,
      batchId: importBatchId,
      imported: successCount,
      errors: errorCount,
      details: errors.length > 0 ? errors : undefined,
      // Debug aid: surface the PayPal row (matched by its IBAN), if present
      paypalTransaction: transactions.find(t => t['Kontonummer/IBAN'] === 'LU89751000135104200E')
    });
  } catch (error) {
    console.error('Test import error:', error);
    res.status(500).json({ error: 'Test import failed' });
  }
});
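
// The production route below mirrors the test endpoint but adds token auth,
// header validation, and silent skipping of empty rows.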

// Import CSV transactions to database
router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
  console.log('Importing CSV transactions');
  try {
    const { executeQuery } = require('../../config/database');
    const { transactions, filename, batchId, headers } = req.body;

    if (!transactions || !Array.isArray(transactions)) {
      return res.status(400).json({ error: 'Transactions array is required' });
    }
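
    // When the client sends the parsed header row, it is checked against the
    // full set of columns of the bank's CSV export before anything is imported.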
    const expectedHeaders = [
      'Auftragskonto',
      'Buchungstag',
      'Valutadatum',
      'Buchungstext',
      'Verwendungszweck',
      'Glaeubiger ID',
      'Mandatsreferenz',
      'Kundenreferenz (End-to-End)',
      'Sammlerreferenz',
      'Lastschrift Ursprungsbetrag',
      'Auslagenersatz Ruecklastschrift',
      'Beguenstigter/Zahlungspflichtiger',
      'Kontonummer/IBAN',
      'BIC (SWIFT-Code)',
      'Betrag',
      'Waehrung',
      'Info'
    ];

    if (headers && Array.isArray(headers)) {
      const missingHeaders = expectedHeaders.filter(expected =>
        !headers.some(header => header.trim() === expected)
      );

      if (missingHeaders.length > 0) {
        return res.status(400).json({
          error: 'Invalid CSV format - missing required headers',
          missing: missingHeaders,
          expected: expectedHeaders,
          received: headers
        });
      }
    }

    if (transactions.length === 0) {
      return res.status(400).json({ error: 'No transaction data found' });
    }

    const importBatchId = batchId || 'import_' + Date.now();
    let successCount = 0;
    let errorCount = 0;
    const errors = [];
    console.log('Precheck passed, importing ' + transactions.length + ' transactions');

    for (let i = 0; i < transactions.length; i++) {
      const transaction = transactions[i];

      try {
        const validationErrors = [];

        if (!transaction['Buchungstag'] || transaction['Buchungstag'].trim() === '') {
          validationErrors.push('Buchungstag is required');
        }

        if (!transaction['Betrag'] || transaction['Betrag'].toString().trim() === '') {
          validationErrors.push('Betrag is required');
        }

        // Rows where every required field is missing are treated as empty
        // filler rows and skipped without being counted as errors. (With two
        // required-field checks, a "> 2" threshold could never trigger.)
        if (validationErrors.length >= 2) {
          console.log('Skipping invalid row ' + (i + 1) + ':', validationErrors);
          continue;
        }

        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }
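
        // Same German-date handling as the test endpoint, plus a strict
        // round-trip check on day/month/year.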
        let parsedDate = null;
        if (transaction['Buchungstag']) {
          const dateStr = transaction['Buchungstag'].trim();
          const dateParts = dateStr.split(/[.\/\-]/);
          if (dateParts.length === 3) {
            const day = parseInt(dateParts[0], 10);
            const month = parseInt(dateParts[1], 10) - 1;
            let year = parseInt(dateParts[2], 10);

            // Two-digit years: 00-49 -> 2000s, 50-99 -> 1900s
            if (year < 100) {
              year += (year < 50) ? 2000 : 1900;
            }

            parsedDate = new Date(year, month, day);

            // The round-trip check catches overflows such as 31.02. rolling
            // over into March.
            if (isNaN(parsedDate.getTime()) ||
                parsedDate.getDate() !== day ||
                parsedDate.getMonth() !== month ||
                parsedDate.getFullYear() !== year) {
              parsedDate = null;
              validationErrors.push('Invalid date format: ' + dateStr);
            }
          } else {
            validationErrors.push('Invalid date format: ' + dateStr);
          }
        }

        // Re-check: date errors are only detected after the required-field
        // checks above, so they have to be reported here to take effect.
        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: 'Validation failed: ' + validationErrors.join(', '),
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Normalize German-format amounts ("1.234,56"): drop thousands dots,
        // convert the decimal comma.
        let numericAmount = 0;
        if (transaction['Betrag']) {
          const amountStr = transaction['Betrag'].toString().replace(/[^\d,.-]/g, '');
          const normalizedAmount = amountStr.replace(/\./g, '').replace(',', '.');
          numericAmount = parseFloat(normalizedAmount) || 0;
        }

        const insertQuery = `
          INSERT INTO fibdash.CSVTransactions
            (buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
             beguenstigter_zahlungspflichtiger, kontonummer_iban, bic, verwendungszweck,
             parsed_date, numeric_amount, import_batch_id, source_filename, source_row_number)
          VALUES
            (@buchungstag, @wertstellung, @umsatzart, @betrag, @betrag_original, @waehrung,
             @beguenstigter_zahlungspflichtiger, @kontonummer_iban, @bic, @verwendungszweck,
             @parsed_date, @numeric_amount, @import_batch_id, @source_filename, @source_row_number)
        `;

        await executeQuery(insertQuery, {
          buchungstag: transaction['Buchungstag'] || null,
          wertstellung: transaction['Valutadatum'] || null,
          umsatzart: transaction['Buchungstext'] || null,
          betrag: numericAmount,
          betrag_original: transaction['Betrag'] || null,
          waehrung: transaction['Waehrung'] || null,
          beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
          kontonummer_iban: transaction['Kontonummer/IBAN'] || null,
          bic: transaction['BIC (SWIFT-Code)'] || null,
          verwendungszweck: transaction['Verwendungszweck'] || null,
          parsed_date: parsedDate,
          numeric_amount: numericAmount,
          import_batch_id: importBatchId,
          source_filename: filename || null,
          source_row_number: i + 1
        });

        successCount++;
      } catch (error) {
        console.error('Error importing transaction ' + (i + 1) + ':', error);
        errors.push({
          row: i + 1,
          error: error.message,
          transaction: transaction
        });
        errorCount++;
      }
    }

    console.log('Import done: ' + successCount + ' imported, ' + errorCount + ' errors', errors);

    res.json({
      success: true,
      batchId: importBatchId,
      imported: successCount,
      errors: errorCount,
      details: errors.length > 0 ? errors : undefined
    });
  } catch (error) {
    console.error('Error importing CSV transactions:', error);
    res.status(500).json({ error: 'Failed to import CSV transactions' });
  }
});

// Get imported CSV transactions
router.get('/csv-transactions', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const { batchId, limit = 100, offset = 0 } = req.query;
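
    // e.g. GET /csv-transactions?batchId=import_1700000000000&limit=50&offset=0
    // (illustrative values; limit and offset default to 100 and 0)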

    let query = `
      SELECT
        csv.*,
        k.name as kreditor_name,
        k.kreditorId as kreditor_id,
        k.is_banking as kreditor_is_banking,
        bat.assigned_kreditor_id,
        ak.name as assigned_kreditor_name
      FROM fibdash.CSVTransactions csv
      LEFT JOIN fibdash.Kreditor k ON csv.kontonummer_iban = k.iban
      LEFT JOIN fibdash.BankingAccountTransactions bat ON csv.id = bat.csv_transaction_id
      LEFT JOIN fibdash.Kreditor ak ON bat.assigned_kreditor_id = ak.id
    `;

    const params = {};

    if (batchId) {
      query += ' WHERE csv.import_batch_id = @batchId';
      params.batchId = batchId;
    }

    // OFFSET/FETCH pagination; in T-SQL this requires the ORDER BY before it
    query += ' ORDER BY csv.parsed_date DESC, csv.id DESC';
    query += ' OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY';

    params.offset = parseInt(offset, 10);
    params.limit = parseInt(limit, 10);

    const result = await executeQuery(query, params);

    res.json(result.recordset);
  } catch (error) {
    console.error('Error fetching CSV transactions:', error);
    res.status(500).json({ error: 'Failed to fetch CSV transactions' });
  }
});

// Get CSV import batches
router.get('/csv-import-batches', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
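
    // One row per import batch: first import timestamp, total row count, and
    // how many rows have already been processed downstream.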
    const query = `
      SELECT
        import_batch_id,
        source_filename,
        MIN(import_date) as import_date,
        COUNT(*) as transaction_count,
        SUM(CASE WHEN is_processed = 1 THEN 1 ELSE 0 END) as processed_count
      FROM fibdash.CSVTransactions
      GROUP BY import_batch_id, source_filename
      ORDER BY MIN(import_date) DESC
    `;

    const result = await executeQuery(query);

    res.json(result.recordset);
  } catch (error) {
    console.error('Error fetching import batches:', error);
    res.status(500).json({ error: 'Failed to fetch import batches' });
  }
});
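
// A "Beleglink" row maps a DATEV document ID (Beleg-ID) back to the source
// record in the eazybusiness database, where it is stored as `datevlink`.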

// Import DATEV Beleglinks to database
router.post('/import-datev-beleglinks', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../../config/database');
    const { beleglinks, filename, batchId, headers } = req.body;

    if (!beleglinks || !Array.isArray(beleglinks)) {
      return res.status(400).json({ error: 'Beleglinks array is required' });
    }

    // Expected DATEV CSV headers from the example (currently informational
    // only; this route does not validate the uploaded headers against them)
    const expectedHeaders = [
      'Belegart', 'Geschäftspartner-Name', 'Geschäftspartner-Konto', 'Rechnungsbetrag', 'WKZ',
      'Rechnungs-Nr.', 'Interne Re.-Nr.', 'Rechnungsdatum', 'BU', 'Konto', 'Konto-Bezeichnung',
      'Ware/Leistung', 'Zahlungszuordnung', 'Kontoumsatzzuordnung', 'Gebucht', 'Festgeschrieben',
      'Kopie', 'Eingangsdatum', 'Bezahlt', 'BezahltAm', 'Geschäftspartner-Ort', 'Skonto-Betrag 1',
      'Fällig mit Skonto 1', 'Skonto 1 in %', 'Skonto-Betrag 2', 'Fällig mit Skonto 2',
      'Skonto 2 in %', 'Fällig ohne Skonto', 'Steuer in %', 'USt-IdNr.', 'Kunden-Nr.',
      'KOST 1', 'KOST 2', 'KOST-Menge', 'Kurs', 'Nachricht', 'Freier Text', 'IBAN', 'BIC',
      'Bankkonto-Nr.', 'BLZ', 'Notiz', 'Land', 'Personalnummer', 'Nachname', 'Vorname',
      'Belegkategorie', 'Bezeichnung', 'Abrechnungsmonat', 'Gültig bis', 'Prüfungsrelevant',
      'Ablageort', 'Belegtyp', 'Herkunft', 'Leistungsdatum', 'Buchungstext', 'Beleg-ID',
      'Zahlungsbedingung', 'Geheftet', 'Gegenkonto', 'keine Überweisung/Lastschrift erstellen',
      'Aufgeteilt', 'Bereitgestellt', 'Freigegeben', 'FreigegebenAm', 'Erweiterte Belegdaten fehlen',
      'Periode fehlt', 'Rechnungsdaten beim Import fehlen'
    ];

    if (beleglinks.length === 0) {
      return res.status(400).json({ error: 'No beleglink data found' });
    }

    const importBatchId = batchId || 'datev_import_' + Date.now();
    let successCount = 0;
    let errorCount = 0;
    let updateCount = 0;
    let insertCount = 0;
    let skippedCount = 0;
    const errors = [];
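
    // Per row: skip rows without a Beleg-ID, skip Beleg-IDs that are already
    // linked, then attach the Beleg-ID as datevlink to the record identified
    // by the 'Herkunft' filename (UmsatzBeleg{key}.pdf or Rechnung{key}.pdf).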
    for (let i = 0; i < beleglinks.length; i++) {
      const beleglink = beleglinks[i];

      try {
        // Skip empty rows or rows without Beleg-ID
        const belegId = beleglink['Beleg-ID'];
        if (!belegId || belegId.trim() === '') {
          console.log(`Skipping row ${i + 1}: No Beleg-ID found`);
          skippedCount++;
          continue;
        }

        // Parse amount if available (German format: drop thousands dots,
        // convert the decimal comma). Currently informational only; the
        // value is not persisted by this route.
        let numericAmount = null;
        if (beleglink['Rechnungsbetrag']) {
          const amountStr = beleglink['Rechnungsbetrag'].toString().replace(/[^\d,.-]/g, '');
          const normalizedAmount = amountStr.replace(/\./g, '').replace(',', '.');
          numericAmount = parseFloat(normalizedAmount) || null;
        }

        // Parse date if available (German day-first format; also currently
        // informational only)
        let parsedDate = null;
        if (beleglink['Rechnungsdatum']) {
          const dateStr = beleglink['Rechnungsdatum'].trim();
          const dateParts = dateStr.split(/[.\/\-]/);
          if (dateParts.length === 3) {
            const day = parseInt(dateParts[0], 10);
            const month = parseInt(dateParts[1], 10) - 1;
            let year = parseInt(dateParts[2], 10);

            if (year < 100) {
              year += (year < 50) ? 2000 : 1900;
            }

            parsedDate = new Date(year, month, day);

            if (isNaN(parsedDate.getTime())) {
              parsedDate = null;
            }
          }
        }

        // First, check if a record with this datevlink already exists
        const checkExistingDatevLink = `
          SELECT kUmsatzBeleg FROM eazybusiness.dbo.tUmsatzBeleg WHERE datevlink = @datevlink
        `;

        const existingDatevLink = await executeQuery(checkExistingDatevLink, { datevlink: belegId });

        if (existingDatevLink.recordset.length > 0) {
          // Record with this datevlink already exists - skip
          console.log(`Datevlink already exists, skipping: ${belegId}`);
          skippedCount++;
          continue;
        }

        // Extract key from filename in 'Herkunft' column
        // Examples: "Rechnung146.pdf"    -> key 146 for tPdfObjekt
        //           "UmsatzBeleg192.pdf" -> key 192 for tUmsatzBeleg
        const herkunft = beleglink['Herkunft'];
        if (!herkunft || herkunft.trim() === '') {
          console.log(`Skipping row ${i + 1}: No filename in Herkunft column`);
          skippedCount++;
          continue;
        }

        // Extract the key from filename patterns
        let matchFound = false;

        // Pattern: UmsatzBeleg{key}.pdf -> match with tUmsatzBeleg.kUmsatzBeleg
        const umsatzBelegMatch = herkunft.match(/UmsatzBeleg(\d+)\.pdf/i);
        if (umsatzBelegMatch) {
          const kUmsatzBeleg = parseInt(umsatzBelegMatch[1], 10);

          const updateQuery = `
            UPDATE eazybusiness.dbo.tUmsatzBeleg
            SET datevlink = @datevlink
            WHERE kUmsatzBeleg = @kUmsatzBeleg AND (datevlink IS NULL OR datevlink = '')
          `;

          const updateResult = await executeQuery(updateQuery, {
            datevlink: belegId,
            kUmsatzBeleg: kUmsatzBeleg
          });

          if (updateResult.rowsAffected && updateResult.rowsAffected[0] > 0) {
            updateCount++;
            console.log(`Added datevlink ${belegId} to tUmsatzBeleg.kUmsatzBeleg: ${kUmsatzBeleg}`);
            matchFound = true;
          } else {
            console.log(`Skipping row ${i + 1}: UmsatzBeleg ${kUmsatzBeleg} not found or datevlink already set`);
            skippedCount++;
          }
        }

        // Pattern: Rechnung{key}.pdf -> match with tPdfObjekt.kPdfObjekt
        const rechnungMatch = herkunft.match(/Rechnung(\d+)\.pdf/i);
        if (!matchFound && rechnungMatch) {
          const kPdfObjekt = parseInt(rechnungMatch[1], 10);

          const updateQuery = `
            UPDATE eazybusiness.dbo.tPdfObjekt
            SET datevlink = @datevlink
            WHERE kPdfObjekt = @kPdfObjekt AND (datevlink IS NULL OR datevlink = '')
          `;

          const updateResult = await executeQuery(updateQuery, {
            datevlink: belegId,
            kPdfObjekt: kPdfObjekt
          });

          if (updateResult.rowsAffected && updateResult.rowsAffected[0] > 0) {
            updateCount++;
            console.log(`Added datevlink ${belegId} to tPdfObjekt.kPdfObjekt: ${kPdfObjekt}`);
            matchFound = true;
          } else {
            console.log(`Skipping row ${i + 1}: PdfObjekt ${kPdfObjekt} not found or datevlink already set`);
            skippedCount++;
          }
        }

        if (!matchFound) {
          console.log(`Skipping row ${i + 1}: Unrecognized filename '${herkunft}' (expected: UmsatzBeleg{key}.pdf or Rechnung{key}.pdf)`);
          skippedCount++;
          continue;
        }

        successCount++;
      } catch (error) {
        console.error('Error processing beleglink ' + (i + 1) + ':', error);
        errors.push({
          row: i + 1,
          error: error.message,
          beleglink: beleglink
        });
        errorCount++;
      }
    }

    res.json({
      success: true,
      batchId: importBatchId,
      imported: updateCount, // datevlinks actually added/updated
      processed: successCount,
      updated: updateCount,
      inserted: insertCount,
      skipped: skippedCount, // rows skipped (missing IDs, existing links, unmatched files)
      errors: errorCount, // only actual errors, not skipped rows
      details: errors.length > 0 ? errors : undefined,
      message: `${updateCount} datevlinks added, ${skippedCount} skipped, ${errorCount} errors`
    });
  } catch (error) {
    console.error('Error importing DATEV beleglinks:', error);
    res.status(500).json({ error: 'Failed to import DATEV beleglinks' });
  }
});

module.exports = router;