Remove data.csv file and update README to reflect new features including CSV import and banking account management. Enhance TransactionsTable and KreditorTable components with banking account handling, including UI updates and validation logic. Update SQL schema to support banking accounts and adjust API routes for improved data handling. Implement new document rendering logic for banking transactions and enhance recipient rendering with banking account status. Add new views and indexes for better transaction management.
This commit is contained in:
124
src/database/csv_transactions_schema.sql
Normal file
124
src/database/csv_transactions_schema.sql
Normal file
@@ -0,0 +1,124 @@
|
||||
-- CSV Transactions Import Schema
|
||||
-- This script creates a table to store imported CSV transaction data
|
||||
|
||||
-- Create CSVTransactions table to store imported CSV data
|
||||
CREATE TABLE fibdash.CSVTransactions (
|
||||
id INT IDENTITY(1,1) PRIMARY KEY,
|
||||
|
||||
-- Original CSV columns (German names as they appear in CSV)
|
||||
buchungstag NVARCHAR(50), -- "Buchungstag"
|
||||
wertstellung NVARCHAR(50), -- "Wertstellung"
|
||||
umsatzart NVARCHAR(100), -- "Umsatzart"
|
||||
betrag DECIMAL(15,2), -- "Betrag" (numeric value)
|
||||
betrag_original NVARCHAR(50), -- Original string from CSV
|
||||
waehrung NVARCHAR(10), -- "Waehrung"
|
||||
beguenstigter_zahlungspflichtiger NVARCHAR(500), -- "Beguenstigter/Zahlungspflichtiger"
|
||||
kontonummer_iban NVARCHAR(50), -- "Kontonummer/IBAN"
|
||||
bic NVARCHAR(20), -- "BIC"
|
||||
verwendungszweck NVARCHAR(1000), -- "Verwendungszweck"
|
||||
|
||||
-- Processed/computed fields
|
||||
parsed_date DATE, -- Parsed buchungstag
|
||||
numeric_amount DECIMAL(15,2), -- Processed amount
|
||||
|
||||
-- Import metadata
|
||||
import_date DATETIME2 NOT NULL DEFAULT GETDATE(),
|
||||
import_batch_id NVARCHAR(100), -- To group imports from same file
|
||||
source_filename NVARCHAR(255), -- Original CSV filename
|
||||
source_row_number INT, -- Row number in original CSV
|
||||
|
||||
-- Processing status
|
||||
is_processed BIT NOT NULL DEFAULT 0, -- Whether this transaction has been processed
|
||||
processing_notes NVARCHAR(500), -- Any processing notes or errors
|
||||
|
||||
-- Create indexes for performance
|
||||
INDEX IX_CSVTransactions_IBAN (kontonummer_iban),
|
||||
INDEX IX_CSVTransactions_Date (parsed_date),
|
||||
INDEX IX_CSVTransactions_Amount (numeric_amount),
|
||||
INDEX IX_CSVTransactions_ImportBatch (import_batch_id),
|
||||
INDEX IX_CSVTransactions_Processed (is_processed)
|
||||
);
|
||||
|
||||
-- Update BankingAccountTransactions to reference CSVTransactions
|
||||
-- Add a new column to support both AccountingItems and CSVTransactions
|
||||
ALTER TABLE fibdash.BankingAccountTransactions
|
||||
ADD csv_transaction_id INT NULL;
|
||||
|
||||
-- Add foreign key constraint
|
||||
ALTER TABLE fibdash.BankingAccountTransactions
|
||||
ADD CONSTRAINT FK_BankingAccountTransactions_CSVTransactions
|
||||
FOREIGN KEY (csv_transaction_id) REFERENCES fibdash.CSVTransactions(id);
|
||||
|
||||
-- Create index for the new column
|
||||
CREATE INDEX IX_BankingAccountTransactions_CSVTransactionId
|
||||
ON fibdash.BankingAccountTransactions(csv_transaction_id);
|
||||
|
||||
-- Update the view to include CSV transactions
|
||||
DROP VIEW IF EXISTS fibdash.vw_TransactionsWithKreditors;
|
||||
GO
|
||||
|
||||
CREATE VIEW fibdash.vw_TransactionsWithKreditors AS
|
||||
-- AccountingItems transactions
|
||||
SELECT
|
||||
'AccountingItems' as source_table,
|
||||
ai.id as transaction_id,
|
||||
NULL as csv_transaction_id,
|
||||
ai.umsatz_brutto as amount,
|
||||
ai.buchungsdatum as transaction_date,
|
||||
NULL as kontonummer_iban, -- AccountingItems uses gegenkonto
|
||||
ai.buchungstext as description,
|
||||
k.name as kreditor_name,
|
||||
k.kreditorId as kreditor_id,
|
||||
k.is_banking as kreditor_is_banking,
|
||||
bat.assigned_kreditor_id,
|
||||
ak.name as assigned_kreditor_name,
|
||||
ak.kreditorId as assigned_kreditor_id_code,
|
||||
bat.assigned_date,
|
||||
bat.notes as assignment_notes,
|
||||
CASE
|
||||
WHEN k.is_banking = 1 AND bat.assigned_kreditor_id IS NOT NULL THEN 'banking_assigned'
|
||||
WHEN k.is_banking = 1 AND bat.assigned_kreditor_id IS NULL THEN 'banking_unassigned'
|
||||
WHEN k.is_banking = 0 THEN 'regular_kreditor'
|
||||
ELSE 'no_kreditor'
|
||||
END as transaction_type
|
||||
FROM fibdash.AccountingItems ai
|
||||
LEFT JOIN fibdash.Kreditor k ON ai.gegenkonto = k.kreditorId
|
||||
LEFT JOIN fibdash.BankingAccountTransactions bat ON ai.id = bat.transaction_id
|
||||
LEFT JOIN fibdash.Kreditor ak ON bat.assigned_kreditor_id = ak.id
|
||||
|
||||
UNION ALL
|
||||
|
||||
-- CSV transactions
|
||||
SELECT
|
||||
'CSVTransactions' as source_table,
|
||||
NULL as transaction_id,
|
||||
csv.id as csv_transaction_id,
|
||||
csv.numeric_amount as amount,
|
||||
csv.parsed_date as transaction_date,
|
||||
csv.kontonummer_iban,
|
||||
csv.verwendungszweck as description,
|
||||
k.name as kreditor_name,
|
||||
k.kreditorId as kreditor_id,
|
||||
k.is_banking as kreditor_is_banking,
|
||||
bat.assigned_kreditor_id,
|
||||
ak.name as assigned_kreditor_name,
|
||||
ak.kreditorId as assigned_kreditor_id_code,
|
||||
bat.assigned_date,
|
||||
bat.notes as assignment_notes,
|
||||
CASE
|
||||
WHEN k.is_banking = 1 AND bat.assigned_kreditor_id IS NOT NULL THEN 'banking_assigned'
|
||||
WHEN k.is_banking = 1 AND bat.assigned_kreditor_id IS NULL THEN 'banking_unassigned'
|
||||
WHEN k.is_banking = 0 THEN 'regular_kreditor'
|
||||
ELSE 'no_kreditor'
|
||||
END as transaction_type
|
||||
FROM fibdash.CSVTransactions csv
|
||||
LEFT JOIN fibdash.Kreditor k ON csv.kontonummer_iban = k.iban
|
||||
LEFT JOIN fibdash.BankingAccountTransactions bat ON csv.id = bat.csv_transaction_id
|
||||
LEFT JOIN fibdash.Kreditor ak ON bat.assigned_kreditor_id = ak.id;
|
||||
|
||||
GO
|
||||
|
||||
PRINT 'CSV Transactions schema created successfully!';
|
||||
PRINT 'Created CSVTransactions table';
|
||||
PRINT 'Updated BankingAccountTransactions table';
|
||||
PRINT 'Updated vw_TransactionsWithKreditors view';
|
||||
@@ -9,17 +9,20 @@ GO
|
||||
|
||||
-- Create Kreditor table
|
||||
-- Multiple IBANs can have the same kreditor name and kreditorId
|
||||
-- IBAN can be NULL for Kreditors that don't have an IBAN (for banking account assignments)
|
||||
-- is_banking flag indicates if this IBAN represents a banking account (like PayPal) rather than a direct creditor
|
||||
CREATE TABLE fibdash.Kreditor (
|
||||
id INT IDENTITY(1,1) PRIMARY KEY,
|
||||
iban NVARCHAR(34) NOT NULL,
|
||||
iban NVARCHAR(34) NULL, -- Nullable to allow Kreditors without IBAN
|
||||
name NVARCHAR(255) NOT NULL,
|
||||
kreditorId NVARCHAR(50) NOT NULL
|
||||
kreditorId NVARCHAR(50) NOT NULL,
|
||||
is_banking BIT NOT NULL DEFAULT 0 -- 1 = banking account, 0 = regular creditor
|
||||
);
|
||||
|
||||
-- Create unique index on IBAN to prevent duplicate IBANs
|
||||
-- but allow same kreditorId and name for multiple IBANs
|
||||
ALTER TABLE fibdash.Kreditor
|
||||
ADD CONSTRAINT UQ_Kreditor_IBAN UNIQUE (iban);
|
||||
-- Create unique index on IBAN to prevent duplicate IBANs (allows NULL values)
|
||||
CREATE UNIQUE INDEX UQ_Kreditor_IBAN_NotNull
|
||||
ON fibdash.Kreditor(iban)
|
||||
WHERE iban IS NOT NULL;
|
||||
|
||||
-- Create AccountingItems table
|
||||
-- Based on CSV structure: umsatz brutto, soll/haben kz, konto, gegenkonto, bu, buchungsdatum, rechnungsnummer, buchungstext, beleglink
|
||||
@@ -86,10 +89,14 @@ CSV
|
||||
-- Create indexes for better performance
|
||||
CREATE INDEX IX_Kreditor_IBAN ON fibdash.Kreditor(iban);
|
||||
CREATE INDEX IX_Kreditor_KreditorId ON fibdash.Kreditor(kreditorId);
|
||||
CREATE INDEX IX_Kreditor_IsBanking ON fibdash.Kreditor(is_banking);
|
||||
CREATE INDEX IX_AccountingItems_Buchungsdatum ON fibdash.AccountingItems(buchungsdatum);
|
||||
CREATE INDEX IX_AccountingItems_Konto ON fibdash.AccountingItems(konto);
|
||||
CREATE INDEX IX_AccountingItems_Rechnungsnummer ON fibdash.AccountingItems(rechnungsnummer);
|
||||
CREATE INDEX IX_AccountingItems_SollHabenKz ON fibdash.AccountingItems(soll_haben_kz);
|
||||
CREATE INDEX IX_BankingAccountTransactions_TransactionId ON fibdash.BankingAccountTransactions(transaction_id);
|
||||
CREATE INDEX IX_BankingAccountTransactions_BankingIban ON fibdash.BankingAccountTransactions(banking_iban);
|
||||
CREATE INDEX IX_BankingAccountTransactions_AssignedKreditorId ON fibdash.BankingAccountTransactions(assigned_kreditor_id);
|
||||
|
||||
-- Add FK from AccountingItems.bu -> BU(bu)
|
||||
ALTER TABLE fibdash.AccountingItems
|
||||
@@ -106,6 +113,25 @@ ALTER TABLE fibdash.AccountingItems
|
||||
ADD CONSTRAINT FK_AccountingItems_Konto_Konto
|
||||
FOREIGN KEY (konto) REFERENCES fibdash.Konto(konto);
|
||||
|
||||
-- Create BankingAccountTransactions table to map banking account transactions to Kreditors
|
||||
-- This table handles cases where an IBAN is a banking account (like PayPal) and needs
|
||||
-- to be mapped to the actual creditor for accounting purposes
|
||||
CREATE TABLE fibdash.BankingAccountTransactions (
|
||||
id INT IDENTITY(1,1) PRIMARY KEY,
|
||||
transaction_id INT NOT NULL, -- References AccountingItems.id
|
||||
banking_iban NVARCHAR(34) NOT NULL, -- The banking account IBAN (e.g., PayPal)
|
||||
assigned_kreditor_id INT NOT NULL, -- References Kreditor.id for the actual creditor
|
||||
assigned_date DATETIME2 NOT NULL DEFAULT GETDATE(),
|
||||
assigned_by NVARCHAR(100), -- User who made the assignment
|
||||
notes NVARCHAR(500), -- Optional notes about the assignment
|
||||
|
||||
-- Foreign key constraints
|
||||
CONSTRAINT FK_BankingAccountTransactions_AccountingItems
|
||||
FOREIGN KEY (transaction_id) REFERENCES fibdash.AccountingItems(id),
|
||||
CONSTRAINT FK_BankingAccountTransactions_Kreditor
|
||||
FOREIGN KEY (assigned_kreditor_id) REFERENCES fibdash.Kreditor(id)
|
||||
);
|
||||
|
||||
-- Add vst column to existing BU table (for databases created before this update)
|
||||
-- IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID('fibdash.BU') AND name = 'vst')
|
||||
-- BEGIN
|
||||
@@ -124,4 +150,28 @@ FOREIGN KEY (konto) REFERENCES fibdash.Konto(konto);
|
||||
-- ('9', '19% VST', 19.00),
|
||||
-- ('8', '7% VST', 7.00),
|
||||
-- ('506', 'Dienstleistung aus EU', NULL),
|
||||
-- ('511', 'Dienstleistung außerhalb EU', NULL);
|
||||
-- ('511', 'Dienstleistung außerhalb EU', NULL);
|
||||
|
||||
-- Create view to easily query transactions with their assigned Kreditors
|
||||
-- This view combines regular transactions with banking account assignments
|
||||
CREATE VIEW fibdash.vw_TransactionsWithKreditors AS
|
||||
SELECT
|
||||
ai.*,
|
||||
k.name as kreditor_name,
|
||||
k.kreditorId as kreditor_id,
|
||||
k.is_banking as kreditor_is_banking,
|
||||
bat.assigned_kreditor_id,
|
||||
ak.name as assigned_kreditor_name,
|
||||
ak.kreditorId as assigned_kreditor_id_code,
|
||||
bat.assigned_date,
|
||||
bat.notes as assignment_notes,
|
||||
CASE
|
||||
WHEN k.is_banking = 1 AND bat.assigned_kreditor_id IS NOT NULL THEN 'banking_assigned'
|
||||
WHEN k.is_banking = 1 AND bat.assigned_kreditor_id IS NULL THEN 'banking_unassigned'
|
||||
WHEN k.is_banking = 0 THEN 'regular_kreditor'
|
||||
ELSE 'no_kreditor'
|
||||
END as transaction_type
|
||||
FROM fibdash.AccountingItems ai
|
||||
LEFT JOIN fibdash.Kreditor k ON ai.gegenkonto = k.kreditorId
|
||||
LEFT JOIN fibdash.BankingAccountTransactions bat ON ai.id = bat.transaction_id
|
||||
LEFT JOIN fibdash.Kreditor ak ON bat.assigned_kreditor_id = ak.id;
|
||||
@@ -22,7 +22,7 @@ router.get('/system-info', authenticateToken, (req, res) => {
|
||||
// Get all kreditoren
|
||||
router.get('/kreditoren', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const result = await executeQuery('SELECT id, iban, name, kreditorId FROM fibdash.Kreditor ORDER BY name, iban');
|
||||
const result = await executeQuery('SELECT id, iban, name, kreditorId, is_banking FROM fibdash.Kreditor ORDER BY name, iban');
|
||||
res.json({ kreditoren: result.recordset });
|
||||
} catch (error) {
|
||||
console.error('Error fetching kreditoren:', error);
|
||||
@@ -32,22 +32,30 @@ router.get('/kreditoren', authenticateToken, async (req, res) => {
|
||||
|
||||
// Create new kreditor
|
||||
router.post('/kreditoren', authenticateToken, async (req, res) => {
|
||||
const { iban, name, kreditorId } = req.body;
|
||||
const { iban, name, kreditorId, is_banking } = req.body;
|
||||
|
||||
if (!iban || !name || !kreditorId) {
|
||||
return res.status(400).json({ error: 'IBAN, Name und Kreditor ID sind erforderlich' });
|
||||
// IBAN is optional for banking accounts or manual kreditor assignments
|
||||
const isBanking = is_banking || false;
|
||||
|
||||
if (!name || !kreditorId) {
|
||||
return res.status(400).json({ error: 'Name und Kreditor ID sind erforderlich' });
|
||||
}
|
||||
|
||||
// IBAN validation - required for non-banking accounts
|
||||
if (!isBanking && (!iban || iban.trim() === '')) {
|
||||
return res.status(400).json({ error: 'IBAN ist erforderlich (außer für Banking-Konten)' });
|
||||
}
|
||||
|
||||
try {
|
||||
await executeQuery(
|
||||
'INSERT INTO fibdash.Kreditor (iban, name, kreditorId) VALUES (@iban, @name, @kreditorId)',
|
||||
{ iban, name, kreditorId }
|
||||
'INSERT INTO fibdash.Kreditor (iban, name, kreditorId, is_banking) VALUES (@iban, @name, @kreditorId, @is_banking)',
|
||||
{ iban: iban || null, name, kreditorId, is_banking: isBanking }
|
||||
);
|
||||
res.json({ message: 'Kreditor erfolgreich erstellt' });
|
||||
} catch (error) {
|
||||
console.error('Error creating kreditor:', error);
|
||||
if (error.number === 2627) { // Unique constraint violation
|
||||
res.status(400).json({ error: 'Kreditor ID bereits vorhanden' });
|
||||
res.status(400).json({ error: 'IBAN oder Kreditor ID bereits vorhanden' });
|
||||
} else {
|
||||
res.status(500).json({ error: 'Fehler beim Erstellen des Kreditors' });
|
||||
}
|
||||
@@ -57,22 +65,30 @@ router.post('/kreditoren', authenticateToken, async (req, res) => {
|
||||
// Update kreditor
|
||||
router.put('/kreditoren/:id', authenticateToken, async (req, res) => {
|
||||
const { id } = req.params;
|
||||
const { iban, name, kreditorId } = req.body;
|
||||
const { iban, name, kreditorId, is_banking } = req.body;
|
||||
|
||||
if (!iban || !name || !kreditorId) {
|
||||
return res.status(400).json({ error: 'IBAN, Name und Kreditor ID sind erforderlich' });
|
||||
// IBAN is optional for banking accounts or manual kreditor assignments
|
||||
const isBanking = is_banking || false;
|
||||
|
||||
if (!name || !kreditorId) {
|
||||
return res.status(400).json({ error: 'Name und Kreditor ID sind erforderlich' });
|
||||
}
|
||||
|
||||
// IBAN validation - required for non-banking accounts
|
||||
if (!isBanking && (!iban || iban.trim() === '')) {
|
||||
return res.status(400).json({ error: 'IBAN ist erforderlich (außer für Banking-Konten)' });
|
||||
}
|
||||
|
||||
try {
|
||||
await executeQuery(
|
||||
'UPDATE fibdash.Kreditor SET iban = @iban, name = @name, kreditorId = @kreditorId WHERE id = @id',
|
||||
{ iban, name, kreditorId, id }
|
||||
'UPDATE fibdash.Kreditor SET iban = @iban, name = @name, kreditorId = @kreditorId, is_banking = @is_banking WHERE id = @id',
|
||||
{ iban: iban || null, name, kreditorId, is_banking: isBanking, id }
|
||||
);
|
||||
res.json({ message: 'Kreditor erfolgreich aktualisiert' });
|
||||
} catch (error) {
|
||||
console.error('Error updating kreditor:', error);
|
||||
if (error.number === 2627) { // Unique constraint violation
|
||||
res.status(400).json({ error: 'Kreditor ID bereits vorhanden' });
|
||||
res.status(400).json({ error: 'IBAN oder Kreditor ID bereits vorhanden' });
|
||||
} else {
|
||||
res.status(500).json({ error: 'Fehler beim Aktualisieren des Kreditors' });
|
||||
}
|
||||
|
||||
@@ -239,7 +239,7 @@ router.get('/transactions/:timeRange', authenticateToken, async (req, res) => {
|
||||
let kreditorData = [];
|
||||
try {
|
||||
const { executeQuery } = require('../config/database');
|
||||
const kreditorQuery = `SELECT id, iban, name, kreditorId FROM fibdash.Kreditor`;
|
||||
const kreditorQuery = `SELECT id, iban, name, kreditorId, is_banking FROM fibdash.Kreditor`;
|
||||
const kreditorResult = await executeQuery(kreditorQuery);
|
||||
kreditorData = kreditorResult.recordset || [];
|
||||
} catch (error) {
|
||||
@@ -284,7 +284,8 @@ router.get('/transactions/:timeRange', authenticateToken, async (req, res) => {
|
||||
id: kreditorMatch.id,
|
||||
name: kreditorMatch.name,
|
||||
kreditorId: kreditorMatch.kreditorId,
|
||||
iban: kreditorMatch.iban
|
||||
iban: kreditorMatch.iban,
|
||||
is_banking: Boolean(kreditorMatch.is_banking)
|
||||
} : null,
|
||||
hasKreditor: !!kreditorMatch
|
||||
};
|
||||
@@ -612,7 +613,7 @@ router.get('/kreditors/:id', authenticateToken, async (req, res) => {
|
||||
const { id } = req.params;
|
||||
|
||||
const query = `
|
||||
SELECT id, iban, name, kreditorId
|
||||
SELECT id, iban, name, kreditorId, is_banking
|
||||
FROM fibdash.Kreditor
|
||||
WHERE id = @id
|
||||
`;
|
||||
@@ -634,32 +635,47 @@ router.get('/kreditors/:id', authenticateToken, async (req, res) => {
|
||||
router.post('/kreditors', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { executeQuery } = require('../config/database');
|
||||
const { iban, name, kreditorId } = req.body;
|
||||
const { iban, name, kreditorId, is_banking } = req.body;
|
||||
|
||||
// IBAN is optional for banking accounts or manual kreditor assignments
|
||||
const isBanking = is_banking || false;
|
||||
|
||||
// Validate required fields
|
||||
if (!iban || !name || !kreditorId) {
|
||||
return res.status(400).json({ error: 'IBAN, name, and kreditorId are required' });
|
||||
if (!name || !kreditorId) {
|
||||
return res.status(400).json({ error: 'Name and kreditorId are required' });
|
||||
}
|
||||
|
||||
// Check if IBAN already exists (only IBAN needs to be unique)
|
||||
const checkQuery = `
|
||||
SELECT id FROM fibdash.Kreditor
|
||||
WHERE iban = @iban
|
||||
`;
|
||||
// IBAN validation - required for non-banking accounts
|
||||
if (!isBanking && (!iban || iban.trim() === '')) {
|
||||
return res.status(400).json({ error: 'IBAN is required (except for banking accounts)' });
|
||||
}
|
||||
|
||||
const checkResult = await executeQuery(checkQuery, { iban });
|
||||
|
||||
if (checkResult.recordset.length > 0) {
|
||||
return res.status(409).json({ error: 'Kreditor with this IBAN already exists' });
|
||||
// Check if IBAN already exists (only if IBAN is provided)
|
||||
if (iban && iban.trim() !== '') {
|
||||
const checkQuery = `
|
||||
SELECT id FROM fibdash.Kreditor
|
||||
WHERE iban = @iban
|
||||
`;
|
||||
|
||||
const checkResult = await executeQuery(checkQuery, { iban });
|
||||
|
||||
if (checkResult.recordset.length > 0) {
|
||||
return res.status(409).json({ error: 'Kreditor with this IBAN already exists' });
|
||||
}
|
||||
}
|
||||
|
||||
const insertQuery = `
|
||||
INSERT INTO fibdash.Kreditor (iban, name, kreditorId)
|
||||
OUTPUT INSERTED.id, INSERTED.iban, INSERTED.name, INSERTED.kreditorId
|
||||
VALUES (@iban, @name, @kreditorId)
|
||||
INSERT INTO fibdash.Kreditor (iban, name, kreditorId, is_banking)
|
||||
OUTPUT INSERTED.id, INSERTED.iban, INSERTED.name, INSERTED.kreditorId, INSERTED.is_banking
|
||||
VALUES (@iban, @name, @kreditorId, @is_banking)
|
||||
`;
|
||||
|
||||
const result = await executeQuery(insertQuery, { iban, name, kreditorId });
|
||||
const result = await executeQuery(insertQuery, {
|
||||
iban: iban || null,
|
||||
name,
|
||||
kreditorId,
|
||||
is_banking: isBanking
|
||||
});
|
||||
|
||||
res.status(201).json(result.recordset[0]);
|
||||
} catch (error) {
|
||||
@@ -673,11 +689,19 @@ router.put('/kreditors/:id', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { executeQuery } = require('../config/database');
|
||||
const { id } = req.params;
|
||||
const { iban, name, kreditorId } = req.body;
|
||||
const { iban, name, kreditorId, is_banking } = req.body;
|
||||
|
||||
// IBAN is optional for banking accounts or manual kreditor assignments
|
||||
const isBanking = is_banking || false;
|
||||
|
||||
// Validate required fields
|
||||
if (!iban || !name || !kreditorId) {
|
||||
return res.status(400).json({ error: 'IBAN, name, and kreditorId are required' });
|
||||
if (!name || !kreditorId) {
|
||||
return res.status(400).json({ error: 'Name and kreditorId are required' });
|
||||
}
|
||||
|
||||
// IBAN validation - required for non-banking accounts
|
||||
if (!isBanking && (!iban || iban.trim() === '')) {
|
||||
return res.status(400).json({ error: 'IBAN is required (except for banking accounts)' });
|
||||
}
|
||||
|
||||
// Check if kreditor exists
|
||||
@@ -688,26 +712,34 @@ router.put('/kreditors/:id', authenticateToken, async (req, res) => {
|
||||
return res.status(404).json({ error: 'Kreditor not found' });
|
||||
}
|
||||
|
||||
// Check for conflicts with other kreditors (only IBAN needs to be unique)
|
||||
const conflictQuery = `
|
||||
SELECT id FROM fibdash.Kreditor
|
||||
WHERE iban = @iban AND id != @id
|
||||
`;
|
||||
|
||||
const conflictResult = await executeQuery(conflictQuery, { iban, id: parseInt(id) });
|
||||
|
||||
if (conflictResult.recordset.length > 0) {
|
||||
return res.status(409).json({ error: 'Another kreditor with this IBAN already exists' });
|
||||
// Check for conflicts with other kreditors (only if IBAN is provided)
|
||||
if (iban && iban.trim() !== '') {
|
||||
const conflictQuery = `
|
||||
SELECT id FROM fibdash.Kreditor
|
||||
WHERE iban = @iban AND id != @id
|
||||
`;
|
||||
|
||||
const conflictResult = await executeQuery(conflictQuery, { iban, id: parseInt(id) });
|
||||
|
||||
if (conflictResult.recordset.length > 0) {
|
||||
return res.status(409).json({ error: 'Another kreditor with this IBAN already exists' });
|
||||
}
|
||||
}
|
||||
|
||||
const updateQuery = `
|
||||
UPDATE fibdash.Kreditor
|
||||
SET iban = @iban, name = @name, kreditorId = @kreditorId
|
||||
OUTPUT INSERTED.id, INSERTED.iban, INSERTED.name, INSERTED.kreditorId
|
||||
SET iban = @iban, name = @name, kreditorId = @kreditorId, is_banking = @is_banking
|
||||
OUTPUT INSERTED.id, INSERTED.iban, INSERTED.name, INSERTED.kreditorId, INSERTED.is_banking
|
||||
WHERE id = @id
|
||||
`;
|
||||
|
||||
const result = await executeQuery(updateQuery, { iban, name, kreditorId, id: parseInt(id) });
|
||||
const result = await executeQuery(updateQuery, {
|
||||
iban: iban || null,
|
||||
name,
|
||||
kreditorId,
|
||||
is_banking: isBanking,
|
||||
id: parseInt(id)
|
||||
});
|
||||
|
||||
res.json(result.recordset[0]);
|
||||
} catch (error) {
|
||||
@@ -740,4 +772,545 @@ router.delete('/kreditors/:id', authenticateToken, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Banking Account Transactions endpoints
|
||||
|
||||
// Get banking account transactions for a specific transaction
|
||||
router.get('/banking-transactions/:transactionId', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { executeQuery } = require('../config/database');
|
||||
const { transactionId } = req.params;
|
||||
|
||||
const query = `
|
||||
SELECT
|
||||
bat.*,
|
||||
k.name as assigned_kreditor_name,
|
||||
k.kreditorId as assigned_kreditor_id_code
|
||||
FROM fibdash.BankingAccountTransactions bat
|
||||
LEFT JOIN fibdash.Kreditor k ON bat.assigned_kreditor_id = k.id
|
||||
WHERE bat.transaction_id = @transactionId OR bat.csv_transaction_id = @transactionId
|
||||
`;
|
||||
|
||||
const result = await executeQuery(query, { transactionId: parseInt(transactionId) });
|
||||
|
||||
res.json(result.recordset);
|
||||
} catch (error) {
|
||||
console.error('Error fetching banking account transactions:', error);
|
||||
res.status(500).json({ error: 'Failed to fetch banking account transactions' });
|
||||
}
|
||||
});
|
||||
|
||||
// Create banking account transaction assignment
|
||||
router.post('/banking-transactions', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { executeQuery } = require('../config/database');
|
||||
const { transaction_id, csv_transaction_id, banking_iban, assigned_kreditor_id, notes, assigned_by } = req.body;
|
||||
|
||||
// Validate required fields - need either transaction_id or csv_transaction_id
|
||||
if ((!transaction_id && !csv_transaction_id) || !banking_iban || !assigned_kreditor_id) {
|
||||
return res.status(400).json({
|
||||
error: 'Transaction ID (or CSV Transaction ID), banking IBAN, and assigned kreditor ID are required'
|
||||
});
|
||||
}
|
||||
|
||||
// Check if assignment already exists
|
||||
const checkQuery = `
|
||||
SELECT id FROM fibdash.BankingAccountTransactions
|
||||
WHERE transaction_id = @transaction_id OR csv_transaction_id = @csv_transaction_id
|
||||
`;
|
||||
|
||||
const checkResult = await executeQuery(checkQuery, {
|
||||
transaction_id: transaction_id || null,
|
||||
csv_transaction_id: csv_transaction_id || null
|
||||
});
|
||||
|
||||
if (checkResult.recordset.length > 0) {
|
||||
return res.status(409).json({ error: 'Banking transaction assignment already exists' });
|
||||
}
|
||||
|
||||
const insertQuery = `
|
||||
INSERT INTO fibdash.BankingAccountTransactions
|
||||
(transaction_id, csv_transaction_id, banking_iban, assigned_kreditor_id, notes, assigned_by)
|
||||
OUTPUT INSERTED.*
|
||||
VALUES (@transaction_id, @csv_transaction_id, @banking_iban, @assigned_kreditor_id, @notes, @assigned_by)
|
||||
`;
|
||||
|
||||
const result = await executeQuery(insertQuery, {
|
||||
transaction_id: transaction_id || null,
|
||||
csv_transaction_id: csv_transaction_id || null,
|
||||
banking_iban,
|
||||
assigned_kreditor_id,
|
||||
notes: notes || null,
|
||||
assigned_by: assigned_by || null
|
||||
});
|
||||
|
||||
res.status(201).json(result.recordset[0]);
|
||||
} catch (error) {
|
||||
console.error('Error creating banking account transaction:', error);
|
||||
res.status(500).json({ error: 'Failed to create banking account transaction' });
|
||||
}
|
||||
});
|
||||
|
||||
// Update banking account transaction assignment
|
||||
router.put('/banking-transactions/:id', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { executeQuery } = require('../config/database');
|
||||
const { id } = req.params;
|
||||
const { assigned_kreditor_id, notes, assigned_by } = req.body;
|
||||
|
||||
// Validate required fields
|
||||
if (!assigned_kreditor_id) {
|
||||
return res.status(400).json({ error: 'Assigned kreditor ID is required' });
|
||||
}
|
||||
|
||||
const updateQuery = `
|
||||
UPDATE fibdash.BankingAccountTransactions
|
||||
SET assigned_kreditor_id = @assigned_kreditor_id,
|
||||
notes = @notes,
|
||||
assigned_by = @assigned_by,
|
||||
assigned_date = GETDATE()
|
||||
OUTPUT INSERTED.*
|
||||
WHERE id = @id
|
||||
`;
|
||||
|
||||
const result = await executeQuery(updateQuery, {
|
||||
assigned_kreditor_id,
|
||||
notes: notes || null,
|
||||
assigned_by: assigned_by || null,
|
||||
id: parseInt(id)
|
||||
});
|
||||
|
||||
if (result.recordset.length === 0) {
|
||||
return res.status(404).json({ error: 'Banking transaction assignment not found' });
|
||||
}
|
||||
|
||||
res.json(result.recordset[0]);
|
||||
} catch (error) {
|
||||
console.error('Error updating banking account transaction:', error);
|
||||
res.status(500).json({ error: 'Failed to update banking account transaction' });
|
||||
}
|
||||
});
|
||||
|
||||
// Delete banking account transaction assignment
|
||||
router.delete('/banking-transactions/:id', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { executeQuery } = require('../config/database');
|
||||
const { id } = req.params;
|
||||
|
||||
const deleteQuery = `
|
||||
DELETE FROM fibdash.BankingAccountTransactions
|
||||
WHERE id = @id
|
||||
`;
|
||||
|
||||
const result = await executeQuery(deleteQuery, { id: parseInt(id) });
|
||||
|
||||
res.json({ message: 'Banking transaction assignment deleted successfully' });
|
||||
} catch (error) {
|
||||
console.error('Error deleting banking account transaction:', error);
|
||||
res.status(500).json({ error: 'Failed to delete banking account transaction' });
|
||||
}
|
||||
});
|
||||
|
||||
// Get all kreditors that can be assigned to banking transactions (non-banking kreditors)
|
||||
router.get('/assignable-kreditors', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { executeQuery } = require('../config/database');
|
||||
|
||||
const query = `
|
||||
SELECT id, name, kreditorId
|
||||
FROM fibdash.Kreditor
|
||||
WHERE is_banking = 0
|
||||
ORDER BY name
|
||||
`;
|
||||
|
||||
const result = await executeQuery(query);
|
||||
|
||||
res.json(result.recordset);
|
||||
} catch (error) {
|
||||
console.error('Error fetching assignable kreditors:', error);
|
||||
res.status(500).json({ error: 'Failed to fetch assignable kreditors' });
|
||||
}
|
||||
});
|
||||
|
||||
// CSV Import endpoints
|
||||
|
||||
// Test CSV import endpoint (no auth for testing) - ACTUALLY IMPORTS TO DATABASE
|
||||
router.post('/test-csv-import', async (req, res) => {
|
||||
try {
|
||||
const { executeQuery } = require('../config/database');
|
||||
const { transactions, filename, batchId, headers } = req.body;
|
||||
|
||||
if (!transactions || !Array.isArray(transactions)) {
|
||||
return res.status(400).json({ error: 'Transactions array is required' });
|
||||
}
|
||||
|
||||
const importBatchId = batchId || `test_import_${Date.now()}`;
|
||||
let successCount = 0;
|
||||
let errorCount = 0;
|
||||
const errors = [];
|
||||
|
||||
for (let i = 0; i < transactions.length; i++) {
|
||||
const transaction = transactions[i];
|
||||
|
||||
try {
|
||||
// Validate required fields
|
||||
const validationErrors = [];
|
||||
|
||||
if (!transaction['Buchungstag'] || transaction['Buchungstag'].trim() === '') {
|
||||
validationErrors.push('Buchungstag is required');
|
||||
}
|
||||
|
||||
if (!transaction['Betrag'] || transaction['Betrag'].toString().trim() === '') {
|
||||
validationErrors.push('Betrag is required');
|
||||
}
|
||||
|
||||
if (validationErrors.length > 0) {
|
||||
errors.push({
|
||||
row: i + 1,
|
||||
error: `Validation failed: ${validationErrors.join(', ')}`,
|
||||
transaction: transaction
|
||||
});
|
||||
errorCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Parse the date
|
||||
let parsedDate = null;
|
||||
if (transaction['Buchungstag']) {
|
||||
const dateStr = transaction['Buchungstag'].trim();
|
||||
const dateParts = dateStr.split(/[.\/\-]/);
|
||||
if (dateParts.length === 3) {
|
||||
const day = parseInt(dateParts[0]);
|
||||
const month = parseInt(dateParts[1]) - 1;
|
||||
let year = parseInt(dateParts[2]);
|
||||
|
||||
if (year < 100) {
|
||||
year += (year < 50) ? 2000 : 1900;
|
||||
}
|
||||
|
||||
parsedDate = new Date(year, month, day);
|
||||
|
||||
if (isNaN(parsedDate.getTime())) {
|
||||
parsedDate = null;
|
||||
validationErrors.push(`Invalid date format: ${dateStr}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the amount
|
||||
let numericAmount = 0;
|
||||
if (transaction['Betrag']) {
|
||||
const amountStr = transaction['Betrag'].toString().replace(/[^\d,.-]/g, '');
|
||||
const normalizedAmount = amountStr.replace(',', '.');
|
||||
numericAmount = parseFloat(normalizedAmount) || 0;
|
||||
}
|
||||
|
||||
const insertQuery = `
|
||||
INSERT INTO fibdash.CSVTransactions
|
||||
(buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
|
||||
beguenstigter_zahlungspflichtiger, kontonummer_iban, bic, verwendungszweck,
|
||||
parsed_date, numeric_amount, import_batch_id, source_filename, source_row_number)
|
||||
VALUES
|
||||
(@buchungstag, @wertstellung, @umsatzart, @betrag, @betrag_original, @waehrung,
|
||||
@beguenstigter_zahlungspflichtiger, @kontonummer_iban, @bic, @verwendungszweck,
|
||||
@parsed_date, @numeric_amount, @import_batch_id, @source_filename, @source_row_number)
|
||||
`;
|
||||
|
||||
await executeQuery(insertQuery, {
|
||||
buchungstag: transaction['Buchungstag'] || null,
|
||||
wertstellung: transaction['Valutadatum'] || null,
|
||||
umsatzart: transaction['Buchungstext'] || null,
|
||||
betrag: numericAmount,
|
||||
betrag_original: transaction['Betrag'] || null,
|
||||
waehrung: transaction['Waehrung'] || null,
|
||||
beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
|
||||
kontonummer_iban: transaction['Kontonummer/IBAN'] || null,
|
||||
bic: transaction['BIC (SWIFT-Code)'] || null,
|
||||
verwendungszweck: transaction['Verwendungszweck'] || null,
|
||||
parsed_date: parsedDate,
|
||||
numeric_amount: numericAmount,
|
||||
import_batch_id: importBatchId,
|
||||
source_filename: filename || 'test_import',
|
||||
source_row_number: i + 1
|
||||
});
|
||||
|
||||
successCount++;
|
||||
} catch (error) {
|
||||
console.error(`Error importing transaction ${i + 1}:`, error);
|
||||
errors.push({
|
||||
row: i + 1,
|
||||
error: error.message,
|
||||
transaction: transaction
|
||||
});
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
batchId: importBatchId,
|
||||
imported: successCount,
|
||||
errors: errorCount,
|
||||
details: errors.length > 0 ? errors : undefined,
|
||||
paypalTransaction: transactions.find(t => t['Kontonummer/IBAN'] === 'LU89751000135104200E')
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
console.error('Test import error:', error);
|
||||
res.status(500).json({ error: 'Test import failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// Import CSV transactions to database.
// Expects req.body = { transactions: object[], filename?, batchId?, headers?: string[] },
// where each transaction object is keyed by the German bank CSV column names below.
// Responds with { success, batchId, imported, errors, details? }.
router.post('/import-csv-transactions', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../config/database');
    const { transactions, filename, batchId, headers } = req.body;

    if (!transactions || !Array.isArray(transactions)) {
      return res.status(400).json({ error: 'Transactions array is required' });
    }

    // Expected CSV headers (German bank format)
    const expectedHeaders = [
      'Auftragskonto',
      'Buchungstag',
      'Valutadatum',
      'Buchungstext',
      'Verwendungszweck',
      'Glaeubiger ID',
      'Mandatsreferenz',
      'Kundenreferenz (End-to-End)',
      'Sammlerreferenz',
      'Lastschrift Ursprungsbetrag',
      'Auslagenersatz Ruecklastschrift',
      'Beguenstigter/Zahlungspflichtiger',
      'Kontonummer/IBAN',
      'BIC (SWIFT-Code)',
      'Betrag',
      'Waehrung',
      'Info'
    ];

    // Validate headers if provided. Non-string entries are skipped so a
    // malformed header row yields a 400 response, not an unhandled
    // TypeError from .trim() and a 500.
    if (headers && Array.isArray(headers)) {
      const missingHeaders = expectedHeaders.filter(expected =>
        !headers.some(header => typeof header === 'string' && header.trim() === expected)
      );

      if (missingHeaders.length > 0) {
        return res.status(400).json({
          error: 'Invalid CSV format - missing required headers',
          missing: missingHeaders,
          expected: expectedHeaders,
          received: headers
        });
      }
    }

    // Validate that we have transactions
    if (transactions.length === 0) {
      return res.status(400).json({ error: 'No transaction data found' });
    }

    const importBatchId = batchId || `import_${Date.now()}`;
    let successCount = 0;
    let errorCount = 0;
    const errors = [];

    for (let i = 0; i < transactions.length; i++) {
      const transaction = transactions[i];

      try {
        // Validate required fields for each transaction
        const validationErrors = [];

        if (!transaction['Buchungstag'] || transaction['Buchungstag'].trim() === '') {
          validationErrors.push('Buchungstag is required');
        }

        if (!transaction['Betrag'] || transaction['Betrag'].toString().trim() === '') {
          validationErrors.push('Betrag is required');
        }

        if (!transaction['Beguenstigter/Zahlungspflichtiger'] || transaction['Beguenstigter/Zahlungspflichtiger'].trim() === '') {
          validationErrors.push('Beguenstigter/Zahlungspflichtiger is required');
        }

        // Skip rows that are clearly invalid (like headers or empty rows)
        if (validationErrors.length > 2) {
          console.log(`Skipping invalid row ${i + 1}:`, validationErrors);
          continue;
        }

        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: `Validation failed: ${validationErrors.join(', ')}`,
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Parse the date. Accepts DD.MM.YY, DD.MM.YYYY, DD/MM/YYYY, DD-MM-YYYY.
        let parsedDate = null;
        if (transaction['Buchungstag']) {
          const dateStr = transaction['Buchungstag'].trim();
          const dateParts = dateStr.split(/[.\/\-]/);
          if (dateParts.length === 3) {
            const day = parseInt(dateParts[0], 10);
            const month = parseInt(dateParts[1], 10) - 1; // JavaScript months are 0-based
            let year = parseInt(dateParts[2], 10);

            // Handle 2-digit years (assume 21.07.25 means 2025)
            if (year < 100) {
              year += (year < 50) ? 2000 : 1900; // 00-49 = 2000-2049, 50-99 = 1950-1999
            }

            parsedDate = new Date(year, month, day);

            // Round-trip check catches overflowed dates such as 31.02.2025,
            // which the Date constructor silently rolls forward into March.
            if (isNaN(parsedDate.getTime()) ||
                parsedDate.getDate() !== day ||
                parsedDate.getMonth() !== month ||
                parsedDate.getFullYear() !== year) {
              parsedDate = null;
              validationErrors.push(`Invalid date format: ${dateStr}`);
            }
          } else {
            validationErrors.push(`Invalid date format: ${dateStr}`);
          }
        }

        // BUG FIX: the date-format errors pushed above were previously
        // collected but never re-checked, so rows with an unparseable
        // Buchungstag were silently inserted with parsed_date = NULL.
        // Reject them like any other validation failure.
        if (validationErrors.length > 0) {
          errors.push({
            row: i + 1,
            error: `Validation failed: ${validationErrors.join(', ')}`,
            transaction: transaction
          });
          errorCount++;
          continue;
        }

        // Parse the amount. German bank CSVs use ',' as the decimal mark and
        // '.' as a thousands separator (e.g. "1.234,56").
        let numericAmount = 0;
        if (transaction['Betrag']) {
          const amountStr = transaction['Betrag'].toString().replace(/[^\d,.-]/g, '');
          // BUG FIX: a plain replace(',', '.') turned "1.234,56" into
          // "1.234.56", which parseFloat reads as 1.234. When a decimal
          // comma is present, strip the '.' thousands separators first;
          // otherwise the value is already in dot-decimal form.
          const normalizedAmount = amountStr.includes(',')
            ? amountStr.replace(/\./g, '').replace(',', '.')
            : amountStr;
          numericAmount = parseFloat(normalizedAmount) || 0;
        }

        const insertQuery = `
          INSERT INTO fibdash.CSVTransactions
          (buchungstag, wertstellung, umsatzart, betrag, betrag_original, waehrung,
           beguenstigter_zahlungspflichtiger, kontonummer_iban, bic, verwendungszweck,
           parsed_date, numeric_amount, import_batch_id, source_filename, source_row_number)
          VALUES
          (@buchungstag, @wertstellung, @umsatzart, @betrag, @betrag_original, @waehrung,
           @beguenstigter_zahlungspflichtiger, @kontonummer_iban, @bic, @verwendungszweck,
           @parsed_date, @numeric_amount, @import_batch_id, @source_filename, @source_row_number)
        `;

        // Column mapping note: CSV "Valutadatum" -> wertstellung,
        // "Buchungstext" -> umsatzart (German bank export naming).
        await executeQuery(insertQuery, {
          buchungstag: transaction['Buchungstag'] || null,
          wertstellung: transaction['Valutadatum'] || null,
          umsatzart: transaction['Buchungstext'] || null,
          betrag: numericAmount,
          betrag_original: transaction['Betrag'] || null,
          waehrung: transaction['Waehrung'] || null,
          beguenstigter_zahlungspflichtiger: transaction['Beguenstigter/Zahlungspflichtiger'] || null,
          kontonummer_iban: transaction['Kontonummer/IBAN'] || null,
          bic: transaction['BIC (SWIFT-Code)'] || null,
          verwendungszweck: transaction['Verwendungszweck'] || null,
          parsed_date: parsedDate,
          numeric_amount: numericAmount,
          import_batch_id: importBatchId,
          source_filename: filename || null,
          source_row_number: i + 1
        });

        successCount++;
      } catch (error) {
        // Per-row failures are recorded and the import continues with the
        // remaining rows rather than aborting the whole batch.
        console.error(`Error importing transaction ${i + 1}:`, error);
        errors.push({
          row: i + 1,
          error: error.message,
          transaction: transaction
        });
        errorCount++;
      }
    }

    res.json({
      success: true,
      batchId: importBatchId,
      imported: successCount,
      errors: errorCount,
      details: errors.length > 0 ? errors : undefined
    });

  } catch (error) {
    console.error('Error importing CSV transactions:', error);
    res.status(500).json({ error: 'Failed to import CSV transactions' });
  }
});
|
||||
|
||||
// Get imported CSV transactions, enriched via LEFT JOINs with any kreditor
// matched by IBAN and any manually assigned banking-account kreditor.
// Query params: batchId? (filter by import batch), limit (default 100),
// offset (default 0) for OFFSET/FETCH paging.
router.get('/csv-transactions', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../config/database');
    const { batchId, limit = 100, offset = 0 } = req.query;

    let query = `
      SELECT
        csv.*,
        k.name as kreditor_name,
        k.kreditorId as kreditor_id,
        k.is_banking as kreditor_is_banking,
        bat.assigned_kreditor_id,
        ak.name as assigned_kreditor_name
      FROM fibdash.CSVTransactions csv
      LEFT JOIN fibdash.Kreditor k ON csv.kontonummer_iban = k.iban
      LEFT JOIN fibdash.BankingAccountTransactions bat ON csv.id = bat.csv_transaction_id
      LEFT JOIN fibdash.Kreditor ak ON bat.assigned_kreditor_id = ak.id
    `;

    const params = {};

    if (batchId) {
      query += ' WHERE csv.import_batch_id = @batchId';
      params.batchId = batchId;
    }

    // ORDER BY is mandatory for OFFSET/FETCH paging in SQL Server.
    query += ' ORDER BY csv.parsed_date DESC, csv.id DESC';
    query += ' OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY';

    // BUG FIX: parseInt without a radix and without NaN/negative guards let
    // a bad query string (e.g. ?limit=abc or ?offset=-5) produce invalid
    // SQL parameters and a 500. Fall back to the documented defaults.
    const parsedOffset = Number.parseInt(offset, 10);
    const parsedLimit = Number.parseInt(limit, 10);
    params.offset = Number.isNaN(parsedOffset) || parsedOffset < 0 ? 0 : parsedOffset;
    params.limit = Number.isNaN(parsedLimit) || parsedLimit <= 0 ? 100 : parsedLimit;

    const result = await executeQuery(query, params);

    res.json(result.recordset);
  } catch (error) {
    console.error('Error fetching CSV transactions:', error);
    res.status(500).json({ error: 'Failed to fetch CSV transactions' });
  }
});
|
||||
|
||||
// Get CSV import batches: one summary row per (batch id, source filename)
// pair, with the earliest import timestamp, the total number of imported
// rows, and how many of them have been processed. Newest batches first.
router.get('/csv-import-batches', authenticateToken, async (req, res) => {
  try {
    const { executeQuery } = require('../config/database');

    const batchSummaryQuery = `
      SELECT
        import_batch_id,
        source_filename,
        MIN(import_date) as import_date,
        COUNT(*) as transaction_count,
        SUM(CASE WHEN is_processed = 1 THEN 1 ELSE 0 END) as processed_count
      FROM fibdash.CSVTransactions
      GROUP BY import_batch_id, source_filename
      ORDER BY MIN(import_date) DESC
    `;

    const { recordset } = await executeQuery(batchSummaryQuery);
    res.json(recordset);
  } catch (error) {
    console.error('Error fetching import batches:', error);
    res.status(500).json({ error: 'Failed to fetch import batches' });
  }
});
|
||||
|
||||
// Expose the configured router so the application entry point can mount it.
module.exports = router;
|
||||
Reference in New Issue
Block a user