Add OpenAI API integration and document processing features

- Added OpenAI API key configuration to .env.example.
- Integrated OpenAI for document processing, including markdown conversion and data extraction.
- Implemented new API routes for fetching document processing status and handling the various processing tasks.
- Enhanced the App component to manage document status and processing state, with user feedback via Snackbar.
- Updated the CSVImportPanel and TableManagement components to support navigation to specific tabs based on processing results.
- Introduced transaction handling in the database configuration for improved error management during document processing.
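At a glance, the new flow is: the frontend reads per-type counts from `/api/data/document-status` and kicks off work via `POST /api/data/process-<type>`. A minimal sketch of that round trip outside React (token handling mirrors what App.js below does via localStorage; the endpoint paths are taken from this diff):

```js
// Sketch of the new processing round trip (paths taken from this commit).
// Assumes a valid JWT is already available, as App.js stores it in localStorage.
async function runProcessing(token, processType /* 'markdown' | 'extraction' | 'datev-sync' */) {
  const headers = { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' };

  // 1. Ask how many documents still need work.
  const status = await (await fetch('/api/data/document-status', { headers })).json();
  if (processType === 'markdown' && !status.needMarkdown) return 'nothing to do';

  // 2. Trigger one processing pass on the backend.
  const res = await fetch(`/api/data/process-${processType}`, { method: 'POST', headers });
  return res.ok ? (await res.json()).message : Promise.reject(await res.text());
}
```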
@@ -1,6 +1,4 @@
---
alwaysApply: true
---
sqlcmd -C -S tcp:192.168.56.1,1497 -U app -P 'readonly' -d eazybusiness -W

sqlcmd -C -S tcp:192.168.56.1,1497 -U sa -P 'sa_tekno23' -d eazybusiness -W
.cursor/rules/devserver.mdc (new file, 7 lines)
@@ -0,0 +1,7 @@
---
alwaysApply: true
---
pm2 restart 10 -> restart backend (configured as "npm run dev:backend")
pm2 restart 11 -> restart frontend (configured as "npm run dev:frontend")

(both should rarely ever need a restart, because in dev mode HMR for the frontend and nodemon for the backend should already handle that)
@@ -8,6 +8,9 @@ REACT_APP_GOOGLE_CLIENT_ID=your_google_client_id_here
# JWT Secret
JWT_SECRET=your_jwt_secret_here

# OpenAI API Configuration
OPENAI_API_KEY=your_openai_api_key_here

# Authorized Email Addresses (comma-separated)
AUTHORIZED_EMAILS=admin@example.com,user1@example.com,user2@example.com

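For reference, a minimal sketch of how the backend is expected to pick up the new key (this assumes the server already loads `.env` via dotenv, which is not shown in this diff):

```js
// Sketch only: load .env and build the OpenAI client the routes below rely on.
require('dotenv').config(); // assumption: dotenv is part of the existing server bootstrap

const { OpenAI } = require('openai');

if (!process.env.OPENAI_API_KEY) {
  // the new routes respond with HTTP 500 in this case
  throw new Error('OPENAI_API_KEY is not configured');
}

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
```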
@@ -1,11 +1,16 @@
import React, { Component } from 'react';
import { ThemeProvider, createTheme } from '@mui/material/styles';
import CssBaseline from '@mui/material/CssBaseline';
import { Container, AppBar, Toolbar, Typography, Button, Box, Tabs, Tab } from '@mui/material';
import { Container, AppBar, Toolbar, Typography, Button, Box, Tabs, Tab, Badge, Chip, Divider, Snackbar, Alert } from '@mui/material';
import LoginIcon from '@mui/icons-material/Login';
import DashboardIcon from '@mui/icons-material/Dashboard';
import DownloadIcon from '@mui/icons-material/Download';
import TableChart from '@mui/icons-material/TableChart';
import PlayArrowIcon from '@mui/icons-material/PlayArrow';
import DocumentScannerIcon from '@mui/icons-material/DocumentScanner';
import ExtractIcon from '@mui/icons-material/TextSnippet';
import EmailIcon from '@mui/icons-material/Email';
import UploadIcon from '@mui/icons-material/Upload';
import AuthService from './services/AuthService';
import DataViewer from './components/DataViewer';
import Login from './components/Login';
@@ -31,6 +36,18 @@ class App extends Component {
      loading: true,
      exportData: null, // { selectedMonth, canExport, onExport }
      currentView: 'dashboard', // 'dashboard' or 'tables'
      documentStatus: null,
      processingStatus: {
        markdown: false,
        extraction: false,
        datevSync: false,
        datevUpload: false
      },
      snackbar: {
        open: false,
        message: '',
        severity: 'info' // 'success', 'error', 'warning', 'info'
      }
    };
    this.authService = new AuthService();
  }
@@ -39,6 +56,15 @@ class App extends Component {
    this.checkAuthStatus();
  }

  componentDidUpdate(prevProps, prevState) {
    // Clear targetTab after navigation is complete
    if (this.state.targetTab && prevState.currentView !== this.state.currentView) {
      setTimeout(() => {
        this.setState({ targetTab: null });
      }, 100); // Small delay to ensure navigation completes
    }
  }

  checkAuthStatus = async () => {
    try {
      const token = localStorage.getItem('token');
@@ -46,6 +72,7 @@ class App extends Component {
        const user = await this.authService.verifyToken(token);
        if (user) {
          this.setState({ isAuthenticated: true, user, loading: false });
          this.fetchDocumentStatus();
          return;
        }
      }
@@ -62,6 +89,7 @@ class App extends Component {
      if (result.success) {
        localStorage.setItem('token', result.token);
        this.setState({ isAuthenticated: true, user: result.user });
        this.fetchDocumentStatus();
      }
    } catch (error) {
      console.error('Login failed:', error);
@@ -83,8 +111,131 @@ class App extends Component {
    this.setState({ currentView: newValue });
  };

  showSnackbar = (message, severity = 'info') => {
    this.setState({
      snackbar: {
        open: true,
        message,
        severity
      }
    });
  };

  handleSnackbarClose = (event, reason) => {
    if (reason === 'clickaway') {
      return;
    }
    this.setState({
      snackbar: {
        ...this.state.snackbar,
        open: false
      }
    });
  };

  fetchDocumentStatus = async () => {
    try {
      const token = localStorage.getItem('token');
      if (!token) {
        console.log('No token found for document status');
        return;
      }

      console.log('Fetching document status...');
      const response = await fetch('/api/data/document-status', {
        headers: {
          'Authorization': `Bearer ${token}`,
          'Content-Type': 'application/json',
        },
      });

      if (response.ok) {
        const status = await response.json();
        console.log('Document status received:', status);
        this.setState({ documentStatus: status });
      } else {
        console.error('Failed to fetch document status:', response.status, await response.text());
      }
    } catch (error) {
      console.error('Error fetching document status:', error);
    }
  };

  handleProcessing = async (processType) => {
    if (this.state.processingStatus[processType]) {
      return; // Already processing
    }

    // Handle datev upload navigation
    if (processType === 'datev-upload') {
      this.setState({
        currentView: 'tables',
        targetTab: {
          level1: 3, // CSV Import tab
          level2: 'DATEV_LINKS' // DATEV Beleglinks tab
        }
      });
      return;
    }

    // Check if there are documents to process
    const statusKey = processType === 'datev-sync' ? 'needDatevSync' :
      processType === 'extraction' ? 'needExtraction' : 'needMarkdown';

    if (!this.state.documentStatus || this.state.documentStatus[statusKey] === 0) {
      this.showSnackbar(`No documents need ${processType} processing at this time.`, 'info');
      return;
    }

    this.setState(prevState => ({
      processingStatus: {
        ...prevState.processingStatus,
        [processType]: true
      }
    }));

    try {
      const token = localStorage.getItem('token');
      if (!token) return;

      const response = await fetch(`/api/data/process-${processType}`, {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${token}`,
          'Content-Type': 'application/json',
        },
      });

      if (response.ok) {
        const result = await response.json();
        console.log(`${processType} processing result:`, result);
        this.showSnackbar(`${processType} processing completed successfully!`, 'success');
        // Refresh document status after successful processing
        await this.fetchDocumentStatus();
      } else {
        const error = await response.json();
        console.error(`Failed to process ${processType}:`, error);
        this.showSnackbar(`Failed to process ${processType}: ${error.error || response.status}`, 'error');
      }
    } catch (error) {
      console.error(`Error processing ${processType}:`, error);
      this.showSnackbar(`Error processing ${processType}: ${error.message}`, 'error');
    } finally {
      this.setState(prevState => ({
        processingStatus: {
          ...prevState.processingStatus,
          [processType]: false
        }
      }));
    }
  };

  render() {
    const { isAuthenticated, user, loading, currentView } = this.state;
    const { isAuthenticated, user, loading, currentView, documentStatus, processingStatus, snackbar } = this.state;

    // Debug logging
    console.log('App render - documentStatus:', documentStatus);
    console.log('App render - isAuthenticated:', isAuthenticated);

    if (loading) {
      return (
@@ -149,6 +300,97 @@ class App extends Component {
              sx={{ minHeight: 48 }}
            />
          </Tabs>
          <Divider orientation="vertical" flexItem sx={{ mx: 2, backgroundColor: 'rgba(255, 255, 255, 0.3)' }} />
          <Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
            <Button
              color="inherit"
              size="small"
              onClick={() => this.handleProcessing('markdown')}
              disabled={processingStatus.markdown || !documentStatus}
              sx={{
                minWidth: 'auto',
                px: 1,
                '&:hover': { backgroundColor: 'rgba(255, 255, 255, 0.1)' }
              }}
              title="Process markdown conversion"
            >
              <Badge
                badgeContent={documentStatus?.needMarkdown || 0}
                color={documentStatus?.needMarkdown > 0 ? "error" : "default"}
                max={999999}
                sx={{ mr: 0.5 }}
              >
                <DocumentScannerIcon fontSize="small" />
              </Badge>
              {processingStatus.markdown && <PlayArrowIcon fontSize="small" />}
            </Button>
            <Button
              color="inherit"
              size="small"
              onClick={() => this.handleProcessing('extraction')}
              disabled={processingStatus.extraction || !documentStatus}
              sx={{
                minWidth: 'auto',
                px: 1,
                '&:hover': { backgroundColor: 'rgba(255, 255, 255, 0.1)' }
              }}
              title="Process data extraction"
            >
              <Badge
                badgeContent={documentStatus?.needExtraction || 0}
                color={documentStatus?.needExtraction > 0 ? "warning" : "default"}
                max={999999}
                sx={{ mr: 0.5 }}
              >
                <ExtractIcon fontSize="small" />
              </Badge>
              {processingStatus.extraction && <PlayArrowIcon fontSize="small" />}
            </Button>
            <Button
              color="inherit"
              size="small"
              onClick={() => this.handleProcessing('datev-sync')}
              disabled={processingStatus.datevSync || !documentStatus}
              sx={{
                minWidth: 'auto',
                px: 1,
                '&:hover': { backgroundColor: 'rgba(255, 255, 255, 0.1)' }
              }}
              title="Process Datev sync"
            >
              <Badge
                badgeContent={documentStatus?.needDatevSync || 0}
                color={documentStatus?.needDatevSync > 0 ? "info" : "default"}
                max={999999}
                sx={{ mr: 0.5 }}
              >
                <EmailIcon fontSize="small" />
              </Badge>
              {processingStatus.datevSync && <PlayArrowIcon fontSize="small" />}
            </Button>
            <Button
              color="inherit"
              size="small"
              onClick={() => this.handleProcessing('datev-upload')}
              disabled={processingStatus.datevUpload || !documentStatus}
              sx={{
                minWidth: 'auto',
                px: 1,
                '&:hover': { backgroundColor: 'rgba(255, 255, 255, 0.1)' }
              }}
              title="Process Datev CSV upload"
            >
              <Badge
                badgeContent={documentStatus?.needDatevUpload || 0}
                color={documentStatus?.needDatevUpload > 0 ? "secondary" : "default"}
                max={999999}
                sx={{ mr: 0.5 }}
              >
                <UploadIcon fontSize="small" />
              </Badge>
              {processingStatus.datevUpload && <PlayArrowIcon fontSize="small" />}
            </Button>
          </Box>
          {this.state.exportData && (
            <Button
              color="inherit"
@@ -194,12 +436,29 @@ class App extends Component {
              onUpdateExportData={this.updateExportData}
              currentView={currentView}
              onViewChange={this.handleViewChange}
              targetTab={this.state.targetTab}
            />
          ) : (
            <Login onLogin={this.handleLogin} />
          )}
        </Container>
      </Box>

      <Snackbar
        open={snackbar.open}
        autoHideDuration={6000}
        onClose={this.handleSnackbarClose}
        anchorOrigin={{ vertical: 'bottom', horizontal: 'right' }}
      >
        <Alert
          onClose={this.handleSnackbarClose}
          severity={snackbar.severity}
          variant="filled"
          sx={{ width: '100%' }}
        >
          {snackbar.message}
        </Alert>
      </Snackbar>
    </ThemeProvider>
  );
}

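The four toolbar buttons added above all funnel into `handleProcessing`; a condensed view of how a button's `processType` maps to a backend route (derived from the code above, not part of the commit itself; the `datev-upload` case never hits the network, it only navigates):

```js
// Condensed mapping, derived from App.handleProcessing above.
const processRoutes = {
  markdown: '/api/data/process-markdown',       // badge: documentStatus.needMarkdown
  extraction: '/api/data/process-extraction',   // badge: documentStatus.needExtraction
  'datev-sync': '/api/data/process-datev-sync', // badge: documentStatus.needDatevSync
  'datev-upload': null,                         // handled client-side: navigate to the CSV import tab
};
```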
@@ -56,6 +56,20 @@ class CSVImportPanel extends Component {
    this.datevFileInputRef = React.createRef();
  }

  componentDidMount() {
    // Check if we should navigate to a specific tab
    if (this.props.targetTab) {
      this.setState({ activeTab: this.props.targetTab });
    }
  }

  componentDidUpdate(prevProps) {
    // Handle targetTab changes
    if (this.props.targetTab !== prevProps.targetTab && this.props.targetTab) {
      this.setState({ activeTab: this.props.targetTab });
    }
  }

  // Tab switch resets type-specific state but keeps success state as-is
  handleTabChange = (_e, value) => {
    this.setState({
@@ -344,6 +358,7 @@ class CSVImportPanel extends Component {
        )}

        {currentHeaders && (

          <Box sx={{ mb: 2 }}>
            <Typography variant="subtitle2" gutterBottom>
              Erkannte Spalten ({currentHeaders.length}):

@@ -165,7 +165,7 @@ class DataViewer extends Component {
          </>
        ) : (
          <Box sx={{ flex: 1, minHeight: 0, overflow: 'auto', p: 2 }}>
            <TableManagement user={user} />
            <TableManagement user={user} targetTab={this.props.targetTab} />
          </Box>
        )}
      </Box>

@@ -26,6 +26,21 @@ class TableManagement extends Component {
    };
  }

  componentDidMount() {
    // Check if we should navigate to a specific tab
    if (this.props.targetTab?.level1 !== undefined) {
      this.setState({ activeTab: this.props.targetTab.level1 });
    }
  }

  componentDidUpdate(prevProps) {
    // Handle targetTab changes
    if (this.props.targetTab?.level1 !== prevProps.targetTab?.level1 &&
        this.props.targetTab?.level1 !== undefined) {
      this.setState({ activeTab: this.props.targetTab.level1 });
    }
  }

  handleTabChange = (event, newValue) => {
    this.setState({ activeTab: newValue });
  };
@@ -92,6 +107,7 @@ class TableManagement extends Component {
            </Typography>
            <CSVImportPanel
              user={user}
              targetTab={this.props.targetTab?.level2}
            />
          </Box>
        )}

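The tab-navigation hand-off works through a single `targetTab` object that App creates and each component passes one level down; a sketch of the shape and the path it travels (component names are from this diff):

```js
// Shape created in App.handleProcessing for the 'datev-upload' case:
const targetTab = {
  level1: 3,             // outer tab index consumed by TableManagement (CSV Import tab)
  level2: 'DATEV_LINKS', // inner tab key consumed by CSVImportPanel (DATEV Beleglinks tab)
};

// Propagation path:
// App (state.targetTab)
//   -> DataViewer (props.targetTab)
//     -> TableManagement (uses targetTab.level1 for its own activeTab)
//       -> CSVImportPanel (receives targetTab.level2 as its targetTab prop)
```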
package-lock.json (generated, 34 changed lines)
@@ -21,6 +21,8 @@
        "google-auth-library": "^9.0.0",
        "jsonwebtoken": "^9.0.0",
        "mssql": "^9.1.0",
        "nodemailer": "^7.0.5",
        "openai": "^5.12.0",
        "react": "^18.2.0",
        "react-dom": "^18.2.0"
      },
@@ -7142,6 +7144,15 @@
      "dev": true,
      "license": "MIT"
    },
    "node_modules/nodemailer": {
      "version": "7.0.5",
      "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.5.tgz",
      "integrity": "sha512-nsrh2lO3j4GkLLXoeEksAMgAOqxOv6QumNRVQTJwKH4nuiww6iC2y7GyANs9kRAxCexg3+lTWM3PZ91iLlVjfg==",
      "license": "MIT-0",
      "engines": {
        "node": ">=6.0.0"
      }
    },
    "node_modules/nodemon": {
      "version": "3.1.10",
      "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.10.tgz",
@@ -7365,6 +7376,27 @@
        "url": "https://github.com/sponsors/sindresorhus"
      }
    },
    "node_modules/openai": {
      "version": "5.12.0",
      "resolved": "https://registry.npmjs.org/openai/-/openai-5.12.0.tgz",
      "integrity": "sha512-vUdt02xiWgOHiYUmW0Hj1Qu9OKAiVQu5Bd547ktVCiMKC1BkB5L3ImeEnCyq3WpRKR6ZTaPgekzqdozwdPs7Lg==",
      "license": "Apache-2.0",
      "bin": {
        "openai": "bin/cli"
      },
      "peerDependencies": {
        "ws": "^8.18.0",
        "zod": "^3.23.8"
      },
      "peerDependenciesMeta": {
        "ws": {
          "optional": true
        },
        "zod": {
          "optional": true
        }
      }
    },
    "node_modules/own-keys": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz",
@@ -9827,7 +9859,7 @@
      "version": "8.18.3",
      "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
      "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
      "dev": true,
      "devOptional": true,
      "license": "MIT",
      "engines": {
        "node": ">=10.0.0"

@@ -32,6 +32,8 @@
    "google-auth-library": "^9.0.0",
    "jsonwebtoken": "^9.0.0",
    "mssql": "^9.1.0",
    "nodemailer": "^7.0.5",
    "openai": "^5.12.0",
    "react": "^18.2.0",
    "react-dom": "^18.2.0"
  },

@@ -73,10 +73,42 @@ const executeQuery = async (query, params = {}) => {
  }
};

const executeTransaction = async (callback) => {
  if (!process.env.DB_SERVER) {
    throw new Error('Database not configured');
  }

  let pool;
  let transaction;

  try {
    pool = await getPool();
    transaction = new sql.Transaction(pool);

    await transaction.begin();

    const result = await callback(transaction);

    await transaction.commit();
    return result;
  } catch (error) {
    if (transaction) {
      try {
        await transaction.rollback();
      } catch (rollbackError) {
        console.error('Transaction rollback failed:', rollbackError);
      }
    }
    console.error('Transaction error:', error);
    throw error;
  }
};

module.exports = {
  config,
  getPool,
  testConnection,
  executeQuery,
  executeTransaction,
  sql,
};

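A minimal usage sketch of the new helper, mirroring how the document-processing routes below call it (the require path and the column values here are illustrative only):

```js
// Sketch: run a statement atomically via executeTransaction.
const sql = require('mssql');
const { executeTransaction } = require('./config/database'); // path assumed

async function markProcessed(id, markdown) {
  return executeTransaction(async (transaction) => {
    // Every statement must go through a Request bound to this transaction.
    await new sql.Request(transaction)
      .input('id', id)
      .input('markDown', markdown)
      .query('UPDATE tUmsatzBeleg SET markDown = @markDown WHERE kUmsatzBeleg = @id');
    // Throwing anywhere in the callback triggers the rollback path above.
    return true; // the callback's return value is passed through on commit
  });
}
```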
src/routes/data/documentProcessing.js (new file, 418 lines)
@@ -0,0 +1,418 @@
const express = require('express');
const { authenticateToken } = require('../../middleware/auth');
const { executeQuery, executeTransaction } = require('../../config/database');
const sql = require('mssql');
const nodemailer = require('nodemailer');

const router = express.Router();

// Get document processing status
router.get('/document-status', authenticateToken, async (req, res) => {
  try {
    console.log('Document status endpoint called');
    const queries = {
      needMarkdownUmsatz: "SELECT COUNT(*) as count FROM tUmsatzBeleg WHERE markDown is null",
      needMarkdownPdf: "SELECT COUNT(*) as count FROM tPdfObjekt WHERE markDown is null",
      needExtractionUmsatz: "SELECT COUNT(*) as count FROM tUmsatzBeleg WHERE markDown is not null and extraction is null",
      needExtractionPdf: "SELECT COUNT(*) as count FROM tPdfObjekt WHERE markDown is not null and extraction is null",
      needDatevSyncUmsatz: "SELECT COUNT(*) as count FROM tUmsatzBeleg WHERE markDown is not null and datevlink is null",
      needDatevSyncPdf: "SELECT COUNT(*) as count FROM tPdfObjekt WHERE markDown is not null and datevlink is null",
      needDatevUploadUmsatz: "SELECT COUNT(*) as count FROM tUmsatzBeleg WHERE datevlink = 'pending'",
      needDatevUploadPdf: "SELECT COUNT(*) as count FROM tPdfObjekt WHERE datevlink = 'pending'"
    };

    const results = {};
    for (const [key, query] of Object.entries(queries)) {
      const result = await executeQuery(query);
      results[key] = result.recordset[0].count;
    }

    const status = {
      needMarkdown: results.needMarkdownUmsatz + results.needMarkdownPdf,
      needExtraction: results.needExtractionUmsatz + results.needExtractionPdf,
      needDatevSync: results.needDatevSyncUmsatz + results.needDatevSyncPdf,
      needDatevUpload: results.needDatevUploadUmsatz + results.needDatevUploadPdf,
      details: {
        markdown: {
          umsatzBeleg: results.needMarkdownUmsatz,
          pdfObjekt: results.needMarkdownPdf
        },
        extraction: {
          umsatzBeleg: results.needExtractionUmsatz,
          pdfObjekt: results.needExtractionPdf
        },
        datevSync: {
          umsatzBeleg: results.needDatevSyncUmsatz,
          pdfObjekt: results.needDatevSyncPdf
        },
        datevUpload: {
          umsatzBeleg: results.needDatevUploadUmsatz,
          pdfObjekt: results.needDatevUploadPdf
        }
      }
    };

    console.log('Document status computed:', status);
    res.json(status);
  } catch (error) {
    console.error('Error fetching document processing status:', error);
    res.status(500).json({ error: 'Failed to fetch document processing status' });
  }
});

// Process markdown conversion
router.post('/process-markdown', authenticateToken, async (req, res) => {
  try {
    const { OpenAI } = require('openai');

    // Check environment for OpenAI API key
    if (!process.env.OPENAI_API_KEY) {
      return res.status(500).json({ error: 'OpenAI API key not configured' });
    }

    const openai = new OpenAI({
      apiKey: process.env.OPENAI_API_KEY,
    });

    await executeTransaction(async (transaction) => {
      // Process UmsatzBeleg documents
      const umsatzResult = await new sql.Request(transaction).query(
        "SELECT TOP 1 kUmsatzBeleg, content FROM tUmsatzBeleg WHERE markDown is null"
      );

      if (umsatzResult.recordset.length > 0) {
        const { kUmsatzBeleg, content } = umsatzResult.recordset[0];

        const response = await openai.responses.create({
          model: "gpt-4o",
          input: [
            { "role": "developer", "content": [{ "type": "input_text", "text": "Convert to Markdown" }] },
            { "role": "user", "content": [{ "type": "input_file", "filename": "invoice.pdf", "file_data": "data:application/pdf;base64," + content.toString('base64') }] }
          ],
          text: {
            "format": {
              "type": "json_schema", "name": "markdown", "strict": true, "schema": { "type": "object", "properties": {
                "output": { "type": "string", "description": "Input converted to Markdown" }
              }, "required": ["output"], "additionalProperties": false }
            }
          },
          tools: [],
          store: false
        });

        const markdown = JSON.parse(response.output_text);

        await new sql.Request(transaction)
          .input('kUmsatzBeleg', kUmsatzBeleg)
          .input('markDown', markdown.output)
          .query("UPDATE tUmsatzBeleg SET markDown = @markDown WHERE kUmsatzBeleg = @kUmsatzBeleg");
      }

      // Process PdfObjekt documents
      const pdfResult = await new sql.Request(transaction).query(
        "SELECT TOP 1 kPdfObjekt, content FROM tPdfObjekt WHERE markDown is null"
      );

      if (pdfResult.recordset.length > 0) {
        const { kPdfObjekt, content } = pdfResult.recordset[0];

        const response = await openai.responses.create({
          model: "gpt-4o",
          input: [
            { "role": "developer", "content": [{ "type": "input_text", "text": "Convert to Markdown" }] },
            { "role": "user", "content": [{ "type": "input_file", "filename": "invoice.pdf", "file_data": "data:application/pdf;base64," + content.toString('base64') }] }
          ],
          text: {
            "format": {
              "type": "json_schema", "name": "markdown", "strict": true, "schema": { "type": "object", "properties": {
                "output": { "type": "string", "description": "Input converted to Markdown" }
              }, "required": ["output"], "additionalProperties": false }
            }
          },
          tools: [],
          store: false
        });

        const markdown = JSON.parse(response.output_text);

        await new sql.Request(transaction)
          .input('kPdfObjekt', kPdfObjekt)
          .input('markDown', markdown.output)
          .query("UPDATE tPdfObjekt SET markDown = @markDown WHERE kPdfObjekt = @kPdfObjekt");
      }
    });

    res.json({ success: true, message: 'Markdown processing completed' });
  } catch (error) {
    console.error('Error processing markdown:', error);
    res.status(500).json({ error: 'Failed to process markdown: ' + error.message });
  }
});

// Process data extraction
router.post('/process-extraction', authenticateToken, async (req, res) => {
  try {
    const { OpenAI } = require('openai');

    if (!process.env.OPENAI_API_KEY) {
      return res.status(500).json({ error: 'OpenAI API key not configured' });
    }

    const openai = new OpenAI({
      apiKey: process.env.OPENAI_API_KEY,
    });

    await executeTransaction(async (transaction) => {
      // Get creditor IDs for extraction
      const creditorResult = await new sql.Request(transaction).query(
        "SELECT kreditorId FROM fibdash.Kreditor ORDER BY kreditorId"
      );
      const creditorIDs = creditorResult.recordset.map(r => r.kreditorId).join(', ');

      // Process UmsatzBeleg documents
      const umsatzResult = await new sql.Request(transaction).query(
        "SELECT TOP 1 kUmsatzBeleg, markDown FROM tUmsatzBeleg WHERE markDown is not null and extraction is null"
      );

      if (umsatzResult.recordset.length > 0) {
        const { kUmsatzBeleg, markDown } = umsatzResult.recordset[0];

        const response = await openai.responses.create({
          model: "gpt-4o-mini",
          input: [
            { "role": "developer", "content": [{ "type": "input_text", "text": `Extract specified information from provided input and structure it in a JSON format.

The aim is to accurately identify and capture the following elements:
- Rechnungsdatum/Belegdatum (Invoice Date/Document Date),
- Rechnungsnummer/Belegnummer (Invoice Number/Document Number),
- Netto Betrag (Net Amount),
- Brutto Betrag (Gross Amount),
- and Absender (Sender).

# Steps

1. **Identify Dates**: Find and extract the invoice or document date (Rechnungsdatum/Belegdatum) from the input text.
2. **Extract Numbers**: Locate and pull out the invoice or document number (Rechnungsnummer/Belegnummer).
3. **Determine Amounts**: Identify the net amount (Netto Betrag) and the gross amount (Brutto Betrag) and the currency in the text.
4. **Source the Sender**: Extract the sender's information (Absender, Country).
5. **Structure Data**: Organize the extracted information into a JSON format following the specified schema.

# Notes

- Ensure that dates are formatted consistently.
- Be mindful of various numerical representations (e.g., with commas or periods).
- The sender's information might include company names, so recognize various formats.
- Prioritize accuracy in identifying the correct fields, as there can be similar text elements present.

Also select the CreditorID, from that List: ${creditorIDs}` }] },
            { "role": "user", "content": [{ "type": "input_text", "text": markDown }] }
          ],
          text: {
            "format": {
              "type": "json_schema", "name": "invoice", "strict": true, "schema": { "type": "object", "properties": {
                "date": { "type": "string", "description": "Rechungsdatum / Belegdatum in ISO 8601" },
                "invoice_number": { "type": "string", "description": "Rechnungsnummer / Belegnummer / Invoicenr" },
                "net_amounts_and_tax": {
                  "type": "array", "description": "Liste von Nettobeträgen mit jeweiligem Steuersatz und Steuerbetrag, ein Listeneintrag pro Steuersatz",
                  "items": { "type": "object", "properties": {
                    "net_amount": { "type": "number", "description": "Netto Betrag" },
                    "tax_rate": { "type": "number", "description": "Steuersatz in Prozent" },
                    "tax_amount": { "type": "number", "description": "Steuerbetrag" }
                  }, "required": ["net_amount", "tax_rate", "tax_amount"], "additionalProperties": false }
                },
                "gross_amount": { "type": "number", "description": "Brutto Betrag (muss der Summe aller net_amount + tax_amount entsprechen)" },
                "currency": { "type": "string", "description": "currency code in ISO 4217" },
                "country": { "type": "string", "description": "country of origin in ISO 3166" },
                "sender": { "type": "string", "description": "Absender" },
                "creditorID": { "type": "string", "description": "CreditorID or empty if unknown" }
              }, "required": ["date", "invoice_number", "net_amounts_and_tax", "gross_amount", "currency", "country", "sender", "creditorID"], "additionalProperties": false }
            }
          },
          reasoning: { "effort": "medium", "summary": "auto" },
          tools: [],
          store: false
        });

        const extraction = JSON.parse(response.output_text);

        await new sql.Request(transaction)
          .input('kUmsatzBeleg', kUmsatzBeleg)
          .input('extraction', JSON.stringify(extraction))
          .query("UPDATE tUmsatzBeleg SET extraction = @extraction WHERE kUmsatzBeleg = @kUmsatzBeleg");
      }

      // Process PdfObjekt documents
      const pdfResult = await new sql.Request(transaction).query(
        "SELECT TOP 1 kPdfObjekt, markDown FROM tPdfObjekt WHERE markDown is not null and extraction is null"
      );

      if (pdfResult.recordset.length > 0) {
        const { kPdfObjekt, markDown } = pdfResult.recordset[0];

        const response = await openai.responses.create({
          model: "gpt-4o-mini",
          input: [
            { "role": "developer", "content": [{ "type": "input_text", "text": `Extract specified information from provided input and structure it in a JSON format.

The aim is to accurately identify and capture the following elements:
- Rechnungsdatum/Belegdatum (Invoice Date/Document Date),
- Rechnungsnummer/Belegnummer (Invoice Number/Document Number),
- Netto Betrag (Net Amount),
- Brutto Betrag (Gross Amount),
- and Absender (Sender).

# Steps

1. **Identify Dates**: Find and extract the invoice or document date (Rechnungsdatum/Belegdatum) from the input text.
2. **Extract Numbers**: Locate and pull out the invoice or document number (Rechnungsnummer/Belegnummer).
3. **Determine Amounts**: Identify the net amount (Netto Betrag) and the gross amount (Brutto Betrag) and the currency in the text.
4. **Source the Sender**: Extract the sender's information (Absender, Country).
5. **Structure Data**: Organize the extracted information into a JSON format following the specified schema.

# Notes

- Ensure that dates are formatted consistently.
- Be mindful of various numerical representations (e.g., with commas or periods).
- The sender's information might include company names, so recognize various formats.
- Prioritize accuracy in identifying the correct fields, as there can be similar text elements present.

Also select the CreditorID, from that List: ${creditorIDs}` }] },
            { "role": "user", "content": [{ "type": "input_text", "text": markDown }] }
          ],
          text: {
            "format": {
              "type": "json_schema", "name": "invoice", "strict": true, "schema": { "type": "object", "properties": {
                "date": { "type": "string", "description": "Rechungsdatum / Belegdatum in ISO 8601" },
                "invoice_number": { "type": "string", "description": "Rechnungsnummer / Belegnummer / Invoicenr" },
                "net_amounts_and_tax": {
                  "type": "array", "description": "Liste von Nettobeträgen mit jeweiligem Steuersatz und Steuerbetrag, ein Listeneintrag pro Steuersatz",
                  "items": { "type": "object", "properties": {
                    "net_amount": { "type": "number", "description": "Netto Betrag" },
                    "tax_rate": { "type": "number", "description": "Steuersatz in Prozent" },
                    "tax_amount": { "type": "number", "description": "Steuerbetrag" }
                  }, "required": ["net_amount", "tax_rate", "tax_amount"], "additionalProperties": false }
                },
                "gross_amount": { "type": "number", "description": "Brutto Betrag (muss der Summe aller net_amount + tax_amount entsprechen)" },
                "currency": { "type": "string", "description": "currency code in ISO 4217" },
                "country": { "type": "string", "description": "country of origin in ISO 3166" },
                "sender": { "type": "string", "description": "Absender" },
                "creditorID": { "type": "string", "description": "CreditorID or empty if unknown" }
              }, "required": ["date", "invoice_number", "net_amounts_and_tax", "gross_amount", "currency", "country", "sender", "creditorID"], "additionalProperties": false }
            }
          },
          reasoning: { "effort": "medium", "summary": "auto" },
          tools: [],
          store: false
        });

        const extraction = JSON.parse(response.output_text);

        await new sql.Request(transaction)
          .input('kPdfObjekt', kPdfObjekt)
          .input('extraction', JSON.stringify(extraction))
          .query("UPDATE tPdfObjekt SET extraction = @extraction WHERE kPdfObjekt = @kPdfObjekt");
      }
    });

    res.json({ success: true, message: 'Extraction processing completed' });
  } catch (error) {
    console.error('Error processing extraction:', error);
    res.status(500).json({ error: 'Failed to process extraction: ' + error.message });
  }
});

// Process Datev sync
router.post('/process-datev-sync', authenticateToken, async (req, res) => {
  try {
    const transporter = nodemailer.createTransport({
      host: "smtp.gmail.com",
      port: 587,
      secure: false, // true for 465, false for other ports
      auth: {
        user: "sebgreenbus@gmail.com",
        pass: "abrp idub thbi kdws", // For Gmail, you might need an app-specific password
      },
    });

    await executeTransaction(async (transaction) => {
      // Process UmsatzBeleg documents
      const umsatzResult = await new sql.Request(transaction).query(
        "SELECT TOP 1 kUmsatzBeleg, content FROM tUmsatzBeleg WHERE markDown is not null and datevlink is null"
      );

      if (umsatzResult.recordset.length > 0) {
        const { kUmsatzBeleg, content } = umsatzResult.recordset[0];

        const mailOptions = {
          from: '"Growheads" <sebgreenbus@gmail.com>',
          to: "97bfd9eb-770f-481a-accb-e69649d36a9e@uploadmail.datev.de",
          subject: `Beleg ${kUmsatzBeleg} für Datev`,
          text: "", // No body text as requested
          attachments: [
            {
              filename: `UmsatzBeleg${kUmsatzBeleg}.pdf`,
              content: content,
              contentType: "application/pdf",
            },
          ],
        };

        try {
          let info = await transporter.sendMail(mailOptions);
          console.log("Message sent: %s", info.messageId);

          await new sql.Request(transaction)
            .input('kUmsatzBeleg', kUmsatzBeleg)
            .input('datevlink', 'pending')
            .query("UPDATE tUmsatzBeleg SET datevlink = @datevlink WHERE kUmsatzBeleg = @kUmsatzBeleg");
        } catch (emailError) {
          console.error("Error sending email:", emailError);
          throw emailError;
        }
      }

      // Process PdfObjekt documents
      const pdfResult = await new sql.Request(transaction).query(
        "SELECT TOP 1 kPdfObjekt, content FROM tPdfObjekt WHERE markDown is not null and datevlink is null"
      );

      if (pdfResult.recordset.length > 0) {
        const { kPdfObjekt, content } = pdfResult.recordset[0];

        const mailOptions = {
          from: '"Growheads" <sebgreenbus@gmail.com>',
          to: "97bfd9eb-770f-481a-accb-e69649d36a9e@uploadmail.datev.de",
          subject: `Rechnung ${kPdfObjekt} für Datev`,
          text: "", // No body text as requested
          attachments: [
            {
              filename: `Rechnung${kPdfObjekt}.pdf`,
              content: content,
              contentType: "application/pdf",
            },
          ],
        };

        try {
          let info = await transporter.sendMail(mailOptions);
          console.log("Message sent: %s", info.messageId);

          await new sql.Request(transaction)
            .input('kPdfObjekt', kPdfObjekt)
            .input('datevlink', 'pending')
            .query("UPDATE tPdfObjekt SET datevlink = @datevlink WHERE kPdfObjekt = @kPdfObjekt");
        } catch (emailError) {
          console.error("Error sending email:", emailError);
          throw emailError;
        }
      }
    });

    res.json({ success: true, message: 'Datev sync processing completed' });
  } catch (error) {
    console.error('Error processing Datev sync:', error);
    res.status(500).json({ error: 'Failed to process Datev sync: ' + error.message });
  }
});

module.exports = router;

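For orientation, the `/document-status` route above resolves to a payload of this shape; the counts here are made-up example values, only the keys come from the code:

```js
// Example response from GET /api/data/document-status (illustrative counts).
const exampleStatus = {
  needMarkdown: 12,   // tUmsatzBeleg + tPdfObjekt rows with markDown IS NULL
  needExtraction: 7,  // markDown present, extraction IS NULL
  needDatevSync: 7,   // markDown present, datevlink IS NULL
  needDatevUpload: 3, // datevlink = 'pending'
  details: {
    markdown:    { umsatzBeleg: 9, pdfObjekt: 3 },
    extraction:  { umsatzBeleg: 5, pdfObjekt: 2 },
    datevSync:   { umsatzBeleg: 5, pdfObjekt: 2 },
    datevUpload: { umsatzBeleg: 2, pdfObjekt: 1 },
  },
};
```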
@@ -8,6 +8,7 @@ const kreditors = require('./kreditors');
const bankingTransactions = require('./bankingTransactions');
const accountingItems = require('./accountingItems');
const csvImport = require('./csvImport');
const documentProcessing = require('./documentProcessing');

const router = express.Router();

@@ -20,5 +21,6 @@ router.use(kreditors);
router.use(bankingTransactions);
router.use(accountingItems);
router.use(csvImport);
router.use(documentProcessing);

module.exports = router;

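The frontend calls `/api/data/document-status`, and the new file registers `/document-status` on this aggregate router, so the router is presumably mounted at `/api/data` in the main server file, which is not part of this diff. Roughly:

```js
// Assumed wiring in the main server file (not shown in this commit).
const express = require('express');
const dataRoutes = require('./routes/data'); // the router assembled above

const app = express();
app.use(express.json());
app.use('/api/data', dataRoutes); // makes /api/data/document-status etc. resolvable

app.listen(process.env.PORT || 3001); // port is an assumption
```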