This commit is contained in:
sebseb7
2025-09-07 05:20:25 +02:00
commit 9cc1675862
8 changed files with 2440 additions and 0 deletions

4
.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
node_modules
monitor_logs
tables
.env

129
extract_mssql_defs.js Normal file
View File

@@ -0,0 +1,129 @@
// Node.js script to extract table, view, and procedure definitions from a MSSQL database
// Requires: npm install mssql
// Fill in your connection details below
const sql = require('mssql');
// Connection settings. Credentials come from the environment so secrets are
// not committed to source control (they were previously hard-coded here);
// the fallbacks preserve the old defaults for local development. The env-var
// names match those used by mssql_monitor.js for consistency.
const config = {
  user: process.env.SQLUSER || 'sa',
  password: process.env.SQLPASSWORD || 'sa_tekno23',
  server: process.env.SQLSERVER || '10.10.10.3', // e.g. 'localhost'
  database: process.env.SQLDATABASE || 'eazybusiness',
  options: {
    encrypt: false, // Set to true if using Azure
    trustServerCertificate: true // For local dev/testing
  }
};
/**
 * Build a CREATE TABLE statement for every user table in the database.
 * @param {object} pool - connected mssql pool (anything with request().query()).
 * @returns {Promise<string[]>} one DDL string per table.
 */
async function getTableDefinitions(pool) {
  // Get table columns, PKs, and constraints
  const tables = await pool.request().query(`
    SELECT t.object_id, s.name AS schema_name, t.name AS table_name
    FROM sys.tables t
    INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
    ORDER BY s.name, t.name
  `);
  const results = [];
  for (const row of tables.recordset) {
    const { object_id, schema_name, table_name } = row;
    // Get columns (object_id comes straight from sys.tables, i.e. an int)
    const columns = await pool.request().query(`
      SELECT c.name AS column_name,
             TYPE_NAME(c.user_type_id) AS data_type,
             c.max_length, c.precision, c.scale, c.is_nullable, c.is_identity
      FROM sys.columns c
      WHERE c.object_id = ${object_id}
      ORDER BY c.column_id
    `);
    // Get primary key
    const pk = await pool.request().query(`
      SELECT k.name AS pk_name, c.name AS column_name
      FROM sys.key_constraints k
      INNER JOIN sys.index_columns ic ON k.parent_object_id = ic.object_id AND k.unique_index_id = ic.index_id
      INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
      WHERE k.parent_object_id = ${object_id} AND k.type = 'PK'
      ORDER BY ic.key_ordinal
    `);
    // Compose CREATE TABLE statement
    let createStmt = `CREATE TABLE [${schema_name}].[${table_name}] (\n`;
    createStmt += columns.recordset.map(col => {
      let line = `  [${col.column_name}] ${col.data_type}`;
      const type = col.data_type.toLowerCase();
      // Only length-parameterised types get a (n) suffix. text/ntext/image
      // (matched by the old regex) do not accept a length in DDL.
      if (['char', 'varchar', 'nchar', 'nvarchar', 'binary', 'varbinary'].includes(type)) {
        // sys.columns.max_length is -1 for (MAX) and counts BYTES, so the
        // declared length of UTF-16 nchar/nvarchar is max_length / 2.
        // (The previous `max_length > 0` guard made the MAX branch unreachable.)
        const declared = col.max_length === -1
          ? 'MAX'
          : (type.startsWith('n') ? col.max_length / 2 : col.max_length);
        line += `(${declared})`;
      } else if (type === 'decimal' || type === 'numeric') {
        line += `(${col.precision},${col.scale})`;
      }
      if (col.is_identity) line += ' IDENTITY(1,1)';
      line += col.is_nullable ? ' NULL' : ' NOT NULL';
      return line;
    }).join(',\n');
    if (pk.recordset.length > 0) {
      const pkCols = pk.recordset.map(r => `[${r.column_name}]`).join(', ');
      createStmt += `,\n  CONSTRAINT [${pk.recordset[0].pk_name}] PRIMARY KEY (${pkCols})`;
    }
    createStmt += '\n);';
    results.push(createStmt);
  }
  return results;
}
/**
 * Return one DDL string per view. OBJECT_DEFINITION() already yields the
 * complete original "CREATE VIEW ..." batch, so no synthetic header is
 * prepended (the previous version duplicated it, producing invalid DDL).
 * @returns {Promise<string[]>}
 */
async function getViewDefinitions(pool) {
  const views = await pool.request().query(`
    SELECT s.name AS schema_name, v.name AS view_name, OBJECT_DEFINITION(v.object_id) AS definition
    FROM sys.views v
    INNER JOIN sys.schemas s ON v.schema_id = s.schema_id
    ORDER BY s.name, v.name
  `);
  return views.recordset.map(v =>
    v.definition
      ? `${v.definition}\nGO`
      // OBJECT_DEFINITION returns NULL for encrypted/inaccessible objects.
      : `-- Definition for [${v.schema_name}].[${v.view_name}] is unavailable\nGO`
  );
}
/**
 * Return one DDL string per stored procedure. As with views,
 * OBJECT_DEFINITION() already contains the full "CREATE PROCEDURE ..."
 * batch, so the old synthetic "CREATE PROCEDURE ... AS" prefix duplicated
 * the header and produced invalid DDL.
 * @returns {Promise<string[]>}
 */
async function getProcedureDefinitions(pool) {
  const procs = await pool.request().query(`
    SELECT s.name AS schema_name, p.name AS proc_name, OBJECT_DEFINITION(p.object_id) AS definition
    FROM sys.procedures p
    INNER JOIN sys.schemas s ON p.schema_id = s.schema_id
    ORDER BY s.name, p.name
  `);
  return procs.recordset.map(p =>
    p.definition
      ? `${p.definition}\nGO`
      // NULL definition means the object is encrypted or inaccessible.
      : `-- Definition for [${p.schema_name}].[${p.proc_name}] is unavailable\nGO`
  );
}
/**
 * Entry point: dump all table, view and procedure DDL to stdout,
 * separated by GO batch terminators.
 */
async function main() {
  let pool;
  try {
    pool = await sql.connect(config);
    console.log('--- TABLES ---');
    const tables = await getTableDefinitions(pool);
    tables.forEach(def => {
      console.log(def);
      console.log('GO\n');
    });
    console.log('--- VIEWS ---');
    const views = await getViewDefinitions(pool);
    views.forEach(def => {
      console.log(def);
      console.log();
    });
    console.log('--- PROCEDURES ---');
    const procs = await getProcedureDefinitions(pool);
    procs.forEach(def => {
      console.log(def);
      console.log();
    });
  } catch (err) {
    console.error('Error:', err);
    process.exitCode = 1; // signal failure to calling scripts
  } finally {
    // Release the connection even when a query throws
    // (the pool previously leaked on any error).
    if (pool) await pool.close();
  }
}
main();

View File

@@ -0,0 +1,202 @@
// @ts-nocheck
/**
* extract_mssql_defs_to_files.js
*
* Connects to a MSSQL database, for each user table:
* 1. Extracts the CREATE TABLE DDL.
* 2. Selects the TOP 10 rows by primary key DESC (or just TOP 10 if no PK).
* 3. Writes both the DDL and the rows (as INSERT statements) into
* tables/<schema>.<table>.sql
*
* Usage:
* npm install mssql
* node extract_mssql_defs_to_files.js
*/
const sql = require('mssql');
const fs = require('fs');
const path = require('path');
// --- CONFIGURE YOUR DATABASE CONNECTION HERE ---
// --- CONFIGURE YOUR DATABASE CONNECTION VIA ENVIRONMENT VARIABLES ---
// Credentials were previously hard-coded (a security risk once committed);
// fallbacks keep the old local-dev defaults. Env-var names match
// mssql_monitor.js for consistency.
const config = {
  user: process.env.SQLUSER || 'sa',
  password: process.env.SQLPASSWORD || 'sa_tekno23',
  server: process.env.SQLSERVER || '10.10.10.3',
  database: process.env.SQLDATABASE || 'eazybusiness',
  options: { encrypt: false, trustServerCertificate: true }
};
/**
 * Create `dir` (including parents) if it does not exist.
 * mkdirSync with `recursive: true` is already a no-op for existing
 * directories, so the previous existsSync pre-check was a redundant
 * check-then-act (TOCTOU) race.
 */
async function ensureDir(dir) {
  fs.mkdirSync(dir, { recursive: true });
}
/**
 * Render a JS value as a T-SQL literal for generated INSERT statements.
 * - null/undefined -> NULL
 * - finite numbers pass through; NaN/Infinity -> NULL (they previously
 *   produced invalid unquoted SQL like `NaN`)
 * - booleans -> 1 / 0
 * - Date -> quoted ISO-8601 string (String(date) is not SQL-parseable)
 * - Buffer -> 0x... hex literal for binary columns
 * - everything else -> quoted string with single quotes doubled
 */
function escapeSqlString(val) {
  if (val === null || val === undefined) return 'NULL';
  if (typeof val === 'number') return Number.isFinite(val) ? val : 'NULL';
  if (typeof val === 'boolean') return val ? 1 : 0;
  if (val instanceof Date) return "'" + val.toISOString() + "'";
  if (Buffer.isBuffer(val)) return '0x' + val.toString('hex');
  return "'" + String(val).replace(/'/g, "''") + "'";
}
/**
 * List every user table with its schema, in stable (schema, name) order.
 * @returns {Promise<Array<{object_id:number, schema_name:string, table_name:string}>>}
 */
async function getTables(pool) {
  const result = await pool.request().query(`
    SELECT t.object_id, s.name AS schema_name, t.name AS table_name
    FROM sys.tables t
    INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
    ORDER BY s.name, t.name
  `);
  return result.recordset;
}
/**
 * Column metadata (name, type, length/precision, nullability, identity)
 * for one table, in ordinal order. `object_id` originates from sys.tables,
 * i.e. it is always a trusted integer.
 */
async function getColumns(pool, object_id) {
  const result = await pool.request().query(`
    SELECT c.name AS column_name,
           TYPE_NAME(c.user_type_id) AS data_type,
           c.max_length, c.precision, c.scale, c.is_nullable, c.is_identity
    FROM sys.columns c
    WHERE c.object_id = ${object_id}
    ORDER BY c.column_id
  `);
  return result.recordset;
}
/**
 * Primary-key constraint name and member columns for one table,
 * ordered by key position. Empty array when the table has no PK.
 */
async function getPrimaryKey(pool, object_id) {
  const result = await pool.request().query(`
    SELECT k.name AS pk_name, c.name AS column_name
    FROM sys.key_constraints k
    INNER JOIN sys.index_columns ic ON k.parent_object_id = ic.object_id AND k.unique_index_id = ic.index_id
    INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
    WHERE k.parent_object_id = ${object_id} AND k.type = 'PK'
    ORDER BY ic.key_ordinal
  `);
  return result.recordset;
}
/**
 * Compose a CREATE TABLE statement from catalog metadata.
 * @param {string} schema_name
 * @param {string} table_name
 * @param {Array<object>} columns - rows from getColumns()
 * @param {Array<object>} pk - rows from getPrimaryKey() (may be empty)
 * @returns {string} DDL text terminated by ");\n\n"
 */
function buildCreateTable(schema_name, table_name, columns, pk) {
  const columnLines = columns.map(col => {
    let line = `  [${col.column_name}] ${col.data_type}`;
    const type = col.data_type.toLowerCase();
    // Only length-parameterised types take a (n) suffix; text/ntext/image
    // (matched by the old regex) do not accept one in DDL.
    if (['char', 'varchar', 'nchar', 'nvarchar', 'binary', 'varbinary'].includes(type)) {
      // sys.columns.max_length is -1 for (MAX) and counts BYTES, so UTF-16
      // nchar/nvarchar declared lengths are max_length / 2. The previous
      // `max_length > 0` guard made the MAX branch unreachable.
      const declared = col.max_length === -1
        ? 'MAX'
        : (type.startsWith('n') ? col.max_length / 2 : col.max_length);
      line += `(${declared})`;
    } else if (type === 'decimal' || type === 'numeric') {
      line += `(${col.precision},${col.scale})`;
    }
    if (col.is_identity) line += ' IDENTITY(1,1)';
    line += col.is_nullable ? ' NULL' : ' NOT NULL';
    return line;
  });
  let createStmt = `CREATE TABLE [${schema_name}].[${table_name}] (\n`;
  createStmt += columnLines.join(',\n');
  if (pk.length > 0) {
    const pkCols = pk.map(r => `[${r.column_name}]`).join(', ');
    createStmt += `,\n  CONSTRAINT [${pk[0].pk_name}] PRIMARY KEY (${pkCols})`;
  }
  createStmt += '\n);\n\n';
  return createStmt;
}
/**
 * Fetch the 10 newest rows of a table, ordering by the primary key
 * descending when one exists (otherwise an arbitrary TOP 10).
 * Returns [] when the query fails, e.g. on inaccessible tables.
 */
async function getLatestRows(pool, schema_name, table_name, pkCols, columns) {
  const selectList = columns.map(c => `[${c.column_name}]`).join(', ');
  const orderClause = pkCols.length > 0
    ? `ORDER BY ${pkCols.map(col => `[${col}] DESC`).join(', ')}`
    : '';
  const query = `
    SELECT TOP 10 ${selectList}
    FROM [${schema_name}].[${table_name}]
    ${orderClause}
  `;
  try {
    const result = await pool.request().query(query);
    return result.recordset;
  } catch (e) {
    // Table may be empty or inaccessible
    return [];
  }
}
/**
 * Render the sampled rows as INSERT statements, or a placeholder SQL
 * comment when the table yielded no rows.
 */
function buildInsertStatements(schema_name, table_name, columns, rows) {
  if (!rows || rows.length === 0) return '-- No data rows found\n';
  const colNames = columns.map(c => `[${c.column_name}]`).join(', ');
  const statements = rows.map(row => {
    const vals = columns.map(c => escapeSqlString(row[c.column_name])).join(', ');
    return `INSERT INTO [${schema_name}].[${table_name}] (${colNames}) VALUES (${vals});\n`;
  });
  return '-- Latest 10 rows:\n' + statements.join('');
}
/**
 * One row per view with its full definition text
 * (NULL definition for encrypted/inaccessible views).
 */
async function getViews(pool) {
  const result = await pool.request().query(`
    SELECT s.name AS schema_name, v.name AS view_name, OBJECT_DEFINITION(v.object_id) AS definition
    FROM sys.views v
    INNER JOIN sys.schemas s ON v.schema_id = s.schema_id
    ORDER BY s.name, v.name
  `);
  return result.recordset;
}
/**
 * One row per stored procedure with its full definition text
 * (NULL definition for encrypted/inaccessible procedures).
 */
async function getProcedures(pool) {
  const result = await pool.request().query(`
    SELECT s.name AS schema_name, p.name AS proc_name, OBJECT_DEFINITION(p.object_id) AS definition
    FROM sys.procedures p
    INNER JOIN sys.schemas s ON p.schema_id = s.schema_id
    ORDER BY s.name, p.name
  `);
  return result.recordset;
}
/**
 * Entry point: write DDL + sample rows for every table, then each view and
 * procedure definition, into tables/<schema>.<name>.sql.
 * NOTE: tables, views and procedures share one output directory, so a view
 * named exactly like a table overwrites that table's file.
 */
async function main() {
  await ensureDir(path.join(__dirname, 'tables'));
  let pool;
  try {
    pool = await sql.connect(config);
    const tables = await getTables(pool);
    // TABLES
    for (const t of tables) {
      const { object_id, schema_name, table_name } = t;
      const columns = await getColumns(pool, object_id);
      const pk = await getPrimaryKey(pool, object_id);
      const pkCols = pk.map(r => r.column_name);
      const ddl = buildCreateTable(schema_name, table_name, columns, pk);
      const rows = await getLatestRows(pool, schema_name, table_name, pkCols, columns);
      const inserts = buildInsertStatements(schema_name, table_name, columns, rows);
      const fileName = `${schema_name}.${table_name}.sql`;
      const outPath = path.join(__dirname, 'tables', fileName);
      fs.writeFileSync(outPath, ddl + inserts, 'utf8');
      console.log('Written:', fileName);
    }
    // VIEWS — OBJECT_DEFINITION() already contains the complete
    // "CREATE VIEW ..." text, so no synthetic header is prepended
    // (the previous version duplicated it, producing invalid DDL).
    const views = await getViews(pool);
    for (const v of views) {
      const { schema_name, view_name, definition } = v;
      const viewDef = definition
        ? `${definition}\nGO\n`
        : `-- Definition for [${schema_name}].[${view_name}] is unavailable\n`;
      const fileName = `${schema_name}.${view_name}.sql`;
      fs.writeFileSync(path.join(__dirname, 'tables', fileName), viewDef, 'utf8');
      console.log('Written:', fileName);
    }
    // PROCEDURES — same reasoning as for views.
    const procs = await getProcedures(pool);
    for (const p of procs) {
      const { schema_name, proc_name, definition } = p;
      const procDef = definition
        ? `${definition}\nGO\n`
        : `-- Definition for [${schema_name}].[${proc_name}] is unavailable\n`;
      const fileName = `${schema_name}.${proc_name}.sql`;
      fs.writeFileSync(path.join(__dirname, 'tables', fileName), procDef, 'utf8');
      console.log('Written:', fileName);
    }
    console.log('Done.');
  } catch (err) {
    console.error('Error:', err);
    process.exitCode = 1; // signal failure to calling scripts
  } finally {
    // Always release the pool, even on failure.
    if (pool) await pool.close();
  }
}
main();

195
mssql_monitor.js Normal file
View File

@@ -0,0 +1,195 @@
// SQL Server statement / procedure live monitor
// -------------------------------------------------
// • Starts a dedicated Extended-Event session on the SQL-Server instance.
// • Streams finished RPC & batch events every second.
// • Each event (sql-text, parameters, duration, row-count …) is appended
// to a session-specific log-file using fs.appendFileSync so the file
// can be watched while it grows (e.g. with “tail -f”).
// • A fresh log directory is created per run. Left-overs from previous
// runs (old XE sessions + log files) are removed automatically and
// again on Ctrl-C / SIGTERM.
//
// REQUIREMENTS
// -------------
// • npm install mssql (already present in package.json)
// • The executing principal must have ALTER ANY EVENT SESSION permission.
//
// USAGE
// ------
// > node mssql_monitor.js # uses env-vars for credentials
//
// Environment variables recognised:
// SQLSERVER ( default: 'localhost' )
// SQLUSER
// SQLPASSWORD
// SQLDATABASE ( default: 'master' )
/* eslint-disable no-console */
const fs = require('fs');
const path = require('path');
const sql = require('mssql');
// ---------- configuration ----------------------------------------------------
const cfg = {
  server : process.env.SQLSERVER || '10.10.10.3',
  // Environment variables are strings; the mssql driver expects a numeric
  // port, so coerce (Number(undefined) is NaN, which falls back to 1433).
  port : Number(process.env.SQLPORT) || 1433,
  user : process.env.SQLUSER || 'sa',
  password: process.env.SQLPASSWORD || 'sa_tekno23',
  database: process.env.SQLDATABASE || 'eazybusiness',
  options : { encrypt: false, trustServerCertificate: true }
};
// delay (ms) between polling the ring-buffer for new events
const POLL_INTERVAL = 1_000;
// keep log-files / XE sessions younger than:
const KEEP_HOURS = 24;
// -----------------------------------------------------------------------------
(async function main () {
  // One Extended-Events session per run; the timestamp keeps names unique
  // so concurrent/stale monitors never collide.
  const sessionName = `js_monitor_${Date.now()}`; // XE session
  const logDir = path.join(__dirname, 'monitor_logs');
  fs.mkdirSync(logDir, { recursive: true });
  await cleanupLeftovers(logDir, sessionName); // old sessions
  const pool = await sql.connect(cfg); // connect
  await createXeSession(pool, sessionName); // XE start
  console.log(`Monitoring started → ${logDir} (per client session)`);
  let lastRead = new Date(0); // first run
  // -------------------------------------------------------------------------
  // Poll the ring buffer once per POLL_INTERVAL and append each new event to
  // a per-client log file (one file per user/host/session-id combination).
  const timer = setInterval(async () => {
    try {
      const events = await fetchNewEvents(pool, sessionName, lastRead);
      for (const ev of events) {
        // Compose a unique log file name per client session
        const sid = ev.session_id || 'unknown';
        // Strip characters that are illegal in file names (Windows + Unix).
        const user = (ev.username || 'unknown').replace(/[\\/:<>|?*"]/g, '_');
        const host = (ev.client_hostname || 'unknown').replace(/[\\/:<>|?*"]/g, '_');
        const perClientFile = path.join(
          logDir,
          `js_monitor_${user}_${host}_sid${sid}.log`
        );
        // Remove fields not needed in log entry (they are encoded in the
        // file name already).
        const { username, session_id, client_hostname, ...logEntry } = ev;
        fs.appendFileSync(perClientFile, JSON.stringify(logEntry,null,2) + '\n', 'utf8');
        // Advance the high-water mark so the next poll only sees newer events.
        lastRead = ev.timestamp;
      }
    } catch (e) { console.error('Polling error:', e); }
  }, POLL_INTERVAL);
  // graceful shutdown: stop polling, drop the XE session, close the pool ----
  const shutdown = async () => {
    console.log('\nCtrl-C received, cleaning up...');
    clearInterval(timer);
    await dropXeSession(pool, sessionName);
    await pool.close();
    console.log('Monitor stopped and cleaned-up.');
    process.exit(0);
  };
  process.on('SIGINT' , shutdown);
  process.on('SIGTERM', shutdown);
})().catch(e => { console.error(e); process.exit(1); });
/* -------------------------------------------------------------------------- */
/**
 * Create and start the Extended-Events session that captures completed RPC
 * calls and SQL batches into a ring-buffer target.
 * A defensive DROP runs first in case a session of the same name exists;
 * the CREATE itself is executed via dynamic SQL (EXEC @sql).
 * `name` is generated locally (js_monitor_<timestamp>), so interpolating it
 * into the DDL is safe — never pass externally supplied names here.
 */
async function createXeSession (pool, name) {
  const batch = `
IF EXISTS (SELECT 1 FROM sys.server_event_sessions WHERE name = '${name}')
  DROP EVENT SESSION [${name}] ON SERVER;
DECLARE @sql NVARCHAR(MAX) = '
CREATE EVENT SESSION [${name}] ON SERVER
ADD EVENT sqlserver.rpc_completed
  (SET collect_statement=(1)
   ACTION(sqlserver.sql_text, sqlserver.username, sqlserver.session_id, sqlserver.client_hostname)),
ADD EVENT sqlserver.sql_batch_completed
  (ACTION(sqlserver.sql_text, sqlserver.username, sqlserver.session_id, sqlserver.client_hostname))
ADD TARGET package0.ring_buffer
WITH (MAX_DISPATCH_LATENCY = 1 SECONDS);
';
EXEC (@sql);
ALTER EVENT SESSION [${name}] ON SERVER STATE = START;`;
  await pool.request().batch(batch);
}
/* -------------------------------------------------------------------------- */
/**
 * Stop and drop an Extended-Events session if it exists.
 * The existence check uses a bound @name parameter; the ALTER/DROP must
 * interpolate `name` because identifiers cannot be parameterised in DDL.
 * Callers only pass our own `js_monitor_*` session names.
 */
async function dropXeSession (pool, name) {
  const cmd = `
IF EXISTS (SELECT 1 FROM sys.server_event_sessions WHERE name = @name)
BEGIN
  ALTER EVENT SESSION [${name}] ON SERVER STATE = STOP;
  DROP EVENT SESSION [${name}] ON SERVER;
END`;
  await pool.request().input('name', sql.NVarChar, name).batch(cmd);
}
/* -------------------------------------------------------------------------- */
/**
 * Read every ring-buffer event newer than `last` for XE session `name`.
 * The target XML is shredded server-side into one row per event, ordered
 * oldest-first so the caller can advance its high-water mark.
 * NOTE(review): XE @timestamp values appear to be UTC and `last` is compared
 * via its ISO string (also UTC) — confirm against the server's behaviour.
 * `duration` is divided by 1000; presumably the raw value is microseconds,
 * yielding milliseconds — verify.
 */
async function fetchNewEvents (pool, name, last) {
  // convert last JS Date → SQL datetime2
  const lastTS = last.toISOString();
  const query = `
;WITH src AS (
  SELECT CAST(t.target_data AS XML) AS x
  FROM sys.dm_xe_session_targets AS t
  JOIN sys.dm_xe_sessions AS s
    ON t.event_session_address = s.address
  WHERE s.name = @name
    AND t.target_name = 'ring_buffer'
)
SELECT
  evt.value('@timestamp', 'datetime2') AS [timestamp],
  evt.value('(data[@name="statement"]/value)[1]', 'nvarchar(max)')
    AS statement_text,
  evt.value('(action[@name="sql_text"]/value)[1]', 'nvarchar(max)')
    AS batch_text,
  evt.value('(data[@name="object_name"]/value)[1]', 'nvarchar(max)')
    AS object_name,
  evt.value('(data[@name="row_count"]/value)[1]', 'bigint')
    AS rows,
  evt.value('(data[@name="duration"]/value)[1]', 'bigint') / 1000
    AS duration_ms,
  evt.value('(action[@name="username"]/value)[1]', 'nvarchar(128)')
    AS username,
  evt.value('(action[@name="session_id"]/value)[1]', 'int')
    AS session_id,
  evt.value('(action[@name="client_hostname"]/value)[1]', 'nvarchar(128)')
    AS client_hostname
FROM src
CROSS APPLY x.nodes('//RingBufferTarget/event') n(evt)
WHERE evt.value('@timestamp', 'datetime2') > @last
ORDER BY [timestamp];`;
  const rs = await pool.request()
    .input('name', sql.NVarChar, name)
    .input('last', sql.DateTime2, lastTS)
    .query(query);
  // ensure proper JS dates (the driver may hand back strings here)
  return rs.recordset.map(r => ({ ...r, timestamp: new Date(r.timestamp) }));
}
/* -------------------------------------------------------------------------- */
/**
 * Remove artefacts of previous runs: log files older than KEEP_HOURS and
 * any leftover js_monitor_* XE sessions other than the current one.
 * Both phases are best-effort — failures (e.g. missing permissions) are
 * deliberately ignored so the monitor can still start.
 */
async function cleanupLeftovers (logDir, currentSession) {
  // delete old log files ----------------------------------------------------
  const cutoff = Date.now() - KEEP_HOURS * 3_600_000;
  for (const entry of fs.readdirSync(logDir)) {
    const fullPath = path.join(logDir, entry);
    try {
      if (fs.statSync(fullPath).mtimeMs < cutoff) fs.rmSync(fullPath);
    } catch (_) { /* ignore */ }
  }
  // stop / drop stale XE sessions ------------------------------------------
  try {
    const pool = await sql.connect(cfg);
    const rs = await pool.request()
      .query(`SELECT name FROM sys.server_event_sessions
              WHERE name LIKE 'js_monitor_%'`);
    for (const { name } of rs.recordset) {
      if (name !== currentSession) await dropXeSession(pool, name);
    }
    await pool.close();
  } catch (e) { /* might lack permission ignore */ }
}

1702
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

18
package.json Normal file
View File

@@ -0,0 +1,18 @@
{
"name": "som",
"version": "1.0.0",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"description": "",
"dependencies": {
"dotenv": "^17.2.2",
"express": "^5.1.0",
"mssql": "^11.0.1",
"openai": "^5.10.2"
}
}

44
server.js Normal file
View File

@@ -0,0 +1,44 @@
const express = require('express');
const fs = require('fs');
const path = require('path');
const app = express();
// Port the local file-index HTTP server listens on.
const PORT = 3000;
// Directory whose files are listed and served (the extract scripts above
// write their .sql output here).
const DIRECTORY = path.join(__dirname, 'tables'); // Change to your directory
// GET / — render an HTML index of every file in DIRECTORY.
app.get('/', (req, res) => {
  fs.readdir(DIRECTORY, (err, files) => {
    if (err) return res.status(500).send('Error reading directory');
    let html = '<h1>File Index</h1><ul>';
    files.forEach(file => {
      // Escape the displayed name as well as URL-encoding the href —
      // file names containing <, & or quotes previously broke the markup.
      html += `<li><a href="/file/${encodeURIComponent(file)}">${escapeHtml(file)}</a></li>`;
    });
    html += '</ul>';
    res.send(html);
  });
});
// GET /file/:filename — show one file from DIRECTORY as escaped plain text.
app.get('/file/:filename', (req, res) => {
  // path.basename strips any directory components, preventing ../ traversal
  // out of DIRECTORY (req.params.filename is untrusted client input).
  const filename = path.basename(req.params.filename);
  const filepath = path.join(DIRECTORY, filename);
  fs.readFile(filepath, 'utf8', (err, data) => {
    if (err) return res.status(404).send('File not found');
    // BUG FIX: the heading previously printed the literal "$(unknown)"
    // instead of the file name; the name is escaped like the body.
    res.send(`<h1>${escapeHtml(filename)}</h1><pre>${escapeHtml(data)}</pre><a href="/">Back</a>`);
  });
});
/**
 * Escape the five HTML-special characters (& < > " ') so arbitrary text can
 * be embedded safely in an HTML response.
 */
function escapeHtml(text) {
  const entities = {
    '&': '&amp;',
    '<': '&lt;',
    '>': '&gt;',
    '"': '&quot;',
    "'": '&#39;'
  };
  return text.replace(/[&<>"']/g, ch => entities[ch]);
}
// Start the HTTP server.
app.listen(PORT, () => {
  console.log(`Server running at http://localhost:${PORT}`);
});

146
summarize_sql.js Normal file
View File

@@ -0,0 +1,146 @@
// summarize_sql.js
// Usage: node summarize_sql.js <inputfile.sql>
// Requires: OPENAI_API_KEY in environment
require('dotenv').config();
const fs = require('fs');
const path = require('path');
const { OpenAI } = require('openai');
// Fail fast if the OpenAI key is missing — every run requires it.
if (!process.env.OPENAI_API_KEY) {
  console.error('Error: OPENAI_API_KEY environment variable is required.');
  console.error('Please create a .env file with your OpenAI API key or set the environment variable.');
  process.exit(1);
}
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});
// CLI: node summarize_sql.js <inputfile.sql>
if (process.argv.length < 3) {
  console.error('Usage: node summarize_sql.js <inputfile.sql>');
  process.exit(1);
}
const inputFile = process.argv[2];
const inputDir = path.dirname(inputFile);
const baseName = path.basename(inputFile, '.sql');
// The summary is written next to the input: foo.sql -> foo.summary.txt
const summaryFile = path.join(inputDir, baseName + '.summary.txt');
/**
 * Resolve "Schema.Name" to tables/Schema.Name.sql, or null when absent.
 * The name originates from LLM tool-call output (i.e. untrusted input), so
 * the resolved path is confined to the tables/ directory — "../" or
 * absolute-path escapes now return null instead of reading arbitrary files.
 */
function findTableOrViewFile(schemaDotName) {
  // e.g. "Amazon.tVcsLiteUploadQueue" => tables/Amazon.tVcsLiteUploadQueue.sql
  const tablesDir = path.join(__dirname, 'tables');
  const file = path.resolve(tablesDir, schemaDotName + '.sql');
  if (!file.startsWith(tablesDir + path.sep)) {
    return null; // traversal attempt
  }
  if (fs.existsSync(file)) {
    return file;
  }
  return null;
}
/**
 * Return the CREATE statement for a table/view, or a SQL comment stating
 * that the definition could not be found.
 */
function readTableOrView(schemaDotName) {
  const file = findTableOrViewFile(schemaDotName);
  return file
    ? fs.readFileSync(file, 'utf8')
    : `-- Definition for ${schemaDotName} not found.`;
}
// System prompt steering the model; it advertises the read_table_or_view
// tool that main() exposes via OpenAI function-calling.
const SYSTEM_PROMPT = `
You are a SQL expert assistant. Your task is to help the user understand the definition and logic of a given SQL stored procedure or view.
- Focus on explaining the purpose, main logic, and important details of the procedure/view.
- If you need to look up the definition of a table or view referenced in the code, you can call the function read_table_or_view(schemaDotName) (e.g. read_table_or_view('Amazon.tVcsLiteUploadQueue')) and you will receive the full CREATE statement for that object.
- If you need more than one table/view definition, call read_table_or_view multiple times.
- Be concise but thorough. Output your summary in clear, readable language.
`;
/**
 * Drive the summarisation loop: send the SQL text to the model and service
 * read_table_or_view tool calls until the model answers with plain content
 * (or the tool-call budget of 5 rounds is exhausted). The resulting summary
 * is written to <input>.summary.txt.
 */
async function main() {
  const sqlText = fs.readFileSync(inputFile, 'utf8');
  const messages = [
    { role: 'system', content: SYSTEM_PROMPT },
    { role: 'user', content: `Please summarize the following SQL stored procedure or view:\n\n${sqlText}` }
  ];
  // Function tool definition for OpenAI function-calling
  const functions = [
    {
      name: "read_table_or_view",
      description: "Get the CREATE statement for a table or view by schema.name",
      parameters: {
        type: "object",
        properties: {
          schemaDotName: {
            type: "string",
            description: "The schema and name, e.g. 'Amazon.tVcsLiteUploadQueue'"
          }
        },
        required: ["schemaDotName"]
      }
    }
  ];
  let summary = null;
  let maxToolCalls = 5; // Prevent infinite loops
  while (maxToolCalls-- > 0) {
    // Log model call
    console.log('\n--- Model Call ---');
    console.log('Model:', "o4-mini");
    console.log('Messages:', JSON.stringify(messages, null, 2));
    console.log('Functions:', JSON.stringify(functions, null, 2));
    const response = await openai.chat.completions.create({
      model: "o4-mini",
      messages,
      service_tier: "flex",
      functions,
      function_call: "auto"
    });
    const msg = response.choices[0].message;
    if (msg.content) {
      summary = msg.content;
      break;
    } else if (msg.function_call) {
      // BUG FIX: the assistant message carrying function_call must be part
      // of the conversation before the matching `function` result message —
      // it was previously never pushed, so follow-up requests had an
      // orphaned function message the API cannot associate with a call.
      messages.push(msg);
      const { name, arguments: argsJson } = msg.function_call;
      console.log('\n--- Tool Call ---');
      console.log('Function:', name);
      console.log('Arguments:', argsJson);
      if (name === "read_table_or_view") {
        let args;
        try {
          args = JSON.parse(argsJson);
        } catch (e) {
          // Answer the call with a function-role error so the model can retry
          // (an assistant-role reply here is an invalid conversation shape).
          messages.push({
            role: 'function',
            name,
            content: "Error: Invalid function call arguments."
          });
          continue;
        }
        const def = readTableOrView(args.schemaDotName);
        // Log tool result (truncate if large)
        const defPreview = def.length > 500 ? def.slice(0, 500) + '...[truncated]' : def;
        console.log('Result:', defPreview);
        messages.push({
          role: 'function',
          name: 'read_table_or_view',
          content: def
        });
      } else {
        messages.push({
          role: 'function',
          name,
          content: `Error: Unknown function ${name}`
        });
      }
    } else {
      // Neither content nor a tool call — nothing further we can do.
      break;
    }
  }
  if (!summary) {
    summary = "Error: Could not generate summary after several tool calls.";
  }
  fs.writeFileSync(summaryFile, summary, 'utf8');
  console.log(`Summary written to ${summaryFile}`);
}
main().catch(e => {
  console.error(e);
  process.exit(1);
});
hi