Enhance output logging in CLI and ModelDialog by integrating chalk for better readability. Update output handling to include detailed reasoning and token information. Refactor message management in ModelDialog to improve response processing and add support for function call arguments. Adjust chroot paths in various tools for consistency.

sebseb7
2025-08-14 09:41:17 +00:00
parent df85e5e603
commit 657b6af993
9 changed files with 112 additions and 49 deletions
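
The heart of the change is in the last hunk below: instead of sending `this.messages` once and reading back a single response, ModelDialog now drains the message queue, streams one response, lets each tool call push a `function_call_output` item back onto the queue, and repeats until the queue stays empty. A condensed sketch of that loop, with the delta/reasoning event wiring and error handling elided (names follow the diff; `openai`, `toolsByFile`, and the dialog's arrays are assumed to be set up as in the surrounding file):

    // Condensed sketch, not the full implementation from the diff.
    async function runDialogTurn(dialog, openai, toolsByFile) {
        const outputs = [];
        do {
            const messagesToSend = dialog.messages.splice(0); // drain the queue
            dialog.messagesSent.push(...messagesToSend);      // keep a record of what went out
            const stream = openai.responses.stream({
                model: 'gpt-5-nano',
                input: messagesToSend,
                tools: Object.values(toolsByFile).map(t => t.def),
            });
            stream.on('response.completed', async (event) => {
                outputs.push(...event.response.output);
                // Each tool call enqueues its result, forcing another iteration.
                for (const toolCall of event.response.output.filter(i => i.type === 'function_call')) {
                    const result = await toolsByFile[toolCall.name].run(JSON.parse(toolCall.arguments));
                    dialog.messages.push({
                        type: 'function_call_output',
                        call_id: toolCall.call_id,
                        output: JSON.stringify(result),
                    });
                }
            });
            await Array.fromAsync(stream); // consume the stream to completion
        } while (dialog.messages.length > 0); // tools produced new input, go again
        return outputs;
    }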


@@ -4,6 +4,7 @@ import EventEmitter from 'events';
 import path from 'path';
 import fs from 'fs/promises';
 import { fileURLToPath } from 'node:url';
+import chalk from 'chalk';
 
 async function loadTools() {
     const __dirname = path.dirname(fileURLToPath(import.meta.url));
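
chalk, newly imported above, wraps strings in ANSI escape codes so log categories stand out on a terminal; the handlers added further down use three colors. A minimal standalone illustration (the logged values are invented):

    import chalk from 'chalk';

    console.log(chalk.green('tool call:'), { name: 'example' });  // labels in green
    process.stdout.write(chalk.yellow('{"path":"/tmp"}'));        // streamed arguments in yellow
    console.error(chalk.red('Error parsing arguments:'));         // failures in red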
@@ -43,6 +44,7 @@ if (!Array.fromAsync) {
 class ModelDialog {
     constructor() {
         this.messages = [systemprompt];
+        this.messagesSent = [];
         this.isActive = false;
         this.currentStream = null;
         this.previousResponseId = null;
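
`messagesSent` exists because the send loop in the next hunk empties `this.messages` on every iteration with `splice(0)`; without it there would be no record of what was already handed to the API. A small standalone illustration of that drain pattern (values are invented):

    const queue = ['a', 'b'];
    const sent = [];

    // splice(0) removes every element and returns them, leaving the array
    // empty, so anything pushed afterwards belongs to the next batch.
    const batch = queue.splice(0);
    sent.push(...batch);

    console.log(batch);        // ['a', 'b']
    console.log(queue.length); // 0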
@@ -79,54 +81,99 @@ class ModelDialog {
         this.messages.push({"role": "user", "content": [ {"type": "input_text","text": prompt }]});
-        const call = {
-            model: 'gpt-5-nano',
-            input: structuredClone(this.messages),
-            text: { format: { type: 'text' }, verbosity: 'low' },
-            reasoning: { effort: 'medium', summary: 'detailed' },
-            tools: Object.values(toolsByFile).map(t => t.def),
-            store: true,
-            previous_response_id: this.previousResponseId
-        }
+        const outputs = [];
-        this.currentStream = await openai.responses.stream(call);
-        this.currentStream.on('response.created', (event) => {
-            this.previousResponseId = event.response.id;
-        });
+        do{
+            const messagesToSend = this.messages.splice(0);
+            this.messagesSent.push(...messagesToSend);
-        const deltas = [];
-        this.currentStream.on('response.output_text.delta', (event) => {
-            deltas.push(event.delta);
-            this.emitter.emit('outputUpdate', deltas.join(''));
-        });
+            const call = {
+                model: 'gpt-5-nano',
+                input: messagesToSend,
+                text: { format: { type: 'text' }, verbosity: 'low' },
+                reasoning: { effort: 'medium', summary: 'detailed' },
+                tools: Object.values(toolsByFile).map(t => t.def),
+                store: true,
+                previous_response_id: this.previousResponseId
+            }
-        const reasoningDeltas = [];
-        this.currentStream.on('response.reasoning_summary_text.delta', (event) => {
-            if(!reasoningDeltas[event.summary_index]) reasoningDeltas[event.summary_index] = [];
-            reasoningDeltas[event.summary_index].push(event.delta);
-            this.emitter.emit('reasoningUpdate', reasoningDeltas[event.summary_index].join(''));
-        });
+            this.currentStream = openai.responses.stream(call);
+            this.currentStream.on('response.created', (event) => {
+                this.previousResponseId = event.response.id;
+            });
-        this.currentStream.on('response.reasoning_summary_text.done', (event) => {
-            //console.log(event);
-        });
+            const deltas = [];
+            this.currentStream.on('response.output_text.delta', (event) => {
+                deltas.push(event.delta);
+                this.emitter.emit('outputUpdate', deltas.join(''));
+            });
+            const reasoningDeltas = [];
+            this.currentStream.on('response.reasoning_summary_text.delta', (event) => {
+                if(!reasoningDeltas[event.summary_index]) reasoningDeltas[event.summary_index] = [];
+                reasoningDeltas[event.summary_index].push(event.delta);
+                this.emitter.emit('reasoningUpdate', reasoningDeltas[event.summary_index].join(''));
+            });
+            this.currentStream.on('response.reasoning_summary_text.done', (event) => {
+                //console.log(event);
+            });
+            this.currentStream.on('response.function_call_arguments.delta', (event) => {
+                process.stdout.write(chalk.yellow(event.delta));
+            });
+            this.currentStream.on('response.function_call_arguments.done', (event) => {
+                process.stdout.write("\n");
+            });
+            this.currentStream.on('response.completed', async (event) => {
+                this.handleUsage(event.response.usage, event.response.model);
+                outputs.push(...event.response.output);
-        let output;
-        this.currentStream.on('response.completed', async (event) => {
-            this.handleUsage(event.response.usage, event.response.model);
-            output = event.response.output;
-        });
+                for(const toolCall of event.response.output.filter(i => i.type === 'function_call')){
+                    // Limit the 'arguments' field to 400 characters for logging
+                    const limitedArgs = typeof toolCall.arguments === 'string'
+                        ? (toolCall.arguments.length > 400 ? toolCall.arguments.slice(0, 400) + '...[truncated]' : toolCall.arguments)
+                        : toolCall.arguments;
+                    console.log(
+                        chalk.green('tool call:'),
+                        { ...toolCall, arguments: limitedArgs }
+                    );
+                    const tool = toolsByFile[toolCall.name];
+                    let args;
+                    try{
+                        args = JSON.parse(toolCall.arguments);
+                    } catch(e){
+                        console.error(chalk.red('Error parsing arguments:'), e, toolCall.arguments);
+                        this.messages.push({
+                            type: "function_call_output",
+                            call_id: toolCall.call_id,
+                            output: {error: 'Exception in parsing arguments', exception: e},
+                        });
+                        continue;
+                    }
+                    const result = await tool.run(args);
+                    console.log(chalk.green('function call result:'),'<toolCall.name>',toolCall.name,'</toolCall.name>\n','<args>',args,'</args>\n','<result>',result,'</result>');
+                    this.messages.push({
+                        type: "function_call_output",
+                        call_id: toolCall.call_id,
+                        output: JSON.stringify(result),
+                    });
+                }
+            });
+            await Array.fromAsync(this.currentStream);
+            console.log(chalk.green('Do we need to loop? messages in array = '),this.messages.length)
+        } while(this.messages.length > 0);
-        await Array.fromAsync(this.currentStream);
         this.isActive = false;
         const now = Date.now();
-        this.lastDebouncedUpdate = now;
+        this.lastDebouncedUpdate = 0;
         return {
-            output: output.filter(i => i.type === 'message').map(i => i.content[0].text).join('\n') ,
-            reasoning: reasoningDeltas.map(i => i.join('')),
-            inputTokens: this.inputTokens, outputTokens: this.outputTokens, cachedTokens: this.cachedTokens};
+            output: outputs.filter(i => i.type === 'message').map(i => i.content[0].text) ,
+            reasoning: outputs.filter(i => i.type === 'reasoning').map(i => i.summary.map(j => j.text).join('\n')),
+            inputTokens: this.inputTokens, outputTokens: this.outputTokens, cachedTokens: this.cachedTokens
+        };
     }
 }
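
Two of the logging patterns added above are worth isolating: tool-call arguments arrive as a JSON string that may be malformed, so the parse is wrapped in try/catch and the failure is reported back instead of crashing the loop, and arguments are clipped to 400 characters before logging. A standalone sketch of both (the `toolCall` value is hypothetical, shaped like the Responses API output items in the diff):

    import chalk from 'chalk';

    // Clip long argument strings so a single tool call cannot flood the log.
    function limitForLog(args, max = 400) {
        if (typeof args !== 'string') return args;
        return args.length > max ? args.slice(0, max) + '...[truncated]' : args;
    }

    // Hypothetical tool call; note the arguments string is invalid JSON.
    const toolCall = { name: 'readFile', call_id: 'call_1', arguments: '{"path": "/tmp/x"' };

    console.log(chalk.green('tool call:'), { ...toolCall, arguments: limitForLog(toolCall.arguments) });

    try {
        const args = JSON.parse(toolCall.arguments);
        console.log(chalk.green('parsed args:'), args);
    } catch (e) {
        // The diff reports this back to the model as a function_call_output.
        console.error(chalk.red('Error parsing arguments:'), e.message);
    }

One inconsistency worth noting in the error path: the diff pushes a raw object as `output` there, while the success path stringifies the result with JSON.stringify; stringifying both would keep the two shapes consistent.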