Refactor CLI tool to support streaming responses with previous response tracking and update file listing tool schema by removing unnecessary 'chroot' parameter from required fields.

sebseb7
2025-08-11 15:54:58 +02:00
parent f6f809263c
commit 70f54684e6
4 changed files with 1124 additions and 72 deletions
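The schema half of the change lives in one of the other changed files and is not shown on this page; in Responses-API terms it amounts to dropping 'chroot' from the tool's required array. A hypothetical before/after shape, with all names and properties illustrative only:

// Hypothetical file-listing tool definition; only the `required` array changes.
const listFilesDef = {
  type: 'function',
  name: 'list_files',                      // illustrative name, not from the diff
  description: 'List files in a directory',
  parameters: {
    type: 'object',
    properties: {
      path: { type: 'string' },
      chroot: { type: 'string' },          // still accepted, just no longer required
    },
    required: ['path'],                    // previously e.g. ['path', 'chroot']
  },
};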

cli.js (146 changed lines)

@@ -24,81 +24,95 @@ async function loadTools() {
}
streamOnce(new OpenAI({ apiKey: process.env.OPENAI_API_KEY }), 'Zeig mir die Dateiein in /');
async function streamOnce(openai, userText) {
const toolsByFile = await loadTools();
+ let previousResponseId;
- const input =[
+ let input = [
{ "role": "developer", "content": [ {"type": "input_text","text": '' }] },
{"role": "user", "content": [ { "type": "input_text", "text": userText } ]},
]
- const call = {
- model: 'gpt-5-nano',
- input,
- text: { format: { type: 'text' }, verbosity: 'low' },
- reasoning: { effort: 'high', summary: 'detailed' },
- tools: Object.values(toolsByFile).map(t => t.def),
- store: true,
+ while(input.length > 0){
+ console.log('input:', input.length);
+ const call = {
+ model: 'gpt-5-nano',
+ input: input,
+ text: { format: { type: 'text' }, verbosity: 'low' },
+ reasoning: { effort: 'high', summary: 'detailed' },
+ tools: Object.values(toolsByFile).map(t => t.def),
+ store: true,
}
+ if(previousResponseId) call.previous_response_id = previousResponseId;
+ const stream = await openai.responses.stream(call);
+ stream.on('response.created', (event) => {
+ if(!previousResponseId){
+ previousResponseId = event.response.id;
+ }
+ });
+ stream.on('response.reasoning_summary_text.delta', (event) => {
+ //process.stdout.write(event.delta);
+ });
+ stream.on('response.reasoning_summary_text.done', () => {
+ //process.stdout.write('\n');
+ //clear on next delta
+ });
+ stream.on('response.output_text.delta', (event) => {
+ process.stdout.write(event.delta);
+ });
+ stream.on('response.output_item.added', (event) => {
+ if(event.item && event.item.type === 'function_call'){
+ //console.log('function call:', event.item);
+ }
+ });
+ stream.on('response.function_call_arguments.delta', (event) => {
+ //process.stdout.write(event.delta);
+ });
+ const functionCalls = [];
+ stream.on('response.output_item.done', async (event) => {
+ if(event.item && event.item.type === 'function_call'){
+ const id = event.item.call_id;
+ const name = event.item.name;
+ let args = {};
+ try {
+ args = JSON.parse(event.item.arguments);
+ } catch (e){
+ console.error('Error parsing arguments:', e, event.item.arguments);
+ }
+ functionCalls.push({ id, name, args, promise: toolsByFile[name].run(args) });
+ }
+ });
+ stream.on('response.completed', async (event) => {
+ //log usage & print messages to user
+ });
+ await Array.fromAsync(stream);
+ input=[];
+ for (const call of functionCalls) {
+ try {
+ const result = await call.promise;
+ input.push({
+ type: "function_call_output",
+ call_id: call.id,
+ output: JSON.stringify(result),
+ })
+ //console.log('function call result:', call,result);
+ } catch (err) {
+ console.error('Error in function call:', call.name, err);
+ }
+ }
+ }
- const stream = await openai.responses.stream(call);
- stream.on('response.created', (event) => {
- //console.log('respid:', event.response.id);
- });
- stream.on('response.reasoning_summary_text.delta', (event) => {
- //process.stdout.write(event.delta);
- });
- stream.on('response.reasoning_summary_text.done', () => {
- //process.stdout.write('\n');
- //clear on next delta
- });
- stream.on('response.output_text.delta', (event) => {
- process.stdout.write(event.delta);
- });
- stream.on('response.output_item.added', (event) => {
- if(event.item && event.item.type === 'function_call'){
- //console.log('function call:', event.item);
- }
- });
- stream.on('response.function_call_arguments.delta', (event) => {
- //process.stdout.write(event.delta);
- });
- const functionCalls = [];
- stream.on('response.output_item.done', async (event) => {
- if(event.item && event.item.type === 'function_call'){
- const id = event.item.id;
- const name = event.item.name;
- let args = {};
- try {
- args = JSON.parse(event.item.arguments);
- } catch (e){
- console.error('Error parsing arguments:', e, event.item.arguments);
- }
- console.log('function call:', id,name, args,await toolsByFile[name].run(args));
- }
- });
- stream.on('response.completed', (event) => {
- const filtered = {
- id: event?.response?.id,
- status: event?.response?.status,
- output: event?.response?.output,
- usage: event?.response?.usage,
- };
- //console.log('OPENAI RESPONSE:', event);
- });
- await Array.fromAsync(stream);
- console.log('OPENAI STREAM FINISHED');
+ //console.log('OPENAI STREAM FINISHED');
}
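For readers skimming the diff: the new version wraps the request in a loop that chains responses via previous_response_id and feeds tool results back as function_call_output items. A minimal sketch of that pattern, condensed from the added lines above (error handling and the reasoning/console handlers are omitted; `tools` and `runTool` are hypothetical stand-ins for the registry returned by loadTools()):

import OpenAI from 'openai';

// Condensed sketch of the loop introduced in this commit.
async function streamLoop(openai, tools, runTool, userText) {
  let previousResponseId;
  let input = [{ role: 'user', content: [{ type: 'input_text', text: userText }] }];

  while (input.length > 0) {
    const call = { model: 'gpt-5-nano', input, tools, store: true };
    if (previousResponseId) call.previous_response_id = previousResponseId;

    const stream = await openai.responses.stream(call);
    const functionCalls = [];

    // Remember the first response id so later turns can chain off it.
    stream.on('response.created', (e) => { previousResponseId ??= e.response.id; });
    // Print assistant text as it streams in.
    stream.on('response.output_text.delta', (e) => process.stdout.write(e.delta));
    // Start tool execution as soon as a function call item is complete.
    stream.on('response.output_item.done', (e) => {
      if (e.item && e.item.type === 'function_call') {
        const args = JSON.parse(e.item.arguments);
        functionCalls.push({ call_id: e.item.call_id, promise: runTool(e.item.name, args) });
      }
    });

    await Array.fromAsync(stream); // drain the stream (Node 22+)

    // Tool outputs become the next turn's input; an empty list ends the loop.
    input = [];
    for (const fc of functionCalls) {
      input.push({
        type: 'function_call_output',
        call_id: fc.call_id,
        output: JSON.stringify(await fc.promise),
      });
    }
  }
}

// Usage, mirroring the call at the top of cli.js:
// const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
// await streamLoop(openai, toolDefs, runTool, 'Zeig mir die Dateien in /');

Note that the diff records only the id of the first response (previousResponseId is set once and never updated); whether to chain every follow-up request off that first response or off the most recent one is a design choice, and the sketch simply mirrors the diff.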