toolLooper/cli.js

#!/usr/bin/env node
import 'dotenv/config';
import OpenAI from 'openai';
import terminalKit from 'terminal-kit';
//npm install tiktoken
import { promises as fs } from "node:fs";
import { fileURLToPath } from "node:url";
import path from "node:path";
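// Array.fromAsync is missing on older Node runtimes; it is used below to
// drain the response event stream, so provide a small shim when absent.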
if (!Array.fromAsync) {
  Array.fromAsync = async function fromAsync(asyncIterable) {
    const array = [];
    for await (const item of asyncIterable) {
      array.push(item);
    }
    return array;
  };
}
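// Summarize a response's token usage (uncached input, cached input, output)
// as a string suitable for printIndented.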
function renderUsage(usage) {
  const inputTokens = usage.input_tokens - usage.input_tokens_details.cached_tokens;
  const cachedTokens = usage.input_tokens_details.cached_tokens;
  const outputTokens = usage.output_tokens;
  return `\nTokens  input: ${inputTokens}  cached: ${cachedTokens}  output: ${outputTokens}`;
}
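// Print strings and objects with every output line left-padded by indentNum
// spaces; non-strings are pretty-printed as JSON when possible.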
function printIndented(indentNum, ...args) {
  const indent = ' '.repeat(indentNum);
  const output = args.map(arg => {
    if (typeof arg === 'string') return arg;
    try {
      return JSON.stringify(arg, null, 2);
    } catch {
      return String(arg);
    }
  }).join(' ');
  // Indent every line
  console.log(output.split('\n').map(line => indent + line).join('\n'));
}
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const term = terminalKit.terminal;
// Global key handler so CTRL-C works everywhere (input fields, loops, etc.).
// Disable mouse tracking so terminal mouse wheel keeps controlling scrollback.
term.grabInput({ mouse: false });
term.on('key', (name) => {
  if (name === 'CTRL_C') {
    term.grabInput(false);
    term.processExit(0);
  }
});
async function askUserForInput() {
  term.cyan("Enter your request: ");
  const input = await term.inputField({ mouse: false }).promise;
  console.log('\n');
  return input;
}
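// Load every .js file in ./tools as a tool: the default export is the tool
// definition sent to the API, the named `run` export executes it. The registry
// key is the file name (without .js), which must match the function name the
// model calls.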
async function loadTools() {
  const toolsDir = path.join(__dirname, "tools");
  const dirents = await fs.readdir(toolsDir, { withFileTypes: true });
  const toolEntries = await Promise.all(
    dirents
      .filter((dirent) => dirent.isFile() && dirent.name.endsWith(".js"))
      .map(async (dirent) => {
        const fileName = dirent.name.replace(/\.js$/, "");
        const module = await import(`file://${path.join(toolsDir, dirent.name)}`);
        return [fileName, { def: module.default, run: module.run }];
      })
  );
  return Object.fromEntries(toolEntries);
}
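// For reference, a tool module is assumed to look roughly like the sketch
// below (a hypothetical tools/list_files.js; the flat Responses-API function
// schema and the run() signature are inferred from how this file uses them):
//
//   import { promises as fs } from "node:fs";
//
//   export default {
//     type: "function",
//     name: "list_files",
//     description: "List files in a directory; empty path means project root.",
//     parameters: {
//       type: "object",
//       properties: { path: { type: "string" } },
//       required: [],
//     },
//   };
//
//   export async function run({ path: dir } = {}) {
//     return fs.readdir(dir || ".");
//   }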
let counter = 0;          // number of API calls made; only the first carries the system prompt
let previousResponseId;   // chains each call to the previous response via previous_response_id
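// Main REPL: read a request, then hand it to streamOnce(), which loops the
// model and its tools until a turn produces no further function calls.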
while (true) {
  // Block for user input before kicking off the LLM loop
  const userText = await askUserForInput();
  await streamOnce(new OpenAI({ apiKey: process.env.OPENAI_API_KEY }), userText);
  // Alternative provider (key read from the environment rather than hardcoded):
  // await streamOnce(new OpenAI({ baseURL: "https://api.cerebras.ai/v1", apiKey: process.env.CEREBRAS_API_KEY }), userText);
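  // One user turn: call the model, surface streamed events as progress glyphs,
  // run any requested tools, and feed their outputs back until the model stops
  // asking for tools.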
  async function streamOnce(openai, userText) {
    const toolsByFile = await loadTools();
    const systemprompt = {
      "role": "developer", "content": [{
        "type": "input_text", "text": `You are an interactive CLI AI assistant. Follow the user's instructions.
If a tool is available and relevant, plan to use it.
Tools:
list_files - (no/empty path means root)
patch_files - (create, modify, and delete files)
read_file - (by lines)
ripgrep - search pattern and file pattern
websearch - run a Google search with keywords
`
      }]
    };
    const input = [{ "role": "user", "content": [{ "type": "input_text", "text": userText }] }];
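    // Keep calling the model as long as the previous turn produced tool outputs
    // that still need to be fed back as input.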
    do {
      const call = {
        model: 'gpt-4.1-nano',
        input: counter === 0 ? [systemprompt, ...structuredClone(input)] : structuredClone(input),
        text: { format: { type: 'text' } /*, verbosity: 'low' */ },
        // reasoning: { effort: 'minimal', summary: 'detailed' },
        tools: Object.values(toolsByFile).map(t => t.def),
        store: true,
      };
      if (previousResponseId) call.previous_response_id = previousResponseId;
      counter++; // only the very first call of the session includes the system prompt
      // console.log("\n\n\n\n\n------NEW OPENAI CALL-" + input.length + "-------------"
      //   , "\n", counter, "\n", '----INPUT-----------------'
      //   , "\n", call.input.map(i => JSON.stringify(i)), "\n",
      //   '--------CALL-------------', call, "\n");
      const stream = await openai.responses.stream(call);
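      // Stream event handlers: remember the response id for chaining and print
      // progress glyphs ('o' reasoning summary, '.' output text, 'x' tool arguments).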
      stream.on('response.created', (event) => {
        previousResponseId = event.response.id;
      });
      stream.on('response.reasoning_summary_text.delta', (event) => {
        process.stdout.write('o');
      });
      stream.on('response.reasoning_summary_text.done', () => {
        process.stdout.write('\n');
        // clear on next delta
      });
      stream.on('response.output_text.delta', (event) => {
        process.stdout.write('.');
      });
      stream.on('response.output_item.added', (event) => {
        if (event.item && event.item.type === 'function_call') {
          // console.log('function call:', event.item);
        }
      });
      stream.on('response.function_call_arguments.delta', (event) => {
        process.stdout.write('x');
      });
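      // Collect each finished function_call item and start running its tool
      // immediately; the promise is awaited after the stream completes.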
      const functionCalls = [];
      stream.on('response.output_item.done', async (event) => {
        if (event.item && event.item.type === 'function_call') {
          const id = event.item.call_id;
          const name = event.item.name;
          let args = {};
          try {
            args = JSON.parse(event.item.arguments);
          } catch (e) {
            // console.error('Error parsing arguments:', e, event.item.arguments);
          }
          // console.log(' function call:', id, name);
          functionCalls.push({ id, name, args, promise: toolsByFile[name].run(args) });
        }
      });
      stream.on('response.completed', async (event) => {
        printIndented(10, renderUsage(event.response.usage));
        const messages = event.response.output.filter(i => i.type === 'message');
        if (messages.length > 0) printIndented(10, messages.map(i => i.content[0].text).join('\n'));
      });
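      // Drain the stream to completion, then clear the input and refill it with
      // one function_call_output per tool run; an empty input ends the do/while.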
      await Array.fromAsync(stream);
      input.length = 0;
      for (const call of functionCalls) {
        // try {
        const result = await call.promise;
        input.push({
          type: "function_call_output",
          call_id: call.id,
          output: JSON.stringify(result),
        });
        printIndented(10, 'function call result:', result);
        // } catch (err) {
        //   console.error('Error in function call:', call.name, err);
        // }
      }
    } while (input.length > 0);
    // console.log('OPENAI STREAM FINISHED');
  }
}