Enhance terminal functionality in InkApp by implementing a new input handling system. Introduce a command history feature for improved user navigation and streamline the process for submitting commands. Update state management to ensure accurate tracking of user inputs and command history.
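The input-handling and command-history changes described above live in the Ink UI layer; the diff below only adds the model-dialog backend. For orientation, a minimal sketch of how such history handling might look with Ink's useInput hook (all component and state names here are hypothetical, not part of this commit):

import { useState } from 'react';
import { useInput } from 'ink';

// Sketch: track submitted commands and walk them with the arrow keys.
function useCommandHistory(onSubmit) {
  const [input, setInput] = useState('');
  const [history, setHistory] = useState([]);
  const [cursor, setCursor] = useState(-1); // -1 means "editing a fresh line"

  useInput((char, key) => {
    if (key.return) {
      if (input.trim()) {
        setHistory((h) => [...h, input]);
        onSubmit(input);
      }
      setInput('');
      setCursor(-1);
    } else if (key.upArrow && history.length > 0) {
      const next = cursor === -1 ? history.length - 1 : Math.max(0, cursor - 1);
      setCursor(next);
      setInput(history[next]);
    } else if (key.downArrow && cursor !== -1) {
      const next = cursor + 1;
      if (next >= history.length) {
        setCursor(-1);
        setInput('');
      } else {
        setCursor(next);
        setInput(history[next]);
      }
    } else if (key.backspace || key.delete) {
      setInput((s) => s.slice(0, -1));
    } else if (!key.ctrl && !key.meta) {
      setInput((s) => s + char);
    }
  });

  return input;
}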
modelDialog.js (new file, 133 lines)
@@ -0,0 +1,133 @@
import OpenAI from 'openai';
import 'dotenv/config';
import EventEmitter from 'events';
import path from 'path';
import fs from 'fs/promises';
import { fileURLToPath } from 'node:url';

// Load every .js module from the ./tools directory next to this file. Each
// module contributes a tool definition (default export) and a run() function.
async function loadTools() {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
  const toolsDir = path.join(__dirname, "tools");
  const dirents = await fs.readdir(toolsDir, { withFileTypes: true });
  const toolEntries = await Promise.all(
    dirents
      .filter((dirent) => dirent.isFile() && dirent.name.endsWith(".js"))
      .map(async (dirent) => {
        const fileName = dirent.name.replace(/\.js$/, "");
        const module = await import(`file://${path.join(toolsDir, dirent.name)}`);
        return [fileName, { def: module.default, run: module.run }];
      })
  );
  return Object.fromEntries(toolEntries);
}
const toolsByFile = await loadTools();

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Developer message that seeds every conversation.
const systemprompt = {
  role: "developer",
  content: [{ type: "input_text", text: `You are a helpful assistant.` }],
};

// Polyfill Array.fromAsync (built in on newer Node versions); used below to
// drain the response stream to completion.
if (!Array.fromAsync) {
  Array.fromAsync = async function fromAsync(asyncIterable) {
    const array = [];
    for await (const item of asyncIterable) {
      array.push(item);
    }
    return array;
  };
}

class ModelDialog {
  constructor() {
    this.messages = [systemprompt];
    this.isActive = false; // guards against overlapping interrogate() calls
    this.currentStream = null;
    this.previousResponseId = null; // chains follow-up requests server-side
    this.emitter = new EventEmitter();
    this.inputTokens = {}; // per-model token counters, filled by handleUsage()
    this.outputTokens = {};
    this.cachedTokens = {};
    this.lastDebouncedUpdate = 0;
  }

  // Record token usage per model; cached input tokens are counted separately.
  handleUsage = (usage, model) => {
    this.inputTokens[model] = usage.input_tokens - usage.input_tokens_details.cached_tokens;
    this.outputTokens[model] = usage.output_tokens;
    this.cachedTokens[model] = usage.input_tokens_details.cached_tokens;
  }

  // Subscribe to an event, but deliver at most one callback per second;
  // intermediate updates are dropped rather than queued.
  on = (event, callback) => {
    const debounceTime = 1000; // 1 second

    const debouncedCallback = (...args) => {
      const now = Date.now();
      if (now - this.lastDebouncedUpdate >= debounceTime) {
        this.lastDebouncedUpdate = now;
        callback(...args);
      }
    };

    this.emitter.on(event, debouncedCallback);
  }

  interrogate = async (prompt) => {
    if (this.isActive) return; // one request at a time
    this.isActive = true;

    this.messages.push({ role: "user", content: [{ type: "input_text", text: prompt }] });

    const call = {
      model: 'gpt-5-nano',
      input: structuredClone(this.messages),
      text: { format: { type: 'text' }, verbosity: 'low' },
      reasoning: { effort: 'medium', summary: 'detailed' },
      tools: Object.values(toolsByFile).map(t => t.def),
      store: true,
      previous_response_id: this.previousResponseId
    };

    this.currentStream = await openai.responses.stream(call);
    // Remember the response id so the next turn can chain off it.
    this.currentStream.on('response.created', (event) => {
      this.previousResponseId = event.response.id;
    });

    // Accumulate text deltas and re-emit the full text so far.
    const deltas = [];
    this.currentStream.on('response.output_text.delta', (event) => {
      deltas.push(event.delta);
      this.emitter.emit('outputUpdate', deltas.join(''));
    });

    // Reasoning summaries arrive as separately indexed streams of deltas.
    const reasoningDeltas = [];
    this.currentStream.on('response.reasoning_summary_text.delta', (event) => {
      if (!reasoningDeltas[event.summary_index]) reasoningDeltas[event.summary_index] = [];
      reasoningDeltas[event.summary_index].push(event.delta);
      this.emitter.emit('reasoningUpdate', reasoningDeltas[event.summary_index].join(''));
    });

    this.currentStream.on('response.reasoning_summary_text.done', (event) => {
      //console.log(event);
    });

    let output;
    this.currentStream.on('response.completed', async (event) => {
      this.handleUsage(event.response.usage, event.response.model);
      output = event.response.output;
    });

    // Drain the stream; this resolves only after 'response.completed' fires.
    await Array.fromAsync(this.currentStream);
    this.isActive = false;
    const now = Date.now();
    this.lastDebouncedUpdate = now;
    return {
      output: output.filter(i => i.type === 'message').map(i => i.content[0].text).join('\n'),
      reasoning: reasoningDeltas.map(i => i.join('')),
      inputTokens: this.inputTokens,
      outputTokens: this.outputTokens,
      cachedTokens: this.cachedTokens,
    };
  }
}

export default ModelDialog;
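
loadTools() above treats each file in ./tools as a module whose default export is the tool definition handed to the API and whose named run export is the implementation. A tool module might therefore look like this sketch (the get_time tool and its schema are invented for illustration):

// tools/get_time.js — hypothetical example of the shape loadTools() expects.
// Default export: the tool definition forwarded to the model via `tools:`.
export default {
  type: 'function',
  name: 'get_time',
  description: 'Return the current time as an ISO-8601 string.',
  parameters: { type: 'object', properties: {}, additionalProperties: false },
};

// Named export: the implementation, stored alongside the definition as `run`.
export function run() {
  return new Date().toISOString();
}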
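
A caller (for instance the Ink app from the commit message) might drive the class like this — a sketch assuming an ESM context with OPENAI_API_KEY set and a populated tools directory:

import ModelDialog from './modelDialog.js';

const dialog = new ModelDialog();

// The debounce in on() delivers at most one update per second per event.
dialog.on('outputUpdate', (text) => console.log('partial output:', text));
dialog.on('reasoningUpdate', (summary) => console.log('reasoning:', summary));

const result = await dialog.interrogate('Summarize the available tools.');
console.log(result.output);
console.log(result.inputTokens, result.outputTokens, result.cachedTokens);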