Update ModelDialog to accept an options object for model selection and add error handling for invalid models. Modify the CLI to pass the new model option and update the test interrogation prompt.

sebseb7 committed 2025-08-22 22:43:27 +00:00
parent 46c9fe9fac
commit f43e0af918
2 changed files with 31 additions and 6 deletions

cli2.js

@@ -7,7 +7,7 @@ import chalk from 'chalk';
-const modelDialog = new ModelDialog();
+const modelDialog = new ModelDialog({model: 'gpt-5-mini'});
modelDialog.on('outputUpdate', (output) => {
//console.log(chalk.blue('output event'),output);
@@ -50,7 +50,24 @@ const price = {
//const output = await modelDialog.interrogate('Can you remember "seven" ?');
//console.log(output.output,JSON.stringify(output.reasoning,null,2));
//const output2 = await modelDialog.interrogate('read a file that is what you remembered plus 1 as a word with txt ending, check that file.');
-const output2 = await modelDialog.interrogate('Hi, use the list files tools and the read files tool on /readme.txt in same variations to test it. use the tools in parallel.');
+const output2 = await modelDialog.interrogate('schau dich mal um und wenn du html dateien findest, dann invertiere den gradient.');
console.log('final output:',output2.output);
console.log('reasoning:',output2.reasoning);
//Ti: { 'gpt-5-2025-08-07': 3019 } Tc: { 'gpt-5-2025-08-07': 0 } To: { 'gpt-5-2025-08-07': 751 }
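
For reference, a minimal sketch of how the updated CLI entry point can drive the new constructor option. The import path, export name, and console labels are assumptions; ModelDialog, interrogate, and the outputUpdate event come from the diff above, and the German prompt in the new line roughly translates to "have a look around and, if you find HTML files, invert the gradient."

import chalk from 'chalk';
import { ModelDialog } from './ModelDialog.js'; // path and export name are assumptions

// Pass the desired model through the new options object; unknown names now throw.
const dialog = new ModelDialog({ model: 'gpt-5-mini' });

// Stream partial output as it arrives.
dialog.on('outputUpdate', (output) => {
  console.log(chalk.blue('output:'), output);
});

// One interrogation turn; resolves with the final output and the reasoning trace.
const result = await dialog.interrogate('Look around and, if you find HTML files, invert the gradient.');
console.log('final output:', result.output);
console.log('reasoning:', result.reasoning);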


@@ -42,7 +42,8 @@ if (!Array.fromAsync) {
}
class ModelDialog {
-constructor() {
+constructor(options) {
+this.options = options;
this.messages = [systemprompt];
this.messagesSent = [];
this.isActive = false;
@@ -91,8 +92,11 @@ class ModelDialog {
console.log(chalk.blue('sending messages:'),messagesToSend.length);
//console.log(chalk.blue('messages:'),JSON.stringify(messagesToSend,null,2));
this.messagesSent.push(...messagesToSend);
-const model = 'gpt-5-mini';
+const model = this.options.model || 'gpt-5-mini';
+if(!['gpt-5', 'gpt-5-mini', 'gpt-5-nano', 'gpt-4.1', 'gpt-4.1-mini'].includes(model)){
+throw new Error('Invalid model: ' + model);
+}
const call = {
model: model,
@@ -101,7 +105,8 @@ class ModelDialog {
tools: Object.values(toolsByFile).map(t => t.def),
store: true,
previous_response_id: this.previousResponseId,
-parallel_tool_calls: true
+parallel_tool_calls: true,
+include: ['reasoning.encrypted_content']
}
if(model.startsWith('gpt-5')){
call.reasoning = { effort: 'low', summary: 'detailed' };
@@ -110,6 +115,7 @@ class ModelDialog {
this.currentStream = openai.responses.stream(call);
this.currentStream.on('response.created', (event) => {
this.previousResponseId = event.response.id;
});
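
Pulled out of the class, a sketch of the request-building logic these hunks add, assuming the same openai client and toolsByFile map used elsewhere in the file; the function name, parameter shape, and the Responses API input field are illustrative assumptions, not the actual method.

const ALLOWED_MODELS = ['gpt-5', 'gpt-5-mini', 'gpt-5-nano', 'gpt-4.1', 'gpt-4.1-mini'];

function buildResponsesCall({ options, messagesToSend, previousResponseId, toolsByFile }) {
  // Fall back to gpt-5-mini when no model option was given, then validate.
  const model = (options && options.model) || 'gpt-5-mini';
  if (!ALLOWED_MODELS.includes(model)) {
    throw new Error('Invalid model: ' + model);
  }
  const call = {
    model,
    input: messagesToSend,                    // assumed field, not shown in the hunks
    tools: Object.values(toolsByFile).map(t => t.def),
    store: true,
    previous_response_id: previousResponseId, // chains onto the prior response, if any
    parallel_tool_calls: true,
    include: ['reasoning.encrypted_content'], // new: return encrypted reasoning items
  };
  // Reasoning settings only apply to the gpt-5 family.
  if (model.startsWith('gpt-5')) {
    call.reasoning = { effort: 'low', summary: 'detailed' };
  }
  return call;
}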
@@ -174,7 +180,9 @@ class ModelDialog {
await Array.fromAsync(this.currentStream);
console.log(chalk.green('Tico'),[Object.values(this.inputTokens),Object.values(this.cachedTokens),Object.values(this.outputTokens)]);
console.log(chalk.green('Do we need to loop? messages in array = '),this.messages.length)
} while(this.messages.length > 0);
this.isActive = false;
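
For context, a condensed sketch of the send loop the final two hunks live in, reusing the buildResponsesCall sketch above and assuming the class fields visible in the diff (currentStream, previousResponseId, messages, messagesSent, isActive). The real method is only partially shown, so the method name and how pending messages are drained are assumptions.

async run() { // hypothetical name for the surrounding method
  this.isActive = true;
  do {
    const messagesToSend = this.messages.splice(0); // assumption: pending messages are drained each turn
    this.messagesSent.push(...messagesToSend);
    const call = buildResponsesCall({
      options: this.options,
      messagesToSend,
      previousResponseId: this.previousResponseId,
      toolsByFile,
    });
    this.currentStream = openai.responses.stream(call);
    this.currentStream.on('response.created', (event) => {
      // Remember the response id so the next request can chain onto it.
      this.previousResponseId = event.response.id;
    });
    // Drain the stream to completion (Array.fromAsync is polyfilled at the top of the file).
    await Array.fromAsync(this.currentStream);
    // Loop again if the turn queued more messages (e.g. tool results) to send.
  } while (this.messages.length > 0);
  this.isActive = false;
}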