Update ModelDialog to accept an options object for model selection and add error handling for invalid models. Update the CLI to use the new model configuration and adjust the interrogation command.

sebseb7
2025-08-22 22:43:27 +00:00
parent 46c9fe9fac
commit f43e0af918
2 changed files with 31 additions and 6 deletions


@@ -42,7 +42,8 @@ if (!Array.fromAsync) {
}
class ModelDialog {
-constructor() {
+constructor(options) {
+this.options = options;
this.messages = [systemprompt];
this.messagesSent = [];
this.isActive = false;
@@ -91,8 +92,11 @@ class ModelDialog {
console.log(chalk.blue('sending messages:'),messagesToSend.length);
//console.log(chalk.blue('messages:'),JSON.stringify(messagesToSend,null,2));
this.messagesSent.push(...messagesToSend);
-const model = 'gpt-5-mini';
+const model = this.options.model || 'gpt-5-mini';
+if(!['gpt-5', 'gpt-5-mini', 'gpt-5-nano', 'gpt-4.1', 'gpt-4.1-mini'].includes(model)){
+throw new Error('Invalid model: ' + model);
+}
const call = {
model: model,
@@ -101,7 +105,8 @@ class ModelDialog {
tools: Object.values(toolsByFile).map(t => t.def),
store: true,
previous_response_id: this.previousResponseId,
-parallel_tool_calls: true
+parallel_tool_calls: true,
+include: ['reasoning.encrypted_content']
}
if(model.startsWith('gpt-5')){
call.reasoning = { effort: 'low', summary: 'detailed' };
@@ -110,6 +115,7 @@ class ModelDialog {
this.currentStream = openai.responses.stream(call);
this.currentStream.on('response.created', (event) => {
this.previousResponseId = event.response.id;
});
@@ -174,7 +180,9 @@ class ModelDialog {
await Array.fromAsync(this.currentStream);
console.log(chalk.green('Tico'),[Object.values(this.inputTokens),Object.values(this.cachedTokens),Object.values(this.outputTokens)]);
console.log(chalk.green('Do we need to loop? messages in array = '),this.messages.length)
} while(this.messages.length > 0);
this.isActive = false;
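
The second changed file (the CLI) is not shown in this view. The sketch below is a hypothetical illustration of how the CLI side could hand the new model option to ModelDialog; the --model flag, the use of Node's built-in util.parseArgs, and the './ModelDialog' import path are assumptions, not taken from the commit.

// Hypothetical CLI wiring (not from this commit): parse a --model flag and forward it.
const { parseArgs } = require('node:util');
const ModelDialog = require('./ModelDialog'); // assumed export path

const { values } = parseArgs({
  options: { model: { type: 'string' } },
  allowPositionals: true
});

// ModelDialog stores the options and validates the model when it sends messages:
// a missing value falls back to 'gpt-5-mini', an unknown name throws 'Invalid model: ...'.
const dialog = new ModelDialog({ model: values.model });

Keeping the allow-list check inside ModelDialog rather than in the CLI means every caller gets the same validation and the same default model.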