Compare commits

1 Commits

46c9fe9fac ... main

| Author | SHA1 | Date |
|---|---|---|
|  | f43e0af918 |  |

cli2.js (21 changed lines)
```diff
@@ -7,7 +7,7 @@ import chalk from 'chalk';
 
 
 
-const modelDialog = new ModelDialog();
+const modelDialog = new ModelDialog({model: 'gpt-5-mini'});
 
 modelDialog.on('outputUpdate', (output) => {
     //console.log(chalk.blue('output event'),output);
```
```diff
@@ -50,7 +50,24 @@ const price = {
 //const output = await modelDialog.interrogate('Can you remember "seven" ?');
 //console.log(output.output,JSON.stringify(output.reasoning,null,2));
 //const output2 = await modelDialog.interrogate('read a file that is what you remebered plus 1 as a word with txt ending, check that file.');
-const output2 = await modelDialog.interrogate('Hi, use the list files tools and the read files tool on /readme.txt in same variations to test it. use the tools in parallel.');
+const output2 = await modelDialog.interrogate('schau dich mal um und wenn du html dateien findest, dann invertiere den gradient.');
 console.log('final output:',output2.output);
 console.log('reasoning:',output2.reasoning);
 //Ti: { 'gpt-5-2025-08-07': 3019 } Tc: { 'gpt-5-2025-08-07': 0 } To: { 'gpt-5-2025-08-07': 751 }
```

The new test prompt is German; in English it roughly reads: "have a look around and, if you find HTML files, invert the gradient."
```diff
@@ -42,7 +42,8 @@ if (!Array.fromAsync) {
 }
 
 class ModelDialog {
-    constructor() {
+    constructor(options) {
+        this.options = options;
         this.messages = [systemprompt];
         this.messagesSent = [];
         this.isActive = false;
```
```diff
@@ -91,8 +92,11 @@ class ModelDialog {
         console.log(chalk.blue('sending messages:'),messagesToSend.length);
         //console.log(chalk.blue('messages:'),JSON.stringify(messagesToSend,null,2));
         this.messagesSent.push(...messagesToSend);
 
-        const model = 'gpt-5-mini';
+        const model = this.options.model || 'gpt-5-mini';
+        if(!['gpt-5', 'gpt-5-mini', 'gpt-5-nano', 'gpt-4.1', 'gpt-4.1-mini'].includes(model)){
+            throw new Error('Invalid model: ' + model);
+        }
 
         const call = {
             model: model,
```
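Taken together with the constructor change above, the model is now chosen per ModelDialog instance and checked against an allow-list before a request is built. A minimal usage sketch; the module path, the export, and the exact point where the error surfaces are assumptions, not things this diff shows:

```js
// Hypothetical usage of the options-based constructor; the import path and
// export are assumed, they are not visible in this diff.
import { ModelDialog } from './modelDialog.js';

// The model comes from the options object; 'gpt-5-mini' remains the fallback.
const dialog = new ModelDialog({ model: 'gpt-5' });

// A model outside the allow-list would make the send path throw
// 'Invalid model: ...' once the next request is assembled:
// const bad = new ModelDialog({ model: 'some-other-model' });

const reply = await dialog.interrogate('Which tools can you call?');
console.log(reply.output);
```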
```diff
@@ -101,7 +105,8 @@ class ModelDialog {
             tools: Object.values(toolsByFile).map(t => t.def),
             store: true,
             previous_response_id: this.previousResponseId,
-            parallel_tool_calls: true
+            parallel_tool_calls: true,
+            include: ['reasoning.encrypted_content']
         }
         if(model.startsWith('gpt-5')){
             call.reasoning = { effort: 'low', summary: 'detailed' };
```
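`include: ['reasoning.encrypted_content']` is an OpenAI Responses API option that asks for the model's reasoning items to come back in encrypted form so they can be replayed in a later request. The standalone sketch below shows the option in the combination it is usually described with, `store: false`; the model name and prompt are placeholders and this is not the repository's code:

```js
// Standalone sketch of the Responses API option added in this commit.
// Requires OPENAI_API_KEY in the environment; not this repository's code.
import OpenAI from 'openai';

const openai = new OpenAI();

const response = await openai.responses.create({
  model: 'gpt-5-mini',
  input: 'Summarize the purpose of encrypted reasoning items in one sentence.',
  // Nothing is stored server-side; the reasoning comes back encrypted instead
  // and can be passed along with a follow-up request's input if needed.
  store: false,
  include: ['reasoning.encrypted_content'],
  reasoning: { effort: 'low', summary: 'detailed' },
});

console.log(response.output_text);
```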
```diff
@@ -110,6 +115,7 @@ class ModelDialog {
 
         this.currentStream = openai.responses.stream(call);
 
         this.currentStream.on('response.created', (event) => {
             this.previousResponseId = event.response.id;
         });
```
```diff
@@ -174,7 +180,9 @@ class ModelDialog {
 
             await Array.fromAsync(this.currentStream);
 
+            console.log(chalk.green('Tico'),[Object.values(this.inputTokens),Object.values(this.cachedTokens),Object.values(this.outputTokens)]);
             console.log(chalk.green('Do we need to loop? messages in array = '),this.messages.length)
 
         } while(this.messages.length > 0);
 
         this.isActive = false;
```
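The new 'Tico' log prints the per-model input, cached, and output token counters that the dialog accumulates (the commented-out Ti/Tc/To line in cli2.js has the same shape). If those counters were fed into the `price` table that cli2.js defines, a cost estimate could look roughly like the sketch below; the rates, the table shape, and the treatment of cached tokens as a subset of input tokens are assumptions, not taken from the repo:

```js
// Hypothetical cost summary for the token maps logged above. The price table
// shape and the USD-per-million-token rates are assumed, not the repo's values.
const price = {
  'gpt-5-2025-08-07': { input: 1.25, cachedInput: 0.125, output: 10 },
};

function estimateCostUSD(inputTokens, cachedTokens, outputTokens) {
  let total = 0;
  for (const [model, input] of Object.entries(inputTokens)) {
    const p = price[model];
    if (!p) continue; // no rate configured for this model
    const cached = cachedTokens[model] ?? 0; // assumed to be included in `input`
    const output = outputTokens[model] ?? 0;
    total += ((input - cached) * p.input + cached * p.cachedInput + output * p.output) / 1e6;
  }
  return total;
}

// Using the counts from the commented-out Ti/Tc/To line in cli2.js:
console.log(estimateCostUSD(
  { 'gpt-5-2025-08-07': 3019 },
  { 'gpt-5-2025-08-07': 0 },
  { 'gpt-5-2025-08-07': 751 },
).toFixed(4), 'USD');
```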