Compare commits


8 Commits

Author SHA1 Message Date
sebseb7
f43e0af918 Update ModelDialog to accept options for model selection and enhance error handling for invalid models. Modify CLI to use the new model configuration and update interrogation command for improved functionality. 2025-08-22 22:43:27 +00:00
sebseb7
46c9fe9fac Add pricing structure for token usage in CLI and enhance token handling in ModelDialog. Implement cost breakdown per model based on input, cached, and output tokens, and ensure proper initialization of token counts in usage handling. 2025-08-21 13:31:15 +00:00
sebseb7
eb3f58b2e6 Refactor CLI and file handling tools for improved functionality. Update CLI interrogation command for better clarity and adjust logging format. Modify list_files.js to enhance path display logic and update read_file.js schema to allow null values for optional parameters, improving flexibility in file reading operations. 2025-08-21 12:58:14 +00:00
sebseb7
6e8a336143 Enhance message handling in ModelDialog by adding console logging for sent messages and enabling parallel_tool_calls for improved response processing. 2025-08-21 12:50:44 +00:00
sebseb7
839cea7fe6 Update ModelDialog and list_files.js to enhance functionality. Add parallel_tool_calls option in ModelDialog for improved response handling. Modify list_files.js schema to allow null types for path, depth, and includeHidden properties, and remove required fields for greater flexibility. 2025-08-21 12:41:31 +00:00
sebseb7
131a45e305 Update CLI interrogation command for improved file handling and output clarity. Enhance error logging in patch_files.js by integrating chalk for better visibility of patch errors and refining path resolution logic for file updates. 2025-08-21 08:33:00 +00:00
sebseb7
7fb261a3b7 u 2025-08-21 08:21:15 +00:00
sebseb7
7ad5d10378 Update CLI and ModelDialog to enhance functionality and user experience. Modify interrogation command in CLI for improved output generation, adjust model settings in ModelDialog for better reasoning effort, and introduce a new plugin structure in plan.md for LLM integration in Roundcube. Add spinner functionality in InkApp for loading states and improve error handling in read_file.js to ensure proper line breaks in file content output. 2025-08-21 08:20:38 +00:00
9 changed files with 560 additions and 51 deletions

3
.gitignore vendored
View File

@@ -1,3 +1,4 @@
 node_modules
 .env
 tmp
+root

88
cli2.js
View File

@@ -7,7 +7,7 @@ import chalk from 'chalk';
-const modelDialog = new ModelDialog();
+const modelDialog = new ModelDialog({model: 'gpt-5-mini'});
 modelDialog.on('outputUpdate', (output) => {
   //console.log(chalk.blue('output event'),output);
@@ -16,16 +16,94 @@ modelDialog.on('reasoningUpdate', (output) => {
   //console.log(chalk.blue('reasoning event'),output);
 });
+// $ / 1million tokens
+const price = {
+  'gpt-5-2025-08-07': {
+    input: 1.25,
+    cached: 0.125,
+    output: 10
+  },
+  'gpt-5-mini-2025-08-07': {
+    input: 0.25,
+    cached: 0.025,
+    output: 2
+  },
+  'gpt-5-nano-2025-08-07': {
+    input: 0.05,
+    cached: 0.005,
+    output: 0.4
+  },
+  'gpt-4.1-2025-04-14': {
+    input: 2,
+    cached: 0.5,
+    output: 8
+  },
+  'gpt-4.1-mini-2025-04-14': {
+    input: 0.4,
+    cached: 0.1,
+    output: 1.6
+  },
+};
 (async ()=>{
   //const output = await modelDialog.interrogate('Can you remember "seven" ?');
   //console.log(output.output,JSON.stringify(output.reasoning,null,2));
   //const output2 = await modelDialog.interrogate('read a file that is what you remebered plus 1 as a word with txt ending, check that file.');
-  const output2 = await modelDialog.interrogate('Ersttelle eine beispiel business webseite für acme mit react und webpack. Lege die Dateien in /demo an');
+  const output2 = await modelDialog.interrogate('schau dich mal um und wenn du html dateien findest, dann invertiere den gradient.');
   console.log('final output:',output2.output);
   console.log('reasoning:',output2.reasoning);
-  console.log('Tokens:',output2.inputTokens,output2.cachedTokens,output2.outputTokens);
+  //Ti: { 'gpt-5-2025-08-07': 3019 } Tc: { 'gpt-5-2025-08-07': 0 } To: { 'gpt-5-2025-08-07': 751 }
+  console.log('Ti:',output2.inputTokens,'Tc:',output2.cachedTokens,'To:',output2.outputTokens);
+  // cost breakdown per model and totals (prices are per 1M tokens)
+  const perMillion = 1_000_000;
+  const models = new Set([
+    ...Object.keys(output2.inputTokens || {}),
+    ...Object.keys(output2.cachedTokens || {}),
+    ...Object.keys(output2.outputTokens || {})
+  ]);
+  let grandTotal = 0;
+  for (const model of models) {
+    const inputT = (output2.inputTokens || {})[model];
+    const cachedT = (output2.cachedTokens || {})[model];
+    const outputT = (output2.outputTokens || {})[model];
+    const p = price[model];
+    const inputCost = (typeof inputT === 'number' && p) ? (inputT / perMillion) * p.input : undefined;
+    const cachedCost = (typeof cachedT === 'number' && p) ? (cachedT / perMillion) * p.cached : undefined;
+    const outputCost = (typeof outputT === 'number' && p) ? (outputT / perMillion) * p.output : undefined;
+    const subtotal = [inputCost, cachedCost, outputCost].every(v => typeof v === 'number')
+      ? (inputCost + cachedCost + outputCost)
+      : undefined;
+    if (typeof subtotal === 'number') grandTotal += subtotal;
+    console.log('cost for', model, {
+      inputCost: parseFloat(inputCost.toFixed(6)),
+      cachedCost: parseFloat(cachedCost.toFixed(6)),
+      outputCost: parseFloat(outputCost.toFixed(6)),
+      subtotal: parseFloat(subtotal.toFixed(4))
+    });
+  }
+  //console.log('total cost:', grandTotal);
 })()
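For reference, a minimal sketch of the arithmetic the new cost-breakdown block performs, using the price table above and the example token counts from the inline comment (Ti 3019, Tc 0, To 751 for gpt-5-2025-08-07). The numbers are illustrative only; they are not output from this compare.

```javascript
// Prices are USD per 1M tokens (taken from the price table above).
const price = { input: 1.25, cached: 0.125, output: 10 }; // gpt-5-2025-08-07
const tokens = { input: 3019, cached: 0, output: 751 };   // example from the comment

const perMillion = 1_000_000;
const inputCost = (tokens.input / perMillion) * price.input;    // 0.00377375
const cachedCost = (tokens.cached / perMillion) * price.cached; // 0
const outputCost = (tokens.output / perMillion) * price.output; // 0.00751
console.log((inputCost + cachedCost + outputCost).toFixed(6));  // ≈ 0.011284
```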

modelDialog.js
View File

@@ -42,7 +42,8 @@ if (!Array.fromAsync) {
 }
 class ModelDialog {
-  constructor() {
+  constructor(options) {
+    this.options = options;
     this.messages = [systemprompt];
     this.messagesSent = [];
     this.isActive = false;
@@ -56,9 +57,12 @@ class ModelDialog {
   };
   handleUsage = (usage, model) => {
-    this.inputTokens[model] = usage.input_tokens-usage.input_tokens_details.cached_tokens;
-    this.outputTokens[model] = usage.output_tokens;
-    this.cachedTokens[model] = usage.input_tokens_details.cached_tokens;
+    if (typeof this.inputTokens[model] !== 'number') this.inputTokens[model] = 0;
+    if (typeof this.outputTokens[model] !== 'number') this.outputTokens[model] = 0;
+    if (typeof this.cachedTokens[model] !== 'number') this.cachedTokens[model] = 0;
+    this.inputTokens[model] += usage.input_tokens - usage.input_tokens_details.cached_tokens;
+    this.outputTokens[model] += usage.output_tokens;
+    this.cachedTokens[model] += usage.input_tokens_details.cached_tokens;
   }
   on = (event, callback) => {
@@ -85,19 +89,33 @@ class ModelDialog {
     do{
       const messagesToSend = this.messages.splice(0);
+      console.log(chalk.blue('sending messages:'),messagesToSend.length);
+      //console.log(chalk.blue('messages:'),JSON.stringify(messagesToSend,null,2));
       this.messagesSent.push(...messagesToSend);
-      const call = {
-        model: 'gpt-5-nano',
-        input: messagesToSend,
-        text: { format: { type: 'text' }, verbosity: 'low' },
-        reasoning: { effort: 'medium', summary: 'detailed' },
-        tools: Object.values(toolsByFile).map(t => t.def),
-        store: true,
-        previous_response_id: this.previousResponseId
-      }
+      const model = this.options.model || 'gpt-5-mini';
+      if(!['gpt-5', 'gpt-5-mini', 'gpt-5-nano', 'gpt-4.1', 'gpt-4.1-mini'].includes(model)){
+        throw new Error('Invalid model: ' + model);
+      }
+      const call = {
+        model: model,
+        input: messagesToSend,
+        text: { format: { type: 'text' } },
+        tools: Object.values(toolsByFile).map(t => t.def),
+        store: true,
+        previous_response_id: this.previousResponseId,
+        parallel_tool_calls: true,
+        include: ['reasoning.encrypted_content']
+      }
+      if(model.startsWith('gpt-5')){
+        call.reasoning = { effort: 'low', summary: 'detailed' };
+        //call.text.format.verbosity = 'low';
+      }
       this.currentStream = openai.responses.stream(call);
       this.currentStream.on('response.created', (event) => {
         this.previousResponseId = event.response.id;
       });
@@ -127,6 +145,7 @@
       this.currentStream.on('response.completed', async (event) => {
+        //console.log(chalk.blue('response completed:'),event.response.usage);
         this.handleUsage(event.response.usage, event.response.model);
         outputs.push(...event.response.output);
@@ -161,7 +180,9 @@
       await Array.fromAsync(this.currentStream);
+      console.log(chalk.green('Tico'),[Object.values(this.inputTokens),Object.values(this.cachedTokens),Object.values(this.outputTokens)]);
       console.log(chalk.green('Do we need to loop? messages in array = '),this.messages.length)
     } while(this.messages.length > 0);
     this.isActive = false;
@@ -174,4 +195,4 @@
   }
 }
 export default ModelDialog;
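The usage change above switches the per-model token counters from overwrite to accumulate, so a run that streams several responses in the do/while loop sums their usage instead of keeping only the last response. A standalone sketch of that accumulation, with hypothetical usage payloads shaped like the `event.response.usage` object seen in the diff:

```javascript
// Hypothetical usage objects, shaped like event.response.usage above.
const usages = [
  { input_tokens: 1200, output_tokens: 300, input_tokens_details: { cached_tokens: 0 } },
  { input_tokens: 1800, output_tokens: 451, input_tokens_details: { cached_tokens: 1100 } },
];

const inputTokens = {}, cachedTokens = {}, outputTokens = {};
function handleUsage(usage, model) {
  // Initialize once per model, then add up across responses.
  if (typeof inputTokens[model] !== 'number') inputTokens[model] = 0;
  if (typeof outputTokens[model] !== 'number') outputTokens[model] = 0;
  if (typeof cachedTokens[model] !== 'number') cachedTokens[model] = 0;
  inputTokens[model] += usage.input_tokens - usage.input_tokens_details.cached_tokens;
  outputTokens[model] += usage.output_tokens;
  cachedTokens[model] += usage.input_tokens_details.cached_tokens;
}

for (const u of usages) handleUsage(u, 'gpt-5-mini-2025-08-07');
console.log(inputTokens, cachedTokens, outputTokens);
// { 'gpt-5-mini-2025-08-07': 1900 } { 'gpt-5-mini-2025-08-07': 1100 } { 'gpt-5-mini-2025-08-07': 751 }
```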

359
plan.md Normal file
View File

@@ -0,0 +1,359 @@
You're on the right track! You want to create a **Roundcube plugin** that allows users to **enhance or rewrite the email body using an LLM**, by sending the current message content to a backend API (your LLM service), then replacing the textarea content with the response.
Here's a complete working example of such a plugin, called `llm_compose_helper`. It adds a button to the compose screen, sends the current message text to a configured URL via AJAX, and replaces the message body with the LLM-generated result.
---
## ✅ Goal
- Add a "Rewrite with AI" button in the compose window.
- On click: open a popup asking the user for a rewrite prompt/instructions.
- Submit both the current message body and the user prompt to the configured LLM API endpoint.
- Replace the `<textarea>` content with the returned text.
---
## 📁 Plugin Structure
```
plugins/
llm_compose_helper/
llm_compose_helper.php <-- Main plugin class
config.inc.php <-- Configuration file
localization/en_US.inc <-- Language labels
js/llm_compose_helper.js <-- Client-side JavaScript
```
---
## 1. `llm_compose_helper.php` Plugin Class
```php
<?php
/**
* LLM Compose Helper Plugin
*
* Allows rewriting the message body using an external LLM service
*
* @version 1.0
* @author You
* @license GPL
*/
class llm_compose_helper extends rcube_plugin
{
public $task = 'mail';
function init()
{
// Load configuration
$this->load_config();
// Load localization texts (available in JS if second param = true)
$this->add_texts('localization/', true);
// Only run in compose view
$rcmail = rcmail::get_instance();
if ($rcmail->action == 'compose') {
// Include JS and CSS
$this->include_script('js/llm_compose_helper.js');
// Register UI placeholder handler
$this->register_handler('plugin.llm_button', array($this, 'llm_button'));
}
}
/**
* Generate the button HTML to be inserted into the toolbar
*/
function llm_button()
{
$rcmail = rcmail::get_instance();
return html::tag('a', array(
'href' => '#',
'id' => 'llm_rewrite_button',
'class' => 'button',
'title' => $this->gettext('rewrite_with_llm'),
'onclick' => 'return false;'
), $this->gettext('rewrite_label'));
}
}
```
---
## 2. `config.inc.php` Plugin Configuration
```php
<?php
// URL of your LLM API endpoint
$config['llm_api_url'] = 'https://your-llm-api.example.com/rewrite';
// Optional: headers (e.g., auth tokens)
$config['llm_api_headers'] = array(
'Authorization: Bearer your-secret-token',
'Content-Type: application/json'
);
```
> 💡 You can later override this in `config/config.inc.php` globally.
---
## 3. `localization/en_US.inc`
```php
<?php
$labels = array(
'rewrite_with_llm' => 'Rewrite message with AI',
'rewrite_label' => 'AI Rewrite',
'enter_prompt' => 'Enter rewrite prompt',
'rewrite_submit' => 'Rewrite',
'rewrite_cancel' => 'Cancel',
);
$messages = array(
'prompt_placeholder' => 'e.g., Make it shorter and more formal',
);
```
> Add other language files like `fr_FR.inc`, `de_DE.inc`, etc., if needed.
---
## 4. `js/llm_compose_helper.js` Client-Side Logic
```javascript
// Wait for Roundcube to initialize
rcmail.addEventListener('init', function() {
const button = document.getElementById('llm_rewrite_button');
if (!button) return;
// Insert button into compose toolbar
const toolbar = rcmail.gui_objects.toolbar;
if (toolbar) {
toolbar.appendChild(button);
}
// Attach click event
$(button).click(function(e) {
e.preventDefault();
const textarea = rcmail.env.html_editor ? rcmail.editor : document.getElementById('composebody');
let messageText;
// Get current message body
if (rcmail.env.html_editor && rcmail.editor && rcmail.editor.getData) {
messageText = rcmail.editor.getData(); // CKEditor
} else {
messageText = $('#composebody').val();
}
if (!messageText || messageText.trim() === '') {
alert(rcmail.gettext('non_empty', 'llm_compose_helper'));
return;
}
// Build prompt dialog content
var promptId = 'llm_prompt_input_' + Date.now();
var dialogHtml = '<div style="padding:8px 0">' +
'<label for="' + promptId + '">' + rcmail.gettext('enter_prompt', 'llm_compose_helper') + '</label>' +
'<textarea id="' + promptId + '" style="width:100%;height:120px;box-sizing:border-box;margin-top:6px" placeholder="' + (rcmail.gettext('prompt_placeholder', 'llm_compose_helper') || '') + '"></textarea>' +
'</div>';
var buttons = [
{
text: rcmail.gettext('rewrite_submit', 'llm_compose_helper'),
classes: 'mainaction',
click: function(e, ref) {
var promptValue = document.getElementById(promptId).value || '';
// Show loading
rcmail.set_busy(true, 'loading');
// Send to LLM API with message and prompt
rcmail.http_post('plugin.llm_rewrite', {
message: messageText,
prompt: promptValue
}, function() {
rcmail.set_busy(false);
});
if (ref && ref.hide) ref.hide();
}
},
{
text: rcmail.gettext('rewrite_cancel', 'llm_compose_helper'),
click: function(e, ref) { if (ref && ref.hide) ref.hide(); }
}
];
// Open Roundcube dialog
rcmail.show_popup_dialog(dialogHtml, rcmail.gettext('rewrite_with_llm', 'llm_compose_helper'), buttons, {modal: true, width: 520});
});
});
// Handle response from server
rcmail.addEventListener('plugin.llm_rewrite_response', function(response) {
if (response.status === 'success' && response.text) {
const newText = response.text;
if (rcmail.env.html_editor && rcmail.editor && rcmail.editor.setData) {
rcmail.editor.setData(newText); // For CKEditor
} else {
$('#composebody').val(newText);
}
rcmail.showMessage(rcmail.gettext('rewrite_success', 'llm_compose_helper'), 'confirmation');
} else {
var errorMsg = response && response.message ? String(response.message) : rcmail.gettext('rewrite_error', 'llm_compose_helper');
rcmail.showMessage(errorMsg, 'error');
}
});
```
---
## 5. Extend `llm_compose_helper.php`: Add Server-Side Action
Update the `llm_compose_helper.php` file to register the AJAX action and handle the request:
```php
function init()
{
$this->load_config();
$this->add_texts('localization/', true);
$rcmail = rcmail::get_instance();
if ($rcmail->action == 'compose') {
$this->include_script('js/llm_compose_helper.js');
$this->register_handler('plugin.llm_button', array($this, 'llm_button'));
// Register custom action for AJAX
$this->register_action('plugin.llm_rewrite', array($this, 'action_handler'));
}
}
function action_handler()
{
$rcmail = rcmail::get_instance();
// Get input
$message = rcube_utils::get_input_value('message', rcube_utils::INPUT_POST);
$prompt = rcube_utils::get_input_value('prompt', rcube_utils::INPUT_POST);
if (empty($message)) {
$rcmail->output->command('plugin.llm_rewrite_response', [
'status' => 'error',
'message' => 'No message provided'
]);
return;
}
// Get config
$api_url = $rcmail->config->get('llm_api_url');
if (!$api_url) {
$rcmail->output->command('plugin.llm_rewrite_response', [
'status' => 'error',
'message' => 'LLM API URL not configured'
]);
return;
}
$headers = $rcmail->config->get('llm_api_headers', ['Content-Type: application/json']);
// Prepare request
$data = json_encode(['text' => $message, 'prompt' => $prompt]);
// Use file_get_contents or cURL
$options = [
'http' => [
'header' => $headers,
'method' => 'POST',
'content' => $data,
'timeout' => 30
],
'ssl' => [
'verify_peer' => true,
'verify_peer_name' => true,
'cafile' => '/etc/ssl/certs/ca-certificates.crt', // Adjust as needed
]
];
$context = stream_context_create($options);
$result = file_get_contents($api_url, false, $context);
if ($result === false) {
$rcmail->output->command('plugin.llm_rewrite_response', [
'status' => 'error',
'message' => 'Request failed'
]);
return;
}
$response = json_decode($result, true);
$rewritten = isset($response['text']) && $response['text'] !== null ? $response['text'] : ($response['message'] ?? $result);
// Return success
$rcmail->output->command('plugin.llm_rewrite_response', [
'status' => 'success',
'text' => $rewritten
]);
}
```
---
## 6. Example LLM API Response Format (Expected)
Your external LLM endpoint should accept POST JSON:
```json
{ "text": "Hello, I am writing to ask about...", "prompt": "Make it shorter and more formal" }
```
And return:
```json
{ "text": "Hi, I hope you're doing well. I'm reaching out to inquire about..." }
```
Ensure CORS is allowed if hosted separately.
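For local testing, a minimal sketch of such an endpoint. Node/Express and the `/rewrite` route on port 3000 are assumptions for illustration, not part of the plugin; a real implementation would call your LLM of choice instead of the placeholder rewrite.

```javascript
import express from 'express';

const app = express();
app.use(express.json());

// Allow cross-origin calls when the API is hosted separately from Roundcube.
app.use((req, res, next) => {
  res.set('Access-Control-Allow-Origin', '*');
  res.set('Access-Control-Allow-Headers', 'Content-Type, Authorization');
  if (req.method === 'OPTIONS') return res.sendStatus(204);
  next();
});

app.post('/rewrite', (req, res) => {
  const { text, prompt } = req.body || {};
  if (!text) return res.status(400).json({ message: 'No text provided' });
  // Placeholder "rewrite": a real implementation would call an LLM here.
  res.json({ text: `[${prompt || 'rewritten'}] ${text}` });
});

app.listen(3000);
```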
---
## 7. Optional: Add More Labels
In `localization/en_US.inc`, add:
```php
$messages = array(
'rewrite_success' => 'Message rewritten successfully!',
'rewrite_error' => 'Failed to rewrite message.',
'non_empty' => 'Please write something before using AI rewrite.'
);
```
---
## 8. Enable the Plugin
In `config/config.inc.php`:
```php
$config['plugins'] = array(
// ... other plugins
'llm_compose_helper'
);
```
---
## ✅ Final Result
When composing or replying:
- A new button labeled "AI Rewrite" appears in the toolbar.
- Clicking it opens a dialog asking for a rewrite prompt.
- Submitting sends the current message and the prompt to your LLM API.
- The response replaces the message body.
- Works with plain text and HTML (via CKEditor if enabled).

View File

@@ -3,6 +3,10 @@ import { Box, Text } from 'ink';
 import uiService from './uiService.js';
 import TextInput from 'ink-text-input';
 import terminalService from '../terminalService.js';
+import ModelDialog from '../../modelDialog.js';
+const sharedModelDialog = new ModelDialog();
+const npmSpinnerFrames = ['⠋','⠙','⠹','⠸','⠼','⠴','⠦','⠧','⠇','⠏'];
 class Pane extends React.Component {
   constructor(props) {
@@ -113,7 +117,9 @@ export default class InkApp extends React.Component {
       menuIndex: 0,
       model: 'gpt-5',
       reasoningEffort: 'minimal',
-      outputVerbosity: 'low'
+      outputVerbosity: 'low',
+      isLoading: false,
+      spinnerIndex: 0
     };
     this.handleSubmit = this.handleSubmit.bind(this);
     this.handleChange = this.handleChange.bind(this);
@@ -156,6 +162,12 @@ export default class InkApp extends React.Component {
       try { process.stdin.setRawMode(true); } catch {}
       process.stdin.on('data', this.onKeypress);
     }
+    // spinner timer
+    this._spinnerTimer = setInterval(() => {
+      if (this.state.isLoading) {
+        this.setState((s) => ({ spinnerIndex: (s.spinnerIndex + 1) % npmSpinnerFrames.length }));
+      }
+    }, 80);
   }
   componentWillUnmount() {
     if (this.terminalUnsub) {
@@ -169,6 +181,10 @@ export default class InkApp extends React.Component {
     if (process.stdin && process.stdin.off) {
       process.stdin.off('data', this.onKeypress);
     }
+    if (this._spinnerTimer) {
+      clearInterval(this._spinnerTimer);
+      this._spinnerTimer = null;
+    }
   }
   setPaneLines(stateKey, lines) {
@@ -205,21 +221,42 @@ export default class InkApp extends React.Component {
     this.setState({ input: value });
   }
-  handleSubmit() {
+  async handleSubmit() {
     const { input } = this.state;
     if (!input) return;
-    try {
-      terminalService.write(`${input}\r`);
-    } catch (e) {
-      // do not hide errors; show in logs
-      this.setState((state) => ({
-        logs: [...state.logs, `! write error: ${String(e && e.message ? e.message : e)}`],
-      }));
-    }
     this.setState((state) => ({
       logs: [...state.logs, `> ${input}`],
-      input: ''
+      input: '',
+      isLoading: true
     }));
+    try {
+      const result = await sharedModelDialog.interrogate(input);
+      const finalOutput = Array.isArray(result && result.output) ? result.output : [String(result && result.output ? result.output : '')];
+      const finalReasoning = Array.isArray(result && result.reasoning) ? result.reasoning : (result && result.reasoning ? [String(result.reasoning)] : []);
+      // Append to LLM output with a separator, overwrite chain of thought
+      this.setState((state) => ({
+        llmOutput: [
+          ...state.llmOutput,
+          ...(state.llmOutput.length ? ['----------'] : []),
+          ...finalOutput
+        ]
+      }));
+      this.setChainOfThought(finalReasoning);
+      this.setState((state) => ({
+        logs: [
+          ...state.logs,
+          `tokens input: ${JSON.stringify(result && result.inputTokens)}`,
+          `tokens cached: ${JSON.stringify(result && result.cachedTokens)}`,
+          `tokens output: ${JSON.stringify(result && result.outputTokens)}`
+        ]
+      }));
+    } catch (e) {
+      this.setState((state) => ({
+        logs: [...state.logs, `! interrogate error: ${String(e && e.message ? e.message : e)}`]
+      }));
+    } finally {
+      this.setState({ isLoading: false });
+    }
   }
   toggleMenu(open) {
@@ -412,12 +449,16 @@ export default class InkApp extends React.Component {
         )}
         <Box marginTop={1}>
           <Text>Input: </Text>
-          <TextInput
-            value={input}
-            onChange={this.handleChange}
-            onSubmit={this.handleSubmit}
-            placeholder="Type and press Enter..."
-          />
+          {this.state.isLoading ? (
+            <Text color="yellow">{npmSpinnerFrames[this.state.spinnerIndex]} Processing...</Text>
+          ) : (
+            <TextInput
+              value={input}
+              onChange={this.handleChange}
+              onSubmit={this.handleSubmit}
+              placeholder="Type and press Enter..."
+            />
+          )}
         </Box>
       </Box>
     );

2
todo.md Normal file
View File

@@ -0,0 +1,2 @@
return the function call result via event.
display function call events in logging

list_files.js
View File

@@ -103,24 +103,24 @@ export default {
type: "object", type: "object",
properties: { properties: {
path: { path: {
type: "string", type: ["string", "null"],
description: "Directory or file path relative to the root. Use '/' for the root. Defaults to root if not specified.", description: "Directory or file path relative to the root. Use '/' for the root. Defaults to root if not specified.",
}, },
depth: { depth: {
type: "integer", type: ["integer", "null"],
description: "Maximum subdirectory levels to traverse. Use -1 for unlimited depth. Defaults to 1.", description: "Maximum subdirectory levels to traverse. Use -1 for unlimited depth. Defaults to 1.",
minimum: -1, minimum: -1,
}, },
includeHidden: { includeHidden: {
type: "boolean", type: ["boolean", "null"],
description: "Whether to include hidden files and directories (starting with '.'). Defaults to false.", description: "Whether to include hidden files and directories (starting with '.'). Defaults to false.",
default: false, default: false,
} }
}, },
required: ["path", "depth", "includeHidden"], required: [],
additionalProperties: false, additionalProperties: false,
}, },
strict: true, strict: false,
}; };
export async function run(args) { export async function run(args) {
@@ -159,7 +159,10 @@ export async function run(args) {
return { err: `Path does not exist${inputPath ? `: ${inputPath}` : ""}` }; return { err: `Path does not exist${inputPath ? `: ${inputPath}` : ""}` };
} }
const cwd = path.relative(chrootResolved, stat.isFile() ? path.dirname(resolvedBase) : resolvedBase) || "."; const cwd = toDisplayPath(
stat.isFile() ? path.dirname(resolvedBase) : resolvedBase,
chrootResolved
);
// Handle single file case // Handle single file case
if (stat.isFile()) { if (stat.isFile()) {
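The schema change above lets the model omit parameters or pass null and fall back to the documented defaults, instead of being forced to supply all three values on every call. A hypothetical pair of argument payloads that illustrates the difference (the values are examples, not output from this compare):

```javascript
// Old schema: strict, with path, depth and includeHidden all required on every call.
const oldCallArgs = { path: "/", depth: 1, includeHidden: false };

// New schema: required: [] and nullable types, so both of these now validate
// and the tool applies its documented defaults (root, depth 1, no hidden files).
const newCallArgsA = {};
const newCallArgsB = { path: null, depth: null, includeHidden: null };
```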

patch_files.js
View File

@@ -1,5 +1,7 @@
 #!/usr/bin/env node
+import chalk from 'chalk';
 const desc = `
 This is a custom utility that makes it more convenient to add, remove, move, or edit code files. 'apply_patch' effectively allows you to execute a diff/patch against a file,
 but the format of the diff specification is unique to this task, so pay careful attention to these instructions.
@@ -612,10 +614,11 @@ function _get_updated_file(text, action, path) {
 function patch_to_commit(patch, orig, chroot = null) {
   const commit = new Commit();
   for (const [path, action] of Object.entries(patch.actions)) {
+    const resolvedPath = resolvePath(chroot, path);
     if (action.type === ActionType.DELETE) {
       commit.changes[path] = new FileChange(
         ActionType.DELETE,
-        orig[path],
+        orig[resolvedPath],
         null,
         null
       );
@@ -630,11 +633,11 @@ function patch_to_commit(patch, orig, chroot = null) {
         null
       );
     } else if (action.type === ActionType.UPDATE) {
-      const new_content = _get_updated_file(orig[path], action, path);
+      const new_content = _get_updated_file(orig[resolvedPath], action, path);
       const move_path = action.move_path ? unresolvePath(chroot, action.move_path) : null;
       commit.changes[path] = new FileChange(
         ActionType.UPDATE,
-        orig[path],
+        orig[resolvedPath],
         new_content,
         move_path
       );
@@ -824,6 +827,7 @@ export async function run(args) {
     );
     return result;
   } catch (error) {
+    console.log(chalk.red('Patch error:'),error);
     return `Patch error: ${error.message}`
   }
 }

read_file.js
View File

@@ -8,12 +8,12 @@ const virtual_chroot = '/workspaces/aiTools/root';
 // Ensures reads are confined to `virtual_chroot`.
 export default {
-  type: "function", name: "read_file", description: "read a file", strict: true,
+  type: "function", name: "read_file", description: "read a file", strict: false,
   parameters: {
-    type: "object", required: ["path","linesToSkip","linesToRead"], additionalProperties: false, properties: {
+    type: "object", required: ["path"], additionalProperties: false, properties: {
       path: { type: "string", description: "The path to the file to read.", },
-      linesToSkip: { type: "integer", description: "The number of lines to skip. Use 0 to read from the beginning.", minimum: 0 },
-      linesToRead: { type: "integer", description: "1-400 The number of lines to read.", minimum: 1, maximum: 400 }
+      linesToSkip: { type: ["integer", "null"], description: "The number of lines to skip. Use 0 to read from the beginning, which is the default.", minimum: 0 },
+      linesToRead: { type: ["integer", "null"], description: "1-400 The number of lines to read. 400 is the default.", minimum: 1, maximum: 400 }
     }
   }
 };
@@ -56,7 +56,7 @@ export async function run(args) {
     }
   }
-  return 'Filecontent: ´´´'+lines.join('')+'´´´';
+  return 'Filecontent: ´´´'+lines.join('\n')+'´´´';
   } catch (error) {
     return `read_file error: ${error.message}`;
   }
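The `join('')` to `join('\n')` change matters because, assuming the `lines` array holds newline-stripped lines (as a typical line reader produces), joining with an empty string collapses the whole file into one run-on line. A tiny sketch of the difference, with example content:

```javascript
// Assume lines were collected without their trailing newlines.
const lines = ['const a = 1;', 'const b = 2;', 'console.log(a + b);'];

console.log('Filecontent: ´´´' + lines.join('') + '´´´');
// Filecontent: ´´´const a = 1;const b = 2;console.log(a + b);´´´   (one mangled line)

console.log('Filecontent: ´´´' + lines.join('\n') + '´´´');
// Filecontent: ´´´const a = 1;
// const b = 2;
// console.log(a + b);´´´                                           (line breaks preserved)
```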