feat(api): add LLM-based DE executive summary to Telegram release hook

Introduce helpers to fetch recent git commits and call an OpenAI-compatible
LLM to generate a short German executive summary appended to Telegram
notifications. Normalize async formatting in the message builder and handle
the Promise it now returns before sending messages.

Also remove committed .env with secrets and add .env to .gitignore to
prevent future leaks.
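
For context, a minimal sketch of how the webhook handler ends up using the new
pieces (helper names are the ones introduced in x.js below; this snippet is
illustrative and not part of the committed code):

// Illustrative wiring only; assumes formatCommitMessage() and
// sendTelegramMessage() from x.js are in scope.
async function notifyRelease(payload) {
  // formatCommitMessage() now returns a Promise and appends the German
  // executive summary from summarizeMostRecentCommitDE() when an LLM API
  // key is configured; otherwise the message is sent unchanged.
  const message = await formatCommitMessage(payload);
  await sendTelegramMessage(message);
}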
sebseb7
2025-08-04 10:30:35 +02:00
parent 9a8086acea
commit e4a01f387b
4 changed files with 182 additions and 9 deletions

3
.env

@@ -1,3 +0,0 @@
# Telegram Bot Configuration
TELEGRAM_BOT_TOKEN=7754031341:AAGbnPNYxTwdSRD_HqBxzOTiiINKaFlGFww
TELEGRAM_CHAT_ID=-1002534599713

26
.env.example Normal file

@@ -0,0 +1,26 @@
# Server
PORT=9304
# Telegram Bot
TELEGRAM_BOT_TOKEN=1234567890:ABCDEF-your-telegram-bot-token
TELEGRAM_CHAT_ID=123456789
# LLM Provider selection: "openrouter" (default) or "openai"
LLM_PROVIDER=openrouter
# OpenRouter configuration (used when LLM_PROVIDER=openrouter)
OPENROUTER_API_KEY=your-openrouter-api-key
# Optional override (defaults to https://openrouter.ai/api)
OPENROUTER_BASE_URL=https://openrouter.ai/api
# Optional model override (defaults to openrouter/anthropic/claude-3.5-sonnet when provider=openrouter)
# Example models:
# - openrouter/anthropic/claude-3.5-sonnet
# - openrouter/openai/gpt-4o-mini
LLM_MODEL=openrouter/anthropic/claude-3.5-sonnet
# OpenAI configuration (used when LLM_PROVIDER=openai)
OPENAI_API_KEY=your-openai-api-key
# Optional override (defaults to https://api.openai.com)
OPENAI_BASE_URL=https://api.openai.com
# Optional model override (defaults to gpt-4o-mini when provider=openai)
# LLM_MODEL=gpt-4o-mini
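
A minimal sketch of how these variables are resolved (the helper name below is
hypothetical; defaults mirror the provider logic added in x.js further down):

// Hypothetical helper, not part of the commit: resolves provider settings the
// same way summarizeMostRecentCommitDE() does in x.js.
function resolveLLMConfig(env = process.env) {
  const provider = (env.LLM_PROVIDER || 'openrouter').toLowerCase();
  if (provider === 'openai') {
    return {
      provider,
      baseUrl: env.OPENAI_BASE_URL || 'https://api.openai.com',
      apiKey: env.OPENAI_API_KEY,
      model: env.LLM_MODEL || 'gpt-4o-mini'
    };
  }
  // Default provider: OpenRouter
  return {
    provider: 'openrouter',
    baseUrl: env.OPENROUTER_BASE_URL || 'https://openrouter.ai/api',
    apiKey: env.OPENROUTER_API_KEY,
    model: env.LLM_MODEL || 'openrouter/anthropic/claude-3.5-sonnet'
  };
}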

1
.gitignore vendored

@@ -1,2 +1,3 @@
node_modules/
logs/
.env

161
x.js

@@ -8,6 +8,137 @@ const { exec, spawn } = require('child_process');
const fs = require('fs');
const path = require('path');
// --- Helpers: Git log + LLM summarization for Executive Summary (DE) ---
// Promise-based exec wrapper with timeout
function execCmd(cmd, { cwd = process.cwd(), timeoutMs = 8000 } = {}) {
return new Promise((resolve, reject) => {
const child = require('child_process').exec(cmd, { cwd, maxBuffer: 5 * 1024 * 1024, timeout: timeoutMs }, (error, stdout, stderr) => {
if (error) {
return reject(Object.assign(new Error(`exec failed: ${error.message}`), { stdout, stderr }));
}
resolve({ stdout, stderr });
});
// Best-effort: if process times out, Node will error; no extra handling here
});
}
// Retrieve and parse last N git commits (subject + body)
async function getLastCommits({ count = 10, repoDir = process.cwd() } = {}) {
try {
const format = '%H%x1f%an%x1f%ad%x1f%s%x1f%b';
const cmd = `git log -n ${count} --pretty=format:${format} --date=iso`;
const { stdout } = await execCmd(cmd, { cwd: repoDir, timeoutMs: 8000 });
const lines = stdout.split('\n').filter(Boolean);
return lines.map(line => {
const [hash, author, date, subject, body = ''] = line.split('\x1f');
return { hash, author, date, subject, body };
});
} catch (e) {
logMessage(`getLastCommits failed: ${e.message}`, 'warn');
return [];
}
}
function buildMostRecentCommitPromptGerman(commits) {
if (!commits || commits.length === 0) return null;
const c0 = commits[0];
// Keep prompt compact but informative
const latest = [
`Commit: ${c0.hash}`,
`Autor: ${c0.author}`,
`Datum: ${c0.date}`,
`Betreff: ${c0.subject}`,
`Inhalt:\n${c0.body || '(kein Body)'}`
].join('\n');
const history = commits.slice(0, 10).map(c => `- ${c.hash.slice(0,7)} | ${c.subject}`).join('\n');
return {
system: 'Du bist ein erfahrener Software-Produktmanager. Erstelle eine kurze, laienverständliche, executive-taugliche Zusammenfassung der Auswirkungen der jüngsten Änderung. Vermeide Fachjargon, nenne das „Warum“ und den Nutzen. Antworte ausschließlich auf Deutsch in 1-3 Sätzen.',
user: `Hier ist der Git-Verlauf (letzte 10 Commits); fokussiere die jüngste Änderung:\n\nVerlauf:\n${history}\n\nDetail der neuesten Änderung:\n${latest}\n\nGib eine kurze Executive-Zusammenfassung (Deutsch, 1-3 Sätze).`
};
}
// Generic OpenAI-compatible client over fetch
async function callLLMOpenAICompatible({ baseUrl, apiKey, model, system, user, timeoutMs = 12000 }) {
const controller = new AbortController();
const t = setTimeout(() => controller.abort(), timeoutMs);
try {
const res = await fetch(`${baseUrl}/v1/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${apiKey}`
},
body: JSON.stringify({
model,
messages: [
{ role: 'system', content: system },
{ role: 'user', content: user }
],
temperature: 0.2
}),
signal: controller.signal
});
if (!res.ok) {
const text = await res.text().catch(() => '');
throw new Error(`LLM HTTP ${res.status}: ${text.slice(0, 500)}`);
}
const json = await res.json();
const msg = json.choices?.[0]?.message?.content?.trim();
return msg || '';
} finally {
clearTimeout(t);
}
}
// Summarize the most recent commit in German
async function summarizeMostRecentCommitDE() {
// Determine provider
const provider = (process.env.LLM_PROVIDER || 'openrouter').toLowerCase();
const model = process.env.LLM_MODEL || (provider === 'openrouter' ? 'openrouter/anthropic/claude-3.5-sonnet' : 'gpt-4o-mini');
let baseUrl;
let apiKey;
if (provider === 'openai') {
baseUrl = process.env.OPENAI_BASE_URL || 'https://api.openai.com';
apiKey = process.env.OPENAI_API_KEY;
} else {
// default openrouter
baseUrl = process.env.OPENROUTER_BASE_URL || 'https://openrouter.ai/api';
apiKey = process.env.OPENROUTER_API_KEY;
}
if (!apiKey) {
logMessage('LLM API key not configured; skipping executive summary', 'warn');
return null;
}
// Pull commits from the current working directory (assumed repo root or subdir)
const commits = await getLastCommits({ count: 10, repoDir: process.cwd() });
if (!commits.length) return null;
const prompt = buildMostRecentCommitPromptGerman(commits);
if (!prompt) return null;
try {
const summary = await callLLMOpenAICompatible({
baseUrl,
apiKey,
model,
system: prompt.system,
user: prompt.user,
timeoutMs: 15000
});
if (!summary) return null;
return summary;
} catch (e) {
logMessage(`LLM summarization failed: ${e.message}`, 'warn');
return null;
}
}
// Create logs directory if it doesn't exist
const logsDir = path.join(__dirname, 'logs');
if (!fs.existsSync(logsDir)) {
@@ -162,10 +293,26 @@ function formatCommitMessage(payload) {
compare = `[${linkText}](${linkUrl})`;
}
- return `${heading}
+ // Try to append a German executive summary of the most recent commit.
+ // Reuse escapeMdV2 defined above.
+ return (async () => {
+ let summaryBlock = '';
+ try {
+ const summary = await summarizeMostRecentCommitDE();
+ if (summary) {
+ const escapedSummary = escapeMdV2(summary);
+ summaryBlock = `\n———————————————\n🧠 *Executive Summary \\(DE\\)*\n${escapedSummary}\n`;
+ }
+ } catch (e) {
+ // already logged inside summarizer; keep silent here
+ }
+ return `${heading}
${commitsText}
${filesList}
- ${compare}`;
+ ${compare}${summaryBlock}`;
+ })();
}
app.use(bodyParser.json());
@@ -179,10 +326,12 @@ app.post('/releasehook_kjfhdkf987987', (req, res) => {
logMessage(`Complete payload received: ${JSON.stringify(payload, null, 2)}`);
// Send Telegram notification
- const telegramMessage = formatCommitMessage(payload);
- sendTelegramMessage(telegramMessage).catch(error => {
- logMessage(`Error sending Telegram message: ${error.message}`, 'error');
- });
+ // formatCommitMessage may return a Promise; normalize before sending
+ Promise.resolve(formatCommitMessage(payload))
+ .then((telegramMessage) => sendTelegramMessage(telegramMessage))
+ .catch(error => {
+ logMessage(`Error sending Telegram message: ${error.message}`, 'error');
+ });
// Set a flag to track if we've sent a response
let responseSent = false;