'use strict';

// LLM benchmark runner.
//
// Reads a list of model ids and a run percentage from the README, asks each
// model to solve each test prompt via the OpenRouter API, executes the
// returned code against the test's harness, and writes a Markdown results
// table back into the README.
//
// Expected README structure (HTML-comment markers delimit editable regions):
//   <!-- MODELS_START -->
//   vendor/model-a
//   vendor/model-b
//   <!-- MODELS_END -->
//   RUN_PERCENTAGE: 50
//   <!-- RESULTS_START -->
//   (results table is regenerated here on every run)
//   <!-- RESULTS_END -->
//
// NOTE(review): the previous version replaced the WHOLE README with the
// results table (readme.replace(/[\s\S]*/, ...)), destroying the model list
// and configuration; results are now confined to the marker-delimited section.

const fs = require('fs').promises;
const path = require('path');
const axios = require('axios');
const { exec } = require('child_process');
const util = require('util');

const execPromise = util.promisify(exec);

const README_PATH = path.join(__dirname, '..', 'README');
const TESTS_DIR = path.join(__dirname, '..', 'tests');
const TEMP_FILE = path.join(__dirname, 'temp_test.mjs');
// Kill runaway generated code (e.g. an accidental infinite loop) instead of
// hanging the whole benchmark.
const EXEC_TIMEOUT_MS = 30_000;

/**
 * Ask a model (via OpenRouter) to answer `prompt` and return the code portion
 * of its reply.
 *
 * @param {string} prompt - User prompt, sent verbatim as a single message.
 * @param {string} model - OpenRouter model identifier (e.g. "openai/gpt-4o").
 * @returns {Promise<string|null>} The first fenced js/javascript code block if
 *   present, otherwise the whole trimmed reply; null on any API failure.
 */
const getLlmCode = async (prompt, model) => {
  try {
    const res = await axios.post(
      'https://openrouter.ai/api/v1/chat/completions',
      { model, messages: [{ role: 'user', content: prompt }] },
      { headers: { Authorization: `Bearer ${process.env.OPENROUTER_KEY}` } }
    );
    // Guard the whole access path: providers occasionally return an empty
    // `choices` array, which previously surfaced as an unhelpful TypeError.
    const content = res.data?.choices?.[0]?.message?.content;
    if (typeof content !== 'string') {
      console.error(`API Error for ${model}: malformed response (no message content)`);
      return null;
    }
    // Prefer the first fenced code block; fall back to the raw reply.
    return content.match(/```(?:javascript|js)?\n([\s\S]+?)\n```/)?.[1].trim() ?? content.trim();
  } catch (error) {
    console.error(`API Error for ${model}: ${error.message}`);
    return null;
  }
};

/**
 * Write `code` to a temp .mjs file and execute it with node.
 *
 * @param {string} code - Complete program text (LLM solution + test harness).
 * @returns {Promise<boolean>} true iff the process exits 0 within the timeout.
 */
const runTest = async (code) => {
  try {
    await fs.writeFile(TEMP_FILE, code);
    // Quote the path (it may contain spaces) and bound the run time.
    await execPromise(`node "${TEMP_FILE}"`, { timeout: EXEC_TIMEOUT_MS });
    return true;
  } catch {
    // Non-zero exit, timeout kill, or write failure all count as a failed test.
    return false;
  } finally {
    // Best-effort cleanup; the file may not exist if the write itself failed.
    await fs.unlink(TEMP_FILE).catch(() => {});
  }
};

/**
 * Extract the text between two literal marker lines in `text`.
 *
 * @param {string} text - Document to search.
 * @param {string} start - Opening marker (taken literally, no regex chars).
 * @param {string} end - Closing marker (taken literally, no regex chars).
 * @param {string} label - Human-readable section name for error messages.
 * @returns {string} The raw text between the markers.
 * @throws {Error} if either marker is missing.
 */
const sectionBetween = (text, start, end, label) => {
  const match = text.match(new RegExp(`${start}\\n([\\s\\S]*?)${end}`));
  if (!match) {
    throw new Error(`README is missing the ${label} markers (${start} ... ${end})`);
  }
  return match[1];
};

const main = async () => {
  const readme = await fs.readFile(README_PATH, 'utf-8');

  // One OpenRouter model id per non-empty line between the MODELS markers.
  const models = sectionBetween(readme, '<!-- MODELS_START -->', '<!-- MODELS_END -->', 'model list')
    .split('\n')
    .map((line) => line.trim())
    .filter(Boolean);

  // Optional "RUN_PERCENTAGE: NN" line limits how many test dirs execute.
  const percentage = Number.parseInt(readme.match(/RUN_PERCENTAGE:\s*(\d+)/)?.[1] ?? '100', 10);

  const allTestDirs = (await fs.readdir(TESTS_DIR, { withFileTypes: true }))
    .filter((d) => d.isDirectory())
    .map((d) => d.name)
    .sort();
  // Deterministic prefix of the sorted list, so partial runs are reproducible.
  // Set gives O(1) membership checks inside the per-test loop below.
  const testsToRun = new Set(
    allTestDirs.slice(0, Math.ceil(allTestDirs.length * (percentage / 100)))
  );

  let resultsTable = '| Model | ' + allTestDirs.join(' | ') + ' |\n';
  resultsTable += '|' + ' --- |'.repeat(allTestDirs.length + 1) + '\n';

  for (const model of models) {
    resultsTable += `| ${model} |`;
    for (const dir of allTestDirs) {
      if (!testsToRun.has(dir)) {
        resultsTable += ' ⚪ Not Run |';
        continue;
      }
      // Each test dir exports { prompt, harness } from test.js.
      const { prompt, harness } = require(path.join(TESTS_DIR, dir, 'test.js'));
      console.log(`Running ${dir} for ${model}...`);
      const llmCode = await getLlmCode(prompt, model);
      if (!llmCode) {
        resultsTable += ' ❌ API Error |';
        continue;
      }
      // Archive the raw model output for later inspection.
      const outDir = path.join(TESTS_DIR, dir, 'outputs');
      await fs.mkdir(outDir, { recursive: true });
      const fname = `${model.replace(/[\/:]/g, '_')}_${new Date().toISOString().replace(/:/g, '-')}.js`;
      await fs.writeFile(path.join(outDir, fname), llmCode);
      const passed = await runTest(`${llmCode}\n${harness}`);
      resultsTable += ` ${passed ? '✅ Pass' : '❌ Fail'} |`;
    }
    resultsTable += '\n';
  }

  // Fail loudly (before any write) if the results markers are absent, rather
  // than silently corrupting the README.
  sectionBetween(readme, '<!-- RESULTS_START -->', '<!-- RESULTS_END -->', 'results');
  const newReadme = readme.replace(
    /<!-- RESULTS_START -->[\s\S]*?<!-- RESULTS_END -->/,
    // Replacer function so `$` sequences in the table are taken literally.
    () => `<!-- RESULTS_START -->\n${resultsTable}\n<!-- RESULTS_END -->`
  );
  await fs.writeFile(README_PATH, newReadme);
  console.log('Benchmark complete. README updated.');
};

main().catch((err) => {
  console.error(err);
  // Signal failure to CI; console.error alone leaves exit code 0.
  process.exitCode = 1;
});