Mirror of https://github.com/multipleof4/lynchmark.git, synced 2026-01-14 00:27:55 +00:00
Refactor: Allow benchmark runs for a single model
```diff
@@ -8,6 +8,7 @@ const CWD = process.cwd();
 const README_PATH = path.join(CWD, 'README');
 const TESTS_DIR = path.join(CWD, 'tests');
 const RESULTS_PATH = path.join(CWD, 'results.json');
+const getArg = name => { const i = process.argv.indexOf(name); return i > -1 ? process.argv[i + 1] : null; };
 
 const getLlmCode = async (prompt, model, functionName, temperature) => {
   const start = performance.now();
```
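For context, the `getArg` helper added above is what powers the new single-model mode: it finds a flag in `process.argv` and returns the token that follows it. A minimal sketch of the helper in isolation; the `run.js` entry point and the model string are illustrative assumptions, not taken from this diff:

```js
// The helper as added in the hunk above: locate the flag in process.argv and
// return the next token, or null when the flag is absent.
const getArg = name => { const i = process.argv.indexOf(name); return i > -1 ? process.argv[i + 1] : null; };

// Hypothetical invocation: node run.js --model "openai/gpt-4o TEMP:0.2"
// process.argv is then roughly:
// ['/usr/bin/node', '/path/to/run.js', '--model', 'openai/gpt-4o TEMP:0.2']
console.log(getArg('--model'));   // 'openai/gpt-4o TEMP:0.2'
console.log(getArg('--absent'));  // null
```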
```diff
@@ -30,20 +31,38 @@ const getLlmCode = async (prompt, model, functionName, temperature) => {
 
 const main = async () => {
   const readme = await fs.readFile(README_PATH, 'utf-8');
-  const models = readme.match(/<!-- MODELS_START -->\n([\s\S]+?)\n<!-- MODELS_END -->/)[1].trim().split('\n');
+  const allModels = readme.match(/<!-- MODELS_START -->\n([\s\S]+?)\n<!-- MODELS_END -->/)[1].trim().split('\n');
   const percentage = parseInt(readme.match(/RUN_PERCENTAGE:\s*(\d+)/)?.[1] ?? '100', 10);
   const sharedPrompt = readme.match(/SHARED_PROMPT:\s*"([\s\S]+?)"/)?.[1] ?? '';
 
+  const singleModel = getArg('--model');
+  if (singleModel && !allModels.includes(singleModel)) {
+    throw new Error(`Model "${singleModel}" not found in README.md.`);
+  }
+  const modelsToRun = singleModel ? [singleModel] : allModels;
+
   const allTestDirs = (await fs.readdir(TESTS_DIR, { withFileTypes: true }))
     .filter(d => d.isDirectory()).map(d => d.name).sort();
 
-  await Promise.all(
-    allTestDirs.map(dir => fs.rm(path.join(TESTS_DIR, dir, 'outputs'), { recursive: true, force: true }))
-  );
+  if (singleModel) {
+    const sModel = singleModel.replace(/[\/:]/g, '_');
+    await Promise.all(allTestDirs.map(dir =>
+      fs.rm(path.join(TESTS_DIR, dir, 'outputs', `${sModel}.js`), { force: true })
+    ));
+  } else {
+    await Promise.all(allTestDirs.map(dir =>
+      fs.rm(path.join(TESTS_DIR, dir, 'outputs'), { recursive: true, force: true })
+    ));
+  }
+
   const testsToRun = allTestDirs.slice(0, Math.ceil(allTestDirs.length * (percentage / 100)));
-  const genData = {};
 
-  for (const modelSpec of models) {
+  let genData = {};
+  if (singleModel) {
+    try { genData = JSON.parse(await fs.readFile(RESULTS_PATH, 'utf-8')); } catch {}
+  }
+
+  for (const modelSpec of modelsToRun) {
     const [model, tempStr] = modelSpec.split(' TEMP:');
     const temperature = tempStr ? parseFloat(tempStr) : undefined;
 
```
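This hunk carries the actual single-model behavior: the model list, run percentage, and shared prompt are still parsed out of the README, `--model` must match one list entry verbatim (including any ` TEMP:` suffix), and only that model's output files are cleared per test directory. A hedged sketch of that parsing, using a made-up README fragment and model names that do not come from the repository:

```js
// Made-up README fragment in the shape the regexes above expect.
const readme = [
  '<!-- MODELS_START -->',
  'openai/gpt-4o TEMP:0.2',
  'anthropic/claude-3.5-sonnet',
  '<!-- MODELS_END -->',
  'RUN_PERCENTAGE: 50',
  'SHARED_PROMPT: "Return only the function body."',
].join('\n');

const allModels = readme.match(/<!-- MODELS_START -->\n([\s\S]+?)\n<!-- MODELS_END -->/)[1].trim().split('\n');
// -> ['openai/gpt-4o TEMP:0.2', 'anthropic/claude-3.5-sonnet']
const percentage = parseInt(readme.match(/RUN_PERCENTAGE:\s*(\d+)/)?.[1] ?? '100', 10);  // -> 50
const sharedPrompt = readme.match(/SHARED_PROMPT:\s*"([\s\S]+?)"/)?.[1] ?? '';           // -> 'Return only the function body.'

// --model must equal a list entry exactly, TEMP: suffix included.
const singleModel = 'openai/gpt-4o TEMP:0.2';
const modelsToRun = singleModel ? [singleModel] : allModels;  // -> ['openai/gpt-4o TEMP:0.2']

// That model's per-test output file is addressed via a sanitized name:
const sModel = singleModel.replace(/[\/:]/g, '_');  // -> 'openai_gpt-4o TEMP_0.2'
```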
```diff
@@ -51,12 +70,7 @@ const main = async () => {
     for (const dir of testsToRun) {
       const { prompt, functionName } = (await import(pathToFileURL(path.join(TESTS_DIR, dir, 'test.js')))).default;
       console.log(`Generating ${dir} for ${modelSpec}...`);
-      const result = await getLlmCode(
-        `${sharedPrompt}\n\n${prompt.trim()}`,
-        model,
-        functionName,
-        temperature
-      );
+      const result = await getLlmCode(`${sharedPrompt}\n\n${prompt.trim()}`, model, functionName, temperature);
 
       genData[modelSpec][dir] = result?.duration ?? null;
       if (!result) continue;
```
```diff
@@ -72,4 +86,3 @@ const main = async () => {
 };
 
 main().catch(console.error);
-
```
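One mechanic worth spelling out from the second hunk: in single-model mode `genData` is seeded from the existing results.json, so a targeted rerun rewrites only that model's entries while every other model's durations survive. A hedged sketch of that round trip; the write-back call, the test-directory name, and the duration below are assumptions, since the rest of main() is not shown in this diff:

```js
// ES-module sketch (top-level await), mirroring the reload-then-overwrite idea.
import fs from 'node:fs/promises';

const RESULTS_PATH = 'results.json';            // same constant as in the script
const singleModel = 'openai/gpt-4o TEMP:0.2';   // hypothetical spec passed via --model

let genData = {};
if (singleModel) {
  // Start from the previous results so other models' rows are preserved on a partial run.
  try { genData = JSON.parse(await fs.readFile(RESULTS_PATH, 'utf-8')); } catch {}
}

// Only the selected model's row is rebuilt (test dir and duration are made up).
genData[singleModel] = { '001-two-sum': 1874 };

// Assumed write-back; the diff does not show how results.json is actually saved.
await fs.writeFile(RESULTS_PATH, JSON.stringify(genData, null, 2));
```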