mirror of
https://github.com/multipleof4/lynchmark.git
synced 2026-01-14 00:27:55 +00:00
Docs: Update benchmark results
This commit is contained in:
@@ -0,0 +1,26 @@
|
||||
/**
 * Parse a CSV string, filter its rows, group them, and aggregate each group.
 *
 * @param {string} csv - Raw CSV text including a header row.
 * @param {Object} config - Processing options.
 * @param {string} config.filterColumn - Column whose value is matched.
 * @param {*} config.filterValue - Value rows must match (loose equality).
 * @param {string} config.groupBy - Column whose values define the groups.
 * @param {string} config.aggregateColumn - Column aggregated numerically.
 * @param {string} config.operation - 'sum', 'avg'; anything else counts rows.
 * @returns {Promise<Object[]>} One `{ [groupBy]: key, result }` per group.
 */
async function processCSV(csv, config) {
  // Lazy-load PapaParse from the CDN so the module has no static dependency.
  const { parse } = await import('https://cdn.skypack.dev/papaparse@5.4.1');

  const {
    filterColumn: fc,
    filterValue: fv,
    groupBy: gb,
    aggregateColumn: ac,
    operation: op,
  } = config;

  const parsed = parse(csv, { header: true, skipEmptyLines: true });

  // `==` is deliberate: the parser yields string cells, and a numeric
  // filterValue should still match them.
  const matching = parsed.data.filter((row) => row[fc] == fv);

  // Bucket the surviving rows by their group-by value.
  const buckets = {};
  for (const row of matching) {
    const bucketKey = row[gb];
    if (!buckets[bucketKey]) buckets[bucketKey] = [];
    buckets[bucketKey].push(row);
  }

  return Object.entries(buckets).map(([bucketKey, bucketRows]) => {
    // Non-numeric cells contribute 0, mirroring parseFloat(...) || 0.
    const numbers = bucketRows.map((row) => parseFloat(row[ac]) || 0);

    let result;
    if (op === 'sum') {
      result = numbers.reduce((total, n) => total + n, 0);
    } else if (op === 'avg') {
      result = numbers.reduce((total, n) => total + n, 0) / numbers.length;
    } else {
      // Any other operation (including 'count') falls back to the row count.
      result = numbers.length;
    }

    return { [gb]: bucketKey, result };
  });
}

export default processCSV;
|
||||
@@ -0,0 +1,30 @@
|
||||
/**
 * Filter, group, and aggregate the rows of a CSV string.
 *
 * @param {string} csv - Raw CSV text including a header row.
 * @param {Object} config - Destructured processing options.
 * @param {string} config.filterColumn - Column whose value is matched.
 * @param {*} config.filterValue - Value rows must match (loose equality).
 * @param {string} config.groupBy - Column whose values define the groups.
 * @param {string} config.aggregateColumn - Column aggregated numerically.
 * @param {string} config.operation - 'count', 'sum', or 'avg' (anything
 *   other than 'count'/'sum' averages).
 * @returns {Promise<Object[]>} One `{ [groupBy]: key, [operation]: value }`
 *   object per group.
 */
const processCSV = async (csv, { filterColumn, filterValue, groupBy, aggregateColumn, operation }) => {
  const { default: Papa } = await import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm');

  const { data } = Papa.parse(csv, { header: true, skipEmptyLines: true });

  // `==` intentionally tolerates type differences between the string cells
  // produced by the parser and a possibly numeric filterValue.
  const kept = data.filter((row) => row[filterColumn] == filterValue);

  // Bucket rows by their group-by value.
  const groups = {};
  for (const row of kept) {
    const groupKey = row[groupBy];
    if (!groups[groupKey]) groups[groupKey] = [];
    groups[groupKey].push(row);
  }

  return Object.entries(groups).map(([groupKey, groupRows]) => {
    let result;
    switch (operation) {
      case 'count':
        result = groupRows.length;
        break;
      case 'sum':
        // Non-numeric cells contribute 0, mirroring parseFloat(...) || 0.
        result = groupRows.reduce(
          (total, row) => total + (parseFloat(row[aggregateColumn]) || 0),
          0,
        );
        break;
      default: {
        // Anything other than 'count'/'sum' (normally 'avg') averages.
        const numbers = groupRows.map((row) => parseFloat(row[aggregateColumn]) || 0);
        result = numbers.reduce((total, n) => total + n, 0) / numbers.length;
      }
    }

    return { [groupBy]: groupKey, [operation]: result };
  });
};

export default processCSV;
|
||||
30
tests/6_csv_processor/outputs/anthropic_claude-sonnet-4.5.js
Normal file
30
tests/6_csv_processor/outputs/anthropic_claude-sonnet-4.5.js
Normal file
@@ -0,0 +1,30 @@
|
||||
/**
 * Parse, filter, group, and aggregate a CSV string.
 *
 * @param {string} csvString - Raw CSV text including a header row.
 * @param {Object} config - Processing options.
 * @param {string} config.filterColumn - Column whose value is matched.
 * @param {*} config.filterValue - Value rows must match (loose equality).
 * @param {string} config.groupBy - Column whose values define the groups.
 * @param {string} config.aggregateColumn - Column aggregated numerically.
 * @param {string} config.operation - 'count', 'sum', or 'avg' (anything
 *   other than 'count'/'sum' averages).
 * @returns {Promise<Object[]>} One `{ [groupBy]: key, result }` per group.
 */
async function processCSV(csvString, config) {
  // Lazy CDN import keeps the module free of static dependencies.
  const { parse } = await import('https://cdn.skypack.dev/papaparse@5.4.1');

  const { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op } = config;

  const { data } = parse(csvString, { header: true, skipEmptyLines: true });

  // Loose equality lets a numeric filterValue match string cells.
  const selected = data.filter((r) => r[fc] == fv);

  // Bucket rows by their group-by value.
  const byKey = selected.reduce((groups, row) => {
    (groups[row[gb]] = groups[row[gb]] || []).push(row);
    return groups;
  }, {});

  return Object.entries(byKey).map(([groupKey, groupRows]) => {
    if (op === 'count') {
      return { [gb]: groupKey, result: groupRows.length };
    }

    // Non-numeric cells contribute 0, mirroring parseFloat(...) || 0.
    const nums = groupRows.map((r) => parseFloat(r[ac]) || 0);
    const total = nums.reduce((a, b) => a + b, 0);

    // Any operation other than 'count'/'sum' (normally 'avg') averages.
    return { [gb]: groupKey, result: op === 'sum' ? total : total / nums.length };
  });
}

export default processCSV;
|
||||
@@ -0,0 +1,40 @@
|
||||
/**
 * Filter, group, and aggregate a CSV string using running sum/count
 * accumulators per group (rather than retaining the grouped rows).
 *
 * @param {string} csvString - Raw CSV text including a header row.
 * @param {Object} config - Destructured processing options.
 * @param {string} config.filterColumn - Column whose value is matched.
 * @param {*} config.filterValue - Value rows must match (strict equality;
 *   dynamicTyping converts cells to real types first).
 * @param {string} config.groupBy - Column whose values define the groups.
 * @param {string} config.aggregateColumn - Column aggregated numerically.
 * @param {string} config.operation - 'sum', 'count', or 'avg'; any other
 *   value throws a TypeError when the finisher is looked up.
 * @returns {Promise<Object[]>} One `{ [groupBy]: key, [operation]: value }`
 *   object per group; purely numeric keys are converted back to numbers.
 */
const processCSV = async (csvString, {
  filterColumn,
  filterValue,
  groupBy,
  aggregateColumn,
  operation
}) => {
  const { default: Papa } = await import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm');

  // Each group carries a running sum (s) and row count (c); the operation
  // selects how that pair becomes the reported value.
  const finishers = {
    sum: (group) => group.s,
    count: (group) => group.c,
    avg: (group) => (group.c ? group.s / group.c : 0),
  };

  const { data } = Papa.parse(csvString, {
    header: true,
    skipEmptyLines: true,
    dynamicTyping: true, // cells arrive as numbers/booleans where possible
  });

  const groups = {};
  for (const row of data) {
    if (row[filterColumn] !== filterValue) continue;

    const key = row[groupBy];
    const value = row[aggregateColumn];
    const group = (groups[key] = groups[key] || { s: 0, c: 0 });

    // Only genuine numbers contribute to the sum; every row is counted.
    if (typeof value === 'number' && !isNaN(value)) {
      group.s += value;
    }
    group.c++;
  }

  return Object.entries(groups).map(([key, group]) => ({
    // Object keys are always strings; restore numeric group keys to numbers.
    [groupBy]: /^-?\d+(\.\d+)?$/.test(key) ? Number(key) : key,
    [operation]: finishers[operation](group),
  }));
};

export default processCSV;
|
||||
20
tests/6_csv_processor/outputs/openai_gpt-5-codex.js
Normal file
20
tests/6_csv_processor/outputs/openai_gpt-5-codex.js
Normal file
@@ -0,0 +1,20 @@
|
||||
// Cached promise for the d3 module so repeated calls import only once.
let cache;

/**
 * Filter, group, and aggregate a CSV string using d3's csvParse/rollups.
 *
 * @param {string} csv - Raw CSV text including a header row.
 * @param {Object} cfg - Processing options: filterColumn, filterValue,
 *   groupBy, aggregateColumn, operation ('sum' | 'avg' | 'count').
 * @returns {Promise<Object[]>} One `{ [groupBy]: key, [operation]: value }`
 *   object per group; `[]` when nothing matches the filter.
 * @throws {Error} When the operation is unsupported or required config
 *   fields are missing.
 */
async function processCSV(csv, cfg) {
  const { csvParse, rollups } = await (cache ??= import('https://cdn.jsdelivr.net/npm/d3@7/+esm'));

  const { filterColumn, filterValue, groupBy, aggregateColumn, operation } = cfg;

  // Validate configuration up front.
  if (!['sum', 'avg', 'count'].includes(operation)) throw new Error('Unsupported operation');
  if (!csv || !filterColumn || !groupBy) throw new Error('Missing essentials');
  if (operation !== 'count' && !aggregateColumn) throw new Error('Missing aggregateColumn');

  // Loose equality lets a numeric filterValue match string cells.
  const matching = csvParse(csv).filter((row) => row[filterColumn] == filterValue);
  if (!matching.length) return [];

  let reduceGroup;
  if (operation === 'count') {
    reduceGroup = (groupRows) => groupRows.length;
  } else {
    reduceGroup = (groupRows) => {
      let sum = 0;
      let numericCount = 0;
      for (const row of groupRows) {
        const value = +row[aggregateColumn]; // unary + mirrors the original coercion
        if (!Number.isNaN(value)) {
          sum += value;
          numericCount++;
        }
      }
      // avg divides by the count of numeric cells only; none -> 0.
      return operation === 'sum' ? sum : numericCount ? sum / numericCount : 0;
    };
  }

  return rollups(matching, reduceGroup, (row) => row[groupBy])
    .map(([key, value]) => ({ [groupBy]: key, [operation]: value }));
}

export default processCSV;
|
||||
30
tests/6_csv_processor/outputs/openrouter_polaris-alpha.js
Normal file
30
tests/6_csv_processor/outputs/openrouter_polaris-alpha.js
Normal file
@@ -0,0 +1,30 @@
|
||||
/**
 * Filter, group, and aggregate a CSV string using PapaParse for parsing
 * and lodash-es for grouping.
 *
 * @param {string} csv - Raw CSV text including a header row.
 * @param {Object} cfg - Processing options: filterColumn, filterValue,
 *   groupBy, aggregateColumn, operation ('count' | 'sum' | 'avg'; anything
 *   other than 'count'/'sum' averages).
 * @returns {Promise<Object[]>} One `{ [groupBy]: key, result }` per group;
 *   `[]` when nothing matches the filter.
 * @throws {Error} 'Invalid configuration' for missing config fields,
 *   'CSV parse error' when PapaParse reports any error.
 */
async function processCSV(csv, cfg) {
  const { filterColumn, filterValue, groupBy, aggregateColumn, operation } = cfg;

  if (!csv || !filterColumn || !groupBy || !operation) throw new Error('Invalid configuration');

  // Fetch both libraries in parallel.
  const [{ parse }, { default: lodash }] = await Promise.all([
    import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm'),
    import('https://cdn.jsdelivr.net/npm/lodash-es@4.17.21/+esm'),
  ]);

  const { data, errors } = parse(csv, { header: true, skipEmptyLines: true, dynamicTyping: true });
  if (errors?.length) throw new Error('CSV parse error');

  // Strict equality: dynamicTyping has already converted cells to real types.
  const matching = data.filter((row) => row[filterColumn] === filterValue);
  if (!matching.length) return [];

  const grouped = lodash.groupBy(matching, (row) => row[groupBy]);

  return Object.keys(grouped).map((key) => {
    const groupRows = grouped[key];

    if (operation === 'count') {
      return { [groupBy]: key, result: groupRows.length };
    }

    // Non-finite cells are excluded entirely (affect neither sum nor avg).
    const numbers = groupRows
      .map((row) => Number(row[aggregateColumn]))
      .filter((n) => Number.isFinite(n));

    if (!numbers.length) {
      // No usable values: an empty sum is 0, an empty average is undefined.
      return { [groupBy]: key, result: operation === 'sum' ? 0 : null };
    }

    const total = numbers.reduce((a, b) => a + b, 0);
    return { [groupBy]: key, result: operation === 'sum' ? total : total / numbers.length };
  });
}

export default processCSV;
|
||||
Reference in New Issue
Block a user