Docs: Update Gemini benchmark results

github-actions[bot]
2025-11-18 19:30:39 +00:00
parent 51a98c1e1b
commit 76fb066932
133 changed files with 2340 additions and 0 deletions
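Each generated file below exports the same contract: an async processCSV(csvString, options) that parses the CSV text, keeps rows where options.filterColumn equals options.filterValue, groups the surviving rows by options.groupBy, and aggregates options.aggregateColumn according to options.operation ('sum', 'avg', or 'count'), returning an array of { [groupBy]: key, result } objects. A minimal usage sketch, assuming one of the files is saved as solution.js (the file name and sample data are hypothetical, not part of the benchmark):

import processCSV from './solution.js' // hypothetical path; every file default-exports processCSV

const csv = `region,product,amount
east,widget,10
east,gadget,5
west,widget,20
east,widget,7`

const rows = await processCSV(csv, {
  filterColumn: 'product',
  filterValue: 'widget',
  groupBy: 'region',
  aggregateColumn: 'amount',
  operation: 'sum'
})
// => [ { region: 'east', result: 17 }, { region: 'west', result: 20 } ]

Run the sketch as an ES module in a browser or a runtime that permits https imports, since every implementation dynamically imports its parser from a CDN.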


@@ -0,0 +1,18 @@
const processCSV = async (csv, { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op }) => {
const { parse } = (await import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm')).default
const { data } = parse(csv, { header: true, skipEmptyLines: true })
return Object.entries(data.reduce((a, r) => {
if (r[fc] == fv) {
const k = r[gb], v = +r[ac] || 0
a[k] ??= { s: 0, c: 0 }
a[k].s += v
a[k].c++
}
return a
}, {})).map(([k, { s, c }]) => ({
[gb]: k,
result: op === 'count' ? c : op === 'avg' ? s / c : s
}))
}
export default processCSV;


@@ -0,0 +1,17 @@
export const processCSV = async (str, { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op }) => {
const [{ parse }, { filter, groupBy, map, sum }] = await Promise.all([
import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm'),
import('https://cdn.jsdelivr.net/npm/lodash-es@4.17.21/+esm')
])
const { data } = parse(str, { header: true, skipEmptyLines: true })
const rows = filter(data, r => r[fc] == fv)
return map(groupBy(rows, gb), (group, key) => {
const vals = map(group, r => +r[ac] || 0)
const total = sum(vals)
const res = op === 'count' ? group.length : op === 'sum' ? total : total / group.length
return { [gb]: key, result: res }
})
}
export default processCSV;


@@ -0,0 +1,18 @@
const processCSV = async (csvString, { filterColumn, filterValue, groupBy, aggregateColumn, operation }) => {
const [{ csvParse }, { rollup, sum, mean }] = await Promise.all([
import('https://cdn.jsdelivr.net/npm/d3-dsv@3/+esm'),
import('https://cdn.jsdelivr.net/npm/d3-array@3/+esm')
]);
const getValue = d => +d[aggregateColumn] || 0;
const data = csvParse(csvString).filter(d => d[filterColumn] == filterValue);
const aggregated = rollup(
data,
g => operation === 'count' ? g.length : (operation === 'sum' ? sum : mean)(g, getValue),
d => d[groupBy]
);
return Array.from(aggregated, ([key, result]) => ({ [groupBy]: key, result }));
};
export default processCSV;


@@ -0,0 +1,22 @@
const processCSV = async (csvString, { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op }) => {
const { parse } = await import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm')
const { data } = parse(csvString, { header: true, skipEmptyLines: true })
const groups = new Map()
for (const row of data) {
if (row[fc] == fv) {
const key = row[gb]
const val = +row[ac] || 0
const acc = groups.get(key) || { sum: 0, count: 0 }
acc.sum += val
acc.count += 1
groups.set(key, acc)
}
}
return Array.from(groups, ([key, { sum, count }]) => ({
[gb]: key,
result: op === 'count' ? count : op === 'sum' ? sum : sum / count
}))
}
export default processCSV;


@@ -0,0 +1,18 @@
const processCSV = async (csv, { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op }) => {
const [{ default: Papa }, { default: _ }] = await Promise.all([
import('https://esm.sh/papaparse@5.4.1'),
import('https://esm.sh/lodash@4.17.21')
])
const { data } = Papa.parse(csv, { header: true, skipEmptyLines: true })
return _.map(_.groupBy(_.filter(data, r => r[fc] == fv), gb), (rows, k) => {
const vals = rows.map(r => +r[ac] || 0)
const sum = _.sum(vals)
return {
[gb]: k,
result: op === 'count' ? vals.length : op === 'sum' ? sum : sum / vals.length
}
})
}
export default processCSV;


@@ -0,0 +1,20 @@
const processCSV = async (csv, { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op }) => {
const { default: Papa } = await import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm')
const { data } = Papa.parse(csv, { header: true, skipEmptyLines: true })
const groups = data.reduce((acc, row) => {
if (row[fc] == fv) {
const k = row[gb], v = +row[ac] || 0
acc[k] = acc[k] || { s: 0, c: 0 }
acc[k].s += v
acc[k].c++
}
return acc
}, {})
return Object.entries(groups).map(([k, { s, c }]) => ({
[gb]: k,
result: op === 'count' ? c : op === 'avg' ? s / c : s
}))
}
export default processCSV;


@@ -0,0 +1,18 @@
const processCSV = async (csv, cfg) => {
const [{default: P}, {default: _}] = await Promise.all([
import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm'),
import('https://cdn.jsdelivr.net/npm/lodash@4.17.21/+esm')
]);
const {data} = P.parse(csv, {header: true, skipEmptyLines: true});
const {filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op} = cfg;
return _.map(_.groupBy(_.filter(data, r => r[fc] == fv), gb), (rows, key) => {
const vals = rows.map(r => +r[ac] || 0), sum = _.sum(vals);
return {
[gb]: key,
result: op === 'count' ? vals.length : op === 'sum' ? sum : sum / vals.length
};
});
};
export default processCSV;


@@ -0,0 +1,21 @@
const processCSV = async (csvString, config) => {
const { parse } = await import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm');
const { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op } = config;
const acc = {};
parse(csvString, { header: true, skipEmptyLines: true }).data.forEach(row => {
if (row[fc] == fv) {
const key = row[gb];
const val = +row[ac] || 0;
const entry = acc[key] ??= { sum: 0, count: 0 };
entry.sum += val;
entry.count++;
}
});
return Object.entries(acc).map(([key, { sum, count }]) => ({
[gb]: key,
result: op === 'avg' ? sum / count : op === 'count' ? count : sum
}));
};
export default processCSV;


@@ -0,0 +1,20 @@
const processCSV = async (csvStr, { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op }) => {
const { csvParse } = await import('https://esm.sh/d3-dsv@3');
const groups = csvParse(csvStr).reduce((acc, row) => {
if (row[fc] == fv) {
const key = row[gb], val = +row[ac] || 0;
const entry = acc.get(key) || { s: 0, c: 0 };
entry.s += val;
entry.c++;
acc.set(key, entry);
}
return acc;
}, new Map());
return Array.from(groups, ([key, { s, c }]) => ({
[gb]: key,
result: op === 'count' ? c : op === 'sum' ? s : s / c
}));
};
export default processCSV;


@@ -0,0 +1,13 @@
export const processCSV = async (csv, { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op }) => {
const { csvParse, rollups, sum } = await import('https://cdn.jsdelivr.net/npm/d3@7/+esm')
return rollups(
csvParse(csv).filter(d => d[fc] == fv),
g => {
const s = sum(g, d => +d[ac] || 0)
return op === 'count' ? g.length : op === 'sum' ? s : s / g.length
},
d => d[gb]
).map(([k, v]) => ({ [gb]: k, result: v }))
}
export default processCSV;


@@ -0,0 +1,21 @@
const processCSV = async (csvString, config) => {
const { csvParse } = await import('https://esm.sh/d3-dsv@3');
const { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op } = config;
const groups = csvParse(csvString).reduce((acc, row) => {
if (row[fc] == fv) {
const key = row[gb];
const val = +row[ac] || 0;
acc[key] = acc[key] || { sum: 0, count: 0 };
acc[key].sum += val;
acc[key].count++;
}
return acc;
}, {});
return Object.entries(groups).map(([key, { sum, count }]) => ({
[gb]: key,
result: op === 'avg' ? sum / count : op === 'count' ? count : sum
}));
};
export default processCSV;


@@ -0,0 +1,21 @@
const processCSV = async (csv, cfg) => {
const { default: Papa } = await import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm');
const { filterColumn: fc, filterValue: fv, groupBy: gb, aggregateColumn: ac, operation: op } = cfg;
const grouped = Papa.parse(csv, { header: true, skipEmptyLines: true }).data.reduce((acc, row) => {
if (row[fc] == fv) {
const key = row[gb];
const val = +row[ac] || 0;
acc[key] = acc[key] || { sum: 0, count: 0 };
acc[key].sum += val;
acc[key].count++;
}
return acc;
}, {});
return Object.entries(grouped).map(([key, { sum, count }]) => ({
[gb]: key,
result: op === 'count' ? count : op === 'avg' ? sum / count : sum
}));
};
export default processCSV;