mirror of
https://github.com/multipleof4/lynchmark.git
synced 2026-01-13 16:17:54 +00:00
Docs: Update benchmark for openai/gpt-5.2 EFF:xhigh
This commit is contained in:
24
tests/10_scrypt_hash/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
24
tests/10_scrypt_hash/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,24 @@
|
||||
// URL of the scrypt-js ESM bundle; fetched lazily on the first hash request.
const scryptUrl = "https://cdn.jsdelivr.net/npm/scrypt-js@3.0.1/+esm";

// Shared UTF-8 encoder for password/salt byte conversion.
const te = new TextEncoder();

// scrypt parameters: CPU/memory cost N, block size r, parallelism p,
// derived-key length in bytes.
const N = 1024, r = 8, p = 1, dkLen = 32;

// Memoized promise resolving to the scrypt function.
let scryptP;

/**
 * Load the scrypt implementation once and cache the promise.
 * Accepts several module export shapes; rejects when none is callable.
 * @returns {Promise<Function>}
 */
function getScrypt() {
  if (!scryptP) {
    scryptP = import(scryptUrl).then(mod => {
      const fn = mod.scrypt || mod.default?.scrypt || mod.default;
      if (typeof fn != "function") throw Error("scrypt-js: missing scrypt export");
      return fn;
    });
  }
  return scryptP;
}

/**
 * Render a byte array as lowercase hex.
 * @param {Uint8Array} bytes
 * @returns {string}
 */
const toHex = bytes => [...bytes].map(b => b.toString(16).padStart(2, "0")).join("");

/**
 * Derive an scrypt hash of `password` with `salt`.
 * @param {string} password
 * @param {string} salt
 * @returns {Promise<string>} 64-character lowercase hex digest (32 bytes)
 * @throws {TypeError} when either argument is not a string
 */
async function hashPassword(password, salt) {
  if (typeof password != "string" || typeof salt != "string") throw TypeError("Expected (string, string)");
  const scrypt = await getScrypt();
  const derived = await scrypt(te.encode(password), te.encode(salt), N, r, p, dkLen);
  return toHex(derived);
}
export default hashPassword;

// Generation time: 101.462s
// Result: PASS
|
||||
31
tests/11_geospatial/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
31
tests/11_geospatial/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,31 @@
|
||||
// Turf.js CDN bundle, imported on demand and cached for the session.
const TURF_SRC = "https://cdn.jsdelivr.net/npm/@turf/turf@6.5.0/+esm"
let turfP
const turf = () => turfP || (turfP = import(TURF_SRC))

/**
 * Total distance (kilometers, 2 decimal places) travelled through the GPS
 * readings that fall strictly inside `boundary`, ordered by timestamp.
 * @param {Array<{ts: number|string, loc: [number, number]}>} readings
 * @param {object} boundary GeoJSON polygon accepted by turf
 * @returns {Promise<number>} 0 for invalid input or fewer than 2 inside points
 */
async function analyzeGPS(readings, boundary) {
  if (!Array.isArray(readings) || !boundary) return 0

  const {
    point: pt,
    booleanPointInPolygon: inPoly,
    lineString: ls,
    length: len
  } = await turf()

  // Keep readings with finite timestamp/coordinates that sit inside the
  // polygon (points exactly on the boundary are excluded).
  const inside = []
  for (const reading of readings) {
    const when = +reading?.ts
    const coord = [+reading?.loc?.[0], +reading?.loc?.[1]]
    if (!Number.isFinite(when)) continue
    if (!coord.every(Number.isFinite)) continue
    if (inPoly(pt(coord), boundary, { ignoreBoundary: true })) {
      inside.push({ ts: when, loc: coord })
    }
  }

  if (inside.length < 2) return 0
  inside.sort((first, second) => first.ts - second.ts)

  const km = len(ls(inside.map(point => point.loc)), { units: "kilometers" })
  return Number.isFinite(km) ? +km.toFixed(2) : 0
}
export default analyzeGPS;

// Generation time: 113.549s
// Result: PASS
|
||||
34
tests/1_dijkstra/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
34
tests/1_dijkstra/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,34 @@
|
||||
// Cached module promise for the priority-queue dependency.
let _pqP;

// Fetch the queue implementation at most once per session.
function _loadPQ() {
  if (!_pqP) _pqP = import("https://esm.sh/js-priority-queue@0.1.5");
  return _pqP;
}

/**
 * Dijkstra shortest-path distance from `start` to `end`.
 * Uses lazy deletion: stale heap entries are skipped when their recorded
 * cost no longer matches the best-known distance.
 * @param {Object<string, Object<string, number>>} graph adjacency map
 * @param {string} start
 * @param {string} end
 * @returns {Promise<number>} total weight, or Infinity when unreachable
 */
async function findShortestPath(graph, start, end) {
  if (start === end) return 0;
  if (!graph) return Infinity;

  const { default: PriorityQueue } = await _loadPQ();
  const dist = Object.create(null);
  const heap = new PriorityQueue({ comparator: (a, b) => a[0] - b[0] });

  dist[start] = 0;
  heap.queue([0, start]);

  while (heap.length) {
    const [cost, node] = heap.dequeue();
    // Skip entries superseded by a shorter path found later.
    if (cost !== dist[node]) continue;
    if (node === end) return cost;

    const edges = graph[node] || {};
    for (const neighbor of Object.keys(edges)) {
      const candidate = cost + +edges[neighbor];
      const best = dist[neighbor] ?? Infinity;
      if (candidate < best) {
        dist[neighbor] = candidate;
        heap.queue([candidate, neighbor]);
      }
    }
  }

  return Infinity;
}
export default findShortestPath;

// Generation time: 91.630s
// Result: PASS
|
||||
41
tests/2_convex_hull/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
41
tests/2_convex_hull/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,41 @@
|
||||
// Lazy lodash loader; a rejected load clears the cache so callers can retry.
const loadLodash = (() => {
  let cached;
  return () => {
    if (!cached) {
      cached = import("https://cdn.jsdelivr.net/npm/lodash@4.17.21/+esm")
        .then(m => m.default || m)
        .catch(e => {
          cached = null;
          return Promise.reject(e);
        });
    }
    return cached;
  };
})();

/**
 * Convex hull of `{x, y}` points using Andrew's monotone chain.
 * Malformed / non-finite points are dropped and duplicates removed before
 * the hull is built; collinear boundary points are not kept.
 * @param {Array<{x: number, y: number}>} points
 * @returns {Promise<Array<{x: number, y: number}>>} hull vertices
 * @throws {TypeError} when `points` is not an array
 */
async function findConvexHull(points) {
  const _ = await loadLodash();
  if (!Array.isArray(points)) throw new TypeError("Expected an array of points");

  const valid = points.filter(p => p && Number.isFinite(p.x) && Number.isFinite(p.y));
  const pts = _.uniqBy(valid, p => `${p.x}|${p.y}`);

  if (pts.length < 2) return pts.slice();

  const ordered = _.sortBy(pts, ["x", "y"]);

  // 2-D cross product of (o->a) x (o->b); <= 0 means no strict left turn.
  const cross = (o, a, b) =>
    (a.x - o.x) * (b.y - o.y) - (a.y - o.y) * (b.x - o.x);

  // Build one half-hull by scanning points and discarding non-left turns.
  const halfHull = seq => {
    const hull = [];
    for (const p of seq) {
      while (hull.length > 1 && cross(hull[hull.length - 2], hull[hull.length - 1], p) <= 0) {
        hull.pop();
      }
      hull.push(p);
    }
    return hull;
  };

  const lower = halfHull(ordered);
  const upper = halfHull([...ordered].reverse());
  // Each half-hull ends on the other's starting point; drop the repeats.
  lower.pop();
  upper.pop();
  return lower.concat(upper);
}
export default findConvexHull;

// Generation time: 134.929s
// Result: PASS
|
||||
20
tests/3_lis/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
20
tests/3_lis/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,20 @@
|
||||
// d3-array supplies bisectLeft for the patience-sorting tails array.
const d3ArrayUrl = 'https://cdn.jsdelivr.net/npm/d3-array@3/+esm'
const err = 'Expected an array of numbers'
let d3Array

/**
 * Length of the longest strictly increasing subsequence.
 * Classic O(n log n) patience-sorting: `tails[i]` holds the smallest tail
 * value of any increasing subsequence of length i + 1.
 * @param {number[]} nums
 * @returns {Promise<number>}
 * @throws {TypeError} when `nums` is not an array of non-NaN numbers
 */
async function findLISLength(nums) {
  if (!Array.isArray(nums)) throw TypeError(err)

  d3Array ||= import(d3ArrayUrl)
  const { bisectLeft } = await d3Array

  const tails = []
  for (const value of nums) {
    if (typeof value != 'number' || Number.isNaN(value)) throw TypeError(err)
    // Replace the first tail >= value (or append when value extends all).
    tails[bisectLeft(tails, value)] = value
  }
  return tails.length
}
export default findLISLength;

// Generation time: 227.924s
// Result: PASS
|
||||
17
tests/4_transpiler/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
17
tests/4_transpiler/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,17 @@
|
||||
// CDN location of the Handlebars ESM bundle.
const HANDLEBARS_CDN = "https://cdn.jsdelivr.net/npm/handlebars@4.7.8/+esm"

// Cached module promise plus per-template compiled-function cache.
let hbP
const tplCache = new Map()

// Import Handlebars once, unwrapping either default or namespace export.
const loadHB = () => {
  if (hbP == null) hbP = import(HANDLEBARS_CDN).then(m => m.default ?? m)
  return hbP
}

/**
 * Render a Handlebars template string with `data`.
 * Compiled templates are memoized by their exact source text.
 * @param {string} template
 * @param {object} [data] render context; nullish falls back to {}
 * @returns {Promise<string>}
 * @throws {TypeError} when `template` is not a string
 */
async function renderTemplate(template, data = {}) {
  if (typeof template != "string") throw new TypeError("Template must be a string")
  const HB = await loadHB()
  let compiled = tplCache.get(template)
  if (!compiled) {
    compiled = HB.compile(template)
    tplCache.set(template, compiled)
  }
  return compiled(data ?? {})
}
export default renderTemplate;

// Generation time: 77.305s
// Result: PASS
|
||||
21
tests/5_markdown_parser/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
21
tests/5_markdown_parser/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,21 @@
|
||||
// Lazily-initialized shared state: `p` caches the in-flight loader promise,
// `md` the markdown-it instance, `purify` the DOMPurify module.
let p, md, purify;

// Load markdown-it and DOMPurify in parallel from the CDN, exactly once.
// On failure the promise cache is cleared (p = 0) so a later call retries.
const load = () =>
  p ??= Promise.all([
    import("https://cdn.jsdelivr.net/npm/markdown-it@14/dist/markdown-it.esm.js"),
    import("https://cdn.jsdelivr.net/npm/dompurify@3/dist/purify.es.mjs")
  ]).then(([{ default: MarkdownIt }, { default: DOMPurify }]) => {
    // html: true lets raw HTML through the parser; the sanitize() call in
    // parseMarkdown is what keeps the final output safe.
    md ||= new MarkdownIt({ html: true, linkify: true });
    purify ||= DOMPurify;
  }).catch(e => {
    p = 0;
    throw e;
  });

// Convert markdown `src` to sanitized HTML. Nullish input is coerced to ""
// via the template literal; other non-strings are stringified.
// NOTE(review): the benchmark marks this output FAIL. One candidate cause:
// DOMPurify's default export requires a window/DOM to sanitize, so in a
// non-browser runtime `purify.sanitize` may be inert — confirm the target
// environment and the CDN paths actually resolve for these package versions.
async function parseMarkdown(src) {
  await load();
  return purify.sanitize(md.render(`${src ?? ""}`), { USE_PROFILES: { html: true } });
}
export default parseMarkdown;

// Generation time: 136.807s
// Result: FAIL
|
||||
37
tests/6_csv_processor/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
37
tests/6_csv_processor/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,37 @@
|
||||
// One-shot loader for the d3 CSV-parsing and grouping helpers.
const _csvLibs = (() => {
  let cached
  return () => {
    cached ??= Promise.all([
      import("https://cdn.jsdelivr.net/npm/d3-dsv@3/+esm"),
      import("https://cdn.jsdelivr.net/npm/d3-array@3/+esm")
    ]).then(([dsv, arr]) => ({ csvParse: dsv.csvParse, group: arr.group }))
    return cached
  }
})()

/**
 * Filter CSV rows by one column, group by another, and aggregate.
 * @param {string} csv raw CSV text with a header row
 * @param {object} cfg
 * @param {string} cfg.filterColumn column compared (loosely) to filterValue
 * @param {*} cfg.filterValue
 * @param {string} cfg.groupBy column whose values become group keys
 * @param {string} [cfg.aggregateColumn] required for sum/avg
 * @param {"sum"|"avg"|"count"} cfg.operation
 * @returns {Promise<Array<object>>} one {[groupBy]: key, result} per group
 * @throws {TypeError} on invalid arguments
 */
async function processCSV(csv, cfg = {}) {
  if (typeof csv != "string") throw new TypeError("csv must be a string")

  const { filterColumn, filterValue, groupBy, aggregateColumn, operation } = cfg
  const isName = v => typeof v == "string" && v
  if (!isName(filterColumn)) throw new TypeError("filterColumn required")
  if (!isName(groupBy)) throw new TypeError("groupBy required")
  if (!["sum", "avg", "count"].includes(operation)) throw new TypeError("operation must be sum|avg|count")
  if (operation != "count" && !isName(aggregateColumn))
    throw new TypeError("aggregateColumn required for sum/avg")

  const { csvParse, group } = await _csvLibs()
  // Loose == keeps the original semantics: CSV cells are strings, so a
  // numeric filterValue still matches its textual form.
  const grouped = group(
    csvParse(csv).filter(row => row[filterColumn] == filterValue),
    row => row[groupBy]
  )

  // Non-numeric aggregate cells count as 0.
  const toNumber = v => +v || 0
  const aggregate = rows => {
    if (operation == "count") return rows.length
    let total = 0
    for (const row of rows) total += toNumber(row[aggregateColumn])
    return operation == "avg" ? total / rows.length : total
  }

  const out = []
  for (const [key, rows] of grouped) out.push({ [groupBy]: key, result: aggregate(rows) })
  return out
}
export default processCSV;

// Generation time: 136.946s
// Result: PASS
|
||||
96
tests/7_scheduler/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
96
tests/7_scheduler/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,96 @@
|
||||
// Cached module promise for the luxon date library.
let _lx;
const _loadLuxon = () =>
  _lx || (_lx = import("https://cdn.jsdelivr.net/npm/luxon@3.4.4/build/es6/luxon.js"));

// Find meeting slots free in BOTH calendars, inside working hours and a
// search range. Strategy: convert everything that is NOT available — events
// from either calendar plus the per-day off-hours — into "busy" intervals,
// merge them, then tile the remaining gaps with fixed-length slots.
async function findAvailableSlots(calendarA, calendarB, constraints) {
  const { DateTime } = await _loadLuxon();
  // M = ms per minute, DAY = minutes per day, Z = parse everything as UTC.
  const M = 6e4, DAY = 1440, Z = { zone: "utc" };

  // Defensive normalization: non-array calendars become empty, non-object
  // constraints become {} (each case then yields [] further down).
  const a = Array.isArray(calendarA) ? calendarA : [];
  const b = Array.isArray(calendarB) ? calendarB : [];
  const o = constraints && typeof constraints === "object" ? constraints : {};

  // Slot length; `!(durMin > 0)` also rejects NaN.
  const durMin = Number(o.durationMinutes);
  if (!(durMin > 0)) return [];
  const dur = durMin * M;

  // Search window [r0, r1) in epoch ms; invalid or empty windows yield [].
  const sr = o.searchRange || {};
  const rs = DateTime.fromISO(sr.start || "", Z);
  const re = DateTime.fromISO(sr.end || "", Z);
  if (!rs.isValid || !re.isValid) return [];
  const r0 = rs.toMillis(), r1 = re.toMillis();
  if (r0 >= r1) return [];

  // Work hours as minutes-from-midnight; "24:00" is allowed as end-of-day.
  const wh = o.workHours || {};
  const toMin = t => {
    const [h, m] = String(t).split(":").map(Number);
    if (!Number.isFinite(h) || !Number.isFinite(m)) return NaN;
    if (h === 24 && m === 0) return DAY;
    if (h < 0 || h > 23 || m < 0 || m > 59) return NaN;
    return h * 60 + m;
  };
  const ws = toMin(wh.start), we = toMin(wh.end);
  if (!Number.isFinite(ws) || !Number.isFinite(we)) return [];

  // Clip an interval to the search window; null when nothing remains.
  const clamp = (s, e) => {
    s = Math.max(s, r0);
    e = Math.min(e, r1);
    return s < e ? [s, e] : null;
  };

  // Collected busy intervals (events + off-hours), all clamped to the window.
  const busy = [];
  const add = (s, e) => {
    const c = clamp(s, e);
    if (c) busy.push(c);
  };

  // Parse one calendar event; silently skip malformed or inverted events.
  const addEvent = ev => {
    const s = DateTime.fromISO(ev?.start || "", Z);
    const e = DateTime.fromISO(ev?.end || "", Z);
    if (!s.isValid || !e.isValid) return;
    const x = s.toMillis(), y = e.toMillis();
    if (x < y) add(x, y);
  };

  a.forEach(addEvent);
  b.forEach(addEvent);

  // For each day touched by the window, mark the OFF-hours as busy:
  //  ws === we  -> whole day off (zero-length work window);
  //  ws  <  we  -> off before start and after end (normal day shift);
  //  ws  >  we  -> overnight shift: off only between we and ws.
  for (let d = rs.startOf("day"); d.toMillis() < r1; d = d.plus({ days: 1 })) {
    const base = d.toMillis();
    const off = (x, y) => add(base + x * M, base + y * M);

    if (ws === we) off(0, DAY);
    else if (ws < we) {
      if (ws) off(0, ws);
      if (we < DAY) off(we, DAY);
    } else off(we, ws);
  }

  // Sort and merge overlapping/adjacent-overlapping busy intervals.
  busy.sort((x, y) => x[0] - y[0] || x[1] - y[1]);

  const merged = [];
  for (const [s, e] of busy) {
    const last = merged[merged.length - 1];
    if (!last || s > last[1]) merged.push([s, e]);
    else last[1] = Math.max(last[1], e);
  }

  // Tile every free gap with back-to-back slots of length `dur`.
  const out = [];
  const iso = ms => new Date(ms).toISOString();
  const pushSlot = (s, e) => out.push({ start: iso(s), end: iso(e) });

  let cur = r0;
  for (const [s, e] of merged) {
    if (cur < s) {
      for (let t = cur; t + dur <= s; t += dur) pushSlot(t, t + dur);
    }
    if (e > cur) cur = e;
    if (cur >= r1) return out;
  }

  // Final gap between the last busy interval and the window end.
  for (let t = cur; t + dur <= r1; t += dur) pushSlot(t, t + dur);
  return out;
}
export default findAvailableSlots;

// Generation time: 822.838s
// Result: PASS
|
||||
40
tests/8_json_validator/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
40
tests/8_json_validator/outputs/openai_gpt-5.2 EFF_xhigh.js
Normal file
@@ -0,0 +1,40 @@
|
||||
// Cached promise for a configured Ajv instance (created at most once;
// a load failure leaves the rejected promise cached, as before).
let _ajvP

// Build or reuse the shared Ajv validator with format support.
const _ajv = () => {
  _ajvP ??= (async () => {
    const [{ default: Ajv }, { default: addFormats }] = await Promise.all([
      import("https://esm.sh/ajv@8.12.0"),
      import("https://esm.sh/ajv-formats@2.1.1")
    ])
    const ajv = new Ajv({ allErrors: true, strict: false, allowUnionTypes: true })
    addFormats(ajv)
    return ajv
  })()
  return _ajvP
}

// Turn one Ajv error object into a "<path> <message>" string, with
// friendlier wording for missing / disallowed properties.
const _err = e => {
  let path = e.instancePath || ""
  let msg = e.message || "is invalid"
  const { keyword, params } = e
  if (keyword === "required" && params?.missingProperty) {
    path += `/${params.missingProperty}`
    msg = "is required"
  } else if (keyword === "additionalProperties" && params?.additionalProperty) {
    path += `/${params.additionalProperty}`
    msg = "is not allowed"
  }
  return `${path || "/"} ${msg}`.trim()
}

/**
 * Validate `json` against a JSON Schema.
 * Never throws: loader and compile failures are reported as error strings.
 * @param {*} json
 * @param {object} schema
 * @returns {Promise<{valid: boolean, errors: string[]}>}
 */
async function validateJSON(json, schema) {
  let ajv
  try {
    ajv = await _ajv()
  } catch (e) {
    return { valid: false, errors: [String(e?.message || e)] }
  }

  let validate
  try {
    validate = ajv.compile(schema)
  } catch (e) {
    return { valid: false, errors: [String(e?.message || e)] }
  }

  const valid = !!validate(json)
  return { valid, errors: valid ? [] : (validate.errors || []).map(_err) }
}
export default validateJSON;

// Generation time: 95.873s
// Result: PASS
|
||||
@@ -0,0 +1,77 @@
|
||||
// d3 v7 ESM bundle, imported lazily and cached.
const d3Url = "https://cdn.jsdelivr.net/npm/d3@7/+esm";
let d3p;

const loadD3 = () => d3p || (d3p = import(d3Url));
// Numeric coercion where a blank/whitespace string is NaN rather than 0
// (plain +"" would be 0).
const toNum = v => (typeof v == "string" && !v.trim() ? NaN : +v);
// Timestamp coercion: Date -> epoch ms; non-numeric strings go through
// Date.parse; everything else through +.
const toTime = t => (t instanceof Date ? +t : typeof t == "string" && isNaN(+t) ? Date.parse(t) : +t);

// Normalize one stream item into { timestamp, value }. Accepted shapes:
// bare number (timestamped "now"), [time, value] pair, or an object with
// timestamp/time/t and value/v/y aliases. Anything else yields {} and is
// later dropped by the finiteness check.
const asPoint = p => {
  if (p == null) return {};
  if (typeof p == "number") return { timestamp: Date.now(), value: p };
  if (Array.isArray(p)) return { timestamp: toTime(p[0]), value: toNum(p[1]) };
  if (typeof p == "object")
    return {
      timestamp: toTime(p.timestamp ?? p.time ?? p.t),
      value: toNum(p.value ?? p.v ?? p.y)
    };
  return {};
};

// Consume an async iterable of data points, keep a bounded window of them
// with an exponential moving average, and render the EMA as an SVG path
// using d3 linear scales. Returns { data, path }.
// NOTE(review): this drains `asyncIterable` to completion before rendering —
// an unbounded stream would never resolve; confirm callers pass finite
// streams.
async function createStreamVisualizer(asyncIterable, o = {}) {
  const { scaleLinear, line } = await loadD3();

  let {
    maxPoints = 500,
    alpha = 0.2,
    width = 600,
    height = 200,
    yDomain = [0, 1]
  } = o;

  // Sanitize options: maxPoints >= 1 (Infinity allowed, junk -> 500),
  // alpha clamped to [0, 1], width/height non-negative.
  maxPoints =
    maxPoints === Infinity
      ? Infinity
      : Number.isFinite(+maxPoints)
        ? Math.max(1, Math.floor(+maxPoints))
        : 500;

  alpha = Number.isFinite(+alpha) ? Math.min(1, Math.max(0, +alpha)) : 0.2;
  width = Math.max(0, +width || 0);
  height = Math.max(0, +height || 0);

  const data = [];
  let ema, seeded = false;

  for await (const item of asyncIterable) {
    const { timestamp, value } = asPoint(item);
    if (!Number.isFinite(timestamp) || !Number.isFinite(value)) continue;

    // First valid value seeds the EMA; afterwards standard exponential mix.
    ema = seeded ? alpha * value + (1 - alpha) * ema : (seeded = true, value);
    data.push({ timestamp, value, ema });
    // Sliding window: drop oldest points beyond maxPoints.
    if (data.length > maxPoints) data.splice(0, data.length - maxPoints);
  }

  if (!data.length) return { data, path: "" };

  // X domain spans first..last retained timestamps; widen a degenerate
  // single-instant domain by 1 ms so the scale stays invertible.
  let t0 = data[0].timestamp,
    t1 = data[data.length - 1].timestamp;
  if (t0 === t1) t1 = t0 + 1;

  // Y domain from options, with the same degenerate-domain guard.
  let [y0, y1] = Array.isArray(yDomain) ? yDomain : [0, 1];
  y0 = toNum(y0);
  y1 = toNum(y1);
  if (!Number.isFinite(y0) || !Number.isFinite(y1)) (y0 = 0), (y1 = 1);
  if (y0 === y1) y1 = y0 + 1;

  // Inverted Y range: SVG y grows downward.
  const x = scaleLinear().domain([t0, t1]).range([0, width]);
  const y = scaleLinear().domain([y0, y1]).range([height, 0]);

  // Path traces the EMA, not the raw values; line() returns null for no
  // points, hence the || "" fallback.
  const path = line()
    .x(d => x(d.timestamp))
    .y(d => y(d.ema))(data) || "";

  return { data, path };
}
export default createStreamVisualizer;

// Generation time: 140.623s
// Result: PASS
|
||||
Reference in New Issue
Block a user