Mirror of https://github.com/multipleof4/lynchmark.git, synced 2026-01-13 16:17:54 +00:00
Docs: Update benchmark for openai/gpt-5.2
tests/10_scrypt_hash/outputs/openai_gpt-5.2.js (new file, 20 lines)
@@ -0,0 +1,20 @@
let _scryptMod
let _enc = new TextEncoder

let _getScrypt = () =>
  _scryptMod ||= import("https://cdn.jsdelivr.net/npm/scrypt-js@3.0.1/+esm")
    .then(m => (m.scrypt && m) || m.default || m)

let _toHex = u8 =>
  Array.from(u8, b => b.toString(16).padStart(2, "0")).join("")

async function hashPassword(password, salt) {
  let { scrypt } = await _getScrypt()
  let pw = _enc.encode(password)
  let sa = _enc.encode(salt)
  let hash = await scrypt(pw, sa, 1024, 8, 1, 32)
  return _toHex(hash)
}
export default hashPassword;
// Generation time: 13.034s
// Result: PASS
tests/11_geospatial/outputs/openai_gpt-5.2.js (new file, 20 lines)
@@ -0,0 +1,20 @@
let turf$;

async function analyzeGPS(readings, boundary) {
  turf$ ||= import("https://esm.sh/@turf/turf@6.5.0");
  const { point, booleanPointInPolygon, lineString, length } = await turf$;

  const coords = (readings || [])
    .filter(r => r && Array.isArray(r.loc) && r.loc.length === 2 && Number.isFinite(r.ts))
    .filter(r => booleanPointInPolygon(point(r.loc), boundary, { ignoreBoundary: true }))
    .sort((a, b) => a.ts - b.ts)
    .map(r => r.loc);

  if (coords.length < 2) return 0;

  const km = length(lineString(coords), { units: "kilometers" });
  return +km.toFixed(2);
}
export default analyzeGPS;
// Generation time: 7.395s
// Result: PASS
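
Usage sketch (illustrative, not part of the committed output): after importing the default export above, analyzeGPS expects readings shaped as { loc: [lng, lat], ts: number } and a GeoJSON polygon boundary, and returns kilometers rounded to two decimals. The coordinates below are invented.

const boundary = {
  type: "Feature",
  properties: {},
  geometry: { type: "Polygon", coordinates: [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]] }
};
const readings = [
  { loc: [0.2, 0.2], ts: 1000 },
  { loc: [0.4, 0.4], ts: 2000 },
  { loc: [5, 5], ts: 3000 }      // outside the boundary, filtered out
];
const km = await analyzeGPS(readings, boundary);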
tests/1_dijkstra/outputs/openai_gpt-5.2.js (new file, 39 lines)
@@ -0,0 +1,39 @@
export async function findShortestPath(graph, start, end) {
  if (start === end) return 0
  if (!graph || typeof graph !== "object") return Infinity

  const { default: PriorityQueue } = await import(
    "https://cdn.jsdelivr.net/npm/js-priority-queue@0.1.5/+esm"
  )

  const dist = Object.create(null)
  dist[start] = 0

  const pq = new PriorityQueue({ comparator: (a, b) => a[0] - b[0] })
  pq.queue([0, start])

  while (pq.length) {
    const [d, u] = pq.dequeue()
    if (d !== dist[u]) continue
    if (u === end) return d

    const nbrs = graph[u]
    if (!nbrs || typeof nbrs !== "object") continue

    for (const v in nbrs) {
      const w = +nbrs[v]
      if (!Number.isFinite(w) || w < 0) continue
      const nd = d + w
      const cur = dist[v]
      if (cur === undefined || nd < cur) {
        dist[v] = nd
        pq.queue([nd, v])
      }
    }
  }

  return Infinity
}
export default findShortestPath;
// Generation time: 6.889s
// Result: PASS
tests/2_convex_hull/outputs/openai_gpt-5.2.js (new file, 30 lines)
@@ -0,0 +1,30 @@
async function findConvexHull(points) {
  if (!Array.isArray(points)) throw new TypeError("points must be an array")

  const { default: _ } = await import("https://cdn.jsdelivr.net/npm/lodash@4.17.21/+esm")
  const eq = (a, b) => a.x === b.x && a.y === b.y
  const cross = (a, b, c) => (b.x - a.x) * (c.y - a.y) - (b.y - a.y) * (c.x - a.x)

  const pts = _.uniqWith(_.sortBy(points, ["x", "y"]), eq)
  if (pts.length < 2) return pts.slice()

  const lower = []
  for (const p of pts) {
    while (lower.length > 1 && cross(lower.at(-2), lower.at(-1), p) <= 0) lower.pop()
    lower.push(p)
  }

  const upper = []
  for (let i = pts.length; i--;) {
    const p = pts[i]
    while (upper.length > 1 && cross(upper.at(-2), upper.at(-1), p) <= 0) upper.pop()
    upper.push(p)
  }

  lower.pop()
  upper.pop()
  return lower.concat(upper)
}
export default findConvexHull;
// Generation time: 15.278s
// Result: PASS
tests/3_lis/outputs/openai_gpt-5.2.js (new file, 15 lines)
@@ -0,0 +1,15 @@
let _d3

async function findLISLength(a) {
  if (!Array.isArray(a)) throw new TypeError("Expected an array of numbers")

  let { bisectLeft: b } = await (_d3 ||= import("https://cdn.jsdelivr.net/npm/d3-array@3/+esm"))
  let t = []

  for (let x of a) t[b(t, x)] = x

  return t.length
}
export default findLISLength;
// Generation time: 9.821s
// Result: PASS
tests/4_transpiler/outputs/openai_gpt-5.2.js (new file, 7 lines)
@@ -0,0 +1,7 @@
export async function renderTemplate(t, d = {}) {
  let { default: H } = await import("https://esm.sh/handlebars@4.7.8")
  return H.compile(String(t))({ ...d })
}
export default renderTemplate;
// Generation time: 2.200s
// Result: PASS
tests/5_markdown_parser/outputs/openai_gpt-5.2.js (new file, 17 lines)
@@ -0,0 +1,17 @@
const _mdLibs=globalThis.__mdLibs||=(()=> {
  const md="https://cdn.jsdelivr.net/npm/markdown-it@14.1.0/dist/markdown-it.min.mjs"
  const pur="https://cdn.jsdelivr.net/npm/dompurify@3.0.6/dist/purify.es.mjs"
  return Promise.all([import(md),import(pur)]).then(([{default:MarkdownIt},{default:DOMPurify}])=>({
    md:new MarkdownIt({html:false,linkify:true,breaks:false}),
    pur:DOMPurify
  }))
})()

async function parseMarkdown(markdown){
  const {md,pur}=await _mdLibs
  const html=md.render(String(markdown??""))
  return pur.sanitize(html,{USE_PROFILES:{html:true}})
}
export default parseMarkdown;
// Generation time: 22.022s
// Result: FAIL
tests/6_csv_processor/outputs/openai_gpt-5.2.js (new file, 47 lines)
@@ -0,0 +1,47 @@
async function processCSV(csv, cfg) {
  cfg ||= {};
  let {
    filterColumn,
    filterValue,
    groupBy,
    aggregateColumn,
    operation
  } = cfg;

  if (typeof csv != "string" || !groupBy || !operation) return [];

  let [{ default: Papa }, { rollups }] = await Promise.all([
    import("https://esm.sh/papaparse@5.4.1"),
    import("https://esm.sh/d3-array@3.2.4")
  ]);

  let parsed = Papa.parse(csv, { header: true, skipEmptyLines: true });
  if (parsed.errors?.length) throw new Error(parsed.errors[0].message || "CSV parse error");

  let rows = parsed.data || [];
  if (filterColumn != null) rows = rows.filter(r => r?.[filterColumn] == filterValue);

  let num = v => {
    v = Number(v);
    return Number.isFinite(v) ? v : 0;
  };

  let agg = op => {
    if (op === "count") return v => v.length;

    let sum = v => {
      let s = 0;
      for (let i = 0; i < v.length; i++) s += num(v[i]?.[aggregateColumn]);
      return s;
    };

    if (op === "avg") return v => v.length ? sum(v) / v.length : 0;
    return v => sum(v);
  };

  let reduce = agg(operation);
  return rollups(rows, reduce, d => d?.[groupBy]).map(([k, result]) => ({ [groupBy]: k, result }));
}
export default processCSV;
// Generation time: 22.087s
// Result: PASS
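
Usage sketch (illustrative; the column names are invented): cfg takes optional filterColumn/filterValue plus groupBy, operation ("sum", "avg", or "count"), and aggregateColumn for sum/avg.

const csv = "region,amount\neast,10\neast,20\nwest,5";
const out = await processCSV(csv, { groupBy: "region", aggregateColumn: "amount", operation: "sum" });
// expected: [{ region: "east", result: 30 }, { region: "west", result: 5 }]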
tests/7_scheduler/outputs/openai_gpt-5.2.js (new file, 94 lines)
@@ -0,0 +1,94 @@
const _luxon = import("https://cdn.jsdelivr.net/npm/luxon@3.5.0/+esm");

const findAvailableSlots = async (calA = [], calB = [], c = {}) => {
  const { DateTime } = await _luxon;

  const durMin = c?.durationMinutes;
  const sr = c?.searchRange;
  const wh = c?.workHours;

  if (!Number.isFinite(durMin) || durMin <= 0) throw new TypeError("Invalid durationMinutes");
  if (!sr?.start || !sr?.end) throw new TypeError("Invalid searchRange");
  if (!wh?.start || !wh?.end) throw new TypeError("Invalid workHours");

  const parseHm = s => {
    const m = /^([01]\d|2[0-3]):([0-5]\d)$/.exec(String(s).trim());
    if (!m) throw new TypeError("Invalid workHours time");
    return { h: +m[1], m: +m[2] };
  };

  const rs = DateTime.fromISO(sr.start, { zone: "utc" });
  const re = DateTime.fromISO(sr.end, { zone: "utc" });
  if (!rs.isValid || !re.isValid) throw new TypeError("Invalid searchRange ISO");
  const rS = rs.toMillis(), rE = re.toMillis();
  if (!(rE > rS)) return [];

  const ws = parseHm(wh.start), we = parseHm(wh.end);
  if (we.h * 60 + we.m <= ws.h * 60 + ws.m) throw new TypeError("workHours.end must be after workHours.start");

  const durMs = Math.round(durMin * 60000);
  if (durMs <= 0) throw new TypeError("Invalid durationMinutes");

  const norm = x => {
    const s = DateTime.fromISO(x.start, { zone: "utc" });
    const e = DateTime.fromISO(x.end, { zone: "utc" });
    if (!s.isValid || !e.isValid) return null;
    let a = s.toMillis(), b = e.toMillis();
    if (!(b > a)) return null;
    if (b <= rS || a >= rE) return null;
    a = Math.max(a, rS);
    b = Math.min(b, rE);
    return b > a ? [a, b] : null;
  };

  const mergedBusy = (() => {
    const xs = [...calA, ...calB].map(norm).filter(Boolean).sort((p, q) => p[0] - q[0] || p[1] - q[1]);
    if (!xs.length) return [];
    const out = [];
    let [cs, ce] = xs[0];
    for (let i = 1; i < xs.length; i++) {
      const [s, e] = xs[i];
      if (s <= ce) ce = Math.max(ce, e);
      else out.push([cs, ce]), (cs = s), (ce = e);
    }
    out.push([cs, ce]);
    return out;
  })();

  const free = (() => {
    const out = [];
    let cur = rS;
    for (const [s, e] of mergedBusy) {
      if (s > cur) out.push([cur, s]);
      cur = Math.max(cur, e);
      if (cur >= rE) break;
    }
    if (cur < rE) out.push([cur, rE]);
    return out;
  })();

  const iso = ms => DateTime.fromMillis(ms, { zone: "utc" }).toISO({ suppressMilliseconds: true });

  const out = [];
  const dayMs = 86400000;

  for (const [fs, fe] of free) {
    let d0 = DateTime.fromMillis(fs, { zone: "utc" }).startOf("day").toMillis();
    const d1 = DateTime.fromMillis(fe - 1, { zone: "utc" }).startOf("day").toMillis();

    for (let d = d0; d <= d1; d += dayMs) {
      const base = DateTime.fromMillis(d, { zone: "utc" });
      const wS = base.set({ hour: ws.h, minute: ws.m, second: 0, millisecond: 0 }).toMillis();
      const wE = base.set({ hour: we.h, minute: we.m, second: 0, millisecond: 0 }).toMillis();

      const a = Math.max(fs, wS);
      const b = Math.min(fe, wE);
      for (let t = a; t + durMs <= b; t += durMs) out.push({ start: iso(t), end: iso(t + durMs) });
    }
  }

  return out;
};
export default findAvailableSlots;
// Generation time: 32.586s
// Result: PASS
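
Usage sketch (illustrative; all times invented): the config expects ISO UTC searchRange bounds, HH:MM workHours, and durationMinutes; busy events from both calendars are merged before slots are emitted.

const slots = await findAvailableSlots(
  [{ start: "2024-01-02T09:00:00Z", end: "2024-01-02T10:00:00Z" }],  // calendar A busy
  [],                                                                 // calendar B busy
  {
    durationMinutes: 60,
    searchRange: { start: "2024-01-02T00:00:00Z", end: "2024-01-03T00:00:00Z" },
    workHours: { start: "09:00", end: "17:00" }
  }
);
// expected: seven hourly slots from 10:00 to 17:00 UTC on 2024-01-02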
tests/8_json_validator/outputs/openai_gpt-5.2.js (new file, 42 lines)
@@ -0,0 +1,42 @@
let _ajvP

const _getAjv = () =>
  _ajvP ||= (async () => {
    const [{ default: Ajv }, { default: addFormats }] = await Promise.all([
      import("https://cdn.jsdelivr.net/npm/ajv@8.17.1/dist/ajv.mjs"),
      import("https://cdn.jsdelivr.net/npm/ajv-formats@3.0.1/dist/ajv-formats.mjs")
    ])

    const ajv = new Ajv({
      allErrors: true,
      strict: false,
      allowUnionTypes: true,
      loadSchema: async u => (await fetch(u, { credentials: "same-origin" })).json()
    })

    addFormats(ajv)
    return ajv
  })()

const _fmtErr = e =>
  [e.instancePath || "/", e.message, e.params && Object.keys(e.params).length ? JSON.stringify(e.params) : ""]
    .filter(Boolean)
    .join(" ")

const validateJSON = async (data, schema) => {
  const ajv = await _getAjv()
  let validate

  try {
    validate = await ajv.compileAsync(schema)
  } catch {
    validate = ajv.compile(schema)
  }

  const valid = !!validate(data)
  const errors = valid ? [] : (validate.errors || []).map(_fmtErr)
  return { valid, errors }
}
export default validateJSON;
// Generation time: 12.999s
// Result: FAIL
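
Intended call shape (illustrative; the schema is invented, and since this output is recorded as FAIL the sketch only shows the API the file aims for):

const schema = { type: "object", required: ["age"], properties: { age: { type: "integer" } } };
const ok = await validateJSON({ age: 42 }, schema);   // expected { valid: true, errors: [] }
const bad = await validateJSON({}, schema);           // expected { valid: false, errors: [<formatted Ajv message>] }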
tests/9_stream_visualizer/outputs/openai_gpt-5.2.js (new file, 48 lines)
@@ -0,0 +1,48 @@
async function createStreamVisualizer(asyncIterable, o = {}) {
  let {
    maxPoints = 500,
    alpha = 0.2,
    width = 600,
    height = 200,
    yDomain = [0, 1]
  } = o

  alpha = +alpha
  if (!(alpha >= 0 && alpha <= 1)) alpha = 0.2

  let d3 = await import("https://cdn.jsdelivr.net/npm/d3@7/+esm")
  let data = [], ema, seeded = false

  for await (let p of asyncIterable) {
    let timestamp, value

    if (Array.isArray(p)) [timestamp, value] = p
    else if (p && typeof p == "object") ({ timestamp, value } = p)
    else (timestamp = Date.now()), (value = p)

    timestamp = +timestamp
    value = +value

    if (!seeded) (ema = value), (seeded = true)
    else ema = alpha * value + (1 - alpha) * ema

    data.push({ timestamp, value, ema })

    if (data.length > maxPoints) data.splice(0, data.length - maxPoints)
  }

  let path = ""
  if (data.length) {
    let t0 = data[0].timestamp, t1 = data.at(-1).timestamp
    if (t0 === t1) t1 = t0 + 1

    let x = d3.scaleLinear().domain([t0, t1]).range([0, width])
    let y = d3.scaleLinear().domain(yDomain).range([height, 0])
    path = d3.line().x(d => x(d.timestamp)).y(d => y(d.ema))(data) || ""
  }

  return { data, path }
}
export default createStreamVisualizer;
// Generation time: 18.105s
// Result: PASS
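
Usage sketch (illustrative; values invented): the function consumes a finite async iterable, then resolves with the buffered points and an SVG path string for the EMA series.

async function* fakeStream() {
  yield { timestamp: 1, value: 0.2 };
  yield { timestamp: 2, value: 0.8 };
  yield { timestamp: 3, value: 0.5 };
}
const { data, path } = await createStreamVisualizer(fakeStream(), { alpha: 0.5, yDomain: [0, 1] });
// data[i].ema holds the smoothed values; path is a d3 line string scaled to width x height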