mirror of
https://github.com/multipleof4/lynchmark.git
synced 2026-03-17 03:11:01 +00:00
Docs: Update benchmark for openai/gpt-5.4
This commit is contained in:
9
tests/10_scrypt_hash/outputs/openai_gpt-5.4.js
Normal file
9
tests/10_scrypt_hash/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,9 @@
|
||||
// Derives a hex-encoded scrypt hash (N=1024, r=8, p=1, 32 bytes) of a
// password + salt pair. Both inputs are UTF-8 encoded before hashing.
async function hashPassword(password, salt) {
  const { scrypt } = await import('https://cdn.jsdelivr.net/npm/scrypt-js@3.0.1/+esm');

  const encoder = new TextEncoder();
  const passwordBytes = encoder.encode(password);
  const saltBytes = encoder.encode(salt);

  const digest = await scrypt(passwordBytes, saltBytes, 1024, 8, 1, 32);

  // Lowercase hex, two digits per byte.
  let hex = '';
  for (const byte of digest) hex += byte.toString(16).padStart(2, '0');
  return hex;
}
export default hashPassword;
|
||||
// Generation time: 1.666s
|
||||
// Result: PASS
|
||||
20
tests/11_geospatial/outputs/openai_gpt-5.4.js
Normal file
20
tests/11_geospatial/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,20 @@
|
||||
// Sums the path length (km, 2 decimals) walked through `boundary` by the
// valid GPS readings, ordered by timestamp. Returns 0 with fewer than two
// usable points.
async function analyzeGPS(readings, boundary) {
  const turf = await import('https://cdn.skypack.dev/@turf/turf');

  // A reading is usable when its timestamp is finite, its location is a
  // finite two-element pair, and the point lies strictly inside the boundary
  // (points exactly on the edge are excluded via ignoreBoundary).
  const isUsable = ({ ts, loc }) =>
    Number.isFinite(ts) &&
    Array.isArray(loc) &&
    loc.length === 2 &&
    loc.every(Number.isFinite) &&
    turf.booleanPointInPolygon(turf.point(loc), boundary, { ignoreBoundary: true });

  const inside = readings.filter(isUsable).sort((a, b) => a.ts - b.ts);
  if (inside.length < 2) return 0;

  const coords = inside.map(({ loc }) => loc);
  const km = turf.length(turf.lineString(coords), { units: 'kilometers' });
  return +km.toFixed(2);
}
export default analyzeGPS;
|
||||
// Generation time: 2.011s
|
||||
// Result: PASS
|
||||
73
tests/12_mst_pipeline/outputs/openai_gpt-5.4.js
Normal file
73
tests/12_mst_pipeline/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,73 @@
|
||||
// Computes a minimum spanning tree from a TOML edge list using Kruskal's
// algorithm (union-find + min-heap) and renders it as a text table.
//
// @param {string} tomlString - TOML document with an [[edges]] array of
//   { from: string, to: string, weight: number } entries.
// @returns {Promise<{table: string, totalWeight: number}>}
// @throws {TypeError} when an edge entry is malformed.
async function computeMST(tomlString) {
  const [{ parse }, heapMod, { default: table }] = await Promise.all([
    import('https://cdn.jsdelivr.net/npm/smol-toml/+esm'),
    import('https://cdn.jsdelivr.net/npm/mnemonist/+esm'),
    import('https://cdn.jsdelivr.net/npm/text-table/+esm')
  ])

  // mnemonist's CDN build may expose Heap as a named export or under the
  // default export, depending on the ESM conversion.
  const Heap = heapMod.Heap || heapMod.default?.Heap
  if (!Heap) throw new Error('Failed to load mnemonist Heap')

  const data = parse(String(tomlString ?? ''))
  const edges = Array.isArray(data?.edges) ? data.edges : []

  // Validate and normalize every edge: string endpoints, finite weight.
  const norm = edges.map((e, i) => {
    const from = e?.from
    const to = e?.to
    const weight = e?.weight
    if (typeof from !== 'string' || typeof to !== 'string' || !Number.isFinite(weight)) {
      throw new TypeError(`Invalid edge at index ${i}`)
    }
    return { from, to, weight: +weight }
  })

  // Union-find (disjoint set) over every vertex mentioned by any edge.
  const nodes = [...new Set(norm.flatMap(({ from, to }) => [from, to]))]
  const parent = new Map(nodes.map(n => [n, n]))
  const rank = new Map(nodes.map(n => [n, 0]))

  // Find with path compression: re-points each visited node at the root.
  const find = x => {
    let p = parent.get(x)
    if (p !== x) {
      p = find(p)
      parent.set(x, p)
    }
    return p
  }

  // Union by rank; returns false when a and b are already connected.
  const union = (a, b) => {
    a = find(a)
    b = find(b)
    if (a === b) return false
    const ra = rank.get(a)
    const rb = rank.get(b)
    if (ra < rb) parent.set(a, b)
    else if (ra > rb) parent.set(b, a)
    else {
      parent.set(b, a)
      rank.set(a, ra + 1)
    }
    return true
  }

  // Min-heap ordered by weight drives Kruskal's greedy edge selection.
  const heap = new Heap((a, b) => a.weight - b.weight)
  for (const edge of norm) heap.push(edge)

  const mst = []
  let totalWeight = 0
  // A spanning tree over V vertices uses at most V - 1 edges.
  const limit = Math.max(0, nodes.length - 1)

  while (heap.size && mst.length < limit) {
    const edge = heap.pop()
    // Accept the edge only when it joins two distinct components.
    if (union(edge.from, edge.to)) {
      mst.push([edge.from, edge.to, String(edge.weight)])
      totalWeight += edge.weight
    }
  }

  return {
    table: table([['From', 'To', 'Weight'], ...mst]),
    totalWeight
  }
}
export default computeMST;
|
||||
// Generation time: 5.731s
|
||||
// Result: PASS
|
||||
35
tests/1_dijkstra/outputs/openai_gpt-5.4.js
Normal file
35
tests/1_dijkstra/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,35 @@
|
||||
// Dijkstra's shortest path over an adjacency-object graph
// ({ node: { neighbor: weight } }). Returns the total weight of the
// cheapest start→end path, or Infinity when no path exists.
async function findShortestPath(graph, start, end) {
  // Missing graph or unknown endpoints: unreachable by definition.
  if (!graph || !(start in graph) || !(end in graph)) return Infinity
  if (start === end) return 0

  const { default: PriorityQueue } = await import('https://cdn.jsdelivr.net/npm/js-priority-queue@0.1.5/+esm')

  const best = Object.create(null)
  for (const vertex in graph) best[vertex] = Infinity
  best[start] = 0

  const settled = new Set()
  const frontier = new PriorityQueue({ comparator: (a, b) => a[0] - b[0] })
  frontier.queue([0, start])

  while (frontier.length) {
    const [cost, vertex] = frontier.dequeue()
    // Stale queue entries (already settled at a lower cost) are skipped.
    if (settled.has(vertex)) continue
    if (vertex === end) return cost
    settled.add(vertex)

    const edges = graph[vertex]
    for (const neighbor in edges) {
      if (settled.has(neighbor)) continue
      const candidate = cost + edges[neighbor]
      // ?? Infinity guards neighbors that are not graph keys themselves.
      if (candidate < (best[neighbor] ?? Infinity)) {
        best[neighbor] = candidate
        frontier.queue([candidate, neighbor])
      }
    }
  }

  return Infinity
}
export default findShortestPath;
|
||||
// Generation time: 3.635s
|
||||
// Result: PASS
|
||||
38
tests/2_convex_hull/outputs/openai_gpt-5.4.js
Normal file
38
tests/2_convex_hull/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,38 @@
|
||||
// Convex hull via Andrew's monotone chain. Accepts an array of {x, y}
// points (invalid entries are dropped, duplicates removed) and returns
// the hull vertices in counter-clockwise order.
// @throws {TypeError} when `points` is not an array.
export async function findConvexHull(points) {
  if (!Array.isArray(points)) throw new TypeError('Expected an array of points')

  const { sortBy, uniqBy } = await import('https://cdn.jsdelivr.net/npm/lodash-es@4.17.21/lodash.js')

  // Keep only finite {x, y} pairs, copied and deduplicated by coordinates.
  const clean = points
    .filter(p => p && Number.isFinite(p.x) && Number.isFinite(p.y))
    .map(({ x, y }) => ({ x, y }))
  const pts = uniqBy(clean, p => `${p.x},${p.y}`)
  if (pts.length <= 1) return pts.slice()

  const sorted = sortBy(pts, ['x', 'y'])

  // Cross product of OA × OB; <= 0 means B is clockwise of (or collinear
  // with) A as seen from O, so A is not a hull vertex.
  const cross = (o, a, b) => (a.x - o.x) * (b.y - o.y) - (a.y - o.y) * (b.x - o.x)

  // Builds one half of the hull by scanning points in the given order.
  const buildChain = seq => {
    const chain = []
    for (const p of seq) {
      while (chain.length > 1 && cross(chain.at(-2), chain.at(-1), p) <= 0) chain.pop()
      chain.push(p)
    }
    chain.pop() // the last point repeats as the first of the other chain
    return chain
  }

  const lower = buildChain(sorted)
  const upper = buildChain([...sorted].reverse())
  return lower.concat(upper)
}
export default findConvexHull;
|
||||
// Generation time: 4.241s
|
||||
// Result: PASS
|
||||
74
tests/3_signal_pipeline/outputs/openai_gpt-5.4.js
Normal file
74
tests/3_signal_pipeline/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,74 @@
|
||||
// Synthesizes a multi-component sine signal described by a YAML config,
// runs an FFT, and reports spectral peaks plus a sanitized HTML table.
//
// @param {string} yamlString - YAML with sampleRate, duration, and a
//   components list of { frequency, amplitude } entries.
// @returns {Promise<{peaks: Array, html: string, signalLength: number}>}
async function analyzeSignal(yamlString) {
  const [
    yamlMod,
    mathMod,
    ndarrayMod,
    fftMod,
    purifyMod
  ] = await Promise.all([
    import('https://cdn.jsdelivr.net/npm/js-yaml@4/+esm'),
    import('https://cdn.jsdelivr.net/npm/mathjs@13/+esm'),
    import('https://cdn.jsdelivr.net/npm/ndarray@1.0.19/+esm'),
    import('https://cdn.jsdelivr.net/npm/ndarray-fft@1.0.3/+esm'),
    import('https://cdn.jsdelivr.net/npm/dompurify@3/+esm')
  ])

  // Normalize CDN module shapes: each package may expose its API directly
  // or under a default export, depending on the ESM conversion.
  const yaml = yamlMod.load ? yamlMod : yamlMod.default || yamlMod
  const math = mathMod.create && mathMod.all ? mathMod.create(mathMod.all) : mathMod.default || mathMod
  const ndarray = ndarrayMod.default || ndarrayMod
  const fft = fftMod.default || fftMod
  const DOMPurify = purifyMod.default || purifyMod

  // Defaults of 0 make invalid configs yield an empty (N = 0) signal.
  const cfg = yaml.load(yamlString) || {}
  const sampleRate = Number(cfg.sampleRate) || 0
  const duration = Number(cfg.duration) || 0
  const components = Array.isArray(cfg.components) ? cfg.components : []
  const N = Math.max(0, Math.floor(sampleRate * duration))

  const signal = new Float64Array(N)

  // Sum each component sample-by-sample: amplitude * sin(2*pi*f*t).
  for (let i = 0; i < N; i++) {
    const t = i / sampleRate
    let sum = 0
    for (let j = 0; j < components.length; j++) {
      const c = components[j] || {}
      sum += (Number(c.amplitude) || 0) * math.sin(2 * math.pi * (Number(c.frequency) || 0) * t)
    }
    signal[i] = sum
  }

  // ndarray-fft transforms in place; the imaginary part starts at zero.
  const realData = new Float64Array(signal)
  const imagData = new Float64Array(N)
  const real = ndarray(realData, [N])
  const imag = ndarray(imagData, [N])

  if (N) fft(1, real, imag) // 1 selects the forward transform

  const peaks = []
  const half = N / 2

  // Scan the non-redundant half of the spectrum. Dividing |X[k]| by N/2
  // recovers (approximately) the amplitude of a real sinusoid component.
  for (let k = 0; k <= Math.floor(half); k++) {
    const magnitude = half ? math.sqrt(real.get(k) ** 2 + imag.get(k) ** 2) / half : 0
    if (magnitude > 0.1) { // threshold filters leakage/noise bins
      peaks.push({
        frequencyHz: Math.round(k * sampleRate / N),
        magnitude: Math.round(magnitude * 100) / 100
      })
    }
  }

  // Strongest peaks first.
  peaks.sort((a, b) => b.magnitude - a.magnitude)

  const rows = peaks.map(({ frequencyHz, magnitude }) =>
    `<tr><td>${frequencyHz}</td><td>${magnitude.toFixed(2)}</td></tr>`
  ).join('')

  // Sanitize even self-generated markup before handing it to callers.
  const html = DOMPurify.sanitize(
    `<table><tr><th>Frequency (Hz)</th><th>Magnitude</th></tr>${rows}</table>`
  )

  return { peaks, html, signalLength: N }
}
export default analyzeSignal;
|
||||
// Generation time: 7.472s
|
||||
// Result: PASS
|
||||
61
tests/4_hexchain_pipeline/outputs/openai_gpt-5.4.js
Normal file
61
tests/4_hexchain_pipeline/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,61 @@
|
||||
// Parses a TOML config ({ seed, count, label }), validates it against a
// JSON Schema, generates `count` seeded pseudo-random samples, and returns
// summary statistics plus a sanitized HTML stats table.
export async function hexchain(toml) {
  // Load all dependencies in parallel; unpack named vs default exports
  // per package afterwards.
  const modules = await Promise.all([
    import('https://cdn.jsdelivr.net/npm/smol-toml/+esm'),
    import('https://cdn.jsdelivr.net/npm/seedrandom/+esm'),
    import('https://cdn.jsdelivr.net/npm/simple-statistics/+esm'),
    import('https://cdn.jsdelivr.net/npm/ajv/+esm'),
    import('https://cdn.jsdelivr.net/npm/text-table/+esm'),
    import('https://cdn.jsdelivr.net/npm/dompurify/+esm')
  ]);
  const { parse } = modules[0];
  const SeedRandom = modules[1].default;
  const ss = modules[2];
  const Ajv = modules[3].default;
  const textTable = modules[4].default;
  const domPurifyMod = modules[5];

  const config = parse(toml);

  // Reject configs missing seed/count/label or with out-of-range values.
  const schema = {
    type: 'object',
    required: ['seed', 'count', 'label'],
    properties: {
      seed: { type: 'string' },
      count: { type: 'integer', minimum: 1, maximum: 10000 },
      label: { type: 'string', minLength: 1 }
    }
  };
  const ajv = new Ajv();
  if (!ajv.validate(schema, config)) {
    return { valid: false, errors: ajv.errorsText() };
  }

  // Deterministic sample stream derived from the configured seed.
  const rng = new SeedRandom(config.seed);
  const nums = [];
  for (let i = 0; i < config.count; i++) nums.push(rng());

  const round = n => +n.toFixed(6);
  const mean = round(ss.mean(nums));
  const stddev = round(ss.standardDeviation(nums));
  const median = round(ss.median(nums));

  const table = textTable([
    ['Stat', 'Value'],
    ['mean', String(mean)],
    ['stddev', String(stddev)],
    ['median', String(median)]
  ]);

  const DOMPurify = domPurifyMod.default || domPurifyMod;
  const sanitizedHTML = DOMPurify.sanitize(`<pre class="stats">${table}</pre>`);

  return {
    valid: true,
    label: config.label,
    stats: { mean, stddev, median },
    table: sanitizedHTML,
    count: config.count
  };
}
export default hexchain;
|
||||
// Generation time: 5.200s
|
||||
// Result: PASS
|
||||
15
tests/5_markdown_parser/outputs/openai_gpt-5.4.js
Normal file
15
tests/5_markdown_parser/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,15 @@
|
||||
// Converts a GFM Markdown string to HTML with marked (breaks disabled).
// @throws {TypeError} when the input is not a string.
const parseMarkdown = async markdown => {
  if (typeof markdown !== 'string') throw new TypeError('Expected a Markdown string')

  const { marked } = await import('https://cdn.jsdelivr.net/npm/marked/lib/marked.esm.js')
  marked.setOptions({ gfm: true, breaks: false })

  return marked.parse(markdown)
}
export default parseMarkdown;
|
||||
// Generation time: 1.414s
|
||||
// Result: PASS
|
||||
42
tests/6_csv_processor/outputs/openai_gpt-5.4.js
Normal file
42
tests/6_csv_processor/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,42 @@
|
||||
// Parses a CSV (header row required), keeps rows where filterColumn
// loosely equals filterValue, groups by groupBy, and aggregates
// aggregateColumn per group as sum / avg / count (default: sum).
// Returns [{ [groupBy]: key, result }].
export async function processCSV(csv, config) {
  const [papaMod, lodash] = await Promise.all([
    import('https://cdn.jsdelivr.net/npm/papaparse@5.4.1/+esm'),
    import('https://cdn.jsdelivr.net/npm/lodash-es@4.17.21/lodash.min.js')
  ]);
  const Papa = papaMod.default;

  const {
    filterColumn,
    filterValue,
    groupBy,
    aggregateColumn,
    operation
  } = config || {};

  const parsed = Papa.parse(csv, {
    header: true,
    skipEmptyLines: true,
    transformHeader: v => String(v).trim()
  });

  // Non-numeric aggregate cells contribute 0.
  const toNum = v => {
    const n = Number(v);
    if (Number.isNaN(n)) return 0;
    return n;
  };

  // NOTE: loose == is deliberate — parsed cells are strings while
  // filterValue may be a number.
  const matching = parsed.data.filter(row => row?.[filterColumn] == filterValue);
  const groups = lodash.groupBy(matching, row => row?.[groupBy]);

  const results = [];
  for (const [key, rows] of Object.entries(groups)) {
    const count = rows.length;
    let sum = 0;
    for (const row of rows) sum += toNum(row?.[aggregateColumn]);

    let result;
    if (operation === 'count') result = count;
    else if (operation === 'avg') result = count ? sum / count : 0;
    else result = sum;

    results.push({ [groupBy]: key, result });
  }
  return results;
}
export default processCSV;
|
||||
// Generation time: 3.573s
|
||||
// Result: PASS
|
||||
105
tests/7_scheduler/outputs/openai_gpt-5.4.js
Normal file
105
tests/7_scheduler/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,105 @@
|
||||
// Finds every durationMinutes-long meeting slot (UTC, back-to-back grid)
// that is free in BOTH calendars, inside searchRange, and within the daily
// workHours window.
//
// @param {Array<{start: string, end: string}>} calendarA - busy intervals (ISO).
// @param {Array<{start: string, end: string}>} calendarB - busy intervals (ISO).
// @param {{durationMinutes: number,
//          searchRange: {start: string, end: string},
//          workHours: {start: string, end: string}}} constraints
//   workHours values are "HH:MM" strings interpreted in UTC.
// @returns {Promise<Array<{start: string, end: string}>>} ISO slots, or []
//   when the constraints are invalid.
export async function findAvailableSlots(calendarA, calendarB, constraints) {
  const { DateTime, Interval } = await import('https://cdn.jsdelivr.net/npm/luxon@3/+esm')
  const { durationMinutes, searchRange, workHours } = constraints
  const toUtc = v => DateTime.fromISO(v, { zone: 'utc' })
  const pad = v => `${v}`.padStart(2, '0') // NOTE(review): unused helper — candidate for removal
  const [whs, wms] = workHours.start.split(':').map(Number) // work-hours start: hour/minute
  const [whe, wme] = workHours.end.split(':').map(Number)   // work-hours end: hour/minute
  const rangeStart = toUtc(searchRange.start)
  const rangeEnd = toUtc(searchRange.end)
  const duration = { minutes: durationMinutes }
  const stepMs = durationMinutes * 6e4 // slot grid step in milliseconds

  // Bail out on any invalid or degenerate constraint.
  if (
    !rangeStart.isValid ||
    !rangeEnd.isValid ||
    !Number.isFinite(durationMinutes) ||
    durationMinutes <= 0 ||
    rangeEnd <= rangeStart ||
    [whs, wms, whe, wme].some(v => !Number.isInteger(v))
  ) return []

  // Clamps a busy interval to the search range; null when it is invalid
  // or falls entirely outside the range.
  const clip = ({ start, end }) => {
    const s = toUtc(start)
    const e = toUtc(end)
    if (!s.isValid || !e.isValid || e <= s) return null
    const a = s < rangeStart ? rangeStart : s
    const b = e > rangeEnd ? rangeEnd : e
    return b > a ? { start: a, end: b } : null
  }

  // All busy time from both calendars, clipped and ordered by start.
  const busy = [...calendarA, ...calendarB]
    .map(clip)
    .filter(Boolean)
    .sort((a, b) => a.start.toMillis() - b.start.toMillis())

  // Merge overlapping/adjacent busy intervals.
  const merged = []
  for (const slot of busy) {
    const last = merged[merged.length - 1]
    if (!last || slot.start > last.end) merged.push({ ...slot })
    else if (slot.end > last.end) last.end = slot.end
  }

  // Invert merged busy time into free gaps across the search range.
  const free = []
  let cursor = rangeStart
  for (const slot of merged) {
    if (slot.start > cursor) free.push({ start: cursor, end: slot.start })
    if (slot.end > cursor) cursor = slot.end
  }
  if (cursor < rangeEnd) free.push({ start: cursor, end: rangeEnd })

  // Build one work-hours window per day touched by the range, clipped
  // to the range itself.
  const dayWindows = []
  for (
    let d = rangeStart.startOf('day');
    d < rangeEnd;
    d = d.plus({ days: 1 })
  ) {
    const ws = d.set({
      hour: whs,
      minute: wms,
      second: 0,
      millisecond: 0
    })
    const we = d.set({
      hour: whe,
      minute: wme,
      second: 0,
      millisecond: 0
    })
    if (we <= ws) continue
    const start = ws < rangeStart ? rangeStart : ws
    const end = we > rangeEnd ? rangeEnd : we
    if (end > start) dayWindows.push({ start, end })
  }

  const results = []
  let wi = 0 // windows before index wi end before the current free gap

  // Intersect each free gap with each overlapping work window, then emit
  // as many fixed-duration slots as fit, anchored at the intersection start.
  for (const f of free) {
    while (wi < dayWindows.length && dayWindows[wi].end <= f.start) wi++
    for (let i = wi; i < dayWindows.length; i++) {
      const w = dayWindows[i]
      if (w.start >= f.end) break
      const start = f.start > w.start ? f.start : w.start
      const end = f.end < w.end ? f.end : w.end
      if (end <= start) continue
      const ms = end.toMillis() - start.toMillis()
      const count = Math.floor(ms / stepMs)
      for (let j = 0; j < count; j++) {
        const s = start.plus({ minutes: j * durationMinutes })
        const e = s.plus(duration)
        // Interval check guards against calendar-math edge cases (e.g. DST)
        // producing a slot of the wrong wall-clock length.
        if (e <= end && Interval.fromDateTimes(s, e).length('minutes') === durationMinutes) {
          results.push({
            start: s.toUTC().toISO({ suppressMilliseconds: true }),
            end: e.toUTC().toISO({ suppressMilliseconds: true })
          })
        }
      }
    }
  }

  return results
}
export default findAvailableSlots;
|
||||
// Generation time: 9.884s
|
||||
// Result: PASS
|
||||
27
tests/8_json_validator/outputs/openai_gpt-5.4.js
Normal file
27
tests/8_json_validator/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,27 @@
|
||||
/**
 * Validates `data` against a JSON Schema using Ajv with ajv-formats.
 *
 * @param {*} data - value to validate.
 * @param {object} schema - JSON Schema document.
 * @returns {Promise<{valid: boolean, errors: string[]}>} never rejects;
 *   loader and compile failures are reported through the errors array.
 */
export async function validateJSON(data, schema) {
  try {
    // Fix: use jsDelivr's /+esm endpoints (as every other module here does).
    // The previous dist/ajv.min.js / dist/ajv-formats.min.js paths are UMD
    // browser bundles with no ESM default export, so `Ajv` came back
    // undefined and the catch below silently reported failure.
    const [{ default: Ajv }, { default: addFormats }] = await Promise.all([
      import('https://cdn.jsdelivr.net/npm/ajv@8/+esm'),
      import('https://cdn.jsdelivr.net/npm/ajv-formats@3/+esm')
    ])
    const ajv = new Ajv({ allErrors: true, strict: false })
    addFormats(ajv) // enables "format" keywords like date-time, email, uri
    const validate = ajv.compile(schema)
    const valid = !!validate(data)
    return {
      valid,
      // Render each Ajv error as "<path> <message>", falling back to the
      // schema path or a generic message when no instance path exists.
      errors: valid ? [] : (validate.errors || []).map(e => {
        const path = e.instancePath || e.schemaPath || ''
        return path ? `${path} ${e.message}` : e.message || 'Validation error'
      })
    }
  } catch (e) {
    // Invalid schemas and loader failures surface as a normal result.
    return {
      valid: false,
      errors: [e?.message || 'Schema validation failed']
    }
  }
}
export default validateJSON;
|
||||
// Generation time: 2.663s
|
||||
// Result: FAIL
|
||||
48
tests/9_stream_visualizer/outputs/openai_gpt-5.4.js
Normal file
48
tests/9_stream_visualizer/outputs/openai_gpt-5.4.js
Normal file
@@ -0,0 +1,48 @@
|
||||
// Consumes an async iterable of { timestamp, value } samples, keeps the
// newest maxPoints valid entries with their exponential moving average,
// and returns the retained data plus an SVG path of the EMA curve.
export async function createStreamVisualizer(asyncIterable, options = {}) {
  const {
    maxPoints = 500,
    alpha = 0.2,
    width = 800,
    height = 300,
    yDomain = [0, 1]
  } = options;

  const d3 = await import('https://cdn.jsdelivr.net/npm/d3@7/+esm');

  const capacity = Math.max(1, maxPoints | 0);
  // Clamp the smoothing factor into [0, 1].
  const smoothing = Math.min(1, Math.max(0, Number(alpha)));

  const data = [];
  let ema;
  for await (const sample of asyncIterable) {
    const timestamp = Number(sample?.timestamp);
    const value = Number(sample?.value);
    if (!Number.isFinite(timestamp) || !Number.isFinite(value)) continue;

    if (ema === undefined) ema = value;
    else ema = smoothing * value + (1 - smoothing) * ema;

    data.push({ timestamp, value, ema });
    // Sliding window: drop the oldest samples beyond capacity.
    while (data.length > capacity) data.shift();
  }

  if (data.length === 0) return { data, path: '' };

  // Widen a zero-length time span so the x scale stays well-defined.
  const first = data[0].timestamp;
  const last = data[data.length - 1].timestamp;
  const domain = first === last ? [first - 1, last + 1] : [first, last];

  const x = d3.scaleLinear().domain(domain).range([0, width]);
  const y = d3.scaleLinear().domain(yDomain).range([height, 0]);

  const line = d3.line().x(d => x(d.timestamp)).y(d => y(d.ema));
  return { data, path: line(data) || '' };
}
export default createStreamVisualizer;
|
||||
// Generation time: 4.457s
|
||||
// Result: PASS
|
||||
Reference in New Issue
Block a user