Update sw.js

2025-08-23 11:14:04 -07:00
committed by GitHub
parent 2cec196cae
commit 8364e33b6e


@@ -1,67 +1,86 @@
// /sw.js (drop in at root)
// Debug service worker — tee & write to localforage and expose debug commands.

const LF_CDN = 'https://cdn.jsdelivr.net/npm/localforage@1.10.0/dist/localforage.min.js';
const THREADS_KEY = 'threads_v1';
const TARGET_SUBSTRING = 'openrouter.ai/api/v1/chat/completions'; // adjust if needed
const SAVE_BYTES_THRESHOLD = 6 * 1024; // ~6KB
const SAVE_TIME_THRESHOLD = 800; // ms
const BROADCAST_THROTTLE_MS = 600;

const gid = () => Math.random().toString(36).slice(2,9) + '-' + Date.now().toString(36);
const now = () => Date.now();

let localforageAvailable = false;
let lfLoadError = null;

// Attempt to import localforage
try {
  importScripts(LF_CDN);
  if (self.localforage) {
    localforageAvailable = true;
    // configure a name to avoid collisions (optional)
    try {
      localforage.config({ name: 'sw-localforage' });
    } catch (e) {}
  } else {
    lfLoadError = 'localforage not present after importScripts';
  }
} catch (e) {
  lfLoadError = String(e && e.message ? e.message : e);
}

// in-memory state for debug/status
const state = {
  totalIntercepted: 0,
  activeStreams: {},       // streamId -> meta
  lastStreamSummary: null,
  debugWrites: []          // ids of test threads written by SW
};

async function safeReadThreads() {
  if (!localforageAvailable) throw new Error('localforage not available: ' + lfLoadError);
  try {
    const v = await localforage.getItem(THREADS_KEY);
    return Array.isArray(v) ? v : [];
  } catch (err) {
    throw err;
  }
}

async function safeWriteThreads(arr) {
  if (!localforageAvailable) throw new Error('localforage not available: ' + lfLoadError);
  try {
    await localforage.setItem(THREADS_KEY, arr);
  } catch (err) {
    throw err;
  }
}

// pick last thread heuristic: newest updatedAt
function pickLastThread(threads) {
  if (!threads || threads.length === 0) return null;
  const copy = [...threads].sort((a, b) => (b.updatedAt || 0) - (a.updatedAt || 0));
  return copy[0] || null;
}

function upsertAssistantInThreadObj(threadObj, streamId, text) {
  threadObj.updatedAt = now();
  for (let i = threadObj.messages.length - 1; i >= 0; i--) {
    const m = threadObj.messages[i];
    if (m && m.sw_streamId === streamId) {
      m.content = text;
      m.contentParts = [{ type: 'text', text }];
      m.updatedAt = now();
      m._sw_savedAt = now();
      return threadObj;
    }
  }
  // append
  const msg = {
    id: 'swmsg-' + gid(),
    role: 'assistant',
    content: text,
    contentParts: [{ type: 'text', text }],
    kind: 'assistant',
    sw_saved: true,
    sw_streamId: streamId,
@@ -73,37 +92,33 @@ function upsertAssistantInThreadObj(threadObj, streamId, text) {
  return threadObj;
}

async function broadcast(msg) {
  try {
    const cl = await self.clients.matchAll({ includeUncontrolled: true, type: 'window' });
    for (const c of cl) {
      try { c.postMessage(msg); } catch (e) {}
    }
  } catch (e) {}
}

function logDebug(text) { // also broadcast small logs
  console.log('[sw-debug]', text);
  broadcast({ type: 'sw-debug-log', ts: now(), text: String(text) });
}

/* lifecycle */
self.addEventListener('install', (ev) => { self.skipWaiting(); });
self.addEventListener('activate', (ev) => { ev.waitUntil(self.clients.claim()); });

/* fetch handler: tee, accumulate, and repeatedly overwrite last thread */
self.addEventListener('fetch', (event) => {
  try {
    const url = String(event.request.url || '');
    if (!url.includes(TARGET_SUBSTRING)) return; // not target

    event.respondWith((async () => {
      const upstream = await fetch(event.request);
      if (!upstream || !upstream.body) return upstream;

      const streamId = 'sw-' + gid();
@@ -112,56 +127,45 @@ self.addEventListener('fetch', event => {
      state.activeStreams[streamId] = meta;
      broadcast({ type: 'sw-intercept-start', streamId, meta });

      const [clientStream, swStream] = upstream.body.tee();

      // save task
      const savePromise = (async () => {
        const reader = swStream.getReader();
        const decoder = new TextDecoder('utf-8');
        let accumulated = '';
        let sinceLastSaveBytes = 0;
        let lastSaveAt = 0;
        let lastBroadcastAt = 0;

        async function flushToLastThread(force = false) {
          try {
            const nowMs = now();
            if (!force && sinceLastSaveBytes < SAVE_BYTES_THRESHOLD && (nowMs - lastSaveAt) < SAVE_TIME_THRESHOLD) return;
            if (!localforageAvailable) {
              logDebug('flushToLastThread: localforage not available: ' + lfLoadError);
              return;
            }
            const threads = await safeReadThreads();
            let thread = pickLastThread(threads);
            if (!thread) {
              thread = { id: 'sw-thread-' + gid(), title: 'Missed while backgrounded', pinned: false, updatedAt: nowMs, messages: [] };
              threads.unshift(thread);
              logDebug('flush: created fallback thread ' + thread.id);
            }
            upsertAssistantInThreadObj(thread, streamId, accumulated);
            // write back (overwrite entire array)
            await safeWriteThreads(threads);
            sinceLastSaveBytes = 0;
            lastSaveAt = nowMs;
            // throttle broadcasts
            const now2 = Date.now();
            if (now2 - lastBroadcastAt > BROADCAST_THROTTLE_MS) {
              lastBroadcastAt = now2;
              broadcast({ type: 'sw-intercept-progress', streamId, meta: { bytes: meta.bytes, savedAt: lastSaveAt, snippet: accumulated.slice(-1024) } });
            }
          } catch (err) {
            logDebug('flushToLastThread error: ' + (err && err.message ? err.message : String(err)));
          }
        }
@@ -169,31 +173,24 @@ self.addEventListener('fetch', event => {
          while (true) {
            const { value, done } = await reader.read();
            if (done) break;
            let chunkText = '';
            try { chunkText = decoder.decode(value, { stream: true }); } catch (e) { try { chunkText = String(value); } catch (_) { chunkText = ''; } }
            accumulated += chunkText;
            const bytes = value ? (value.byteLength || 0) : chunkText.length;
            meta.bytes += bytes;
            meta.lastProgressAt = now();
            sinceLastSaveBytes += bytes;
            // flush if thresholds met
            await flushToLastThread(false);
          }

          // final flush
          await flushToLastThread(true);

          // finalize
          meta.status = 'finished';
          meta.endedAt = now();
          state.lastStreamSummary = { streamId, url: meta.url, startedAt: meta.startedAt, endedAt: meta.endedAt, totalBytes: meta.bytes };
          delete state.activeStreams[streamId];
          broadcast({ type: 'sw-intercept-end', streamId, meta: { totalBytes: meta.bytes, endedAt: meta.endedAt } });
        } catch (err) {
@@ -201,14 +198,12 @@ self.addEventListener('fetch', event => {
          meta.error = String(err && err.message ? err.message : err);
          delete state.activeStreams[streamId];
          broadcast({ type: 'sw-intercept-error', streamId, meta: { error: meta.error } });
          logDebug('savePromise error: ' + meta.error);
        }
      })();

      event.waitUntil(savePromise);

      return new Response(clientStream, {
        status: upstream.status,
        statusText: upstream.statusText,
@@ -216,15 +211,14 @@ self.addEventListener('fetch', event => {
      });
    })());
  } catch (err) {
    logDebug('fetch handler error: ' + (err && err.message ? err.message : String(err)));
  }
});

/* Message handler: PING, PING_STATUS, TEST_WRITE, CHECK_LF, LIST_SW_SAVED, CLEAR_TESTS */
self.addEventListener('message', (event) => {
  const data = event.data || {};
  try {
    if (data && data.type === 'PING') {
      if (event.ports && event.ports[0]) {
        event.ports[0].postMessage({ type: 'PONG', ts: now(), ok: true });
@@ -236,42 +230,131 @@ self.addEventListener('message', event => {
      return;
    }

    if (data && data.type === 'PING_STATUS') {
      const reply = {
        type: 'PONG_STATUS',
        ts: now(),
        totalIntercepted: state.totalIntercepted || 0,
        activeStreams: Object.entries(state.activeStreams).map(([id, m]) => ({ streamId: id, url: m.url, bytes: m.bytes, status: m.status, startedAt: m.startedAt })),
        lastStreamSummary: state.lastStreamSummary || null,
        lfAvailable: localforageAvailable,
        lfLoadError: lfLoadError
      };
      if (event.ports && event.ports[0]) event.ports[0].postMessage(reply);
      else if (event.source && event.source.postMessage) event.source.postMessage(reply);
      else broadcast(reply);
      return;
    }

    if (data && data.type === 'TEST_WRITE') {
      (async () => {
        if (!localforageAvailable) {
          const res = { type: 'TEST_WRITE_RESULT', ok: false, error: 'localforage not available: ' + lfLoadError };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else (event.source && event.source.postMessage ? event.source.postMessage(res) : broadcast(res));
          return;
        }
        try {
          const threads = await safeReadThreads();
          const tid = 'sw-test-' + gid();
          const nowMs = now();
          const testThread = {
            id: tid,
            title: 'SW test thread ' + nowMs,
            pinned: false,
            updatedAt: nowMs,
            messages: [
              { id: 'swtestmsg-' + gid(), role: 'assistant', content: 'sw test write @' + new Date(nowMs).toISOString(), contentParts: [{ type: 'text', text: 'sw test write @' + new Date(nowMs).toISOString() }], createdAt: nowMs, updatedAt: nowMs }
            ]
          };
          threads.unshift(testThread);
          await safeWriteThreads(threads);
          state.debugWrites = (state.debugWrites || []).concat(tid);
          const res = { type: 'TEST_WRITE_RESULT', ok: true, tid, now: nowMs };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else (event.source && event.source.postMessage ? event.source.postMessage(res) : broadcast(res));
          logDebug('TEST_WRITE created ' + tid);
        } catch (err) {
          const res = { type: 'TEST_WRITE_RESULT', ok: false, error: String(err && err.message ? err.message : err) };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else (event.source && event.source.postMessage ? event.source.postMessage(res) : broadcast(res));
          logDebug('TEST_WRITE error: ' + res.error);
        }
      })();
      return;
    }

    if (data && data.type === 'CHECK_LF') {
      (async () => {
        if (!localforageAvailable) {
          const res = { type: 'CHECK_LF_RESULT', ok: false, error: 'localforage not available: ' + lfLoadError };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else (event.source && event.source.postMessage ? event.source.postMessage(res) : broadcast(res));
          return;
        }
        try {
          const threads = await safeReadThreads();
          const res = { type: 'CHECK_LF_RESULT', ok: true, threadsCount: Array.isArray(threads) ? threads.length : 0, sample: (threads && threads[0]) ? threads[0] : null };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else (event.source && event.source.postMessage ? event.source.postMessage(res) : broadcast(res));
          logDebug('CHECK_LF returned ' + (Array.isArray(threads) ? threads.length : '?') + ' threads');
        } catch (err) {
          const res = { type: 'CHECK_LF_RESULT', ok: false, error: String(err && err.message ? err.message : err) };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else (event.source && event.source.postMessage ? event.source.postMessage(res) : broadcast(res));
          logDebug('CHECK_LF error: ' + res.error);
        }
      })();
      return;
    }

    if (data && data.type === 'LIST_SW_SAVED') {
      (async () => {
        if (!localforageAvailable) {
          const res = { type: 'LIST_SW_SAVED_RESULT', ok: false, error: 'localforage not available: ' + lfLoadError };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else (event.source && event.source.postMessage ? event.source.postMessage(res) : broadcast(res));
          return;
        }
        try {
          const threads = await safeReadThreads();
          const found = [];
          for (const t of (threads || [])) {
            for (const m of (t.messages || [])) {
              if (m && m.sw_streamId) found.push({ threadId: t.id, threadTitle: t.title, messageId: m.id, sw_streamId: m.sw_streamId, snippet: (m.content || '').slice(0, 200), updatedAt: m.updatedAt });
            }
          }
          const res = { type: 'LIST_SW_SAVED_RESULT', ok: true, found };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else (event.source && event.source.postMessage ? event.source.postMessage(res) : broadcast(res));
          logDebug('LIST_SW_SAVED returned ' + found.length + ' messages');
        } catch (err) {
          const res = { type: 'LIST_SW_SAVED_RESULT', ok: false, error: String(err && err.message ? err.message : err) };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else (event.source && event.source.postMessage ? event.source.postMessage(res) : broadcast(res));
          logDebug('LIST_SW_SAVED error: ' + res.error);
        }
      })();
      return;
    }

    if (data && data.type === 'CLEAR_TESTS') {
      (async () => {
        if (!localforageAvailable) {
          const res = { type: 'CLEAR_TESTS_RESULT', ok: false, error: 'localforage not available: ' + lfLoadError };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else broadcast(res);
          return;
        }
        try {
          const threads = await safeReadThreads();
          const before = threads.length;
          const cleaned = threads.filter(t => !(t.id && (String(t.id).startsWith('sw-test-') || String(t.id).startsWith('sw-thread-') || state.debugWrites.includes(t.id))));
          await safeWriteThreads(cleaned);
          const removed = before - cleaned.length;
          state.debugWrites = [];
          const res = { type: 'CLEAR_TESTS_RESULT', ok: true, removed };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else broadcast(res);
          logDebug('CLEAR_TESTS removed ' + removed);
        } catch (err) {
          const res = { type: 'CLEAR_TESTS_RESULT', ok: false, error: String(err && err.message ? err.message : err) };
          if (event.ports && event.ports[0]) event.ports[0].postMessage(res); else broadcast(res);
        }
      })();
      return;
    }
  } catch (err) {
    logDebug('message handler error: ' + (err && err.message ? err.message : String(err)));
  }
});