Mirror of https://github.com/multipleof4/sune.git
Synced 2026-01-14 08:38:00 +00:00
Update sw.js
public/sw.js (341)
@@ -1,90 +1,275 @@
Removed: the previous sw.js, a minimal worker that only reported whether it had been restarted (killed).

// /sw.js - minimal worker that reports if it was restarted (killed)
'use strict';

const DB_NAME = 'sune-sw-db';
const STORE = 'kv';
const KEY = 'lastSession';

// tiny IndexedDB helpers
function idbOpen() {
  return new Promise((resolve, reject) => {
    const r = indexedDB.open(DB_NAME, 1);
    r.onupgradeneeded = () => {
      r.result.createObjectStore(STORE);
    };
    r.onsuccess = () => resolve(r.result);
    r.onerror = () => reject(r.error || new Error('idb open error'));
  });
}

function idbGet(key) {
  return idbOpen().then(db => new Promise((res, rej) => {
    const tx = db.transaction(STORE, 'readonly');
    const req = tx.objectStore(STORE).get(key);
    req.onsuccess = () => res(req.result);
    req.onerror = () => rej(req.error);
  }));
}

function idbSet(key, val) {
  return idbOpen().then(db => new Promise((res, rej) => {
    const tx = db.transaction(STORE, 'readwrite');
    const req = tx.objectStore(STORE).put(val, key);
    req.onsuccess = () => res(true);
    req.onerror = () => rej(req.error);
  }));
}

// lightweight session identity
const SESSION_ID = Date.now().toString(36) + '-' + Math.floor(Math.random() * 1e9).toString(36);
const STARTED_AT = Date.now();

self.addEventListener('install', ev => {
  // activate immediately so the page can become controlled quickly
  self.skipWaiting();
});

self.addEventListener('activate', ev => {
  // claim clients so the page becomes controlled without reload where possible
  ev.waitUntil(self.clients.claim());
});

// respond to messages (works with MessageChannel from the sune)
self.addEventListener('message', ev => {
  const data = ev.data || {};
  // only handle PING to keep this tiny
  if (data.type !== 'PING') return;

  const respond = async () => {
    try {
      const last = await idbGet(KEY); // may be undefined on first run
      const restarted = !!last && last !== SESSION_ID; // true if there was a previous session different than this one
      // store current session id for subsequent comparisons
      await idbSet(KEY, SESSION_ID);

      const payload = {
        type: 'PONG',
        ts: Date.now(),
        sessionId: SESSION_ID,
        lastSessionId: last || null,
        restarted: restarted,
        uptimeMs: Date.now() - STARTED_AT,
        ok: true
      };

      // prefer replying on the provided port
      if (ev.ports && ev.ports[0]) {
        ev.ports[0].postMessage(payload);
      } else {
        // fallback: postMessage to all clients
        const clients = await self.clients.matchAll({includeUncontrolled: true});
        clients.forEach(c => c.postMessage(payload));
      }
    } catch (err) {
      const errPayload = {type:'PONG', ok:false, error: String(err), ts: Date.now()};
      if (ev.ports && ev.ports[0]) ev.ports[0].postMessage(errPayload);
    }
  };

  // ensure the work completes even if the worker might otherwise be stopped
  if (ev.waitUntil) ev.waitUntil(Promise.resolve(respond()));
  else respond();
});
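For reference, a page can probe this worker with the PING/PONG protocol that both the old and the new worker implement. Below is a minimal page-side sketch, not part of the commit: it assumes the page is served under the worker's scope and uses a MessageChannel so the reply comes back on a dedicated port. With the old worker the PONG also carried `restarted`, `sessionId`, `lastSessionId` and `uptimeMs`; the new worker replies with a simpler `{ type: 'PONG', ts, ok }`.

// Hypothetical page-side helper (not part of this commit): send PING to the
// active service worker and await the PONG it posts back on the channel port.
async function pingServiceWorker(timeoutMs = 3000) {
  const reg = await navigator.serviceWorker.ready;
  const sw = reg.active;
  if (!sw) return null; // no active worker yet

  return new Promise(resolve => {
    const channel = new MessageChannel();
    const timer = setTimeout(() => resolve(null), timeoutMs); // no reply: worker unreachable
    channel.port1.onmessage = e => { clearTimeout(timer); resolve(e.data); };
    sw.postMessage({ type: 'PING' }, [channel.port2]);
  });
}

// Usage example: pingServiceWorker().then(pong => console.log('restarted?', pong && pong.restarted));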
Added: the new sw.js, a drop-in worker that tees streaming chat responses and writes progress to localForage (threads_v1).

// sw.js
// Drop-in service worker to tee streaming chat responses and write progress to localForage (threads_v1).
// - Adjust TARGET_SUBSTRING if your stream URL differs.
// - The SW writes messages tagged with `sw_streamId` to avoid clobbering unrelated messages.

importScripts('https://cdn.jsdelivr.net/npm/localforage@1.10.0/dist/localforage.min.js');

const TARGET_SUBSTRING = 'openrouter.ai/api/v1/chat/completions'; // change if needed
const THREADS_KEY = 'threads_v1'; // matches your index
const BUFFER_SAVE_BYTES = 32 * 1024; // save every ~32KB of new data (tune)
const SAVE_INTERVAL_MS = 2000; // or at least every 2s even if buffer < size

// utils
const gid = () => Math.random().toString(36).slice(2, 9) + '-' + Date.now().toString(36);
function now() { return Date.now(); }

async function readThreads() {
  try {
    const v = await localforage.getItem(THREADS_KEY);
    return Array.isArray(v) ? v : [];
  } catch (e) {
    console.error('sw: idb read error', e);
    return [];
  }
}

async function writeThreads(arr) {
  try {
    await localforage.setItem(THREADS_KEY, arr);
  } catch (e) {
    console.error('sw: idb write error', e);
    throw e;
  }
}

// choose a thread to attach progress to
function pickThread(threads) {
  if (!threads || threads.length === 0) return null;
  // prefer newest updatedAt
  threads.sort((a,b) => (b.updatedAt||0) - (a.updatedAt||0));
  return threads[0];
}

// update or append assistant message for streamId
async function upsertStreamMessage(streamId, text, meta = {}) {
  // read-modify-write
  const threads = await readThreads();
  let th = pickThread(threads);
  const createdNow = now();

  if (!th) {
    // create a new thread if none found
    th = {
      id: 'sw-' + gid(),
      title: 'Missed while backgrounded',
      pinned: false,
      updatedAt: createdNow,
      messages: []
    };
    threads.unshift(th);
  }

  // look for existing message with sw_streamId
  let msgIndex = -1;
  for (let i = th.messages.length - 1; i >= 0; i--) {
    const m = th.messages[i];
    if (m && m.sw_streamId === streamId) { msgIndex = i; break; }
  }

  const contentParts = [{ type: 'text', text }];
  if (msgIndex >= 0) {
    // update message content
    const existing = th.messages[msgIndex];
    existing.content = text;
    existing.contentParts = contentParts;
    existing.updatedAt = createdNow;
    existing._sw_lastSave = createdNow;
    existing._sw_meta = Object.assign({}, existing._sw_meta || {}, meta);
  } else {
    // append new assistant message (tag with sw_streamId)
    const msg = {
      id: 'swmsg-' + gid(),
      role: 'assistant',
      content: text,
      contentParts,
      kind: 'assistant',
      sw_saved: true,
      sw_streamId: streamId,
      createdAt: createdNow,
      updatedAt: createdNow,
      _sw_meta: Object.assign({}, meta)
    };
    th.messages.push(msg);
  }
  th.updatedAt = createdNow;

  // write back
  await writeThreads(threads);
  return { threadId: th.id };
}

async function finalizeStream(streamId, meta = {}) {
  // mark the message as complete; put complete flag in _sw_meta
  const threads = await readThreads();
  const th = pickThread(threads);
  if (!th) return;
  for (let i = th.messages.length - 1; i >= 0; i--) {
    const m = th.messages[i];
    if (m && m.sw_streamId === streamId) {
      m._sw_meta = Object.assign({}, m._sw_meta || {}, meta, { completeAt: now() });
      m.updatedAt = now();
      th.updatedAt = now();
      break;
    }
  }
  await writeThreads(threads);
  // inform clients
  const clientsList = await self.clients.matchAll({ includeUncontrolled: true, type: 'window' });
  clientsList.forEach(c => {
    try { c.postMessage({ type: 'stream-saved', streamId, meta }); } catch(e){ }
  });
}

// notify utility
async function notifyClients(msg) {
  try {
    const list = await self.clients.matchAll({ includeUncontrolled: true, type: 'window' });
    for (const c of list) {
      try { c.postMessage(msg); } catch (e) {}
    }
  } catch (e) {
    /* ignore */
  }
}

// fetch handler: tee and save
self.addEventListener('fetch', event => {
  try {
    const url = event.request.url || '';
    if (!url.includes(TARGET_SUBSTRING)) {
      return; // not our target; let it pass through
    }

    event.respondWith((async () => {
      // perform the upstream fetch
      const upstream = await fetch(event.request);

      // if no streaming body, return upstream
      if (!upstream || !upstream.body) return upstream;

      const streamId = 'swstream-' + gid();
      const headers = new Headers(upstream.headers);

      // tee so one stream goes to client, one consumed by SW
      const [clientStream, swStream] = upstream.body.tee();

      // background save logic:
      const savePromise = (async () => {
        try {
          const reader = swStream.getReader();
          const dec = new TextDecoder('utf-8');
          let bufferText = '';
          let bufferedBytes = 0;
          let lastSaveAt = 0;

          const saveIfNeeded = async (force = false) => {
            const nowMs = Date.now();
            if (!force && bufferedBytes < BUFFER_SAVE_BYTES && (nowMs - lastSaveAt) < SAVE_INTERVAL_MS) return;
            // upsert into threads
            try {
              await upsertStreamMessage(streamId, bufferText, { partialBytes: bufferedBytes, savedAt: Date.now() });
              lastSaveAt = nowMs;
            } catch (e) {
              console.error('sw: upsert save error', e);
            }
          };

          while (true) {
            const { value, done } = await reader.read();
            if (done) break;
            // value is Uint8Array; decode incrementally
            const chunkText = dec.decode(value, { stream: true });
            bufferText += chunkText;
            bufferedBytes += (value && value.byteLength) ? value.byteLength : chunkText.length;
            // try saving periodically
            await saveIfNeeded(false);
          }

          // final save + finalize
          await saveIfNeeded(true);
          await finalizeStream(streamId, { totalBytes: bufferedBytes });
        } catch (err) {
          console.error('sw: error saving stream', err);
          // try to mark failure
          try { await finalizeStream(streamId, { error: String(err) }); } catch(e){ }
        }
      })();

      // keep worker alive while saving
      event.waitUntil(savePromise);

      // return response to client using the clientStream
      return new Response(clientStream, {
        status: upstream.status,
        statusText: upstream.statusText,
        headers
      });
    })());
  } catch (err) {
    // if anything goes wrong, let the request fallback
    console.error('sw: fetch handler error', err);
  }
});

// ping/pong: support both port reply and broadcast reply
self.addEventListener('message', event => {
  const data = event.data || {};
  try {
    if (data && data.type === 'PING') {
      // prefer replying on message port if provided
      if (event.ports && event.ports[0]) {
        try {
          event.ports[0].postMessage({ type: 'PONG', ts: Date.now(), ok: true });
        } catch (e) { /* ignore */ }
      } else {
        // fallback: send message back to the source (if possible) or broadcast to clients
        if (event.source && typeof event.source.postMessage === 'function') {
          try { event.source.postMessage({ type: 'PONG', ts: Date.now(), ok: true }); } catch(e) {}
        } else {
          notifyClients({ type: 'PONG', ts: Date.now(), ok: true });
        }
      }
      return;
    }

    // support a client request to list SW-saved streams or threads
    if (data && data.type === 'list-sw-streams') {
      (async () => {
        const threads = await readThreads();
        // collect messages that have sw_streamId
        const found = [];
        for (const t of (threads||[])) {
          for (const m of (t.messages||[])) {
            if (m && m.sw_streamId) found.push({ threadId: t.id, threadTitle: t.title, messageId: m.id, sw_streamId: m.sw_streamId, summary: (m.content||'').slice(0,200), updatedAt: m.updatedAt });
          }
        }
        // reply to the source if possible, otherwise broadcast
        if (event.ports && event.ports[0]) {
          event.ports[0].postMessage({ type: 'sw-streams-list', streams: found });
        } else if (event.source && typeof event.source.postMessage === 'function') {
          event.source.postMessage({ type: 'sw-streams-list', streams: found });
        } else {
          notifyClients({ type: 'sw-streams-list', streams: found });
        }
      })();
      return;
    }

    // other messages: ignore or log
  } catch (e) {
    console.error('sw: message handler error', e);
  }
});

// basic install/activate to claim clients quickly
self.addEventListener('install', event => {
  self.skipWaiting();
});

self.addEventListener('activate', event => {
  event.waitUntil((async () => {
    try {
      await self.clients.claim();
    } catch(e) { /* ignore */ }
  })());
});
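Page-side wiring that would pair with the new worker, sketched below and not part of the commit: register the worker, then listen for the `stream-saved` broadcast it posts after finalizing a stream so the UI can re-read `threads_v1` from localForage. Note that the fetch handler only intercepts requests whose URL contains TARGET_SUBSTRING; everything else passes through untouched. The '/sw.js' path and the refresh step are assumptions.

// Hypothetical page-side setup (not part of this commit).
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('/sw.js').catch(err => console.error('sw register failed', err));

  // The worker broadcasts { type: 'stream-saved', streamId, meta } once a teed
  // stream has been fully written to the threads_v1 store.
  navigator.serviceWorker.addEventListener('message', event => {
    const data = event.data || {};
    if (data.type === 'stream-saved') {
      console.log('stream persisted by SW', data.streamId, data.meta);
      // e.g. re-read threads_v1 from localForage here and refresh the thread list
    }
  });
}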
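Similarly, a page could ask the worker which messages it has saved on its behalf by sending `list-sw-streams`; the worker replies with `sw-streams-list`, either on a provided port or by broadcast. A sketch using a MessageChannel, with a hypothetical helper name not taken from the commit:

// Hypothetical helper: request the list of SW-saved stream messages.
async function listSwStreams() {
  const reg = await navigator.serviceWorker.ready;
  const sw = reg.active;
  if (!sw) return [];

  return new Promise(resolve => {
    const channel = new MessageChannel();
    channel.port1.onmessage = e => {
      const data = e.data || {};
      resolve(data.type === 'sw-streams-list' ? (data.streams || []) : []);
    };
    sw.postMessage({ type: 'list-sw-streams' }, [channel.port2]);
  });
}

// Each entry carries threadId, threadTitle, messageId, sw_streamId, a 200-character
// content summary and updatedAt, matching what the worker's message handler collects.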