Refactor: Conversation-based windows with reason model + ntfy

Co-authored-by: Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-04-12 14:35:47 -07:00
parent 3d2f2c4fa8
commit 5b5a34d6e6

100
pink.js
View File

@@ -1,9 +1,39 @@
import { readdir, readFile, writeFile } from "fs/promises"
import { SEARCH_MODEL, JSON_ROLLING_WINDOW } from "./config.js"
import {
SEARCH_MODEL, REASON_MODEL, JSON_ROLLING_WINDOW,
REASON_MODEL_QUERY, REASON_MODEL_SYS_PROMPT
} from "./config.js"
const DIR = "?"
const files = (await readdir(DIR)).filter(f => f.endsWith(".json"))
/**
 * Send one chat-completion request to OpenRouter.
 *
 * @param {string} model - OpenRouter model identifier.
 * @param {Array<{role: string, content: string}>} messages - Conversation so far.
 * @param {string} [system] - Optional system prompt, prepended when provided.
 * @returns {Promise<string>} The first choice's content, or "No response.".
 * @throws {Error} When the HTTP response is not ok (status + body text).
 */
async function chat(model, messages, system) {
  const payload = {
    model,
    // A system prompt, when given, is prepended as the first message.
    messages: system
      ? [{ role: "system", content: system }, ...messages]
      : messages,
  }
  const res = await fetch("https://openrouter.ai/api/v1/chat/completions", {
    method: "POST",
    headers: {
      "Authorization": `Bearer ${process.env.OPENROUTER_KEY}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(payload),
  })
  if (!res.ok) throw new Error(`${res.status} ${await res.text()}`)
  const data = await res.json()
  return data.choices?.[0]?.message?.content ?? "No response."
}
/**
 * Publish a notification to the ntfy topic in NTFY_URL.
 *
 * @param {string} msg - Notification body text.
 * @param {number} priority - ntfy priority 1-5, sent via the Priority header.
 * @returns {Promise<void>} Resolves after the POST; warns instead of throwing
 *   when NTFY_URL is unset or the server rejects the publish.
 */
async function sendNtfy(msg, priority) {
  const url = process.env.NTFY_URL
  if (!url) return console.warn("!! NTFY_URL not set")
  const res = await fetch(url, {
    method: "POST",
    headers: { "Priority": String(priority) },
    body: msg,
  })
  // Fix: previously a rejected publish (4xx/5xx) still logged the success
  // line below — surface the failure instead of silently claiming delivery.
  if (!res.ok) return console.warn(`!! ntfy ${res.status}: ${await res.text()}`)
  console.log(`📨 ntfy (p${priority}): ${msg.slice(0, 80)}`)
}
// For each conversation file: ask the search model, let the reason model
// reflect over the transcript, optionally fire a ntfy notification, then
// persist the history trimmed to the rolling window.
for (const file of files) {
  const query = file.replace(/\.json$/, "")
  const path = `${DIR}/${file}`
  // NOTE(review): the diff elided a few lines here where `now` was defined;
  // reconstructed as an ISO timestamp — confirm against repo history.
  const now = new Date().toISOString()
  console.log(`>> ${query}`)

  // Load the stored conversation; tolerate a missing/empty/corrupt file.
  let messages = []
  try {
    const raw = await readFile(path, "utf-8")
    const parsed = JSON.parse(raw || "[]")
    if (Array.isArray(parsed)) messages = parsed
  } catch {}

  // --- Search model: answer the query with past windows as context ---
  const searchAnswer = await chat(SEARCH_MODEL, [
    ...messages,
    { role: "user", content: query },
  ]).catch(e => { console.error(`!! search ${file}: ${e.message}`); return null })
  if (!searchAnswer) continue
  messages.push(
    { role: "user", content: query },
    { role: "assistant", content: `${now}: ${searchAnswer}` },
  )

  // --- Reason model: reflect over the updated transcript ---
  const reasonAnswer = await chat(REASON_MODEL, [
    ...messages,
    { role: "user", content: REASON_MODEL_QUERY },
  ], REASON_MODEL_SYS_PROMPT).catch(e => {
    console.error(`!! reason ${file}: ${e.message}`)
    return null
  })
  if (!reasonAnswer) continue
  messages.push(
    { role: "user", content: REASON_MODEL_QUERY },
    { role: "assistant", content: reasonAnswer },
  )

  // --- Optional notification: a fenced "ntfy<1-5>" block in the reply ---
  const ntfyMatch = reasonAnswer.match(/```ntfy([1-5])\n([\s\S]*?)```/)
  // Fix: parseInt now passes an explicit radix.
  if (ntfyMatch) await sendNtfy(ntfyMatch[2].trim(), parseInt(ntfyMatch[1], 10))

  // --- Trim to the rolling window (each window = 4 messages:
  //     query, search answer, reason query, reason answer) ---
  const windowSize = 4
  const maxMessages = JSON_ROLLING_WINDOW * windowSize
  if (messages.length > maxMessages)
    messages = messages.slice(messages.length - maxMessages)

  await writeFile(path, JSON.stringify(messages, null, 2) + "\n")
  console.log(`<< ${file} (${messages.length / windowSize} windows)`)
}