Refine story summary prompts and vector sync

This commit is contained in:
2026-01-30 00:55:04 +08:00
parent 6aaed2af4a
commit d87c8a0207
7 changed files with 1174 additions and 432 deletions

View File

@@ -60,6 +60,7 @@ import {
updateMeta,
saveEventVectors as saveEventVectorsToDb,
clearEventVectors,
deleteEventVectorsByIds,
clearAllChunks,
saveChunks,
saveChunkVectors,
@@ -506,6 +507,91 @@ async function handleGenerateVectors(vectorCfg) {
xbLog.info(MODULE_ID, `向量生成完成: L1=${l1Vectors.filter(Boolean).length}, L2=${l2VectorItems.length}`);
}
// ═══════════════════════════════════════════════════════════════════════════
// L2 自动增量向量化(总结完成后调用)
// ═══════════════════════════════════════════════════════════════════════════
/**
 * Incrementally embed and persist L2 vectors for the events just produced by
 * a summary run. Best-effort: every failure is logged and swallowed so the
 * summarization flow is never blocked.
 * @param {string[]} newEventIds - ids of events added by the latest summary
 */
async function autoVectorizeNewEvents(newEventIds) {
  if (!newEventIds?.length) return;

  const cfg = getVectorConfig();
  if (!cfg?.enabled) return;

  const { chatId } = getContext();
  if (!chatId) return;

  // When the local model is not loaded, skip quietly instead of blocking the
  // summary flow.
  if (cfg.engine === "local") {
    const modelId = cfg.local?.modelId || DEFAULT_LOCAL_MODEL;
    if (!isLocalModelLoaded(modelId)) {
      xbLog.warn(MODULE_ID, "L2 自动向量化跳过:本地模型未加载");
      return;
    }
  }

  const allEvents = getSummaryStore()?.json?.events || [];
  const wanted = new Set(newEventIds);

  // Keep only events created by this run that yield non-empty embed text.
  const pairs = [];
  for (const ev of allEvents) {
    if (!wanted.has(ev.id)) continue;
    const text = `${ev.title || ""} ${ev.summary || ""}`.trim();
    if (text) pairs.push({ id: ev.id, text });
  }
  if (!pairs.length) return;

  try {
    const fingerprint = getEngineFingerprint(cfg);
    // Local inference is heavier per call, so use smaller batches.
    const batchSize = cfg.engine === "local" ? 5 : 25;
    for (let start = 0; start < pairs.length; start += batchSize) {
      const batch = pairs.slice(start, start + batchSize);
      const vectors = await embed(batch.map((p) => p.text), cfg);
      const items = batch.map((p, i) => ({ eventId: p.id, vector: vectors[i] }));
      await saveEventVectorsToDb(chatId, items, fingerprint);
    }
    xbLog.info(MODULE_ID, `L2 自动增量完成: ${pairs.length} 个事件`);
    await sendVectorStatsToFrame();
  } catch (e) {
    // Best-effort: log and swallow so summarization is never interrupted.
    xbLog.error(MODULE_ID, "L2 自动向量化失败", e);
  }
}
// ═══════════════════════════════════════════════════════════════════════════
// L2 跟随编辑同步(用户编辑 events 时调用)
// ═══════════════════════════════════════════════════════════════════════════
/**
 * Keep L2 event vectors in sync after the user edits the events section:
 * vectors belonging to events that disappeared in the edit are deleted from
 * storage.
 *
 * Best-effort: failures are logged and swallowed. The caller invokes this
 * without awaiting it, so a rejection from the IndexedDB/frame calls would
 * otherwise surface as an unhandled promise rejection (the sibling
 * autoVectorizeNewEvents already guards its I/O the same way).
 * @param {Array<{id?: string}>} oldEvents - events before the edit
 * @param {Array<{id?: string}>} newEvents - events after the edit
 */
async function syncEventVectorsOnEdit(oldEvents, newEvents) {
  const vectorCfg = getVectorConfig();
  if (!vectorCfg?.enabled) return;
  const { chatId } = getContext();
  if (!chatId) return;
  const oldIds = new Set((oldEvents || []).map((e) => e.id).filter(Boolean));
  const newIds = new Set((newEvents || []).map((e) => e.id).filter(Boolean));
  // Ids present before the edit but missing after it belong to deleted events.
  const deletedIds = [...oldIds].filter((id) => !newIds.has(id));
  if (deletedIds.length === 0) return;
  try {
    await deleteEventVectorsByIds(chatId, deletedIds);
    xbLog.info(MODULE_ID, `L2 同步删除: ${deletedIds.length} 个事件向量`);
    await sendVectorStatsToFrame();
  } catch (e) {
    // Fire-and-forget caller: never let a rejection escape this function.
    xbLog.error(MODULE_ID, "L2 向量同步删除失败", e);
  }
}
// ═══════════════════════════════════════════════════════════════════════════
// 向量完整性检测(仅提醒,不自动操作)
// ═══════════════════════════════════════════════════════════════════════════
@@ -565,6 +651,7 @@ async function handleClearVectors() {
await clearAllChunks(chatId);
await updateMeta(chatId, { lastChunkFloor: -1 });
await sendVectorStatsToFrame();
await executeSlashCommand('/echo severity=info 向量数据已清除。如需恢复召回功能,请重新点击"生成向量"。');
xbLog.info(MODULE_ID, "向量数据已清除");
}
@@ -769,6 +856,11 @@ function openPanelForMessage(mesId) {
// ═══════════════════════════════════════════════════════════════════════════
async function getHideBoundaryFloor(store) {
// 没有总结时,不隐藏
if (store?.lastSummarizedMesId == null || store.lastSummarizedMesId < 0) {
return -1;
}
const vectorCfg = getVectorConfig();
if (!vectorCfg?.enabled) {
return store?.lastSummarizedMesId ?? -1;
@@ -845,7 +937,7 @@ async function autoRunSummaryWithRetry(targetMesId, configForRun) {
const result = await runSummaryGeneration(targetMesId, configForRun, {
onStatus: (text) => postToFrame({ type: "SUMMARY_STATUS", statusText: text }),
onError: (msg) => postToFrame({ type: "SUMMARY_ERROR", message: msg }),
onComplete: ({ merged, endMesId }) => {
onComplete: async ({ merged, endMesId, newEventIds }) => {
postToFrame({
type: "SUMMARY_FULL_DATA",
payload: {
@@ -860,6 +952,9 @@ async function autoRunSummaryWithRetry(targetMesId, configForRun) {
applyHideStateDebounced();
updateFrameStatsAfterSummary(endMesId, merged);
// L2 自动增量向量化
await autoVectorizeNewEvents(newEventIds);
},
});
@@ -1060,11 +1155,20 @@ function handleFrameMessage(event) {
const store = getSummaryStore();
if (!store) break;
store.json ||= {};
// 如果是 events先记录旧数据用于同步向量
const oldEvents = data.section === "events" ? [...(store.json.events || [])] : null;
if (VALID_SECTIONS.includes(data.section)) {
store.json[data.section] = data.data;
}
store.updatedAt = Date.now();
saveSummaryStore();
// 同步 L2 向量(删除被移除的事件)
if (data.section === "events" && oldEvents) {
syncEventVectorsOnEdit(oldEvents, data.data);
}
break;
}
@@ -1133,7 +1237,7 @@ async function handleManualGenerate(mesId, config) {
await runSummaryGeneration(mesId, config, {
onStatus: (text) => postToFrame({ type: "SUMMARY_STATUS", statusText: text }),
onError: (msg) => postToFrame({ type: "SUMMARY_ERROR", message: msg }),
onComplete: ({ merged, endMesId }) => {
onComplete: async ({ merged, endMesId, newEventIds }) => {
postToFrame({
type: "SUMMARY_FULL_DATA",
payload: {
@@ -1148,6 +1252,9 @@ async function handleManualGenerate(mesId, config) {
applyHideStateDebounced();
updateFrameStatsAfterSummary(endMesId, merged);
// L2 自动增量向量化
await autoVectorizeNewEvents(newEventIds);
},
});
@@ -1206,6 +1313,9 @@ async function handleMessageReceived() {
initButtonsForAll();
// 向量全量生成中时跳过 L1 sync避免竞争写入
if (vectorGenerating) return;
await syncOnMessageReceived(chatId, lastFloor, message, vectorConfig);
await maybeAutoBuildChunks();
@@ -1289,7 +1399,8 @@ async function handleGenerationStarted(type, _params, isDryRun) {
if (boundary < 0) return;
// 2) depth倒序插入从末尾往前数
const depth = chatLen - boundary - 1;
// 最小为 1避免插入到最底部导致 AI 看到的最后是总结
const depth = Math.max(1, chatLen - boundary - 1);
if (depth < 0) return;
// 3) 构建注入文本(保持原逻辑)