Add per-request AI logging, DB batch queue, WS entity updates, and UI polish
- log_thread.py: thread-safe ContextVar bridge so executor threads can log
individual LLM calls and archive searches back to the event loop
- ai_log.py: init_thread_logging(), notify_entity_update(); WS now pushes
entity_update messages when book data changes after any plugin or batch run
- batch.py: replace batch_pending.json with batch_queue SQLite table;
run_batch_consumer() reads queue dynamically so new books can be added
while batch is running; add_to_queue() deduplicates
- migrate.py: fix _migrate_v1 (clear-on-startup bug); add _migrate_v2 for
batch_queue table
- _client.py / archive.py / identification.py: wrap each LLM API call and
archive search with log_thread start/finish entries
- api.py: POST /api/batch returns {already_running, added}; notify_entity_update
after identify pipeline
- models.default.yaml: strengthen ai_identify confidence-scoring instructions;
warn against placeholder data
- detail-render.js: book log entries show clickable ID + spine thumbnail;
book spine/title images open full-screen popup
- events.js: batch-start handles already_running+added; open-img-popup action
- init.js: entity_update WS handler; image popup close listeners
- overlays.css / index.html: full-screen image popup overlay
- eslint.config.js: add new globals; fix no-redeclare/no-unused-vars for
multi-file global architecture; all lint errors resolved
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -10,16 +10,18 @@
|
||||
 * renderDetail() does a cheaper in-place update of the right panel only,
 * used during plugin runs and field edits to avoid re-rendering the sidebar.
 *
 * Depends on: S, _plugins, _batchState, _batchWs, _aiLogWs, _aiLog (state.js);
 *   req, toast (api.js / helpers.js); isDesktop (helpers.js);
 *   vApp, vDetailBody, mainTitle, mainHeaderBtns, vBatchBtn, vAiIndicator
 *   (tree-render.js / detail-render.js);
 *   attachEditables, initSortables (editing.js);
 *   setupDetailCanvas (canvas-boundary.js)
 *
 * Provides: render(), renderDetail(), loadConfig(), connectBatchWs(),
 *   connectAiLogWs(), loadTree()
 */

/* exported render, renderDetail, connectBatchWs, connectAiLogWs, loadTree */
||||
// ── Full re-render ────────────────────────────────────────────────────────────
|
||||
function render() {
|
||||
if (document.activeElement?.contentEditable === 'true') return;
|
||||
@@ -37,46 +39,121 @@ function renderDetail() {
|
||||
const body = document.getElementById('main-body');
|
||||
if (body) body.innerHTML = vDetailBody();
|
||||
const t = document.getElementById('main-title');
|
||||
if (t) t.innerHTML = mainTitle(); // innerHTML: mainTitle() returns an HTML span
|
||||
if (t) t.innerHTML = mainTitle(); // innerHTML: mainTitle() returns an HTML string
|
||||
const hb = document.getElementById('main-hdr-btns');
|
||||
if (hb) hb.innerHTML = mainHeaderBtns();
|
||||
const bb = document.getElementById('main-hdr-batch');
|
||||
if (bb) bb.innerHTML = vBatchBtn();
|
||||
attachEditables(); // pick up the new editable span in the header
|
||||
attachEditables(); // pick up the new editable span in the header
|
||||
requestAnimationFrame(setupDetailCanvas);
|
||||
}
|
||||
|
||||
// ── Data loading ──────────────────────────────────────────────────────────────
|
||||
/**
 * Fetch /api/config and publish UI tunables as window globals
 * (_grabPx, _confidenceThreshold, _aiLogMax) plus the plugin list.
 * On any request failure, fall back to the same defaults the server
 * would have sent, so the UI stays usable.
 */
async function loadConfig() {
  try {
    const cfg = await req('GET', '/api/config');
    window._grabPx = cfg.boundary_grab_px ?? 14;
    window._confidenceThreshold = cfg.confidence_threshold ?? 0.8;
    window._aiLogMax = cfg.ai_log_max_entries ?? 100;
    _plugins = cfg.plugins || [];
  } catch {
    // Offline / server error: mirror the server-side defaults.
    window._grabPx = 14;
    window._confidenceThreshold = 0.8;
    window._aiLogMax = 100;
    _plugins = []; // keep the plugin list a valid array for the renderers
  }
}
|
||||
|
||||
function startBatchPolling() {
|
||||
if (_batchPollTimer) clearInterval(_batchPollTimer);
|
||||
_batchPollTimer = setInterval(async () => {
|
||||
try {
|
||||
const st = await req('GET', '/api/batch/status');
|
||||
_batchState = st;
|
||||
const bb = document.getElementById('main-hdr-batch');
|
||||
if (bb) bb.innerHTML = vBatchBtn();
|
||||
if (!st.running) {
|
||||
clearInterval(_batchPollTimer); _batchPollTimer = null;
|
||||
toast(`Batch: ${st.done} done, ${st.errors} errors`);
|
||||
await loadTree();
|
||||
/**
 * Open (or re-open) the /ws/batch WebSocket and mirror batch progress
 * into _batchState and the header batch button. When the server reports
 * the batch is no longer running, close the socket, toast a summary,
 * and reload the tree so updated book data appears.
 *
 * Note: close()/error events fire asynchronously, so handlers belonging
 * to a socket we have already replaced may run AFTER `_batchWs = ws`
 * below. Each handler therefore checks `_batchWs === ws` before nulling
 * the global, otherwise a stale socket's onclose would clobber the live
 * connection's reference.
 */
function connectBatchWs() {
  if (_batchWs) {
    _batchWs.close();
    _batchWs = null;
  }
  const proto = location.protocol === 'https:' ? 'wss:' : 'ws:';
  const ws = new WebSocket(`${proto}//${location.host}/ws/batch`);
  _batchWs = ws;
  ws.onmessage = async (ev) => {
    const st = JSON.parse(ev.data);
    _batchState = st;
    const bb = document.getElementById('main-hdr-batch');
    if (bb) bb.innerHTML = vBatchBtn();
    if (!st.running) {
      ws.close();
      if (_batchWs === ws) _batchWs = null; // don't clobber a newer socket
      toast(`Batch: ${st.done} done, ${st.errors} errors`);
      await loadTree();
    }
  };
  ws.onerror = () => {
    if (_batchWs === ws) _batchWs = null;
  };
  ws.onclose = () => {
    if (_batchWs === ws) _batchWs = null;
  };
}
|
||||
|
||||
/**
 * Open the /ws/ai-log WebSocket feeding the per-request AI activity log.
 *
 * Message types handled:
 *   snapshot      — full replacement of _aiLog
 *   update        — upsert one entry by id; the log is capped at
 *                   window._aiLogMax entries (oldest dropped first)
 *   entity_update — server-side entity data changed after a plugin/batch
 *                   run; patch the matching tree node in place and
 *                   re-render the affected panel
 *
 * Reconnects automatically 3 s after the socket closes, but only if this
 * socket is still the current one (_aiLogWs === ws) — otherwise two calls
 * to connectAiLogWs() would spawn duplicate reconnect loops that never die.
 */
function connectAiLogWs() {
  const proto = location.protocol === 'https:' ? 'wss:' : 'ws:';
  const ws = new WebSocket(`${proto}//${location.host}/ws/ai-log`);
  _aiLogWs = ws;
  ws.onmessage = (ev) => {
    const msg = JSON.parse(ev.data);
    if (msg.type === 'snapshot') {
      _aiLog = msg.entries || [];
    } else if (msg.type === 'update') {
      const entry = msg.entry;
      const idx = _aiLog.findIndex((e) => e.id === entry.id);
      if (idx >= 0) {
        _aiLog[idx] = entry;
      } else {
        _aiLog.push(entry);
        const max = window._aiLogMax ?? 100;
        if (_aiLog.length > max) _aiLog.splice(0, _aiLog.length - max);
      }
    } else if (msg.type === 'entity_update') {
      const etype = msg.entity_type.slice(0, -1); // "books" → "book"
      walkTree((n) => {
        if (n.id === msg.entity_id) Object.assign(n, msg.data);
      });
      if (S.selected && S.selected.type === etype && S.selected.id === msg.entity_id) {
        renderDetail();
      } else {
        render(); // update sidebar badges
      }
      return; // skip AI indicator update — not a log entry
    }
    // Update header AI indicator with the count of in-flight requests.
    const hdr = document.getElementById('hdr-ai-indicator');
    if (hdr) {
      const running = _aiLog.filter((e) => e.status === 'running').length;
      hdr.innerHTML = running > 0 ? vAiIndicator(running) : '';
    }
    // Root detail panel shows the log; refresh it when nothing is selected.
    if (!S.selected) renderDetail();
  };
  ws.onerror = () => {};
  ws.onclose = () => {
    // Reconnect after a short delay — only for the current socket.
    if (_aiLogWs === ws) setTimeout(connectAiLogWs, 3000);
  };
}
|
||||
|
||||
/**
 * Reload the full library tree from the server into S.tree and
 * re-render the whole UI.
 */
async function loadTree() {
  S.tree = await req('GET', '/api/tree');
  render();
}
|
||||
|
||||
// ── Init ──────────────────────────────────────────────────────────────────────

// Image popup: close when clicking the overlay background or the × button.
(function () {
  const popup = document.getElementById('img-popup');
  const closeBtn = document.getElementById('img-popup-close');
  if (popup) {
    popup.addEventListener('click', (e) => {
      // Only a click on the dimmed backdrop itself closes the popup;
      // clicks on the image inside the overlay are ignored.
      if (e.target === popup) popup.classList.remove('open');
    });
  }
  if (closeBtn) {
    closeBtn.addEventListener('click', () => popup && popup.classList.remove('open'));
  }
})();

// Load config and tree in parallel, then attach the AI-log socket so the
// header indicator already has window._aiLogMax available. Surface init
// failures instead of leaving a floating rejected promise.
Promise.all([loadConfig(), loadTree()])
  .then(() => connectAiLogWs())
  .catch((err) => console.error('init failed', err));
|
||||
|
||||
Reference in New Issue
Block a user