// Overview dashboard const { useMemo: useMemoOV } = React; // Rolling 24-hour incident history. Primary source is the backend // (/api/incident-history) so every LAN device sees the same timeline // and the data survives browser-cache clears. localStorage is only a // tiny resilience layer for the current device when the API is // temporarily unreachable. const INCIDENT_HISTORY_KEY = 'pulse.incidentHistory'; const HISTORY_HOURS = 24; const HISTORY_REFRESH_MS = 60_000; function _readLocalHistory() { try { return JSON.parse(localStorage.getItem(INCIDENT_HISTORY_KEY) || '[]'); } catch (_) { return []; } } function _writeLocalHistory(samples) { try { localStorage.setItem(INCIDENT_HISTORY_KEY, JSON.stringify(samples)); } catch (_) { /* quota / private mode */ } } function IncidentHistoryBars({ currentCount }) { // Fetch authoritative history from backend on mount + every minute. // Fall back to whatever was last cached in localStorage if the fetch // fails (we never want empty bars just because a probe request took // a beat too long). const [samples, setSamples] = React.useState(() => _readLocalHistory()); React.useEffect(() => { let cancelled = false; async function pull() { try { const r = await fetch('/api/incident-history?hours=24', { cache: 'no-store' }); if (!r.ok) throw new Error(`http ${r.status}`); const body = await r.json(); if (cancelled) return; const s = Array.isArray(body.samples) ? body.samples : []; setSamples(s); _writeLocalHistory(s); } catch (_) { // keep the last good cached value } } pull(); const t = setInterval(pull, HISTORY_REFRESH_MS); return () => { cancelled = true; clearInterval(t); }; }, []); // Bucket samples into HISTORY_HOURS hourly slots. Index 23 is "now"; // index 0 is "23h ago". 
const buckets = React.useMemo(() => { const now = Date.now(); const slots = Array.from({length: HISTORY_HOURS}, () => ({ max: null, samples: 0 })); for (const d of samples) { // Accept both the backend schema ({ts, down, crit, off, ...}) and the // legacy localStorage schema ({ts, count}) so upgrading a tab doesn't // blank the chart. const count = (typeof d.down === 'number') ? d.down : (d.count ?? 0); const hoursAgo = Math.floor((now - d.ts) / (3600 * 1000)); const idx = HISTORY_HOURS - 1 - hoursAgo; if (idx >= 0 && idx < HISTORY_HOURS) { slots[idx].max = Math.max(slots[idx].max ?? 0, count); slots[idx].samples++; } } // The final slot always reflects the current live count so the // most recent bar updates immediately without waiting for a sample. slots[HISTORY_HOURS - 1].max = Math.max(slots[HISTORY_HOURS - 1].max ?? 0, currentCount); return slots; }, [samples, currentCount]); const maxCount = Math.max(1, ...buckets.map(b => b.max || 0)); return (
{buckets.map((b, i) => {
  // Geometry/labels for one hourly bar (i === HISTORY_HOURS-1 is "now").
  const hoursAgo = HISTORY_HOURS - 1 - i;
  const hasData = b.max !== null;
  const count = b.max || 0;
  // Bars scale 4..26px against the busiest hour; a 3px stub marks "no data".
  const h = hasData ? 4 + (count / maxCount) * 22 : 3;
  const color = count > 0 ? 'var(--crit)' : hasData ? 'var(--border-3)' : 'var(--border)';
  const when = hoursAgo === 0 ? 'now' : `${hoursAgo}h ago`;
  const label = hasData ? `${when} · peak ${count} incident${count === 1 ? '' : 's'}` : `${when} · no data yet`;
  return
; })}
); }

// Auto-scale bytes-per-second to a human-friendly unit. Uses binary
// multiples (KB=1024) like most Linux tools do for network rates.
// Returns { val, unit } as strings ready for display.
function _fmtRate(bps) {
  // `bps || 0` also normalizes NaN/undefined; Math.max clamps negatives.
  const v = Math.max(0, bps || 0);
  if (v < 1024) return { val: v.toFixed(0), unit: 'B/s' };
  if (v < 1024 * 1024) return { val: (v / 1024).toFixed(1), unit: 'KB/s' };
  if (v < 1024 * 1024 * 1024) return { val: (v / 1048576).toFixed(2), unit: 'MB/s' };
  return { val: (v / 1073741824).toFixed(2), unit: 'GB/s' };
}

// Host network throughput KPI card: polls /api/netstats every 5s and
// shows combined rx+tx rate, per-direction rates, an active-interface
// hint, and a sparkline of recent samples.
function NetThroughputKPI() {
  const [snap, setSnap] = React.useState(null);
  React.useEffect(() => {
    let cancelled = false;
    async function pull() {
      try {
        const r = await fetch('/api/netstats', { cache: 'no-store' });
        if (!r.ok) return;
        const body = await r.json();
        if (!cancelled) setSnap(body);
      } catch (_) { /* keep last */ }
    }
    pull();
    const t = setInterval(pull, 5000);
    return () => { cancelled = true; clearInterval(t); };
  }, []);
  const history = snap?.history || [];
  const rx = snap?.totalRxPerSec || 0;
  const tx = snap?.totalTxPerSec || 0;
  const total = rx + tx;
  const tot = _fmtRate(total);
  const rxf = _fmtRate(rx);
  const txf = _fmtRate(tx);
  // Interfaces currently moving any traffic drive the hint line;
  // otherwise show 'idle' (ready) or 'sampling…' (first poll pending).
  const ifaceNames = Object.keys(snap?.interfaces || {});
  const activeIfaces = ifaceNames.filter(n => { const i = snap.interfaces[n]; return (i.rxPerSec + i.txPerSec) > 0; });
  const ifaceHint = activeIfaces.length ? activeIfaces.join(', ') : (snap && snap.ready ? 'idle' : 'sampling…');
  // Sparkline values = total throughput per sample in KB/s for readable scale.
  const values = history.map(h => (h.rxPerSec + h.txPerSec) / 1024);
  return (
Net throughput · host
{snap && snap.ready ? tot.val : '—'} {snap && snap.ready ? tot.unit : ''}
↓ rx {rxf.val} {rxf.unit} · ↑ tx {txf.val} {txf.unit}
{values.length >= 2 ? :
collecting samples…
}
); }

// Minimal SVG sparkline. Normalizes `values` into a width×height box
// with 2px vertical padding and computes the newest point's coordinates
// (used to highlight the latest sample). Returns null for empty input
// so callers can render it unconditionally.
function Sparkline({ values, color = 'var(--accent)', fill = true, height = 30, width = 160 }) {
  if (!values || !values.length) return null;
  const min = Math.min(...values);
  const max = Math.max(...values);
  const range = max - min || 1; // avoid divide-by-zero on a flat series
  const n = values.length;
  const pts = values.map((v, i) => { const x = (i / (n - 1)) * width; const y = height - ((v - min) / range) * (height - 4) - 2; return `${x.toFixed(1)},${y.toFixed(1)}`; }).join(' ');
  const last = values[n - 1];
  const lastX = width;
  const lastY = height - ((last - min) / range) * (height - 4) - 2;
  return ( {fill && ( )} );
}

// One group's section in the status grid. Owns its own drop-zone state
// (drop-target / drop-same highlight). Drag-enabled tiles and drop zones
// only render if `canDrag` is true — i.e. the viewer is an admin.
function StatusGroupSection({ group, nodes, onSelect, canDrag, onGroupMove }) {
  const [isOver, setIsOver] = React.useState(false);
  // Global drag context when the hook is installed; otherwise an inert
  // stand-in so the component still renders without drag support.
  const dnd = window.useDragState ? window.useDragState() : { activeNodeId: null, sourceGroup: null };
  const dragging = !!dnd.activeNodeId;
  const sameGroup = dragging && dnd.sourceGroup === group;
  function onDragOver(e) {
    if (!canDrag || !dragging) return;
    e.preventDefault(); // required to allow drop
    e.dataTransfer.dropEffect = sameGroup ? 'none' : 'move';
    if (!isOver) setIsOver(true);
  }
  function onDragLeave(e) {
    // Only clear when leaving the container, not when hovering over a child.
    if (e.currentTarget.contains(e.relatedTarget)) return;
    if (isOver) setIsOver(false);
  }
  function onDrop(e) {
    if (!canDrag || !dragging) return;
    e.preventDefault();
    setIsOver(false);
    const nodeId = e.dataTransfer.getData('text/plain');
    if (!nodeId) return;
    if (dnd.sourceGroup === group) return; // same-group drop = no-op
    if (typeof onGroupMove === 'function') onGroupMove(nodeId, group);
  }
  const hint = (dragging && !sameGroup && isOver) ? drop to move here : (dragging && sameGroup) ?
same group : null;
// Container classes: base grid class plus the drop-state highlight that
// matches the current drag (only meaningful when canDrag is set).
const classes = [ 'heatmap-group', (canDrag && dragging && !sameGroup && isOver) ? 'drop-target' : '', (canDrag && dragging && sameGroup) ? 'drop-same' : '', ].filter(Boolean).join(' ');
return (
{group} {nodes.filter(n => n.status==='ok').length}/{nodes.length} up {hint}
{nodes.map(n => { const isThisDragging = dnd.activeNodeId === n.id; const anom = n.anomaly && n.anomaly.active; const tileTitle = anom ? `${n.hostname} · ${n.status} · ${n.latency.toFixed(0)}ms · ANOMALY z=${n.anomaly.z.toFixed(1)}σ (baseline ${Math.round(n.anomaly.baselineMean)}ms)${canDrag ? ' · drag to another group to move' : ''}` : `${n.hostname} · ${n.status} · ${n.latency.toFixed(0)}ms${canDrag ? ' · drag to another group to move' : ''}`; return ( ); })}
); } function Overview({ nodes, alerts, onSelect, currentUser, onGroupMove }) { const stats = useMemoOV(() => { const live = nodes.filter(n => n.status === 'ok' || n.status === 'maint'); const avg = live.reduce((s, n) => s + n.latency, 0) / (live.length || 1); const sorted = [...live.map(n => n.latency)].sort((a,b)=>a-b); const p99 = sorted[Math.floor(sorted.length * 0.99)] || 0; const netTotal = nodes.reduce((s, n) => s + n.netIn + n.netOut, 0); const critCount = nodes.filter(n => n.status === 'crit').length; const offCount = nodes.filter(n => n.status === 'off').length; const anomalyCount = nodes.filter(n => n.anomaly && n.anomaly.active).length; const uptime = ((nodes.length - offCount - critCount) / nodes.length) * 100; const histLen = nodes[0]?.latencyHist.length || 0; const fleetLat = []; const fleetNet = []; for (let i = 0; i < histLen; i++) { let ls = 0, lc = 0, ns = 0; for (const n of nodes) { if (n.status !== 'off') { ls += n.latencyHist[i]; lc++; } ns += n.netHist[i]; } fleetLat.push(ls / (lc || 1)); fleetNet.push(ns); } return { avg, p99, netTotal, critCount, offCount, anomalyCount, uptime, fleetLat, fleetNet }; }, [nodes]); const byGroup = useMemoOV(() => { const m = {}; for (const n of nodes) { if (!m[n.group]) m[n.group] = { total: 0, ok: 0, crit: 0, off: 0, maint: 0, lat: 0 }; m[n.group].total++; m[n.group][n.status]++; if (n.status !== 'off') m[n.group].lat += n.latency; } return m; }, [nodes]); const recentAlerts = alerts.slice(0, 5); return (
Availability
{stats.uptime.toFixed(2)}%
{stats.offCount === 0 && stats.critCount === 0 ? '↑ all endpoints up' : `↓ ${stats.offCount + stats.critCount} down`}
100 - Math.min(99, v/20))} color="var(--ok)" />
Avg response time
{stats.avg.toFixed(0)} ms
p99 {stats.p99.toFixed(0)}ms · sla < 800ms
Open incidents
{stats.critCount + stats.offCount}
{alerts.filter(a=>a.severity==='crit'&&!a.ack).length} unacked · {alerts.filter(a=>a.severity==='warn').length} warnings {stats.anomalyCount > 0 && ( · {stats.anomalyCount} anomal{stats.anomalyCount === 1 ? 'y' : 'ies'} )}
{/* Heatmap grouped by category: one section per window.GROUPS entry, empty groups skipped */}

Endpoints · status grid

{nodes.length} endpoints · live
up down maint offline
{window.GROUPS.map(group => { const g = nodes.filter(n => n.group === group); if (!g.length) return null; return ; })}
{/* Groups breakdown: per-group ok/maint/crit/off tallies from the byGroup memo */}

By group

{Object.entries(byGroup).map(([group, s]) => (
{group}
{s.ok > 0 &&
} {s.maint > 0 &&
} {s.crit > 0 &&
} {s.off > 0 &&
}
{s.ok}/{s.total}
))}
{/* Latency distribution: fleet p50/p95/p99 over the recent window (LatencyChart) */}

Response time · 60s window

p50 / p95 / p99
{/* Alerts preview: the five most recent alerts (recentAlerts) */}

Recent alerts

{alerts.length} total
{recentAlerts.map(a => { const node = nodes.find(n => n.id === a.node); return (
{window.fmtRel(a.ts)} {a.severity} {a.title} {node?.hostname} · {a.body} {a.ack ? 'ack' : 'new'}
); })}
{/* Events timeline: alert markers across the last hour (EventsTimeline) */}

Events · last 60 minutes

{String(new Date().getHours()).padStart(2,'0')}:00 local
); }

// Fleet latency percentile chart: for each history sample, computes the
// p50/p95/p99 of latencies across all non-offline nodes and renders the
// three series as SVG polylines with a gridline/label scale.
function LatencyChart({ nodes }) {
  const histLen = nodes[0]?.latencyHist.length || 0;
  const p50 = [], p95 = [], p99 = [];
  for (let i = 0; i < histLen; i++) {
    // Sorted latencies of reachable nodes at sample i.
    const vals = nodes.filter(n => n.status !== 'off').map(n => n.latencyHist[i]).sort((a,b)=>a-b);
    p50.push(vals[Math.floor(vals.length * 0.5)] || 0);
    p95.push(vals[Math.floor(vals.length * 0.95)] || 0);
    p99.push(vals[Math.floor(vals.length * 0.99)] || 0);
  }
  const W = 560, H = 160;
  const all = [...p50, ...p95, ...p99];
  const max = Math.max(...all, 100); // y-axis never collapses below 100ms
  // Map a series onto SVG polyline points, scaled into the chart box
  // with a small vertical margin.
  const pts = (arr) => arr.map((v, i) => `${(i/(arr.length-1))*W},${H - (v/max)*(H-10) - 4}`).join(' ');
  return ( {[0, 0.25, 0.5, 0.75, 1].map(p => ( ))} {[0.25, 0.5, 0.75, 1].map(p => ( {Math.round(max * p)}ms ))} p50 p95 p99 );
}

// One-hour horizontal timeline of alert markers, newest at the right
// edge; colored by severity, labeled with a 5-minute tick scale.
function EventsTimeline({ alerts }) {
  const now = Date.now();
  const windowMs = 60 * 60 * 1000; // visible window: 1 hour
  const W = 1000, H = 60;
  return ( {Array.from({length: 13}).map((_, i) => { const x = (i / 12) * W; return ( -{60 - i*5}m ); })} {alerts.map(a => { const age = now - a.ts; if (age > windowMs) return null; const x = W - (age / windowMs) * W; const color = a.severity === 'crit' ? 'var(--crit)' : a.severity === 'warn' ? 'var(--warn)' : 'var(--accent)'; return ( {a.title.slice(0, 26)} ); })} NOW );
}

// Expose components consumed by other script chunks via window globals.
Object.assign(window, { Overview, Sparkline });