mirror of
https://github.com/Kpa-clawbot/meshcore-analyzer.git
synced 2026-05-14 05:55:03 +00:00
fix: stop autoLearnHopNodes from creating phantom nodes, fixes #133
autoLearnHopNodes was creating stub 'repeater' entries in the nodes table for every unresolved hop prefix. With hash_size=1, this generated thousands of phantom nodes (6,638 fake repeaters on a ~300-node mesh).

Root cause fix:
- autoLearnHopNodes no longer calls db.upsertNode() for unresolved hops
- Hop prefixes are still cached to avoid repeated DB lookups
- Unresolved hops display as raw hex via hop-resolver (no behavior change)

Cleanup:
- Added db.removePhantomNodes() — deletes nodes with public_key <= 16 chars (real MeshCore pubkeys are 64 hex chars / 32 bytes)
- Called at server startup to purge existing phantoms

Tests: 14 new assertions in test-db.js (109 total, all passing)

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
@@ -603,6 +603,18 @@ if (require.main === module) {
|
||||
console.log('Stats:', getStats());
|
||||
}
|
||||
|
||||
// Remove phantom nodes created by autoLearnHopNodes before this fix.
|
||||
// Real MeshCore pubkeys are 32 bytes (64 hex chars). Phantom nodes have only
|
||||
// the hop prefix as their public_key (typically 4-8 hex chars).
|
||||
// Threshold: public_key <= 16 hex chars (8 bytes) is too short to be real.
|
||||
// One-shot cleanup for phantom nodes left behind by the old autoLearnHopNodes
// behavior: their public_key is just a hop prefix (a few hex chars), while a
// genuine MeshCore pubkey is 32 bytes / 64 hex chars. Anything at or below
// 16 hex chars (8 bytes) cannot be a real key, so it is safe to delete.
// Returns the number of rows removed (0 when the table is already clean).
function removePhantomNodes() {
  const { changes } = db
    .prepare('DELETE FROM nodes WHERE LENGTH(public_key) <= 16')
    .run();
  if (changes > 0) {
    console.log(`[cleanup] Removed ${changes} phantom node(s) with short public_key prefixes`);
  }
  return changes;
}
|
||||
|
||||
function searchNodes(query, limit = 10) {
|
||||
return db.prepare(`
|
||||
SELECT * FROM nodes
|
||||
@@ -830,4 +842,4 @@ function getNodeAnalytics(pubkey, days) {
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { db, schemaVersion, observerIdToRowid, resolveObserverIdx, insertTransmission, upsertNode, upsertObserver, updateObserverStatus, getPackets, getPacket, getTransmission, getNodes, getNode, getObservers, getStats, searchNodes, getNodeHealth, getNodeAnalytics };
|
||||
module.exports = { db, schemaVersion, observerIdToRowid, resolveObserverIdx, insertTransmission, upsertNode, upsertObserver, updateObserverStatus, getPackets, getPacket, getTransmission, getNodes, getNode, getObservers, getStats, searchNodes, getNodeHealth, getNodeAnalytics, removePhantomNodes };
|
||||
|
||||
@@ -518,8 +518,8 @@ function broadcast(msg) {
|
||||
wss.clients.forEach(c => { if (c.readyState === 1) c.send(data); });
|
||||
}
|
||||
|
||||
// Auto-create stub nodes from path hops (≥2 bytes / 4 hex chars)
|
||||
// When an advert arrives later with a full pubkey matching the prefix, upsertNode will upgrade it
|
||||
// Resolve path hops to known nodes (≥2 bytes / 4 hex chars) — never creates phantom nodes.
|
||||
// Hops that can't be resolved are displayed as raw hex prefixes by the hop-resolver.
|
||||
const hopNodeCache = new Set(); // Avoid repeated DB lookups for known hops
|
||||
// Track when nodes were last seen as relay hops in packet paths (full pubkey → ISO timestamp)
|
||||
const lastPathSeenMap = new Map();
|
||||
@@ -539,14 +539,11 @@ function autoLearnHopNodes(hops, now) {
|
||||
const hopLower = hop.toLowerCase();
|
||||
const existing = db.db.prepare("SELECT public_key FROM nodes WHERE LOWER(public_key) LIKE ?").get(hopLower + '%');
|
||||
if (existing) {
|
||||
hopNodeCache.add(hop);
|
||||
hopPrefixToKey.set(hopLower, existing.public_key);
|
||||
continue;
|
||||
}
|
||||
// Create stub node — role is likely repeater (most hops are)
|
||||
db.upsertNode({ public_key: hopLower, name: null, role: 'repeater', lat: null, lon: null, last_seen: now });
|
||||
// Cache either way to avoid repeated DB lookups — but never create phantom nodes.
|
||||
// Unresolved hops are displayed as raw prefixes by the hop-resolver.
|
||||
hopNodeCache.add(hop);
|
||||
hopPrefixToKey.set(hopLower, hopLower); // stub uses prefix as key
|
||||
}
|
||||
}
|
||||
|
||||
@@ -675,7 +672,7 @@ for (const source of mqttSources) {
|
||||
if (decoded.path.hops.length > 0) {
|
||||
// Auto-create stub nodes from 2+ byte path hops
|
||||
autoLearnHopNodes(decoded.path.hops, now);
|
||||
// Track when each hop node was last seen relaying
|
||||
// Track when each resolved hop node was last seen relaying
|
||||
updatePathSeenTimestamps(decoded.path.hops, now);
|
||||
}
|
||||
|
||||
@@ -2917,6 +2914,8 @@ app.get('/{*splat}', (req, res) => {
|
||||
// --- Start ---
|
||||
const listenPort = process.env.PORT || config.port;
|
||||
if (require.main === module) {
|
||||
// Clean up phantom nodes created by the old autoLearnHopNodes behavior (fixes #133)
|
||||
db.removePhantomNodes();
|
||||
server.listen(listenPort, () => {
|
||||
const protocol = isHttps ? 'https' : 'http';
|
||||
console.log(`MeshCore Analyzer running on ${protocol}://localhost:${listenPort}`);
|
||||
|
||||
+53
@@ -388,6 +388,59 @@ console.log('\nv3 dedup:');
|
||||
assert(result2.observationId > 0, 'different observer is not a dupe');
|
||||
}
|
||||
|
||||
// --- removePhantomNodes ---
console.log('\nremovePhantomNodes:');
{
  // Seed four phantoms: public_keys that are only hop prefixes (4-16 hex chars).
  // The 16-char entry sits exactly on the threshold and must still be removed.
  for (const prefix of ['aabb', 'ccddee', 'ff001122', '0011223344556677']) {
    db.upsertNode({ public_key: prefix, name: null, role: 'repeater' });
  }

  // All four phantoms must be present before cleanup runs.
  assert(db.getNode('aabb') !== null, 'phantom node aabb exists before cleanup');
  assert(db.getNode('ccddee') !== null, 'phantom node ccddee exists before cleanup');
  assert(db.getNode('ff001122') !== null, 'phantom node ff001122 exists before cleanup');
  assert(db.getNode('0011223344556677') !== null, 'phantom 16-char exists before cleanup');

  // The pre-existing full-length node must be untouched by the cleanup.
  assert(db.getNode('aabbccdd11223344aabbccdd11223344') !== null, 'real node exists before cleanup');

  // First pass deletes exactly the four seeded phantoms.
  const firstPass = db.removePhantomNodes();
  assert(firstPass === 4, `removed 4 phantom nodes (got ${firstPass})`);

  // Phantoms are gone...
  assert(db.getNode('aabb') === null, 'phantom aabb removed');
  assert(db.getNode('ccddee') === null, 'phantom ccddee removed');
  assert(db.getNode('ff001122') === null, 'phantom ff001122 removed');
  assert(db.getNode('0011223344556677') === null, 'phantom 16-char removed');

  // ...while the real node survives.
  assert(db.getNode('aabbccdd11223344aabbccdd11223344') !== null, 'real node preserved after cleanup');

  // The cleanup is idempotent: a second pass finds nothing to delete.
  const secondPass = db.removePhantomNodes();
  assert(secondPass === 0, 'second cleanup removes nothing');
}
|
||||
|
||||
// --- stats exclude phantom nodes ---
console.log('\nstats exclude phantom nodes:');
{
  // Capture the all-time node count before introducing a phantom.
  const baseline = db.getStats().totalNodesAllTime;

  // A short-key phantom inflates the count by exactly one...
  db.upsertNode({ public_key: 'deadbeef', name: null, role: 'repeater' });
  assert(db.getStats().totalNodesAllTime === baseline + 1, 'phantom inflates totalNodesAllTime');

  // ...and removePhantomNodes restores the original baseline.
  db.removePhantomNodes();
  assert(db.getStats().totalNodesAllTime === baseline, 'phantom removed from totalNodesAllTime');
}

cleanup();
delete process.env.DB_PATH;
|
||||
|
||||
|
||||
Reference in New Issue
Block a user