Mirror of https://github.com/Kpa-clawbot/meshcore-analyzer.git
(synced 2026-05-13 19:23:33 +00:00)

Comparing: v3.6.0 .. revert/870 (1 commit)

Commit: 201fa2787d
@@ -1 +1 @@
-{"schemaVersion":1,"label":"e2e tests","message":"89 passed","color":"brightgreen"}
+{"schemaVersion":1,"label":"e2e tests","message":"45 passed","color":"brightgreen"}

@@ -1 +1 @@
-{"schemaVersion":1,"label":"frontend coverage","message":"36.12%","color":"red"}
+{"schemaVersion":1,"label":"frontend coverage","message":"39.68%","color":"red"}
@@ -135,7 +135,7 @@ jobs:
   e2e-test:
     name: "🎭 Playwright E2E Tests"
     needs: [go-test]
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, Linux]
     defaults:
       run:
         shell: bash
@@ -145,6 +145,13 @@ jobs:
         with:
           fetch-depth: 0

+      - name: Free disk space
+        run: |
+          # Prune old runner diagnostic logs (can accumulate 50MB+)
+          find ~/actions-runner/_diag/ -name '*.log' -mtime +3 -delete 2>/dev/null || true
+          # Show available disk space
+          df -h / | tail -1
+
       - name: Set up Node.js 22
         uses: actions/setup-node@v5
         with:
@@ -245,11 +252,17 @@ jobs:
   build-and-publish:
     name: "🏗️ Build & Publish Docker Image"
     needs: [e2e-test]
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, meshcore-runner-2]
     steps:
       - name: Checkout code
         uses: actions/checkout@v5

+      - name: Free disk space
+        run: |
+          docker system prune -af 2>/dev/null || true
+          docker builder prune -af 2>/dev/null || true
+          df -h /
+
       - name: Compute build metadata
         id: meta
         run: |
@@ -449,7 +462,7 @@ jobs:
     name: "📝 Publish Badges & Summary"
     if: github.event_name == 'push'
     needs: [deploy]
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, Linux]
     steps:
       - name: Checkout code
         uses: actions/checkout@v5
@@ -14,7 +14,6 @@ WORKDIR /build/server
 COPY cmd/server/go.mod cmd/server/go.sum ./
 COPY internal/geofilter/ ../../internal/geofilter/
 COPY internal/sigvalidate/ ../../internal/sigvalidate/
-COPY internal/packetpath/ ../../internal/packetpath/
 RUN go mod download
 COPY cmd/server/ ./
 RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \
@@ -25,7 +24,6 @@ WORKDIR /build/ingestor
 COPY cmd/ingestor/go.mod cmd/ingestor/go.sum ./
 COPY internal/geofilter/ ../../internal/geofilter/
 COPY internal/sigvalidate/ ../../internal/sigvalidate/
-COPY internal/packetpath/ ../../internal/packetpath/
 RUN go mod download
 COPY cmd/ingestor/ ./
 RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \
@@ -1,207 +0,0 @@

# v3.6.0 - The Forensics

CoreScope just got eyes everywhere. This release drops **path inspection**, **color-by-hash markers**, **clock skew detection**, **full channel encryption**, an **observer graph**, and a pile of robustness fixes that make your mesh network feel like it's being watched by someone who actually cares.

134 commits, 105 PRs merged, 18K+ lines added. Here's what shipped.

---

## 🚀 New Features
### Path-Prefix Candidate Inspector (#944, #945)

The marquee feature. Click any path segment and CoreScope opens an interactive inspector showing every candidate node that could match that hop prefix - plotted on a map with scoring by neighbor-graph affinity and geographic centroid. Ambiguous hops? Now you can see *why* they're ambiguous and pick the right one.

**Why you'll love it:** No more guessing which `0xA3` is the real repeater. The inspector lays out every candidate, scores them, and lets you drill in visually.

### Color-by-Hash Packet Markers (#948, #951)

Every packet type gets a vivid, hash-derived color - on the live feed, map polylines, and flying-packet animations. Bright fill with dark outline for contrast. No more monochrome blobs - you can visually track packet flows by color at a glance.
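A minimal sketch of the idea, not CoreScope's actual palette code (the function and formula here are illustrative): hash the packet-type name onto the hue wheel so the same type always renders the same bright color.

```go
package main

import (
	"fmt"
	"hash/fnv"
)

// typeColor derives a stable, vivid HSL color from a packet-type name.
func typeColor(packetType string) string {
	h := fnv.New32a()
	h.Write([]byte(packetType))
	// Fixed saturation/lightness give bright fills that contrast
	// against a dark outline; the hash only picks the hue.
	return fmt.Sprintf("hsl(%d, 85%%, 55%%)", h.Sum32()%360)
}

func main() {
	fmt.Println(typeColor("ADVERT"), typeColor("TRACE"))
}
```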
### Node Filter on Live Page (#924, #771)

Filter the live packet stream to show only traffic flowing through a specific node. Pick a repeater, see exactly what it's carrying. That simple.

### Clock Skew Detection (#746, #752, #828, #850)

Full pipeline: the backend computes drift using Theil-Sen regression with outlier rejection (#828), and the UI shows per-node badges, detail sparklines, and fleet-wide analytics (#752). Bimodal clock severity (#850) surfaces flaky-RTC nodes that toggle between accurate and drifted - instead of hiding them as "No Clock."

**Why you'll love it:** Nodes with bad clocks silently corrupt your timeline. Now they glow red before they ruin your analysis.
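For intuition, here is a minimal Theil-Sen sketch, not the project's actual estimator (which also applies the sanity checks from #769): the drift rate is the median of all pairwise slopes between receive time and the node's reported clock, so a few wild samples cannot skew the fit.

```go
package main

import (
	"fmt"
	"sort"
)

// theilSenSlope estimates dy/dx as the median of all pairwise slopes.
// xs: observer receive times; ys: the node's reported clock values.
func theilSenSlope(xs, ys []float64) float64 {
	var slopes []float64
	for i := 0; i < len(xs); i++ {
		for j := i + 1; j < len(xs); j++ {
			if dx := xs[j] - xs[i]; dx != 0 {
				slopes = append(slopes, (ys[j]-ys[i])/dx)
			}
		}
	}
	if len(slopes) == 0 {
		return 1 // slope 1 = clock tracks real time, i.e. no drift
	}
	sort.Float64s(slopes)
	return slopes[len(slopes)/2]
}

func main() {
	// A clock running 10% fast, with one corrupt sample that a
	// least-squares fit would swallow whole.
	xs := []float64{0, 10, 20, 30, 40}
	ys := []float64{0, 11, 22, 9999, 44}
	fmt.Printf("slope ≈ %.2f\n", theilSenSlope(xs, ys)) // ≈ 1.10
}
```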
### Observer Graph (M1+M2) (#774)

Observers are now first-class graph citizens. CoreScope builds a neighbor graph from observation overlaps, scores hop-resolver candidates by graph edges (#876), and uses geographic centroid for tiebreaking. The observer topology is visible and queryable.

### Channel Encryption - Full Stack (#726, #733, #750, #760)

Three milestones landed as one: DB-backed channel message history (#726), client-side PSK decryption in the browser (#733), and PSK channel management with add/remove UX and message caching (#750). Add a channel key in the UI, and CoreScope decrypts messages client-side - no server-side key storage. The add-channel button (#760) makes it dead simple.

**Why you'll love it:** Encrypted channels are no longer black boxes. Add your PSK, see the messages, search history - all without exposing keys to the server.

### Hash Collision Inspector (#758)

The Hash Usage Matrix now shows collision details for all hash sizes. When two nodes share a prefix, you see exactly who collides and at what size.

### Geofilter Builder - In-App (#735, #900)

The geofilter polygon builder is now served directly from CoreScope with a full docs page (#900). No more hunting for external tools. Link from the customizer, draw your polygon, done.

### Node Blacklist (#742)

`nodeBlacklist` in config hides abusive or troll nodes from all views. They're gone.

### Observer Retention (#764)

Stale observers are automatically pruned after a configurable number of days. Your observer list stays clean without manual intervention.

### Advert Signature Validation (#794)

Corrupt packets with invalid advert signatures are now rejected at ingest. Bad data never hits your store.
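A generic sketch of what reject-at-ingest looks like, assuming an Ed25519 scheme (the real sigvalidate package's message layout and key handling are not shown in this diff):

```go
package sigcheck

import "crypto/ed25519"

// advertValid reports whether an advert's signature verifies against the
// sender's public key; packets that fail are dropped before storage.
func advertValid(pub ed25519.PublicKey, signedPayload, sig []byte) bool {
	if len(pub) != ed25519.PublicKeySize || len(sig) != ed25519.SignatureSize {
		return false // malformed key or signature: reject outright
	}
	return ed25519.Verify(pub, signedPayload, sig)
}
```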
### Bounded Cold Load (#790)

`Load()` now respects a memory budget - no more OOM on cold start with a fat database. Combined with retention-hours cutoff (#917), cold start is safe on constrained hardware.

### Multi-Arch Docker Images (#869)

Official images now publish `amd64` + `arm64` in a single multi-arch manifest. Raspberry Pi operators: pull and run. No special tags needed.
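If you build your own images, the multi-arch equivalent looks roughly like this (the registry and tag are placeholders, and the project's actual CI invocation may differ):

```bash
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  -t <registry>/corescope:latest \
  --push .
```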
### /nodes Detail Panel + Search (#868)

The nodes detail panel ships with search improvements (#862) - find nodes fast, see their full detail in a slide-out panel.

### Deduplicated Top Longest Hops (#848)

Longest hops are now deduplicated by pair with observation count and SNR cues. No more seeing the same link 47 times.

---

## 🔥 Performance Wins

### StoreTx ResolvedPath Elimination (#806)

The per-transaction `ResolvedPath` computation is gone - replaced by a membership index with on-demand decode. This was one of the hottest paths in the ingestor.

### Node Packet Queries (#803)

Raw JSON text search for node packets was replaced with a proper `byNode` index (#673). Night and day.

### Channel Query Performance (#762, #763)

New `channel_hash` column enables SQL-level channel filtering. No more full-table scan to find messages in a channel.
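The shape of the optimization, sketched (the index name and query are assumptions; the `channel_hash` column itself appears in the schemas later in this diff): an indexed equality match replaces a scan that previously had to inspect every row.

```go
package store

import "database/sql"

// ensureChannelIndex creates the channel_hash index once; channel lookups
// then hit the index instead of scanning every transmission.
func ensureChannelIndex(db *sql.DB) error {
	_, err := db.Exec(`CREATE INDEX IF NOT EXISTS idx_tx_channel_hash
		ON transmissions(channel_hash)`)
	return err
}

// channelPackets returns raw packets for one channel via the index.
func channelPackets(db *sql.DB, channelHash string) (*sql.Rows, error) {
	return db.Query(`SELECT id, raw_hex FROM transmissions
		WHERE channel_hash = ? ORDER BY first_seen DESC`, channelHash)
}
```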
### SQLite Auto-Vacuum (#919, #920)

Incremental auto-vacuum enabled - the database file actually shrinks after retention pruning. No more 2GB database holding 200MB of live data.

### Retention-Hours Cutoff on Load (#917)

`Load()` now applies `retentionHours` at read time, preventing OOM when the DB has more history than memory allows.
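A sketch of what a retention-aware load can look like (the query shape is inferred from the test schemas later in this diff, which store RFC 3339 `first_seen` values and index `transmissions(first_seen)`):

```go
package store

import (
	"database/sql"
	"time"
)

// loadRecent loads only rows newer than the retention window, so cold-start
// memory is bounded by retentionHours instead of total DB history.
func loadRecent(db *sql.DB, retentionHours float64) (*sql.Rows, error) {
	cutoff := time.Now().UTC().
		Add(-time.Duration(retentionHours * float64(time.Hour))).
		Format(time.RFC3339)
	return db.Query(`SELECT id, raw_hex, hash, first_seen FROM transmissions
		WHERE first_seen >= ? ORDER BY first_seen ASC`, cutoff)
}
```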
---

## 🛡️ Security & Robustness

### MQTT Reconnect with Bounded Backoff (#947, #949)

The ingestor now reconnects to MQTT brokers with exponential backoff, observability logging, and bounded retry. No more silent disconnects that kill your data stream.
---

## 🐛 Bugs Squashed

This release exterminates **40+ bugs** — from protocol-level hash mismatches to pixel-level CSS breakage. Operators told us what hurt; we listened.
- **Path inspector "Show on Map" missed origin and first hop** (#950) - map view now includes all hops
- **Content hash used full header byte** (#787) - content hashing now uses payload type bits only, fixing hash collisions between packets that differ only in header flags
- **Encrypted channel deep links showed broken UI** (#825, #826, #815) - deep links to encrypted channels now show a lock message instead of broken UI when you don't have the key
- **Geofilter longitude wrapping** (#925) - geofilter builder wraps longitude to [-180, 180]; southern hemisphere polygons no longer invert (a minimal sketch of the wrap follows this list)
- **Hash filter bypasses saved region filter** (#939) - hash lookups now skip the geo filter as intended
- **Companion-as-repeater excluded from path hops** (#935, #936) - non-repeater nodes no longer pollute hop resolution
- **Customize panel re-renders while typing** (#927) - text fields keep focus during config changes
- **Per-observation raw_hex** (#881, #882) - each observer's hex dump now shows what *that observer* actually received
- **Per-observation children in packet groups** (#866, #880) - expanded groups show per-obs data, not cross-observer aggregates
- **Full-page obs-switch** (#866, #870) - switching observers updates hex, path, and direction correctly
- **Packet detail shows wrong observation** (#849, #851) - clicking a specific observation opens *that* observation
- **Byte breakdown hop count** (#844, #846) - derived from `path_len`, not aggregated `_parsedPath`
- **Transport-route path_len offset** (#852, #853) - correct offset calculation + CSS variable fix
- **Packets/hour chart bars + x-axis** (#858, #865) - bars render correctly, x-axis labels properly decimated
- **Channel timeline capped to top 8** (#860, #864) - no more 47-channel chart spaghetti
- **Reachability row opacity removed** (#859, #863) - clean rows without misleading gradient
- **Sticky table headers on mobile** (#861, #867) - restored after regression
- **Map popup 'Show Neighbors' on iOS Safari** (#840, #841) - link actually works now
- **Node detail Recent Packets invisible text** (#829, #830) - CSS fix
- **/api/packets/{hash} falls back to DB** (#827, #831) - when the in-memory store misses, the DB catches it
- **IATA filter bypass for status messages** (#694, #802) - status packets no longer filtered out by airport codes
- **Desktop node click URL hash** (#676, #739) - clicking a node updates the URL for deep linking
- **Filter params in URL hash** (#682, #740) - all filter state serialized for shareable links
- **Hide undecryptable channel messages** (#727, #728) - clean default view
- **TRACE path_json uses path_sz** (#732) - correct field from flags byte, not header hash_size
- **Multi-byte adopters** (#754, #767) - all node types, role column, advert precedence
- **Channel key case sensitivity** (#761) - Public decode works correctly
- **Transport route field offsets** (#766) - correct offsets in field table
- **Clock skew sanity checks** (#769) - filter epoch-0, cap drift, require minimum samples
- **Neighbor graph slider persistence** (#776) - default 0.7, persisted to localStorage
- **Node detail panel navigation** (#779, #785) - Details/Analytics links actually navigate
- **Channel key removal** (#898) - user-added keys for server-known channels can be removed
- **Side-panel Details on desktop** (#892) - opens full-screen correctly
- **Hex-dump byte ranges client-side** (#891) - computed from per-obs raw_hex
- **path_json derived from raw_hex at ingest** (#886, #887) - single source of truth
- **Path pill and byte breakdown hop agreement** (#885) - they match now
- **Mobile close button + toolbar scroll** (#797, #805) - accessible and scrollable
- **/health.recentPackets resolved_path fallback** (#810, #821) - falls back to longest sibling observation
- **Channel filter on Packets page** (#812, #816) - UI and API both fixed
- **Clock-skew section in side panel** (#813, #814) - renders correctly
- **Real RSS in /api/stats** (#832, #835) - surface actual RSS alongside tracked store bytes
- **Hash size detection for transport routes + zero-hop adverts** (#747) - correct detection
- **Repeater+observer merged map marker** (#745) - single marker, not two overlapping
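The longitude wrap referenced above (#925), as a minimal sketch with a hypothetical function name:

```go
package geo

import "math"

// wrapLon normalizes any longitude into [-180, 180) so polygons drawn
// across the antimeridian keep a consistent orientation.
func wrapLon(lon float64) float64 {
	lon = math.Mod(lon+180, 360)
	if lon < 0 {
		lon += 360 // Go's Mod keeps the dividend's sign; shift back into range
	}
	return lon - 180
}
```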
---

## 🎨 UI Polish

- QA findings applied across the board (#832, #833, #836, #837, #838) - dozens of small UX fixes from a systematic QA pass

---

## 📦 Upgrading

```bash
git pull
docker compose down
docker compose build prod
docker compose up -d prod
```
Your existing `config.json` works as-is. New optional config keys:

- `nodeBlacklist` - array of node hashes to hide
- `observerRetentionDays` - days before stale observers are pruned
- `memoryBudgetMB` - cap on in-memory packet store
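For example, a `config.json` using the new keys might look like this (values are illustrative; the `db` block matches the `DBConfig` fields shown later in this diff):

```json
{
  "nodeBlacklist": ["a1b2c3d4", "deadbeef"],
  "observerRetentionDays": 14,
  "memoryBudgetMB": 512,
  "db": {
    "vacuumOnStartup": false,
    "incrementalVacuumPages": 1024
  }
}
```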
### Verify

```bash
curl -s http://localhost/api/health | jq .version
# "3.6.0"
```
---

## 🙏 External Contributors

- **#735** ([@efiten](https://github.com/efiten)) - Serve geofilter builder from app, link from customizer
- **#739** ([@efiten](https://github.com/efiten)) - Desktop node click updates URL hash for deep linking
- **#740** ([@efiten](https://github.com/efiten)) - Serialize filter params in URL hash for shareable links
- **#742** ([@Joel-Claw](https://github.com/Joel-Claw)) - Add nodeBlacklist config to hide abusive/troll nodes
- **#761** ([@copelaje](https://github.com/copelaje)) - Fix channel key case sensitivity for Public decode
- **#764** ([@Joel-Claw](https://github.com/Joel-Claw)) - Add observer retention - prune stale observers after configurable days
- **#802** ([@efiten](https://github.com/efiten)) - Bypass IATA filter for status messages, fill SNR on duplicate observations
- **#803** ([@efiten](https://github.com/efiten)) - Replace raw JSON text search with byNode index for node packet queries
- **#805** ([@efiten](https://github.com/efiten)) - Mobile close button accessible + toolbar scrollable
- **#900** ([@efiten](https://github.com/efiten)) - App-served geofilter docs page
- **#917** ([@efiten](https://github.com/efiten)) - Apply retentionHours cutoff in Load() to prevent OOM on cold start
- **#924** ([@efiten](https://github.com/efiten)) - Node filter on live page - show only traffic through a specific node
- **#925** ([@efiten](https://github.com/efiten)) - Fix geobuilder longitude wrapping for southern hemisphere polygons
- **#927** ([@efiten](https://github.com/efiten)) - Skip customize panel re-render while text field has focus
---

## ⚠️ Breaking Changes

**None.** All API endpoints remain backwards-compatible. New fields are additive only.
---

## 📊 By the Numbers

| Stat | Count |
|------|-------|
| Commits | 134 |
| PRs merged | 105 |
| Lines added | 18,480 |
| Lines removed | 1,632 |
| Files changed | 110 |
| Contributors | 4 |

---

*Previous release: [v3.5.2](https://github.com/Kpa-clawbot/CoreScope/releases/tag/v3.5.2)*
@@ -41,7 +41,6 @@ type Config struct {
 	Metrics            *MetricsConfig   `json:"metrics,omitempty"`
 	GeoFilter          *GeoFilterConfig `json:"geo_filter,omitempty"`
 	ValidateSignatures *bool            `json:"validateSignatures,omitempty"`
-	DB                 *DBConfig        `json:"db,omitempty"`
 }

 // GeoFilterConfig is an alias for the shared geofilter.Config type.
@@ -59,20 +58,6 @@ type MetricsConfig struct {
 	SampleIntervalSec int `json:"sampleIntervalSec"`
 }

-// DBConfig controls SQLite vacuum and maintenance behavior (#919).
-type DBConfig struct {
-	VacuumOnStartup        bool `json:"vacuumOnStartup"`        // one-time full VACUUM on startup if auto_vacuum is not INCREMENTAL
-	IncrementalVacuumPages int  `json:"incrementalVacuumPages"` // pages returned to OS per reaper cycle (default 1024)
-}
-
-// IncrementalVacuumPages returns the configured pages per vacuum or 1024 default.
-func (c *Config) IncrementalVacuumPages() int {
-	if c.DB != nil && c.DB.IncrementalVacuumPages > 0 {
-		return c.DB.IncrementalVacuumPages
-	}
-	return 1024
-}
-
 // ShouldValidateSignatures returns true (default) unless explicitly disabled.
 func (c *Config) ShouldValidateSignatures() bool {
 	if c.ValidateSignatures != nil {
+10 -87

@@ -11,7 +11,6 @@ import (
 	"sync/atomic"
 	"time"

-	"github.com/meshcore-analyzer/packetpath"
 	_ "modernc.org/sqlite"
 )
@@ -59,7 +58,7 @@ func OpenStoreWithInterval(dbPath string, sampleIntervalSec int) (*Store, error)
 		return nil, fmt.Errorf("creating data dir: %w", err)
 	}

-	db, err := sql.Open("sqlite", dbPath+"?_pragma=auto_vacuum(INCREMENTAL)&_pragma=journal_mode(WAL)&_pragma=foreign_keys(ON)&_pragma=busy_timeout(5000)")
+	db, err := sql.Open("sqlite", dbPath+"?_pragma=journal_mode(WAL)&_pragma=foreign_keys(ON)&_pragma=busy_timeout(5000)")
 	if err != nil {
 		return nil, fmt.Errorf("opening db: %w", err)
 	}
@@ -85,9 +84,6 @@ func OpenStoreWithInterval(dbPath string, sampleIntervalSec int) (*Store, error)
 }

 func applySchema(db *sql.DB) error {
-	// auto_vacuum=INCREMENTAL is set via DSN pragma (must be before journal_mode).
-	// Logging of current mode is handled by CheckAutoVacuum — no duplicate log here.
-
 	schema := `
 	CREATE TABLE IF NOT EXISTS nodes (
 		public_key TEXT PRIMARY KEY,
@@ -193,7 +189,7 @@ func applySchema(db *sql.DB) error {
 	db.Exec(`DROP VIEW IF EXISTS packets_v`)
 	_, vErr := db.Exec(`
 		CREATE VIEW packets_v AS
-		SELECT o.id, COALESCE(o.raw_hex, t.raw_hex) AS raw_hex,
+		SELECT o.id, t.raw_hex,
 		       datetime(o.timestamp, 'unixepoch') AS timestamp,
 		       obs.id AS observer_id, obs.name AS observer_name,
 		       o.direction, o.snr, o.rssi, o.score, t.hash, t.route_type,
@@ -412,15 +408,6 @@ func applySchema(db *sql.DB) error {
 		log.Println("[migration] dropped_packets table created")
 	}

-	// Migration: add raw_hex column to observations (#881)
-	row = db.QueryRow("SELECT 1 FROM _migrations WHERE name = 'observations_raw_hex_v1'")
-	if row.Scan(&migDone) != nil {
-		log.Println("[migration] Adding raw_hex column to observations...")
-		db.Exec(`ALTER TABLE observations ADD COLUMN raw_hex TEXT`)
-		db.Exec(`INSERT INTO _migrations (name) VALUES ('observations_raw_hex_v1')`)
-		log.Println("[migration] observations.raw_hex column added")
-	}
-
 	return nil
 }
@@ -446,13 +433,12 @@ func (s *Store) prepareStatements() error {
 	}

 	s.stmtInsertObservation, err = s.db.Prepare(`
-		INSERT INTO observations (transmission_id, observer_idx, direction, snr, rssi, score, path_json, timestamp, raw_hex)
-		VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+		INSERT INTO observations (transmission_id, observer_idx, direction, snr, rssi, score, path_json, timestamp)
+		VALUES (?, ?, ?, ?, ?, ?, ?, ?)
 		ON CONFLICT(transmission_id, observer_idx, COALESCE(path_json, '')) DO UPDATE SET
-			snr = COALESCE(excluded.snr, snr),
-			rssi = COALESCE(excluded.rssi, rssi),
-			score = COALESCE(excluded.score, score),
-			raw_hex = COALESCE(excluded.raw_hex, raw_hex)
+			snr = COALESCE(excluded.snr, snr),
+			rssi = COALESCE(excluded.rssi, rssi),
+			score = COALESCE(excluded.score, score)
 	`)
 	if err != nil {
 		return err
@@ -598,7 +584,7 @@ func (s *Store) InsertTransmission(data *PacketData) (bool, error) {
 	_, err = s.stmtInsertObservation.Exec(
 		txID, observerIdx, data.Direction,
 		data.SNR, data.RSSI, data.Score,
-		data.PathJSON, epochTs, nilIfEmpty(data.RawHex),
+		data.PathJSON, epochTs,
 	)
 	if err != nil {
 		s.Stats.WriteErrors.Add(1)
@@ -791,58 +777,6 @@ func (s *Store) PruneOldMetrics(retentionDays int) (int64, error) {
 	return n, nil
 }

-// CheckAutoVacuum inspects the current auto_vacuum mode and logs a warning
-// if not INCREMENTAL. Performs opt-in full VACUUM if db.vacuumOnStartup is set (#919).
-func (s *Store) CheckAutoVacuum(cfg *Config) {
-	var autoVacuum int
-	if err := s.db.QueryRow("PRAGMA auto_vacuum").Scan(&autoVacuum); err != nil {
-		log.Printf("[db] warning: could not read auto_vacuum: %v", err)
-		return
-	}
-
-	if autoVacuum == 2 {
-		log.Printf("[db] auto_vacuum=INCREMENTAL")
-		return
-	}
-
-	modes := map[int]string{0: "NONE", 1: "FULL", 2: "INCREMENTAL"}
-	mode := modes[autoVacuum]
-	if mode == "" {
-		mode = fmt.Sprintf("UNKNOWN(%d)", autoVacuum)
-	}
-
-	log.Printf("[db] auto_vacuum=%s — DB needs one-time VACUUM to enable incremental auto-vacuum. "+
-		"Set db.vacuumOnStartup: true in config to migrate (will block startup for several minutes on large DBs). "+
-		"See https://github.com/Kpa-clawbot/CoreScope/issues/919", mode)
-
-	if cfg.DB != nil && cfg.DB.VacuumOnStartup {
-		// WARNING: Full VACUUM creates a temporary copy of the entire DB file.
-		// Requires ~2× the DB file size in free disk space or it will fail.
-		log.Printf("[db] vacuumOnStartup=true — starting one-time full VACUUM (ensure 2x DB size free disk space)...")
-		start := time.Now()
-
-		if _, err := s.db.Exec("PRAGMA auto_vacuum = INCREMENTAL"); err != nil {
-			log.Printf("[db] VACUUM failed: could not set auto_vacuum: %v", err)
-			return
-		}
-		if _, err := s.db.Exec("VACUUM"); err != nil {
-			log.Printf("[db] VACUUM failed: %v", err)
-			return
-		}
-
-		elapsed := time.Since(start)
-		log.Printf("[db] VACUUM complete in %v — auto_vacuum is now INCREMENTAL", elapsed.Round(time.Millisecond))
-	}
-}
-
-// RunIncrementalVacuum returns free pages to the OS (#919).
-// Safe to call on auto_vacuum=NONE databases (noop).
-func (s *Store) RunIncrementalVacuum(pages int) {
-	if _, err := s.db.Exec(fmt.Sprintf("PRAGMA incremental_vacuum(%d)", pages)); err != nil {
-		log.Printf("[vacuum] incremental_vacuum error: %v", err)
-	}
-}
-
 // Checkpoint forces a WAL checkpoint to release the WAL lock file,
 // preventing lock contention with a new process starting up.
 func (s *Store) Checkpoint() {
@@ -997,22 +931,11 @@ type MQTTPacketMessage struct {
 }

 // BuildPacketData constructs a PacketData from a decoded packet and MQTT message.
-// path_json is derived directly from raw_hex header bytes (not decoded.Path.Hops)
-// to guarantee the stored path always matches the raw bytes. This matters for
-// TRACE packets where decoded.Path.Hops is overwritten with payload hops (#886).
 func BuildPacketData(msg *MQTTPacketMessage, decoded *DecodedPacket, observerID, region string) *PacketData {
 	now := time.Now().UTC().Format(time.RFC3339)
 	pathJSON := "[]"
-	// For TRACE packets, path_json must be the payload-decoded route hops
-	// (decoded.Path.Hops), NOT the raw_hex header bytes which are SNR values.
-	// For all other packet types, derive path from raw_hex (#886).
-	if !packetpath.PathBytesAreHops(byte(decoded.Header.PayloadType)) {
-		if len(decoded.Path.Hops) > 0 {
-			b, _ := json.Marshal(decoded.Path.Hops)
-			pathJSON = string(b)
-		}
-	} else if hops, err := packetpath.DecodePathFromRawHex(msg.Raw); err == nil && len(hops) > 0 {
-		b, _ := json.Marshal(hops)
+	if len(decoded.Path.Hops) > 0 {
+		b, _ := json.Marshal(decoded.Path.Hops)
 		pathJSON = string(b)
 	}
@@ -2,7 +2,6 @@ package main

import (
	"database/sql"
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"

@@ -11,8 +10,6 @@ import (
 	"sync/atomic"
 	"testing"
 	"time"

-	"github.com/meshcore-analyzer/packetpath"
 )

 func tempDBPath(t *testing.T) string {
@@ -1971,155 +1968,3 @@ func TestInsertObservationSNRFillIn(t *testing.T) {
		t.Errorf("RSSI overwritten by null arrival: got %v, want %v", rssi3, rssi)
	}
}

// TestPerObservationRawHex verifies that two MQTT packets for the same hash
// from different observers store distinct raw_hex per observation (#881).
func TestPerObservationRawHex(t *testing.T) {
	store, err := OpenStore(tempDBPath(t))
	if err != nil {
		t.Fatal(err)
	}
	defer store.Close()

	// Register two observers
	store.UpsertObserver("obs-A", "Observer A", "", nil)
	store.UpsertObserver("obs-B", "Observer B", "", nil)

	hash := "abc123def456"
	rawA := "c0ffee01"
	rawB := "c0ffee0201aa"
	dir := "RX"

	// First observation from observer A
	pdA := &PacketData{
		RawHex:     rawA,
		Hash:       hash,
		Timestamp:  "2026-04-21T10:00:00Z",
		ObserverID: "obs-A",
		Direction:  &dir,
		PathJSON:   "[]",
	}
	isNew, err := store.InsertTransmission(pdA)
	if err != nil {
		t.Fatalf("insert A: %v", err)
	}
	if !isNew {
		t.Fatal("expected new transmission")
	}

	// Second observation from observer B (same hash, different raw bytes)
	pdB := &PacketData{
		RawHex:     rawB,
		Hash:       hash,
		Timestamp:  "2026-04-21T10:00:01Z",
		ObserverID: "obs-B",
		Direction:  &dir,
		PathJSON:   `["aabb"]`,
	}
	isNew2, err := store.InsertTransmission(pdB)
	if err != nil {
		t.Fatalf("insert B: %v", err)
	}
	if isNew2 {
		t.Fatal("expected duplicate transmission")
	}

	// Query observations and verify per-observation raw_hex
	rows, err := store.db.Query(`
		SELECT o.raw_hex, obs.id
		FROM observations o
		LEFT JOIN observers obs ON obs.rowid = o.observer_idx
		ORDER BY o.id ASC
	`)
	if err != nil {
		t.Fatalf("query: %v", err)
	}
	defer rows.Close()

	type obsResult struct {
		rawHex     string
		observerID string
	}
	var results []obsResult
	for rows.Next() {
		var rh, oid sql.NullString
		if err := rows.Scan(&rh, &oid); err != nil {
			t.Fatal(err)
		}
		results = append(results, obsResult{
			rawHex:     rh.String,
			observerID: oid.String,
		})
	}

	if len(results) != 2 {
		t.Fatalf("expected 2 observations, got %d", len(results))
	}
	if results[0].rawHex != rawA {
		t.Errorf("obs A raw_hex: got %q, want %q", results[0].rawHex, rawA)
	}
	if results[1].rawHex != rawB {
		t.Errorf("obs B raw_hex: got %q, want %q", results[1].rawHex, rawB)
	}
	if results[0].rawHex == results[1].rawHex {
		t.Error("both observations have same raw_hex — should differ")
	}
}

// TestBuildPacketData_TraceUsesPayloadHops verifies that TRACE packets use
// payload-decoded route hops in path_json (NOT the raw_hex header SNR bytes).
// Issue #886 / #887.
func TestBuildPacketData_TraceUsesPayloadHops(t *testing.T) {
	// TRACE packet: header path has SNR bytes [30,2D,0D,23], but decoded.Path.Hops
	// is overwritten to payload hops [67,33,D6,33,67].
	rawHex := "2604302D0D2359FEE7B100000000006733D63367"
	decoded, err := DecodePacket(rawHex, nil, false)
	if err != nil {
		t.Fatal(err)
	}

	// decoded.Path.Hops should be the TRACE-replaced hops (payload hops)
	if len(decoded.Path.Hops) != 5 {
		t.Fatalf("expected 5 decoded hops, got %d", len(decoded.Path.Hops))
	}

	msg := &MQTTPacketMessage{Raw: rawHex}
	pd := BuildPacketData(msg, decoded, "test-obs", "TST")

	// For TRACE: path_json MUST be the payload-decoded route hops, NOT the SNR bytes
	expectedPathJSON := `["67","33","D6","33","67"]`
	if pd.PathJSON != expectedPathJSON {
		t.Errorf("path_json = %s, want %s (TRACE must use payload hops)", pd.PathJSON, expectedPathJSON)
	}

	// Verify that DecodePathFromRawHex returns the SNR bytes (header path) which differ
	headerHops, herr := packetpath.DecodePathFromRawHex(rawHex)
	if herr != nil {
		t.Fatal(herr)
	}
	headerJSON, _ := json.Marshal(headerHops)
	if string(headerJSON) == expectedPathJSON {
		t.Error("header path (SNR) should differ from payload hops for TRACE")
	}
}

// TestBuildPacketData_NonTracePathJSON verifies non-TRACE packets also derive path from raw_hex.
func TestBuildPacketData_NonTracePathJSON(t *testing.T) {
	// A simple ADVERT packet (payload type 0) with 2 hops, hash_size 1
	// Header 0x09 = FLOOD(1), ADVERT(2), version 0
	// Path byte 0x02 = hash_size 1, hash_count 2
	// Path bytes: AA BB
	rawHex := "0902AABB" + "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
	decoded, err := DecodePacket(rawHex, nil, false)
	if err != nil {
		t.Fatal(err)
	}

	msg := &MQTTPacketMessage{Raw: rawHex}
	pd := BuildPacketData(msg, decoded, "obs1", "TST")

	expectedPathJSON := `["AA","BB"]`
	if pd.PathJSON != expectedPathJSON {
		t.Errorf("path_json = %s, want %s", pd.PathJSON, expectedPathJSON)
	}
}
@@ -12,7 +12,6 @@ import (
 	"strings"
 	"unicode/utf8"

-	"github.com/meshcore-analyzer/packetpath"
 	"github.com/meshcore-analyzer/sigvalidate"
 )
@@ -193,9 +192,8 @@ func decodePath(pathByte byte, buf []byte, offset int) (Path, int) {
 	}, totalBytes
 }

-// isTransportRoute delegates to packetpath.IsTransportRoute.
 func isTransportRoute(routeType int) bool {
-	return packetpath.IsTransportRoute(routeType)
+	return routeType == RouteTransportFlood || routeType == RouteTransportDirect
 }

 func decodeEncryptedPayload(typeName string, buf []byte) Payload {
@@ -11,7 +11,6 @@ import (
 	"strings"
 	"testing"

-	"github.com/meshcore-analyzer/packetpath"
 	"github.com/meshcore-analyzer/sigvalidate"
 )
@@ -1823,106 +1822,3 @@ func TestDecodeAdvertWithSignatureValidation(t *testing.T) {
		t.Error("SignatureValid should be nil when validation disabled")
	}
}

// === Tests for DecodePathFromRawHex (issue #886) ===

func TestDecodePathFromRawHex_HashSize1(t *testing.T) {
	// Header byte 0x26 = route_type DIRECT, payload TRACE
	// Path byte 0x04 = hash_size 1 (bits 7-6 = 00 → 0+1=1), hash_count 4
	// Path bytes: 30 2D 0D 23
	raw := "2604302D0D2359FEE7B100000000006733D63367"
	hops, err := packetpath.DecodePathFromRawHex(raw)
	if err != nil {
		t.Fatal(err)
	}
	expected := []string{"30", "2D", "0D", "23"}
	if len(hops) != len(expected) {
		t.Fatalf("got %d hops, want %d", len(hops), len(expected))
	}
	for i, h := range hops {
		if h != expected[i] {
			t.Errorf("hop[%d] = %s, want %s", i, h, expected[i])
		}
	}
}

func TestDecodePathFromRawHex_HashSize2(t *testing.T) {
	// Path byte 0x42 = hash_size 2 (bits 7-6 = 01 → 1+1=2), hash_count 2
	// Header 0x09 = FLOOD route (rt=1), payload ADVERT (pt=2)
	// Path bytes: AABB CCDD (4 bytes = 2 hops * 2 bytes)
	raw := "0942AABBCCDD" + "00000000000000"
	hops, err := packetpath.DecodePathFromRawHex(raw)
	if err != nil {
		t.Fatal(err)
	}
	expected := []string{"AABB", "CCDD"}
	if len(hops) != len(expected) {
		t.Fatalf("got %d hops, want %d", len(hops), len(expected))
	}
	for i, h := range hops {
		if h != expected[i] {
			t.Errorf("hop[%d] = %s, want %s", i, h, expected[i])
		}
	}
}

func TestDecodePathFromRawHex_HashSize3(t *testing.T) {
	// Path byte 0x81 = hash_size 3 (bits 7-6 = 10 → 2+1=3), hash_count 1
	// Header 0x09 = FLOOD route (rt=1), payload ADVERT
	raw := "0981AABBCC" + "0000000000"
	hops, err := packetpath.DecodePathFromRawHex(raw)
	if err != nil {
		t.Fatal(err)
	}
	if len(hops) != 1 || hops[0] != "AABBCC" {
		t.Fatalf("got %v, want [AABBCC]", hops)
	}
}

func TestDecodePathFromRawHex_HashSize4(t *testing.T) {
	// Path byte 0xC1 = hash_size 4 (bits 7-6 = 11 → 3+1=4), hash_count 1
	// Header 0x09 = FLOOD route (rt=1)
	raw := "09C1AABBCCDD" + "0000000000"
	hops, err := packetpath.DecodePathFromRawHex(raw)
	if err != nil {
		t.Fatal(err)
	}
	if len(hops) != 1 || hops[0] != "AABBCCDD" {
		t.Fatalf("got %v, want [AABBCCDD]", hops)
	}
}

func TestDecodePathFromRawHex_DirectZeroHops(t *testing.T) {
	// Path byte 0x00 = hash_size 1, hash_count 0
	// Header 0x0A = DIRECT route (rt=2), payload ADVERT
	raw := "0A00" + "0000000000"
	hops, err := packetpath.DecodePathFromRawHex(raw)
	if err != nil {
		t.Fatal(err)
	}
	if len(hops) != 0 {
		t.Fatalf("got %d hops, want 0", len(hops))
	}
}

func TestDecodePathFromRawHex_Transport(t *testing.T) {
	// Route type 3 = TRANSPORT_DIRECT → 4 transport code bytes before path byte
	// Header 0x27 = route_type 3, payload TRACE
	// Transport codes: 1122 3344
	// Path byte 0x02 = hash_size 1, hash_count 2
	// Path bytes: AA BB
	raw := "2711223344" + "02AABB" + "0000000000"
	hops, err := packetpath.DecodePathFromRawHex(raw)
	if err != nil {
		t.Fatal(err)
	}
	expected := []string{"AA", "BB"}
	if len(hops) != len(expected) {
		t.Fatalf("got %d hops, want %d", len(hops), len(expected))
	}
	for i, h := range hops {
		if h != expected[i] {
			t.Errorf("hop[%d] = %s, want %s", i, h, expected[i])
		}
	}
}
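The path-byte encoding these tests exercise can be restated compactly (the helper name below is illustrative, not part of the codebase): bits 7-6 select the per-hop hash size (00→1 byte up to 11→4 bytes) and bits 5-0 carry the hop count, so 0x42 means two 2-byte hops and 0xC1 means one 4-byte hop.

```go
package packetpath

// decodePathByte splits a MeshCore path byte into its two fields:
// bits 7-6 encode hash size minus one; bits 5-0 encode the hop count.
func decodePathByte(b byte) (hashSize, hopCount int) {
	return int(b>>6) + 1, int(b & 0x3F)
}
```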
@@ -13,10 +13,6 @@ replace github.com/meshcore-analyzer/geofilter => ../../internal/geofilter

 replace github.com/meshcore-analyzer/sigvalidate => ../../internal/sigvalidate

-require github.com/meshcore-analyzer/packetpath v0.0.0
-
-replace github.com/meshcore-analyzer/packetpath => ../../internal/packetpath
-
 require (
 	github.com/dustin/go-humanize v1.0.1 // indirect
 	github.com/google/uuid v1.6.0 // indirect
+18 -41

@@ -57,9 +57,6 @@ func main() {
 	defer store.Close()
 	log.Printf("SQLite opened: %s", cfg.DBPath)

-	// Check auto_vacuum mode and optionally migrate (#919)
-	store.CheckAutoVacuum(cfg)
-
 	// Node retention: move stale nodes to inactive_nodes on startup
 	nodeDays := cfg.NodeDaysOrDefault()
 	store.MoveStaleNodes(nodeDays)
@@ -72,15 +69,12 @@ func main() {
 	metricsDays := cfg.MetricsRetentionDays()
 	store.PruneOldMetrics(metricsDays)
 	store.PruneDroppedPackets(metricsDays)
-	vacuumPages := cfg.IncrementalVacuumPages()
-	store.RunIncrementalVacuum(vacuumPages)

 	// Daily ticker for node retention
 	retentionTicker := time.NewTicker(1 * time.Hour)
 	go func() {
 		for range retentionTicker.C {
 			store.MoveStaleNodes(nodeDays)
-			store.RunIncrementalVacuum(vacuumPages)
 		}
 	}()
@@ -89,10 +83,8 @@ func main() {
 	go func() {
 		time.Sleep(90 * time.Second) // stagger after metrics prune
 		store.RemoveStaleObservers(observerDays)
-		store.RunIncrementalVacuum(vacuumPages)
 		for range observerRetentionTicker.C {
 			store.RemoveStaleObservers(observerDays)
-			store.RunIncrementalVacuum(vacuumPages)
 		}
 	}()
@@ -102,7 +94,6 @@ func main() {
 		for range metricsRetentionTicker.C {
 			store.PruneOldMetrics(metricsDays)
 			store.PruneDroppedPackets(metricsDays)
-			store.RunIncrementalVacuum(vacuumPages)
 		}
 	}()
@@ -129,7 +120,23 @@ func main() {
 			tag = source.Broker
 		}

-		opts := buildMQTTOpts(source)
+		opts := mqtt.NewClientOptions().
+			AddBroker(source.Broker).
+			SetAutoReconnect(true).
+			SetConnectRetry(true).
+			SetOrderMatters(true)
+
+		if source.Username != "" {
+			opts.SetUsername(source.Username)
+		}
+		if source.Password != "" {
+			opts.SetPassword(source.Password)
+		}
+		if source.RejectUnauthorized != nil && !*source.RejectUnauthorized {
+			opts.SetTLSConfig(&tls.Config{InsecureSkipVerify: true})
+		} else if strings.HasPrefix(source.Broker, "ssl://") {
+			opts.SetTLSConfig(&tls.Config{})
+		}

 		opts.SetOnConnectHandler(func(c mqtt.Client) {
 			log.Printf("MQTT [%s] connected to %s", tag, source.Broker)
@@ -149,11 +156,7 @@ func main() {
 		})

 		opts.SetConnectionLostHandler(func(c mqtt.Client, err error) {
-			log.Printf("MQTT [%s] disconnected from %s: %v", tag, source.Broker, err)
-		})
-
-		opts.SetReconnectingHandler(func(c mqtt.Client, options *mqtt.ClientOptions) {
-			log.Printf("MQTT [%s] reconnecting to %s", tag, source.Broker)
+			log.Printf("MQTT [%s] disconnected: %v", tag, err)
 		})

 		// Capture source for closure
@@ -194,32 +197,6 @@ func main() {
 	log.Println("Done.")
 }

-// buildMQTTOpts creates MQTT client options for a source with bounded reconnect
-// backoff, connect timeout, and TLS/auth configuration.
-func buildMQTTOpts(source MQTTSource) *mqtt.ClientOptions {
-	opts := mqtt.NewClientOptions().
-		AddBroker(source.Broker).
-		SetAutoReconnect(true).
-		SetConnectRetry(true).
-		SetOrderMatters(true).
-		SetMaxReconnectInterval(30 * time.Second).
-		SetConnectTimeout(10 * time.Second).
-		SetWriteTimeout(10 * time.Second)
-
-	if source.Username != "" {
-		opts.SetUsername(source.Username)
-	}
-	if source.Password != "" {
-		opts.SetPassword(source.Password)
-	}
-	if source.RejectUnauthorized != nil && !*source.RejectUnauthorized {
-		opts.SetTLSConfig(&tls.Config{InsecureSkipVerify: true})
-	} else if strings.HasPrefix(source.Broker, "ssl://") {
-		opts.SetTLSConfig(&tls.Config{})
-	}
-	return opts
-}
-
 func handleMessage(store *Store, tag string, source MQTTSource, m mqtt.Message, channelKeys map[string]string, cfg *Config) {
 	defer func() {
 		if r := recover(); r != nil {
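For reference, typical use of the buildMQTTOpts helper removed above follows the standard eclipse/paho.mqtt.golang connect idiom (this usage sketch is illustrative, not code from the repository):

```go
// Construct the client from the prepared options and connect once;
// SetAutoReconnect/SetConnectRetry in the options then handle broker
// outages with the bounded backoff configured there.
client := mqtt.NewClient(buildMQTTOpts(source))
if token := client.Connect(); token.Wait() && token.Error() != nil {
	log.Printf("MQTT [%s] initial connect failed: %v", source.Name, token.Error())
}
```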
@@ -1,76 +0,0 @@
package main

import (
	"testing"
	"time"
)

func TestBuildMQTTOpts_ReconnectSettings(t *testing.T) {
	source := MQTTSource{
		Broker: "tcp://localhost:1883",
		Name:   "test",
	}
	opts := buildMQTTOpts(source)

	if opts.MaxReconnectInterval != 30*time.Second {
		t.Errorf("MaxReconnectInterval = %v, want 30s", opts.MaxReconnectInterval)
	}
	if opts.ConnectTimeout != 10*time.Second {
		t.Errorf("ConnectTimeout = %v, want 10s", opts.ConnectTimeout)
	}
	if opts.WriteTimeout != 10*time.Second {
		t.Errorf("WriteTimeout = %v, want 10s", opts.WriteTimeout)
	}
	if !opts.AutoReconnect {
		t.Error("AutoReconnect should be true")
	}
	if !opts.ConnectRetry {
		t.Error("ConnectRetry should be true")
	}
}

func TestBuildMQTTOpts_Credentials(t *testing.T) {
	source := MQTTSource{
		Broker:   "tcp://broker:1883",
		Username: "user1",
		Password: "pass1",
	}
	opts := buildMQTTOpts(source)

	if opts.Username != "user1" {
		t.Errorf("Username = %q, want %q", opts.Username, "user1")
	}
	if opts.Password != "pass1" {
		t.Errorf("Password = %q, want %q", opts.Password, "pass1")
	}
}

func TestBuildMQTTOpts_TLS_InsecureSkipVerify(t *testing.T) {
	f := false
	source := MQTTSource{
		Broker:             "ssl://broker:8883",
		RejectUnauthorized: &f,
	}
	opts := buildMQTTOpts(source)

	if opts.TLSConfig == nil {
		t.Fatal("TLSConfig should be set")
	}
	if !opts.TLSConfig.InsecureSkipVerify {
		t.Error("InsecureSkipVerify should be true when RejectUnauthorized=false")
	}
}

func TestBuildMQTTOpts_TLS_SSL_Prefix(t *testing.T) {
	source := MQTTSource{
		Broker: "ssl://broker:8883",
	}
	opts := buildMQTTOpts(source)

	if opts.TLSConfig == nil {
		t.Fatal("TLSConfig should be set for ssl:// brokers")
	}
	if opts.TLSConfig.InsecureSkipVerify {
		t.Error("InsecureSkipVerify should be false by default")
	}
}
@@ -127,92 +127,6 @@ func TestBoundedLoad_AscendingOrder(t *testing.T) {
	}
}

// loadStoreWithRetention creates a PacketStore with retentionHours set.
func loadStoreWithRetention(t *testing.T, dbPath string, retentionHours float64) *PacketStore {
	t.Helper()
	db, err := OpenDB(dbPath)
	if err != nil {
		t.Fatal(err)
	}
	cfg := &PacketStoreConfig{RetentionHours: retentionHours}
	store := NewPacketStore(db, cfg)
	if err := store.Load(); err != nil {
		t.Fatal(err)
	}
	return store
}

// createTestDBWithAgedPackets inserts numRecent packets with timestamps within
// the last hour and numOld packets with timestamps 48 hours ago.
func createTestDBWithAgedPackets(t *testing.T, numRecent, numOld int) string {
	t.Helper()
	dir := t.TempDir()
	dbPath := filepath.Join(dir, "test.db")

	conn, err := sql.Open("sqlite", dbPath+"?_journal_mode=WAL")
	if err != nil {
		t.Fatal(err)
	}
	defer conn.Close()

	execOrFail := func(s string) {
		if _, err := conn.Exec(s); err != nil {
			t.Fatalf("setup: %v\nSQL: %s", err, s)
		}
	}
	execOrFail(`CREATE TABLE transmissions (id INTEGER PRIMARY KEY, raw_hex TEXT, hash TEXT, first_seen TEXT, route_type INTEGER, payload_type INTEGER, payload_version INTEGER, decoded_json TEXT)`)
	execOrFail(`CREATE TABLE observations (id INTEGER PRIMARY KEY, transmission_id INTEGER, observer_id TEXT, observer_name TEXT, direction TEXT, snr REAL, rssi REAL, score INTEGER, path_json TEXT, timestamp TEXT, raw_hex TEXT)`)
	execOrFail(`CREATE TABLE observers (rowid INTEGER PRIMARY KEY, id TEXT, name TEXT)`)
	execOrFail(`CREATE TABLE nodes (pubkey TEXT PRIMARY KEY, name TEXT, role TEXT, lat REAL, lon REAL, last_seen TEXT, first_seen TEXT, frequency REAL)`)
	execOrFail(`CREATE TABLE schema_version (version INTEGER)`)
	execOrFail(`INSERT INTO schema_version (version) VALUES (1)`)
	execOrFail(`CREATE INDEX idx_tx_first_seen ON transmissions(first_seen)`)

	now := time.Now().UTC()
	id := 1
	// Insert old packets (48 hours ago)
	for i := 0; i < numOld; i++ {
		ts := now.Add(-48 * time.Hour).Add(time.Duration(i) * time.Second).Format(time.RFC3339)
		conn.Exec("INSERT INTO transmissions VALUES (?,?,?,?,0,4,1,?)", id, "aa", fmt.Sprintf("old%d", i), ts, `{}`)
		conn.Exec("INSERT INTO observations VALUES (?,?,?,?,?,?,?,?,?,?,?)", id, id, "obs1", "Obs1", "RX", -10.0, -80.0, 5, `[]`, ts, "")
		id++
	}
	// Insert recent packets (within last hour)
	for i := 0; i < numRecent; i++ {
		ts := now.Add(-30 * time.Minute).Add(time.Duration(i) * time.Second).Format(time.RFC3339)
		conn.Exec("INSERT INTO transmissions VALUES (?,?,?,?,0,4,1,?)", id, "bb", fmt.Sprintf("new%d", i), ts, `{}`)
		conn.Exec("INSERT INTO observations VALUES (?,?,?,?,?,?,?,?,?,?,?)", id, id, "obs1", "Obs1", "RX", -10.0, -80.0, 5, `[]`, ts, "")
		id++
	}
	return dbPath
}

func TestRetentionLoad_OnlyLoadsRecentPackets(t *testing.T) {
	dbPath := createTestDBWithAgedPackets(t, 50, 100)
	defer os.RemoveAll(filepath.Dir(dbPath))

	// retention = 2 hours — should load only the 50 recent packets, not the 100 old ones
	store := loadStoreWithRetention(t, dbPath, 2)
	defer store.db.conn.Close()

	if len(store.packets) != 50 {
		t.Errorf("expected 50 recent packets, got %d (old packets should be excluded by retentionHours)", len(store.packets))
	}
}

func TestRetentionLoad_ZeroRetentionLoadsAll(t *testing.T) {
	dbPath := createTestDBWithAgedPackets(t, 50, 100)
	defer os.RemoveAll(filepath.Dir(dbPath))

	// retention = 0 (unlimited) — should load all 150 packets
	store := loadStoreWithRetention(t, dbPath, 0)
	defer store.db.conn.Close()

	if len(store.packets) != 150 {
		t.Errorf("expected all 150 packets with retentionHours=0, got %d", len(store.packets))
	}
}

func TestEstimateStoreTxBytesTypical(t *testing.T) {
	est := estimateStoreTxBytesTypical(10)
	if est < 1000 {
@@ -315,7 +229,7 @@ func createTestDBAt(tb testing.TB, dbPath string, numTx int) {
 		id INTEGER PRIMARY KEY,
 		transmission_id INTEGER, observer_id TEXT, observer_name TEXT,
 		direction TEXT, snr REAL, rssi REAL, score INTEGER,
-		path_json TEXT, timestamp TEXT, raw_hex TEXT
+		path_json TEXT, timestamp TEXT
 	)`)
 	execOrFail(`CREATE TABLE IF NOT EXISTS observers (rowid INTEGER PRIMARY KEY, id TEXT, name TEXT)`)
 	execOrFail(`CREATE TABLE IF NOT EXISTS nodes (
@@ -366,7 +280,7 @@ func createTestDBWithObs(tb testing.TB, dbPath string, numTx int) {
 	)`)
 	execOrFail(`CREATE TABLE IF NOT EXISTS observations (
 		id INTEGER PRIMARY KEY, transmission_id INTEGER, observer_id TEXT, observer_name TEXT,
-		direction TEXT, snr REAL, rssi REAL, score INTEGER, path_json TEXT, timestamp TEXT, raw_hex TEXT
+		direction TEXT, snr REAL, rssi REAL, score INTEGER, path_json TEXT, timestamp TEXT
 	)`)
 	execOrFail(`CREATE TABLE IF NOT EXISTS observers (rowid INTEGER PRIMARY KEY, id TEXT, name TEXT)`)
 	execOrFail(`CREATE TABLE IF NOT EXISTS nodes (
@@ -62,8 +62,6 @@ type Config struct {

 	Retention *RetentionConfig `json:"retention,omitempty"`

-	DB *DBConfig `json:"db,omitempty"`
-
 	PacketStore *PacketStoreConfig `json:"packetStore,omitempty"`

 	GeoFilter *GeoFilterConfig `json:"geo_filter,omitempty"`
@@ -131,20 +129,6 @@ type RetentionConfig struct {
 	MetricsDays int `json:"metricsDays"`
 }

-// DBConfig controls SQLite vacuum and maintenance behavior (#919).
-type DBConfig struct {
-	VacuumOnStartup        bool `json:"vacuumOnStartup"`        // one-time full VACUUM on startup if auto_vacuum is not INCREMENTAL
-	IncrementalVacuumPages int  `json:"incrementalVacuumPages"` // pages returned to OS per reaper cycle (default 1024)
-}
-
-// IncrementalVacuumPages returns the configured pages per vacuum or 1024 default.
-func (c *Config) IncrementalVacuumPages() int {
-	if c.DB != nil && c.DB.IncrementalVacuumPages > 0 {
-		return c.DB.IncrementalVacuumPages
-	}
-	return 1024
-}
-
 // MetricsRetentionDays returns configured metrics retention or 30 days default.
 func (c *Config) MetricsRetentionDays() int {
 	if c.Retention != nil && c.Retention.MetricsDays > 0 {
@@ -47,7 +47,7 @@ func setupTestDBv2(t *testing.T) *DB {
 		id INTEGER PRIMARY KEY AUTOINCREMENT,
 		transmission_id INTEGER NOT NULL REFERENCES transmissions(id),
 		observer_id TEXT, observer_name TEXT, direction TEXT,
-		snr REAL, rssi REAL, score INTEGER, path_json TEXT, timestamp INTEGER NOT NULL, raw_hex TEXT
+		snr REAL, rssi REAL, score INTEGER, path_json TEXT, timestamp INTEGER NOT NULL
 	);
 	`
 	if _, err := conn.Exec(schema); err != nil {
@@ -763,9 +763,9 @@ func TestGetChannelsFromStore(t *testing.T) {

 func TestPrefixMapResolve(t *testing.T) {
 	nodes := []nodeInfo{
-		{Role: "repeater", PublicKey: "aabbccdd11223344", Name: "NodeA", HasGPS: true, Lat: 37.5, Lon: -122.0},
-		{Role: "repeater", PublicKey: "aabbccdd55667788", Name: "NodeB", HasGPS: false},
-		{Role: "repeater", PublicKey: "eeff0011aabbccdd", Name: "NodeC", HasGPS: true, Lat: 38.0, Lon: -121.0},
+		{PublicKey: "aabbccdd11223344", Name: "NodeA", HasGPS: true, Lat: 37.5, Lon: -122.0},
+		{PublicKey: "aabbccdd55667788", Name: "NodeB", HasGPS: false},
+		{PublicKey: "eeff0011aabbccdd", Name: "NodeC", HasGPS: true, Lat: 38.0, Lon: -121.0},
 	}
 	pm := buildPrefixMap(nodes)
@@ -805,8 +805,8 @@ func TestPrefixMapResolve(t *testing.T) {

 	t.Run("multiple candidates no GPS", func(t *testing.T) {
 		noGPSNodes := []nodeInfo{
-			{Role: "repeater", PublicKey: "aa11bb22", Name: "X", HasGPS: false},
-			{Role: "repeater", PublicKey: "aa11cc33", Name: "Y", HasGPS: false},
+			{PublicKey: "aa11bb22", Name: "X", HasGPS: false},
+			{PublicKey: "aa11cc33", Name: "Y", HasGPS: false},
 		}
 		pm2 := buildPrefixMap(noGPSNodes)
 		n := pm2.resolve("aa11")
@@ -820,8 +820,8 @@ func TestPrefixMapCap(t *testing.T) {
 func TestPrefixMapCap(t *testing.T) {
 	// 16-char pubkey — longer than maxPrefixLen
 	nodes := []nodeInfo{
-		{Role: "repeater", PublicKey: "aabbccdd11223344", Name: "LongKey"},
-		{Role: "repeater", PublicKey: "eeff0011", Name: "ShortKey"}, // exactly 8 chars
+		{PublicKey: "aabbccdd11223344", Name: "LongKey"},
+		{PublicKey: "eeff0011", Name: "ShortKey"}, // exactly 8 chars
 	}
 	pm := buildPrefixMap(nodes)
@@ -20,7 +20,6 @@ type DB struct {
 	path            string // filesystem path to the database file
 	isV3            bool   // v3 schema: observer_idx in observations (vs observer_id in v2)
 	hasResolvedPath bool   // observations table has resolved_path column
-	hasObsRawHex    bool   // observations table has raw_hex column (#881)

 	// Channel list cache (60s TTL) — avoids repeated GROUP BY scans (#762)
 	channelsCacheMu sync.Mutex
@@ -77,9 +76,6 @@ func (db *DB) detectSchema() {
 			if colName == "resolved_path" {
 				db.hasResolvedPath = true
 			}
-			if colName == "raw_hex" {
-				db.hasObsRawHex = true
-			}
 		}
 	}
 }
+2 -60

@@ -74,8 +74,7 @@ func setupTestDB(t *testing.T) *DB {
 		score INTEGER,
 		path_json TEXT,
 		timestamp INTEGER NOT NULL,
-		resolved_path TEXT,
-		raw_hex TEXT
+		resolved_path TEXT
 	);

 	CREATE TABLE IF NOT EXISTS observer_metrics (

@@ -1135,8 +1134,7 @@ func setupTestDBV2(t *testing.T) *DB {
 		rssi REAL,
 		score INTEGER,
 		path_json TEXT,
-		timestamp INTEGER NOT NULL,
-		raw_hex TEXT
+		timestamp INTEGER NOT NULL
 	);
 	`
 	if _, err := conn.Exec(schema); err != nil {
@@ -1977,59 +1975,3 @@ func TestParseWindowDuration(t *testing.T) {
		}
	}
}

// TestPerObservationRawHexEnrich verifies enrichObs returns per-observation raw_hex
// when available, falling back to transmission raw_hex when NULL (#881).
func TestPerObservationRawHexEnrich(t *testing.T) {
	db := setupTestDB(t)
	defer db.Close()

	// Insert observers
	db.conn.Exec(`INSERT INTO observers (id, name) VALUES ('obs-a', 'Observer A')`)
	db.conn.Exec(`INSERT INTO observers (id, name) VALUES ('obs-b', 'Observer B')`)

	var rowA, rowB int64
	db.conn.QueryRow(`SELECT rowid FROM observers WHERE id='obs-a'`).Scan(&rowA)
	db.conn.QueryRow(`SELECT rowid FROM observers WHERE id='obs-b'`).Scan(&rowB)

	// Insert transmission with raw_hex
	txHex := "deadbeef"
	db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen) VALUES (?, 'hash1', '2026-04-21T10:00:00Z')`, txHex)

	// Insert two observations: A has its own raw_hex, B has NULL (historical)
	obsAHex := "c0ffee01"
	db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp, raw_hex)
		VALUES (1, ?, -5.0, -90.0, '[]', 1745236800, ?)`, rowA, obsAHex)
	db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
		VALUES (1, ?, -3.0, -85.0, '["aabb"]', 1745236801)`, rowB)

	store := NewPacketStore(db, nil)
	if err := store.Load(); err != nil {
		t.Fatalf("store load: %v", err)
	}

	tx := store.byHash["hash1"]
	if tx == nil {
		t.Fatal("transmission not loaded")
	}
	if len(tx.Observations) < 2 {
		t.Fatalf("expected 2 observations, got %d", len(tx.Observations))
	}

	// Check enriched observations
	for _, obs := range tx.Observations {
		m := store.enrichObs(obs)
		rh, _ := m["raw_hex"].(string)
		if obs.RawHex != "" {
			// Observer A: should get per-observation raw_hex
			if rh != obsAHex {
				t.Errorf("obs with own raw_hex: got %q, want %q", rh, obsAHex)
			}
		} else {
			// Observer B: should fall back to transmission raw_hex
			if rh != txHex {
				t.Errorf("obs without raw_hex: got %q, want %q (tx fallback)", rh, txHex)
			}
		}
	}
}
@@ -1,262 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
_ "modernc.org/sqlite"
|
||||
)
|
||||
|
||||
// createFreshIngestorDB creates a SQLite DB using the ingestor's applySchema logic
|
||||
// (simulated here) with auto_vacuum=INCREMENTAL set before tables.
|
||||
func createFreshDBWithAutoVacuum(t *testing.T, path string) *sql.DB {
|
||||
t.Helper()
|
||||
// auto_vacuum must be set via DSN before journal_mode creates the DB file
|
||||
db, err := sql.Open("sqlite", path+"?_pragma=auto_vacuum(INCREMENTAL)&_pragma=journal_mode(WAL)&_pragma=busy_timeout(5000)")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
db.SetMaxOpenConns(1)
|
||||
|
||||
// Create minimal schema
|
||||
_, err = db.Exec(`
|
||||
CREATE TABLE transmissions (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
raw_hex TEXT NOT NULL,
|
||||
hash TEXT NOT NULL UNIQUE,
|
||||
first_seen TEXT NOT NULL,
|
||||
route_type INTEGER,
|
||||
payload_type INTEGER,
|
||||
payload_version INTEGER,
|
||||
decoded_json TEXT,
|
||||
created_at TEXT DEFAULT (datetime('now')),
|
||||
channel_hash TEXT
|
||||
);
|
||||
CREATE TABLE observations (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
transmission_id INTEGER NOT NULL REFERENCES transmissions(id),
|
||||
observer_idx INTEGER,
|
||||
direction TEXT,
|
||||
snr REAL,
|
||||
rssi REAL,
|
||||
score INTEGER,
|
||||
path_json TEXT,
|
||||
timestamp INTEGER NOT NULL
|
||||
);
|
||||
`)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return db
|
||||
}
|
||||
|
||||
func TestNewDBHasIncrementalAutoVacuum(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
path := filepath.Join(dir, "test.db")
|
||||
|
||||
db := createFreshDBWithAutoVacuum(t, path)
|
||||
defer db.Close()
|
||||
|
||||
var autoVacuum int
|
||||
if err := db.QueryRow("PRAGMA auto_vacuum").Scan(&autoVacuum); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if autoVacuum != 2 {
|
||||
t.Fatalf("expected auto_vacuum=2 (INCREMENTAL), got %d", autoVacuum)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExistingDBHasAutoVacuumNone(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
path := filepath.Join(dir, "test.db")
|
||||
|
||||
// Create DB WITHOUT setting auto_vacuum (simulates old DB)
|
||||
db, err := sql.Open("sqlite", path+"?_pragma=journal_mode(WAL)")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
db.SetMaxOpenConns(1)
|
||||
_, err = db.Exec("CREATE TABLE dummy (id INTEGER PRIMARY KEY)")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
var autoVacuum int
|
||||
if err := db.QueryRow("PRAGMA auto_vacuum").Scan(&autoVacuum); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
db.Close()
|
||||
|
||||
if autoVacuum != 0 {
|
||||
t.Fatalf("expected auto_vacuum=0 (NONE) for old DB, got %d", autoVacuum)
|
||||
}
|
||||
}
|
||||
|
||||
func TestVacuumOnStartupMigratesDB(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
path := filepath.Join(dir, "test.db")
|
||||
|
||||
// Create DB without auto_vacuum (old DB)
|
||||
db, err := sql.Open("sqlite", path+"?_pragma=journal_mode(WAL)")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
db.SetMaxOpenConns(1)
|
||||
_, err = db.Exec("CREATE TABLE dummy (id INTEGER PRIMARY KEY)")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
var before int
|
||||
db.QueryRow("PRAGMA auto_vacuum").Scan(&before)
|
||||
if before != 0 {
|
||||
t.Fatalf("precondition: expected auto_vacuum=0, got %d", before)
|
||||
}
|
||||
db.Close()
|
||||
|
||||
// Simulate vacuumOnStartup migration using openRW
|
||||
rw, err := openRW(path)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if _, err := rw.Exec("PRAGMA auto_vacuum = INCREMENTAL"); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if _, err := rw.Exec("VACUUM"); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
rw.Close()
|
||||
|
||||
// Verify migration
|
||||
db2, err := sql.Open("sqlite", path+"?mode=ro")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer db2.Close()
|
||||
|
||||
var after int
|
||||
if err := db2.QueryRow("PRAGMA auto_vacuum").Scan(&after); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if after != 2 {
|
||||
t.Fatalf("expected auto_vacuum=2 after VACUUM migration, got %d", after)
|
||||
}
|
||||
}
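// Ordering note for the migration above: on a database created without
// auto_vacuum, setting PRAGMA auto_vacuum = INCREMENTAL alone is not enough.
// SQLite only applies the new mode once a full VACUUM rewrites the file, which
// is why the test issues both statements in sequence:
//
//	rw.Exec("PRAGMA auto_vacuum = INCREMENTAL") // records the desired mode
//	rw.Exec("VACUUM")                           // rewrites the file; the mode takes effect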
func TestIncrementalVacuumReducesFreelist(t *testing.T) {
	dir := t.TempDir()
	path := filepath.Join(dir, "test.db")

	db := createFreshDBWithAutoVacuum(t, path)

	// Insert a bunch of data
	now := time.Now().UTC().Format(time.RFC3339)
	for i := 0; i < 500; i++ {
		_, err := db.Exec(
			"INSERT INTO transmissions (raw_hex, hash, first_seen) VALUES (?, ?, ?)",
			strings.Repeat("AA", 200), // ~400 bytes each
			"hash_"+string(rune('A'+i%26))+string(rune('0'+i/26)),
			now,
		)
		if err != nil {
			t.Fatal(err)
		}
	}

	// Get file size before delete
	db.Close()
	infoBefore, _ := os.Stat(path)
	sizeBefore := infoBefore.Size()

	// Reopen and delete all
	db, err := sql.Open("sqlite", path+"?_pragma=journal_mode(WAL)&_pragma=busy_timeout(5000)")
	if err != nil {
		t.Fatal(err)
	}
	db.SetMaxOpenConns(1)
	defer db.Close()

	_, err = db.Exec("DELETE FROM transmissions")
	if err != nil {
		t.Fatal(err)
	}

	// Check freelist before vacuum
	var freelistBefore int64
	db.QueryRow("PRAGMA freelist_count").Scan(&freelistBefore)
	if freelistBefore == 0 {
		t.Fatal("expected non-zero freelist after DELETE")
	}

	// Run incremental vacuum
	_, err = db.Exec("PRAGMA incremental_vacuum(10000)")
	if err != nil {
		t.Fatal(err)
	}

	// Check freelist after vacuum
	var freelistAfter int64
	db.QueryRow("PRAGMA freelist_count").Scan(&freelistAfter)
	if freelistAfter >= freelistBefore {
		t.Fatalf("expected freelist to shrink: before=%d after=%d", freelistBefore, freelistAfter)
	}

	// Checkpoint WAL and check file size shrunk
	db.Exec("PRAGMA wal_checkpoint(TRUNCATE)")
	db.Close()
	infoAfter, _ := os.Stat(path)
	sizeAfter := infoAfter.Size()
	if sizeAfter >= sizeBefore {
		t.Logf("warning: file did not shrink (before=%d after=%d) — may depend on page reuse", sizeBefore, sizeAfter)
	}
}
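// SQLite does not accept bound parameters in PRAGMA statements, so a periodic
// vacuum helper has to interpolate the page budget. A minimal sketch of what
// runIncrementalVacuum (invoked from main.go further below) plausibly reduces to:
//
//	stmt := fmt.Sprintf("PRAGMA incremental_vacuum(%d)", pages) // pages caps work per pass
//	if _, err := db.Exec(stmt); err != nil {
//		log.Printf("[vacuum] error: %v", err)
//	}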
func TestCheckAutoVacuumLogs(t *testing.T) {
	// This test verifies checkAutoVacuum doesn't panic on various configs
	dir := t.TempDir()
	path := filepath.Join(dir, "test.db")

	// Create a fresh DB with auto_vacuum=INCREMENTAL
	dbConn := createFreshDBWithAutoVacuum(t, path)
	db := &DB{conn: dbConn, path: path}
	cfg := &Config{}

	// Should not panic
	checkAutoVacuum(db, cfg, path)
	dbConn.Close()

	// Create a DB without auto_vacuum
	path2 := filepath.Join(dir, "test2.db")
	dbConn2, _ := sql.Open("sqlite", path2+"?_pragma=journal_mode(WAL)")
	dbConn2.SetMaxOpenConns(1)
	dbConn2.Exec("CREATE TABLE dummy (id INTEGER PRIMARY KEY)")
	db2 := &DB{conn: dbConn2, path: path2}

	// Should log warning but not panic
	checkAutoVacuum(db2, cfg, path2)
	dbConn2.Close()
}

func TestConfigIncrementalVacuumPages(t *testing.T) {
	// Default
	cfg := &Config{}
	if cfg.IncrementalVacuumPages() != 1024 {
		t.Fatalf("expected default 1024, got %d", cfg.IncrementalVacuumPages())
	}

	// Custom
	cfg.DB = &DBConfig{IncrementalVacuumPages: 512}
	if cfg.IncrementalVacuumPages() != 512 {
		t.Fatalf("expected 512, got %d", cfg.IncrementalVacuumPages())
	}

	// Zero should return default
	cfg.DB.IncrementalVacuumPages = 0
	if cfg.IncrementalVacuumPages() != 1024 {
		t.Fatalf("expected default 1024 for zero, got %d", cfg.IncrementalVacuumPages())
	}
}
+101 -3
@@ -10,7 +10,6 @@ import (
	"strings"
	"time"

	"github.com/meshcore-analyzer/packetpath"
	"github.com/meshcore-analyzer/sigvalidate"
)

@@ -165,9 +164,8 @@ func decodePath(pathByte byte, buf []byte, offset int) (Path, int) {
	}, totalBytes
}

// isTransportRoute delegates to packetpath.IsTransportRoute.
func isTransportRoute(routeType int) bool {
	return packetpath.IsTransportRoute(routeType)
	return routeType == RouteTransportFlood || routeType == RouteTransportDirect
}

func decodeEncryptedPayload(typeName string, buf []byte) Payload {
@@ -443,6 +441,106 @@ func DecodePacket(hexString string, validateSignatures bool) (*DecodedPacket, error) {
	}, nil
}

// HexRange represents a labeled byte range for the hex breakdown visualization.
type HexRange struct {
	Start int    `json:"start"`
	End   int    `json:"end"`
	Label string `json:"label"`
}

// Breakdown holds colored byte ranges returned by the packet detail endpoint.
type Breakdown struct {
	Ranges []HexRange `json:"ranges"`
}
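// Wire shape implied by the struct tags above: the zero-hop FLOOD packet in
// the tests further below, for example, serializes as
//
//	{"ranges":[{"start":0,"end":0,"label":"Header"},
//	           {"start":1,"end":1,"label":"Path Length"},
//	           {"start":2,"end":4,"label":"Payload"}]}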
// BuildBreakdown computes labeled byte ranges for each section of a MeshCore packet.
// The returned ranges are consumed by createColoredHexDump() and buildHexLegend()
// in the frontend (public/app.js).
func BuildBreakdown(hexString string) *Breakdown {
	hexString = strings.ReplaceAll(hexString, " ", "")
	hexString = strings.ReplaceAll(hexString, "\n", "")
	hexString = strings.ReplaceAll(hexString, "\r", "")
	buf, err := hex.DecodeString(hexString)
	if err != nil || len(buf) < 2 {
		return &Breakdown{Ranges: []HexRange{}}
	}

	var ranges []HexRange
	offset := 0

	// Byte 0: Header
	ranges = append(ranges, HexRange{Start: 0, End: 0, Label: "Header"})
	offset = 1

	header := decodeHeader(buf[0])

	// Bytes 1-4: Transport Codes (TRANSPORT_FLOOD / TRANSPORT_DIRECT only)
	if isTransportRoute(header.RouteType) {
		if len(buf) < offset+4 {
			return &Breakdown{Ranges: ranges}
		}
		ranges = append(ranges, HexRange{Start: offset, End: offset + 3, Label: "Transport Codes"})
		offset += 4
	}

	if offset >= len(buf) {
		return &Breakdown{Ranges: ranges}
	}

	// Next byte: Path Length (bits 7-6 = hashSize-1, bits 5-0 = hashCount)
	ranges = append(ranges, HexRange{Start: offset, End: offset, Label: "Path Length"})
	pathByte := buf[offset]
	offset++

	hashSize := int(pathByte>>6) + 1
	hashCount := int(pathByte & 0x3F)
	pathBytes := hashSize * hashCount

	// Path hops
	if hashCount > 0 && offset+pathBytes <= len(buf) {
		ranges = append(ranges, HexRange{Start: offset, End: offset + pathBytes - 1, Label: "Path"})
	}
	offset += pathBytes

	if offset >= len(buf) {
		return &Breakdown{Ranges: ranges}
	}

	payloadStart := offset

	// Payload — break ADVERT into named sub-fields; everything else is one Payload range
	if header.PayloadType == PayloadADVERT && len(buf)-payloadStart >= 100 {
		ranges = append(ranges, HexRange{Start: payloadStart, End: payloadStart + 31, Label: "PubKey"})
		ranges = append(ranges, HexRange{Start: payloadStart + 32, End: payloadStart + 35, Label: "Timestamp"})
		ranges = append(ranges, HexRange{Start: payloadStart + 36, End: payloadStart + 99, Label: "Signature"})

		appStart := payloadStart + 100
		if appStart < len(buf) {
			ranges = append(ranges, HexRange{Start: appStart, End: appStart, Label: "Flags"})
			appFlags := buf[appStart]
			fOff := appStart + 1
			if appFlags&0x10 != 0 && fOff+8 <= len(buf) {
				ranges = append(ranges, HexRange{Start: fOff, End: fOff + 3, Label: "Latitude"})
				ranges = append(ranges, HexRange{Start: fOff + 4, End: fOff + 7, Label: "Longitude"})
				fOff += 8
			}
			if appFlags&0x20 != 0 && fOff+2 <= len(buf) {
				fOff += 2
			}
			if appFlags&0x40 != 0 && fOff+2 <= len(buf) {
				fOff += 2
			}
			if appFlags&0x80 != 0 && fOff < len(buf) {
				ranges = append(ranges, HexRange{Start: fOff, End: len(buf) - 1, Label: "Name"})
			}
		}
	} else {
		ranges = append(ranges, HexRange{Start: payloadStart, End: len(buf) - 1, Label: "Payload"})
	}

	return &Breakdown{Ranges: ranges}
}
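// Worked example of the path-byte split above: pathByte 0x41 is 0b01_000001,
// so hashSize = (0x41>>6)+1 = 2 bytes and hashCount = 0x41&0x3F = 1, giving
// pathBytes = 2. A pathByte of 0x00 means zero hops, so no "Path" range is emitted.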
// ComputeContentHash computes the SHA-256-based content hash (first 16 hex chars).
// It hashes the payload-type nibble + payload (skipping path bytes) to produce a
// route-independent identifier for the same logical packet. For TRACE packets,

@@ -97,6 +97,146 @@ func TestDecodePacket_FloodHasNoCodes(t *testing.T) {
	}
}

func TestBuildBreakdown_InvalidHex(t *testing.T) {
	b := BuildBreakdown("not-hex!")
	if len(b.Ranges) != 0 {
		t.Errorf("expected empty ranges for invalid hex, got %d", len(b.Ranges))
	}
}

func TestBuildBreakdown_TooShort(t *testing.T) {
	b := BuildBreakdown("11") // 1 byte — no path byte
	if len(b.Ranges) != 0 {
		t.Errorf("expected empty ranges for too-short packet, got %d", len(b.Ranges))
	}
}

func TestBuildBreakdown_FloodNonAdvert(t *testing.T) {
	// Header 0x15: route=1/FLOOD, payload=5/GRP_TXT
	// PathByte 0x01: 1 hop, 1-byte hash
	// PathHop: AA
	// Payload: FFFF00
	b := BuildBreakdown("1501AAFFFF00")
	labels := rangeLabels(b.Ranges)
	expect := []string{"Header", "Path Length", "Path", "Payload"}
	if !equalLabels(labels, expect) {
		t.Errorf("expected labels %v, got %v", expect, labels)
	}
	// Verify byte positions
	assertRange(t, b.Ranges, "Header", 0, 0)
	assertRange(t, b.Ranges, "Path Length", 1, 1)
	assertRange(t, b.Ranges, "Path", 2, 2)
	assertRange(t, b.Ranges, "Payload", 3, 5)
}

func TestBuildBreakdown_TransportFlood(t *testing.T) {
	// Header 0x14: route=0/TRANSPORT_FLOOD, payload=5/GRP_TXT
	// TransportCodes: AABBCCDD (4 bytes)
	// PathByte 0x01: 1 hop, 1-byte hash
	// PathHop: EE
	// Payload: FF00
	b := BuildBreakdown("14AABBCCDD01EEFF00")
	assertRange(t, b.Ranges, "Header", 0, 0)
	assertRange(t, b.Ranges, "Transport Codes", 1, 4)
	assertRange(t, b.Ranges, "Path Length", 5, 5)
	assertRange(t, b.Ranges, "Path", 6, 6)
	assertRange(t, b.Ranges, "Payload", 7, 8)
}

func TestBuildBreakdown_FloodNoHops(t *testing.T) {
	// Header 0x15: FLOOD/GRP_TXT; PathByte 0x00: 0 hops; Payload: 00AABB
	b := BuildBreakdown("150000AABB")
	assertRange(t, b.Ranges, "Header", 0, 0)
	assertRange(t, b.Ranges, "Path Length", 1, 1)
	// No Path range since hashCount=0
	for _, r := range b.Ranges {
		if r.Label == "Path" {
			t.Error("expected no Path range for zero-hop packet")
		}
	}
	assertRange(t, b.Ranges, "Payload", 2, 4)
}

func TestBuildBreakdown_AdvertBasic(t *testing.T) {
	// Header 0x11: FLOOD/ADVERT
	// PathByte 0x01: 1 hop, 1-byte hash
	// PathHop: AA
	// Payload: 100 bytes (PubKey32 + Timestamp4 + Signature64) + Flags=0x02 (repeater, no extras)
	pubkey := repeatHex("AB", 32)
	ts := "00000000" // 4 bytes
	sig := repeatHex("CD", 64)
	flags := "02"
	hex := "1101AA" + pubkey + ts + sig + flags
	b := BuildBreakdown(hex)
	assertRange(t, b.Ranges, "Header", 0, 0)
	assertRange(t, b.Ranges, "Path Length", 1, 1)
	assertRange(t, b.Ranges, "Path", 2, 2)
	assertRange(t, b.Ranges, "PubKey", 3, 34)
	assertRange(t, b.Ranges, "Timestamp", 35, 38)
	assertRange(t, b.Ranges, "Signature", 39, 102)
	assertRange(t, b.Ranges, "Flags", 103, 103)
}

func TestBuildBreakdown_AdvertWithLocation(t *testing.T) {
	// flags=0x12: hasLocation bit set
	pubkey := repeatHex("00", 32)
	ts := "00000000"
	sig := repeatHex("00", 64)
	flags := "12" // 0x10 = hasLocation
	latBytes := "00000000"
	lonBytes := "00000000"
	hex := "1101AA" + pubkey + ts + sig + flags + latBytes + lonBytes
	b := BuildBreakdown(hex)
	assertRange(t, b.Ranges, "Latitude", 104, 107)
	assertRange(t, b.Ranges, "Longitude", 108, 111)
}

func TestBuildBreakdown_AdvertWithName(t *testing.T) {
	// flags=0x82: hasName bit set
	pubkey := repeatHex("00", 32)
	ts := "00000000"
	sig := repeatHex("00", 64)
	flags := "82" // 0x80 = hasName
	name := "4E6F6465" // "Node" in hex
	hex := "1101AA" + pubkey + ts + sig + flags + name
	b := BuildBreakdown(hex)
	assertRange(t, b.Ranges, "Name", 104, 107)
}

// helpers

func rangeLabels(ranges []HexRange) []string {
	out := make([]string, len(ranges))
	for i, r := range ranges {
		out[i] = r.Label
	}
	return out
}

func equalLabels(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if a[i] != b[i] {
			return false
		}
	}
	return true
}

func assertRange(t *testing.T, ranges []HexRange, label string, wantStart, wantEnd int) {
	t.Helper()
	for _, r := range ranges {
		if r.Label == label {
			if r.Start != wantStart || r.End != wantEnd {
				t.Errorf("range %q: want [%d,%d], got [%d,%d]", label, wantStart, wantEnd, r.Start, r.End)
			}
			return
		}
	}
	t.Errorf("range %q not found in %v", label, rangeLabels(ranges))
}

func TestZeroHopDirectHashSize(t *testing.T) {
	// DIRECT (RouteType=2) + REQ (PayloadType=0) → header byte = 0x02

@@ -14,10 +14,6 @@ replace github.com/meshcore-analyzer/geofilter => ../../internal/geofilter

replace github.com/meshcore-analyzer/sigvalidate => ../../internal/sigvalidate

require github.com/meshcore-analyzer/packetpath v0.0.0

replace github.com/meshcore-analyzer/packetpath => ../../internal/packetpath

require (
	github.com/dustin/go-humanize v1.0.1 // indirect
	github.com/google/uuid v1.6.0 // indirect

@@ -148,9 +148,6 @@ func main() {
		stats.TotalTransmissions, stats.TotalObservations, stats.TotalNodes, stats.TotalObservers)
	}

	// Check auto_vacuum mode and optionally migrate (#919)
	checkAutoVacuum(database, cfg, resolvedDB)

	// In-memory packet store
	store := NewPacketStore(database, cfg.PacketStore, cfg.CacheTTL)
	if err := store.Load(); err != nil {
@@ -269,7 +266,6 @@ func main() {
	defer stopEviction()

	// Auto-prune old packets if retention.packetDays is configured
	vacuumPages := cfg.IncrementalVacuumPages()
	var stopPrune func()
	if cfg.Retention != nil && cfg.Retention.PacketDays > 0 {
		days := cfg.Retention.PacketDays
@@ -290,9 +286,6 @@ func main() {
			log.Printf("[prune] error: %v", err)
		} else {
			log.Printf("[prune] deleted %d transmissions older than %d days", n, days)
			if n > 0 {
				runIncrementalVacuum(resolvedDB, vacuumPages)
			}
		}
		for {
			select {
@@ -301,9 +294,6 @@ func main() {
				log.Printf("[prune] error: %v", err)
			} else {
				log.Printf("[prune] deleted %d transmissions older than %d days", n, days)
				if n > 0 {
					runIncrementalVacuum(resolvedDB, vacuumPages)
				}
			}
		case <-pruneDone:
			return
@@ -331,12 +321,10 @@ func main() {
	}()
	time.Sleep(2 * time.Minute) // stagger after packet prune
	database.PruneOldMetrics(metricsDays)
	runIncrementalVacuum(resolvedDB, vacuumPages)
	for {
		select {
		case <-metricsPruneTicker.C:
			database.PruneOldMetrics(metricsDays)
			runIncrementalVacuum(resolvedDB, vacuumPages)
		case <-metricsPruneDone:
			return
		}
@@ -366,12 +354,10 @@ func main() {
	}()
	time.Sleep(3 * time.Minute) // stagger after metrics prune
	database.RemoveStaleObservers(observerDays)
	runIncrementalVacuum(resolvedDB, vacuumPages)
	for {
		select {
		case <-observerPruneTicker.C:
			database.RemoveStaleObservers(observerDays)
			runIncrementalVacuum(resolvedDB, vacuumPages)
		case <-observerPruneDone:
			return
		}
@@ -402,7 +388,6 @@ func main() {
	g := store.graph
	store.mu.RUnlock()
	PruneNeighborEdges(dbPath, g, maxAgeDays)
	runIncrementalVacuum(resolvedDB, vacuumPages)
	for {
		select {
		case <-edgePruneTicker.C:
@@ -410,7 +395,6 @@ func main() {
			g := store.graph
			store.mu.RUnlock()
			PruneNeighborEdges(dbPath, g, maxAgeDays)
			runIncrementalVacuum(resolvedDB, vacuumPages)
		case <-edgePruneDone:
			return
		}

@@ -12,9 +12,9 @@ import (
func TestResolveAmbiguousEdges_GeoProximity(t *testing.T) {
	// Node A at lat=45, lon=-122. Candidate B1 at lat=45.1, lon=-122.1 (close).
	// Candidate B2 at lat=10, lon=10 (far away). Prefix "b0" matches both.
	nodeA := nodeInfo{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB1 := nodeInfo{Role: "repeater", PublicKey: "b0b1eeee", Name: "CloseNode", HasGPS: true, Lat: 45.1, Lon: -122.1}
	nodeB2 := nodeInfo{Role: "repeater", PublicKey: "b0c2ffff", Name: "FarNode", HasGPS: true, Lat: 10.0, Lon: 10.0}
	nodeA := nodeInfo{PublicKey: "aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB1 := nodeInfo{PublicKey: "b0b1eeee", Name: "CloseNode", HasGPS: true, Lat: 45.1, Lon: -122.1}
	nodeB2 := nodeInfo{PublicKey: "b0c2ffff", Name: "FarNode", HasGPS: true, Lat: 10.0, Lon: 10.0}

	pm := buildPrefixMap([]nodeInfo{nodeA, nodeB1, nodeB2})

@@ -62,8 +62,8 @@ func TestResolveAmbiguousEdges_GeoProximity(t *testing.T) {

// Test 2: Ambiguous edge merged with existing resolved edge (count accumulation).
func TestResolveAmbiguousEdges_MergeWithExisting(t *testing.T) {
	nodeA := nodeInfo{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB := nodeInfo{Role: "repeater", PublicKey: "b0b1eeee", Name: "NodeB", HasGPS: true, Lat: 45.1, Lon: -122.1}
	nodeA := nodeInfo{PublicKey: "aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB := nodeInfo{PublicKey: "b0b1eeee", Name: "NodeB", HasGPS: true, Lat: 45.1, Lon: -122.1}

	pm := buildPrefixMap([]nodeInfo{nodeA, nodeB})

@@ -133,9 +133,9 @@ func TestResolveAmbiguousEdges_MergeWithExisting(t *testing.T) {
// Test 3: Ambiguous edge left as-is when resolution fails.
func TestResolveAmbiguousEdges_FailsNoChange(t *testing.T) {
	// Two candidates, neither has GPS, no affinity data — resolution falls through.
	nodeA := nodeInfo{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeA"}
	nodeB1 := nodeInfo{Role: "repeater", PublicKey: "b0b1eeee", Name: "B1"}
	nodeB2 := nodeInfo{Role: "repeater", PublicKey: "b0c2ffff", Name: "B2"}
	nodeA := nodeInfo{PublicKey: "aaaa1111", Name: "NodeA"}
	nodeB1 := nodeInfo{PublicKey: "b0b1eeee", Name: "B1"}
	nodeB2 := nodeInfo{PublicKey: "b0c2ffff", Name: "B2"}

	pm := buildPrefixMap([]nodeInfo{nodeA, nodeB1, nodeB2})

@@ -175,7 +175,7 @@ func TestResolveAmbiguousEdges_FailsNoChange(t *testing.T) {

// Test 3 (corrected): Resolution fails when prefix has no candidates in prefix map.
func TestResolveAmbiguousEdges_NoMatch(t *testing.T) {
	nodeA := nodeInfo{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeA"}
	nodeA := nodeInfo{PublicKey: "aaaa1111", Name: "NodeA"}
	// pm has no entries matching prefix "zz"
	pm := buildPrefixMap([]nodeInfo{nodeA})

@@ -215,8 +215,8 @@ func TestResolveAmbiguousEdges_NoMatch(t *testing.T) {
// Test 6: Phase 1 edge collection unchanged (no regression).
func TestPhase1EdgeCollection_Unchanged(t *testing.T) {
	// Build a simple graph and verify non-ambiguous edges are not touched.
	nodeA := nodeInfo{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB := nodeInfo{Role: "repeater", PublicKey: "bbbb2222", Name: "NodeB", HasGPS: true, Lat: 45.1, Lon: -122.1}
	nodeA := nodeInfo{PublicKey: "aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB := nodeInfo{PublicKey: "bbbb2222", Name: "NodeB", HasGPS: true, Lat: 45.1, Lon: -122.1}

	ts := time.Now().UTC().Format(time.RFC3339)
	payloadType := 4
@@ -232,7 +232,7 @@ func TestPhase1EdgeCollection_Unchanged(t *testing.T) {
		Observations: obs,
	}

	store := ngTestStore([]nodeInfo{nodeA, nodeB, {Role: "repeater", PublicKey: "cccc3333", Name: "Observer"}}, []*StoreTx{tx})
	store := ngTestStore([]nodeInfo{nodeA, nodeB, {PublicKey: "cccc3333", Name: "Observer"}}, []*StoreTx{tx})
	graph := BuildFromStore(store)

	edges := graph.Neighbors("aaaa1111")
@@ -255,8 +255,8 @@ func TestPhase1EdgeCollection_Unchanged(t *testing.T) {

// Test 7: Merge preserves higher LastSeen timestamp.
func TestResolveAmbiguousEdges_PreservesHigherLastSeen(t *testing.T) {
	nodeA := nodeInfo{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB := nodeInfo{Role: "repeater", PublicKey: "b0b1eeee", Name: "NodeB", HasGPS: true, Lat: 45.1, Lon: -122.1}
	nodeA := nodeInfo{PublicKey: "aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB := nodeInfo{PublicKey: "b0b1eeee", Name: "NodeB", HasGPS: true, Lat: 45.1, Lon: -122.1}
	pm := buildPrefixMap([]nodeInfo{nodeA, nodeB})

	graph := NewNeighborGraph()
@@ -307,10 +307,10 @@ func TestResolveAmbiguousEdges_PreservesHigherLastSeen(t *testing.T) {

// Test 5: Integration — node with both 1-byte and 2-byte prefix observations shows single entry.
func TestIntegration_DualPrefixSingleNeighbor(t *testing.T) {
	nodeA := nodeInfo{Role: "repeater", PublicKey: "aaaa1111aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB := nodeInfo{Role: "repeater", PublicKey: "b0b1eeeeb0b1eeee", Name: "NodeB", HasGPS: true, Lat: 45.1, Lon: -122.1}
	nodeB2 := nodeInfo{Role: "repeater", PublicKey: "b0c2ffffb0c2ffff", Name: "NodeB2", HasGPS: true, Lat: 10.0, Lon: 10.0}
	observer := nodeInfo{Role: "repeater", PublicKey: "cccc3333cccc3333", Name: "Observer"}
	nodeA := nodeInfo{PublicKey: "aaaa1111aaaa1111", Name: "NodeA", HasGPS: true, Lat: 45.0, Lon: -122.0}
	nodeB := nodeInfo{PublicKey: "b0b1eeeeb0b1eeee", Name: "NodeB", HasGPS: true, Lat: 45.1, Lon: -122.1}
	nodeB2 := nodeInfo{PublicKey: "b0c2ffffb0c2ffff", Name: "NodeB2", HasGPS: true, Lat: 10.0, Lon: 10.0}
	observer := nodeInfo{PublicKey: "cccc3333cccc3333", Name: "Observer"}

	ts := time.Now().UTC().Format(time.RFC3339)
	pt := 4

@@ -86,9 +86,9 @@ func TestBuildNeighborGraph_EmptyStore(t *testing.T) {
func TestBuildNeighborGraph_AdvertSingleHopPath(t *testing.T) {
	// ADVERT from X, path=["R1_prefix"] → edges: X↔R1 and Observer↔R1
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "r1aabbcc", Name: "R1"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "r1aabbcc", Name: "R1"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 4, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `["r1aa"]`, nowStr, ngFloatPtr(-10)),
@@ -132,10 +132,10 @@ func TestBuildNeighborGraph_AdvertSingleHopPath(t *testing.T) {
func TestBuildNeighborGraph_AdvertMultiHopPath(t *testing.T) {
	// ADVERT from X, path=["R1","R2"] → X↔R1 and Observer↔R2
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "r1aabbcc", Name: "R1"},
		{Role: "repeater", PublicKey: "r2ddeeff", Name: "R2"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "r1aabbcc", Name: "R1"},
		{PublicKey: "r2ddeeff", Name: "R2"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 4, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `["r1aa","r2dd"]`, nowStr, nil),
@@ -170,8 +170,8 @@ func TestBuildNeighborGraph_AdvertMultiHopPath(t *testing.T) {
func TestBuildNeighborGraph_AdvertZeroHop(t *testing.T) {
	// ADVERT from X, path=[] → X↔Observer direct edge
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 4, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `[]`, nowStr, nil),
@@ -195,8 +195,8 @@ func TestBuildNeighborGraph_AdvertZeroHop(t *testing.T) {
func TestBuildNeighborGraph_NonAdvertEmptyPath(t *testing.T) {
	// Non-ADVERT, path=[] → no edges
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 2, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `[]`, nowStr, nil),
@@ -212,10 +212,10 @@ func TestBuildNeighborGraph_NonAdvertEmptyPath(t *testing.T) {
func TestBuildNeighborGraph_NonAdvertOnlyObserverEdge(t *testing.T) {
	// Non-ADVERT with path=["R1","R2"] → only Observer↔R2, NO originator edge
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "r1aabbcc", Name: "R1"},
		{Role: "repeater", PublicKey: "r2ddeeff", Name: "R2"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "r1aabbcc", Name: "R1"},
		{PublicKey: "r2ddeeff", Name: "R2"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 2, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `["r1aa","r2dd"]`, nowStr, nil),
@@ -236,9 +236,9 @@ func TestBuildNeighborGraph_NonAdvertOnlyObserverEdge(t *testing.T) {
func TestBuildNeighborGraph_NonAdvertSingleHop(t *testing.T) {
	// Non-ADVERT with path=["R1"] → Observer↔R1 only
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "r1aabbcc", Name: "R1"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "r1aabbcc", Name: "R1"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 2, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `["r1aa"]`, nowStr, nil),
@@ -259,10 +259,10 @@ func TestBuildNeighborGraph_NonAdvertSingleHop(t *testing.T) {
func TestBuildNeighborGraph_HashCollision(t *testing.T) {
	// Two nodes share prefix "a3" → ambiguous edge
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "a3bb1111", Name: "CandidateA"},
		{Role: "repeater", PublicKey: "a3bb2222", Name: "CandidateB"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "a3bb1111", Name: "CandidateA"},
		{PublicKey: "a3bb2222", Name: "CandidateB"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 4, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `["a3bb"]`, nowStr, nil),
@@ -308,13 +308,13 @@ func TestBuildNeighborGraph_ConfidenceAutoResolve(t *testing.T) {
	// CandidateB has no known neighbors (Jaccard = 0).
	// An ambiguous edge X↔prefix "a3" with candidates [A, B] should auto-resolve to A.
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "n1111111", Name: "N1"},
		{Role: "repeater", PublicKey: "n2222222", Name: "N2"},
		{Role: "repeater", PublicKey: "n3333333", Name: "N3"},
		{Role: "repeater", PublicKey: "a3001111", Name: "CandidateA"},
		{Role: "repeater", PublicKey: "a3002222", Name: "CandidateB"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "n1111111", Name: "N1"},
		{PublicKey: "n2222222", Name: "N2"},
		{PublicKey: "n3333333", Name: "N3"},
		{PublicKey: "a3001111", Name: "CandidateA"},
		{PublicKey: "a3002222", Name: "CandidateB"},
		{PublicKey: "obs00001", Name: "Observer"},
	}

	// Create resolved edges: X↔N1, X↔N2, X↔N3, A↔N1, A↔N2, A↔N3
@@ -373,11 +373,11 @@ func TestBuildNeighborGraph_ConfidenceAutoResolve(t *testing.T) {
func TestBuildNeighborGraph_EqualScoresAmbiguous(t *testing.T) {
	// Two candidates with identical neighbor sets → should NOT auto-resolve.
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "n1111111", Name: "N1"},
		{Role: "repeater", PublicKey: "a3001111", Name: "CandidateA"},
		{Role: "repeater", PublicKey: "a3002222", Name: "CandidateB"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "n1111111", Name: "N1"},
		{PublicKey: "a3001111", Name: "CandidateA"},
		{PublicKey: "a3002222", Name: "CandidateB"},
		{PublicKey: "obs00001", Name: "Observer"},
	}

	var txs []*StoreTx
@@ -425,8 +425,8 @@ func TestBuildNeighborGraph_EqualScoresAmbiguous(t *testing.T) {
func TestBuildNeighborGraph_ObserverSelfEdgeGuard(t *testing.T) {
	// Observer's own prefix in path → should NOT create self-edge.
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 4, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `["obs0"]`, nowStr, nil),
@@ -445,8 +445,8 @@ func TestBuildNeighborGraph_ObserverSelfEdgeGuard(t *testing.T) {
func TestBuildNeighborGraph_OrphanPrefix(t *testing.T) {
	// Path contains prefix matching zero nodes → edge recorded as unresolved.
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 4, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `["ff99"]`, nowStr, nil),
@@ -506,9 +506,9 @@ func TestAffinityScore_StaleAndLow(t *testing.T) {

func TestBuildNeighborGraph_CountAccumulation(t *testing.T) {
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "r1aabbcc", Name: "R1"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "r1aabbcc", Name: "R1"},
		{PublicKey: "obs00001", Name: "Observer"},
	}

	var txs []*StoreTx
@@ -535,10 +535,10 @@ func TestBuildNeighborGraph_CountAccumulation(t *testing.T) {

func TestBuildNeighborGraph_MultipleObservers(t *testing.T) {
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "r1aabbcc", Name: "R1"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Obs1"},
		{Role: "repeater", PublicKey: "obs00002", Name: "Obs2"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "r1aabbcc", Name: "R1"},
		{PublicKey: "obs00001", Name: "Obs1"},
		{PublicKey: "obs00002", Name: "Obs2"},
	}

	tx := ngMakeTx(1, 4, ngFromNodeJSON("aaaa1111"), []*StoreObs{
@@ -565,9 +565,9 @@ func TestBuildNeighborGraph_MultipleObservers(t *testing.T) {

func TestBuildNeighborGraph_TimeDecayOldObservations(t *testing.T) {
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "r1aabbcc", Name: "R1"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "r1aabbcc", Name: "R1"},
		{PublicKey: "obs00001", Name: "Observer"},
	}

	tx := ngMakeTx(1, 4, ngFromNodeJSON("aaaa1111"), []*StoreObs{
@@ -592,10 +592,10 @@ func TestBuildNeighborGraph_TimeDecayOldObservations(t *testing.T) {
func TestBuildNeighborGraph_ADVERTOnlyConstraint(t *testing.T) {
	// Non-ADVERT: should NOT create originator↔path[0] edge, only observer↔path[last].
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeX"},
		{Role: "repeater", PublicKey: "r1aabbcc", Name: "R1"},
		{Role: "repeater", PublicKey: "r2ddeeff", Name: "R2"},
		{Role: "repeater", PublicKey: "obs00001", Name: "Observer"},
		{PublicKey: "aaaa1111", Name: "NodeX"},
		{PublicKey: "r1aabbcc", Name: "R1"},
		{PublicKey: "r2ddeeff", Name: "R2"},
		{PublicKey: "obs00001", Name: "Observer"},
	}
	tx := ngMakeTx(1, 2, ngFromNodeJSON("aaaa1111"), []*StoreObs{
		ngMakeObs("obs00001", `["r1aa","r2dd"]`, nowStr, nil),
@@ -631,9 +631,9 @@ func ngPubKeyJSON(pubkey string) string {
func TestBuildNeighborGraph_AdvertPubKeyField(t *testing.T) {
	// Real ADVERTs use "pubKey", not "from_node". Verify the builder handles it.
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "99bf37abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234", Name: "Originator"},
		{Role: "repeater", PublicKey: "r1aabbccdd001122334455667788990011223344556677889900112233445566", Name: "R1"},
		{Role: "repeater", PublicKey: "obs0000100112233445566778899001122334455667788990011223344556677", Name: "Observer"},
		{PublicKey: "99bf37abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234", Name: "Originator"},
		{PublicKey: "r1aabbccdd001122334455667788990011223344556677889900112233445566", Name: "R1"},
		{PublicKey: "obs0000100112233445566778899001122334455667788990011223344556677", Name: "Observer"},
	}
	tx := ngMakeTx(1, 4, ngPubKeyJSON("99bf37abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234"), []*StoreObs{
		ngMakeObs("obs0000100112233445566778899001122334455667788990011223344556677", `["r1"]`, nowStr, ngFloatPtr(-8.5)),
@@ -666,10 +666,10 @@ func TestBuildNeighborGraph_OneByteHashPrefixes(t *testing.T) {
	// Real-world scenario: 1-byte hash prefixes with multiple candidates.
	// Should create edges (possibly ambiguous) rather than empty graph.
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "c0dedad400000000000000000000000000000000000000000000000000000001", Name: "NodeC0-1"},
		{Role: "repeater", PublicKey: "c0dedad900000000000000000000000000000000000000000000000000000002", Name: "NodeC0-2"},
		{Role: "repeater", PublicKey: "a3bbccdd00000000000000000000000000000000000000000000000000000003", Name: "Originator"},
		{Role: "repeater", PublicKey: "obs1234500000000000000000000000000000000000000000000000000000004", Name: "Observer"},
		{PublicKey: "c0dedad400000000000000000000000000000000000000000000000000000001", Name: "NodeC0-1"},
		{PublicKey: "c0dedad900000000000000000000000000000000000000000000000000000002", Name: "NodeC0-2"},
		{PublicKey: "a3bbccdd00000000000000000000000000000000000000000000000000000003", Name: "Originator"},
		{PublicKey: "obs1234500000000000000000000000000000000000000000000000000000004", Name: "Observer"},
	}
	// ADVERT from Originator with 1-byte path hop "c0"
	tx := ngMakeTx(1, 4, ngPubKeyJSON("a3bbccdd00000000000000000000000000000000000000000000000000000003"), []*StoreObs{
@@ -809,10 +809,10 @@ func TestExtractFromNode_UsesCachedParse(t *testing.T) {
func BenchmarkBuildFromStore(b *testing.B) {
	// Simulate a dataset with many packets and repeated pubkeys
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aaaa1111", Name: "NodeA"},
		{Role: "repeater", PublicKey: "bbbb2222", Name: "NodeB"},
		{Role: "repeater", PublicKey: "cccc3333", Name: "NodeC"},
		{Role: "repeater", PublicKey: "dddd4444", Name: "NodeD"},
		{PublicKey: "aaaa1111", Name: "NodeA"},
		{PublicKey: "bbbb2222", Name: "NodeB"},
		{PublicKey: "cccc3333", Name: "NodeC"},
		{PublicKey: "dddd4444", Name: "NodeD"},
	}
	const numPackets = 1000
	packets := make([]*StoreTx, 0, numPackets)

@@ -38,7 +38,7 @@ func createTestDBWithSchema(t *testing.T) (*DB, string) {
		observer_id TEXT, observer_name TEXT, direction TEXT,
		snr REAL, rssi REAL, score INTEGER,
		path_json TEXT, timestamp TEXT,
		resolved_path TEXT, raw_hex TEXT
		resolved_path TEXT
	)`)
	conn.Exec(`CREATE TABLE nodes (
		public_key TEXT PRIMARY KEY, name TEXT, role TEXT,
@@ -58,8 +58,8 @@ func createTestDBWithSchema(t *testing.T) (*DB, string) {
func TestResolvePathForObs(t *testing.T) {
	// Build a prefix map with known nodes
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aabbccddee1234567890aabbccddee1234567890aabbccddee1234567890aabb", Name: "Node-AA"},
		{Role: "repeater", PublicKey: "bbccddee1234567890aabbccddee1234567890aabbccddee1234567890aabb11", Name: "Node-BB"},
		{PublicKey: "aabbccddee1234567890aabbccddee1234567890aabbccddee1234567890aabb", Name: "Node-AA"},
		{PublicKey: "bbccddee1234567890aabbccddee1234567890aabbccddee1234567890aabb11", Name: "Node-BB"},
	}
	pm := buildPrefixMap(nodes)
	graph := NewNeighborGraph()
@@ -97,7 +97,7 @@ func TestResolvePathForObs_EmptyPath(t *testing.T) {

func TestResolvePathForObs_Unresolvable(t *testing.T) {
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aabbccddee1234567890aabbccddee1234567890aabbccddee1234567890aabb", Name: "Node-AA"},
		{PublicKey: "aabbccddee1234567890aabbccddee1234567890aabbccddee1234567890aabb", Name: "Node-AA"},
	}
	pm := buildPrefixMap(nodes)

@@ -264,7 +264,7 @@ func TestEnsureResolvedPathColumn(t *testing.T) {
	conn, _ := sql.Open("sqlite", "file:"+dbPath+"?_journal_mode=WAL")
	conn.Exec(`CREATE TABLE observations (
		id INTEGER PRIMARY KEY, transmission_id INTEGER,
		observer_id TEXT, path_json TEXT, timestamp TEXT, raw_hex TEXT
		observer_id TEXT, path_json TEXT, timestamp TEXT
	)`)
	conn.Close()

@@ -437,8 +437,8 @@ func TestExtractEdgesFromObs_NonAdvertNoPath(t *testing.T) {

func TestExtractEdgesFromObs_WithPath(t *testing.T) {
	nodes := []nodeInfo{
		{Role: "repeater", PublicKey: "aabbccddee1234567890aabbccddee1234567890aabbccddee1234567890aabb", Name: "Node-AA"},
		{Role: "repeater", PublicKey: "ffgghhii1234567890aabbccddee1234567890aabbccddee1234567890aabb11", Name: "Node-FF"},
		{PublicKey: "aabbccddee1234567890aabbccddee1234567890aabbccddee1234567890aabb", Name: "Node-AA"},
		{PublicKey: "ffgghhii1234567890aabbccddee1234567890aabbccddee1234567890aabb11", Name: "Node-FF"},
	}
	pm := buildPrefixMap(nodes)

@@ -1,427 +0,0 @@
package main

import (
	"encoding/hex"
	"encoding/json"
	"math"
	"net/http"
	"sort"
	"strings"
	"time"
)

// ─── Path Inspector ────────────────────────────────────────────────────────────
// POST /api/paths/inspect — beam-search scorer for prefix path candidates.
// Spec: issue #944 §2.1–2.5.

// pathInspectRequest is the JSON body for the inspect endpoint.
type pathInspectRequest struct {
	Prefixes []string            `json:"prefixes"`
	Context  *pathInspectContext `json:"context,omitempty"`
	Limit    int                 `json:"limit,omitempty"`
}

type pathInspectContext struct {
	ObserverID string `json:"observerId,omitempty"`
	Since      string `json:"since,omitempty"`
	Until      string `json:"until,omitempty"`
}

// pathCandidate is one scored candidate path in the response.
type pathCandidate struct {
	Path        []string     `json:"path"`
	Names       []string     `json:"names"`
	Score       float64      `json:"score"`
	Speculative bool         `json:"speculative"`
	Evidence    pathEvidence `json:"evidence"`
}

type pathEvidence struct {
	PerHop []hopEvidence `json:"perHop"`
}

type hopEvidence struct {
	Prefix               string           `json:"prefix"`
	CandidatesConsidered int              `json:"candidatesConsidered"`
	Chosen               string           `json:"chosen"`
	EdgeWeight           float64          `json:"edgeWeight"`
	Alternatives         []hopAlternative `json:"alternatives,omitempty"`
}

// hopAlternative shows a candidate that was considered but not chosen for this hop.
type hopAlternative struct {
	PublicKey string  `json:"publicKey"`
	Name      string  `json:"name"`
	Score     float64 `json:"score"`
}

type pathInspectResponse struct {
	Candidates []pathCandidate        `json:"candidates"`
	Input      map[string]interface{} `json:"input"`
	Stats      map[string]interface{} `json:"stats"`
}

// beamEntry represents a partial path being extended during beam search.
type beamEntry struct {
	pubkeys  []string
	names    []string
	evidence []hopEvidence
	score    float64 // product of per-hop scores (pre-geometric-mean)
}

const (
	beamWidth            = 20
	maxInputHops         = 64
	maxPrefixBytes       = 3
	maxRequestItems      = 64
	geoMaxKm             = 50.0
	hopScoreFloor        = 0.05
	speculativeThreshold = 0.7
	inspectCacheTTL      = 30 * time.Second
	inspectBodyLimit     = 4096
)

// Weights per spec §2.3.
const (
	wEdge        = 0.35
	wGeo         = 0.20
	wRecency     = 0.15
	wSelectivity = 0.30
)
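// The four weights sum to 1.0 (0.35 + 0.20 + 0.15 + 0.30), so a hop with a
// perfect edge, plausible geography, fresh recency, and a unique prefix
// (selectivity 1/1) scores exactly 1.0.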
func (s *Server) handlePathInspect(w http.ResponseWriter, r *http.Request) {
	// Body limit per spec §2.1.
	r.Body = http.MaxBytesReader(w, r.Body, inspectBodyLimit)

	var req pathInspectRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		http.Error(w, `{"error":"invalid JSON"}`, http.StatusBadRequest)
		return
	}

	// Validate prefixes.
	if len(req.Prefixes) == 0 {
		http.Error(w, `{"error":"prefixes required"}`, http.StatusBadRequest)
		return
	}
	if len(req.Prefixes) > maxRequestItems {
		http.Error(w, `{"error":"too many prefixes (max 64)"}`, http.StatusBadRequest)
		return
	}

	// Normalize + validate each prefix.
	prefixByteLen := -1
	for i, p := range req.Prefixes {
		p = strings.ToLower(strings.TrimSpace(p))
		req.Prefixes[i] = p
		if len(p) == 0 || len(p)%2 != 0 {
			http.Error(w, `{"error":"prefixes must be even-length hex"}`, http.StatusBadRequest)
			return
		}
		if _, err := hex.DecodeString(p); err != nil {
			http.Error(w, `{"error":"prefixes must be valid hex"}`, http.StatusBadRequest)
			return
		}
		byteLen := len(p) / 2
		if byteLen > maxPrefixBytes {
			http.Error(w, `{"error":"prefix exceeds 3 bytes"}`, http.StatusBadRequest)
			return
		}
		if prefixByteLen == -1 {
			prefixByteLen = byteLen
		} else if byteLen != prefixByteLen {
			http.Error(w, `{"error":"mixed prefix lengths not allowed"}`, http.StatusBadRequest)
			return
		}
	}

	limit := req.Limit
	if limit <= 0 {
		limit = 10
	}
	if limit > 50 {
		limit = 50
	}

	// Check cache.
	cacheKey := s.store.inspectCacheKey(req)
	s.store.inspectMu.RLock()
	if cached, ok := s.store.inspectCache[cacheKey]; ok && time.Now().Before(cached.expiresAt) {
		s.store.inspectMu.RUnlock()
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(cached.data)
		return
	}
	s.store.inspectMu.RUnlock()

	// Snapshot data under read lock.
	nodes, pm := s.store.getCachedNodesAndPM()

	// Build pubkey→nodeInfo map for O(1) geo lookup in scorer.
	nodeByPK := make(map[string]*nodeInfo, len(nodes))
	for i := range nodes {
		nodeByPK[strings.ToLower(nodes[i].PublicKey)] = &nodes[i]
	}

	// Get neighbor graph; handle cold start.
	graph := s.store.graph
	if graph == nil || graph.IsStale() {
		rebuilt := make(chan struct{})
		go func() {
			s.store.ensureNeighborGraph()
			close(rebuilt)
		}()
		select {
		case <-rebuilt:
			graph = s.store.graph
		case <-time.After(2 * time.Second):
			w.Header().Set("Content-Type", "application/json")
			w.WriteHeader(http.StatusServiceUnavailable)
			json.NewEncoder(w).Encode(map[string]interface{}{"retry": true})
			return
		}
		if graph == nil {
			w.Header().Set("Content-Type", "application/json")
			w.WriteHeader(http.StatusServiceUnavailable)
			json.NewEncoder(w).Encode(map[string]interface{}{"retry": true})
			return
		}
	}

	now := time.Now()
	start := now

	// Beam search.
	beam := s.store.beamSearch(req.Prefixes, pm, graph, nodeByPK, now)

	// Sort by score descending, take top limit.
	sortBeam(beam)
	if len(beam) > limit {
		beam = beam[:limit]
	}

	// Build response with per-hop alternatives (spec §2.7, M2 fix).
	candidates := make([]pathCandidate, 0, len(beam))
	for _, entry := range beam {
		nHops := len(entry.pubkeys)
		var score float64
		if nHops > 0 {
			score = math.Pow(entry.score, 1.0/float64(nHops))
		}

		// Populate per-hop alternatives: other candidates at each hop that weren't chosen.
		evidence := make([]hopEvidence, len(entry.evidence))
		copy(evidence, entry.evidence)
		for hi, ev := range evidence {
			if hi >= len(req.Prefixes) {
				break
			}
			prefix := req.Prefixes[hi]
			allCands := pm.m[prefix]
			var alts []hopAlternative
			for _, c := range allCands {
				if !canAppearInPath(c.Role) || c.PublicKey == ev.Chosen {
					continue
				}
				// Score this alternative in context of the partial path up to this hop.
				var partialEntry beamEntry
				if hi > 0 {
					partialEntry = beamEntry{pubkeys: entry.pubkeys[:hi], names: entry.names[:hi], score: 1.0}
				}
				altScore := s.store.scoreHop(partialEntry, c, ev.CandidatesConsidered, graph, nodeByPK, now, hi)
				alts = append(alts, hopAlternative{PublicKey: c.PublicKey, Name: c.Name, Score: math.Round(altScore*1000) / 1000})
			}
			// Sort alts by score desc, cap at 5.
			sort.Slice(alts, func(i, j int) bool { return alts[i].Score > alts[j].Score })
			if len(alts) > 5 {
				alts = alts[:5]
			}
			evidence[hi] = hopEvidence{
				Prefix:               ev.Prefix,
				CandidatesConsidered: ev.CandidatesConsidered,
				Chosen:               ev.Chosen,
				EdgeWeight:           ev.EdgeWeight,
				Alternatives:         alts,
			}
		}

		candidates = append(candidates, pathCandidate{
			Path:        entry.pubkeys,
			Names:       entry.names,
			Score:       math.Round(score*1000) / 1000,
			Speculative: score < speculativeThreshold,
			Evidence:    pathEvidence{PerHop: evidence},
		})
	}

	elapsed := time.Since(start).Milliseconds()
	resp := pathInspectResponse{
		Candidates: candidates,
		Input: map[string]interface{}{
			"prefixes": req.Prefixes,
			"hops":     len(req.Prefixes),
		},
		Stats: map[string]interface{}{
			"beamWidth":     beamWidth,
			"expansionsRun": len(req.Prefixes) * beamWidth,
			"elapsedMs":     elapsed,
		},
	}

	// Cache result (and evict stale entries).
	s.store.inspectMu.Lock()
	if s.store.inspectCache == nil {
		s.store.inspectCache = make(map[string]*inspectCachedResult)
	}
	now2 := time.Now()
	for k, v := range s.store.inspectCache {
		if now2.After(v.expiresAt) {
			delete(s.store.inspectCache, k)
		}
	}
	s.store.inspectCache[cacheKey] = &inspectCachedResult{
		data:      resp,
		expiresAt: now2.Add(inspectCacheTTL),
	}
	s.store.inspectMu.Unlock()

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(resp)
}

type inspectCachedResult struct {
	data      pathInspectResponse
	expiresAt time.Time
}

func (s *PacketStore) inspectCacheKey(req pathInspectRequest) string {
	key := strings.Join(req.Prefixes, ",")
	if req.Context != nil {
		key += "|" + req.Context.ObserverID + "|" + req.Context.Since + "|" + req.Context.Until
	}
	return key
}

func (s *PacketStore) beamSearch(prefixes []string, pm *prefixMap, graph *NeighborGraph, nodeByPK map[string]*nodeInfo, now time.Time) []beamEntry {
	// Start with empty beam.
	beam := []beamEntry{{pubkeys: nil, names: nil, evidence: nil, score: 1.0}}

	for hopIdx, prefix := range prefixes {
		candidates := pm.m[prefix]
		// Filter by role at lookup time (spec §2.2 step 2).
		var filtered []nodeInfo
		for _, c := range candidates {
			if canAppearInPath(c.Role) {
				filtered = append(filtered, c)
			}
		}

		candidateCount := len(filtered)
		if candidateCount == 0 {
			// No candidates for this hop — beam dies.
			return nil
		}

		var nextBeam []beamEntry
		for _, entry := range beam {
			for _, cand := range filtered {
				hopScore := s.scoreHop(entry, cand, candidateCount, graph, nodeByPK, now, hopIdx)
				if hopScore < hopScoreFloor {
					hopScore = hopScoreFloor
				}

				newEntry := beamEntry{
					pubkeys: append(append([]string{}, entry.pubkeys...), cand.PublicKey),
					names:   append(append([]string{}, entry.names...), cand.Name),
					evidence: append(append([]hopEvidence{}, entry.evidence...), hopEvidence{
						Prefix:               prefix,
						CandidatesConsidered: candidateCount,
						Chosen:               cand.PublicKey,
						EdgeWeight:           hopScore,
					}),
					score: entry.score * hopScore,
				}
				nextBeam = append(nextBeam, newEntry)
			}
		}

		// Prune to beam width.
		sortBeam(nextBeam)
		if len(nextBeam) > beamWidth {
			nextBeam = nextBeam[:beamWidth]
		}
		beam = nextBeam
	}

	return beam
}
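// Cost sketch: each hop expands at most beamWidth × len(filtered) partial paths
// before pruning back to beamWidth, so the search stays bounded no matter how
// ambiguous the prefixes are. An illustrative call, assuming pm and graph were
// obtained via getCachedNodesAndPM and ensureNeighborGraph:
//
//	beam := store.beamSearch([]string{"a3", "c0"}, pm, graph, nodeByPK, time.Now())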
|
||||
|
||||
func (s *PacketStore) scoreHop(entry beamEntry, cand nodeInfo, candidateCount int, graph *NeighborGraph, nodeByPK map[string]*nodeInfo, now time.Time, hopIdx int) float64 {
|
||||
var edgeScore float64
|
||||
var geoScore float64 = 1.0
|
||||
var recencyScore float64 = 1.0
|
||||
|
||||
if hopIdx == 0 || len(entry.pubkeys) == 0 {
|
||||
// First hop: no prior node to compare against.
|
||||
edgeScore = 1.0
|
||||
} else {
|
||||
lastPK := entry.pubkeys[len(entry.pubkeys)-1]
|
||||
|
||||
// Single scan over neighbors for both edge weight and recency.
|
||||
edges := graph.Neighbors(lastPK)
|
||||
var foundEdge *NeighborEdge
|
||||
for _, e := range edges {
|
||||
peer := e.NodeA
|
||||
if strings.EqualFold(peer, lastPK) {
|
||||
peer = e.NodeB
|
||||
}
|
||||
if strings.EqualFold(peer, cand.PublicKey) {
|
||||
foundEdge = e
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if foundEdge != nil {
|
||||
edgeScore = foundEdge.Score(now)
|
||||
hoursSince := now.Sub(foundEdge.LastSeen).Hours()
|
||||
if hoursSince <= 24 {
|
||||
recencyScore = 1.0
|
||||
} else {
|
||||
recencyScore = math.Max(0.1, 24.0/hoursSince)
|
||||
}
|
||||
} else {
|
||||
edgeScore = 0
|
||||
recencyScore = 0
|
||||
}
|
||||
|
||||
// Geographic plausibility.
|
||||
prevNode := nodeByPK[strings.ToLower(lastPK)]
|
||||
if prevNode != nil && prevNode.HasGPS && cand.HasGPS {
|
||||
dist := haversineKm(prevNode.Lat, prevNode.Lon, cand.Lat, cand.Lon)
|
||||
if dist > geoMaxKm {
|
||||
geoScore = math.Max(0.1, geoMaxKm/dist)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Prefix selectivity.
|
||||
selectivityScore := 1.0 / float64(candidateCount)
|
||||
|
||||
return wEdge*edgeScore + wGeo*geoScore + wRecency*recencyScore + wSelectivity*selectivityScore
|
||||
}
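
// Note: the weight constants above (wEdge, wGeo, wRecency, wSelectivity) are
// defined outside this hunk. From the expected-score arithmetic in
// TestScoreHop_FirstHop further down, they appear to be 0.35, 0.20, 0.15 and
// 0.30 — summing to 1.0, so component scores in [0,1] keep each hop score
// within [hopScoreFloor, 1.0].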

func sortBeam(beam []beamEntry) {
	sort.Slice(beam, func(i, j int) bool {
		return beam[i].score > beam[j].score
	})
}

// ensureNeighborGraph triggers a graph rebuild if nil or stale.
func (s *PacketStore) ensureNeighborGraph() {
	if s.graph != nil && !s.graph.IsStale() {
		return
	}
	g := BuildFromStore(s)
	s.graph = g
}
@@ -1,308 +0,0 @@
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"math"
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"
	"time"
)

// ─── Unit tests for path inspector (issue #944) ────────────────────────────────

func TestScoreHop_EdgeWeight(t *testing.T) {
	store := &PacketStore{}
	graph := NewNeighborGraph()
	now := time.Now()

	// Add an edge between A and B.
	graph.mu.Lock()
	edge := &NeighborEdge{
		NodeA: "aaaa", NodeB: "bbbb",
		Count: 50, LastSeen: now.Add(-1 * time.Hour),
		Observers: map[string]bool{"obs1": true},
	}
	key := edgeKey{"aaaa", "bbbb"}
	graph.edges[key] = edge
	graph.byNode["aaaa"] = append(graph.byNode["aaaa"], edge)
	graph.byNode["bbbb"] = append(graph.byNode["bbbb"], edge)
	graph.mu.Unlock()

	entry := beamEntry{pubkeys: []string{"aaaa"}, names: []string{"NodeA"}}
	cand := nodeInfo{PublicKey: "bbbb", Name: "NodeB", Role: "repeater"}

	score := store.scoreHop(entry, cand, 2, graph, nil, now, 1)

	// With edge present, edgeScore > 0. With 2 candidates, selectivity = 0.5.
	// Anti-tautology: if we zero out edge weight constant, score would change.
	if score <= 0.05 {
		t.Errorf("expected score > floor, got %f", score)
	}

	// No edge: score should be lower.
	candNoEdge := nodeInfo{PublicKey: "cccc", Name: "NodeC", Role: "repeater"}
	scoreNoEdge := store.scoreHop(entry, candNoEdge, 2, graph, nil, now, 1)
	if scoreNoEdge >= score {
		t.Errorf("expected no-edge score (%f) < edge score (%f)", scoreNoEdge, score)
	}
}

func TestScoreHop_FirstHop(t *testing.T) {
	store := &PacketStore{}
	graph := NewNeighborGraph()
	now := time.Now()

	entry := beamEntry{pubkeys: nil, names: nil}
	cand := nodeInfo{PublicKey: "aaaa", Name: "NodeA", Role: "repeater"}

	score := store.scoreHop(entry, cand, 3, graph, nil, now, 0)
	// First hop: edgeScore=1.0, geoScore=1.0, recencyScore=1.0, selectivity=1/3
	// = 0.35*1 + 0.20*1 + 0.15*1 + 0.30*(1/3) = 0.35+0.20+0.15+0.10 = 0.80
	expected := 0.35 + 0.20 + 0.15 + 0.30/3.0
	if score < expected-0.01 || score > expected+0.01 {
		t.Errorf("expected ~%f, got %f", expected, score)
	}
}

func TestScoreHop_GeoPlausibility(t *testing.T) {
	store := &PacketStore{}
	store.nodeCache = []nodeInfo{
		{PublicKey: "aaaa", Name: "A", Role: "repeater", Lat: 37.0, Lon: -122.0, HasGPS: true},
		{PublicKey: "bbbb", Name: "B", Role: "repeater", Lat: 37.01, Lon: -122.01, HasGPS: true}, // ~1.4km
		{PublicKey: "cccc", Name: "C", Role: "repeater", Lat: 40.0, Lon: -120.0, HasGPS: true},   // ~400km
	}
	store.nodePM = buildPrefixMap(store.nodeCache)
	store.nodeCacheTime = time.Now()

	graph := NewNeighborGraph()
	now := time.Now()

	nodeByPK := map[string]*nodeInfo{
		"aaaa": &store.nodeCache[0],
		"bbbb": &store.nodeCache[1],
		"cccc": &store.nodeCache[2],
	}

	entry := beamEntry{pubkeys: []string{"aaaa"}, names: []string{"A"}}

	// Close node should score higher than far node (geo component).
	scoreClose := store.scoreHop(entry, store.nodeCache[1], 2, graph, nodeByPK, now, 1)
	scoreFar := store.scoreHop(entry, store.nodeCache[2], 2, graph, nodeByPK, now, 1)
	if scoreFar >= scoreClose {
		t.Errorf("expected far node score (%f) < close node score (%f)", scoreFar, scoreClose)
	}
}

func TestBeamSearch_WidthCap(t *testing.T) {
	store := &PacketStore{}
	graph := NewNeighborGraph()
	graph.builtAt = time.Now()
	now := time.Now()

	// Create 25 nodes that all match prefix "aa".
	var nodes []nodeInfo
	for i := 0; i < 25; i++ {
		// Each node has pubkey starting with "aa" followed by unique hex.
		pk := "aa" + strings.Repeat("0", 4) + fmt.Sprintf("%02x", i)
		nodes = append(nodes, nodeInfo{PublicKey: pk, Name: pk, Role: "repeater"})
	}
	pm := buildPrefixMap(nodes)

	// Two hops of "aa" — should produce 25*25=625 combos, pruned to 20.
	beam := store.beamSearch([]string{"aa", "aa"}, pm, graph, nil, now)
	if len(beam) > beamWidth {
		t.Errorf("beam exceeded width: got %d, want <= %d", len(beam), beamWidth)
	}
	// Anti-tautology: without beam pruning, we'd have up to 25*min(25,beamWidth)=500 entries.
	// The test verifies pruning is effective.
}

func TestBeamSearch_Speculative(t *testing.T) {
	store := &PacketStore{}
	graph := NewNeighborGraph()
	graph.builtAt = time.Now()
	now := time.Now()

	// Create nodes with no edges and multiple candidates — should result in low scores (speculative).
	nodes := []nodeInfo{
		{PublicKey: "aabb", Name: "N1", Role: "repeater"},
		{PublicKey: "aabb22", Name: "N1b", Role: "repeater"},
		{PublicKey: "ccdd", Name: "N2", Role: "repeater"},
		{PublicKey: "ccdd22", Name: "N2b", Role: "repeater"},
		{PublicKey: "ccdd33", Name: "N2c", Role: "repeater"},
	}
	pm := buildPrefixMap(nodes)

	beam := store.beamSearch([]string{"aa", "cc"}, pm, graph, nil, now)
	if len(beam) == 0 {
		t.Fatal("expected at least one result")
	}

	// Score should be < 0.7 since there's no edge and multiple candidates (speculative).
	nHops := len(beam[0].pubkeys)
	score := 1.0
	if nHops > 0 {
		product := beam[0].score
		score = pow(product, 1.0/float64(nHops))
	}
	if score >= speculativeThreshold {
		t.Errorf("expected speculative score (< %f), got %f", speculativeThreshold, score)
	}
}

func TestHandlePathInspect_EmptyPrefixes(t *testing.T) {
	srv := newTestServerForInspect(t)
	body := `{"prefixes":[]}`
	rr := doInspectRequest(srv, body)
	if rr.Code != http.StatusBadRequest {
		t.Errorf("expected 400, got %d", rr.Code)
	}
}

func TestHandlePathInspect_OddLengthPrefix(t *testing.T) {
	srv := newTestServerForInspect(t)
	body := `{"prefixes":["abc"]}`
	rr := doInspectRequest(srv, body)
	if rr.Code != http.StatusBadRequest {
		t.Errorf("expected 400 for odd-length prefix, got %d", rr.Code)
	}
}

func TestHandlePathInspect_MixedLengths(t *testing.T) {
	srv := newTestServerForInspect(t)
	body := `{"prefixes":["aa","bbcc"]}`
	rr := doInspectRequest(srv, body)
	if rr.Code != http.StatusBadRequest {
		t.Errorf("expected 400 for mixed lengths, got %d", rr.Code)
	}
}

func TestHandlePathInspect_TooLongPrefix(t *testing.T) {
	srv := newTestServerForInspect(t)
	body := `{"prefixes":["aabbccdd"]}`
	rr := doInspectRequest(srv, body)
	if rr.Code != http.StatusBadRequest {
		t.Errorf("expected 400 for >3-byte prefix, got %d", rr.Code)
	}
}

func TestHandlePathInspect_TooManyPrefixes(t *testing.T) {
	srv := newTestServerForInspect(t)
	prefixes := make([]string, 65)
	for i := range prefixes {
		prefixes[i] = "aa"
	}
	b, _ := json.Marshal(map[string]interface{}{"prefixes": prefixes})
	rr := doInspectRequest(srv, string(b))
	if rr.Code != http.StatusBadRequest {
		t.Errorf("expected 400 for >64 prefixes, got %d", rr.Code)
	}
}

func TestHandlePathInspect_ValidRequest(t *testing.T) {
	srv := newTestServerForInspect(t)

	// Seed nodes in the store — multiple candidates per prefix to lower selectivity.
	srv.store.nodeCache = []nodeInfo{
		{PublicKey: "aabb1234", Name: "NodeA", Role: "repeater", Lat: 37.0, Lon: -122.0, HasGPS: true},
		{PublicKey: "aabb5678", Name: "NodeA2", Role: "repeater"},
		{PublicKey: "ccdd5678", Name: "NodeB", Role: "repeater", Lat: 37.01, Lon: -122.01, HasGPS: true},
		{PublicKey: "ccdd9999", Name: "NodeB2", Role: "repeater"},
		{PublicKey: "ccdd1111", Name: "NodeB3", Role: "repeater"},
	}
	srv.store.nodePM = buildPrefixMap(srv.store.nodeCache)
	srv.store.nodeCacheTime = time.Now()
	srv.store.graph = NewNeighborGraph()
	srv.store.graph.builtAt = time.Now()

	body := `{"prefixes":["aa","cc"]}`
	rr := doInspectRequest(srv, body)
	if rr.Code != http.StatusOK {
		t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String())
	}

	var resp pathInspectResponse
	if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil {
		t.Fatalf("invalid JSON response: %v", err)
	}
	if len(resp.Candidates) == 0 {
		t.Error("expected at least one candidate")
	}
	if resp.Candidates[0].Speculative != true {
		// No edge between nodes, so score should be < 0.7.
		t.Error("expected speculative=true for no-edge path")
	}
}

// ─── Helpers ──────────────────────────────────────────────────────────────────

func newTestServerForInspect(t *testing.T) *Server {
	t.Helper()
	store := &PacketStore{
		inspectCache: make(map[string]*inspectCachedResult),
	}
	store.graph = NewNeighborGraph()
	store.graph.builtAt = time.Now()
	return &Server{store: store}
}

func doInspectRequest(srv *Server, body string) *httptest.ResponseRecorder {
	req := httptest.NewRequest("POST", "/api/paths/inspect", bytes.NewBufferString(body))
	req.Header.Set("Content-Type", "application/json")
	rr := httptest.NewRecorder()
	srv.handlePathInspect(rr, req)
	return rr
}

func pow(base, exp float64) float64 {
	return math.Pow(base, exp)
}

// BenchmarkBeamSearch — performance proof for spec §2.5 (<100ms p99 for ≤64 hops).
// Anti-tautology: removing beam pruning makes this ~625x slower; timing assertion catches it.
func BenchmarkBeamSearch(b *testing.B) {
	// Setup: 100 nodes, 10-hop prefix input, realistic neighbor graph.
	store := &PacketStore{}
	pm := &prefixMap{m: make(map[string][]nodeInfo)}
	graph := NewNeighborGraph()
	nodes := make([]nodeInfo, 100)

	now := time.Now()
	for i := 0; i < 100; i++ {
		pk := fmt.Sprintf("%064x", i)
		prefix := fmt.Sprintf("%02x", i%256)
		node := nodeInfo{PublicKey: pk, Name: fmt.Sprintf("Node%d", i), Role: "repeater", Lat: 37.0 + float64(i)*0.01, Lon: -122.0 + float64(i)*0.01}
		nodes[i] = node
		pm.m[prefix] = append(pm.m[prefix], node)
		// Add neighbor edges to create a connected graph.
		if i > 0 {
			prevPK := fmt.Sprintf("%064x", i-1)
			key := makeEdgeKey(prevPK, pk)
			edge := &NeighborEdge{NodeA: prevPK, NodeB: pk, LastSeen: now, Count: 10}
			graph.edges[key] = edge
			graph.byNode[prevPK] = append(graph.byNode[prevPK], edge)
			graph.byNode[pk] = append(graph.byNode[pk], edge)
		}
	}

	// 10-hop input using prefixes that map to multiple candidates.
	prefixes := make([]string, 10)
	for i := 0; i < 10; i++ {
		prefixes[i] = fmt.Sprintf("%02x", (i*3)%256)
	}

	nodeByPK := make(map[string]*nodeInfo)
	for idx := range nodes {
		nodeByPK[nodes[idx].PublicKey] = &nodes[idx]
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		store.beamSearch(prefixes, pm, graph, nodeByPK, now)
	}
}
@@ -1,212 +0,0 @@
package main

import (
	"encoding/json"
	"testing"
)

func TestCanAppearInPath(t *testing.T) {
	cases := []struct {
		role string
		want bool
	}{
		{"repeater", true},
		{"Repeater", true},
		{"REPEATER", true},
		{"room_server", true},
		{"Room_Server", true},
		{"room", true},
		{"companion", false},
		{"sensor", false},
		{"", false},
		{"unknown", false},
	}
	for _, tc := range cases {
		if got := canAppearInPath(tc.role); got != tc.want {
			t.Errorf("canAppearInPath(%q) = %v, want %v", tc.role, got, tc.want)
		}
	}
}

func TestBuildPrefixMap_ExcludesCompanions(t *testing.T) {
	nodes := []nodeInfo{
		{PublicKey: "7a1234abcdef", Role: "companion", Name: "MyCompanion"},
	}
	pm := buildPrefixMap(nodes)
	if len(pm.m) != 0 {
		t.Fatalf("expected empty prefix map, got %d entries", len(pm.m))
	}
}

func TestBuildPrefixMap_ExcludesSensors(t *testing.T) {
	nodes := []nodeInfo{
		{PublicKey: "7a1234abcdef", Role: "sensor", Name: "MySensor"},
	}
	pm := buildPrefixMap(nodes)
	if len(pm.m) != 0 {
		t.Fatalf("expected empty prefix map, got %d entries", len(pm.m))
	}
}

func TestResolveWithContext_NilWhenOnlyCompanionMatchesPrefix(t *testing.T) {
	nodes := []nodeInfo{
		{PublicKey: "7a1234abcdef", Role: "companion", Name: "MyCompanion"},
	}
	pm := buildPrefixMap(nodes)
	r, _, _ := pm.resolveWithContext("7a", nil, nil)
	if r != nil {
		t.Fatalf("expected nil, got %+v", r)
	}
}

func TestResolveWithContext_NilWhenOnlySensorMatchesPrefix(t *testing.T) {
	nodes := []nodeInfo{
		{PublicKey: "7a1234abcdef", Role: "sensor", Name: "MySensor"},
	}
	pm := buildPrefixMap(nodes)
	r, _, _ := pm.resolveWithContext("7a", nil, nil)
	if r != nil {
		t.Fatalf("expected nil for sensor-only prefix, got %+v", r)
	}
}

func TestResolveWithContext_PrefersRepeaterOverCompanionAtSamePrefix(t *testing.T) {
	nodes := []nodeInfo{
		{PublicKey: "7a1234abcdef", Role: "companion", Name: "MyCompanion"},
		{PublicKey: "7a5678901234", Role: "repeater", Name: "MyRepeater"},
	}
	pm := buildPrefixMap(nodes)
	r, _, _ := pm.resolveWithContext("7a", nil, nil)
	if r == nil {
		t.Fatal("expected non-nil result")
	}
	if r.Name != "MyRepeater" {
		t.Fatalf("expected MyRepeater, got %s", r.Name)
	}
}

func TestResolveWithContext_PrefersRoomServerOverCompanionAtSamePrefix(t *testing.T) {
	nodes := []nodeInfo{
		{PublicKey: "ab1234abcdef", Role: "companion", Name: "MyCompanion"},
		{PublicKey: "ab5678901234", Role: "room_server", Name: "MyRoom"},
	}
	pm := buildPrefixMap(nodes)
	r, _, _ := pm.resolveWithContext("ab", nil, nil)
	if r == nil {
		t.Fatal("expected non-nil result")
	}
	if r.Name != "MyRoom" {
		t.Fatalf("expected MyRoom, got %s", r.Name)
	}
}

func TestResolve_NilWhenOnlyCompanionMatchesPrefix(t *testing.T) {
	nodes := []nodeInfo{
		{PublicKey: "7a1234abcdef", Role: "companion", Name: "MyCompanion"},
	}
	pm := buildPrefixMap(nodes)
	r := pm.resolve("7a")
	if r != nil {
		t.Fatalf("expected nil from resolve() for companion-only prefix, got %+v", r)
	}
}

func TestResolve_NilWhenOnlySensorMatchesPrefix(t *testing.T) {
	nodes := []nodeInfo{
		{PublicKey: "7a1234abcdef", Role: "sensor", Name: "MySensor"},
	}
	pm := buildPrefixMap(nodes)
	r := pm.resolve("7a")
	if r != nil {
		t.Fatalf("expected nil from resolve() for sensor-only prefix, got %+v", r)
	}
}

func TestResolveWithContext_PicksRepeaterEvenWhenCompanionHasGPS(t *testing.T) {
	// Adversarial: companion has GPS, repeater doesn't. Role filter should
	// exclude companion entirely, so repeater wins despite lacking GPS.
	nodes := []nodeInfo{
		{PublicKey: "7a1234abcdef", Role: "companion", Name: "GPSCompanion", Lat: 37.0, Lon: -122.0, HasGPS: true},
		{PublicKey: "7a5678901234", Role: "repeater", Name: "NoGPSRepeater", Lat: 0, Lon: 0, HasGPS: false},
	}
	pm := buildPrefixMap(nodes)
	r, _, _ := pm.resolveWithContext("7a", nil, nil)
	if r == nil {
		t.Fatal("expected non-nil result")
	}
	if r.Name != "NoGPSRepeater" {
		t.Fatalf("expected NoGPSRepeater (role filter excludes companion), got %s", r.Name)
	}
}

func TestComputeDistancesForTx_CompanionNeverInResolvedChain(t *testing.T) {
	// Integration test: a path with a prefix matching both a companion and a
	// repeater. The resolveHop function (using buildPrefixMap) should only
	// return the repeater.
	nodes := []nodeInfo{
		{PublicKey: "7a1234abcdef", Role: "companion", Name: "BadCompanion", Lat: 37.0, Lon: -122.0, HasGPS: true},
		{PublicKey: "7a5678901234", Role: "repeater", Name: "GoodRepeater", Lat: 38.0, Lon: -123.0, HasGPS: true},
		{PublicKey: "bb1111111111", Role: "repeater", Name: "OtherRepeater", Lat: 39.0, Lon: -124.0, HasGPS: true},
	}
	pm := buildPrefixMap(nodes)

	nodeByPk := make(map[string]*nodeInfo)
	for i := range nodes {
		nodeByPk[nodes[i].PublicKey] = &nodes[i]
	}
	repeaterSet := map[string]bool{
		"7a5678901234": true,
		"bb1111111111": true,
	}

	// Build a synthetic StoreTx with a path ["7a", "bb"] and a sender with GPS
	senderPK := "cc0000000000"
	sender := nodeInfo{PublicKey: senderPK, Role: "repeater", Name: "Sender", Lat: 36.0, Lon: -121.0, HasGPS: true}
	nodeByPk[senderPK] = &sender

	pathJSON, _ := json.Marshal([]string{"7a", "bb"})
	decoded, _ := json.Marshal(map[string]interface{}{"pubKey": senderPK})

	tx := &StoreTx{
		PathJSON:    string(pathJSON),
		DecodedJSON: string(decoded),
		FirstSeen:   "2026-04-30T12:00",
	}

	resolveHop := func(hop string) *nodeInfo {
		return pm.resolve(hop)
	}

	hops, pathRec := computeDistancesForTx(tx, nodeByPk, repeaterSet, resolveHop)

	// Verify BadCompanion's pubkey never appears in hops
	badPK := "7a1234abcdef"
	for i, h := range hops {
		if h.FromPk == badPK || h.ToPk == badPK {
			t.Fatalf("hop[%d] contains BadCompanion pubkey: from=%s to=%s", i, h.FromPk, h.ToPk)
		}
	}

	// Verify BadCompanion's pubkey never appears in pathRec
	if pathRec == nil {
		t.Fatal("expected non-nil path record (3 GPS nodes in chain)")
	}
	for i, hop := range pathRec.Hops {
		if hop.FromPk == badPK || hop.ToPk == badPK {
			t.Fatalf("pathRec.Hops[%d] contains BadCompanion pubkey: from=%s to=%s", i, hop.FromPk, hop.ToPk)
		}
	}

	// Verify GoodRepeater IS in the chain (proves the prefix was resolved to the right node)
	goodPK := "7a5678901234"
	foundGood := false
	for _, hop := range pathRec.Hops {
		if hop.FromPk == goodPK || hop.ToPk == goodPK {
			foundGood = true
			break
		}
	}
	if !foundGood {
		t.Fatal("expected GoodRepeater (7a5678901234) in pathRec.Hops but not found")
	}
}
@@ -11,7 +11,7 @@ import (
func TestResolveWithContext_UniquePrefix(t *testing.T) {
	pm := buildPrefixMap([]nodeInfo{
		{Role: "repeater", PublicKey: "a1b2c3d4", Name: "Node-A", HasGPS: true, Lat: 1, Lon: 2},
		{PublicKey: "a1b2c3d4", Name: "Node-A", HasGPS: true, Lat: 1, Lon: 2},
	})
	ni, confidence, _ := pm.resolveWithContext("a1b2c3d4", nil, nil)
	if ni == nil || ni.Name != "Node-A" {
@@ -24,7 +24,7 @@ func TestResolveWithContext_UniquePrefix(t *testing.T) {

func TestResolveWithContext_NoMatch(t *testing.T) {
	pm := buildPrefixMap([]nodeInfo{
		{Role: "repeater", PublicKey: "a1b2c3d4", Name: "Node-A"},
		{PublicKey: "a1b2c3d4", Name: "Node-A"},
	})
	ni, confidence, _ := pm.resolveWithContext("ff", nil, nil)
	if ni != nil {
@@ -37,8 +37,8 @@ func TestResolveWithContext_NoMatch(t *testing.T) {

func TestResolveWithContext_AffinityWins(t *testing.T) {
	pm := buildPrefixMap([]nodeInfo{
		{Role: "repeater", PublicKey: "a1aaaaaa", Name: "Node-A1"},
		{Role: "repeater", PublicKey: "a1bbbbbb", Name: "Node-A2"},
		{PublicKey: "a1aaaaaa", Name: "Node-A1"},
		{PublicKey: "a1bbbbbb", Name: "Node-A2"},
	})

	graph := NewNeighborGraph()
@@ -60,9 +60,9 @@ func TestResolveWithContext_AffinityWins(t *testing.T) {

func TestResolveWithContext_AffinityTooClose_FallsToGeo(t *testing.T) {
	pm := buildPrefixMap([]nodeInfo{
		{Role: "repeater", PublicKey: "a1aaaaaa", Name: "Node-A1", HasGPS: true, Lat: 10, Lon: 20},
		{Role: "repeater", PublicKey: "a1bbbbbb", Name: "Node-A2", HasGPS: true, Lat: 11, Lon: 21},
		{Role: "repeater", PublicKey: "c0c0c0c0", Name: "Ctx", HasGPS: true, Lat: 10.1, Lon: 20.1},
		{PublicKey: "a1aaaaaa", Name: "Node-A1", HasGPS: true, Lat: 10, Lon: 20},
		{PublicKey: "a1bbbbbb", Name: "Node-A2", HasGPS: true, Lat: 11, Lon: 21},
		{PublicKey: "c0c0c0c0", Name: "Ctx", HasGPS: true, Lat: 10.1, Lon: 20.1},
	})

	graph := NewNeighborGraph()
@@ -85,8 +85,8 @@ func TestResolveWithContext_AffinityTooClose_FallsToGeo(t *testing.T) {

func TestResolveWithContext_GPSPreference(t *testing.T) {
	pm := buildPrefixMap([]nodeInfo{
		{Role: "repeater", PublicKey: "a1aaaaaa", Name: "NoGPS"},
		{Role: "repeater", PublicKey: "a1bbbbbb", Name: "HasGPS", HasGPS: true, Lat: 1, Lon: 2},
		{PublicKey: "a1aaaaaa", Name: "NoGPS"},
		{PublicKey: "a1bbbbbb", Name: "HasGPS", HasGPS: true, Lat: 1, Lon: 2},
	})

	ni, confidence, _ := pm.resolveWithContext("a1", nil, nil)
@@ -100,8 +100,8 @@ func TestResolveWithContext_GPSPreference(t *testing.T) {

func TestResolveWithContext_FirstMatchFallback(t *testing.T) {
	pm := buildPrefixMap([]nodeInfo{
		{Role: "repeater", PublicKey: "a1aaaaaa", Name: "First"},
		{Role: "repeater", PublicKey: "a1bbbbbb", Name: "Second"},
		{PublicKey: "a1aaaaaa", Name: "First"},
		{PublicKey: "a1bbbbbb", Name: "Second"},
	})

	ni, confidence, _ := pm.resolveWithContext("a1", nil, nil)
@@ -115,8 +115,8 @@ func TestResolveWithContext_FirstMatchFallback(t *testing.T) {

func TestResolveWithContext_NilGraphFallsToGPS(t *testing.T) {
	pm := buildPrefixMap([]nodeInfo{
		{Role: "repeater", PublicKey: "a1aaaaaa", Name: "NoGPS"},
		{Role: "repeater", PublicKey: "a1bbbbbb", Name: "HasGPS", HasGPS: true, Lat: 1, Lon: 2},
		{PublicKey: "a1aaaaaa", Name: "NoGPS"},
		{PublicKey: "a1bbbbbb", Name: "HasGPS", HasGPS: true, Lat: 1, Lon: 2},
	})

	ni, confidence, _ := pm.resolveWithContext("a1", []string{"someone"}, nil)
@@ -131,8 +131,8 @@ func TestResolveWithContext_NilGraphFallsToGPS(t *testing.T) {
func TestResolveWithContext_BackwardCompatResolve(t *testing.T) {
	// Verify original resolve() still works unchanged
	pm := buildPrefixMap([]nodeInfo{
		{Role: "repeater", PublicKey: "a1aaaaaa", Name: "NoGPS"},
		{Role: "repeater", PublicKey: "a1bbbbbb", Name: "HasGPS", HasGPS: true, Lat: 1, Lon: 2},
		{PublicKey: "a1aaaaaa", Name: "NoGPS"},
		{PublicKey: "a1bbbbbb", Name: "HasGPS", HasGPS: true, Lat: 1, Lon: 2},
	})
	ni := pm.resolve("a1")
	if ni == nil || ni.Name != "HasGPS" {
@@ -164,8 +164,8 @@ func TestResolveHopsAPI_UniquePrefix(t *testing.T) {
	_ = srv

	// Insert a unique node
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon, role) VALUES (?, ?, ?, ?, ?)",
		"ff11223344", "UniqueNode", 37.0, -122.0, "repeater")
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon) VALUES (?, ?, ?, ?)",
		"ff11223344", "UniqueNode", 37.0, -122.0)
	srv.store.InvalidateNodeCache()

	req := httptest.NewRequest("GET", "/api/resolve-hops?hops=ff11223344", nil)
@@ -189,10 +189,10 @@ func TestResolveHopsAPI_UniquePrefix(t *testing.T) {
func TestResolveHopsAPI_AmbiguousNoContext(t *testing.T) {
	srv, router := setupTestServer(t)

	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon, role) VALUES (?, ?, ?, ?, ?)",
		"ee1aaaaaaa", "Node-E1", 37.0, -122.0, "repeater")
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon, role) VALUES (?, ?, ?, ?, ?)",
		"ee1bbbbbbb", "Node-E2", 38.0, -121.0, "repeater")
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon) VALUES (?, ?, ?, ?)",
		"ee1aaaaaaa", "Node-E1", 37.0, -122.0)
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon) VALUES (?, ?, ?, ?)",
		"ee1bbbbbbb", "Node-E2", 38.0, -121.0)
	srv.store.InvalidateNodeCache()

	req := httptest.NewRequest("GET", "/api/resolve-hops?hops=ee1", nil)
@@ -224,12 +224,12 @@ func TestResolveHopsAPI_AmbiguousNoContext(t *testing.T) {
func TestResolveHopsAPI_WithAffinityContext(t *testing.T) {
	srv, router := setupTestServer(t)

	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon, role) VALUES (?, ?, ?, ?, ?)",
		"dd1aaaaaaa", "Node-D1", 37.0, -122.0, "repeater")
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon, role) VALUES (?, ?, ?, ?, ?)",
		"dd1bbbbbbb", "Node-D2", 38.0, -121.0, "repeater")
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon, role) VALUES (?, ?, ?, ?, ?)",
		"c0c0c0c0c0", "Context", 37.1, -122.1, "repeater")
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon) VALUES (?, ?, ?, ?)",
		"dd1aaaaaaa", "Node-D1", 37.0, -122.0)
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon) VALUES (?, ?, ?, ?)",
		"dd1bbbbbbb", "Node-D2", 38.0, -121.0)
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon) VALUES (?, ?, ?, ?)",
		"c0c0c0c0c0", "Context", 37.1, -122.1)

	// Invalidate node cache so the PM includes newly inserted nodes.
	srv.store.cacheMu.Lock()
@@ -279,8 +279,8 @@ func TestResolveHopsAPI_WithAffinityContext(t *testing.T) {
func TestResolveHopsAPI_ResponseShape(t *testing.T) {
	srv, router := setupTestServer(t)

	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon, role) VALUES (?, ?, ?, ?, ?)",
		"bb1aaaaaaa", "Node-B1", 37.0, -122.0, "repeater")
	srv.db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, lat, lon) VALUES (?, ?, ?, ?)",
		"bb1aaaaaaa", "Node-B1", 37.0, -122.0)

	req := httptest.NewRequest("GET", "/api/resolve-hops?hops=bb1a", nil)
	rr := httptest.NewRecorder()

+4 −16
@@ -16,7 +16,6 @@ import (
	"time"

	"github.com/gorilla/mux"
	"github.com/meshcore-analyzer/packetpath"
)

// Server holds shared state for route handlers.
@@ -173,7 +172,6 @@ func (s *Server) RegisterRoutes(r *mux.Router) {
	r.HandleFunc("/api/observers/{id}", s.handleObserverDetail).Methods("GET")
	r.HandleFunc("/api/observers", s.handleObservers).Methods("GET")
	r.HandleFunc("/api/traces/{hash}", s.handleTraces).Methods("GET")
	r.HandleFunc("/api/paths/inspect", s.handlePathInspect).Methods("POST")
	r.HandleFunc("/api/iata-coords", s.handleIATACoords).Methods("GET")
	r.HandleFunc("/api/audio-lab/buckets", s.handleAudioLabBuckets).Methods("GET")

@@ -959,9 +957,11 @@ func (s *Server) handlePacketDetail(w http.ResponseWriter, r *http.Request) {
		pathHops = []interface{}{}
	}

	rawHex, _ := packet["raw_hex"].(string)
	writeJSON(w, PacketDetailResponse{
		Packet:           packet,
		Path:             pathHops,
		Breakdown:        BuildBreakdown(rawHex),
		ObservationCount: observationCount,
		Observations:     mapSliceToObservations(observations),
	})
@@ -1020,17 +1020,8 @@ func (s *Server) handlePostPacket(w http.ResponseWriter, r *http.Request) {

	contentHash := ComputeContentHash(hexStr)
	pathJSON := "[]"
	// For TRACE packets, path_json must be the payload-decoded route hops
	// (decoded.Path.Hops), NOT the raw_hex header bytes which are SNR values.
	// For all other packet types, derive path from raw_hex (#886).
	if !packetpath.PathBytesAreHops(byte(decoded.Header.PayloadType)) {
		if len(decoded.Path.Hops) > 0 {
			if pj, e := json.Marshal(decoded.Path.Hops); e == nil {
				pathJSON = string(pj)
			}
		}
	} else if hops, err := packetpath.DecodePathFromRawHex(hexStr); err == nil && len(hops) > 0 {
		if pj, e := json.Marshal(hops); e == nil {
	if len(decoded.Path.Hops) > 0 {
		if pj, e := json.Marshal(decoded.Path.Hops); e == nil {
			pathJSON = string(pj)
		}
	}
@@ -2395,9 +2386,6 @@ func mapSliceToObservations(maps []map[string]interface{}) []ObservationResp {
		obs.SNR = m["snr"]
		obs.RSSI = m["rssi"]
		obs.PathJSON = m["path_json"]
		obs.ResolvedPath = m["resolved_path"]
		obs.Direction = m["direction"]
		obs.RawHex = m["raw_hex"]
		obs.Timestamp = m["timestamp"]
		result = append(result, obs)
	}

+19 −101
@@ -63,7 +63,6 @@ type StoreObs struct {
	RSSI      *float64
	Score     *int
	PathJSON  string
	RawHex    string
	Timestamp string
}

@@ -209,10 +208,6 @@ type PacketStore struct {
	// Persisted neighbor graph for hop resolution at ingest time.
	graph *NeighborGraph

	// Path inspector score cache (issue #944).
	inspectMu    sync.RWMutex
	inspectCache map[string]*inspectCachedResult

	// Clock skew detection engine.
	clockSkew *ClockSkewEngine

@@ -463,42 +458,29 @@ func (s *PacketStore) Load() error {
	if s.db.hasResolvedPath {
		rpCol = ",\n\t\t\t\to.resolved_path"
	}
	obsRawHexCol := ""
	if s.db.hasObsRawHex {
		obsRawHexCol = ", o.raw_hex"
	}

	// Build WHERE conditions: retention cutoff (mirrors Evict logic) + optional memory-cap limit.
	var loadConditions []string
	if s.retentionHours > 0 {
		cutoff := time.Now().UTC().Add(-time.Duration(s.retentionHours*3600) * time.Second).Format(time.RFC3339)
		loadConditions = append(loadConditions, fmt.Sprintf("t.first_seen >= '%s'", cutoff))
	}
	limitClause := ""
	if maxPackets > 0 {
		loadConditions = append(loadConditions, fmt.Sprintf(
			"t.id IN (SELECT id FROM transmissions ORDER BY first_seen DESC LIMIT %d)", maxPackets))
	}
	filterClause := ""
	if len(loadConditions) > 0 {
		filterClause = "\n\t\t\tWHERE " + strings.Join(loadConditions, "\n\t\t\t AND ")
		limitClause = fmt.Sprintf(
			"\n\t\t\tWHERE t.id IN (SELECT id FROM transmissions ORDER BY first_seen DESC LIMIT %d)", maxPackets)
	}

	if s.db.isV3 {
		loadSQL = `SELECT t.id, t.raw_hex, t.hash, t.first_seen, t.route_type,
			t.payload_type, t.payload_version, t.decoded_json,
			o.id, obs.id, obs.name, o.direction,
			o.snr, o.rssi, o.score, o.path_json, strftime('%Y-%m-%dT%H:%M:%fZ', o.timestamp, 'unixepoch')` + obsRawHexCol + rpCol + `
			o.snr, o.rssi, o.score, o.path_json, strftime('%Y-%m-%dT%H:%M:%fZ', o.timestamp, 'unixepoch')` + rpCol + `
			FROM transmissions t
			LEFT JOIN observations o ON o.transmission_id = t.id
			LEFT JOIN observers obs ON obs.rowid = o.observer_idx` + filterClause + `
			LEFT JOIN observers obs ON obs.rowid = o.observer_idx` + limitClause + `
			ORDER BY t.first_seen ASC, o.timestamp DESC`
	} else {
		loadSQL = `SELECT t.id, t.raw_hex, t.hash, t.first_seen, t.route_type,
			t.payload_type, t.payload_version, t.decoded_json,
			o.id, o.observer_id, o.observer_name, o.direction,
			o.snr, o.rssi, o.score, o.path_json, o.timestamp` + obsRawHexCol + rpCol + `
			o.snr, o.rssi, o.score, o.path_json, o.timestamp` + rpCol + `
			FROM transmissions t
			LEFT JOIN observations o ON o.transmission_id = t.id` + filterClause + `
			LEFT JOIN observations o ON o.transmission_id = t.id` + limitClause + `
			ORDER BY t.first_seen ASC, o.timestamp DESC`
	}

@@ -518,16 +500,12 @@ func (s *PacketStore) Load() error {
		var observerID, observerName, direction, pathJSON, obsTimestamp sql.NullString
		var snr, rssi sql.NullFloat64
		var score sql.NullInt64
		var obsRawHex sql.NullString
		var resolvedPathStr sql.NullString

		scanArgs := []interface{}{&txID, &rawHex, &hash, &firstSeen, &routeType, &payloadType,
			&payloadVersion, &decodedJSON,
			&obsID, &observerID, &observerName, &direction,
			&snr, &rssi, &score, &pathJSON, &obsTimestamp}
		if s.db.hasObsRawHex {
			scanArgs = append(scanArgs, &obsRawHex)
		}
		if s.db.hasResolvedPath {
			scanArgs = append(scanArgs, &resolvedPathStr)
		}
@@ -587,7 +565,6 @@ func (s *PacketStore) Load() error {
			RSSI:      nullFloatPtr(rssi),
			Score:     nullIntPtr(score),
			PathJSON:  obsPJ,
			RawHex:    nullStrVal(obsRawHex),
			Timestamp: normalizeTimestamp(nullStrVal(obsTimestamp)),
		}

@@ -1407,15 +1384,11 @@ func (s *PacketStore) IngestNewFromDB(sinceID, limit int) ([]map[string]interfac
	// New ingests always resolve fresh using the current prefix map and neighbor graph.
	// On restart, Load() handles reading persisted resolved_path values. (review item #7)
	var querySQL string
	obsRHCol := ""
	if s.db.hasObsRawHex {
		obsRHCol = ", o.raw_hex"
	}
	if s.db.isV3 {
		querySQL = `SELECT t.id, t.raw_hex, t.hash, t.first_seen, t.route_type,
			t.payload_type, t.payload_version, t.decoded_json,
			o.id, obs.id, obs.name, o.direction,
			o.snr, o.rssi, o.score, o.path_json, strftime('%Y-%m-%dT%H:%M:%fZ', o.timestamp, 'unixepoch')` + obsRHCol + `
			o.snr, o.rssi, o.score, o.path_json, strftime('%Y-%m-%dT%H:%M:%fZ', o.timestamp, 'unixepoch')
			FROM transmissions t
			LEFT JOIN observations o ON o.transmission_id = t.id
			LEFT JOIN observers obs ON obs.rowid = o.observer_idx
@@ -1425,7 +1398,7 @@ func (s *PacketStore) IngestNewFromDB(sinceID, limit int) ([]map[string]interfac
		querySQL = `SELECT t.id, t.raw_hex, t.hash, t.first_seen, t.route_type,
			t.payload_type, t.payload_version, t.decoded_json,
			o.id, o.observer_id, o.observer_name, o.direction,
			o.snr, o.rssi, o.score, o.path_json, o.timestamp` + obsRHCol + `
			o.snr, o.rssi, o.score, o.path_json, o.timestamp
			FROM transmissions t
			LEFT JOIN observations o ON o.transmission_id = t.id
			WHERE t.id > ?
@@ -1446,7 +1419,6 @@ func (s *PacketStore) IngestNewFromDB(sinceID, limit int) ([]map[string]interfac
		routeType, payloadType *int
		obsID                  *int
		observerID, observerName, direction, pathJSON, obsTS string
		obsRawHex              string
		snr, rssi              *float64
		score                  *int
	}
@@ -1463,16 +1435,11 @@ func (s *PacketStore) IngestNewFromDB(sinceID, limit int) ([]map[string]interfac
		var observerID, observerName, direction, pathJSON, obsTimestamp sql.NullString
		var snrVal, rssiVal sql.NullFloat64
		var scoreVal sql.NullInt64
		var obsRawHex sql.NullString

		scanArgs2 := []interface{}{&txID, &rawHex, &hash, &firstSeen, &routeType, &payloadType,
		if err := rows.Scan(&txID, &rawHex, &hash, &firstSeen, &routeType, &payloadType,
			&payloadVersion, &decodedJSON,
			&obsIDVal, &observerID, &observerName, &direction,
			&snrVal, &rssiVal, &scoreVal, &pathJSON, &obsTimestamp}
		if s.db.hasObsRawHex {
			scanArgs2 = append(scanArgs2, &obsRawHex)
		}
		if err := rows.Scan(scanArgs2...); err != nil {
			&snrVal, &rssiVal, &scoreVal, &pathJSON, &obsTimestamp); err != nil {
			continue
		}

@@ -1497,7 +1464,6 @@ func (s *PacketStore) IngestNewFromDB(sinceID, limit int) ([]map[string]interfac
			direction: nullStrVal(direction),
			pathJSON:  nullStrVal(pathJSON),
			obsTS:     nullStrVal(obsTimestamp),
			obsRawHex: nullStrVal(obsRawHex),
			snr:       nullFloatPtr(snrVal),
			rssi:      nullFloatPtr(rssiVal),
			score:     nullIntPtr(scoreVal),
@@ -1598,7 +1564,6 @@ func (s *PacketStore) IngestNewFromDB(sinceID, limit int) ([]map[string]interfac
			RSSI:      r.rssi,
			Score:     r.score,
			PathJSON:  r.pathJSON,
			RawHex:    r.obsRawHex,
			Timestamp: normalizeTimestamp(r.obsTS),
		}

@@ -1841,13 +1806,9 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) []map[string]
	}

	var querySQL string
	obsRHCol2 := ""
	if s.db.hasObsRawHex {
		obsRHCol2 = ", o.raw_hex"
	}
	if s.db.isV3 {
		querySQL = `SELECT o.id, o.transmission_id, obs.id, obs.name, o.direction,
			o.snr, o.rssi, o.score, o.path_json, strftime('%Y-%m-%dT%H:%M:%fZ', o.timestamp, 'unixepoch')` + obsRHCol2 + `
			o.snr, o.rssi, o.score, o.path_json, strftime('%Y-%m-%dT%H:%M:%fZ', o.timestamp, 'unixepoch')
			FROM observations o
			LEFT JOIN observers obs ON obs.rowid = o.observer_idx
			WHERE o.id > ?
@@ -1855,7 +1816,7 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) []map[string]
			LIMIT ?`
	} else {
		querySQL = `SELECT o.id, o.transmission_id, o.observer_id, o.observer_name, o.direction,
			o.snr, o.rssi, o.score, o.path_json, o.timestamp` + obsRHCol2 + `
			o.snr, o.rssi, o.score, o.path_json, o.timestamp
			FROM observations o
			WHERE o.id > ?
			ORDER BY o.id ASC
@@ -1878,7 +1839,6 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) []map[string]
		snr, rssi *float64
		score     *int
		pathJSON  string
		rawHex    string
		timestamp string
	}

@@ -1888,14 +1848,9 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) []map[string]
		var observerID, observerName, direction, pathJSON, ts sql.NullString
		var snr, rssi sql.NullFloat64
		var score sql.NullInt64
		var obsRawHex sql.NullString

		scanArgs3 := []interface{}{&oid, &txID, &observerID, &observerName, &direction,
			&snr, &rssi, &score, &pathJSON, &ts}
		if s.db.hasObsRawHex {
			scanArgs3 = append(scanArgs3, &obsRawHex)
		}
		if err := rows.Scan(scanArgs3...); err != nil {
		if err := rows.Scan(&oid, &txID, &observerID, &observerName, &direction,
			&snr, &rssi, &score, &pathJSON, &ts); err != nil {
			continue
		}

@@ -1909,7 +1864,6 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) []map[string]
			rssi:      nullFloatPtr(rssi),
			score:     nullIntPtr(score),
			pathJSON:  nullStrVal(pathJSON),
			rawHex:    nullStrVal(obsRawHex),
			timestamp: nullStrVal(ts),
		})
	}
@@ -1965,7 +1919,6 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) []map[string]
		RSSI:      r.rssi,
		Score:     r.score,
		PathJSON:  r.pathJSON,
		RawHex:    r.rawHex,
		Timestamp: normalizeTimestamp(r.timestamp),
	}

@@ -2455,12 +2408,7 @@ func (s *PacketStore) enrichObs(obs *StoreObs) map[string]interface{} {

	if tx != nil {
		m["hash"] = strOrNil(tx.Hash)
		// Prefer per-observation raw_hex; fall back to transmission-level (#881)
		if obs.RawHex != "" {
			m["raw_hex"] = obs.RawHex
		} else {
			m["raw_hex"] = strOrNil(tx.RawHex)
		}
		m["raw_hex"] = strOrNil(tx.RawHex)
		m["payload_type"] = intPtrOrNil(tx.PayloadType)
		m["route_type"] = intPtrOrNil(tx.RouteType)
		m["decoded_json"] = strOrNil(tx.DecodedJSON)
@@ -4530,19 +4478,12 @@ type nodeInfo struct {
	Lat      float64
	Lon      float64
	HasGPS   bool
	LastSeen time.Time
}

func (s *PacketStore) getAllNodes() []nodeInfo {
	// Try with last_seen first; fall back to without if column doesn't exist.
	rows, err := s.db.conn.Query("SELECT public_key, name, role, lat, lon, last_seen FROM nodes")
	hasLastSeen := true
	rows, err := s.db.conn.Query("SELECT public_key, name, role, lat, lon FROM nodes")
	if err != nil {
		rows, err = s.db.conn.Query("SELECT public_key, name, role, lat, lon FROM nodes")
		hasLastSeen = false
		if err != nil {
			return nil
		}
		return nil
	}
	defer rows.Close()
	var nodes []nodeInfo
@@ -4550,25 +4491,13 @@ func (s *PacketStore) getAllNodes() []nodeInfo {
		var pk string
		var name, role sql.NullString
		var lat, lon sql.NullFloat64
		var lastSeen sql.NullString
		if hasLastSeen {
			rows.Scan(&pk, &name, &role, &lat, &lon, &lastSeen)
		} else {
			rows.Scan(&pk, &name, &role, &lat, &lon)
		}
		rows.Scan(&pk, &name, &role, &lat, &lon)
		n := nodeInfo{PublicKey: pk, Name: nullStrVal(name), Role: nullStrVal(role)}
		if lat.Valid && lon.Valid {
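			// A stored (0,0) coordinate is treated as "no GPS fix" rather
			// than a real location.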
			n.Lat = lat.Float64
			n.Lon = lon.Float64
			n.HasGPS = !(n.Lat == 0 && n.Lon == 0)
		}
		if hasLastSeen && lastSeen.Valid && lastSeen.String != "" {
			if t, err := time.Parse(time.RFC3339, lastSeen.String); err == nil {
				n.LastSeen = t
			} else if t, err := time.Parse("2006-01-02 15:04:05", lastSeen.String); err == nil {
				n.LastSeen = t
			}
		}
		nodes = append(nodes, n)
	}
	return nodes
@@ -4583,20 +4512,9 @@ type prefixMap struct {
// entries to ~7×N (+ 1 full-key entry per node for exact-match lookups).
const maxPrefixLen = 8

// canAppearInPath returns true if the node's role allows it to appear as a
// path hop. Only repeaters, room servers, and rooms can forward packets;
// companions and sensors originate but never relay.
func canAppearInPath(role string) bool {
	r := strings.ToLower(role)
	return strings.Contains(r, "repeater") || strings.Contains(r, "room_server") || r == "room"
}

func buildPrefixMap(nodes []nodeInfo) *prefixMap {
	pm := &prefixMap{m: make(map[string][]nodeInfo, len(nodes)*(maxPrefixLen+1))}
	for _, n := range nodes {
		if !canAppearInPath(n.Role) {
			continue
		}
		pk := strings.ToLower(n.PublicKey)
		maxLen := maxPrefixLen
		if maxLen > len(pk) {

+1 −3
@@ -277,9 +277,6 @@ type ObservationResp struct {
	SNR          interface{} `json:"snr"`
	RSSI         interface{} `json:"rssi"`
	PathJSON     interface{} `json:"path_json"`
	ResolvedPath interface{} `json:"resolved_path,omitempty"`
	Direction    interface{} `json:"direction,omitempty"`
	RawHex       interface{} `json:"raw_hex,omitempty"`
	Timestamp    interface{} `json:"timestamp"`
}

@@ -315,6 +312,7 @@ type PacketTimestampsResponse struct {
type PacketDetailResponse struct {
	Packet           interface{}       `json:"packet"`
	Path             []interface{}     `json:"path"`
	Breakdown        *Breakdown        `json:"breakdown"`
	ObservationCount int               `json:"observation_count"`
	Observations     []ObservationResp `json:"observations,omitempty"`
}

@@ -1,84 +0,0 @@
package main

import (
	"fmt"
	"log"
	"time"
)

// checkAutoVacuum inspects the current auto_vacuum mode and logs a warning
// if it's not INCREMENTAL. Optionally performs a one-time full VACUUM if
// the operator has set db.vacuumOnStartup: true in config (#919).
func checkAutoVacuum(db *DB, cfg *Config, dbPath string) {
	var autoVacuum int
	if err := db.conn.QueryRow("PRAGMA auto_vacuum").Scan(&autoVacuum); err != nil {
		log.Printf("[db] warning: could not read auto_vacuum: %v", err)
		return
	}

	if autoVacuum == 2 {
		log.Printf("[db] auto_vacuum=INCREMENTAL")
		return
	}

	modes := map[int]string{0: "NONE", 1: "FULL", 2: "INCREMENTAL"}
	mode := modes[autoVacuum]
	if mode == "" {
		mode = fmt.Sprintf("UNKNOWN(%d)", autoVacuum)
	}

	log.Printf("[db] auto_vacuum=%s — DB needs one-time VACUUM to enable incremental auto-vacuum. "+
		"Set db.vacuumOnStartup: true in config to migrate (will block startup for several minutes on large DBs). "+
		"See https://github.com/Kpa-clawbot/CoreScope/issues/919", mode)

	if cfg.DB != nil && cfg.DB.VacuumOnStartup {
		// WARNING: Full VACUUM creates a temporary copy of the entire DB file.
		// Requires ~2× the DB file size in free disk space or it will fail.
		log.Printf("[db] vacuumOnStartup=true — starting one-time full VACUUM (ensure 2x DB size free disk space)...")
		start := time.Now()

		rw, err := openRW(dbPath)
		if err != nil {
			log.Printf("[db] VACUUM failed: could not open RW connection: %v", err)
			return
		}
		defer rw.Close()

		if _, err := rw.Exec("PRAGMA auto_vacuum = INCREMENTAL"); err != nil {
			log.Printf("[db] VACUUM failed: could not set auto_vacuum: %v", err)
			return
		}
		if _, err := rw.Exec("VACUUM"); err != nil {
			log.Printf("[db] VACUUM failed: %v", err)
			return
		}

		elapsed := time.Since(start)
		log.Printf("[db] VACUUM complete in %v — auto_vacuum is now INCREMENTAL", elapsed.Round(time.Millisecond))

		// Re-check
		var newMode int
		if err := db.conn.QueryRow("PRAGMA auto_vacuum").Scan(&newMode); err == nil {
			if newMode == 2 {
				log.Printf("[db] auto_vacuum=INCREMENTAL (confirmed after VACUUM)")
			} else {
				log.Printf("[db] warning: auto_vacuum=%d after VACUUM — expected 2", newMode)
			}
		}
	}
}

// runIncrementalVacuum runs PRAGMA incremental_vacuum(N) on a read-write
// connection. Safe to call on auto_vacuum=NONE databases (noop).
func runIncrementalVacuum(dbPath string, pages int) {
	rw, err := openRW(dbPath)
	if err != nil {
		log.Printf("[vacuum] could not open RW connection: %v", err)
		return
	}
	defer rw.Close()

	if _, err := rw.Exec(fmt.Sprintf("PRAGMA incremental_vacuum(%d)", pages)); err != nil {
		log.Printf("[vacuum] incremental_vacuum error: %v", err)
	}
}

+1 −7
@@ -9,11 +9,6 @@
    "packetDays": 30,
    "_comment": "nodeDays: nodes not seen in N days moved to inactive_nodes (default 7). observerDays: observers not sending data in N days are removed (-1 = keep forever, default 14). packetDays: transmissions older than N days are deleted (0 = disabled)."
  },
  "db": {
    "vacuumOnStartup": false,
    "incrementalVacuumPages": 1024,
    "_comment": "vacuumOnStartup: run one-time full VACUUM to enable incremental auto-vacuum on existing DBs (blocks startup for minutes on large DBs; requires 2x DB file size in free disk space). incrementalVacuumPages: free pages returned to OS after each retention reaper cycle (default 1024). See #919."
  },
  "https": {
    "cert": "/path/to/cert.pem",
    "key": "/path/to/key.pem",
@@ -213,8 +208,7 @@
  "packetStore": {
    "maxMemoryMB": 1024,
    "estimatedPacketBytes": 450,
    "retentionHours": 168,
    "_comment": "In-memory packet store. maxMemoryMB caps RAM usage. retentionHours: only packets younger than this are loaded on startup and kept in memory (0 = unlimited, not recommended for large DBs — causes OOM on cold start). 168 = 7 days. Must be ≤ retention.packetDays * 24."
    "_comment": "In-memory packet store. maxMemoryMB caps RAM usage. All packets loaded on startup, served from RAM."
  },
  "resolvedPath": {
    "backfillHours": 24,

@@ -1,204 +0,0 @@
# Scope Stats Page — Design Spec

**Issue**: Kpa-clawbot/CoreScope#899
**Date**: 2026-04-23
**Branch target**: `master`

---

## Overview

Add a dedicated **Scopes** page showing scope/region statistics for MeshCore transport-route packets. Scope filtering in MeshCore uses `TRANSPORT_FLOOD` (route_type 0) and `TRANSPORT_DIRECT` (route_type 3) packets that carry two 16-bit transport codes. Code1 ≠ `0000` means the packet is region-scoped.

Feature 3 from the issue (default scope per client via advert) is **not implemented** — the advert format has no scope field in the current firmware.

---

## How Scopes Work (Firmware)

Transport code derivation (authoritative source: `meshcore-dev/MeshCore`):

```
key   = SHA256("#regionname")[:16]          // TransportKeyStore::getAutoKeyFor
Code1 = HMAC-SHA256(key, type || payload)   // TransportKey::calcTransportCode, 2-byte output
```

Code1 is a **per-message** HMAC — the same region produces a different Code1 for every message. Identifying a region from Code1 requires knowing the region name in advance and recomputing the HMAC.

`Code1 = 0000` is the "no scope" sentinel (also `FFFF` is reserved). Packets with route_type 1 or 2 (plain FLOOD/DIRECT) carry no transport codes.
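
For reference, a minimal Go sketch of this derivation (illustrative only: `regionKey` and `calcCode1` are hypothetical names, and the big-endian read of the 2-byte truncation is an assumption — the firmware source above is authoritative):

```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/binary"
	"fmt"
)

// regionKey mirrors TransportKeyStore::getAutoKeyFor: the first 16 bytes
// of SHA256 over the region name (including the leading '#').
func regionKey(region string) []byte {
	sum := sha256.Sum256([]byte(region))
	return sum[:16]
}

// calcCode1 mirrors TransportKey::calcTransportCode: HMAC-SHA256 over
// payloadType || payload, truncated to its first 2 bytes.
func calcCode1(key []byte, payloadType byte, payload []byte) uint16 {
	mac := hmac.New(sha256.New, key)
	mac.Write([]byte{payloadType})
	mac.Write(payload)
	return binary.BigEndian.Uint16(mac.Sum(nil)[:2])
}

func main() {
	key := regionKey("#belgium")
	// Same region, different payloads → different Code1 (per-message HMAC).
	fmt.Printf("%04x\n", calcCode1(key, 0x02, []byte{0xde, 0xad}))
	fmt.Printf("%04x\n", calcCode1(key, 0x02, []byte{0xbe, 0xef}))
}
```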

---

## Config

Add `hashRegions` to the ingestor `Config` struct in `cmd/ingestor/config.go`, mirroring `hashChannels`:

```json
"hashRegions": ["#belgium", "#eu", "#brussels"]
```

Normalization (same rules as `hashChannels`):
- Trim whitespace
- Prepend `#` if missing
- Skip empty entries

---

## Ingestor Changes

### Key derivation (`loadRegionKeys`)

```go
func loadRegionKeys(cfg *Config) map[string][]byte {
	// key = first 16 bytes of SHA256("#regionname")
}
```

Returns `map[string][]byte` (region name → 16-byte HMAC key). Called once at startup, stored on the `Store`.
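
A possible shape for the body (a sketch, assuming the config field is named `HashRegions []string` and that `crypto/sha256` and `strings` are imported; only `loadRegionKeys` itself is confirmed by this spec):

```go
func loadRegionKeys(cfg *Config) map[string][]byte {
	keys := make(map[string][]byte, len(cfg.HashRegions))
	for _, r := range cfg.HashRegions {
		r = strings.TrimSpace(r) // trim whitespace
		if r == "" {
			continue // skip empty entries
		}
		if !strings.HasPrefix(r, "#") {
			r = "#" + r // prepend '#' if missing
		}
		sum := sha256.Sum256([]byte(r))
		keys[r] = sum[:16] // key = first 16 bytes of SHA256("#regionname")
	}
	return keys
}
```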

### Decoder: expose raw payload bytes

Add `PayloadRaw []byte` to `DecodedPacket` in `cmd/ingestor/decoder.go`. Populated from the raw `buf` slice at the payload offset — zero-copy slice, no allocation. This is the **encrypted** payload bytes, matching what the firmware feeds into `calcTransportCode`.

### At-ingest region matching

In `BuildPacketData` (the decision flow is sketched below this list):
- Skip if `route_type` not in `{0, 3}` → `scope_name` stays `nil`
- If `Code1 == "0000"` → `scope_name = nil` (unscoped transport, no scope involvement)
- If `Code1 != "0000"` → try each region key:

```
HMAC-SHA256(key, payloadType_byte || PayloadRaw) → first 2 bytes as uint16
```

First match → `scope_name = "#regionname"`. No match → `scope_name = ""` (unknown scope).

Add `ScopeName *string` to `PacketData`.
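
That decision flow, as a hypothetical helper (it reuses the `calcCode1` shape from the firmware section above; the real integration into `BuildPacketData` and its field names may differ):

```go
// matchRegion returns the scope_name value for one packet: nil for
// non-transport routes or Code1 == 0000, the region name on a match,
// and "" (unknown scope) when no configured region matches.
func matchRegion(routeType int, code1 uint16, payloadType byte, payloadRaw []byte, keys map[string][]byte) *string {
	if routeType != 0 && routeType != 3 {
		return nil // not TRANSPORT_FLOOD / TRANSPORT_DIRECT
	}
	if code1 == 0x0000 {
		return nil // unscoped transport — the "no scope" sentinel
	}
	for name, key := range keys {
		if calcCode1(key, payloadType, payloadRaw) == code1 {
			n := name
			return &n // first match wins
		}
	}
	unknown := ""
	return &unknown // scoped, but no configured region matched
}
```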
|
||||
|
||||
### MQTT-sourced packets (DM / CHAN paths in main.go)
|
||||
|
||||
These are injected directly without going through `BuildPacketData`. They use `route_type = 1` (FLOOD), so they are never transport-route packets. No scope matching needed for these paths.
|
||||
|
||||
---
|
||||
|
||||
## Database
|
||||
|
||||
### Migration
|
||||
|
||||
```sql
|
||||
ALTER TABLE transmissions ADD COLUMN scope_name TEXT DEFAULT NULL;
|
||||
CREATE INDEX idx_tx_scope_name ON transmissions(scope_name) WHERE scope_name IS NOT NULL;
|
||||
```
|
||||
|
||||
### Column semantics
|
||||
|
||||
| Value | Meaning |
|
||||
|-------|---------|
|
||||
| `NULL` | Either: non-transport-route packet (route_type 1/2), or transport-route with Code1=0000 |
|
||||
| `""` (empty string) | Transport-route, Code1 ≠ 0000, but no configured region matched |
|
||||
| `"#belgium"` | Matched named region |
|
||||
|
||||
The API stats queries resolve the NULL ambiguity by always filtering `route_type IN (0, 3)` first:
|
||||
- `unscoped` count = `route_type IN (0,3) AND scope_name IS NULL`
|
||||
- `scoped` count = `route_type IN (0,3) AND scope_name IS NOT NULL`
|
||||
|
||||
### Backfill
|
||||
|
||||
On migration, re-decode `raw_hex` for all rows where `route_type IN (0, 3)` and `scope_name IS NULL`. Run the same HMAC matching logic. Rows with `Code1 = 0000` remain `NULL`.
|
||||
|
||||
The backfill runs in the existing migration framework in `cmd/ingestor/db.go`. If no regions are configured, backfill is skipped.
|
||||
|
||||
---
|
||||
|
||||
## API
|
||||
|
||||
### `GET /api/scope-stats`
|
||||
|
||||
**Query param**: `window` — one of `1h`, `24h` (default), `7d`
|
||||
|
||||
**Time-series bucket sizes**:
|
||||
| Window | Bucket |
|
||||
|--------|--------|
|
||||
| `1h` | 5 min |
|
||||
| `24h` | 1 hour |
|
||||
| `7d` | 6 hours|
|
||||
|
||||
**Response**:
|
||||
```json
|
||||
{
|
||||
"window": "24h",
|
||||
"summary": {
|
||||
"transportTotal": 1240,
|
||||
"scoped": 890,
|
||||
"unscoped": 350,
|
||||
"unknownScope": 42
|
||||
},
|
||||
"byRegion": [
|
||||
{ "name": "#belgium", "count": 612 },
|
||||
{ "name": "#eu", "count": 236 }
|
||||
],
|
||||
"timeSeries": [
|
||||
{ "t": "2026-04-23T10:00:00Z", "scoped": 45, "unscoped": 18 },
|
||||
{ "t": "2026-04-23T11:00:00Z", "scoped": 51, "unscoped": 22 }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
- `transportTotal` = `scoped + unscoped` (transport-route packets only)
|
||||
- `scoped` = Code1 ≠ 0000 (named + unknown)
|
||||
- `unscoped` = transport-route with Code1 = 0000
|
||||
- `unknownScope` = scoped but no region name matched (subset of `scoped`)
|
||||
- `byRegion` sorted by count descending, excludes unknown
|
||||
- `timeSeries` covers the full window at the bucket granularity

Route: `GET /api/scope-stats` is registered in `cmd/server/routes.go`.
No auth required (same as other read endpoints).
TTL cache: 30 seconds (heavier query than `/api/stats`).

---

## Frontend

### Navigation

Add a nav link between Channels and Nodes in `public/index.html`:

```html
<a href="#/scopes" class="nav-link" data-route="scopes">Scopes</a>
```

### `public/scopes.js`

Three sections on the page:

**1. Summary cards** (reuse the existing card CSS pattern from the home/analytics pages)
- Transport total, Scoped, Unscoped, Unknown scope
- Each card shows count + percentage of transport total

**2. Per-region table**
Columns: Region, Messages, % of Scoped
Sorted by count descending. Last row: "Unknown scope" (italic) if unknownScope > 0.
Shows a "No regions configured" message if `byRegion` is empty and `unknownScope = 0`.

**3. Time-series chart**
- Window selector: `1h / 24h / 7d` (default 24h)
- Two lines: **Scoped** (blue) and **Unscoped** (grey)
- Uses the same lightweight canvas chart pattern as the other pages (no external chart lib)

### Cache buster

`scopes.js` is added to the `__BUST__` entries in `index.html` in the same commit.

---

## Testing

- Unit tests for `loadRegionKeys`: normalization, key bytes match the firmware SHA256 derivation
- Unit tests for HMAC matching: known Code1 value computed from firmware logic, verified against the Go implementation
- Integration test: ingest a synthetic transport-route packet with a known region, assert the `scope_name` column is set correctly
- API test: `GET /api/scope-stats` returns correct summary counts against a fixture DB

---

## Out of Scope

- Feature 3 (default scope per client via advert) — the firmware has no advert scope field
- Drill-down from region row to filtered packet list (deferred)
- Private regions (`$`-prefixed) — these use secret keys that are not publicly derivable
@@ -98,22 +98,6 @@ How long (in hours) before a node is marked degraded or silent:
| `retention.nodeDays` | `7` | Nodes not seen in N days move to inactive |
| `retention.packetDays` | `30` | Packets older than N days are deleted daily |

> **Note:** Lowering retention does **not** immediately shrink the database file.
> SQLite marks deleted pages as free but does not return them to the filesystem
> unless [incremental auto-vacuum](database.md) is enabled. New databases created
> after v0.x.x have auto-vacuum enabled automatically. Existing databases require
> a one-time migration — see the [Database](database.md) guide.

## Database

| Field | Default | Description |
|-------|---------|-------------|
| `db.vacuumOnStartup` | `false` | Run a one-time full `VACUUM` on startup to enable incremental auto-vacuum (blocks for minutes on large DBs) |
| `db.incrementalVacuumPages` | `1024` | Free pages returned to the OS after each retention reaper cycle |

See [Database](database.md) for details on SQLite auto-vacuum, WAL, and manual maintenance.
See [#919](https://github.com/Kpa-clawbot/CoreScope/issues/919) for background.

## Channel decryption

| Field | Description |
@@ -166,9 +150,6 @@ Lower values = fresher data but more server load.
|-------|---------|-------------|
| `packetStore.maxMemoryMB` | `1024` | Maximum RAM for the in-memory packet store |
| `packetStore.estimatedPacketBytes` | `450` | Estimated bytes per packet (for memory budgeting) |
| `packetStore.retentionHours` | `0` | Only load packets younger than N hours on startup and keep them in memory. **Set this on any instance with a large DB.** `0` = unlimited (loads the full DB history — causes OOM on cold start when the DB has hundreds of thousands of paths). Recommended: same as `retention.packetDays × 24` (e.g. `168` for 7 days). |

> **Warning:** Leaving `retentionHours` at `0` on a large database will cause the server to OOM-kill itself on every cold start. The full packet history is loaded into the subpath index at startup; a DB with ~280K paths produces ~13M index entries before the process is killed.

## Timestamps

@@ -1,82 +0,0 @@
# Database

CoreScope uses SQLite in WAL (Write-Ahead Log) mode for both the server
(read-only) and ingestor (read-write).

## WAL mode

WAL mode allows concurrent reads while writes happen. It is set automatically
at connection time via `PRAGMA journal_mode=WAL`. No operator action is needed.

The WAL file (`meshcore.db-wal`) grows during writes and is checkpointed
(merged back into the main DB) periodically and at clean shutdown.
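
For reference, a checkpoint can also be forced manually from the SQLite shell; `wal_checkpoint` is a standard SQLite pragma, not a CoreScope feature:

```sql
-- Merge the WAL back into the main DB file and truncate meshcore.db-wal.
PRAGMA wal_checkpoint(TRUNCATE);
```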

## Auto-vacuum

By default, SQLite does not shrink the database file after `DELETE` operations.
Deleted pages are marked free and reused by future writes, but the file size
on disk stays the same. This is surprising when lowering retention settings.

### New databases

Databases created after this feature was added automatically have
`PRAGMA auto_vacuum = INCREMENTAL`. After each retention reaper cycle,
CoreScope runs `PRAGMA incremental_vacuum(N)` to return free pages to the OS.

### Existing databases

The `auto_vacuum` mode is stored in the database header and can only be changed
by rewriting the entire file with `VACUUM`. CoreScope will **not** do this
automatically — on large databases (5+ GB seen in the wild) it takes minutes
and holds an exclusive lock.

**To migrate an existing database:**

1. At startup, CoreScope logs a warning:
   ```
   [db] auto_vacuum=NONE — DB needs one-time VACUUM to enable incremental auto-vacuum.
   ```
2. **Ensure at least 2× the database file size in free disk space.** Full VACUUM
   creates a temporary copy of the entire file — on a near-full disk it will fail.
3. Set `db.vacuumOnStartup: true` in your `config.json`:
   ```json
   {
     "db": {
       "vacuumOnStartup": true
     }
   }
   ```
4. Restart CoreScope. The one-time `VACUUM` will run and block startup.
5. After migration, remove or set `vacuumOnStartup: false` — it's not needed again.

### Configuration

| Field | Default | Description |
|-------|---------|-------------|
| `db.vacuumOnStartup` | `false` | One-time full VACUUM to enable incremental auto-vacuum |
| `db.incrementalVacuumPages` | `1024` | Pages returned to OS per reaper cycle |

## Manual VACUUM

You can also run a manual vacuum from the SQLite CLI:

```bash
sqlite3 data/meshcore.db "PRAGMA auto_vacuum = INCREMENTAL; VACUUM;"
```

This is equivalent to `vacuumOnStartup: true` but can be done offline.

> ⚠️ Full VACUUM requires **2× the database file size** in free disk space (it
> creates a temporary copy). Check with `ls -lh data/meshcore.db` before running.

## Checking current mode

```bash
sqlite3 data/meshcore.db "PRAGMA auto_vacuum;"
```

- `0` = NONE (default for old databases)
- `1` = FULL (automatic, but slower writes)
- `2` = INCREMENTAL (recommended — CoreScope triggers vacuum after deletes)

See [#919](https://github.com/Kpa-clawbot/CoreScope/issues/919) for background on this feature.
@@ -1,3 +0,0 @@
module github.com/meshcore-analyzer/packetpath

go 1.22
@@ -1,76 +0,0 @@
// Package packetpath provides shared helpers for extracting path hops from
// raw MeshCore packet hex bytes.
package packetpath

import (
    "encoding/hex"
    "fmt"
    "strings"
)

// DecodePathFromRawHex extracts the header path hops directly from raw hex bytes.
// This is the authoritative path that matches what's in raw_hex, as opposed to
// decoded.Path.Hops which may be overwritten for TRACE packets (issue #886).
//
// WARNING: This function returns the literal header path bytes regardless of
// payload type. For TRACE packets these bytes are SNR values, NOT hop hashes.
// Callers that may receive TRACE packets MUST check PathBytesAreHops(payloadType)
// first, or use the safer DecodeHopsForPayload wrapper.
func DecodePathFromRawHex(rawHex string) ([]string, error) {
    buf, err := hex.DecodeString(rawHex)
    if err != nil || len(buf) < 2 {
        return nil, fmt.Errorf("invalid or too-short hex")
    }

    headerByte := buf[0]
    offset := 1
    if IsTransportRoute(int(headerByte & 0x03)) {
        if len(buf) < offset+4 {
            return nil, fmt.Errorf("too short for transport codes")
        }
        offset += 4
    }
    if offset >= len(buf) {
        return nil, fmt.Errorf("too short for path byte")
    }

    pathByte := buf[offset]
    offset++
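
    // pathByte layout: bits 7-6 encode hashSize-1, bits 5-0 encode hashCount
    // (the same header format the test file below constructs by hand).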
    hashSize := int(pathByte>>6) + 1
    hashCount := int(pathByte & 0x3F)

    hops := make([]string, 0, hashCount)
    for i := 0; i < hashCount; i++ {
        start := offset + i*hashSize
        end := start + hashSize
        if end > len(buf) {
            break
        }
        hops = append(hops, strings.ToUpper(hex.EncodeToString(buf[start:end])))
    }
    return hops, nil
}

// DecodeHopsForPayload returns the header path hops only when the payload type's
// header bytes are actually route hops (i.e. PathBytesAreHops(payloadType) is true).
// For TRACE packets it returns (nil, ErrPayloadHasNoHeaderHops) so the caller is
// forced to source hops from the decoded payload instead.
//
// Prefer this over DecodePathFromRawHex when the payload type is known.
func DecodeHopsForPayload(rawHex string, payloadType byte) ([]string, error) {
    if !PathBytesAreHops(payloadType) {
        return nil, ErrPayloadHasNoHeaderHops
    }
    return DecodePathFromRawHex(rawHex)
}

// ErrPayloadHasNoHeaderHops is returned by DecodeHopsForPayload when the
// payload type repurposes the raw_hex header path bytes (e.g. TRACE → SNR values).
var ErrPayloadHasNoHeaderHops = errPayloadHasNoHeaderHops{}

type errPayloadHasNoHeaderHops struct{}

func (errPayloadHasNoHeaderHops) Error() string {
    return "payload type repurposes header path bytes; source hops from decoded payload"
}
@@ -1,150 +0,0 @@
package packetpath

import (
    "encoding/hex"
    "encoding/json"
    "strings"
    "testing"
)

func TestDecodePathFromRawHex_Basic(t *testing.T) {
    // Build a simple FLOOD packet (route_type=1) with 2 hops of hashSize=1
    // header: route_type=1, payload_type=2 (TXT_MSG), version=0 → 0b00_0010_01 = 0x09
    // path byte: hashSize=1 (bits 7-6 = 0), hashCount=2 (bits 5-0 = 2) → 0x02
    // hops: AB, CD
    // payload: some bytes
    raw := "0902ABCD" + "DEADBEEF"
    hops, err := DecodePathFromRawHex(raw)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(hops) != 2 || hops[0] != "AB" || hops[1] != "CD" {
        t.Fatalf("expected [AB, CD], got %v", hops)
    }
}

func TestDecodePathFromRawHex_ZeroHops(t *testing.T) {
    // DIRECT route (type=2), no hops → 0b00_0010_10 = 0x0A
    // path byte: 0x00 (0 hops)
    raw := "0A00" + "DEADBEEF"
    hops, err := DecodePathFromRawHex(raw)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(hops) != 0 {
        t.Fatalf("expected 0 hops, got %v", hops)
    }
}

func TestDecodePathFromRawHex_TransportRoute(t *testing.T) {
    // TRANSPORT_FLOOD (route_type=0), payload_type=5 (GRP_TXT), version=0
    // header: 0b00_0101_00 = 0x14
    // transport codes: 4 bytes
    // path byte: hashSize=1, hashCount=1 → 0x01
    // hop: FF
    raw := "14" + "00112233" + "01" + "FF" + "DEAD"
    hops, err := DecodePathFromRawHex(raw)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(hops) != 1 || hops[0] != "FF" {
        t.Fatalf("expected [FF], got %v", hops)
    }
}

// buildTracePacket creates a TRACE packet hex string where header path bytes are
// SNR values, and payload contains the actual route hops.
func buildTracePacket() (rawHex string, headerPathHops []string, payloadHops []string) {
    // DIRECT route (type=2), TRACE payload (type=9), version=0
    // header byte: 0b00_1001_10 = 0x26
    headerByte := byte(0x26)

    // Header path: 2 SNR bytes (hashSize=1, hashCount=2) → path byte = 0x02
    // SNR values: 0x1A (26 dB), 0x0F (15 dB)
    pathByte := byte(0x02)
    snrBytes := []byte{0x1A, 0x0F}

    // TRACE payload: tag(4) + authCode(4) + flags(1) + path hops
    tag := []byte{0x01, 0x00, 0x00, 0x00}
    authCode := []byte{0x02, 0x00, 0x00, 0x00}
    // flags: path_sz=0 (1 byte hops), other bits=0 → 0x00
    flags := byte(0x00)
    // Payload hops: AA, BB, CC (the actual route)
    payloadPathBytes := []byte{0xAA, 0xBB, 0xCC}

    var buf []byte
    buf = append(buf, headerByte, pathByte)
    buf = append(buf, snrBytes...)
    buf = append(buf, tag...)
    buf = append(buf, authCode...)
    buf = append(buf, flags)
    buf = append(buf, payloadPathBytes...)

    rawHex = strings.ToUpper(hex.EncodeToString(buf))
    headerPathHops = []string{"1A", "0F"}    // SNR values — NOT route hops
    payloadHops = []string{"AA", "BB", "CC"} // actual route hops from payload
    return
}

func TestDecodePathFromRawHex_TraceReturnsSNR(t *testing.T) {
    rawHex, expectedSNR, _ := buildTracePacket()
    hops, err := DecodePathFromRawHex(rawHex)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    // DecodePathFromRawHex always returns header path bytes — for TRACE these are SNR values
    if len(hops) != len(expectedSNR) {
        t.Fatalf("expected %d hops (SNR), got %d: %v", len(expectedSNR), len(hops), hops)
    }
    for i, h := range hops {
        if h != expectedSNR[i] {
            t.Errorf("hop[%d]: expected %s, got %s", i, expectedSNR[i], h)
        }
    }
}

func TestTracePathJSON_UsesPayloadHops(t *testing.T) {
    // This test validates the TRACE vs non-TRACE logic that callers should implement:
    // For TRACE: path_json = decoded.Path.Hops (payload-decoded route hops)
    // For non-TRACE: path_json = DecodePathFromRawHex(raw_hex)
    rawHex, snrHops, payloadHops := buildTracePacket()

    // DecodePathFromRawHex returns SNR bytes for TRACE
    headerHops, _ := DecodePathFromRawHex(rawHex)
    headerJSON, _ := json.Marshal(headerHops)

    // payload hops (what decoded.Path.Hops would return after TRACE decoding)
    payloadJSON, _ := json.Marshal(payloadHops)

    // They must differ — SNR != route hops
    if string(headerJSON) == string(payloadJSON) {
        t.Fatalf("SNR hops and payload hops should differ for TRACE; both are %s", headerJSON)
    }

    // For TRACE, path_json should be payloadHops, not headerHops
    _ = snrHops // snrHops == headerHops — used for documentation
    t.Logf("TRACE: header path (SNR) = %s, payload path (route) = %s", headerJSON, payloadJSON)
}

func TestDecodeHopsForPayload_NonTrace(t *testing.T) {
    // header 0x01, path_len 0x02, hops 0xAA 0xBB, then payload bytes
    raw := "0102AABB00"
    hops, err := DecodeHopsForPayload(raw, 0x05) // GRP_TXT — header path bytes ARE hops
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(hops) != 2 || hops[0] != "AA" || hops[1] != "BB" {
        t.Errorf("expected [AA BB], got %v", hops)
    }
}

func TestDecodeHopsForPayload_TraceReturnsError(t *testing.T) {
    raw := "010205F00100"
    hops, err := DecodeHopsForPayload(raw, PayloadTRACE)
    if err != ErrPayloadHasNoHeaderHops {
        t.Errorf("expected ErrPayloadHasNoHeaderHops, got %v", err)
    }
    if hops != nil {
        t.Errorf("expected nil hops for TRACE, got %v", hops)
    }
}
@@ -1,24 +0,0 @@
package packetpath

// Route type constants (header bits 1-0).
const (
    RouteTransportFlood  = 0
    RouteFlood           = 1
    RouteDirect          = 2
    RouteTransportDirect = 3
)

// PayloadTRACE is the payload type constant for TRACE packets.
const PayloadTRACE = 0x09

// IsTransportRoute returns true for TRANSPORT_FLOOD (0) and TRANSPORT_DIRECT (3).
func IsTransportRoute(routeType int) bool {
    return routeType == RouteTransportFlood || routeType == RouteTransportDirect
}

// PathBytesAreHops returns true when the raw_hex header path bytes represent
// route hop hashes (the normal case). Returns false for packet types where
// header path bytes are repurposed (e.g. TRACE uses them for SNR values).
func PathBytesAreHops(payloadType byte) bool {
    return payloadType != PayloadTRACE
}
@@ -1,31 +0,0 @@
package packetpath

import "testing"

func TestIsTransportRoute(t *testing.T) {
    if !IsTransportRoute(RouteTransportFlood) {
        t.Error("RouteTransportFlood should be transport")
    }
    if !IsTransportRoute(RouteTransportDirect) {
        t.Error("RouteTransportDirect should be transport")
    }
    if IsTransportRoute(RouteFlood) {
        t.Error("RouteFlood should not be transport")
    }
    if IsTransportRoute(RouteDirect) {
        t.Error("RouteDirect should not be transport")
    }
}

func TestPathBytesAreHops(t *testing.T) {
    if PathBytesAreHops(PayloadTRACE) {
        t.Error("PathBytesAreHops(PayloadTRACE) should be false")
    }
    // All other known payload types should return true.
    otherTypes := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F}
    for _, pt := range otherTypes {
        if !PathBytesAreHops(pt) {
            t.Errorf("PathBytesAreHops(0x%02X) should be true", pt)
        }
    }
}
+1
-105
@@ -14,71 +14,6 @@ function isTransportRoute(rt) { return rt === 0 || rt === 3; }
function getPathLenOffset(routeType) { return isTransportRoute(routeType) ? 5 : 1; }
function transportBadge(rt) { return isTransportRoute(rt) ? ' <span class="badge badge-transport" title="' + routeTypeName(rt) + '">T</span>' : ''; }

/**
 * Compute breakdown byte ranges from raw_hex on the client.
 * Mirrors cmd/server/decoder.go BuildBreakdown(). Used so per-observation raw_hex
 * (which can differ in path length from the top-level packet) gets accurate
 * highlighted byte ranges, instead of using the server-supplied breakdown
 * computed once from the top-level raw_hex.
 */
function computeBreakdownRanges(hexString, routeType, payloadType) {
  if (!hexString) return [];
  const clean = hexString.replace(/\s+/g, '');
  const bytes = clean.length / 2;
  if (bytes < 2) return [];
  const ranges = [];
  // Header
  ranges.push({ start: 0, end: 0, label: 'Header' });
  let offset = 1;
  if (isTransportRoute(routeType)) {
    if (bytes < offset + 4) return ranges;
    ranges.push({ start: offset, end: offset + 3, label: 'Transport Codes' });
    offset += 4;
  }
  if (offset >= bytes) return ranges;
  // Path Length byte
  ranges.push({ start: offset, end: offset, label: 'Path Length' });
  const pathByte = parseInt(clean.slice(offset * 2, offset * 2 + 2), 16);
  offset += 1;
  if (isNaN(pathByte)) return ranges;
  const hashSize = (pathByte >> 6) + 1;
  const hashCount = pathByte & 0x3F;
  const pathBytes = hashSize * hashCount;
  if (hashCount > 0 && offset + pathBytes <= bytes) {
    ranges.push({ start: offset, end: offset + pathBytes - 1, label: 'Path' });
  }
  offset += pathBytes;
  if (offset >= bytes) return ranges;
  const payloadStart = offset;
  // ADVERT (payload_type 4) gets sub-fields when full record present
  if (payloadType === 4 && bytes - payloadStart >= 100) {
    ranges.push({ start: payloadStart, end: payloadStart + 31, label: 'PubKey' });
    ranges.push({ start: payloadStart + 32, end: payloadStart + 35, label: 'Timestamp' });
    ranges.push({ start: payloadStart + 36, end: payloadStart + 99, label: 'Signature' });
    const appStart = payloadStart + 100;
    if (appStart < bytes) {
      ranges.push({ start: appStart, end: appStart, label: 'Flags' });
      const appFlags = parseInt(clean.slice(appStart * 2, appStart * 2 + 2), 16);
      let fOff = appStart + 1;
      if (!isNaN(appFlags)) {
        if ((appFlags & 0x10) && fOff + 8 <= bytes) {
          ranges.push({ start: fOff, end: fOff + 3, label: 'Latitude' });
          ranges.push({ start: fOff + 4, end: fOff + 7, label: 'Longitude' });
          fOff += 8;
        }
        if ((appFlags & 0x20) && fOff + 2 <= bytes) fOff += 2;
        if ((appFlags & 0x40) && fOff + 2 <= bytes) fOff += 2;
        if ((appFlags & 0x80) && fOff < bytes) {
          ranges.push({ start: fOff, end: bytes - 1, label: 'Name' });
        }
      }
    }
  } else {
    ranges.push({ start: payloadStart, end: bytes - 1, label: 'Payload' });
  }
  return ranges;
}

// --- Utilities ---
const _apiPerf = { calls: 0, totalMs: 0, log: [], cacheHits: 0 };
const _apiCache = new Map();
@@ -505,21 +440,6 @@ const pages = {};

function registerPage(name, mod) { pages[name] = mod; }

// Tools landing page — shows sub-menu with Trace and Path Inspector (spec §2.8, M1 fix).
registerPage('tools-landing', {
  init: function (container) {
    container.innerHTML =
      '<div class="tools-landing">' +
      '<h2>Tools</h2>' +
      '<div class="tools-menu">' +
      '<a href="#/tools/path-inspector" class="tools-card"><h3>🔍 Path Inspector</h3><p>Resolve prefix paths to candidate full-pubkey routes with confidence scoring.</p></a>' +
      '<a href="#/tools/trace/" class="tools-card"><h3>📡 Trace Viewer</h3><p>View detailed packet traces by hash.</p></a>' +
      '</div>' +
      '</div>';
  },
  destroy: function () {}
});

let currentPage = null;

function closeNav() {
@@ -540,12 +460,6 @@ function closeMoreMenu() {
function navigate() {
  closeNav();

  // Backward-compat redirect: #/traces/<hash> → #/tools/trace/<hash> (issue #944).
  if (location.hash.startsWith('#/traces/')) {
    location.hash = location.hash.replace('#/traces/', '#/tools/trace/');
    return;
  }

  const hash = location.hash.replace('#/', '') || 'packets';
  const route = hash.split('?')[0];

@@ -573,27 +487,9 @@
    basePage = 'observer-detail';
  }

  // Tools sub-routing (issue #944): tools/trace/<hash>, tools/path-inspector
  if (basePage === 'tools') {
    if (routeParam && routeParam.startsWith('trace/')) {
      basePage = 'traces';
      routeParam = routeParam.substring(6); // strip "trace/"
    } else if (routeParam === 'path-inspector' || (routeParam && routeParam.startsWith('path-inspector'))) {
      basePage = 'path-inspector';
      routeParam = null;
    } else if (!routeParam) {
      // Default tools landing shows menu with both entries.
      basePage = 'tools-landing';
    }
  }
  // Also support old #/traces (no sub-path) → traces page.
  if (basePage === 'traces' && !routeParam) {
    basePage = 'traces';
  }

  // Update nav active state
  document.querySelectorAll('.nav-link[data-route]').forEach(el => {
    el.classList.toggle('active', el.dataset.route === basePage || (el.dataset.route === 'tools' && (basePage === 'traces' || basePage === 'path-inspector' || basePage === 'tools-landing')));
    el.classList.toggle('active', el.dataset.route === basePage);
  });
  // Update "More" button to show active state if a low-priority page is selected
  var moreBtn = document.getElementById('navMoreBtn');

+17
-44
@@ -393,25 +393,17 @@
    }
  }

  // Merge user-stored keys into the channel list.
  // If a stored key matches a server-known channel, mark that channel as
  // userAdded so the ✕ button appears — otherwise the user has no way to
  // remove a key they added but that the server already knows about.
  // Merge user-stored keys into the channel list
  function mergeUserChannels() {
    var keys = ChannelDecrypt.getStoredKeys();
    var names = Object.keys(keys);
    for (var i = 0; i < names.length; i++) {
      var name = names[i];
      var matched = false;
      for (var j = 0; j < channels.length; j++) {
        var ch = channels[j];
        if (ch.name === name || ch.hash === name || ch.hash === ('user:' + name)) {
          ch.userAdded = true;
          matched = true;
          break;
        }
      }
      if (!matched) {
        // Check if channel already exists by name
        var exists = channels.some(function (ch) {
          return ch.name === name || ch.hash === name || ch.hash === ('user:' + name);
        });
        if (!exists) {
          channels.push({
            hash: 'user:' + name,
            name: name,
@@ -757,38 +749,19 @@
      e.stopPropagation();
      var channelHash = removeBtn.getAttribute('data-remove-channel');
      if (!channelHash) return;
      // The localStorage key is the channel name. For user:-prefixed entries
      // strip the prefix; for server-known channels look up the channel
      // object so we use its display name (the hash itself isn't the key).
      var ch = channels.find(function (c) { return c.hash === channelHash; });
      var chName = channelHash.startsWith('user:')
        ? channelHash.substring(5)
        : (ch && ch.name) || channelHash;
      var chName = channelHash.startsWith('user:') ? channelHash.substring(5) : channelHash;
      if (!confirm('Remove channel "' + chName + '"? This will clear saved keys and cached messages.')) return;
      ChannelDecrypt.removeKey(chName);
      if (channelHash.startsWith('user:')) {
        // Pure user-added channel — drop from the list entirely.
        channels = channels.filter(function (c) { return c.hash !== channelHash; });
        if (selectedHash === channelHash) {
          selectedHash = null;
          messages = [];
          history.replaceState(null, '', '#/channels');
          var msgEl2 = document.getElementById('chMessages');
          if (msgEl2) msgEl2.innerHTML = '<div class="ch-empty">Choose a channel from the sidebar to view messages</div>';
          var header2 = document.getElementById('chHeader');
          if (header2) header2.querySelector('.ch-header-text').textContent = 'Select a channel';
        }
      } else if (ch) {
        // Server-known channel: keep the row, just unmark as user-added so
        // the ✕ disappears until they re-add a key.
        ch.userAdded = false;
        // If this was the selected channel, clear decrypted messages since
        // the key is gone — they can't be re-decrypted without re-adding it.
        if (selectedHash === channelHash) {
          messages = [];
          var msgEl2 = document.getElementById('chMessages');
          if (msgEl2) msgEl2.innerHTML = '<div class="ch-empty">Key removed — add a key to decrypt messages</div>';
        }
      // Remove from channels array
      channels = channels.filter(function (c) { return c.hash !== channelHash; });
      if (selectedHash === channelHash) {
        selectedHash = null;
        messages = [];
        history.replaceState(null, '', '#/channels');
        var msgEl2 = document.getElementById('chMessages');
        if (msgEl2) msgEl2.innerHTML = '<div class="ch-empty">Choose a channel from the sidebar to view messages</div>';
        var header2 = document.getElementById('chHeader');
        if (header2) header2.querySelector('.ch-header-text').textContent = 'Select a channel';
      }
      renderChannelList();
      return;

@@ -629,11 +629,7 @@
  }
  writeOverrides(delta);
  _runPipeline();
  // Skip re-render while the user is typing inside the panel — setting
  // innerHTML would destroy the focused input and collapse the mobile keyboard.
  if (!(_panelEl && _panelEl.contains(document.activeElement))) {
    _refreshPanel();
  }
  _refreshPanel();
}, 300);
}

@@ -70,7 +70,7 @@
<div id="help-bar">
  Copy the JSON above → paste as a top-level key in <code>config.json</code> → restart the server.
  Nodes with no GPS fix always pass through. Remove the <code>geo_filter</code> block to disable filtering.
  · <a href="/geofilter-docs.html">Documentation</a>
  · <a href="https://github.com/Kpa-clawbot/CoreScope/blob/master/docs/user-guide/geofilter.md" target="_blank">Documentation ↗</a>
</div>

<script>
@@ -87,8 +87,7 @@ let polygon = null;
let closingLine = null;

function latLonPair(latlng) {
  const w = latlng.wrap();
  return [parseFloat(w.lat.toFixed(6)), parseFloat(w.lng.toFixed(6))];
  return [parseFloat(latlng.lat.toFixed(6)), parseFloat(latlng.lng.toFixed(6))];
}

function render() {
@@ -1,132 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>GeoFilter Docs — CoreScope</title>
  <style>
    * { box-sizing: border-box; margin: 0; padding: 0; }
    body { font-family: system-ui, sans-serif; background: #1a1a2e; color: #e0e0e0; min-height: 100vh; display: flex; flex-direction: column; }
    header { padding: 12px 16px; background: #0f0f23; border-bottom: 1px solid #333; display: flex; align-items: center; gap: 16px; }
    header h1 { font-size: 1rem; font-weight: 600; color: #4a9eff; }
    #back-link { font-size: 0.8rem; color: #4a9eff; text-decoration: none; white-space: nowrap; }
    #back-link:hover { text-decoration: underline; }
    main { flex: 1; max-width: 800px; margin: 0 auto; padding: 32px 24px; width: 100%; }
    h2 { font-size: 1.1rem; font-weight: 600; color: #4a9eff; margin: 32px 0 12px; border-bottom: 1px solid #222; padding-bottom: 6px; }
    h2:first-of-type { margin-top: 0; }
    h3 { font-size: 0.95rem; font-weight: 600; color: #c0c0c0; margin: 20px 0 8px; }
    p { font-size: 0.9rem; line-height: 1.6; color: #ccc; margin-bottom: 10px; }
    ul { padding-left: 20px; margin-bottom: 10px; }
    li { font-size: 0.9rem; line-height: 1.7; color: #ccc; }
    code { font-family: monospace; font-size: 0.85rem; color: #7ec8e3; background: #111; border: 1px solid #333; border-radius: 3px; padding: 1px 5px; }
    pre { background: #111; border: 1px solid #333; border-radius: 6px; padding: 14px 16px; overflow-x: auto; margin: 10px 0 16px; }
    pre code { background: none; border: none; padding: 0; font-size: 0.82rem; color: #7ec8e3; }
    .note { background: #1a2a1a; border: 1px solid #2a4a2a; border-radius: 6px; padding: 10px 14px; margin: 12px 0; }
    .note p { color: #aaddaa; margin: 0; }
    .warn { background: #2a1a0a; border: 1px solid #5a3a0a; border-radius: 6px; padding: 10px 14px; margin: 12px 0; }
    .warn p { color: #ddbb88; margin: 0; }
    table { width: 100%; border-collapse: collapse; margin: 10px 0 16px; font-size: 0.88rem; }
    th { background: #0f0f23; color: #888; font-weight: 500; text-align: left; padding: 8px 12px; border: 1px solid #333; }
    td { padding: 8px 12px; border: 1px solid #222; color: #ccc; vertical-align: top; }
    td code { font-size: 0.82rem; }
  </style>
</head>
<body>

<header>
  <a href="/geofilter-builder.html" id="back-link">← GeoFilter Builder</a>
  <h1>GeoFilter Docs</h1>
</header>

<main>

<h2>How it works</h2>
<p>Geographic filtering restricts which nodes are ingested and returned in API responses. It operates at two levels:</p>
<ul>
  <li><strong>Ingest time</strong> — ADVERT packets carrying GPS coordinates are rejected by the ingestor if the node falls outside the configured area. The node never reaches the database.</li>
  <li><strong>API responses</strong> — Nodes already in the database are filtered from the <code>/api/nodes</code> response if they fall outside the area. This covers nodes ingested before the filter was configured.</li>
</ul>
<div class="note"><p>Nodes with no GPS fix (<code>lat=0, lon=0</code> or missing coordinates) always pass the filter regardless of configuration.</p></div>

<h2>Configuration</h2>
<p>Add a <code>geo_filter</code> block to <code>config.json</code>:</p>
<pre><code>"geo_filter": {
  "polygon": [
    [51.55, 3.80],
    [51.55, 5.90],
    [50.65, 5.90],
    [50.65, 3.80]
  ],
  "bufferKm": 20
}</code></pre>
<table>
  <thead><tr><th>Field</th><th>Type</th><th>Description</th></tr></thead>
  <tbody>
    <tr><td><code>polygon</code></td><td><code>[[lat, lon], ...]</code></td><td>Array of at least 3 coordinate pairs defining the boundary</td></tr>
    <tr><td><code>bufferKm</code></td><td>number</td><td>Extra distance (km) around the polygon edge that is also accepted. <code>0</code> = exact boundary</td></tr>
  </tbody>
</table>
<p>Both the server and the ingestor read <code>geo_filter</code> from <code>config.json</code>. Restart both after changing this section.</p>
<p>To disable filtering entirely, remove the <code>geo_filter</code> block.</p>

<h2>Coordinate ordering</h2>
<div class="warn"><p><strong>Important:</strong> Coordinates are <code>[lat, lon]</code> — latitude first, longitude second. This is the opposite of GeoJSON, which uses <code>[lon, lat]</code>. Swapping them will place your polygon in the wrong location.</p></div>

<h2>Multi-polygon</h2>
<p>Only a single polygon is supported. If your deployment area consists of multiple disconnected regions, draw a single convex hull that covers all of them, or use the largest region with a generous <code>bufferKm</code> value.</p>

<h2>Examples</h2>
<h3>Belgium (bounding rectangle)</h3>
<pre><code>"geo_filter": {
  "polygon": [
    [51.55, 3.80],
    [51.55, 5.90],
    [50.65, 5.90],
    [50.65, 3.80]
  ],
  "bufferKm": 20
}</code></pre>
<h3>Irregular shape</h3>
<pre><code>"geo_filter": {
  "polygon": [
    [51.10, 3.70],
    [51.55, 4.20],
    [51.30, 5.10],
    [50.80, 5.50],
    [50.50, 4.80],
    [50.70, 3.90]
  ],
  "bufferKm": 10
}</code></pre>

<h2>Legacy bounding box</h2>
<p>An older bounding box format is also supported as a fallback when no <code>polygon</code> is present:</p>
<pre><code>"geo_filter": {
  "latMin": 50.65,
  "latMax": 51.55,
  "lonMin": 3.80,
  "lonMax": 5.90
}</code></pre>
<p>Prefer the polygon format — it supports irregular shapes and the <code>bufferKm</code> margin.</p>

<h2>Cleaning up historical nodes</h2>
<p>The ingestor prevents new out-of-bounds nodes from being ingested, but does not retroactively remove nodes stored before the filter was configured. Use the prune script for that:</p>
<pre><code># Dry run — shows what would be deleted without making any changes
python3 scripts/prune-nodes-outside-geo-filter.py --dry-run

# Default paths: /app/data/meshcore.db and /app/config.json
python3 scripts/prune-nodes-outside-geo-filter.py

# Custom paths
python3 scripts/prune-nodes-outside-geo-filter.py /path/to/meshcore.db \
    --config /path/to/config.json

# In Docker — run inside the container
docker exec -it meshcore-analyzer \
    python3 /app/scripts/prune-nodes-outside-geo-filter.py --dry-run</code></pre>
<p>The script reads <code>geo_filter.polygon</code> and <code>geo_filter.bufferKm</code> from config, lists nodes that fall outside, then asks for <code>yes</code> confirmation before deleting. Nodes without coordinates are always kept.</p>
<p>This is a one-time migration tool — run it once after first configuring <code>geo_filter</code> to clean up pre-filter data.</p>

</main>
</body>
</html>
@@ -1,70 +0,0 @@
/* hash-color.js — Deterministic HSL color from packet hash
 * IIFE attaching window.HashColor = { hashToHsl, hashToOutline }
 * Pure function: no DOM access, no state, works in Node vm.createContext sandbox.
 */
(function() {
  'use strict';

  /**
   * Derive a deterministic HSL color string from a hex hash.
   * Uses bytes 0-1 for hue, byte 2 for saturation, byte 3 for lightness.
   * Produces bright vivid fills; contrast is provided by a dark outline (hashToOutline).
   * @param {string|null|undefined} hashHex - Hex string (e.g. "a1b2c3d4...")
   * @param {string} theme - "light" or "dark"
   * @returns {string} CSS hsl() string
   */
  function hashToHsl(hashHex, theme) {
    if (!hashHex || hashHex.length < 8) {
      return 'hsl(0, 0%, 50%)';
    }

    var b0 = parseInt(hashHex.slice(0, 2), 16) || 0;
    var b1 = parseInt(hashHex.slice(2, 4), 16) || 0;
    var b2 = parseInt(hashHex.slice(4, 6), 16) || 0;
    var b3 = parseInt(hashHex.slice(6, 8), 16) || 0;

    // Hue: 0-360 from bytes 0-1 (16-bit)
    var hue = Math.round(((b0 << 8) | b1) / 65535 * 360);
    // Saturation: 55-95% from byte 2
    var S = 55 + Math.round(b2 / 255 * 40);
    // Lightness: vivid range per theme from byte 3
    // Light: 50-65%, Dark: 55-72%
    var L;
    if (theme === 'dark') {
      L = 55 + Math.round(b3 / 255 * 17);
    } else {
      L = 50 + Math.round(b3 / 255 * 15);
    }

    return 'hsl(' + hue + ', ' + S + '%, ' + L + '%)';
  }
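
  // Worked example (follows directly from the arithmetic above):
  //   hashToHsl('a1b2c3d4', 'dark')
  //     hue = round((0xA1 << 8 | 0xB2) / 65535 * 360) = round(41394 / 65535 * 360) = 227
  //     S   = 55 + round(0xC3 / 255 * 40) = 55 + 31 = 86
  //     L   = 55 + round(0xD4 / 255 * 17) = 55 + 14 = 69
  //   → 'hsl(227, 86%, 69%)'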

  /**
   * Derive a dark outline color (same hue) for contrast against backgrounds.
   * @param {string|null|undefined} hashHex - Hex string
   * @param {string} theme - "light" or "dark"
   * @returns {string} CSS hsl() string
   */
  function hashToOutline(hashHex, theme) {
    if (!hashHex || hashHex.length < 8) {
      return 'hsl(0, 0%, 30%)';
    }

    var b0 = parseInt(hashHex.slice(0, 2), 16) || 0;
    var b1 = parseInt(hashHex.slice(2, 4), 16) || 0;
    var hue = Math.round(((b0 << 8) | b1) / 65535 * 360);

    // Dark outline: same hue, low lightness for contrast
    if (theme === 'dark') {
      return 'hsl(' + hue + ', 30%, 15%)';
    }
    return 'hsl(' + hue + ', 70%, 25%)';
  }

  // Export
  if (typeof window !== 'undefined') {
    window.HashColor = { hashToHsl: hashToHsl, hashToOutline: hashToOutline };
  } else if (typeof module !== 'undefined') {
    module.exports = { hashToHsl: hashToHsl, hashToOutline: hashToOutline };
  }
})();
+68
-138
@@ -7,14 +7,6 @@ window.HopResolver = (function() {

  const MAX_HOP_DIST = 1.8; // ~200km in degrees
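  // (1° of latitude is ≈ 111 km, so 1.8° ≈ 200 km; degrees of longitude shrink
  // with latitude, so treat this as a rough bound rather than an exact distance.)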
  const REGION_RADIUS_KM = 300;

  // Only repeaters and room servers can appear as path hops per protocol.
  // Companions/sensors originate but never relay packets.
  function canAppearInPath(role) {
    if (!role) return false;
    var r = String(role).toLowerCase();
    return r.indexOf('repeater') >= 0 || r.indexOf('room_server') >= 0 || r === 'room';
  }
  let prefixIdx = {}; // lowercase hex prefix → [node, ...]
  let pubkeyIdx = {}; // full lowercase pubkey → node (O(1) lookup)
  let nodesList = [];
@@ -48,11 +40,7 @@ window.HopResolver = (function() {
    for (const n of nodesList) {
      if (!n.public_key) continue;
      const pk = n.public_key.toLowerCase();
      // pubkeyIdx includes ALL nodes — used by resolveFromServer for
      // server-confirmed full-pubkey lookups (any node type).
      pubkeyIdx[pk] = n;
      // prefixIdx only includes nodes that can appear as path hops.
      if (!canAppearInPath(n.role)) continue;
      for (let len = 1; len <= 3; len++) {
        const p = pk.slice(0, len * 2);
        if (!prefixIdx[p]) prefixIdx[p] = [];
@@ -84,89 +72,33 @@ window.HopResolver = (function() {
  }

  /**
   * Pick the best candidate by scoring against BOTH prev and next resolved hops.
   *
   * Strategy (in priority order):
   * 1. Neighbor-graph edge weight: sum of edge scores to prevPubkey + nextPubkey. Pick max.
   * 2. Geographic centroid: if no candidate has graph edges, compute centroid of
   *    prev+next positions and pick closest candidate by haversine distance.
   * 3. Single-anchor geo fallback: if only one neighbor is resolved, use it as anchor.
   * 4. Original heuristic: first candidate (when no context at all).
   *
   * Pick the best candidate using affinity first, then geo-distance fallback.
   * @param {Array} candidates - candidates with lat/lon/pubkey/name
   * @param {string|null} prevPubkey - pubkey of previous resolved hop
   * @param {string|null} nextPubkey - pubkey of next resolved hop
   * @param {Object|null} prevPos - {lat, lon} of previous resolved hop or origin
   * @param {Object|null} nextPos - {lat, lon} of next resolved hop or observer
   * @param {string|null} adjacentPubkey - pubkey of the previously/next resolved hop
   * @param {Object|null} anchor - {lat, lon} for geo fallback
   * @param {number|null} fallbackLat - fallback anchor lat (e.g. observer)
   * @param {number|null} fallbackLon - fallback anchor lon
   * @returns {Object} best candidate
   */
  function pickByAffinity(candidates, prevPubkey, nextPubkey, prevPos, nextPos) {
    const hasGraph = Object.keys(affinityMap).length > 0;
    const hasAdj = prevPubkey || nextPubkey;

    // Strategy 1: neighbor-graph edge weights (sum of prev + next)
    if (hasGraph && hasAdj) {
      const scored = candidates.map(function(c) {
        let s = 0;
        if (prevPubkey) s += getAffinity(prevPubkey, c.pubkey);
        if (nextPubkey) s += getAffinity(nextPubkey, c.pubkey);
        return { candidate: c, edgeScore: s };
      });
      const withEdges = scored.filter(function(s) { return s.edgeScore > 0; });
      if (withEdges.length > 0) {
        withEdges.sort(function(a, b) { return b.edgeScore - a.edgeScore; });
        _traceMultiCandidate(candidates, scored, withEdges[0].candidate, 'graph');
        return withEdges[0].candidate;
  function pickByAffinity(candidates, adjacentPubkey, anchor, fallbackLat, fallbackLon) {
    // If we have affinity data and an adjacent hop, prefer neighbors
    if (adjacentPubkey && Object.keys(affinityMap).length > 0) {
      const withAffinity = candidates
        .map(c => ({ ...c, affinity: getAffinity(adjacentPubkey, c.pubkey) }))
        .filter(c => c.affinity > 0);
      if (withAffinity.length > 0) {
        withAffinity.sort((a, b) => b.affinity - a.affinity);
        return withAffinity[0];
      }
    }

    // Strategy 2/3: geographic — centroid of prev+next, or single anchor
    let anchorLat = null, anchorLon = null, anchorCount = 0;
    if (prevPos && prevPos.lat != null && prevPos.lon != null) {
      anchorLat = (anchorLat || 0) + prevPos.lat;
      anchorLon = (anchorLon || 0) + prevPos.lon;
      anchorCount++;
    // Fallback: geo-distance sort (existing behavior)
    const effectiveAnchor = anchor || (fallbackLat != null ? { lat: fallbackLat, lon: fallbackLon } : null);
    if (effectiveAnchor) {
      candidates.sort((a, b) => dist(a.lat, a.lon, effectiveAnchor.lat, effectiveAnchor.lon) - dist(b.lat, b.lon, effectiveAnchor.lat, effectiveAnchor.lon));
    }
    if (nextPos && nextPos.lat != null && nextPos.lon != null) {
      anchorLat = (anchorLat || 0) + nextPos.lat;
      anchorLon = (anchorLon || 0) + nextPos.lon;
      anchorCount++;
    }
    if (anchorCount > 0) {
      anchorLat /= anchorCount;
      anchorLon /= anchorCount;
      const geoScored = candidates.map(function(c) {
        const d = (c.lat != null && c.lon != null && !(c.lat === 0 && c.lon === 0))
          ? haversineKm(c.lat, c.lon, anchorLat, anchorLon) : 999999;
        return { candidate: c, distKm: d };
      });
      geoScored.sort(function(a, b) { return a.distKm - b.distKm; });
      _traceMultiCandidate(candidates, geoScored, geoScored[0].candidate, 'centroid');
      return geoScored[0].candidate;
    }

    // Strategy 4: no context — return first candidate
    _traceMultiCandidate(candidates, null, candidates[0], 'fallback');
    return candidates[0];
  }

  /** Dev-mode console trace for multi-candidate picks */
  function _traceMultiCandidate(candidates, scored, chosen, method) {
    if (typeof console === 'undefined' || !console.debug) return;
    if (candidates.length < 2) return;
    try {
      const prefix = candidates[0].pubkey ? candidates[0].pubkey.slice(0, 2) : '??';
      const scoreSummary = scored ? scored.map(function(s) {
        const pk = (s.candidate || s).pubkey || '?';
        const val = s.edgeScore != null ? s.edgeScore : (s.distKm != null ? s.distKm + 'km' : '?');
        return pk.slice(0, 8) + ':' + val;
      }) : [];
      console.debug('[hop-resolver] hash=' + prefix + ' candidates=' + candidates.length +
        ' scored=[' + scoreSummary.join(',') + '] chose=' + (chosen.pubkey || '?').slice(0, 8) +
        ' method=' + method);
    } catch(e) { /* trace is best-effort */ }
  }

  /**
   * Resolve an array of hex hop prefixes to node info.
   * Returns a map: { hop: {name, pubkey, lat, lon, ambiguous, unreliable} }
@@ -237,54 +169,52 @@ window.HopResolver = (function() {
      }
    }

    // Combined disambiguation: resolve ambiguous hops using both neighbors.
    // We iterate until no more hops can be resolved (handles cascading dependencies).
    const originPos = (originLat != null && originLon != null) ? { lat: originLat, lon: originLon } : null;
    const observerPos = (observerLat != null && observerLon != null) ? { lat: observerLat, lon: observerLon } : null;

    let changed = true;
    let maxIter = hops.length + 1; // prevent infinite loops
    while (changed && maxIter-- > 0) {
      changed = false;
      for (let i = 0; i < hops.length; i++) {
        const hop = hops[i];
        if (hopPositions[hop]) continue; // already resolved
        const r = resolved[hop];
        if (!r || !r.ambiguous) continue;
        const withLoc = r.candidates.filter(c => c.lat != null && c.lon != null && !(c.lat === 0 && c.lon === 0));
        if (!withLoc.length) continue;

        // Find prev resolved neighbor
        let prevPubkey = null, prevPos = null;
        for (let j = i - 1; j >= 0; j--) {
          if (hopPositions[hops[j]]) {
            prevPos = hopPositions[hops[j]];
            prevPubkey = resolved[hops[j]] ? resolved[hops[j]].pubkey : null;
            break;
          }
        }
        if (!prevPos && originPos) prevPos = originPos;

        // Find next resolved neighbor
        let nextPubkey = null, nextPos = null;
        for (let j = i + 1; j < hops.length; j++) {
          if (hopPositions[hops[j]]) {
            nextPos = hopPositions[hops[j]];
            nextPubkey = resolved[hops[j]] ? resolved[hops[j]].pubkey : null;
            break;
          }
        }
        if (!nextPos && observerPos) nextPos = observerPos;

        // Skip if we have zero context (wait for a later iteration or neighbor resolution)
        if (!prevPubkey && !nextPubkey && !prevPos && !nextPos) continue;

        const picked = pickByAffinity(withLoc, prevPubkey, nextPubkey, prevPos, nextPos);
        r.name = picked.name;
        r.pubkey = picked.pubkey;
        hopPositions[hop] = { lat: picked.lat, lon: picked.lon };
        changed = true;
    // Forward pass
    let lastPos = (originLat != null && originLon != null) ? { lat: originLat, lon: originLon } : null;
    let lastResolvedPubkey = null;
    for (let i = 0; i < hops.length; i++) {
      const hop = hops[i];
      if (hopPositions[hop]) {
        lastPos = hopPositions[hop];
        lastResolvedPubkey = resolved[hop] ? resolved[hop].pubkey : null;
        continue;
      }
      const r = resolved[hop];
      if (!r || !r.ambiguous) continue;
      const withLoc = r.candidates.filter(c => c.lat && c.lon && !(c.lat === 0 && c.lon === 0));
      if (!withLoc.length) continue;

      // Affinity-aware: prefer candidates that are neighbors of the previous hop
      const picked = pickByAffinity(withLoc, lastResolvedPubkey, lastPos, i === hops.length - 1 ? observerLat : null, i === hops.length - 1 ? observerLon : null);
      r.name = picked.name;
      r.pubkey = picked.pubkey;
      hopPositions[hop] = { lat: picked.lat, lon: picked.lon };
      lastPos = hopPositions[hop];
      lastResolvedPubkey = picked.pubkey;
    }

    // Backward pass
    let nextPos = (observerLat != null && observerLon != null) ? { lat: observerLat, lon: observerLon } : null;
    let nextResolvedPubkey = null;
    for (let i = hops.length - 1; i >= 0; i--) {
      const hop = hops[i];
      if (hopPositions[hop]) {
        nextPos = hopPositions[hop];
        nextResolvedPubkey = resolved[hop] ? resolved[hop].pubkey : null;
        continue;
      }
      const r = resolved[hop];
      if (!r || !r.ambiguous) continue;
      const withLoc = r.candidates.filter(c => c.lat && c.lon && !(c.lat === 0 && c.lon === 0));
      if (!withLoc.length || !nextPos) continue;

      // Affinity-aware: prefer candidates that are neighbors of the next hop
      const picked = pickByAffinity(withLoc, nextResolvedPubkey, nextPos, null, null);
      r.name = picked.name;
      r.pubkey = picked.pubkey;
      hopPositions[hop] = { lat: picked.lat, lon: picked.lon };
      nextPos = hopPositions[hop];
      nextResolvedPubkey = picked.pubkey;
    }

    // Sanity check: drop hops impossibly far from neighbors
@@ -346,13 +276,13 @@ window.HopResolver = (function() {
   */
  function resolveFromServer(hops, resolvedPath) {
    if (!hops || !resolvedPath || hops.length !== resolvedPath.length) return {};
    const result = {};
    for (let i = 0; i < hops.length; i++) {
      const hop = hops[i];
      const pubkey = resolvedPath[i];
    var result = {};
    for (var i = 0; i < hops.length; i++) {
      var hop = hops[i];
      var pubkey = resolvedPath[i];
      if (!pubkey) continue; // null = unresolved, leave for client-side fallback
      // O(1) lookup via pubkeyIdx built during init()
      const node = pubkeyIdx[pubkey.toLowerCase()] || null;
      var node = pubkeyIdx[pubkey.toLowerCase()] || null;
      result[hop] = {
        name: node ? node.name : pubkey.slice(0, 8),
        pubkey: pubkey,
+1
-3
@@ -50,7 +50,7 @@
<a href="#/live" class="nav-link" data-route="live" data-priority="high">🔴 Live</a>
<a href="#/channels" class="nav-link" data-route="channels">Channels</a>
<a href="#/nodes" class="nav-link" data-route="nodes" data-priority="high">Nodes</a>
<a href="#/tools" class="nav-link" data-route="tools">Tools</a>
<a href="#/traces" class="nav-link" data-route="traces">Traces</a>
<a href="#/observers" class="nav-link" data-route="observers">Observers</a>
<a href="#/analytics" class="nav-link" data-route="analytics">Analytics</a>
<a href="#/perf" class="nav-link" data-route="perf">⚡ Perf</a>
@@ -94,7 +94,6 @@
<script src="home.js?v=__BUST__"></script>
<script src="table-sort.js?v=__BUST__"></script>
<script src="packet-filter.js?v=__BUST__"></script>
<script src="hash-color.js?v=__BUST__"></script>
<script src="packet-helpers.js?v=__BUST__"></script>
<script src="channel-decrypt.js?v=__BUST__"></script>
<script src="channel-colors.js?v=__BUST__"></script>
@@ -106,7 +105,6 @@
<script src="table-sort.js?v=__BUST__"></script>
<script src="nodes.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
<script src="traces.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
<script src="path-inspector.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
<script src="analytics.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio-v1-constellation.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
+9
-133
@@ -22,12 +22,6 @@
|
||||
let showOnlyFavorites = localStorage.getItem('live-favorites-only') === 'true';
|
||||
let matrixMode = localStorage.getItem('live-matrix-mode') === 'true';
|
||||
let matrixRain = localStorage.getItem('live-matrix-rain') === 'true';
|
||||
let colorByHash = localStorage.getItem('meshcore-color-packets-by-hash') !== 'false';
|
||||
/** Current theme string for hash-color functions. */
|
||||
function _liveTheme() { return document.documentElement.dataset.theme || (window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light'); }
|
||||
let nodeFilterKeys = (localStorage.getItem('live-node-filter') || '').split(',').map(s => s.trim()).filter(Boolean);
|
||||
let nodeFilterTotal = 0;
|
||||
let nodeFilterShown = 0;
|
||||
let rainCanvas = null, rainCtx = null, rainDrops = [], rainRAF = null;
|
||||
const propagationBuffer = new Map(); // hash -> {timer, packets[]}
|
||||
let _onResize = null;
|
||||
@@ -831,8 +825,6 @@
|
||||
<span id="ghostDesc" class="sr-only">Show interpolated ghost markers for unknown hops</span>
|
||||
<label><input type="checkbox" id="liveRealisticToggle" aria-describedby="realisticDesc"> Realistic</label>
|
||||
<span id="realisticDesc" class="sr-only">Buffer packets by hash and animate all paths simultaneously</span>
|
||||
<label><input type="checkbox" id="liveColorHashToggle" aria-describedby="colorHashDesc"> Color by hash</label>
|
||||
<span id="colorHashDesc" class="sr-only">Color flying-packet dots and contrails by packet hash for propagation tracing</span>
|
||||
<label><input type="checkbox" id="liveMatrixToggle" aria-describedby="matrixDesc"> Matrix</label>
|
||||
<span id="matrixDesc" class="sr-only">Animate packet hex bytes flowing along paths like the Matrix</span>
|
||||
<label><input type="checkbox" id="liveMatrixRainToggle" aria-describedby="rainDesc"> Rain</label>
|
||||
@@ -841,12 +833,6 @@
|
||||
<span id="audioDesc" class="sr-only">Sonify packets — turn raw bytes into generative music</span>
|
||||
<label><input type="checkbox" id="liveFavoritesToggle" aria-describedby="favDesc"> ⭐ Favorites</label>
|
||||
<span id="favDesc" class="sr-only">Show only favorited and claimed nodes</span>
|
||||
<div class="live-node-filter-wrap">
|
||||
<input type="text" id="liveNodeFilterInput" list="liveNodeFilterList" placeholder="Filter by node…" autocomplete="off" class="live-node-filter-input">
|
||||
<datalist id="liveNodeFilterList"></datalist>
|
||||
<button id="liveNodeFilterClear" class="vcr-btn" title="Clear node filter" style="display:none">×</button>
|
||||
</div>
|
||||
<div id="liveNodeFilterCount" class="live-filter-count hidden"></div>
|
||||
<label id="liveGeoFilterLabel" style="display:none"><input type="checkbox" id="liveGeoFilterToggle"> Mesh live area</label>
|
||||
</div>
|
||||
<div class="audio-controls hidden" id="audioControls">
|
||||
@@ -997,14 +983,6 @@
|
||||
localStorage.setItem('live-realistic-propagation', realisticPropagation);
|
||||
});
|
||||
|
||||
const colorHashToggle = document.getElementById('liveColorHashToggle');
|
||||
colorHashToggle.checked = colorByHash;
|
||||
colorHashToggle.addEventListener('change', (e) => {
|
||||
colorByHash = e.target.checked;
|
||||
localStorage.setItem('meshcore-color-packets-by-hash', colorByHash);
|
||||
window.dispatchEvent(new Event('storage'));
|
||||
});
|
||||
|
||||
const favoritesToggle = document.getElementById('liveFavoritesToggle');
|
||||
favoritesToggle.checked = showOnlyFavorites;
|
||||
favoritesToggle.addEventListener('change', (e) => {
|
||||
@@ -1013,35 +991,6 @@
|
||||
applyFavoritesFilter();
|
||||
});
|
||||
|
||||
// Node filter input
|
||||
const nodeFilterInput = document.getElementById('liveNodeFilterInput');
|
||||
const nodeFilterClear = document.getElementById('liveNodeFilterClear');
|
||||
if (nodeFilterInput) {
|
||||
// Restore from URL param or localStorage
|
||||
const urlNode = getHashParams && getHashParams().get('node');
|
||||
if (urlNode) setNodeFilter(urlNode.split(',').map(s => s.trim()).filter(Boolean));
|
||||
else if (nodeFilterKeys.length) updateNodeFilterUI();
|
||||
|
||||
nodeFilterInput.addEventListener('change', (e) => {
|
||||
const val = e.target.value.trim();
|
||||
setNodeFilter(val ? val.split(',').map(s => s.trim()).filter(Boolean) : []);
|
||||
const params = getHashParams ? getHashParams() : new URLSearchParams();
|
||||
if (nodeFilterKeys.length) params.set('node', nodeFilterKeys.join(','));
|
||||
else params.delete('node');
|
||||
const base = location.hash.split('?')[0];
|
||||
const qs = params.toString();
|
||||
location.hash = base + (qs ? '?' + qs : '');
|
||||
});
|
||||
}
|
||||
if (nodeFilterClear) {
|
||||
nodeFilterClear.addEventListener('click', () => {
|
||||
if (nodeFilterInput) nodeFilterInput.value = '';
|
||||
setNodeFilter([]);
|
||||
const base = location.hash.split('?')[0];
|
||||
location.hash = base;
|
||||
});
|
||||
}
|
||||
|
||||
// Geo filter overlay
|
||||
(async function () {
|
||||
try {
|
||||
@@ -1707,47 +1656,6 @@
|
||||
return getFavoritePubkeys().some(f => f === pubkey);
|
||||
}
|
||||
|
||||
function packetInvolvesFilterNode(pkt, filterKeys) {
|
||||
if (!filterKeys.length) return true;
|
||||
const hops = (pkt.decoded?.path?.hops) || [];
|
||||
for (const hop of hops) {
|
||||
const h = (hop.id || hop.public_key || hop).toString().toLowerCase();
|
||||
if (filterKeys.some(f => f.toLowerCase().startsWith(h) || h.startsWith(f.toLowerCase()))) return true;
|
||||
}
|
||||
return false;
|
||||
}
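
// Worked example of the bidirectional prefix match above (illustrative
// values, not from the codebase): with filterKeys = ['2c91ab'], a one-byte
// hop id '2c' matches because the filter startsWith the hop, and a full hop
// key '2c91ab44' matches because the hop startsWith the filter. Running the
// startsWith check in both directions lets short path-hash hops and full
// pubkeys hit the same filter entry.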

function setNodeFilter(keys) {
nodeFilterKeys = keys;
nodeFilterTotal = 0;
nodeFilterShown = 0;
localStorage.setItem('live-node-filter', keys.join(','));
updateNodeFilterUI();
}

function updateNodeFilterUI() {
const countEl = document.getElementById('liveNodeFilterCount');
const clearBtn = document.getElementById('liveNodeFilterClear');
const input = document.getElementById('liveNodeFilterInput');
if (nodeFilterKeys.length > 0) {
if (clearBtn) clearBtn.style.display = '';
if (countEl) { countEl.textContent = `Showing ${nodeFilterShown} of ${nodeFilterTotal}`; countEl.classList.remove('hidden'); }
if (input && input.value !== nodeFilterKeys.join(', ')) input.value = nodeFilterKeys.join(', ');
} else {
if (clearBtn) clearBtn.style.display = 'none';
if (countEl) countEl.classList.add('hidden');
}
updateNodeFilterDatalist();
}

function updateNodeFilterDatalist() {
const dl = document.getElementById('liveNodeFilterList');
if (!dl) return;
dl.innerHTML = Object.values(nodeData).map(n =>
`<option value="${n.public_key}">${n.name || n.public_key.slice(0, 8)}</option>`
).join('');
}

function rebuildFeedList() {
const feed = document.getElementById('liveFeed');
if (!feed) return;
@@ -1954,9 +1862,6 @@
window._liveGetFavoritePubkeys = getFavoritePubkeys;
window._livePacketInvolvesFavorite = packetInvolvesFavorite;
window._liveIsNodeFavorited = isNodeFavorited;
window._livePacketInvolvesFilterNode = packetInvolvesFilterNode;
window._liveGetNodeFilterKeys = function() { return nodeFilterKeys; };
window._liveSetNodeFilter = setNodeFilter;
window._liveFormatLiveTimestampHtml = formatLiveTimestampHtml;
window._liveResolveHopPositions = resolveHopPositions;
window._liveVcrSpeedCycle = vcrSpeedCycle;
@@ -2047,14 +1952,6 @@
// --- Favorites filter ---
if (showOnlyFavorites && !packets.some(function(p) { return packetInvolvesFavorite(p); })) return;

// --- Node filter ---
if (nodeFilterKeys.length) {
nodeFilterTotal++;
if (!packets.some(function(p) { return packetInvolvesFilterNode(p, nodeFilterKeys); })) return;
nodeFilterShown++;
updateNodeFilterUI();
}

// --- Ensure ADVERT nodes appear on map ---
for (var pi = 0; pi < packets.length; pi++) {
var pkt = packets[pi];
@@ -2171,7 +2068,7 @@
var completedPositions = allPaths[ai].hopPositions.slice(0, hopsCompleted + 1);
var remainingPositions = allPaths[ai].hopPositions.slice(hopsCompleted);
if (completedPositions.length >= 2) {
animatePath(completedPositions, typeName, color, allPaths[ai].raw, onHop, first.hash);
animatePath(completedPositions, typeName, color, allPaths[ai].raw, onHop);
} else if (completedPositions.length === 1) {
pulseNode(completedPositions[0].key, completedPositions[0].pos, typeName);
}
@@ -2179,7 +2076,7 @@
drawDashedPath(remainingPositions, color);
}
} else {
animatePath(allPaths[ai].hopPositions, typeName, color, allPaths[ai].raw, onHop, first.hash);
animatePath(allPaths[ai].hopPositions, typeName, color, allPaths[ai].raw, onHop);
}
}
}
@@ -2288,7 +2185,7 @@
return raw.filter(h => h.pos != null);
}

function animatePath(hopPositions, typeName, color, rawHex, onHop, hash) {
function animatePath(hopPositions, typeName, color, rawHex, onHop) {
if (!animLayer || !pathsLayer) return;
if (activeAnims >= MAX_CONCURRENT_ANIMS) return;
activeAnims++;
@@ -2340,7 +2237,7 @@
const nextGhost = hopPositions[hopIndex + 1].ghost;
const lineColor = (isGhost || nextGhost) ? '#94a3b8' : color;
const lineOpacity = (isGhost || nextGhost) ? 0.3 : undefined;
drawAnimatedLine(hp.pos, nextPos, lineColor, () => { hopIndex++; nextHop(); }, lineOpacity, rawHex, hash);
drawAnimatedLine(hp.pos, nextPos, lineColor, () => { hopIndex++; nextHop(); }, lineOpacity, rawHex);
} else {
if (!isGhost) pulseNode(hp.key, hp.pos, typeName);
hopIndex++; nextHop();
@@ -2695,7 +2592,7 @@
requestAnimationFrame(tick);
}

function drawAnimatedLine(from, to, color, onComplete, overrideOpacity, rawHex, hash) {
function drawAnimatedLine(from, to, color, onComplete, overrideOpacity, rawHex) {
if (!animLayer || !pathsLayer) { if (onComplete) onComplete(); return; }
if (matrixMode) return drawMatrixLine(from, to, color, onComplete, rawHex);
const steps = 20;
@@ -2706,30 +2603,17 @@
const mainOpacity = overrideOpacity ?? 0.8;
const isDashed = overrideOpacity != null;

// Hash-derived color for fill + contrail + outline (when toggle ON and not ghost/dashed line)
var hashFill = '#fff';
var hashOutline = color;
var contrailColor = color;
if (colorByHash && hash && !isDashed && window.HashColor) {
var hsl = HashColor.hashToHsl(hash, _liveTheme());
hashFill = hsl;
hashOutline = HashColor.hashToOutline(hash, _liveTheme());
contrailColor = hsl;
}
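
// A minimal sketch of the contract assumed from HashColor above. The real
// hashToHsl/hashToOutline live in a module not shown in this diff; the name,
// byte slice, and constants here are illustrative assumptions, not the
// shipped implementation. The idea: derive a stable hue from the leading
// hash bytes so the same packet hash renders the same color everywhere.
function _hashToHslSketch(hash, theme) {
var hue = parseInt(hash.slice(0, 4), 16) % 360; // stable per-hash hue
var lightness = theme === 'dark' ? 65 : 40; // keep contrast on either background
return 'hsl(' + hue + ', 85%, ' + lightness + '%)';
}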

const contrail = L.polyline([from], {
color: contrailColor, weight: 6, opacity: mainOpacity * 0.2, lineCap: 'round'
color: color, weight: 6, opacity: mainOpacity * 0.2, lineCap: 'round'
}).addTo(pathsLayer);

const line = L.polyline([from], {
color: (colorByHash && hash && !isDashed && window.HashColor) ? hashFill : color,
weight: isDashed ? 1.5 : 2, opacity: mainOpacity, lineCap: 'round',
dashArray: isDashed ? '4 6' : null,
className: 'live-packet-trace'
color: color, weight: isDashed ? 1.5 : 2, opacity: mainOpacity, lineCap: 'round',
dashArray: isDashed ? '4 6' : null
}).addTo(pathsLayer);

const dot = L.circleMarker(from, {
radius: 3.5, fillColor: hashFill, fillOpacity: 1, color: hashOutline, weight: 1.5
radius: 3.5, fillColor: '#fff', fillOpacity: 1, color: color, weight: 1.5
}).addTo(animLayer);

let lastStep = performance.now();
@@ -2861,10 +2745,6 @@
item.setAttribute('tabindex', '0');
item.setAttribute('role', 'button');
item.style.cursor = 'pointer';
// Hash-color stripe for feed items (mirrors packets table border-left)
if (colorByHash && pkt.hash && window.HashColor) {
item.style.borderLeft = '4px solid ' + HashColor.hashToHsl(pkt.hash, _liveTheme());
}
// Channel color highlighting for GRP_TXT packets (#271)
var _cs = _getChannelStyle(pkt);
if (_cs) item.style.cssText += _cs;
@@ -2948,10 +2828,6 @@
item.setAttribute('role', 'button');
if (hash) item.setAttribute('data-hash', hash);
item.style.cursor = 'pointer';
// Hash-color stripe for feed items (mirrors packets table border-left)
if (colorByHash && hash && window.HashColor) {
item.style.borderLeft = '4px solid ' + HashColor.hashToHsl(hash, _liveTheme());
}
// Channel color highlighting for GRP_TXT packets (#271)
var _chanStyle = _getChannelStyle(pkt);
if (_chanStyle) item.style.cssText += _chanStyle;

+2
-156
@@ -102,21 +102,8 @@

async function init(container) {
container.innerHTML = `
<div id="map-wrap" style="position:relative;width:100%;height:100%;display:flex;">
<div id="leaflet-map" style="flex:1 1 0%;height:100%;"></div>
<div class="map-side-pane" id="mapSidePane">
<div class="pane-toggle" id="mapPaneToggle" title="Path Inspector">◀</div>
<div class="pane-content">
<h3 style="margin:0 0 8px 0;font-size:14px;">Path Inspector</h3>
<p style="font-size:11px;color:var(--text-muted);margin:0 0 8px 0;">Hex prefixes (1-3 bytes), comma or space separated.</p>
<div style="display:flex;gap:4px;margin-bottom:8px;">
<input type="text" id="mapPiInput" class="input" placeholder="2C,A1,F4" style="flex:1;">
<button id="mapPiSubmit" class="btn btn-primary btn-sm">Go</button>
</div>
<div id="mapPiError" class="path-inspector-error"></div>
<div id="mapPiResults"></div>
</div>
</div>
<div id="map-wrap" style="position:relative;width:100%;height:100%;">
<div id="leaflet-map" style="width:100%;height:100%;"></div>
<button class="map-controls-toggle" id="mapControlsToggle" aria-label="Toggle map controls" aria-expanded="true">⚙️</button>
<div class="map-controls" id="mapControls" role="region" aria-label="Map controls">
<h3>🗺️ Map Controls</h3>
@@ -388,14 +375,6 @@
}

function drawPacketRoute(hopKeys, origin) {
// Defensive: origin must be an object with pubkey/lat/lon/name. A bare
// string slips through both branches at lines below and silently no-ops
// the originator marker (caused PR #950's bug). Coerce string → object
// and warn so callers get a clear signal.
if (typeof origin === 'string') {
console.warn('drawPacketRoute: origin should be an object {pubkey,lat,lon,name}, got string. Coercing.');
origin = { pubkey: origin };
}
// Hide default markers so only the route is visible
if (markerLayer) map.removeLayer(markerLayer);
if (clusterGroup) map.removeLayer(clusterGroup);
@@ -574,23 +553,6 @@
}
}

// Check for pending path inspector route (cross-page navigation from Path Inspector).
if (window._pendingPathInspectorRoute) {
var pending = window._pendingPathInspectorRoute;
delete window._pendingPathInspectorRoute;
if (pending.path && pending.path.length > 0) {
if (window.routeLayer) window.routeLayer.clearLayers();
// Pass full path as hopKeys; null origin (origin is already the first
// hop). slice(1) + path[0] string was wrong — drawPacketRoute expects
// origin to be an OBJECT with pubkey/lat/lon, and stripping the head
// hid the originating node from the route polyline.
drawPacketRoute(pending.path, null);
}
}

// Wire up map side pane (Path Inspector embedded - spec §2.7).
initMapSidePane();

// Don't fitBounds on initial load — respect the Bay Area default or saved view
// Only fitBounds on subsequent data refreshes if user hasn't manually panned
} catch (e) {
@@ -1019,122 +981,6 @@
map.fitBounds(bounds, { padding: [50, 50], maxZoom: 14 });
}

// === Map Side Pane — Path Inspector (spec §2.7) ===
function initMapSidePane() {
var pane = document.getElementById('mapSidePane');
var toggle = document.getElementById('mapPaneToggle');
var input = document.getElementById('mapPiInput');
var btn = document.getElementById('mapPiSubmit');
if (!pane || !toggle) return;

toggle.addEventListener('click', function () {
pane.classList.toggle('expanded');
toggle.textContent = pane.classList.contains('expanded') ? '▶' : '◀';
// Invalidate map size after transition.
setTimeout(function () { if (map) map.invalidateSize(); }, 220);
});

if (btn && input) {
btn.addEventListener('click', function () { mapPiSubmit(input.value); });
input.addEventListener('keydown', function (e) {
if (e.key === 'Enter') mapPiSubmit(input.value);
});
}

// Auto-open if URL has prefixes param while on map.
var params = new URLSearchParams(location.hash.split('?')[1] || '');
var prefixParam = params.get('prefixes');
if (prefixParam && input) {
pane.classList.add('expanded');
toggle.textContent = '▶';
input.value = prefixParam;
setTimeout(function () { if (map) map.invalidateSize(); }, 220);
mapPiSubmit(prefixParam);
}
}

function mapPiSubmit(raw) {
var errDiv = document.getElementById('mapPiError');
var resultsDiv = document.getElementById('mapPiResults');
if (!errDiv || !resultsDiv) return;
errDiv.textContent = '';
resultsDiv.innerHTML = '';

// Reuse PathInspector validation if available.
var prefixes = raw.trim().split(/[\s,]+/).filter(function (s) { return s.length > 0; }).map(function (s) { return s.toLowerCase(); });
var err = (window.PathInspector && window.PathInspector.validatePrefixes) ? window.PathInspector.validatePrefixes(prefixes) : null;
if (!err && prefixes.length === 0) err = 'Enter at least one prefix.';
if (err) { errDiv.textContent = err; return; }

resultsDiv.innerHTML = '<p style="font-size:12px;">Loading...</p>';
fetch('/api/paths/inspect', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ prefixes: prefixes })
})
.then(function (r) {
if (r.status === 503) return r.json().then(function () { throw new Error('Service warming up, retry shortly.'); });
if (!r.ok) return r.json().then(function (d) { throw new Error(d.error || 'Request failed'); });
return r.json();
})
.then(function (data) { renderMapPiResults(data, resultsDiv); })
.catch(function (e) { resultsDiv.innerHTML = ''; errDiv.textContent = e.message; });
}
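
// Illustrative request/response shape for POST /api/paths/inspect, inferred
// from the rendering code in this file. The field names match what
// renderMapPiResults reads; the concrete values are made up, and the server
// schema is not documented in this diff:
//   request:  {"prefixes": ["2c", "a1", "f4"]}
//   response: {"candidates": [{"score": 0.87, "speculative": false,
//              "names": ["NodeA", "NodeB", "NodeC"], "path": ["2c…", "a1…", "f4…"],
//              "evidence": {"perHop": [{"prefix": "2c",
//                "candidatesConsidered": 4, "edgeWeight": 0.92,
//                "alternatives": []}]}}]}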

function renderMapPiResults(data, div) {
if (!data.candidates || data.candidates.length === 0) {
div.innerHTML = '<p style="font-size:12px;color:var(--text-muted);">No candidates found.</p>';
return;
}
var html = '<table class="path-inspector-table" style="font-size:11px;width:100%;"><thead><tr><th>#</th><th>Score</th><th>Path</th><th></th></tr></thead><tbody>';
for (var i = 0; i < data.candidates.length; i++) {
var c = data.candidates[i];
var rowClass = c.speculative ? 'speculative-row' : '';
html += '<tr class="' + rowClass + '">';
html += '<td>' + (i + 1) + '</td>';
html += '<td class="' + (c.speculative ? 'speculative-warning' : '') + '">' + c.score.toFixed(2) + (c.speculative ? ' ⚠' : '') + '</td>';
html += '<td title="' + safeEsc(c.names.join(' → ')) + '">' + safeEsc(c.names.slice(0, 3).join('→')) + (c.names.length > 3 ? '…' : '') + '</td>';
html += '<td><button class="btn btn-sm" data-idx="' + i + '" title="Show on Map">📍</button></td>';
html += '</tr>';
// Per-hop evidence (collapsed).
html += '<tr class="evidence-row collapsed" data-evidence="' + i + '"><td colspan="4"><div class="evidence-detail" style="font-size:10px;">';
if (c.evidence && c.evidence.perHop) {
for (var j = 0; j < c.evidence.perHop.length; j++) {
var h = c.evidence.perHop[j];
html += '<div>Hop ' + (j+1) + ': ' + h.prefix + ' (×' + h.candidatesConsidered + ') w=' + h.edgeWeight.toFixed(2);
if (h.alternatives && h.alternatives.length > 0) {
html += ' <span style="color:var(--text-muted);">[+' + h.alternatives.length + ' alt]</span>';
}
html += '</div>';
}
}
html += '</div></td></tr>';
}
html += '</tbody></table>';
div.innerHTML = html;

// Wire buttons.
div.querySelectorAll('button[data-idx]').forEach(function (btn) {
btn.addEventListener('click', function () {
var idx = parseInt(btn.dataset.idx);
var cand = data.candidates[idx];
if (routeLayer) routeLayer.clearLayers();
drawPacketRoute(cand.path, null);
});
});
// Expand evidence on row click.
div.querySelectorAll('.path-inspector-table tbody tr:not(.evidence-row)').forEach(function (row) {
row.style.cursor = 'pointer';
row.addEventListener('click', function (e) {
if (e.target.tagName === 'BUTTON') return;
var b = row.querySelector('button[data-idx]');
if (!b) return;
var ev = div.querySelector('tr[data-evidence="' + b.dataset.idx + '"]');
if (ev) ev.classList.toggle('collapsed');
});
});
}

function destroy() {
if (wsHandler) offWS(wsHandler);
wsHandler = null;

+3
-15
@@ -1144,19 +1144,6 @@
makeColumnsResizable('#nodesTable', 'meshcore-nodes-col-widths');
}

/**
* Navigate to the full-screen node view for `pubkey` from anywhere within
* the nodes module. Single source of navigation truth — works regardless
* of current hash state (hash assignment alone is a no-op when the hash
* is already the target).
*/
function navigateToNode(pubkey) {
destroy();
var appEl = document.getElementById('app');
history.replaceState(null, '', '#/nodes/' + encodeURIComponent(pubkey));
init(appEl, pubkey);
}

async function selectNode(pubkey) {
// On mobile, navigate to full-screen node view
if (window.innerWidth <= 640) {
@@ -1320,11 +1307,12 @@
} catch {}
}

// Wire "Details" button via the unified navigateToNode helper
// #856: Wire "Details" button to navigate to full-screen node view
var detailBtn = panel.querySelector('.node-detail-btn');
if (detailBtn) {
detailBtn.addEventListener('click', function() {
navigateToNode(decodeURIComponent(detailBtn.getAttribute('data-pubkey')));
var pk = detailBtn.getAttribute('data-pubkey');
location.hash = '#/nodes/' + pk;
});
}

+44
-120
@@ -13,9 +13,6 @@
return o.iata ? `${o.name} (${o.iata})` : o.name;
}
let selectedId = null;
function _isColorByHash() { return localStorage.getItem('meshcore-color-packets-by-hash') !== 'false'; }
function _currentTheme() { return document.documentElement.dataset.theme || (window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light'); }
function _hashStripeStyle(hash) { return _isColorByHash() && hash && window.HashColor ? 'border-left:4px solid ' + HashColor.hashToHsl(hash, _currentTheme()) + ';' : ''; }
let groupByHash = true;
let filters = {};
{ const o = localStorage.getItem('meshcore-observer-filter'); if (o) filters.observer = o;
@@ -390,9 +387,9 @@
const obs = data.observations.find(o => String(o.id) === String(obsTarget));
if (obs) {
expandedHashes.add(h);
const obsPacket = {...data.packet, observer_id: obs.observer_id, observer_name: obs.observer_name, snr: obs.snr, rssi: obs.rssi, path_json: obs.path_json, resolved_path: obs.resolved_path, direction: obs.direction, timestamp: obs.timestamp, first_seen: obs.timestamp};
const obsPacket = {...data.packet, observer_id: obs.observer_id, observer_name: obs.observer_name, snr: obs.snr, rssi: obs.rssi, path_json: obs.path_json, resolved_path: obs.resolved_path, timestamp: obs.timestamp, first_seen: obs.timestamp};
clearParsedCache(obsPacket);
selectPacket(obs.id, h, {packet: obsPacket, observations: data.observations}, obs.id);
selectPacket(obs.id, h, {packet: obsPacket, breakdown: data.breakdown, observations: data.observations}, obs.id);
} else {
selectPacket(data.packet.id, h, data);
}
@@ -471,9 +468,6 @@

// Check if new packets pass current filters
const filtered = newPkts.filter(p => {
// When user pinned a hash, accept ONLY that exact packet — bypass all
// other filters (window/region/type/observer/node).
if (filters.hash) return p.hash === filters.hash;
// Respect time window filter — drop packets outside the selected window
const windowMin = savedTimeWindowMin;
if (windowMin > 0) {
@@ -483,6 +477,7 @@
}
if (filters.type) { const types = filters.type.split(',').map(Number); if (!types.includes(p.payload_type)) return false; }
if (filters.observer) { const obsSet = new Set(filters.observer.split(',')); if (!obsSet.has(p.observer_id) && !(p._children && p._children.some(c => obsSet.has(String(c.observer_id))))) return false; }
if (filters.hash && p.hash !== filters.hash) return false;
if (RegionFilter.getRegionParam()) {
const selectedRegions = RegionFilter.getRegionParam().split(',');
const obs = observerMap.get(p.observer_id);
@@ -524,7 +519,7 @@
if (p.decoded_json) existing.decoded_json = p.decoded_json;
// Update expanded children if this group is expanded
if (expandedHashes.has(h) && existing._children) {
existing._children.unshift(clearParsedCache({...p, _isObservation: true}));
existing._children.unshift(p);
if (existing._children.length > 200) existing._children.length = 200;
sortGroupChildren(existing);
// Invalidate row counts — child count changed, so virtual scroll
@@ -615,52 +610,27 @@
} catch {}
}

// Build URLSearchParams for /api/packets given UI state. Pure function for
// testability — returns the params object the next call to /api/packets
// would use. The hash filter is an exact identifier: when present it
// suppresses ALL other filters (region, time window, observer, node,
// channel). The user is asking for THAT packet regardless of saved
// selections.
function buildPacketsParams({ filters, regionParam, windowMin, groupByHash, limit }) {
const params = new URLSearchParams();
if (filters.hash) {
params.set('hash', filters.hash);
params.set('limit', String(limit));
async function loadPackets() {
try {
const params = new URLSearchParams();
const selectedWindow = Number(document.getElementById('fTimeWindow')?.value);
const windowMin = Number.isFinite(selectedWindow) ? selectedWindow : savedTimeWindowMin;
if (windowMin > 0 && !filters.hash) {
const since = new Date(Date.now() - windowMin * 60000).toISOString();
params.set('since', since);
}
params.set('limit', String(PACKET_LIMIT));
const regionParam = RegionFilter.getRegionParam();
if (regionParam) params.set('region', regionParam);
if (filters.hash) params.set('hash', filters.hash);
if (filters.node) params.set('node', filters.node);
if (filters.observer) params.set('observer', filters.observer);
if (filters.channel) params.set('channel', filters.channel);
if (groupByHash) {
params.set('groupByHash', 'true');
} else {
params.set('expand', 'observations');
}
return params;
}
if (windowMin > 0) {
const since = new Date(Date.now() - windowMin * 60000).toISOString();
params.set('since', since);
}
params.set('limit', String(limit));
if (regionParam) params.set('region', regionParam);
if (filters.node) params.set('node', filters.node);
if (filters.observer) params.set('observer', filters.observer);
if (filters.channel) params.set('channel', filters.channel);
if (groupByHash) {
params.set('groupByHash', 'true');
} else {
params.set('expand', 'observations');
}
return params;
}
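
// Usage sketch for buildPacketsParams (illustrative values; 500 stands in
// for PACKET_LIMIT here). A pinned hash collapses the query to hash + limit
// and everything else is dropped; without a hash the window/region/grouping
// params apply:
//   buildPacketsParams({ filters: { hash: 'ab12cd' }, regionParam: 'SFO',
//     windowMin: 60, groupByHash: true, limit: 500 }).toString()
//     → 'hash=ab12cd&limit=500'
//   buildPacketsParams({ filters: {}, regionParam: 'SFO', windowMin: 60,
//     groupByHash: true, limit: 500 }).toString()
//     → 'since=<ISO timestamp>&limit=500&region=SFO&groupByHash=true'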

async function loadPackets() {
try {
const selectedWindow = Number(document.getElementById('fTimeWindow')?.value);
const windowMin = Number.isFinite(selectedWindow) ? selectedWindow : savedTimeWindowMin;
const params = buildPacketsParams({
filters,
regionParam: RegionFilter.getRegionParam(),
windowMin,
groupByHash,
limit: PACKET_LIMIT,
});

const data = await api('/packets?' + params.toString());
packets = data.packets || [];
@@ -713,14 +683,10 @@
// Restore expanded group children (parallel fetch, Map lookup)
if (groupByHash && expandedHashes.size > 0) {
const expandedArr = [...expandedHashes];
// Fetch the full packet detail (which includes per-observation rows) for each expanded hash.
// Previously this used `/packets?hash=X&limit=20` which returned ONE aggregate row, causing
// every "child" row in the table to carry the parent packet.id instead of unique observation
// ids — so clicking any child pointed the side pane at the same aggregate. See #866.
const results = await Promise.all(expandedArr.map(hash => {
const group = hashIndex.get(hash);
if (!group) return { hash, group: null, data: null };
return api(`/packets/${hash}`)
return api(`/packets?hash=${hash}&limit=20`)
.then(data => ({ hash, group, data }))
.catch(() => ({ hash, group, data: null }));
}));
@@ -728,15 +694,7 @@
if (!group) {
expandedHashes.delete(hash);
} else if (data) {
const pkt = data.packet || group;
// Build per-observation children. Spread (pkt, obs) so obs-level fields
// (id, observer_id/name, path_json, snr/rssi, timestamp, raw_hex) override
// the aggregate. Each child's `id` is the observation id (unique per observer).
const obs = data.observations || [];
group._children = obs.length
? obs.map(o => clearParsedCache({...pkt, ...o, _isObservation: true}))
: [pkt];
group._fetchedData = { packet: pkt, observations: obs };
group._children = data.packets || [];
sortGroupChildren(group);
}
}
@@ -1288,9 +1246,9 @@
const child = group?._children?.find(c => String(c.id) === String(value));
if (child) {
const parentData = group._fetchedData;
const obsPacket = parentData ? {...parentData.packet, observer_id: child.observer_id, observer_name: child.observer_name, snr: child.snr, rssi: child.rssi, path_json: child.path_json, resolved_path: child.resolved_path, direction: child.direction, timestamp: child.timestamp, first_seen: child.timestamp} : child;
const obsPacket = parentData ? {...parentData.packet, observer_id: child.observer_id, observer_name: child.observer_name, snr: child.snr, rssi: child.rssi, path_json: child.path_json, resolved_path: child.resolved_path, timestamp: child.timestamp, first_seen: child.timestamp} : child;
if (parentData) { clearParsedCache(obsPacket); }
selectPacket(child.id, parentHash, {packet: obsPacket, observations: parentData?.observations}, child.id);
selectPacket(child.id, parentHash, {packet: obsPacket, breakdown: parentData?.breakdown, observations: parentData?.observations}, child.id);
}
}
else if (action === 'select-hash') pktSelectHash(value);
@@ -1362,9 +1320,7 @@
// Channel color highlighting (#271)
const _grpDecoded = getParsedDecoded(p) || {};
const _grpChanStyle = window.ChannelColors ? window.ChannelColors.getRowStyle(_grpDecoded.type || groupTypeName, _grpDecoded.channel) : '';
const _grpHashStripe = _hashStripeStyle(p.hash);
const _grpStyle = _grpHashStripe + _grpChanStyle;
let html = `<tr class="${isSingle ? '' : 'group-header'} ${isExpanded ? 'expanded' : ''}" data-hash="${p.hash}" data-action="${isSingle ? 'select-hash' : 'toggle-select'}" data-value="${p.hash}" data-entry-idx="${entryIdx}" tabindex="0" role="row"${_grpStyle ? ' style="' + _grpStyle + '"' : ''}>
let html = `<tr class="${isSingle ? '' : 'group-header'} ${isExpanded ? 'expanded' : ''}" data-hash="${p.hash}" data-action="${isSingle ? 'select-hash' : 'toggle-select'}" data-value="${p.hash}" data-entry-idx="${entryIdx}" tabindex="0" role="row"${_grpChanStyle ? ' style="' + _grpChanStyle + '"' : ''}>
<td style="width:28px;text-align:center;cursor:pointer">${isSingle ? '' : (isExpanded ? '▼' : '▶')}</td>
<td class="col-region">${groupRegion ? `<span class="badge-region">${groupRegion}</span>` : '—'}</td>
<td class="col-time">${renderTimestampCell(p.latest)}</td>
@@ -1390,8 +1346,7 @@
const childRegion = c.observer_id ? (observerMap.get(c.observer_id)?.iata || '') : '';
const childPath = getParsedPath(c);
const childPathStr = renderPath(childPath, c.observer_id);
const _childHashStripe = _hashStripeStyle(c.hash || p.hash);
html += `<tr class="group-child" data-id="${c.id}" data-hash="${c.hash || ''}" data-action="select-observation" data-value="${c.id}" data-parent-hash="${p.hash}" data-entry-idx="${entryIdx}" tabindex="0" role="row"${_childHashStripe ? ' style="' + _childHashStripe + '"' : ''}>
html += `<tr class="group-child" data-id="${c.id}" data-hash="${c.hash || ''}" data-action="select-observation" data-value="${c.id}" data-parent-hash="${p.hash}" data-entry-idx="${entryIdx}" tabindex="0" role="row">
<td></td><td class="col-region">${childRegion ? `<span class="badge-region">${childRegion}</span>` : '—'}</td>
<td class="col-time">${renderTimestampCell(c.timestamp)}</td>
<td class="mono col-hash">${truncate(c.hash || '', 8)}</td>
@@ -1421,9 +1376,7 @@
const hashBytes = ((parseInt(p.raw_hex?.slice(2, 4), 16) || 0) >> 6) + 1;
const pathStr = renderPath(pathHops, p.observer_id);
const detail = getDetailPreview(decoded);
const _flatHashStripe = _hashStripeStyle(p.hash);
const _flatStyle = _flatHashStripe + _chanStyle;
return `<tr data-id="${p.id}" data-hash="${p.hash || ''}" data-action="select-hash" data-value="${p.hash || p.id}" data-entry-idx="${entryIdx}" tabindex="0" role="row" class="${selectedId === p.id ? 'selected' : ''}"${_flatStyle ? ' style="' + _flatStyle + '"' : ''}>
return `<tr data-id="${p.id}" data-hash="${p.hash || ''}" data-action="select-hash" data-value="${p.hash || p.id}" data-entry-idx="${entryIdx}" tabindex="0" role="row" class="${selectedId === p.id ? 'selected' : ''}"${_chanStyle ? ' style="' + _chanStyle + '"' : ''}>
<td></td><td class="col-region">${region ? `<span class="badge-region">${region}</span>` : '—'}</td>
<td class="col-time">${renderTimestampCell(p.timestamp)}</td>
<td class="mono col-hash">${truncate(p.hash || String(p.id), 8)}</td>
@@ -1682,14 +1635,7 @@

// Filter to claimed/favorited nodes — pure client-side filter (no server round-trip)
let displayPackets = packets;

// When loading a specific packet by hash, bypass ALL client-side filters
// (myNodes, type, observer, packet-filter-expression). The user is asking
// for THAT exact packet — saved type/observer/expression filters must not
// hide it. Hash filter is the exact identifier; nothing else applies.
const hashOnly = !!filters.hash;

if (!hashOnly && filters.myNodes) {
if (filters.myNodes) {
const myNodes = JSON.parse(localStorage.getItem('meshcore-my-nodes') || '[]');
const myKeys = myNodes.map(n => n.pubkey).filter(Boolean);
const favs = getFavorites();
@@ -1705,11 +1651,11 @@
}

// Client-side type/observer filtering
if (!hashOnly && filters.type) {
if (filters.type) {
const types = filters.type.split(',').map(Number);
displayPackets = displayPackets.filter(p => types.includes(p.payload_type));
}
if (!hashOnly && filters.observer) {
if (filters.observer) {
const obsIds = new Set(filters.observer.split(','));
displayPackets = displayPackets.filter(p => {
if (obsIds.has(p.observer_id)) return true;
@@ -1720,7 +1666,7 @@

// Packet Filter Language
const pfCount = document.getElementById('packetFilterCount');
if (!hashOnly && filters._packetFilter) {
if (filters._packetFilter) {
const beforeCount = displayPackets.length;
displayPackets = displayPackets.filter(filters._packetFilter);
if (pfCount) {
@@ -1851,7 +1797,7 @@
panel.innerHTML = isMobileNow ? '' : '<div class="panel-resize-handle" id="pktResizeHandle"></div>' + PANEL_CLOSE_HTML;
const content = document.createElement('div');
panel.appendChild(content);
await renderDetail(content, data, selectedObservationId);
await renderDetail(content, data);
if (!isMobileNow) initPanelResize();
} catch (e) {
panel.innerHTML = `<div class="text-muted">Error: ${e.message}</div>`;
@@ -1860,6 +1806,8 @@

async function renderDetail(panel, data, chosenObsId) {
const pkt = data.packet;
const breakdown = data.breakdown || {};
const ranges = breakdown.ranges || [];
const observations = data.observations || [];

// Per-observation rendering (issue #849):
@@ -1880,15 +1828,6 @@
const decoded = getParsedDecoded(effectivePkt) || {};
const pathHops = getParsedPath(effectivePkt) || [];

// Compute breakdown ranges from the actually-rendered raw_hex (per-observation).
// Single source of truth — derived from the same bytes we display, so a
// post-#882 per-obs raw_hex with a different path length than the top-level
// packet's raw_hex still gets accurate byte highlights.
const obsRawHexForRanges = effectivePkt.raw_hex || pkt.raw_hex || '';
const ranges = obsRawHexForRanges
? computeBreakdownRanges(obsRawHexForRanges, pkt.route_type, pkt.payload_type)
: [];

// Cross-check: hop count from raw_hex path_len byte vs path_json length
const obsRawHex = effectivePkt.raw_hex || pkt.raw_hex || '';
let rawHopCount = null;
@@ -1899,7 +1838,7 @@
if (!isNaN(plByte)) rawHopCount = plByte & 0x3F;
}
if (rawHopCount != null && pathHops.length !== rawHopCount) {
console.warn(`[CoreScope] Hop count inconsistency for packet ${pkt.hash}: path_json has ${pathHops.length} hops but raw_hex path_len has ${rawHopCount}. UI shows path_json.`);
console.warn(`[CoreScope] Hop count inconsistency for packet ${pkt.hash}: path_json has ${pathHops.length} hops but raw_hex path_len has ${rawHopCount}. Trusting raw_hex.`);
}

// Resolve sender GPS — from packet directly, or from known node in DB
@@ -2036,10 +1975,8 @@
? `<div class="anomaly-banner" style="background:var(--warning, #f0ad4e); color:#000; padding:8px 12px; border-radius:4px; margin-bottom:8px; font-weight:600;">⚠️ Anomaly: ${escapeHtml(decoded.anomaly)}</div>`
: '';

// Hop count display: use pathHops length (= effective observation's path_json).
// The raw_hex/path_json mismatch warning is logged above for diagnostics; the UI
// must stay self-consistent — top pill names and byte breakdown rows must agree.
const displayHopCount = pathHops.length;
// Hop count display: trust raw_hex (firmware truth) over path_json
const displayHopCount = rawHopCount != null ? rawHopCount : pathHops.length;
const obsIndicator = currentObs && observations.length > 1
? `<span style="font-size:0.8em;color:var(--text-muted);margin-left:6px">(observation ${observations.indexOf(currentObs) + 1} of ${observations.length})</span>`
: '';
@@ -2244,19 +2181,18 @@
rows += fieldRow(off, 'Path Length', '0x' + (buf.slice(off * 2, off * 2 + 2) || '??'), hashCountVal === 0 ? `hash_count=0 (direct advert)` : `hash_size=${hashSizeVal} byte${hashSizeVal !== 1 ? 's' : ''}, hash_count=${hashCountVal}`);
off += 1;

// Path — render hops from path_json (what this observation reported).
// Byte offsets advance by hashSize * pathHops.length to match.
// Path — derive hop count from path_len byte (firmware truth), not aggregated _parsedPath
const hashSize = isNaN(pathByte0) ? 1 : ((pathByte0 >> 6) + 1);
if (pathHops.length > 0) {
rows += sectionRow('Path (' + pathHops.length + ' hops)', 'section-path');
for (let i = 0; i < pathHops.length; i++) {
if (typeof hashCountVal === 'number' && hashCountVal > 0) {
rows += sectionRow('Path (' + hashCountVal + ' hops)', 'section-path');
for (let i = 0; i < hashCountVal; i++) {
const hopOff = off + i * hashSize;
const hex = String(pathHops[i] || '').toUpperCase();
const hex = buf.slice(hopOff * 2, (hopOff + hashSize) * 2).toUpperCase();
const hopHtml = HopDisplay.renderHop(hex, hopNameCache[hex]);
const label = `Hop ${i} — ${hopHtml}`;
rows += fieldRow(hopOff, label, hex, '');
}
off += hashSize * pathHops.length;
off += hashSize * hashCountVal;
}
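
// Worked example of the path_len byte decoding used above (the byte value is
// illustrative): 0x8A = 1000_1010 → hashSize = (0x8A >> 6) + 1 = 3 bytes per
// hop, hashCount = 0x8A & 0x3F = 10 hops, so the path region spans
// 3 × 10 = 30 bytes (60 hex chars) starting at the current offset.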

// Payload
@@ -2530,7 +2466,7 @@
renderTableRows();
return;
}
// Single fetch — gets packet + observations + path
// Single fetch — gets packet + observations + path + breakdown
try {
const data = await api(`/packets/${hash}`);
const pkt = data.packet;
@@ -2564,22 +2500,12 @@
} catch {}
}

let _lastColorByHash = _isColorByHash();
function _onStorageChange() {
var current = _isColorByHash();
if (_lastColorByHash !== current) {
_lastColorByHash = current;
renderVisibleRows();
}
}

let _themeRefreshHandler = null;

registerPage('packets', {
init: function(app, routeParam) {
_themeRefreshHandler = () => { if (typeof renderTableRows === 'function') renderTableRows(); };
window.addEventListener('theme-refresh', _themeRefreshHandler);
window.addEventListener('storage', _onStorageChange);
var result = init(app, routeParam);
// Install channel color picker on packets table (M2, #271)
if (window.ChannelColorPicker) window.ChannelColorPicker.installPacketsTable();
@@ -2587,7 +2513,6 @@
},
destroy: function() {
if (_themeRefreshHandler) { window.removeEventListener('theme-refresh', _themeRefreshHandler); _themeRefreshHandler = null; }
window.removeEventListener('storage', _onStorageChange);
return destroy();
}
});
@@ -2616,7 +2541,6 @@
buildGroupRowHtml,
buildFlatRowHtml,
_calcVisibleRange,
buildPacketsParams,
};
}

@@ -1,205 +0,0 @@
// Path Inspector — prefix candidate scoring with map overlay (issue #944).
// IIFE; exports window.PathInspector for testability.
(function () {
'use strict';

var container = null;
var currentResults = null;

function init(app) {
container = app;
var params = new URLSearchParams(location.hash.split('?')[1] || '');
var prefixParam = params.get('prefixes') || '';

container.innerHTML =
'<div class="path-inspector-page">' +
'<h2>Path Inspector</h2>' +
'<p class="help-text">Enter comma or space-separated hex prefixes (1-3 bytes each, e.g. <code>2C,A1,F4</code> or <code>2C A1 F4</code>).</p>' +
'<div class="path-inspector-input-row">' +
'<input type="text" id="path-inspector-input" class="input" placeholder="2C,A1,F4 or 2C A1 F4" value="' + escapeAttr(prefixParam) + '">' +
'<button id="path-inspector-submit" class="btn btn-primary">Inspect</button>' +
'</div>' +
'<div id="path-inspector-error" class="path-inspector-error"></div>' +
'<div id="path-inspector-results"></div>' +
'</div>';

var input = document.getElementById('path-inspector-input');
var btn = document.getElementById('path-inspector-submit');
btn.addEventListener('click', function () { submit(input.value); });
input.addEventListener('keydown', function (e) {
if (e.key === 'Enter') submit(input.value);
});

// Auto-run if prefixes in URL.
if (prefixParam) submit(prefixParam);
}

function destroy() {
container = null;
currentResults = null;
}

function parsePrefixes(raw) {
// Accept comma or space separated.
var parts = raw.trim().split(/[\s,]+/).filter(function (s) { return s.length > 0; });
return parts.map(function (p) { return p.toLowerCase(); });
}

function validatePrefixes(prefixes) {
if (prefixes.length === 0) return 'Enter at least one prefix.';
if (prefixes.length > 64) return 'Too many prefixes (max 64).';
var hexRe = /^[0-9a-f]+$/;
var byteLen = -1;
for (var i = 0; i < prefixes.length; i++) {
var p = prefixes[i];
if (!hexRe.test(p)) return 'Invalid hex: ' + p;
if (p.length % 2 !== 0) return 'Odd-length prefix: ' + p;
var bl = p.length / 2;
if (bl > 3) return 'Prefix too long (max 3 bytes): ' + p;
if (byteLen === -1) byteLen = bl;
else if (bl !== byteLen) return 'Mixed prefix lengths not allowed.';
}
return null;
}
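
// Usage sketch (illustrative inputs, results follow directly from the checks
// above):
//   validatePrefixes(['2c', 'a1'])   → null (valid, uniform 1-byte prefixes)
//   validatePrefixes(['2c', 'a1b2']) → 'Mixed prefix lengths not allowed.'
//   validatePrefixes(['2c9'])        → 'Odd-length prefix: 2c9'
//   validatePrefixes(['zz'])         → 'Invalid hex: zz'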

function submit(raw) {
var errDiv = document.getElementById('path-inspector-error');
var resultsDiv = document.getElementById('path-inspector-results');
errDiv.textContent = '';
resultsDiv.innerHTML = '';

var prefixes = parsePrefixes(raw);
var err = validatePrefixes(prefixes);
if (err) {
errDiv.textContent = err;
return;
}

// Update URL.
var base = '#/tools/path-inspector';
if (location.hash.indexOf(base) === 0) {
history.replaceState(null, '', base + '?prefixes=' + prefixes.join(','));
}

resultsDiv.innerHTML = '<p>Loading...</p>';
fetch('/api/paths/inspect', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ prefixes: prefixes })
})
.then(function (r) {
if (r.status === 503) return r.json().then(function (d) { throw new Error('Service warming up, retry in a few seconds.'); });
if (!r.ok) return r.json().then(function (d) { throw new Error(d.error || 'Request failed'); });
return r.json();
})
.then(function (data) {
currentResults = data;
renderResults(data, resultsDiv);
})
.catch(function (e) {
resultsDiv.innerHTML = '';
errDiv.textContent = e.message;
});
}

function renderResults(data, div) {
if (!data.candidates || data.candidates.length === 0) {
div.innerHTML = '<p class="no-results">No candidates found. The prefixes may not match any known path-eligible nodes.</p>';
return;
}

var html = '<table class="path-inspector-table"><thead><tr>' +
'<th>#</th><th>Score</th><th>Path</th><th>Action</th>' +
'</tr></thead><tbody>';

for (var i = 0; i < data.candidates.length; i++) {
var c = data.candidates[i];
var rowClass = c.speculative ? 'speculative-row' : '';
html += '<tr class="' + rowClass + '">';
html += '<td>' + (i + 1) + '</td>';
html += '<td class="' + (c.speculative ? 'speculative-warning' : '') + '">' +
c.score.toFixed(3) +
(c.speculative ? ' <span class="speculative-badge" title="Low evidence; may be wrong">⚠</span>' : '') +
'</td>';
html += '<td>' + escapeHtml(c.names.join(' → ')) + '</td>';
html += '<td><button class="btn btn-sm" data-idx="' + i + '">Show on Map</button></td>';
html += '</tr>';

// Per-hop evidence (collapsed).
html += '<tr class="evidence-row collapsed" data-evidence="' + i + '"><td colspan="4"><div class="evidence-detail">';
for (var j = 0; j < c.evidence.perHop.length; j++) {
var h = c.evidence.perHop[j];
html += '<div class="hop-evidence">Hop ' + (j + 1) + ': prefix=' + h.prefix +
', candidates=' + h.candidatesConsidered +
', edge=' + h.edgeWeight.toFixed(3);
if (h.alternatives && h.alternatives.length > 0) {
html += '<div class="hop-alternatives" style="margin-left:12px;font-size:12px;color:var(--text-muted);">';
for (var k = 0; k < h.alternatives.length; k++) {
var alt = h.alternatives[k];
html += '<div>↳ ' + escapeHtml(alt.name || alt.publicKey.substring(0, 8)) + ' (score=' + alt.score.toFixed(3) + ')</div>';
}
html += '</div>';
}
html += '</div>';
}
html += '</div></td></tr>';
}

html += '</tbody></table>';
html += '<div class="path-inspector-stats">Beam width: ' + data.stats.beamWidth +
' | Expansions: ' + data.stats.expansionsRun +
' | Elapsed: ' + data.stats.elapsedMs + 'ms</div>';

div.innerHTML = html;

// Wire up Show on Map buttons.
div.querySelectorAll('button[data-idx]').forEach(function (btn) {
btn.addEventListener('click', function () {
var idx = parseInt(btn.dataset.idx);
showOnMap(data.candidates[idx]);
});
});

// Wire up row expand for evidence.
div.querySelectorAll('.path-inspector-table tbody tr:not(.evidence-row)').forEach(function (row) {
row.style.cursor = 'pointer';
row.addEventListener('click', function (e) {
if (e.target.tagName === 'BUTTON') return;
var idx = row.querySelector('button[data-idx]');
if (!idx) return;
var evidenceRow = div.querySelector('tr[data-evidence="' + idx.dataset.idx + '"]');
if (evidenceRow) evidenceRow.classList.toggle('collapsed');
});
});
}

function showOnMap(candidate) {
// Store pending route for map init to pick up.
window._pendingPathInspectorRoute = candidate;
// Switch to map page if not there; map init will draw the route.
if (location.hash.indexOf('#/map') !== 0) {
location.hash = '#/map';
} else {
// Already on map — draw directly.
delete window._pendingPathInspectorRoute;
if (window.routeLayer) window.routeLayer.clearLayers();
// Pass FULL path as hopKeys (not slice(1)) — drawPacketRoute resolves
// each entry against nodes[] for plotting. The 2nd arg is the origin
// OBJECT (with pubkey/lat/lon/name); pass null since the origin is
// already the first hop in the path itself, and drawPacketRoute draws
// a marker for every resolved hop.
if (window.drawPacketRoute) window.drawPacketRoute(candidate.path, null);
}
}

function escapeAttr(s) {
return s.replace(/&/g, '&amp;').replace(/"/g, '&quot;').replace(/</g, '&lt;');
}

function escapeHtml(s) {
return s.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
}
|
||||
|
||||
window.PathInspector = { init: init, destroy: destroy, parsePrefixes: parsePrefixes, validatePrefixes: validatePrefixes };
|
||||
if (typeof registerPage === 'function') registerPage('path-inspector', { init: init, destroy: destroy });
|
||||
})();
|
||||
@@ -16,7 +16,6 @@
|
||||
--status-amber: #f59e0b;
|
||||
--status-amber-light: #fef3c7;
|
||||
--status-amber-text: #92400e;
|
||||
--path-inspector-speculative: #d97706;
|
||||
--role-observer: #8b5cf6;
|
||||
--accent-hover: #6db3ff;
|
||||
--text: #1a1a2e;
|
||||
@@ -53,7 +52,6 @@
|
||||
--status-amber: #f59e0b;
|
||||
--status-amber-light: #422006;
|
||||
--status-amber-text: #fcd34d;
|
||||
--path-inspector-speculative: #f59e0b;
|
||||
--surface-0: #0f0f23;
|
||||
--surface-1: #1a1a2e;
|
||||
--surface-2: #232340;
|
||||
@@ -2312,37 +2310,3 @@ th.sort-active { color: var(--accent, #60a5fa); }
|
||||
|
||||
.clock-filter-btn { font-size: 12px; padding: 3px 8px; border: 1px solid var(--border); border-radius: 4px; background: var(--card-bg, #fff); color: var(--text); cursor: pointer; margin-right: 4px; }
|
||||
.clock-filter-btn.active { background: var(--accent); color: #fff; border-color: var(--accent); }
|
||||
|
||||
/* === Path Inspector (issue #944) === */
|
||||
.path-inspector-page { padding: 16px; max-width: 900px; margin: 0 auto; }
|
||||
.path-inspector-input-row { display: flex; gap: 8px; margin-bottom: 12px; }
|
||||
.path-inspector-input-row .input { flex: 1; }
|
||||
.path-inspector-error { color: var(--status-red, #ef4444); font-size: 13px; margin-bottom: 8px; }
|
||||
.path-inspector-table { width: 100%; border-collapse: collapse; font-size: 13px; }
|
||||
.path-inspector-table th,
|
||||
.path-inspector-table td { padding: 6px 10px; border-bottom: 1px solid var(--border); text-align: left; }
|
||||
.path-inspector-table th { background: var(--card-bg); font-weight: 600; }
|
||||
.speculative-warning { color: var(--path-inspector-speculative, #d97706); font-weight: 600; }
|
||||
.speculative-badge { cursor: help; }
|
||||
.speculative-row { background: color-mix(in srgb, var(--path-inspector-speculative, #d97706) 8%, transparent); }
|
||||
.evidence-row { font-size: 12px; color: var(--text-muted); }
|
||||
.evidence-row.collapsed { display: none; }
|
||||
.evidence-detail { padding: 4px 10px; }
|
||||
.hop-evidence { margin: 2px 0; }
|
||||
.path-inspector-stats { margin-top: 12px; font-size: 12px; color: var(--text-muted); }
|
||||
.no-results { color: var(--text-muted); font-style: italic; }
|
||||
|
||||
/* Map side pane for path inspector */
|
||||
.map-side-pane { flex: 0 0 32px; overflow: hidden; transition: flex-basis 0.2s; border-left: 1px solid var(--border); background: var(--card-bg); }
|
||||
.map-side-pane.expanded { flex: 0 0 320px; overflow-y: auto; padding: 12px; }
|
||||
.map-side-pane .pane-toggle { cursor: pointer; padding: 8px; font-size: 14px; text-align: center; }
|
||||
.map-side-pane .pane-content { display: none; }
|
||||
.map-side-pane.expanded .pane-content { display: block; }
|
||||
|
||||
/* Tools landing page */
|
||||
.tools-landing { padding: 24px; max-width: 600px; }
|
||||
.tools-menu { display: flex; flex-direction: column; gap: 12px; margin-top: 16px; }
|
||||
.tools-card { display: block; padding: 16px; border-radius: 8px; border: 1px solid var(--border); background: var(--card-bg); color: var(--text); text-decoration: none; transition: border-color 0.2s; }
|
||||
.tools-card:hover { border-color: var(--primary); }
|
||||
.tools-card h3 { margin: 0 0 4px 0; font-size: 16px; }
|
||||
.tools-card p { margin: 0; font-size: 13px; color: var(--text-muted); }
|
||||
|
||||
+111
@@ -59,7 +59,118 @@ test('null lastSeenMs → stale', () => assert.strictEqual(getNodeStatus('repeat
|
||||
test('undefined lastSeenMs → stale', () => assert.strictEqual(getNodeStatus('repeater', undefined), 'stale'));
test('0 lastSeenMs → stale', () => assert.strictEqual(getNodeStatus('repeater', 0), 'stale'));

// === getStatusInfo tests (inline since nodes.js has too many DOM deps) ===
console.log('\n=== getStatusInfo (logic validation) ===');

// Simulate getStatusInfo logic
function mockGetStatusInfo(n) {
  const ROLE_COLORS = ctx.window.ROLE_COLORS;
  const role = (n.role || '').toLowerCase();
  const roleColor = ROLE_COLORS[n.role] || '#6b7280';
  const lastHeardTime = n._lastHeard || n.last_heard || n.last_seen;
  const lastHeardMs = lastHeardTime ? new Date(lastHeardTime).getTime() : 0;
  const status = getNodeStatus(role, lastHeardMs);
  const statusLabel = status === 'active' ? '🟢 Active' : '⚪ Stale';
  const isInfra = role === 'repeater' || role === 'room';

  let explanation = '';
  if (status === 'active') {
    explanation = 'Last heard recently';
  } else {
    const reason = isInfra
      ? 'repeaters typically advertise every 12-24h'
      : 'companions only advertise when user initiates, this may be normal';
    explanation = 'Not heard — ' + reason;
  }
  return { status, statusLabel, roleColor, explanation, role };
}

test('active repeater → 🟢 Active, red color', () => {
  const info = mockGetStatusInfo({ role: 'repeater', last_seen: new Date(now - 1*h).toISOString() });
  assert.strictEqual(info.status, 'active');
  assert.strictEqual(info.statusLabel, '🟢 Active');
  assert.strictEqual(info.roleColor, '#dc2626');
});

test('stale companion → ⚪ Stale, explanation mentions "this may be normal"', () => {
  const info = mockGetStatusInfo({ role: 'companion', last_seen: new Date(now - 25*h).toISOString() });
  assert.strictEqual(info.status, 'stale');
  assert.strictEqual(info.statusLabel, '⚪ Stale');
  assert(info.explanation.includes('this may be normal'), 'should mention "this may be normal"');
});

test('missing last_seen → stale', () => {
  const info = mockGetStatusInfo({ role: 'repeater' });
  assert.strictEqual(info.status, 'stale');
});

test('missing role → defaults to empty string, uses node threshold', () => {
  const info = mockGetStatusInfo({ last_seen: new Date(now - 25*h).toISOString() });
  assert.strictEqual(info.status, 'stale');
  assert.strictEqual(info.roleColor, '#6b7280');
});

test('prefers last_heard over last_seen', () => {
  // last_seen is stale, but last_heard is recent
  const info = mockGetStatusInfo({
    role: 'companion',
    last_seen: new Date(now - 48*h).toISOString(),
    last_heard: new Date(now - 1*h).toISOString()
  });
  assert.strictEqual(info.status, 'active');
});

// === getStatusTooltip tests ===
console.log('\n=== getStatusTooltip ===');

// Load from nodes.js by extracting the function
// Since nodes.js is complex, I'll re-implement the tooltip function for testing
function getStatusTooltip(role, status) {
  const isInfra = role === 'repeater' || role === 'room';
  const threshold = isInfra ? '72h' : '24h';
  if (status === 'active') {
    return 'Active — heard within the last ' + threshold + '.' + (isInfra ? ' Repeaters typically advertise every 12-24h.' : '');
  }
  if (role === 'companion') {
    return 'Stale — not heard for over ' + threshold + '. Companions only advertise when the user initiates — this may be normal.';
  }
  if (role === 'sensor') {
    return 'Stale — not heard for over ' + threshold + '. This sensor may be offline.';
  }
  return 'Stale — not heard for over ' + threshold + '. This ' + role + ' may be offline or out of range.';
}

test('active repeater mentions "72h" and "advertise every 12-24h"', () => {
  const tip = getStatusTooltip('repeater', 'active');
  assert(tip.includes('72h'), 'should mention 72h');
  assert(tip.includes('advertise every 12-24h'), 'should mention advertise frequency');
});

test('active companion mentions "24h"', () => {
  const tip = getStatusTooltip('companion', 'active');
  assert(tip.includes('24h'), 'should mention 24h');
});

test('stale companion mentions "24h" and "user initiates"', () => {
  const tip = getStatusTooltip('companion', 'stale');
  assert(tip.includes('24h'), 'should mention 24h');
  assert(tip.includes('user initiates'), 'should mention user initiates');
});

test('stale repeater mentions "offline or out of range"', () => {
  const tip = getStatusTooltip('repeater', 'stale');
  assert(tip.includes('offline or out of range'), 'should mention offline or out of range');
});

test('stale sensor mentions "sensor may be offline"', () => {
  const tip = getStatusTooltip('sensor', 'stale');
  assert(tip.includes('sensor may be offline'));
});

test('stale room uses 72h threshold', () => {
  const tip = getStatusTooltip('room', 'stale');
  assert(tip.includes('72h'));
});

// === Bug check: renderRows uses last_seen instead of last_heard || last_seen ===
console.log('\n=== BUG CHECK ===');

@@ -0,0 +1,123 @@
/**
 * test-anim-perf.js — Performance benchmark for animation timer management
 *
 * Demonstrates that the rAF + concurrency-cap approach keeps active animation
 * count bounded, whereas the old setInterval approach accumulated without limit.
 *
 * Run: node test-anim-perf.js
 */
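
// A minimal browser-style sketch (assumed names; not the actual live.js
// implementation) of the rAF + concurrency-cap pattern this benchmark models:
// each animation claims a slot, draws on requestAnimationFrame until its
// duration elapses, then releases the slot. Starts beyond the cap are dropped.
function startAnimSketch(durationMs, draw, state = { active: 0, max: 20 }) {
  if (state.active >= state.max) return false; // cap reached: drop this animation
  state.active++;
  const t0 = performance.now();
  requestAnimationFrame(function frame(now) {
    const p = Math.min((now - t0) / durationMs, 1); // progress in [0, 1]
    draw(p);
    if (p < 1) requestAnimationFrame(frame);
    else state.active--; // completion releases the slot, bounding concurrency
  });
  return true;
}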

'use strict';

let passed = 0, failed = 0;
function assert(cond, msg) {
  if (cond) { console.log(` ✅ ${msg}`); passed++; }
  else { console.log(` ❌ ${msg}`); failed++; }
}

// ---------------------------------------------------------------------------
// Simulate OLD behaviour: setInterval-based, no concurrency cap
// ---------------------------------------------------------------------------
function simulateOldModel(packetsPerSec, hopsPerPacket, durationSec) {
  // Each hop spawns 3 intervals (pulse 26ms, line 33ms, fade 52ms).
  // Pulse lasts ~2s, line ~0.66s, fade ~0.8s+0.4s ≈ 1.2s
  // At any moment, timers from the last ~2s of packets are still alive.
  const intervalLifetimes = [2.0, 0.66, 1.2]; // seconds each interval lives
  let maxConcurrent = 0;
  // Walk through time in 0.1s steps
  const dt = 0.1;
  const spawns = []; // {time, lifetime}
  for (let t = 0; t < durationSec; t += dt) {
    // Spawn timers for packets arriving in this window
    const pktsInWindow = packetsPerSec * dt;
    for (let p = 0; p < pktsInWindow; p++) {
      for (let h = 0; h < hopsPerPacket; h++) {
        for (const lt of intervalLifetimes) {
          spawns.push({ time: t, lifetime: lt });
        }
      }
    }
    // Count alive timers
    const alive = spawns.filter(s => t < s.time + s.lifetime).length;
    if (alive > maxConcurrent) maxConcurrent = alive;
  }
  return maxConcurrent;
}

// ---------------------------------------------------------------------------
// Simulate NEW behaviour: rAF + MAX_CONCURRENT_ANIMS cap
// ---------------------------------------------------------------------------
function simulateNewModel(packetsPerSec, hopsPerPacket, durationSec) {
  const MAX_CONCURRENT_ANIMS = 20;
  let activeAnims = 0;
  let maxConcurrent = 0;
  const anims = []; // {endTime}
  const dt = 0.1;
  for (let t = 0; t < durationSec; t += dt) {
    // Expire finished animations
    while (anims.length && anims[0].endTime <= t) {
      anims.shift();
      activeAnims--;
    }
    // Try to start new animations
    const pktsInWindow = packetsPerSec * dt;
    for (let p = 0; p < pktsInWindow; p++) {
      if (activeAnims >= MAX_CONCURRENT_ANIMS) break; // cap reached — drop
      activeAnims++;
      // rAF animation lifetime: longest is pulse ~2s
      anims.push({ endTime: t + 2.0 });
    }
    // Sort by endTime so expiry works
    anims.sort((a, b) => a.endTime - b.endTime);
    if (activeAnims > maxConcurrent) maxConcurrent = activeAnims;
  }
  return maxConcurrent;
}

// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------

console.log('\n=== Animation timer accumulation: old vs new ===');

// Scenario: 5 pkts/sec, 3 hops each, 30 seconds
const oldPeak30s = simulateOldModel(5, 3, 30);
const newPeak30s = simulateNewModel(5, 3, 30);
console.log(` Old model (30s @ 5pkt/s×3hops): peak ${oldPeak30s} concurrent timers`);
console.log(` New model (30s @ 5pkt/s×3hops): peak ${newPeak30s} concurrent animations`);
assert(oldPeak30s > 100, `old model accumulates >100 timers (got ${oldPeak30s})`);
assert(newPeak30s <= 20, `new model stays ≤20 (got ${newPeak30s})`);

// Scenario: 5 minutes sustained
const oldPeak5m = simulateOldModel(5, 3, 300);
const newPeak5m = simulateNewModel(5, 3, 300);
console.log(` Old model (5min @ 5pkt/s×3hops): peak ${oldPeak5m} concurrent timers`);
console.log(` New model (5min @ 5pkt/s×3hops): peak ${newPeak5m} concurrent animations`);
assert(oldPeak5m > 100, `old model at 5min still unbounded (got ${oldPeak5m})`);
assert(newPeak5m <= 20, `new model at 5min still ≤20 (got ${newPeak5m})`);

// Scenario: burst — 20 pkts/sec for 10s
const oldBurst = simulateOldModel(20, 3, 10);
const newBurst = simulateNewModel(20, 3, 10);
console.log(` Old model (burst 20pkt/s×3hops, 10s): peak ${oldBurst} concurrent timers`);
console.log(` New model (burst 20pkt/s×3hops, 10s): peak ${newBurst} concurrent animations`);
assert(oldBurst > 200, `old model under burst >200 timers (got ${oldBurst})`);
assert(newBurst <= 20, `new model under burst stays ≤20 (got ${newBurst})`);

console.log('\n=== drawAnimatedLine frame-drop catch-up ===');

// Read the source and verify catch-up logic exists
const fs = require('fs');
const src = fs.readFileSync(__dirname + '/public/live.js', 'utf8');

// Extract the animateLine function body
const lineMatch = src.match(/function animateLine\(now\)\s*\{[\s\S]*?requestAnimationFrame\(animateLine\)/);
assert(lineMatch && /Math\.min\(Math\.floor\(elapsed\s*\/\s*33\)/.test(lineMatch[0]),
  'drawAnimatedLine catches up on frame drops (multi-tick per frame)');

const fadeMatch = src.match(/function animateFade\(now\)\s*\{[\s\S]*?requestAnimationFrame\(animateFade\)/);
assert(fadeMatch && /Math\.min\(Math\.floor\(fadeElapsed\s*\/\s*52\)/.test(fadeMatch[0]),
  'animateFade catches up on frame drops (multi-tick per frame)');

console.log(`\n${passed} passed, ${failed} failed\n`);
process.exit(failed ? 1 : 0);
@@ -0,0 +1,64 @@
/**
 * Tests for #759 — Add channel UX: button, hint, status feedback.
 * Validates the HTML structure rendered by channels.js init.
 */
'use strict';

const fs = require('fs');

let passed = 0;
let failed = 0;

function assert(cond, msg) {
  if (cond) { passed++; console.log(' ✓ ' + msg); }
  else { failed++; console.error(' ✗ ' + msg); }
}

function assertIncludes(html, substr, msg) {
  assert(html.includes(substr), msg);
}

// Read the channels.js source to extract the HTML template
const src = fs.readFileSync(__dirname + '/public/channels.js', 'utf8');

// Extract the sidebar HTML from the template literal
const htmlMatch = src.match(/app\.innerHTML\s*=\s*`([\s\S]*?)`;/);
const html = htmlMatch ? htmlMatch[1] : '';

console.log('Test: Add channel UX (#759)');

// 1. Button renders in the form
assertIncludes(html, 'class="ch-add-btn"', 'Add button has ch-add-btn class');
assertIncludes(html, 'type="submit"', 'Button is type=submit');
assertIncludes(html, '>+</button>', 'Button shows + text');

// 2. Form has proper structure
assertIncludes(html, 'class="ch-add-form"', 'Form has ch-add-form class');
assertIncludes(html, 'class="ch-add-row"', 'Row wrapper present');
assert(!html.includes('class="ch-add-label"'), 'Label removed (redundant with hint)');

// 3. Hint text present
assertIncludes(html, 'class="ch-add-hint"', 'Hint div present');
assertIncludes(html, 'e.g. #LongFast or 32-char hex key', 'Hint text correct');

// 4. Status div present
assertIncludes(html, 'id="chAddStatus"', 'Status div has correct id');
assertIncludes(html, 'class="ch-add-status"', 'Status div has correct class');
assertIncludes(html, 'style="display:none"', 'Status div hidden by default');

// 5. showAddStatus function exists in source
assert(src.includes('function showAddStatus('), 'showAddStatus function defined');
assert(src.includes("'success'"), 'Success status type referenced');
assert(src.includes("'error'"), 'Error status type referenced');

// 6. CSS classes exist
const css = fs.readFileSync(__dirname + '/public/style.css', 'utf8');
assert(css.includes('.ch-add-form'), 'CSS: .ch-add-form defined');
assert(css.includes('.ch-add-btn'), 'CSS: .ch-add-btn defined');
assert(css.includes('.ch-add-hint'), 'CSS: .ch-add-hint defined');
assert(css.includes('.ch-add-status'), 'CSS: .ch-add-status defined');
assert(css.includes('.ch-add-row'), 'CSS: .ch-add-row defined');
// .ch-add-label CSS kept for backward compat but label removed from HTML

console.log('\n' + passed + ' passed, ' + failed + ' failed');
process.exit(failed > 0 ? 1 : 0);
+14 -492
@@ -15,11 +15,6 @@ async function test(name, fn) {
    results.push({ name, pass: true });
    console.log(` \u2705 ${name}`);
  } catch (err) {
    if (err.skip) {
      results.push({ name, pass: true, skipped: true });
      console.log(` ⏭ ${name}: ${err.message}`);
      return;
    }
    results.push({ name, pass: false, error: err.message });
    console.log(` \u274c ${name}: ${err.message}`);
    console.log(`\nFail-fast: stopping after first failure.`);
@@ -224,7 +219,10 @@ async function run() {
  // Test 5: Node detail loads (reuses nodes page from test 2)
  await test('Node detail loads', async () => {
    await page.waitForSelector('table tbody tr');
    await page.click('table tbody tr');
    // Click first row
    const firstRow = await page.$('table tbody tr');
    assert(firstRow, 'No node rows found');
    await firstRow.click();
    // Wait for detail pane to appear
    await page.waitForSelector('.node-detail');
    const html = await page.content();
@@ -237,14 +235,17 @@ async function run() {
  await test('Node side panel Details link navigates', async () => {
    await page.goto(`${BASE}/#/nodes`, { waitUntil: 'domcontentloaded' });
    await page.waitForSelector('table tbody tr');
    await page.click('table tbody tr');
    // Click first row to open side panel
    const firstRow = await page.$('table tbody tr');
    assert(firstRow, 'No node rows found');
    await firstRow.click();
    await page.waitForSelector('.node-detail');
    // Find the Details link in the side panel
    await page.waitForSelector('#nodesRight a.btn-primary[href^="#/nodes/"]');
    const href = await page.$eval('#nodesRight a.btn-primary[href^="#/nodes/"]', el => el.getAttribute('href'));
    assert(href, 'Details link not found in side panel');
    const detailsLink = await page.$('#nodesRight a.btn-primary[href^="#/nodes/"]');
    assert(detailsLink, 'Details link not found in side panel');
    const href = await detailsLink.getAttribute('href');
    // Click the Details link — this should navigate to the full detail page
    await page.click('#nodesRight a.btn-primary[href^="#/nodes/"]');
    await detailsLink.click();
    // Wait for navigation — the full detail page has sections like neighbors/packets
    await page.waitForFunction((expectedHash) => {
      return location.hash === expectedHash;
@@ -657,8 +658,6 @@ async function run() {
    await page.waitForSelector('#ngCanvas', { timeout: 8000 });
    const hasCanvas = await page.$('#ngCanvas');
    assert(hasCanvas, 'Neighbor Graph tab should have a canvas element');
    // Stats are populated after the async API call — wait for at least one card before counting
    await page.waitForSelector('#ngStats .stat-card', { timeout: 8000 });
    const hasStats = await page.$$eval('#ngStats .stat-card', els => els.length);
    assert(hasStats >= 3, `Neighbor Graph stats should have >=3 cards, got ${hasStats}`);
    // Verify filters exist
@@ -1354,38 +1353,6 @@ async function run() {
    await page.evaluate(() => localStorage.removeItem('cs-theme-overrides'));
  });

  await test('Customizer v2: typing in text field does not collapse focus (re-render guard)', async () => {
    await page.goto(BASE, { waitUntil: 'domcontentloaded' });
    await page.waitForSelector('nav, .navbar, .nav, [class*="nav"]');
    await page.waitForFunction(() => window._customizerV2 && window._customizerV2.initDone, { timeout: 5000 });
    const toggleSel = '#customizeToggle, button[title*="ustom" i], [class*="customize"]';
    const btn = await page.$(toggleSel);
    if (!btn) { console.log(' ⏭️ Customizer toggle not found'); return; }
    await btn.click();
    await page.waitForSelector('.cust-overlay', { timeout: 5000 });
    const result = await page.evaluate(() => {
      const input = document.querySelector('.cust-overlay input[type="text"][data-cv2-field]');
      if (!input) return { skipped: true };
      input.focus();
      input.value = 'test';
      input.dispatchEvent(new Event('input', { bubbles: true }));
      const inputRef = input;
      return new Promise(resolve => {
        setTimeout(() => {
          const panel = document.querySelector('.cust-overlay');
          resolve({
            inputConnected: inputRef.isConnected,
            focusInPanel: panel ? panel.contains(document.activeElement) : false,
          });
        }, 500);
      });
    });
    if (result.skipped) { console.log(' ⏭️ No text input with data-cv2-field found in panel'); return; }
    assert(result.inputConnected, 'Input element should remain connected to DOM after debounce fires');
    assert(result.focusInPanel, 'Focus should remain inside panel after debounce — re-render must not run while typing');
    await page.evaluate(() => localStorage.removeItem('cs-theme-overrides'));
  });

  await test('Show Neighbors populates neighborPubkeys from affinity API', async () => {
    const testPubkey = 'aabbccdd11223344556677889900aabbccddeeff00112233445566778899001122';
@@ -1811,457 +1778,12 @@ async function run() {
    }
  });

  // Test: Expanded group children have unique observation ids (#866)
  await test('Expanded group children update detail pane per-observation', async () => {
    await page.goto(`${BASE}/#/packets`, { waitUntil: 'domcontentloaded' });
    // Ensure grouped mode and wide time window
    await page.evaluate(() => {
      localStorage.setItem('meshcore-time-window', '525600');
      localStorage.setItem('meshcore-groupbyhash', 'true');
    });
    await page.reload({ waitUntil: 'load' });
    await page.waitForSelector('table tbody tr', { timeout: 15000 });

    // Find a group row with observation_count > 1 (has expand button)
    const expandBtn = await page.$('table tbody tr .expand-btn, table tbody tr [data-expand]');
    if (!expandBtn) {
      console.log(' ℹ️ No expandable groups found — skipping child assertion');
      return;
    }

    // Click expand and wait for the /packets/<hash> detail API call
    const [detailResp] = await Promise.all([
      page.waitForResponse(resp => {
        const u = new URL(resp.url(), BASE);
        // Match /api/packets/<hash> but not /api/packets?... or /api/packets/observations
        return /\/api\/packets\/[A-Fa-f0-9]+$/.test(u.pathname) && resp.status() === 200;
      }, { timeout: 15000 }),
      expandBtn.click(),
    ]);
    assert(detailResp, 'Expected /api/packets/<hash> response on expand');

    // Wait for child rows to appear
    await page.waitForSelector('table tbody tr.child-row, table tbody tr[class*="child"]', { timeout: 5000 });
    const childRows = await page.$$('table tbody tr.child-row, table tbody tr[class*="child"]');
    if (childRows.length < 2) {
      console.log(' ℹ️ Group has < 2 children — skipping per-observation assertion');
      return;
    }

    // Click first child row
    await childRows[0].click();
    await page.waitForFunction(() => {
      const panel = document.getElementById('pktRight');
      return panel && !panel.classList.contains('empty') && panel.textContent.trim().length > 0;
    }, { timeout: 10000 });
    const content1 = await page.$eval('#pktRight', el => el.textContent.trim());
    const url1 = page.url();

    // Click second child row
    await childRows[1].click();
    await page.waitForTimeout(500);
    const content2 = await page.$eval('#pktRight', el => el.textContent.trim());
    const url2 = page.url();

    // URL should contain ?obs= with a real observation id
    assert(url1.includes('obs=') || url2.includes('obs='), `URL should contain obs= parameter, got: ${url1}`);

    // The two children should show different detail pane content (different observers)
    // At minimum, the URL obs= values should differ
    if (url1.includes('obs=') && url2.includes('obs=')) {
      const obs1 = new URL(url1).hash.match(/obs=(\d+)/)?.[1];
      const obs2 = new URL(url2).hash.match(/obs=(\d+)/)?.[1];
      if (obs1 && obs2) {
        assert(obs1 !== obs2, `Two children should have different obs ids, both got obs=${obs1}`);
      }
    }

    // Verify obs id is NOT the aggregate packet id (the bug from #866)
    const obsMatch = url2.match(/obs=(\d+)/);
    if (obsMatch) {
      const detailJson = await detailResp.json().catch(() => null);
      if (detailJson?.packet?.id) {
        const aggId = String(detailJson.packet.id);
        // At least one child obs id should differ from the aggregate packet id
        const obs1 = url1.match(/obs=(\d+)/)?.[1];
        const obs2 = url2.match(/obs=(\d+)/)?.[1];
        const allSameAsAgg = obs1 === aggId && obs2 === aggId;
        assert(!allSameAsAgg, `Child obs ids should not all equal aggregate packet.id (${aggId})`);
      }
    }
  });

  // Test: per-observation raw_hex — hex pane updates when switching observations (#881)
  await test('Packet detail hex pane updates per observation', async () => {
    await page.goto(BASE + '#/packets', { waitUntil: 'domcontentloaded' });
    await page.waitForSelector('table tbody tr', { timeout: 15000 });
    await page.waitForTimeout(500);

    // Try clicking packet rows to find one with multiple observations
    const rows = await page.$$('table tbody tr[data-action]');
    let obsRows = [];
    for (let i = 0; i < Math.min(rows.length, 10); i++) {
      await rows[i].click({ timeout: 3000 }).catch(() => null);
      await page.waitForTimeout(600);
      obsRows = await page.$$('.detail-obs-row');
      if (obsRows.length >= 2) break;
    }

    if (obsRows.length < 2) {
      console.log(' ⏭ Skipped: no packet with ≥2 observations found in first 10 rows');
      return;
    }

    // Click first observation, capture hex dump
    await obsRows[0].click({ timeout: 5000 });
    await page.waitForTimeout(500);
    const hex1 = await page.$eval('.hex-dump', el => el.textContent).catch(() => '');

    // Click second observation, capture hex dump
    await obsRows[1].click({ timeout: 5000 });
    await page.waitForTimeout(500);
    const hex2 = await page.$eval('.hex-dump', el => el.textContent).catch(() => '');

    // If both have content and differ, the feature works
    if (hex1 && hex2 && hex1 !== hex2) {
      console.log(' ✓ Hex pane content differs between observations');
    } else if (hex1 && hex2 && hex1 === hex2) {
      console.log(' ⏭ Hex same for both observations (likely historical NULL raw_hex — OK)');
    } else {
      console.log(' ⏭ Could not capture hex content from both observations');
    }
  });

  // Test: path pill (top) and byte breakdown (bottom) agree on hop count
  // Regression for visual mismatch where badge said "1 hop" but path text listed N names
  await test('Packet detail path pill and byte breakdown agree on hop count', async () => {
    await page.goto(BASE + '#/packets', { waitUntil: 'domcontentloaded' });
    await page.waitForSelector('table tbody tr', { timeout: 15000 });
    await page.waitForTimeout(500);

    // Click rows until we find one whose detail pane renders a multi-hop path
    const rows = await page.$$('table tbody tr[data-action]');
    let found = false;
    for (let i = 0; i < Math.min(rows.length, 15); i++) {
      await rows[i].click({ timeout: 3000 }).catch(() => null);
      await page.waitForTimeout(500);

      const result = await page.evaluate(() => {
        // Path pill: <dt>Path</dt><dd><span class="badge ...">N hops</span> ...names...</dd>
        const dts = document.querySelectorAll('dl.detail-meta dt');
        let pillBadgeCount = null;
        let pillNameCount = null;
        for (const dt of dts) {
          if (dt.textContent.trim() === 'Path') {
            const dd = dt.nextElementSibling;
            if (!dd) break;
            const badge = dd.querySelector('.badge');
            if (badge) {
              const m = badge.textContent.match(/(\d+)\s*hop/);
              if (m) pillBadgeCount = parseInt(m[1], 10);
            }
            // Count rendered hop links/spans (HopDisplay.renderHop output)
            const hops = dd.querySelectorAll('.hop-link, [data-hop-link], .hop-named, .hop-anonymous');
            pillNameCount = hops.length;
            break;
          }
        }
        // Byte breakdown: section row "Path (N hops)" + N "Hop X — ..." rows
        let breakdownSectionCount = null;
        let breakdownRowCount = 0;
        const fieldTable = document.querySelector('table.field-table');
        if (fieldTable) {
          for (const tr of fieldTable.querySelectorAll('tr')) {
            const txt = tr.textContent.trim();
            const sec = txt.match(/^Path\s*\((\d+)\s*hops?\)/);
            if (sec) breakdownSectionCount = parseInt(sec[1], 10);
            if (/^\s*\d+\s*Hop\s+\d+\s*—/.test(txt) || /^Hop\s+\d+\s*—/.test(txt.replace(/^\d+/, '').trim())) {
              breakdownRowCount++;
            }
          }
        }
        return { pillBadgeCount, pillNameCount, breakdownSectionCount, breakdownRowCount };
      });

      if (result.pillBadgeCount && result.pillBadgeCount > 0 && result.breakdownSectionCount != null) {
        found = true;
        // Top badge count must equal bottom section count
        assert(result.pillBadgeCount === result.breakdownSectionCount,
          `Path pill badge says ${result.pillBadgeCount} hops but byte breakdown says ${result.breakdownSectionCount} hops`);
        // Number of rendered hop names in pill should also match (within 1, since renderPath may add separators)
        if (result.pillNameCount != null && result.pillNameCount > 0) {
          assert(Math.abs(result.pillNameCount - result.pillBadgeCount) <= 1,
            `Path pill badge ${result.pillBadgeCount} but rendered ${result.pillNameCount} hop names`);
        }
        // And breakdown rendered rows should match its own section count
        assert(result.breakdownRowCount > 0,
          'breakdown rows selector matched nothing — selector or DOM changed');
        assert(result.breakdownRowCount === result.breakdownSectionCount,
          `Byte breakdown section says ${result.breakdownSectionCount} hops but rendered ${result.breakdownRowCount} hop rows`);
        console.log(` ✓ Path pill (${result.pillBadgeCount}) and byte breakdown (${result.breakdownSectionCount}) agree`);
        break;
      }
    }
    if (!found) {
      if (process.env.E2E_REQUIRE_PATH_TEST === '1') {
        throw new Error('BLOCKED — no multi-hop packet found in first 15 rows (E2E_REQUIRE_PATH_TEST=1 requires it)');
      }
      const skipErr = new Error('SKIP: No multi-hop packet with byte breakdown found in first 15 rows — needs fixture');
      skipErr.skip = true;
      throw skipErr;
    }
  });

  // Test: hex-strip color spans match the labeled byte rows (per-obs raw_hex).
  // Regression #891: server-supplied breakdown was computed once from top-level
  // raw_hex, so per-observation rendering had off-by-N highlights vs the labels.
  await test('Packet detail hex strip Path range matches hop row count', async () => {
    await page.goto(BASE + '#/packets', { waitUntil: 'domcontentloaded' });
    await page.waitForSelector('table tbody tr', { timeout: 15000 });
    await page.waitForTimeout(500);

    const rows = await page.$$('table tbody tr[data-action]');
    let checked = 0;
    for (let i = 0; i < Math.min(rows.length, 25) && checked < 3; i++) {
      await rows[i].click({ timeout: 3000 }).catch(() => null);
      await page.waitForTimeout(400);

      const result = await page.evaluate(() => {
        const dump = document.querySelector('.hex-dump');
        const fieldTable = document.querySelector('table.field-table');
        if (!dump || !fieldTable) return null;
        const pathSpan = dump.querySelector('span.hex-byte.hex-path');
        const pathBytes = pathSpan ? pathSpan.textContent.trim().split(/\s+/).filter(Boolean).length : 0;
        const hopRows = [];
        for (const tr of fieldTable.querySelectorAll('tr')) {
          const cells = [...tr.cells].map(c => c.textContent.trim());
          if (cells.length >= 2 && /^Hop\s+\d+/.test(cells[1])) hopRows.push(cells[2]);
        }
        return { pathBytes, hopRows };
      });

      if (!result || (result.pathBytes === 0 && result.hopRows.length === 0)) continue;
      checked++;
      // Either both zero, or the count of bytes inside hex-path == hop rows.
      // (For multi-byte hash sizes this is bytes-per-hop * hops; for hash_size=1 it's just hops.)
      // The simpler invariant: if there are hop rows, hex-path span must exist and have at least
      // as many bytes as there are hops (== exactly hops * hash_size).
      assert(result.hopRows.length > 0,
        `row ${i}: hex-path span has ${result.pathBytes} bytes but no hop rows in the labeled table`);
      assert(result.pathBytes >= result.hopRows.length,
        `row ${i}: hex-path has ${result.pathBytes} bytes but ${result.hopRows.length} hop rows — strip and labels disagree`);
      assert(result.pathBytes % result.hopRows.length === 0,
        `row ${i}: hex-path has ${result.pathBytes} bytes but ${result.hopRows.length} hop rows — bytes/hops not divisible (hash_size violated)`);
      console.log(` ✓ row ${i}: hex-path ${result.pathBytes} bytes / ${result.hopRows.length} hop rows (hash_size=${result.pathBytes / result.hopRows.length})`);
    }
    if (checked === 0) {
      const skipErr = new Error('SKIP: no packet with rendered hex strip + hop rows found in first 25 rows');
      skipErr.skip = true;
      throw skipErr;
    }
  });
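
  // Sketch (not from the suite itself): a compact restatement of the invariant
  // the assertions above encode. The hex strip's Path span must cover an exact
  // whole number of hash_size-byte hops, one hop per labeled row.
  function pathStripConsistentSketch(pathBytes, hopRowCount) {
    if (hopRowCount === 0) return pathBytes === 0; // no path section at all
    return pathBytes % hopRowCount === 0;          // hash_size = pathBytes / hopRowCount
  }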

  // Test: clicking a different observation row re-renders strip + breakdown consistently.
  // Regression: observations of the same packet hash have different raw_hex (#882),
  // so picking a different obs must recompute the byte ranges, not reuse the old ones.
  await test('Packet detail switches consistently across observations', async () => {
    await page.goto(BASE + '#/packets?groupByHash=1', { waitUntil: 'domcontentloaded' });
    await page.waitForSelector('table tbody tr', { timeout: 15000 });
    await page.waitForTimeout(500);

    let opened = false;
    const groupRows = await page.$$('table tbody tr[data-action]');
    for (let i = 0; i < Math.min(groupRows.length, 10); i++) {
      await groupRows[i].click({ timeout: 3000 }).catch(() => null);
      await page.waitForTimeout(400);
      const obsCount = await page.evaluate(() => {
        return document.querySelectorAll('table.observations-table tbody tr, .obs-row').length;
      });
      if (obsCount >= 2) { opened = true; break; }
    }
    if (!opened) {
      const skipErr = new Error('SKIP: no multi-observation packet found in first 10 group rows');
      skipErr.skip = true;
      throw skipErr;
    }

    async function snapshot() {
      return page.evaluate(() => {
        const dump = document.querySelector('.hex-dump');
        const fieldTable = document.querySelector('table.field-table');
        if (!dump || !fieldTable) return null;
        const pathSpan = dump.querySelector('span.hex-byte.hex-path');
        const pathBytes = pathSpan ? pathSpan.textContent.trim().split(/\s+/).filter(Boolean).length : 0;
        const hopRows = [];
        for (const tr of fieldTable.querySelectorAll('tr')) {
          const cells = [...tr.cells].map(c => c.textContent.trim());
          if (cells.length >= 2 && /^Hop\s+\d+/.test(cells[1])) hopRows.push(cells[2]);
        }
        const rawHexParts = [...dump.querySelectorAll('span.hex-byte')].map(s => s.textContent.trim());
        return { pathBytes, hopCount: hopRows.length, rawHexJoined: rawHexParts.join('|') };
      });
    }

    const snapA = await snapshot();
    assert(snapA, 'first snapshot must have hex dump + field table');
    assert(snapA.hopCount === 0 || snapA.pathBytes >= snapA.hopCount,
      `obs A inconsistent: hex-path ${snapA.pathBytes} bytes vs ${snapA.hopCount} hop rows`);

    const switched = await page.evaluate(() => {
      const obsRows = [...document.querySelectorAll('table.observations-table tbody tr, .obs-row')];
      if (obsRows.length < 2) return false;
      obsRows[1].click();
      return true;
    });
    assert(switched, 'should click second observation row');
    await page.waitForTimeout(500);

    const snapB = await snapshot();
    assert(snapB, 'second snapshot must have hex dump + field table');
    assert(snapB.hopCount === 0 || snapB.pathBytes >= snapB.hopCount,
      `obs B inconsistent: hex-path ${snapB.pathBytes} bytes vs ${snapB.hopCount} hop rows`);
    console.log(` ✓ obs A: ${snapA.pathBytes} path bytes / ${snapA.hopCount} hops; obs B: ${snapB.pathBytes} / ${snapB.hopCount}`);
  });

  // Test: clicking the 🔍 Details button in the nodes side panel navigates to
  // the full-screen node detail view. Regression: hash already === target,
  // so location.hash assignment was a no-op and the panel stayed open.
  await test('Nodes side panel Details button opens full-screen view', async () => {
    await page.goto(BASE + '#/nodes', { waitUntil: 'domcontentloaded' });
    await page.waitForSelector('table tbody tr[data-action]', { timeout: 15000 });
    await page.waitForTimeout(500);
    // Open side panel
    await page.click('table tbody tr[data-action]');
    await page.waitForSelector('#nodesRight .node-detail-btn', { timeout: 5000 });
    // Click Details
    await page.click('#nodesRight .node-detail-btn');
    // Wait for full-screen view to appear
    await page.waitForSelector('.node-fullscreen', { timeout: 5000 });
    const isFullScreen = await page.evaluate(() => !!document.querySelector('.node-fullscreen'));
    assert(isFullScreen, 'Details button should open full-screen node view');
  });

  // === Hash color toggle E2E tests (#946) ===

  await test('Color-by-hash toggle present on Live page, defaults ON', async () => {
    await page.goto(BASE + '#/live', { waitUntil: 'domcontentloaded' });
    // Wait until live.js has initialized the toggle (checked = true by default)
    await page.waitForFunction(() => {
      const el = document.getElementById('liveColorHashToggle');
      return el && el.checked === true;
    }, { timeout: 10000 });
    const checked = await page.$eval('#liveColorHashToggle', el => el.checked);
    assert(checked, 'Color by hash toggle should default to ON');
  });

  await test('Color-by-hash toggle persists across reload', async () => {
    await page.goto(BASE + '#/live', { waitUntil: 'domcontentloaded' });
    await page.waitForSelector('#liveColorHashToggle', { timeout: 10000 });
    // Uncheck toggle
    await page.click('#liveColorHashToggle');
    const unchecked = await page.$eval('#liveColorHashToggle', el => !el.checked);
    assert(unchecked, 'Toggle should be OFF after click');
    // Reload
    await page.goto(BASE + '#/live', { waitUntil: 'domcontentloaded' });
    await page.waitForSelector('#liveColorHashToggle', { timeout: 10000 });
    const afterReload = await page.$eval('#liveColorHashToggle', el => !el.checked);
    assert(afterReload, 'Toggle OFF state should persist after reload');
    // Reset to ON for other tests
    await page.click('#liveColorHashToggle');
  });

  await test('Packets table rows have border-left stripe when toggle ON', async () => {
    await page.evaluate(() => localStorage.setItem('meshcore-color-packets-by-hash', 'true'));
    // Hard reload to re-init page handler with the new toggle state.
    // page.goto with same hash URL is a no-op for re-rendering.
    await page.goto(BASE + '#/packets', { waitUntil: 'domcontentloaded' });
    await page.reload({ waitUntil: 'domcontentloaded' });
    await page.waitForSelector('table tbody tr[data-hash]', { timeout: 15000 });
    // Wait for hash stripe to be applied (inline style set during render).
    // Assert specifically 4px (per spec §2.10) so we don't false-pass on the
    // 3px channel-color highlight which is independent of this toggle.
    const hasStripe = await page.waitForFunction(() => {
      const row = document.querySelector('table tbody tr[data-hash]');
      return row && (row.getAttribute('style') || '').includes('border-left:4px');
    }, { timeout: 5000 }).then(() => true).catch(() => false);
    assert(hasStripe, 'At least one <tr> should have hash-color border-left:4px stripe when toggle ON');
  });

  await test('Packets table rows have NO border-left stripe when toggle OFF', async () => {
    await page.evaluate(() => {
      localStorage.setItem('meshcore-color-packets-by-hash', 'false');
    });
    // Hard reload (page.goto with same hash URL no-ops — must reload to re-init
    // the page handler and re-render rows with the new toggle state).
    await page.reload({ waitUntil: 'domcontentloaded' });
    await page.waitForSelector('table tbody tr[data-hash]', { timeout: 15000 });
    await page.waitForTimeout(500);
    const noStripe = await page.evaluate(() => {
      const rows = document.querySelectorAll('table tbody tr[data-hash]');
      for (const r of rows) {
        // Hash stripe is 4px (per spec §2.10). Channel-color highlight uses
        // 3px and is independent of the hash-color toggle. Only assert no
        // 4px hash stripe is present.
        if ((r.getAttribute('style') || '').includes('border-left:4px')) return false;
      }
      return true;
    });
    assert(noStripe, 'No <tr> should have hash-color border-left:4px stripe when toggle OFF');
    // Reset
    await page.evaluate(() => localStorage.setItem('meshcore-color-packets-by-hash', 'true'));
  });

  // --- Live feed hash-color stripe ---
  await test('Live feed items have border-left stripe when toggle ON', async () => {
    await page.evaluate(() => localStorage.setItem('meshcore-color-packets-by-hash', 'true'));
    await page.goto(BASE + '/#/live');
    await page.waitForTimeout(3000); // allow feed to populate
    const hasStripe = await page.evaluate(() => {
      const items = document.querySelectorAll('.live-feed-item');
      for (const item of items) {
        if ((item.getAttribute('style') || item.style.cssText || '').includes('border-left')) return true;
      }
      return false;
    });
    // May not have live packets in fixture — skip if no feed items
    const itemCount = await page.evaluate(() => document.querySelectorAll('.live-feed-item').length);
    if (itemCount === 0) {
      console.log(' (skipped — no live feed items in fixture)');
      return;
    }
    assert(hasStripe, 'At least one .live-feed-item should have hash-color border-left stripe when toggle ON');
  });

  // --- Map polyline uses hash color ---
  await test('Map trace polyline uses hash-derived color when toggle ON', async () => {
    await page.evaluate(() => localStorage.setItem('meshcore-color-packets-by-hash', 'true'));
    await page.goto(BASE + '/#/live');
    await page.waitForTimeout(3000);
    // Use the dedicated .live-packet-trace class so we don't pick up
    // unrelated leaflet paths (geofilter polygons, region overlays, etc).
    const pathCount = await page.evaluate(() => document.querySelectorAll('path.live-packet-trace').length);
    if (pathCount === 0) {
      console.log(' (skipped — no live-packet-trace polylines drawn in 3s window)');
      return;
    }
    const hasHslPolyline = await page.evaluate(() => {
      const paths = document.querySelectorAll('path.live-packet-trace');
      for (const p of paths) {
        const stroke = p.getAttribute('stroke') || '';
        if (stroke.startsWith('hsl(')) return true;
      }
      return false;
    });
    assert(hasHslPolyline, 'At least one live-packet-trace polyline should have hsl() stroke color from hash');
  });
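
  // Sketch (an assumed derivation, not the app's exact function): a stable
  // hsl() stroke derived from a packet hash, so the same packet renders the
  // same color in the table stripe, the live feed, and the map trace.
  function hashColorSketch(hash) {
    const hue = parseInt(hash.slice(0, 4), 16) % 360; // first two bytes pick the hue
    return `hsl(${hue}, 70%, 50%)`;
  }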

  await browser.close();

  // Summary
  const skipped = results.filter(r => r.skipped).length;
  const passed = results.filter(r => r.pass && !r.skipped).length;
  const passed = results.filter(r => r.pass).length;
  const failed = results.filter(r => !r.pass).length;
  console.log(`\n${passed}/${results.length} tests passed${skipped ? `, ${skipped} skipped` : ''}${failed ? `, ${failed} failed` : ''}`);
  console.log(`\n${passed}/${results.length} tests passed${failed ? `, ${failed} failed` : ''}`);
  process.exit(failed > 0 ? 1 : 0);
}

+17 -352
@@ -690,88 +690,6 @@ console.log('\n=== haversineKm (hop-resolver.js) ===');
  });
}

// ===== pickByAffinity — neighbor-graph + centroid scoring (#874) =====
console.log('\n=== pickByAffinity neighbor-graph scoring (#874) ===');
{
  const ctx = makeSandbox();
  ctx.IATA_COORDS_GEO = {};
  loadInCtx(ctx, 'public/hop-resolver.js');
  const HR = ctx.window.HopResolver;

  // Two nodes sharing prefix "ab", hundreds of km apart.
  // NodeSF is near San Francisco, NodeDEN is near Denver.
  const nodeSF = { public_key: 'ab11111111111111', name: 'NodeSF', lat: 37.7, lon: -122.4 };
  const nodeDEN = { public_key: 'ab22222222222222', name: 'NodeDEN', lat: 39.7, lon: -104.9 };
  // A known neighbor of NodeSF (in the graph)
  const nodeNeighbor = { public_key: 'cc33333333333333', name: 'SFNeighbor', lat: 37.8, lon: -122.3 };
  // Another known node near Denver
  const nodeDenNeighbor = { public_key: 'dd44444444444444', name: 'DENNeighbor', lat: 39.8, lon: -105.0 };

  test('#874: graph edge scoring picks correct regional candidate (SF)', () => {
    HR.init([nodeSF, nodeDEN, nodeNeighbor, nodeDenNeighbor]);
    HR.setAffinity({ edges: [
      { source: 'cc33333333333333', target: 'ab11111111111111', weight: 5 },
      { source: 'dd44444444444444', target: 'ab22222222222222', weight: 5 },
    ]});
    // Path: SFNeighbor → [ab??] → DENNeighbor
    // With graph edges, ab11 (NodeSF) has edge to SFNeighbor, ab22 (NodeDEN) has edge to DENNeighbor
    // Prev=SFNeighbor, Next=DENNeighbor → both have score 5, but SFNeighbor edge only to ab11
    const result = HR.resolve(['cc', 'ab', 'dd'], null, null, null, null);
    assert.strictEqual(result['ab'].name, 'NodeSF',
      'Should pick NodeSF because it has a graph edge to prev hop SFNeighbor');
  });

  test('#874: graph edge scoring — next hop breaks tie', () => {
    HR.init([nodeSF, nodeDEN, nodeNeighbor, nodeDenNeighbor]);
    HR.setAffinity({ edges: [
      { source: 'dd44444444444444', target: 'ab22222222222222', weight: 8 },
      // No edge from SFNeighbor to either ab node
    ]});
    // Path: SFNeighbor → [ab??] → DENNeighbor
    // Only ab22 (NodeDEN) has edge to DENNeighbor (next hop)
    const result = HR.resolve(['cc', 'ab', 'dd'], null, null, null, null);
    assert.strictEqual(result['ab'].name, 'NodeDEN',
      'Should pick NodeDEN because it has graph edge to next hop DENNeighbor');
  });

  test('#874: centroid fallback when no graph edges exist', () => {
    HR.init([nodeSF, nodeDEN, nodeNeighbor]);
    HR.setAffinity({ edges: [] }); // no edges at all
    // Path: SFNeighbor → [ab??]
    // SFNeighbor is at (37.8, -122.3), centroid is just that point
    // NodeSF (37.7, -122.4) is ~14km away, NodeDEN (39.7, -104.9) is ~1500km away
    const result = HR.resolve(['cc', 'ab'], null, null, null, null);
    assert.strictEqual(result['ab'].name, 'NodeSF',
      'Should pick NodeSF via centroid proximity to SFNeighbor');
  });

  test('#874: centroid uses average of prev+next positions', () => {
    // Prev near SF, next near Denver → centroid is midpoint (~Nevada)
    // NodeDEN is closer to Nevada midpoint than NodeSF
    const nodeMid = { public_key: 'ee55555555555555', name: 'MidNode', lat: 38.5, lon: -114.0 };
    HR.init([nodeSF, nodeDEN, nodeNeighbor, nodeDenNeighbor, nodeMid]);
    HR.setAffinity({ edges: [] });
    // Path: SFNeighbor → [ab??] → DENNeighbor
    // centroid = avg(37.8,-122.3, 39.8,-105.0) = (38.8, -113.65) — closer to Denver
    const result = HR.resolve(['cc', 'ab', 'dd'], null, null, null, null);
    assert.strictEqual(result['ab'].name, 'NodeDEN',
      'Should pick NodeDEN because centroid of SF+Denver neighbors is closer to Denver');
  });

  test('#874: fallback when no context at all', () => {
    HR.init([nodeSF, nodeDEN]);
    HR.setAffinity({ edges: [] });
    // Single ambiguous hop, no origin/observer, no neighbors
    const result = HR.resolve(['ab'], null, null, null, null);
    assert.ok(result['ab'].ambiguous || result['ab'].name != null,
      'Should resolve to some candidate without crashing');
  });
}
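
// A sketch of the centroid fallback the #874 tests exercise (assumed shape;
// HopResolver's real scoring also weighs neighbor-graph edges before falling
// back to geography). Candidates sharing the ambiguous prefix are ranked by
// distance to the average position of the already-resolved prev/next hops.
function pickByCentroidSketch(candidates, prevNode, nextNode) {
  const anchors = [prevNode, nextNode].filter(n => n && n.lat != null && n.lon != null);
  if (!anchors.length) return candidates[0]; // no geographic context: arbitrary pick
  const cLat = anchors.reduce((s, n) => s + n.lat, 0) / anchors.length;
  const cLon = anchors.reduce((s, n) => s + n.lon, 0) / anchors.length;
  const d2 = n => (n.lat - cLat) ** 2 + (n.lon - cLon) ** 2; // planar approximation
  return candidates.slice().sort((a, b) => d2(a) - d2(b))[0];
}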

// ===== SNR/RSSI Number casting =====
{
  // These test the pattern used in observer-detail.js, home.js, traces.js, live.js
@@ -1804,128 +1722,6 @@ console.log('\n=== app.js: formatEngineBadge ===');
  });
}

// ===== APP.JS: computeBreakdownRanges =====
console.log('\n=== app.js: computeBreakdownRanges ===');
{
  const ctx = makeSandbox();
  loadInCtx(ctx, 'public/roles.js');
  loadInCtx(ctx, 'public/app.js');
  const computeBreakdownRanges = ctx.computeBreakdownRanges;

  function findRange(ranges, label) {
    return ranges.find(r => r.label === label);
  }

  test('returns [] for empty hex', () => {
    assert.deepEqual(computeBreakdownRanges('', 1, 5), []);
  });

  test('returns [] for too-short hex (< 2 bytes)', () => {
    assert.deepEqual(computeBreakdownRanges('15', 1, 5), []);
  });

  test('FLOOD non-transport: 4-hop hash_size=1', () => {
    // header=15, plb=04 → hash_size=1, hash_count=4
    // bytes: 15 04 90 FA F9 10 6E 01 D9
    const r = computeBreakdownRanges('150490FAF910 6E01D9'.replace(/\s/g,''), 1, 5);
    assert.deepEqual(findRange(r, 'Header'), { start: 0, end: 0, label: 'Header' });
    assert.deepEqual(findRange(r, 'Path Length'), { start: 1, end: 1, label: 'Path Length' });
    assert.deepEqual(findRange(r, 'Path'), { start: 2, end: 5, label: 'Path' });
    assert.deepEqual(findRange(r, 'Payload'), { start: 6, end: 8, label: 'Payload' });
    assert.strictEqual(findRange(r, 'Transport Codes'), undefined);
  });

  test('FLOOD non-transport: 7-hop hash_size=1', () => {
    // header=15, plb=07
    const hex = '15077f6d7d1cadeca33988fd95e0851ebf01ea12e1879e';
    const r = computeBreakdownRanges(hex, 1, 5);
    assert.deepEqual(findRange(r, 'Path'), { start: 2, end: 8, label: 'Path' });
    const payload = findRange(r, 'Payload');
    assert.strictEqual(payload.start, 9, 'payload starts after the 7 path bytes');
  });

  test('FLOOD non-transport: 8-hop hash_size=1', () => {
    const hex = '1508' + '11223344556677AA' + 'BBCCDD';
    const r = computeBreakdownRanges(hex, 1, 5);
    assert.deepEqual(findRange(r, 'Path'), { start: 2, end: 9, label: 'Path' });
    assert.deepEqual(findRange(r, 'Payload'), { start: 10, end: 12, label: 'Payload' });
  });

  test('Direct advert: 0-hop, no Path range', () => {
    // plb=00 → 0 hops; expect Path Length but NO Path range
    const r = computeBreakdownRanges('1100AABBCCDD', 1, 4);
    assert.deepEqual(findRange(r, 'Path Length'), { start: 1, end: 1, label: 'Path Length' });
    assert.strictEqual(findRange(r, 'Path'), undefined);
  });

  test('Transport route shifts path-length offset by 4', () => {
    // route_type=0 (TRANSPORT_FLOOD): bytes 1..4 are Transport Codes
    // header=14, transport=AABBCCDD, plb=02, hops=11 22, payload=99
    const hex = '14AABBCCDD021122' + '99';
    const r = computeBreakdownRanges(hex, 0, 5);
    assert.deepEqual(findRange(r, 'Transport Codes'), { start: 1, end: 4, label: 'Transport Codes' });
    assert.deepEqual(findRange(r, 'Path Length'), { start: 5, end: 5, label: 'Path Length' });
    assert.deepEqual(findRange(r, 'Path'), { start: 6, end: 7, label: 'Path' });
    assert.deepEqual(findRange(r, 'Payload'), { start: 8, end: 8, label: 'Payload' });
  });

  test('hash_size=2 (plb top bits=01): 4 hops × 2 bytes', () => {
    // plb = 01 0001 00 = 0x44 → hash_size=2, hash_count=4 → 8 path bytes
    const hex = '15' + '44' + 'AABB' + 'CCDD' + 'EEFF' + '1122' + '9988';
    const r = computeBreakdownRanges(hex, 1, 5);
    assert.deepEqual(findRange(r, 'Path'), { start: 2, end: 9, label: 'Path' });
    assert.deepEqual(findRange(r, 'Payload'), { start: 10, end: 11, label: 'Payload' });
  });

  test('hash_size=3 (plb top bits=10): 2 hops × 3 bytes', () => {
    // plb = 10 0000 10 = 0x82 → hash_size=3, hash_count=2 → 6 path bytes
    const hex = '15' + '82' + 'AABBCC' + 'DDEEFF' + '99';
    const r = computeBreakdownRanges(hex, 1, 5);
    assert.deepEqual(findRange(r, 'Path'), { start: 2, end: 7, label: 'Path' });
    assert.deepEqual(findRange(r, 'Payload'), { start: 8, end: 8, label: 'Payload' });
  });

  test('hash_size=4 (plb top bits=11): 2 hops × 4 bytes', () => {
    // plb = 11 0000 10 = 0xC2 → hash_size=4, hash_count=2 → 8 path bytes
    const hex = '15' + 'C2' + 'AABBCCDD' + 'EEFF1122' + '99887766';
    const r = computeBreakdownRanges(hex, 1, 5);
    assert.deepEqual(findRange(r, 'Path'), { start: 2, end: 9, label: 'Path' });
    assert.deepEqual(findRange(r, 'Payload'), { start: 10, end: 13, label: 'Payload' });
  });
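
  // A decoder sketch consistent with the fixtures above (an assumption about
  // the plb byte layout inferred from these cases, not quoted from app.js):
  // the top 2 bits select hash_size (00→1, 01→2, 10→3, 11→4) and the low 6
  // bits carry hash_count, so the Path section spans hash_size * hash_count bytes.
  function decodePlbSketch(plb) {
    const hashSize = ((plb >> 6) & 0x03) + 1;
    const hashCount = plb & 0x3f;
    return { hashSize, hashCount, pathBytes: hashSize * hashCount };
  }
  // decodePlbSketch(0x44) → { hashSize: 2, hashCount: 4, pathBytes: 8 }
  // decodePlbSketch(0x82) → { hashSize: 3, hashCount: 2, pathBytes: 6 }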
|
||||
|
||||
test('truncated path: not enough bytes → no Path range', () => {
|
||||
// plb=04 says 4 hops but only 2 bytes remain
|
||||
const hex = '1504AABB';
|
||||
const r = computeBreakdownRanges(hex, 1, 5);
|
||||
assert.strictEqual(findRange(r, 'Path'), undefined);
|
||||
});
|
||||
|
||||
test('ADVERT (payload_type=4) with full record: PubKey/Timestamp/Signature/Flags', () => {
|
||||
// header=11, plb=00 (direct advert)
|
||||
// payload: 32 bytes pubkey + 4 bytes ts + 64 bytes sig + 1 byte flags
|
||||
const pubkey = 'AB'.repeat(32);
|
||||
const ts = '11223344';
|
||||
const sig = 'CD'.repeat(64);
|
||||
const flags = '00';
|
||||
const hex = '1100' + pubkey + ts + sig + flags;
|
||||
const r = computeBreakdownRanges(hex, 1, 4);
|
||||
assert.deepEqual(findRange(r, 'PubKey'), { start: 2, end: 33, label: 'PubKey' });
|
||||
assert.deepEqual(findRange(r, 'Timestamp'), { start: 34, end: 37, label: 'Timestamp' });
|
||||
assert.deepEqual(findRange(r, 'Signature'), { start: 38, end: 101, label: 'Signature' });
|
||||
assert.deepEqual(findRange(r, 'Flags'), { start: 102, end: 102, label: 'Flags' });
|
||||
});
|
||||
|
||||
test('NaN-safe: malformed path-length byte produces no Path range', () => {
|
||||
// hex with non-hex char in plb position would parseInt-fail → bail
|
||||
// Use a 1-byte payload that makes pathByte parseInt produce NaN-ish via X
|
||||
// (parseInt of 'XY' is NaN). Since fs reads only hex chars, simulate via short hex.
|
||||
// Easier: empty string already returns []; 1-byte returns []. Both covered above.
|
||||
// Use plb=FF (hash_size=4, hash_count=63) too long for input → no Path
|
||||
const r = computeBreakdownRanges('15FF' + 'AA', 1, 5);
|
||||
assert.strictEqual(findRange(r, 'Path'), undefined);
|
||||
});
|
||||
}
|
||||
|
||||
// ===== APP.JS: isTransportRoute + transportBadge =====
|
||||
console.log('\n=== app.js: isTransportRoute + transportBadge ===');
|
||||
{
|
||||
@@ -5666,33 +5462,40 @@ console.log('\n=== packets.js: buildFieldTable hop count from path_len (#844) ==
|
||||
loadInCtx(ftCtx, 'public/packets.js');
|
||||
const { buildFieldTable } = ftCtx.window._packetsTestAPI;
|
||||
|
||||
test('#885: byte breakdown uses pathHops length (single source of truth)', () => {
|
||||
// After #885 the byte breakdown agrees with the path pill: both render
|
||||
// from the per-observation path_json. raw_hex is the underlying bytes
|
||||
// for that same observation, so consistency is by construction.
|
||||
test('#844: byte breakdown uses path_len hop count, not aggregated _parsedPath', () => {
|
||||
// path_len = 0x42 → hash_size=2, hash_count=2
|
||||
// raw_hex: header(11) + path_len(42) + hop0(41B1) + hop1(27D7) + pubkey(32 bytes)...
|
||||
const pubkey = 'C0DEDAD4'.padEnd(64, '0'); // 32 bytes = 64 hex chars
|
||||
const raw = '1142' + '41B1' + '27D7' + pubkey + '00000000' + '0'.repeat(128);
|
||||
const pkt = { raw_hex: raw, route_type: 1, payload_type: 0 };
|
||||
// Per-obs path_json IS the source of truth — pass the 2 hops that match raw_hex.
|
||||
const pathHops = ['41B1', '27D7'];
|
||||
// Pass aggregated pathHops with 7 hops (mismatched)
|
||||
const pathHops = ['41B1', '5EB0', '1000', '2DD2', '52F8', '9535', '762B'];
|
||||
const html = buildFieldTable(pkt, {}, pathHops, {});
|
||||
|
||||
assert.ok(html.includes('Path (2 hops)'), 'Should show "Path (2 hops)"');
|
||||
// Section header should say "2 hops", not "7 hops"
|
||||
assert.ok(html.includes('Path (2 hops)'), 'Should show "Path (2 hops)" from path_len, got: ' +
|
||||
(html.match(/Path \(\d+ hops\)/)?.[0] || 'no match'));
|
||||
assert.ok(!html.includes('Path (7 hops)'), 'Should NOT show 7 hops from aggregated path');
|
||||
|
||||
// Should contain hop values from raw_hex
|
||||
assert.ok(html.includes('41B1'), 'Should show hop 0 = 41B1');
|
||||
assert.ok(html.includes('27D7'), 'Should show hop 1 = 27D7');
|
||||
|
||||
// Should NOT contain hops from aggregated path that aren't in raw_hex
|
||||
assert.ok(!html.includes('5EB0'), 'Should NOT show aggregated hop 5EB0');
|
||||
assert.ok(!html.includes('9535'), 'Should NOT show aggregated hop 9535');
|
||||
});
|
||||
|
||||
test('#885: pubkey offset advances by hashSize * pathHops.length', () => {
|
||||
test('#844: pubkey offset correct after 2-hop path (not after 7-hop)', () => {
|
||||
const pubkey = 'C0DEDAD4'.padEnd(64, '0');
|
||||
const raw = '1142' + '41B1' + '27D7' + pubkey + '00000000' + '0'.repeat(128);
|
||||
const pkt = { raw_hex: raw, route_type: 1, payload_type: 0 };
|
||||
const html = buildFieldTable(pkt, { type: 'ADVERT', pubKey: pubkey }, ['41B1', '27D7'], {});
|
||||
const html = buildFieldTable(pkt, { type: 'ADVERT', pubKey: pubkey }, ['41B1','5EB0','1000','2DD2','52F8','9535','762B'], {});
|
||||
|
||||
// Public Key should be at offset 6 (1 header + 1 path_len + 2*2 hops = 6)
|
||||
// Not at offset 16 (1 + 1 + 2*7 = 16)
|
||||
assert.ok(html.includes('>6<') || html.includes('"6"'),
|
||||
'Public Key should be at offset 6');
|
||||
'Public Key should be at offset 6, not 16');
|
||||
});
|
||||
|
||||
test('#844: hashCountVal=0 (direct advert) skips Path section', () => {
|
||||
@@ -6313,144 +6116,6 @@ console.log('\n=== analytics.js: renderCollisionsFromServer collision table ==='
|
||||
});
|
||||
}
|
||||
|
||||
// ===== Issue #866: Full-page obs-switch — hex + path must update per observation =====
|
||||
{
|
||||
console.log('\n=== Issue #866: Full-page observation switch ===');
|
||||
|
||||
const ctx866 = makeSandbox();
|
||||
loadInCtx(ctx866, 'public/roles.js');
|
||||
loadInCtx(ctx866, 'public/app.js');
|
||||
loadInCtx(ctx866, 'public/packet-helpers.js');
|
||||
|
||||
test('#866: switching observation updates effectivePkt path_json', () => {
|
||||
const pkt = { id: 1, hash: 'abc123', observer_id: 'obs-agg', path_json: '["A","B","C","D"]', raw_hex: '0484A1B1C1D1', route_type: 1, timestamp: '2026-01-01T00:00:00Z' };
|
||||
const obs1 = { id: 10, observer_id: 'obs-1', path_json: '["A","B"]', snr: 5, rssi: -80, timestamp: '2026-01-01T00:01:00Z' };
|
||||
const obs2 = { id: 20, observer_id: 'obs-2', path_json: '["A","B","C","D"]', snr: 8, rssi: -75, timestamp: '2026-01-01T00:02:00Z' };
|
||||
|
||||
// Simulate renderDetail logic: pick obs1
|
||||
const eff1 = ctx866.clearParsedCache({...pkt, ...obs1, _isObservation: true});
|
||||
const path1 = ctx866.getParsedPath(eff1);
|
||||
assert.deepStrictEqual(path1, ['A', 'B']);
|
||||
assert.strictEqual(eff1.observer_id, 'obs-1');
|
||||
assert.strictEqual(eff1.snr, 5);
|
||||
|
||||
// Switch to obs2
|
||||
const eff2 = ctx866.clearParsedCache({...pkt, ...obs2, _isObservation: true});
|
||||
const path2 = ctx866.getParsedPath(eff2);
|
||||
assert.deepStrictEqual(path2, ['A', 'B', 'C', 'D']);
|
||||
assert.strictEqual(eff2.observer_id, 'obs-2');
|
||||
assert.strictEqual(eff2.snr, 8);
|
||||
});
|
||||
|
||||
test('#866: effectivePkt preserves raw_hex from packet when obs has none', () => {
const pkt = { id: 1, hash: 'h1', raw_hex: '0482AABB', route_type: 1 };
const obs = { id: 10, observer_id: 'obs-1', path_json: '["AA"]', snr: 3, rssi: -90, timestamp: '2026-01-01T00:00:00Z' };
const eff = ctx866.clearParsedCache({...pkt, ...obs, _isObservation: true});
// obs doesn't have raw_hex, so packet's raw_hex survives spread
assert.strictEqual(eff.raw_hex, '0482AABB');
});

test('#866: effectivePkt uses obs raw_hex when available (API now returns it)', () => {
const pkt = { id: 1, hash: 'h1', raw_hex: '0482AABB', route_type: 1 };
const obs = { id: 10, observer_id: 'obs-1', raw_hex: '0441CC', path_json: '["CC"]', snr: 3, rssi: -90, timestamp: '2026-01-01T00:00:00Z' };
const eff = ctx866.clearParsedCache({...pkt, ...obs, _isObservation: true});
// obs has raw_hex from API, should override
assert.strictEqual(eff.raw_hex, '0441CC');
});

test('#866: direction field carried through observation spread', () => {
const pkt = { id: 1, hash: 'h1', direction: 'rx', route_type: 1 };
const obs = { id: 10, observer_id: 'obs-1', direction: 'tx', path_json: '[]', timestamp: '2026-01-01T00:00:00Z' };
const eff = {...pkt, ...obs, _isObservation: true};
assert.strictEqual(eff.direction, 'tx');
});

test('#866: resolved_path carried through observation spread', () => {
const pkt = { id: 1, hash: 'h1', resolved_path: '["aaa","bbb","ccc"]', route_type: 1 };
const obs = { id: 10, observer_id: 'obs-1', resolved_path: '["aaa"]', path_json: '["AA"]', timestamp: '2026-01-01T00:00:00Z' };
const eff = ctx866.clearParsedCache({...pkt, ...obs, _isObservation: true});
const rp = ctx866.getResolvedPath(eff);
assert.deepStrictEqual(rp, ['aaa']);
});

test('#866: getPathLenOffset used for hop count cross-check', () => {
// Flood route: offset 1
assert.strictEqual(ctx866.getPathLenOffset(1), 1);
assert.strictEqual(ctx866.getPathLenOffset(2), 1);
// Transport route: offset 5
assert.strictEqual(ctx866.getPathLenOffset(0), 5);
assert.strictEqual(ctx866.getPathLenOffset(3), 5);
});

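A sketch of the `getPathLenOffset` contract pinned down above, assuming route types 1 and 2 are flood and everything else is transport (the comments in the test say exactly this):

```js
// Hypothetical reimplementation for illustration; the real function lives in the app bundle.
function getPathLenOffset(routeType) {
  // Flood routes put path_len right after the header byte; transport routes
  // carry four extra bytes before it, hence offset 5.
  return (routeType === 1 || routeType === 2) ? 1 : 5;
}
```
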
test('#866: URL hash should encode obs parameter for deep linking', () => {
// Simulate the URL construction pattern from renderDetail obs click
const pktHash = 'abc123def456';
const obsId = '42';
const url = `#/packets/${pktHash}?obs=${obsId}`;
assert.strictEqual(url, '#/packets/abc123def456?obs=42');
// Parse back
const qIdx = url.indexOf('?');
const qs = new URLSearchParams(url.substring(qIdx));
assert.strictEqual(qs.get('obs'), '42');
});
}

// ===== #872 — hop-display unreliable badge =====
{
console.log('\n--- #872: hop-display unreliable warning badge ---');

function makeHopDisplaySandbox() {
const sb = {
window: { addEventListener: () => {}, dispatchEvent: () => {} },
document: {
readyState: 'complete',
createElement: () => ({ id: '', textContent: '', innerHTML: '' }),
head: { appendChild: () => {} },
getElementById: () => null,
addEventListener: () => {},
querySelectorAll: () => [],
querySelector: () => null,
},
console,
Date, Math, Array, Object, String, Number, JSON, RegExp, Map, Set,
encodeURIComponent, parseInt, parseFloat, isNaN, Infinity, NaN, undefined,
setTimeout: () => {}, setInterval: () => {}, clearTimeout: () => {}, clearInterval: () => {},
};
sb.window.document = sb.document;
sb.self = sb.window;
sb.globalThis = sb.window;
const ctx = vm.createContext(sb);
const hopSrc = fs.readFileSync(__dirname + '/public/hop-display.js', 'utf8');
vm.runInContext(hopSrc, ctx);
return ctx;
}

const hopCtx = makeHopDisplaySandbox();

test('#872: unreliable hop renders warning badge, not strikethrough', () => {
const html = hopCtx.window.HopDisplay.renderHop('AABB', {
name: 'TestNode', pubkey: 'pk123', unreliable: true,
ambiguous: false, conflicts: [], globalFallback: false,
}, {});
// Must contain unreliable warning badge button
assert.ok(html.includes('hop-unreliable-btn'), 'should have unreliable badge button');
assert.ok(html.includes('⚠️'), 'should have ⚠️ icon');
assert.ok(html.includes('Unreliable name resolution'), 'should have tooltip text');
// Must NOT contain line-through in inline style (CSS class no longer has it)
assert.ok(!html.includes('line-through'), 'should not contain line-through');
// Should still have hop-unreliable class for subtle styling
assert.ok(html.includes('hop-unreliable'), 'should have hop-unreliable class');
});

test('#872: reliable hop does NOT render unreliable badge', () => {
const html = hopCtx.window.HopDisplay.renderHop('CCDD', {
name: 'GoodNode', pubkey: 'pk456', unreliable: false,
ambiguous: false, conflicts: [], globalFallback: false,
}, {});
assert.ok(!html.includes('hop-unreliable-btn'), 'should not have unreliable badge');
});
}

// ===== SUMMARY =====
Promise.allSettled(pendingTests).then(() => {
console.log(`\n${'═'.repeat(40)}`);

@@ -1,150 +0,0 @@
/* test-hash-color.js — Unit tests for hash-color.js (vm.createContext sandbox)
 * Tests: purity, theme split, saturation variability, lightness variability,
 * outline darker than fill, sentinel, perceptual distance
 */
'use strict';
const vm = require('vm');
const fs = require('fs');
const path = require('path');

const src = fs.readFileSync(path.join(__dirname, 'public', 'hash-color.js'), 'utf8');

function createSandbox() {
const sandbox = { window: {}, module: {} };
vm.createContext(sandbox);
vm.runInContext(src, sandbox);
return sandbox.window.HashColor || sandbox.module.exports;
}

const HashColor = createSandbox();
let passed = 0;
let failed = 0;

function assert(cond, msg) {
if (cond) { passed++; console.log(' ✓ ' + msg); }
else { failed++; console.error(' ✗ ' + msg); }
}

function parseHsl(str) {
const m = str.match(/hsl\((\d+),\s*(\d+)%,\s*(\d+)%\)/);
if (!m) return null;
return { h: parseInt(m[1]), s: parseInt(m[2]), l: parseInt(m[3]) };
}

// --- Purity: same input → same output ---
console.log('Purity:');
const r1 = HashColor.hashToHsl('a1b2c3d4', 'light');
const r2 = HashColor.hashToHsl('a1b2c3d4', 'light');
assert(r1 === r2, 'Same hash+theme → identical output');
const r3 = HashColor.hashToHsl('a1b2c3d4', 'light');
assert(r1 === r3, 'Third call still identical (no internal state)');

// --- Theme split: light vs dark produce different L ---
console.log('Theme split:');
const light = HashColor.hashToHsl('ff00aa80', 'light');
const dark = HashColor.hashToHsl('ff00aa80', 'dark');
assert(light !== dark, 'Light and dark produce different colors for same hash');
const lightP = parseHsl(light);
const darkP = parseHsl(dark);
assert(lightP.l >= 50 && lightP.l <= 65, 'Light theme L in [50,65] (got ' + lightP.l + ')');
assert(darkP.l >= 55 && darkP.l <= 72, 'Dark theme L in [55,72] (got ' + darkP.l + ')');

// --- Saturation varies with byte 2 ---
console.log('Saturation variability (byte 2):');
const lowSat = HashColor.hashToHsl('000000ff', 'light'); // byte2=0x00
const highSat = HashColor.hashToHsl('0000ffff', 'light'); // byte2=0xff
const lowSatP = parseHsl(lowSat);
const highSatP = parseHsl(highSat);
assert(lowSatP.s === 55, 'byte2=0x00 → S=55% (got ' + lowSatP.s + ')');
assert(highSatP.s === 95, 'byte2=0xff → S=95% (got ' + highSatP.s + ')');
// Mid value
const midSat = HashColor.hashToHsl('00008000', 'light'); // byte2=0x80
const midSatP = parseHsl(midSat);
assert(midSatP.s > 55 && midSatP.s < 95, 'byte2=0x80 → S between 55 and 95 (got ' + midSatP.s + ')');

// --- Lightness varies with byte 3 ---
console.log('Lightness variability (byte 3):');
const lowL = HashColor.hashToHsl('00000000', 'light'); // byte3=0x00
const highL = HashColor.hashToHsl('000000ff', 'light'); // byte3=0xff
const lowLP = parseHsl(lowL);
const highLP = parseHsl(highL);
assert(lowLP.l === 50, 'byte3=0x00 light → L=50 (got ' + lowLP.l + ')');
assert(highLP.l === 65, 'byte3=0xff light → L=65 (got ' + highLP.l + ')');
const lowLD = HashColor.hashToHsl('00000000', 'dark');
const highLD = HashColor.hashToHsl('000000ff', 'dark');
assert(parseHsl(lowLD).l === 55, 'byte3=0x00 dark → L=55 (got ' + parseHsl(lowLD).l + ')');
assert(parseHsl(highLD).l === 72, 'byte3=0xff dark → L=72 (got ' + parseHsl(highLD).l + ')');

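Taken together, the deleted assertions pin down most of `hashToHsl`. A plausible reconstruction follows; the S and L ranges are exactly what the tests check, while the hue derivation from bytes 0-1 is an assumption the tests never constrain directly:

```js
// Sketch only: S spans 55-95% from byte 2, L spans 50-65% (light) or 55-72% (dark)
// from byte 3, and anything shorter than 8 hex chars returns the grey sentinel.
function hashToHsl(hash, theme) {
  if (!hash || typeof hash !== 'string' || hash.length < 8) return 'hsl(0, 0%, 50%)';
  const b = [0, 2, 4, 6].map(i => parseInt(hash.slice(i, i + 2), 16));
  if (b.some(Number.isNaN)) return 'hsl(0, 0%, 50%)';
  const h = Math.round((((b[0] << 8) | b[1]) / 65535) * 359); // assumed hue source
  const s = 55 + Math.round((b[2] / 255) * 40);
  const l = theme === 'dark'
    ? 55 + Math.round((b[3] / 255) * 17)
    : 50 + Math.round((b[3] / 255) * 15);
  return `hsl(${h}, ${s}%, ${l}%)`;
}
```
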
// --- Outline is darker than fill ---
console.log('Outline darker than fill:');
['a1b2c3d4', 'ff00aa80', '12345678', 'deadbeef'].forEach(h => {
['light', 'dark'].forEach(theme => {
const fill = parseHsl(HashColor.hashToHsl(h, theme));
const outline = parseHsl(HashColor.hashToOutline(h, theme));
assert(outline.l < fill.l, 'Outline L(' + outline.l + ') < Fill L(' + fill.l + ') for ' + h + '/' + theme);
});
});

// --- Outline same hue as fill ---
console.log('Outline same hue as fill:');
['a1b2c3d4', 'deadbeef'].forEach(h => {
const fill = parseHsl(HashColor.hashToHsl(h, 'light'));
const outline = parseHsl(HashColor.hashToOutline(h, 'light'));
assert(fill.h === outline.h, 'Hue matches: fill=' + fill.h + ' outline=' + outline.h + ' for ' + h);
});

// --- Sentinel: null/empty/short hash ---
console.log('Sentinel:');
assert(HashColor.hashToHsl(null, 'light') === 'hsl(0, 0%, 50%)', 'null → sentinel');
assert(HashColor.hashToHsl('', 'light') === 'hsl(0, 0%, 50%)', 'empty string → sentinel');
assert(HashColor.hashToHsl('ab', 'dark') === 'hsl(0, 0%, 50%)', 'too short (2 chars) → sentinel');
assert(HashColor.hashToHsl('abcdef', 'dark') === 'hsl(0, 0%, 50%)', '6 chars (need 8) → sentinel');
assert(HashColor.hashToHsl(undefined, 'dark') === 'hsl(0, 0%, 50%)', 'undefined → sentinel');
assert(HashColor.hashToOutline(null, 'light') === 'hsl(0, 0%, 30%)', 'null outline → sentinel');

// --- Variability: different hashes → different colors (anti-tautology) ---
console.log('Variability (anti-tautology):');
const colors = new Set();
['00008080', '80008080', 'ff008080', '00ff8080', 'ffff8080'].forEach(h => {
colors.add(HashColor.hashToHsl(h, 'light'));
});
assert(colors.size >= 4, 'At least 4 distinct colors from 5 different hashes (got ' + colors.size + ')');

// Adjacent hashes differ
const c1 = HashColor.hashToHsl('01008080', 'light');
const c2 = HashColor.hashToHsl('02008080', 'light');
assert(c1 !== c2, 'Adjacent hashes produce different colors');

// --- Perceptual distance: sample 50 hashes, compute pairwise HSL distance ---
console.log('Perceptual distance (50 sample hashes):');
function hslDistance(a, b) {
// Simple cylindrical distance: weight hue wrap, sat, lightness
var dh = Math.min(Math.abs(a.h - b.h), 360 - Math.abs(a.h - b.h)) / 180; // 0-1
var ds = Math.abs(a.s - b.s) / 100; // 0-1
var dl = Math.abs(a.l - b.l) / 100; // 0-1
return Math.sqrt(dh*dh + ds*ds + dl*dl);
}

const deterministicHashes = [];
for (var i = 0; i < 50; i++) {
var hex = ('0000000' + (i * 5347 + 12345).toString(16)).slice(-8);
deterministicHashes.push(hex);
}

const parsedColors = deterministicHashes.map(h => parseHsl(HashColor.hashToHsl(h, 'light')));
var distances = [];
for (var i = 0; i < parsedColors.length; i++) {
for (var j = i + 1; j < parsedColors.length; j++) {
distances.push(hslDistance(parsedColors[i], parsedColors[j]));
}
}
var avgDist = distances.reduce((a, b) => a + b, 0) / distances.length;
var minDist = Math.min(...distances);
console.log(' Avg pairwise HSL distance: ' + avgDist.toFixed(4));
console.log(' Min pairwise HSL distance: ' + minDist.toFixed(4));
assert(avgDist > 0.15, 'Average pairwise distance > 0.15 (got ' + avgDist.toFixed(4) + ')');
assert(minDist > 0.01, 'Min pairwise distance > 0.01 (got ' + minDist.toFixed(4) + ')');

// --- Summary ---
console.log('\n' + passed + ' passed, ' + failed + ' failed');
if (failed > 0) process.exit(1);
@@ -22,9 +22,9 @@ function assert(condition, msg) {

// ── Test nodes ──
// Two nodes share the same 1-byte prefix "ab"
const nodeA = { public_key: 'ab1111', name: 'NodeA', role: 'repeater', lat: 37.0, lon: -122.0 };
const nodeB = { public_key: 'ab2222', name: 'NodeB', role: 'repeater', lat: 38.0, lon: -123.0 };
const nodeC = { public_key: 'cd3333', name: 'NodeC', role: 'repeater', lat: 37.5, lon: -122.5 };
const nodeA = { public_key: 'ab1111', name: 'NodeA', lat: 37.0, lon: -122.0 };
const nodeB = { public_key: 'ab2222', name: 'NodeB', lat: 38.0, lon: -123.0 };
const nodeC = { public_key: 'cd3333', name: 'NodeC', lat: 37.5, lon: -122.5 };

console.log('\n=== HopResolver Affinity Tests ===\n');

@@ -88,65 +88,12 @@ assert(result5['ab'].name === 'NodeB', 'Should pick NodeB (highest affinity 0.9)'

// Test 6: Unambiguous hops are not affected by affinity
console.log('\nTest 6: Unambiguous hops unaffected by affinity');
const nodeD = { public_key: 'ee4444', name: 'NodeD', role: 'repeater', lat: 36.0, lon: -121.0 };
const nodeD = { public_key: 'ee4444', name: 'NodeD', lat: 36.0, lon: -121.0 };
HopResolver.init([nodeA, nodeB, nodeC, nodeD]);
HopResolver.setAffinity({ edges: [] });
const result6 = HopResolver.resolve(['ee44'], null, null, null, null, null);
assert(result6['ee44'].name === 'NodeD', 'Unique prefix resolves directly — got: ' + result6['ee44'].name);
assert(!result6['ee44'].ambiguous, 'Should not be marked ambiguous');

// Test 7: lat=0 / lon=0 candidates are NOT excluded (equator/prime-meridian bug fix)
console.log('\nTest 7: lat=0 / lon=0 candidates are included in geo scoring');
const nodeEquator = { public_key: 'ab5555', name: 'EquatorNode', role: 'repeater', lat: 0, lon: 10 };
const nodeFar = { public_key: 'ab6666', name: 'FarNode', role: 'repeater', lat: 60, lon: 60 };
const anchorNearEq = { public_key: 'cd7777', name: 'AnchorEq', role: 'repeater', lat: 1, lon: 11 };
HopResolver.init([nodeEquator, nodeFar, anchorNearEq]);
HopResolver.setAffinity({});
// Anchor near equator — EquatorNode (0,10) should be geo-closest
const result7 = HopResolver.resolve(['cd77', 'ab'], 1.0, 11.0, null, null, null);
assert(result7['ab'].name === 'EquatorNode',
'lat=0 candidate should be included and win by geo — got: ' + result7['ab'].name);

// Test 8: lon=0 candidate is also included
console.log('\nTest 8: lon=0 candidate is included in geo scoring');
const nodePrime = { public_key: 'ab8888', name: 'PrimeMeridian', role: 'repeater', lat: 10, lon: 0 };
const anchorNearPM = { public_key: 'cd9999', name: 'AnchorPM', role: 'repeater', lat: 11, lon: 1 };
HopResolver.init([nodePrime, nodeFar, anchorNearPM]);
HopResolver.setAffinity({});
const result8 = HopResolver.resolve(['cd99', 'ab'], 11.0, 1.0, null, null, null);
assert(result8['ab'].name === 'PrimeMeridian',
'lon=0 candidate should be included and win by geo — got: ' + result8['ab'].name);

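The regression Tests 7 and 8 target is the classic falsy-zero coordinate check: `lat=0` (equator) and `lon=0` (prime meridian) are valid positions but falsy in JavaScript. A sketch of the before/after predicate, with both names hypothetical:

```js
const hasCoordsBuggy = (n) => n.lat && n.lon;                                   // silently drops lat=0 or lon=0
const hasCoordsFixed = (n) => Number.isFinite(n.lat) && Number.isFinite(n.lon); // keeps them
```
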
// ── Role filter tests (#935) ──
console.log('\nTest: Role filter — companions excluded from prefixIdx');
const companion = { public_key: 'ab9999', name: 'Companion1', role: 'companion', lat: 37.0, lon: -122.0 };
const sensor = { public_key: 'ab7777', name: 'Sensor1', role: 'sensor', lat: 37.0, lon: -122.0 };
const repeater = { public_key: 'ab1234', name: 'Repeater1', role: 'repeater', lat: 37.0, lon: -122.0 };
const roomSrv = { public_key: 'ff1234', name: 'RoomSrv1', role: 'room_server', lat: 37.0, lon: -122.0 };

HopResolver.init([companion, sensor, repeater, roomSrv]);
HopResolver.setAffinity({});

// Prefix 'ab' should only resolve to repeater (companion/sensor excluded)
const r1 = HopResolver.resolve(['ab12'], 0, 0, null, null, null);
assert(r1['ab12'] && r1['ab12'].name === 'Repeater1',
'prefix ab12 resolves to Repeater1 not companion — got: ' + (r1['ab12'] && r1['ab12'].name));

// Prefix 'ff' should resolve to room_server
const r2 = HopResolver.resolve(['ff12'], 0, 0, null, null, null);
assert(r2['ff12'] && r2['ff12'].name === 'RoomSrv1',
'prefix ff12 resolves to RoomSrv1 — got: ' + (r2['ff12'] && r2['ff12'].name));

// Prefix that only matches companion should return nothing
const r3 = HopResolver.resolve(['ab99'], 0, 0, null, null, null);
assert(!r3['ab99'] || !r3['ab99'].name,
'prefix ab99 (companion only) resolves to nothing — got: ' + (r3['ab99'] && r3['ab99'].name));

// pubkeyIdx should still have companion (full pubkey lookup)
console.log('\nTest: pubkeyIdx still includes all roles');
const fromServer = HopResolver.resolveFromServer(['ab99'], [companion.public_key]);
assert(fromServer['ab99'] && fromServer['ab99'].name === 'Companion1',
'resolveFromServer finds companion by full pubkey — got: ' + (fromServer['ab99'] && fromServer['ab99'].name));

console.log('\n' + (passed + failed) + ' tests, ' + passed + ' passed, ' + failed + ' failed\n');
process.exit(failed > 0 ? 1 : 0);

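The role-filter tests above amount to a gate on which roles enter the prefix index, while the full-pubkey index stays unfiltered. A sketch under that reading; the `INFRA_ROLES` set and builder name are assumptions, not the repo's actual code:

```js
// Hypothetical index builder: only infrastructure roles are candidates for
// prefix-based hop resolution; companions/sensors stay reachable by full pubkey.
const INFRA_ROLES = new Set(['repeater', 'room_server']);
function buildPrefixIdx(nodes) {
  const idx = new Map();
  for (const n of nodes) {
    if (!INFRA_ROLES.has(n.role)) continue; // companion/sensor excluded here
    const p = n.public_key.slice(0, 4).toLowerCase();
    if (!idx.has(p)) idx.set(p, []);
    idx.get(p).push(n);
  }
  return idx;
}
```
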
@@ -928,56 +928,6 @@ console.log('\n=== live.js: source-level safety checks ===');
});
}

// ===== Node filter (M3 — #771) =====
console.log('\n=== live.js: node filter ===');
{
const ctx = makeLiveSandbox();
const pktInvolvesFilter = ctx.window._livePacketInvolvesFilterNode;
assert.ok(pktInvolvesFilter, '_livePacketInvolvesFilterNode must be exposed');

const makePkt = (hops) => ({ decoded: { path: { hops }, payload: {} } });

test('packetInvolvesFilterNode returns true when filter is empty', () => {
assert.strictEqual(pktInvolvesFilter(makePkt(['abcd1234']), []), true);
});

test('packetInvolvesFilterNode matches hop by prefix', () => {
assert.strictEqual(pktInvolvesFilter(makePkt(['abcd1234', 'ef012345']), ['abcd1234567890ab']), true);
});

test('packetInvolvesFilterNode matches full key against short hop', () => {
assert.strictEqual(pktInvolvesFilter(makePkt(['abcd']), ['abcd1234567890ab']), true);
});

test('packetInvolvesFilterNode returns false when no hop matches', () => {
assert.strictEqual(pktInvolvesFilter(makePkt(['ffff1234', '00001111']), ['abcd1234567890ab']), false);
});

test('packetInvolvesFilterNode matches any of multiple filter keys (OR logic)', () => {
assert.strictEqual(pktInvolvesFilter(makePkt(['ffff0000']), ['abcd1234', 'ffff0000']), true);
});

test('packetInvolvesFilterNode returns false for packet with no hops', () => {
assert.strictEqual(pktInvolvesFilter(makePkt([]), ['abcd1234']), false);
});

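A sketch matching the six behaviors just asserted; prefix matching runs both ways so a short hop can match a full key and vice versa (reimplementation for illustration, not the shipped function):

```js
function packetInvolvesFilterNode(pkt, filterKeys) {
  if (!filterKeys.length) return true; // empty filter shows everything
  const hops = (pkt.decoded && pkt.decoded.path && pkt.decoded.path.hops) || [];
  return hops.some((hop) => filterKeys.some((key) => {
    const a = hop.toLowerCase();
    const b = key.toLowerCase();
    return a.startsWith(b) || b.startsWith(a); // either side may be truncated
  }));
}
```
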
const getNodeFilterKeys = ctx.window._liveGetNodeFilterKeys;
assert.ok(getNodeFilterKeys, '_liveGetNodeFilterKeys must be exposed');

test('node filter defaults to empty array when localStorage is unset', () => {
assert.strictEqual(getNodeFilterKeys().length, 0);
});

test('node filter saves to localStorage when set', () => {
const setFilter = ctx.window._liveSetNodeFilter;
assert.ok(setFilter, '_liveSetNodeFilter must be exposed');
setFilter(['abcd1234', 'ef012345']);
assert.strictEqual(ctx.localStorage.getItem('live-node-filter'), 'abcd1234,ef012345');
setFilter([]);
assert.strictEqual(ctx.localStorage.getItem('live-node-filter'), '');
});
}

// ===== SUMMARY =====
Promise.allSettled(pendingTests).then(() => {
console.log(`\n${'═'.repeat(40)}`);

@@ -844,120 +844,6 @@ console.log('\n=== packets.js: _invalidateRowCounts / _refreshRowCountsIfDirty (
});
}

console.log('\n=== packets.js: buildPacketsParams ===');
{
const ctx = loadPacketsSandbox();
const api = ctx._packetsTestAPI;
assert(typeof api.buildPacketsParams === 'function', 'buildPacketsParams must be exported');

test('hash filter suppresses region — direct hash links work regardless of saved region', () => {
// This is the bug from URL https://analyzer.../#/packets?hash=178525e9f693aa7e
// when the user's saved RegionFilter excludes the packet's observer region.
// The hash is an exact identifier; ALL other filters must be ignored.
const p = api.buildPacketsParams({
filters: { hash: 'abc123' },
regionParam: 'SJC,SFO,OAK,MRY',
windowMin: 60,
groupByHash: false,
limit: 200,
});
assert.strictEqual(p.get('hash'), 'abc123');
assert.strictEqual(p.get('region'), null, 'region must NOT be set when hash is present');
assert.strictEqual(p.get('since'), null, 'since must NOT be set when hash is present');
});

test('hash filter suppresses ALL other filters — observer, node, channel too', () => {
const p = api.buildPacketsParams({
filters: { hash: 'h', node: 'n', observer: 'o', channel: 'c' },
regionParam: 'SJC',
windowMin: 60,
groupByHash: false,
limit: 200,
});
assert.strictEqual(p.get('hash'), 'h');
assert.strictEqual(p.get('node'), null);
assert.strictEqual(p.get('observer'), null);
assert.strictEqual(p.get('channel'), null);
assert.strictEqual(p.get('region'), null);
assert.strictEqual(p.get('since'), null);
});

test('hash filter suppresses region with default windowMin=0', () => {
const p = api.buildPacketsParams({
filters: { hash: 'deadbeef' },
regionParam: 'COA',
windowMin: 0,
groupByHash: false,
limit: 50,
});
assert.strictEqual(p.get('hash'), 'deadbeef');
assert.strictEqual(p.get('region'), null);
});

test('region applied normally when hash filter is absent', () => {
const p = api.buildPacketsParams({
filters: {},
regionParam: 'SJC,SFO',
windowMin: 60,
groupByHash: false,
limit: 200,
});
assert.strictEqual(p.get('region'), 'SJC,SFO', 'region must apply when no hash');
assert.strictEqual(p.get('hash'), null);
assert(p.get('since'), 'since must apply when no hash and windowMin>0');
});

test('observer/node/channel pass through normally when no hash', () => {
const p = api.buildPacketsParams({
filters: { observer: 'obs1', node: 'node1', channel: '#test' },
regionParam: '',
windowMin: 0,
groupByHash: false,
limit: 50,
});
assert.strictEqual(p.get('observer'), 'obs1');
assert.strictEqual(p.get('node'), 'node1');
assert.strictEqual(p.get('channel'), '#test');
});

test('region absent when regionParam empty — no spurious empty region= param', () => {
const p = api.buildPacketsParams({
filters: {},
regionParam: '',
windowMin: 0,
groupByHash: false,
limit: 50,
});
assert.strictEqual(p.get('region'), null);
});

test('groupByHash=true with hash sets groupByHash and omits expand', () => {
const p = api.buildPacketsParams({
filters: { hash: 'h' }, regionParam: '', windowMin: 0, groupByHash: true, limit: 50,
});
assert.strictEqual(p.get('groupByHash'), 'true');
assert.strictEqual(p.get('expand'), null);
assert.strictEqual(p.get('hash'), 'h');
});

test('groupByHash=false with hash sets expand=observations', () => {
const p = api.buildPacketsParams({
filters: { hash: 'h' }, regionParam: '', windowMin: 0, groupByHash: false, limit: 50,
});
assert.strictEqual(p.get('expand'), 'observations');
assert.strictEqual(p.get('groupByHash'), null);
assert.strictEqual(p.get('hash'), 'h');
});

test('groupByHash=false without hash sets expand=observations', () => {
const p = api.buildPacketsParams({
filters: {}, regionParam: '', windowMin: 0, groupByHash: false, limit: 50,
});
assert.strictEqual(p.get('expand'), 'observations');
assert.strictEqual(p.get('groupByHash'), null);
});
}

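The contract these tests draw: a hash is an exact identifier, so every other filter is suppressed; otherwise observer/node/channel, region, and a `since` window apply, and `groupByHash` trades off against `expand=observations`. A sketch under those rules (parameter shape taken from the fixtures, not the shipped implementation):

```js
function buildPacketsParams({ filters, regionParam, windowMin, groupByHash, limit }) {
  const p = new URLSearchParams();
  p.set('limit', String(limit));
  if (filters.hash) {
    p.set('hash', filters.hash); // exact id: ignore region/since/node/observer/channel
  } else {
    if (filters.observer) p.set('observer', filters.observer);
    if (filters.node) p.set('node', filters.node);
    if (filters.channel) p.set('channel', filters.channel);
    if (regionParam) p.set('region', regionParam); // empty string adds nothing
    if (windowMin > 0) p.set('since', new Date(Date.now() - windowMin * 60000).toISOString());
  }
  if (groupByHash) p.set('groupByHash', 'true');
  else p.set('expand', 'observations');
  return p;
}
```
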
// ===== SUMMARY =====
console.log(`\n${'='.repeat(40)}`);
console.log(`packets.js tests: ${passed} passed, ${failed} failed`);

@@ -1,87 +0,0 @@
// E2E tests for Path Inspector (spec §5 — Playwright).
// Run: npx playwright test test-path-inspector-e2e.js
// Requires: running server on BASE_URL (default http://localhost:3000).
'use strict';

const { test, expect } = require('@playwright/test');
const BASE_URL = process.env.BASE_URL || 'http://localhost:3000';

test.describe('Path Inspector — Map Side Pane (spec §2.7)', () => {
test('side pane present and collapsed by default', async ({ page }) => {
await page.goto(`${BASE_URL}/#/map`);
const pane = page.locator('#mapSidePane');
await expect(pane).toBeVisible();
await expect(pane).not.toHaveClass(/expanded/);
});

test('click toggle expands the pane', async ({ page }) => {
await page.goto(`${BASE_URL}/#/map`);
await page.click('#mapPaneToggle');
const pane = page.locator('#mapSidePane');
await expect(pane).toHaveClass(/expanded/);
});

test('submit valid prefixes renders candidates within 1s', async ({ page }) => {
await page.goto(`${BASE_URL}/#/map`);
await page.click('#mapPaneToggle');
await page.fill('#mapPiInput', '2c,a1,f4');
await page.click('#mapPiSubmit');
// Wait for results or error (both indicate API round-trip complete).
await expect(page.locator('#mapPiResults table, #mapPiResults .no-results, #mapPiError')).toBeVisible({ timeout: 1000 });
});

test('Show on Map button draws polyline on map', async ({ page }) => {
await page.goto(`${BASE_URL}/#/map`);
await page.click('#mapPaneToggle');
await page.fill('#mapPiInput', '2c,a1');
await page.click('#mapPiSubmit');
// Wait for results.
const btn = page.locator('#mapPiResults button[data-idx="0"]');
await btn.waitFor({ timeout: 2000 });
await btn.click();
// Check that route layer has SVG polyline paths drawn.
const svg = page.locator('#leaflet-map .leaflet-overlay-pane svg path');
await expect(svg.first()).toBeVisible({ timeout: 2000 });
});

test('switching candidate clears prior polyline', async ({ page }) => {
await page.goto(`${BASE_URL}/#/map`);
await page.click('#mapPaneToggle');
await page.fill('#mapPiInput', '2c,a1');
await page.click('#mapPiSubmit');
const btn0 = page.locator('#mapPiResults button[data-idx="0"]');
await btn0.waitFor({ timeout: 2000 });
await btn0.click();
// Click second candidate if available.
const btn1 = page.locator('#mapPiResults button[data-idx="1"]');
if (await btn1.isVisible()) {
await btn1.click();
// Prior route should be cleared — only one polyline group visible.
}
});
});

test.describe('Path Inspector — Standalone Page', () => {
test('deep link auto-fills and runs', async ({ page }) => {
await page.goto(`${BASE_URL}/#/tools/path-inspector?prefixes=2c,a1,f4`);
const input = page.locator('#path-inspector-input');
await expect(input).toHaveValue('2c,a1,f4');
// Should auto-submit and show results or error.
await expect(page.locator('#path-inspector-results table, #path-inspector-results .no-results, #path-inspector-error')).toBeVisible({ timeout: 2000 });
});

test('old #/traces/<hash> redirects to #/tools/trace/<hash>', async ({ page }) => {
await page.goto(`${BASE_URL}/#/traces/abc123`);
await page.waitForTimeout(500);
expect(page.url()).toContain('#/tools/trace/abc123');
});
});

test.describe('Path Inspector — Tools Landing (spec §2.8)', () => {
test('Tools nav shows landing with both entries', async ({ page }) => {
await page.goto(`${BASE_URL}/#/tools`);
await expect(page.locator('.tools-landing')).toBeVisible();
await expect(page.locator('a[href="#/tools/path-inspector"]')).toBeVisible();
await expect(page.locator('a[href*="#/tools/trace"]')).toBeVisible();
});
});
@@ -1,106 +0,0 @@
// test-path-inspector.js — vm.createContext sandbox tests for path-inspector.js
'use strict';
const vm = require('vm');
const fs = require('fs');
const assert = require('assert');

const src = fs.readFileSync(__dirname + '/public/path-inspector.js', 'utf8');

function createSandbox() {
const sandbox = {
window: {},
document: {
getElementById: () => ({ textContent: '', innerHTML: '', addEventListener: () => {}, querySelectorAll: () => [] }),
querySelectorAll: () => []
},
location: { hash: '#/tools/path-inspector' },
history: { replaceState: () => {} },
fetch: () => Promise.resolve({ ok: true, json: () => Promise.resolve({ candidates: [] }) }),
URLSearchParams: URLSearchParams,
registerPage: function () {},
escapeHtml: s => s,
console: console
};
sandbox.self = sandbox;
sandbox.globalThis = sandbox;
const ctx = vm.createContext(sandbox);
vm.runInContext(src, ctx);
return sandbox;
}

// Test: parsePrefixes accepts comma-separated.
(function testParseComma() {
const sb = createSandbox();
const result = sb.window.PathInspector.parsePrefixes('2C,A1,F4');
assert.strictEqual(JSON.stringify(result), JSON.stringify(['2c', 'a1', 'f4']));
console.log('✓ parsePrefixes comma-separated');
})();

// Test: parsePrefixes accepts space-separated.
(function testParseSpace() {
const sb = createSandbox();
const result = sb.window.PathInspector.parsePrefixes('2C A1 F4');
assert.strictEqual(JSON.stringify(result), JSON.stringify(['2c', 'a1', 'f4']));
console.log('✓ parsePrefixes space-separated');
})();

// Test: parsePrefixes accepts mixed.
(function testParseMixed() {
const sb = createSandbox();
const result = sb.window.PathInspector.parsePrefixes(' 2C, A1 F4 ');
assert.strictEqual(JSON.stringify(result), JSON.stringify(['2c', 'a1', 'f4']));
console.log('✓ parsePrefixes mixed separators');
})();

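All three cases are satisfied by a lowercase split on commas and/or whitespace. A minimal sketch consistent with the assertions above:

```js
// Hypothetical one-liner; drops empty tokens left by stray separators.
function parsePrefixes(input) {
  return input.trim().toLowerCase().split(/[\s,]+/).filter(Boolean);
}
```
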
// Test: validatePrefixes rejects empty.
(function testValidateEmpty() {
const sb = createSandbox();
const err = sb.window.PathInspector.validatePrefixes([]);
assert.ok(err !== null, 'should reject empty');
console.log('✓ validatePrefixes rejects empty');
})();

// Test: validatePrefixes rejects odd-length.
(function testValidateOdd() {
const sb = createSandbox();
const err = sb.window.PathInspector.validatePrefixes(['abc']);
assert.ok(err !== null && err.includes('Odd'), 'should reject odd-length');
console.log('✓ validatePrefixes rejects odd-length');
})();

// Test: validatePrefixes rejects >3 bytes.
(function testValidateTooLong() {
const sb = createSandbox();
const err = sb.window.PathInspector.validatePrefixes(['aabbccdd']);
assert.ok(err !== null && err.includes('too long'), 'should reject >3 bytes');
console.log('✓ validatePrefixes rejects >3 bytes');
})();

// Test: validatePrefixes rejects mixed lengths.
(function testValidateMixed() {
const sb = createSandbox();
const err = sb.window.PathInspector.validatePrefixes(['aa', 'bbcc']);
assert.ok(err !== null && err.includes('Mixed'), 'should reject mixed');
console.log('✓ validatePrefixes rejects mixed lengths');
})();

// Test: validatePrefixes accepts valid input.
(function testValidateValid() {
const sb = createSandbox();
const err = sb.window.PathInspector.validatePrefixes(['2c', 'a1', 'f4']);
assert.strictEqual(err, null);
console.log('✓ validatePrefixes accepts valid');
})();

// Test: validatePrefixes rejects invalid hex.
(function testValidateInvalidHex() {
const sb = createSandbox();
const err = sb.window.PathInspector.validatePrefixes(['zz']);
assert.ok(err !== null && err.includes('Invalid hex'), 'should reject invalid hex');
console.log('✓ validatePrefixes rejects invalid hex');
})();

// Anti-tautology: if validation were removed (always return null), the odd-length test would fail.
// Mental revert: validatePrefixes = () => null; → testValidateOdd would fail because err would be null.

console.log('\nAll path-inspector tests passed!');
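The validator's error classes follow from the keywords the assertions grep for ('Odd', 'too long', 'Mixed', 'Invalid hex'); the exact messages below are guesses around those keywords:

```js
// Hypothetical validator: returns an error string or null, mirroring the tests.
function validatePrefixes(prefixes) {
  if (!prefixes.length) return 'No prefixes given';
  if (prefixes.some((p) => /[^0-9a-f]/.test(p))) return 'Invalid hex in prefix';
  if (prefixes.some((p) => p.length % 2 !== 0)) return 'Odd-length prefix (whole bytes only)';
  if (prefixes.some((p) => p.length > 6)) return 'Prefix too long (max 3 bytes)';
  if (new Set(prefixes.map((p) => p.length)).size > 1) return 'Mixed prefix lengths';
  return null;
}
```
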
@@ -72,8 +72,7 @@ let polygon = null;
let closingLine = null;

function latLonPair(latlng) {
const w = latlng.wrap();
return [parseFloat(w.lat.toFixed(6)), parseFloat(w.lng.toFixed(6))];
return [parseFloat(latlng.lat.toFixed(6)), parseFloat(latlng.lng.toFixed(6))];
}

function render() {