mirror of
https://github.com/Kpa-clawbot/meshcore-analyzer.git
synced 2026-04-27 02:25:11 +00:00
Compare commits
38 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| b3c0da8a94 | |||
| 3778ba9c95 | |||
| 2fc68c4452 | |||
| 2fc5da33d3 | |||
| 5d8c52d2e5 | |||
| 016c820207 | |||
| 93f437f937 | |||
| ad97c0fdd1 | |||
| c7f655e419 | |||
| b1d89d7d9f | |||
| c173ab7e80 | |||
| 4664c90db4 | |||
| 2755dc3875 | |||
| 5228e67604 | |||
| 698514e5e6 | |||
| cf3a383bb2 | |||
| a45ac71508 | |||
| 016b87b33c | |||
| 889107a5e1 | |||
| 50f94603c1 | |||
| b799f54700 | |||
| d5b300a8ba | |||
| 2af4259eca | |||
| bf2e721dd7 | |||
| f20431d816 | |||
| f9cfad9cd4 | |||
| 96d0bbe487 | |||
| 6712da7d7c | |||
| 6aef83c82a | |||
| 9f14c74b3e | |||
| 0b8b1e91a6 | |||
| c678555e75 | |||
| 623ebc879b | |||
| 0b1924d401 | |||
| 0f502370c5 | |||
| e47c39ffda | |||
| 1499a55ba7 | |||
| f71e117cdd |
@@ -246,6 +246,12 @@ jobs:
|
||||
with:
|
||||
node-version: '22'
|
||||
|
||||
- name: Free disk space
|
||||
run: |
|
||||
docker system prune -af 2>/dev/null || true
|
||||
docker builder prune -af 2>/dev/null || true
|
||||
df -h /
|
||||
|
||||
- name: Build Go Docker image
|
||||
run: |
|
||||
echo "${GITHUB_SHA::7}" > .git-commit
|
||||
|
||||
@@ -33,7 +33,7 @@ public/ — Frontend (vanilla JS, one file per page) — ACTIVE, NOT
|
||||
style.css — Main styles, CSS variables for theming
|
||||
live.css — Live page styles
|
||||
home.css — Home page styles
|
||||
index.html — SPA shell, script/style tags with cache busters
|
||||
index.html — SPA shell, script/style tags with __BUST__ placeholder (auto-replaced at server startup)
|
||||
test-fixtures/ — Real data SQLite fixture from staging (used for E2E tests)
|
||||
scripts/ — Tooling (coverage collector, fixture capture, frontend instrumentation)
|
||||
```
|
||||
@@ -84,12 +84,8 @@ Every change that touches logic MUST have tests. For Go backend: `cd cmd/server
|
||||
### 2. No commit without browser validation
|
||||
After pushing, verify the change works in an actual browser. Use `browser profile=openclaw` against the running instance. Take a screenshot if the change is visual. If you can't validate it, say so — don't claim it works.
|
||||
|
||||
### 3. Cache busters — ALWAYS bump them
|
||||
Every time you change a `.js` or `.css` file in `public/`, bump the cache buster in `index.html`. This has caused 7 separate production regressions. Use:
|
||||
```bash
|
||||
NEWV=$(date +%s) && sed -i "s/v=[0-9]*/v=$NEWV/g" public/index.html
|
||||
```
|
||||
Do this in the SAME commit as the code change, not as a follow-up.
|
||||
### 3. Cache busters are automatic — do NOT manually edit them
|
||||
Cache busters are injected automatically by the Go server at startup. The `__BUST__` placeholder in `index.html` is replaced with a Unix timestamp when the server reads the file. No manual bumping needed — every server restart picks up new asset versions. Do NOT replace `__BUST__` with hardcoded timestamps.
|
||||
|
||||
### 4. Verify API response shape before building UI
|
||||
Before writing client code that consumes an API endpoint, check what the endpoint ACTUALLY returns. Use `curl` or check the server code. Don't assume fields exist — grouped packets (`groupByHash=true`) have different fields than raw packets. This has caused multiple breakages.
|
||||
@@ -351,7 +347,7 @@ One logical change per commit. Each commit is deployable. Each commit has its te
|
||||
|
||||
| Pitfall | Times it happened | Prevention |
|
||||
|---------|-------------------|------------|
|
||||
| Forgot cache busters | 7 | Always bump in same commit |
|
||||
| Forgot cache busters | 7 | Now automatic — `__BUST__` replaced at server startup |
|
||||
| Grouped packets missing fields | 3 | curl the actual API first |
|
||||
| last_seen vs last_heard mismatch | 4 | Always use `last_heard \|\| last_seen` |
|
||||
| CSS selectors don't match SVG | 2 | Manipulate SVG in JS after generation |
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
+12
-3
@@ -36,8 +36,9 @@ type Store struct {
|
||||
stmtUpsertNode *sql.Stmt
|
||||
stmtIncrementAdvertCount *sql.Stmt
|
||||
stmtUpsertObserver *sql.Stmt
|
||||
stmtGetObserverRowid *sql.Stmt
|
||||
stmtUpdateNodeTelemetry *sql.Stmt
|
||||
stmtGetObserverRowid *sql.Stmt
|
||||
stmtUpdateObserverLastSeen *sql.Stmt
|
||||
stmtUpdateNodeTelemetry *sql.Stmt
|
||||
}
|
||||
|
||||
// OpenStore opens or creates a SQLite DB at the given path, applying the
|
||||
@@ -369,6 +370,11 @@ func (s *Store) prepareStatements() error {
|
||||
return err
|
||||
}
|
||||
|
||||
s.stmtUpdateObserverLastSeen, err = s.db.Prepare("UPDATE observers SET last_seen = ? WHERE rowid = ?")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
s.stmtUpdateNodeTelemetry, err = s.db.Prepare(`
|
||||
UPDATE nodes SET
|
||||
battery_mv = COALESCE(?, battery_mv),
|
||||
@@ -428,13 +434,16 @@ func (s *Store) InsertTransmission(data *PacketData) (bool, error) {
|
||||
s.Stats.DuplicateTransmissions.Add(1)
|
||||
}
|
||||
|
||||
// Resolve observer_idx
|
||||
// Resolve observer_idx and update last_seen
|
||||
var observerIdx *int64
|
||||
if data.ObserverID != "" {
|
||||
var rowid int64
|
||||
err := s.stmtGetObserverRowid.QueryRow(data.ObserverID).Scan(&rowid)
|
||||
if err == nil {
|
||||
observerIdx = &rowid
|
||||
// Update observer last_seen on every packet to prevent
|
||||
// low-traffic observers from appearing offline (#463)
|
||||
_, _ = s.stmtUpdateObserverLastSeen.Exec(now, rowid)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -516,6 +516,56 @@ func TestInsertTransmissionWithObserver(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// #463: Verify that inserting a packet updates the observer's last_seen,
|
||||
// so low-traffic observers don't incorrectly appear offline.
|
||||
func TestInsertTransmissionUpdatesObserverLastSeen(t *testing.T) {
|
||||
s, err := OpenStore(tempDBPath(t))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer s.Close()
|
||||
|
||||
// Insert observer with an old last_seen
|
||||
if err := s.UpsertObserver("obs1", "Observer1", "SJC", nil); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
// Backdate last_seen to 2 hours ago
|
||||
oldTime := "2026-03-24T22:00:00Z"
|
||||
s.db.Exec("UPDATE observers SET last_seen = ? WHERE id = ?", oldTime, "obs1")
|
||||
|
||||
// Verify it was backdated
|
||||
var lastSeenBefore string
|
||||
s.db.QueryRow("SELECT last_seen FROM observers WHERE id = ?", "obs1").Scan(&lastSeenBefore)
|
||||
if lastSeenBefore != oldTime {
|
||||
t.Fatalf("expected last_seen=%s, got %s", oldTime, lastSeenBefore)
|
||||
}
|
||||
|
||||
// Insert a packet from this observer
|
||||
data := &PacketData{
|
||||
RawHex: "0A00D69F",
|
||||
Timestamp: "2026-03-25T01:00:00Z",
|
||||
ObserverID: "obs1",
|
||||
Hash: "lastseentest123456",
|
||||
RouteType: 2,
|
||||
PayloadType: 2,
|
||||
PathJSON: "[]",
|
||||
DecodedJSON: `{"type":"TXT_MSG"}`,
|
||||
}
|
||||
if _, err := s.InsertTransmission(data); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Verify last_seen was updated
|
||||
var lastSeenAfter string
|
||||
s.db.QueryRow("SELECT last_seen FROM observers WHERE id = ?", "obs1").Scan(&lastSeenAfter)
|
||||
if lastSeenAfter == oldTime {
|
||||
t.Error("observer last_seen was NOT updated after packet insertion — low-traffic observers will appear offline")
|
||||
}
|
||||
if lastSeenAfter != "2026-03-25T01:00:00Z" {
|
||||
t.Errorf("expected last_seen=2026-03-25T01:00:00Z, got %s", lastSeenAfter)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEndToEndIngest(t *testing.T) {
|
||||
s, err := OpenStore(tempDBPath(t))
|
||||
if err != nil {
|
||||
|
||||
@@ -3715,3 +3715,99 @@ func TestGetChannelMessagesAfterIngest(t *testing.T) {
|
||||
t.Errorf("newest message should be 'brand new message', got %q", lastMsg["text"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestIndexByNodePreCheck(t *testing.T) {
|
||||
store := &PacketStore{
|
||||
byNode: make(map[string][]*StoreTx),
|
||||
nodeHashes: make(map[string]map[string]bool),
|
||||
}
|
||||
|
||||
t.Run("indexes ADVERT with pubKey", func(t *testing.T) {
|
||||
tx := &StoreTx{Hash: "h1", DecodedJSON: `{"pubKey":"AABBCC","type":"ADVERT"}`}
|
||||
store.indexByNode(tx)
|
||||
if len(store.byNode["AABBCC"]) != 1 {
|
||||
t.Errorf("expected 1 entry for pubKey AABBCC, got %d", len(store.byNode["AABBCC"]))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("indexes destPubKey", func(t *testing.T) {
|
||||
tx := &StoreTx{Hash: "h2", DecodedJSON: `{"destPubKey":"DDEEFF","type":"MSG"}`}
|
||||
store.indexByNode(tx)
|
||||
if len(store.byNode["DDEEFF"]) != 1 {
|
||||
t.Errorf("expected 1 entry for destPubKey DDEEFF, got %d", len(store.byNode["DDEEFF"]))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("indexes srcPubKey", func(t *testing.T) {
|
||||
tx := &StoreTx{Hash: "h2b", DecodedJSON: `{"srcPubKey":"112233","type":"TXT_MSG"}`}
|
||||
store.indexByNode(tx)
|
||||
if len(store.byNode["112233"]) != 1 {
|
||||
t.Errorf("expected 1 entry for srcPubKey 112233, got %d", len(store.byNode["112233"]))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("skips channel message without pubKey", func(t *testing.T) {
|
||||
beforeLen := len(store.byNode)
|
||||
tx := &StoreTx{Hash: "h3", DecodedJSON: `{"type":"CHAN","channel":"#test","text":"hello"}`}
|
||||
store.indexByNode(tx)
|
||||
if len(store.byNode) != beforeLen {
|
||||
t.Errorf("expected byNode unchanged for channel packet, got %d new entries", len(store.byNode)-beforeLen)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("skips empty DecodedJSON", func(t *testing.T) {
|
||||
beforeLen := len(store.byNode)
|
||||
tx := &StoreTx{Hash: "h4", DecodedJSON: ""}
|
||||
store.indexByNode(tx)
|
||||
if len(store.byNode) != beforeLen {
|
||||
t.Error("expected byNode unchanged for empty DecodedJSON")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("deduplicates same hash", func(t *testing.T) {
|
||||
tx := &StoreTx{Hash: "h1", DecodedJSON: `{"pubKey":"AABBCC","type":"ADVERT"}`}
|
||||
store.indexByNode(tx) // second call for same hash
|
||||
if len(store.byNode["AABBCC"]) != 1 {
|
||||
t.Errorf("expected dedup to keep 1 entry, got %d", len(store.byNode["AABBCC"]))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// BenchmarkIndexByNode measures indexByNode performance with and without pubkey
|
||||
// fields to demonstrate the strings.Contains pre-check optimization.
|
||||
func BenchmarkIndexByNode(b *testing.B) {
|
||||
// Payload WITHOUT any pubkey fields — should be skipped via pre-check
|
||||
noPubkey := `{"type":1,"msgId":42,"sender":"node1","data":"hello world"}`
|
||||
// Payload WITH a pubkey field — requires JSON parse
|
||||
withPubkey := `{"type":1,"msgId":42,"pubKey":"AABB","sender":"node1","data":"hello world"}`
|
||||
|
||||
b.Run("no_pubkey_skip", func(b *testing.B) {
|
||||
store := &PacketStore{
|
||||
byNode: make(map[string][]*StoreTx),
|
||||
nodeHashes: make(map[string]map[string]bool),
|
||||
}
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
tx := &StoreTx{
|
||||
Hash: fmt.Sprintf("hash-%d", i),
|
||||
DecodedJSON: noPubkey,
|
||||
}
|
||||
store.indexByNode(tx)
|
||||
}
|
||||
})
|
||||
|
||||
b.Run("with_pubkey_parse", func(b *testing.B) {
|
||||
store := &PacketStore{
|
||||
byNode: make(map[string][]*StoreTx),
|
||||
nodeHashes: make(map[string]map[string]bool),
|
||||
}
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
tx := &StoreTx{
|
||||
Hash: fmt.Sprintf("hash-%d", i),
|
||||
DecodedJSON: withPubkey,
|
||||
}
|
||||
store.indexByNode(tx)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -698,6 +698,32 @@ func (db *DB) GetNodes(limit, offset int, role, search, before, lastHeard, sortB
|
||||
}
|
||||
}
|
||||
|
||||
if region != "" {
|
||||
codes := normalizeRegionCodes(region)
|
||||
if len(codes) > 0 {
|
||||
placeholders := make([]string, len(codes))
|
||||
regionArgs := make([]interface{}, len(codes))
|
||||
for i, c := range codes {
|
||||
placeholders[i] = "?"
|
||||
regionArgs[i] = c
|
||||
}
|
||||
joinCond := "obs.rowid = o.observer_idx"
|
||||
if !db.isV3 {
|
||||
joinCond = "obs.id = o.observer_id"
|
||||
}
|
||||
subq := fmt.Sprintf(`public_key IN (
|
||||
SELECT DISTINCT JSON_EXTRACT(t.decoded_json, '$.pubKey')
|
||||
FROM transmissions t
|
||||
JOIN observations o ON o.transmission_id = t.id
|
||||
JOIN observers obs ON %s
|
||||
WHERE t.payload_type = 4
|
||||
AND UPPER(TRIM(obs.iata)) IN (%s)
|
||||
)`, joinCond, strings.Join(placeholders, ","))
|
||||
where = append(where, subq)
|
||||
args = append(args, regionArgs...)
|
||||
}
|
||||
}
|
||||
|
||||
w := ""
|
||||
if len(where) > 0 {
|
||||
w = "WHERE " + strings.Join(where, " AND ")
|
||||
|
||||
@@ -1012,6 +1012,168 @@ func TestGetNodesFiltering(t *testing.T) {
|
||||
t.Errorf("expected 1 node with offset, got %d", len(nodes))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("region filter SJC", func(t *testing.T) {
|
||||
nodes, total, _, err := db.GetNodes(50, 0, "", "", "", "", "", "SJC")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if total != 1 {
|
||||
t.Errorf("expected 1 node for SJC region, got %d", total)
|
||||
}
|
||||
if len(nodes) != 1 {
|
||||
t.Fatalf("expected 1 node, got %d", len(nodes))
|
||||
}
|
||||
if nodes[0]["public_key"] != "aabbccdd11223344" {
|
||||
t.Errorf("expected TestRepeater, got %v", nodes[0]["public_key"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("region filter SFO", func(t *testing.T) {
|
||||
_, total, _, err := db.GetNodes(50, 0, "", "", "", "", "", "SFO")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if total != 1 {
|
||||
t.Errorf("expected 1 node for SFO region, got %d", total)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("region filter multi", func(t *testing.T) {
|
||||
_, total, _, err := db.GetNodes(50, 0, "", "", "", "", "", "SJC,SFO")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if total != 1 {
|
||||
t.Errorf("expected 1 node for SJC,SFO region, got %d", total)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("region filter unknown", func(t *testing.T) {
|
||||
_, total, _, err := db.GetNodes(50, 0, "", "", "", "", "", "AMS")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if total != 0 {
|
||||
t.Errorf("expected 0 nodes for unknown region, got %d", total)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// setupTestDBV2 creates an in-memory SQLite database with the v2 schema
// where observations use observer_id TEXT instead of observer_idx INTEGER.
// The returned *DB has isV3=false so GetNodes takes its legacy join path
// (obs.id = o.observer_id). The connection is closed by the caller.
func setupTestDBV2(t *testing.T) *DB {
	t.Helper()
	conn, err := sql.Open("sqlite", ":memory:")
	if err != nil {
		t.Fatal(err)
	}
	// NOTE(review): presumably pinned to one connection so every query hits the
	// same :memory: database instance — confirm against the other DB helpers.
	conn.SetMaxOpenConns(1)

	schema := `
	CREATE TABLE nodes (
		public_key TEXT PRIMARY KEY,
		name TEXT,
		role TEXT,
		lat REAL,
		lon REAL,
		last_seen TEXT,
		first_seen TEXT,
		advert_count INTEGER DEFAULT 0,
		battery_mv INTEGER,
		temperature_c REAL
	);

	CREATE TABLE observers (
		id TEXT PRIMARY KEY,
		name TEXT,
		iata TEXT,
		last_seen TEXT,
		first_seen TEXT,
		packet_count INTEGER DEFAULT 0
	);

	CREATE TABLE transmissions (
		id INTEGER PRIMARY KEY AUTOINCREMENT,
		raw_hex TEXT NOT NULL,
		hash TEXT NOT NULL UNIQUE,
		first_seen TEXT NOT NULL,
		route_type INTEGER,
		payload_type INTEGER,
		payload_version INTEGER,
		decoded_json TEXT,
		created_at TEXT DEFAULT (datetime('now'))
	);

	CREATE TABLE observations (
		id INTEGER PRIMARY KEY AUTOINCREMENT,
		transmission_id INTEGER NOT NULL REFERENCES transmissions(id),
		observer_id TEXT,
		observer_name TEXT,
		direction TEXT,
		snr REAL,
		rssi REAL,
		score INTEGER,
		path_json TEXT,
		timestamp INTEGER NOT NULL
	);
	`
	if _, err := conn.Exec(schema); err != nil {
		t.Fatal(err)
	}

	// isV3=false routes GetNodes onto the obs.id = o.observer_id join.
	return &DB{conn: conn, isV3: false}
}
|
||||
|
||||
func TestGetNodesRegionFilterV2(t *testing.T) {
|
||||
db := setupTestDBV2(t)
|
||||
defer db.Close()
|
||||
|
||||
now := time.Now().UTC()
|
||||
recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
|
||||
recentEpoch := now.Add(-1 * time.Hour).Unix()
|
||||
|
||||
// Seed observer with IATA code
|
||||
db.conn.Exec(`INSERT INTO observers (id, name, iata, last_seen, first_seen, packet_count)
|
||||
VALUES ('obs-v2-1', 'V2 Observer', 'LAX', ?, '2026-01-01T00:00:00Z', 10)`, recent)
|
||||
|
||||
// Seed a node
|
||||
db.conn.Exec(`INSERT INTO nodes (public_key, name, role, lat, lon, last_seen, first_seen, advert_count)
|
||||
VALUES ('v2pubkey11223344', 'V2Node', 'repeater', 34.0, -118.0, ?, '2026-01-01T00:00:00Z', 5)`, recent)
|
||||
|
||||
// Seed an ADVERT transmission for the node
|
||||
db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
|
||||
VALUES ('AABB', 'v2hash0001', ?, 1, 4, '{"pubKey":"v2pubkey11223344","name":"V2Node","type":"ADVERT"}')`, recent)
|
||||
|
||||
// Seed v2-style observation: observer_id references observers.id directly
|
||||
db.conn.Exec(`INSERT INTO observations (transmission_id, observer_id, observer_name, snr, rssi, path_json, timestamp)
|
||||
VALUES (1, 'obs-v2-1', 'V2 Observer', 10.0, -90, '[]', ?)`, recentEpoch)
|
||||
|
||||
t.Run("v2 region filter match", func(t *testing.T) {
|
||||
nodes, total, _, err := db.GetNodes(50, 0, "", "", "", "", "", "LAX")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if total != 1 {
|
||||
t.Errorf("expected 1 node for LAX region (v2 schema), got %d", total)
|
||||
}
|
||||
if len(nodes) != 1 {
|
||||
t.Fatalf("expected 1 node, got %d", len(nodes))
|
||||
}
|
||||
if nodes[0]["public_key"] != "v2pubkey11223344" {
|
||||
t.Errorf("expected V2Node, got %v", nodes[0]["public_key"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("v2 region filter no match", func(t *testing.T) {
|
||||
_, total, _, err := db.GetNodes(50, 0, "", "", "", "", "", "JFK")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if total != 0 {
|
||||
t.Errorf("expected 0 nodes for JFK region (v2 schema), got %d", total)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetChannelMessagesDedup(t *testing.T) {
|
||||
|
||||
@@ -397,6 +397,106 @@ func DecodePacket(hexString string) (*DecodedPacket, error) {
|
||||
}, nil
|
||||
}
|
||||
|
||||
// HexRange represents a labeled byte range for the hex breakdown visualization.
|
||||
type HexRange struct {
|
||||
Start int `json:"start"`
|
||||
End int `json:"end"`
|
||||
Label string `json:"label"`
|
||||
}
|
||||
|
||||
// Breakdown holds colored byte ranges returned by the packet detail endpoint.
|
||||
type Breakdown struct {
|
||||
Ranges []HexRange `json:"ranges"`
|
||||
}
|
||||
|
||||
// BuildBreakdown computes labeled byte ranges for each section of a MeshCore packet.
|
||||
// The returned ranges are consumed by createColoredHexDump() and buildHexLegend()
|
||||
// in the frontend (public/app.js).
|
||||
func BuildBreakdown(hexString string) *Breakdown {
|
||||
hexString = strings.ReplaceAll(hexString, " ", "")
|
||||
hexString = strings.ReplaceAll(hexString, "\n", "")
|
||||
hexString = strings.ReplaceAll(hexString, "\r", "")
|
||||
buf, err := hex.DecodeString(hexString)
|
||||
if err != nil || len(buf) < 2 {
|
||||
return &Breakdown{Ranges: []HexRange{}}
|
||||
}
|
||||
|
||||
var ranges []HexRange
|
||||
offset := 0
|
||||
|
||||
// Byte 0: Header
|
||||
ranges = append(ranges, HexRange{Start: 0, End: 0, Label: "Header"})
|
||||
offset = 1
|
||||
|
||||
header := decodeHeader(buf[0])
|
||||
|
||||
// Bytes 1-4: Transport Codes (TRANSPORT_FLOOD / TRANSPORT_DIRECT only)
|
||||
if isTransportRoute(header.RouteType) {
|
||||
if len(buf) < offset+4 {
|
||||
return &Breakdown{Ranges: ranges}
|
||||
}
|
||||
ranges = append(ranges, HexRange{Start: offset, End: offset + 3, Label: "Transport Codes"})
|
||||
offset += 4
|
||||
}
|
||||
|
||||
if offset >= len(buf) {
|
||||
return &Breakdown{Ranges: ranges}
|
||||
}
|
||||
|
||||
// Next byte: Path Length (bits 7-6 = hashSize-1, bits 5-0 = hashCount)
|
||||
ranges = append(ranges, HexRange{Start: offset, End: offset, Label: "Path Length"})
|
||||
pathByte := buf[offset]
|
||||
offset++
|
||||
|
||||
hashSize := int(pathByte>>6) + 1
|
||||
hashCount := int(pathByte & 0x3F)
|
||||
pathBytes := hashSize * hashCount
|
||||
|
||||
// Path hops
|
||||
if hashCount > 0 && offset+pathBytes <= len(buf) {
|
||||
ranges = append(ranges, HexRange{Start: offset, End: offset + pathBytes - 1, Label: "Path"})
|
||||
}
|
||||
offset += pathBytes
|
||||
|
||||
if offset >= len(buf) {
|
||||
return &Breakdown{Ranges: ranges}
|
||||
}
|
||||
|
||||
payloadStart := offset
|
||||
|
||||
// Payload — break ADVERT into named sub-fields; everything else is one Payload range
|
||||
if header.PayloadType == PayloadADVERT && len(buf)-payloadStart >= 100 {
|
||||
ranges = append(ranges, HexRange{Start: payloadStart, End: payloadStart + 31, Label: "PubKey"})
|
||||
ranges = append(ranges, HexRange{Start: payloadStart + 32, End: payloadStart + 35, Label: "Timestamp"})
|
||||
ranges = append(ranges, HexRange{Start: payloadStart + 36, End: payloadStart + 99, Label: "Signature"})
|
||||
|
||||
appStart := payloadStart + 100
|
||||
if appStart < len(buf) {
|
||||
ranges = append(ranges, HexRange{Start: appStart, End: appStart, Label: "Flags"})
|
||||
appFlags := buf[appStart]
|
||||
fOff := appStart + 1
|
||||
if appFlags&0x10 != 0 && fOff+8 <= len(buf) {
|
||||
ranges = append(ranges, HexRange{Start: fOff, End: fOff + 3, Label: "Latitude"})
|
||||
ranges = append(ranges, HexRange{Start: fOff + 4, End: fOff + 7, Label: "Longitude"})
|
||||
fOff += 8
|
||||
}
|
||||
if appFlags&0x20 != 0 && fOff+2 <= len(buf) {
|
||||
fOff += 2
|
||||
}
|
||||
if appFlags&0x40 != 0 && fOff+2 <= len(buf) {
|
||||
fOff += 2
|
||||
}
|
||||
if appFlags&0x80 != 0 && fOff < len(buf) {
|
||||
ranges = append(ranges, HexRange{Start: fOff, End: len(buf) - 1, Label: "Name"})
|
||||
}
|
||||
}
|
||||
} else {
|
||||
ranges = append(ranges, HexRange{Start: payloadStart, End: len(buf) - 1, Label: "Payload"})
|
||||
}
|
||||
|
||||
return &Breakdown{Ranges: ranges}
|
||||
}
|
||||
|
||||
// ComputeContentHash computes the SHA-256-based content hash (first 16 hex chars).
|
||||
func ComputeContentHash(rawHex string) string {
|
||||
buf, err := hex.DecodeString(rawHex)
|
||||
|
||||
@@ -93,3 +93,152 @@ func TestDecodePacket_FloodHasNoCodes(t *testing.T) {
|
||||
t.Error("expected no transport codes for FLOOD route")
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildBreakdown_InvalidHex(t *testing.T) {
|
||||
b := BuildBreakdown("not-hex!")
|
||||
if len(b.Ranges) != 0 {
|
||||
t.Errorf("expected empty ranges for invalid hex, got %d", len(b.Ranges))
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildBreakdown_TooShort(t *testing.T) {
|
||||
b := BuildBreakdown("11") // 1 byte — no path byte
|
||||
if len(b.Ranges) != 0 {
|
||||
t.Errorf("expected empty ranges for too-short packet, got %d", len(b.Ranges))
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildBreakdown_FloodNonAdvert(t *testing.T) {
|
||||
// Header 0x15: route=1/FLOOD, payload=5/GRP_TXT
|
||||
// PathByte 0x01: 1 hop, 1-byte hash
|
||||
// PathHop: AA
|
||||
// Payload: FF0011
|
||||
b := BuildBreakdown("1501AAFFFF00")
|
||||
labels := rangeLabels(b.Ranges)
|
||||
expect := []string{"Header", "Path Length", "Path", "Payload"}
|
||||
if !equalLabels(labels, expect) {
|
||||
t.Errorf("expected labels %v, got %v", expect, labels)
|
||||
}
|
||||
// Verify byte positions
|
||||
assertRange(t, b.Ranges, "Header", 0, 0)
|
||||
assertRange(t, b.Ranges, "Path Length", 1, 1)
|
||||
assertRange(t, b.Ranges, "Path", 2, 2)
|
||||
assertRange(t, b.Ranges, "Payload", 3, 5)
|
||||
}
|
||||
|
||||
func TestBuildBreakdown_TransportFlood(t *testing.T) {
|
||||
// Header 0x14: route=0/TRANSPORT_FLOOD, payload=5/GRP_TXT
|
||||
// TransportCodes: AABBCCDD (4 bytes)
|
||||
// PathByte 0x01: 1 hop, 1-byte hash
|
||||
// PathHop: EE
|
||||
// Payload: FF00
|
||||
b := BuildBreakdown("14AABBCCDD01EEFF00")
|
||||
assertRange(t, b.Ranges, "Header", 0, 0)
|
||||
assertRange(t, b.Ranges, "Transport Codes", 1, 4)
|
||||
assertRange(t, b.Ranges, "Path Length", 5, 5)
|
||||
assertRange(t, b.Ranges, "Path", 6, 6)
|
||||
assertRange(t, b.Ranges, "Payload", 7, 8)
|
||||
}
|
||||
|
||||
func TestBuildBreakdown_FloodNoHops(t *testing.T) {
	// Header 0x15: FLOOD/GRP_TXT; PathByte 0x00: 0 hops; Payload: 00AABB
	// (three payload bytes — the byte after the path byte is payload too,
	// matching the asserted range [2,4] below)
	b := BuildBreakdown("150000AABB")
	assertRange(t, b.Ranges, "Header", 0, 0)
	assertRange(t, b.Ranges, "Path Length", 1, 1)
	// No Path range since hashCount=0
	for _, r := range b.Ranges {
		if r.Label == "Path" {
			t.Error("expected no Path range for zero-hop packet")
		}
	}
	assertRange(t, b.Ranges, "Payload", 2, 4)
}
|
||||
|
||||
func TestBuildBreakdown_AdvertBasic(t *testing.T) {
|
||||
// Header 0x11: FLOOD/ADVERT
|
||||
// PathByte 0x01: 1 hop, 1-byte hash
|
||||
// PathHop: AA
|
||||
// Payload: 100 bytes (PubKey32 + Timestamp4 + Signature64) + Flags=0x02 (repeater, no extras)
|
||||
pubkey := repeatHex("AB", 32)
|
||||
ts := "00000000" // 4 bytes
|
||||
sig := repeatHex("CD", 64)
|
||||
flags := "02"
|
||||
hex := "1101AA" + pubkey + ts + sig + flags
|
||||
b := BuildBreakdown(hex)
|
||||
assertRange(t, b.Ranges, "Header", 0, 0)
|
||||
assertRange(t, b.Ranges, "Path Length", 1, 1)
|
||||
assertRange(t, b.Ranges, "Path", 2, 2)
|
||||
assertRange(t, b.Ranges, "PubKey", 3, 34)
|
||||
assertRange(t, b.Ranges, "Timestamp", 35, 38)
|
||||
assertRange(t, b.Ranges, "Signature", 39, 102)
|
||||
assertRange(t, b.Ranges, "Flags", 103, 103)
|
||||
}
|
||||
|
||||
func TestBuildBreakdown_AdvertWithLocation(t *testing.T) {
|
||||
// flags=0x12: hasLocation bit set
|
||||
pubkey := repeatHex("00", 32)
|
||||
ts := "00000000"
|
||||
sig := repeatHex("00", 64)
|
||||
flags := "12" // 0x10 = hasLocation
|
||||
latBytes := "00000000"
|
||||
lonBytes := "00000000"
|
||||
hex := "1101AA" + pubkey + ts + sig + flags + latBytes + lonBytes
|
||||
b := BuildBreakdown(hex)
|
||||
assertRange(t, b.Ranges, "Latitude", 104, 107)
|
||||
assertRange(t, b.Ranges, "Longitude", 108, 111)
|
||||
}
|
||||
|
||||
func TestBuildBreakdown_AdvertWithName(t *testing.T) {
|
||||
// flags=0x82: hasName bit set
|
||||
pubkey := repeatHex("00", 32)
|
||||
ts := "00000000"
|
||||
sig := repeatHex("00", 64)
|
||||
flags := "82" // 0x80 = hasName
|
||||
name := "4E6F6465" // "Node" in hex
|
||||
hex := "1101AA" + pubkey + ts + sig + flags + name
|
||||
b := BuildBreakdown(hex)
|
||||
assertRange(t, b.Ranges, "Name", 104, 107)
|
||||
}
|
||||
|
||||
// helpers
|
||||
|
||||
func rangeLabels(ranges []HexRange) []string {
|
||||
out := make([]string, len(ranges))
|
||||
for i, r := range ranges {
|
||||
out[i] = r.Label
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// equalLabels reports whether a and b hold the same strings in the same order.
func equalLabels(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i, s := range a {
		if s != b[i] {
			return false
		}
	}
	return true
}
|
||||
|
||||
func assertRange(t *testing.T, ranges []HexRange, label string, wantStart, wantEnd int) {
|
||||
t.Helper()
|
||||
for _, r := range ranges {
|
||||
if r.Label == label {
|
||||
if r.Start != wantStart || r.End != wantEnd {
|
||||
t.Errorf("range %q: want [%d,%d], got [%d,%d]", label, wantStart, wantEnd, r.Start, r.End)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
t.Errorf("range %q not found in %v", label, rangeLabels(ranges))
|
||||
}
|
||||
|
||||
// repeatHex returns byteHex concatenated n times (e.g. repeatHex("AB", 3) ==
// "ABABAB"), used to build fixed-size packet fields in tests.
//
// Fixed: the original built the result with string += in a loop, which is
// quadratic; this version appends into a single pre-sized byte buffer.
func repeatHex(byteHex string, n int) string {
	out := make([]byte, 0, n*len(byteHex))
	for i := 0; i < n; i++ {
		out = append(out, byteHex...)
	}
	return string(out)
}
|
||||
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
@@ -326,6 +327,84 @@ func TestSpaHandler(t *testing.T) {
|
||||
t.Errorf("expected no-cache header for .html, got %s", cc)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("root path serves index.html", func(t *testing.T) {
|
||||
req := httptest.NewRequest("GET", "/", nil)
|
||||
w := httptest.NewRecorder()
|
||||
handler.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != 200 {
|
||||
t.Errorf("expected 200, got %d", w.Code)
|
||||
}
|
||||
body := w.Body.String()
|
||||
if body != "<html>SPA</html>" {
|
||||
t.Errorf("expected SPA index.html content, got %s", body)
|
||||
}
|
||||
ct := w.Header().Get("Content-Type")
|
||||
if ct != "text/html; charset=utf-8" {
|
||||
t.Errorf("expected text/html content type, got %s", ct)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("/index.html serves pre-processed content", func(t *testing.T) {
|
||||
req := httptest.NewRequest("GET", "/index.html", nil)
|
||||
w := httptest.NewRecorder()
|
||||
handler.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != 200 {
|
||||
t.Errorf("expected 200, got %d", w.Code)
|
||||
}
|
||||
body := w.Body.String()
|
||||
if body != "<html>SPA</html>" {
|
||||
t.Errorf("expected SPA index.html content, got %s", body)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestSpaHandlerCacheBust(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
htmlWithBust := `<html><script src="app.js?v=__BUST__"></script><link href="style.css?v=__BUST__"></html>`
|
||||
os.WriteFile(filepath.Join(dir, "index.html"), []byte(htmlWithBust), 0644)
|
||||
|
||||
fs := http.FileServer(http.Dir(dir))
|
||||
handler := spaHandler(dir, fs)
|
||||
|
||||
t.Run("__BUST__ is replaced with a Unix timestamp", func(t *testing.T) {
|
||||
req := httptest.NewRequest("GET", "/", nil)
|
||||
w := httptest.NewRecorder()
|
||||
handler.ServeHTTP(w, req)
|
||||
|
||||
body := w.Body.String()
|
||||
if strings.Contains(body, "__BUST__") {
|
||||
t.Errorf("__BUST__ placeholder was not replaced in response: %s", body)
|
||||
}
|
||||
// Verify it was replaced with digits (Unix timestamp)
|
||||
if !strings.Contains(body, "v=") {
|
||||
t.Errorf("expected v= query params in response, got: %s", body)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("SPA fallback also has busted values", func(t *testing.T) {
|
||||
req := httptest.NewRequest("GET", "/nonexistent/route", nil)
|
||||
w := httptest.NewRecorder()
|
||||
handler.ServeHTTP(w, req)
|
||||
|
||||
body := w.Body.String()
|
||||
if strings.Contains(body, "__BUST__") {
|
||||
t.Errorf("__BUST__ placeholder was not replaced in SPA fallback: %s", body)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("/index.html also has busted values", func(t *testing.T) {
|
||||
req := httptest.NewRequest("GET", "/index.html", nil)
|
||||
w := httptest.NewRecorder()
|
||||
handler.ServeHTTP(w, req)
|
||||
|
||||
body := w.Body.String()
|
||||
if strings.Contains(body, "__BUST__") {
|
||||
t.Errorf("__BUST__ placeholder was not replaced for /index.html: %s", body)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestWriteJSON(t *testing.T) {
|
||||
@@ -345,3 +424,29 @@ func TestWriteJSON(t *testing.T) {
|
||||
t.Errorf("expected 'value', got %v", body["key"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestHaversineKm(t *testing.T) {
|
||||
// Same point should be 0
|
||||
if d := haversineKm(37.0, -122.0, 37.0, -122.0); d != 0 {
|
||||
t.Errorf("same point: expected 0, got %f", d)
|
||||
}
|
||||
|
||||
// SF to LA ~559km
|
||||
d := haversineKm(37.7749, -122.4194, 34.0522, -118.2437)
|
||||
if d < 550 || d > 570 {
|
||||
t.Errorf("SF to LA: expected ~559km, got %f", d)
|
||||
}
|
||||
|
||||
// Symmetry
|
||||
d1 := haversineKm(37.7749, -122.4194, 34.0522, -118.2437)
|
||||
d2 := haversineKm(34.0522, -118.2437, 37.7749, -122.4194)
|
||||
if d1 != d2 {
|
||||
t.Errorf("not symmetric: %f vs %f", d1, d2)
|
||||
}
|
||||
|
||||
// Oslo to Stockholm ~415km (old Euclidean dLat*111, dLon*85 would give ~627km)
|
||||
d = haversineKm(59.9, 10.7, 59.3, 18.0)
|
||||
if d < 400 || d > 430 {
|
||||
t.Errorf("Oslo to Stockholm: expected ~415km, got %f", d)
|
||||
}
|
||||
}
|
||||
|
||||
+26
-2
@@ -11,9 +11,9 @@ import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"sync"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
@@ -242,11 +242,35 @@ func main() {
|
||||
}
|
||||
|
||||
// spaHandler serves static files, falling back to index.html for SPA routes.
|
||||
// It reads index.html once at creation time and replaces the __BUST__ placeholder
|
||||
// with a Unix timestamp so browsers fetch fresh JS/CSS after each server restart.
|
||||
func spaHandler(root string, fs http.Handler) http.Handler {
|
||||
// Pre-process index.html: replace __BUST__ with a cache-bust timestamp
|
||||
indexPath := filepath.Join(root, "index.html")
|
||||
rawHTML, err := os.ReadFile(indexPath)
|
||||
if err != nil {
|
||||
log.Printf("[static] warning: could not read index.html for cache-bust: %v", err)
|
||||
rawHTML = []byte("<!DOCTYPE html><html><body><h1>CoreScope</h1><p>index.html not found</p></body></html>")
|
||||
}
|
||||
bustValue := fmt.Sprintf("%d", time.Now().Unix())
|
||||
indexHTML := []byte(strings.ReplaceAll(string(rawHTML), "__BUST__", bustValue))
|
||||
log.Printf("[static] cache-bust value: %s", bustValue)
|
||||
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Serve pre-processed index.html for root and /index.html
|
||||
if r.URL.Path == "/" || r.URL.Path == "/index.html" {
|
||||
w.Header().Set("Content-Type", "text/html; charset=utf-8")
|
||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||
w.Write(indexHTML)
|
||||
return
|
||||
}
|
||||
|
||||
path := filepath.Join(root, r.URL.Path)
|
||||
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
http.ServeFile(w, r, filepath.Join(root, "index.html"))
|
||||
// SPA fallback — serve pre-processed index.html
|
||||
w.Header().Set("Content-Type", "text/html; charset=utf-8")
|
||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||
w.Write(indexHTML)
|
||||
return
|
||||
}
|
||||
// Disable caching for JS/CSS/HTML
|
||||
|
||||
@@ -0,0 +1,95 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
// TestPerfStatsConcurrentAccess verifies that concurrent writes and reads
|
||||
// to PerfStats do not trigger data races. Run with: go test -race
|
||||
func TestPerfStatsConcurrentAccess(t *testing.T) {
|
||||
ps := NewPerfStats()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
const goroutines = 50
|
||||
const iterations = 200
|
||||
|
||||
// Concurrent writers (simulating perfMiddleware)
|
||||
for i := 0; i < goroutines; i++ {
|
||||
wg.Add(1)
|
||||
go func(id int) {
|
||||
defer wg.Done()
|
||||
for j := 0; j < iterations; j++ {
|
||||
ms := float64(j) * 0.5
|
||||
key := "/api/test"
|
||||
if id%2 == 0 {
|
||||
key = "/api/other"
|
||||
}
|
||||
|
||||
ps.mu.Lock()
|
||||
ps.Requests++
|
||||
ps.TotalMs += ms
|
||||
if _, ok := ps.Endpoints[key]; !ok {
|
||||
ps.Endpoints[key] = &EndpointPerf{Recent: make([]float64, 0, 100)}
|
||||
}
|
||||
ep := ps.Endpoints[key]
|
||||
ep.Count++
|
||||
ep.TotalMs += ms
|
||||
if ms > ep.MaxMs {
|
||||
ep.MaxMs = ms
|
||||
}
|
||||
ep.Recent = append(ep.Recent, ms)
|
||||
if len(ep.Recent) > 100 {
|
||||
ep.Recent = ep.Recent[1:]
|
||||
}
|
||||
if ms > 50 {
|
||||
ps.SlowQueries = append(ps.SlowQueries, SlowQuery{
|
||||
Path: key,
|
||||
Ms: ms,
|
||||
Time: time.Now().UTC().Format(time.RFC3339),
|
||||
})
|
||||
if len(ps.SlowQueries) > 50 {
|
||||
ps.SlowQueries = ps.SlowQueries[1:]
|
||||
}
|
||||
}
|
||||
ps.mu.Unlock()
|
||||
}
|
||||
}(i)
|
||||
}
|
||||
|
||||
// Concurrent readers (simulating handlePerf / handleHealth)
|
||||
for i := 0; i < 10; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
for j := 0; j < iterations; j++ {
|
||||
ps.mu.Lock()
|
||||
_ = ps.Requests
|
||||
_ = ps.TotalMs
|
||||
for _, ep := range ps.Endpoints {
|
||||
_ = ep.Count
|
||||
_ = ep.MaxMs
|
||||
c := make([]float64, len(ep.Recent))
|
||||
copy(c, ep.Recent)
|
||||
}
|
||||
s := make([]SlowQuery, len(ps.SlowQueries))
|
||||
copy(s, ps.SlowQueries)
|
||||
ps.mu.Unlock()
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
|
||||
// Verify consistency
|
||||
ps.mu.Lock()
|
||||
defer ps.mu.Unlock()
|
||||
expectedRequests := int64(goroutines * iterations)
|
||||
if ps.Requests != expectedRequests {
|
||||
t.Errorf("expected %d requests, got %d", expectedRequests, ps.Requests)
|
||||
}
|
||||
if len(ps.Endpoints) == 0 {
|
||||
t.Error("expected endpoints to be populated")
|
||||
}
|
||||
}
|
||||
+65
-32
@@ -42,6 +42,7 @@ type Server struct {
|
||||
|
||||
// PerfStats tracks request performance.
|
||||
type PerfStats struct {
|
||||
mu sync.Mutex
|
||||
Requests int64
|
||||
TotalMs float64
|
||||
Endpoints map[string]*EndpointPerf
|
||||
@@ -162,10 +163,7 @@ func (s *Server) perfMiddleware(next http.Handler) http.Handler {
|
||||
next.ServeHTTP(w, r)
|
||||
ms := float64(time.Since(start).Microseconds()) / 1000.0
|
||||
|
||||
s.perfStats.Requests++
|
||||
s.perfStats.TotalMs += ms
|
||||
|
||||
// Normalize key: prefer mux route template (like Node.js req.route.path)
|
||||
// Normalize key outside lock (no shared state needed)
|
||||
key := r.URL.Path
|
||||
if route := mux.CurrentRoute(r); route != nil {
|
||||
if tmpl, err := route.GetPathTemplate(); err == nil {
|
||||
@@ -175,6 +173,11 @@ func (s *Server) perfMiddleware(next http.Handler) http.Handler {
|
||||
if key == r.URL.Path {
|
||||
key = perfHexFallback.ReplaceAllString(key, ":id")
|
||||
}
|
||||
|
||||
s.perfStats.mu.Lock()
|
||||
s.perfStats.Requests++
|
||||
s.perfStats.TotalMs += ms
|
||||
|
||||
if _, ok := s.perfStats.Endpoints[key]; !ok {
|
||||
s.perfStats.Endpoints[key] = &EndpointPerf{Recent: make([]float64, 0, 100)}
|
||||
}
|
||||
@@ -200,6 +203,7 @@ func (s *Server) perfMiddleware(next http.Handler) http.Handler {
|
||||
s.perfStats.SlowQueries = s.perfStats.SlowQueries[1:]
|
||||
}
|
||||
}
|
||||
s.perfStats.mu.Unlock()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -365,7 +369,8 @@ func (s *Server) handleHealth(w http.ResponseWriter, r *http.Request) {
|
||||
lastPauseMs = float64(m.PauseNs[(m.NumGC+255)%256]) / 1e6
|
||||
}
|
||||
|
||||
// Build slow queries list
|
||||
// Build slow queries list (copy under lock)
|
||||
s.perfStats.mu.Lock()
|
||||
recentSlow := make([]SlowQuery, 0)
|
||||
sliceEnd := s.perfStats.SlowQueries
|
||||
if len(sliceEnd) > 5 {
|
||||
@@ -374,6 +379,10 @@ func (s *Server) handleHealth(w http.ResponseWriter, r *http.Request) {
|
||||
for _, sq := range sliceEnd {
|
||||
recentSlow = append(recentSlow, sq)
|
||||
}
|
||||
perfRequests := s.perfStats.Requests
|
||||
perfTotalMs := s.perfStats.TotalMs
|
||||
perfSlowCount := len(s.perfStats.SlowQueries)
|
||||
s.perfStats.mu.Unlock()
|
||||
|
||||
writeJSON(w, HealthResponse{
|
||||
Status: "ok",
|
||||
@@ -403,9 +412,9 @@ func (s *Server) handleHealth(w http.ResponseWriter, r *http.Request) {
|
||||
EstimatedMB: pktEstMB,
|
||||
},
|
||||
Perf: HealthPerfStats{
|
||||
TotalRequests: int(s.perfStats.Requests),
|
||||
AvgMs: safeAvg(s.perfStats.TotalMs, float64(s.perfStats.Requests)),
|
||||
SlowQueries: len(s.perfStats.SlowQueries),
|
||||
TotalRequests: int(perfRequests),
|
||||
AvgMs: safeAvg(perfTotalMs, float64(perfRequests)),
|
||||
SlowQueries: perfSlowCount,
|
||||
RecentSlow: recentSlow,
|
||||
},
|
||||
})
|
||||
@@ -465,22 +474,50 @@ func (s *Server) handleStats(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
func (s *Server) handlePerf(w http.ResponseWriter, r *http.Request) {
|
||||
// Endpoint performance summary
|
||||
// Copy perfStats under lock to avoid data races
|
||||
s.perfStats.mu.Lock()
|
||||
type epSnapshot struct {
|
||||
path string
|
||||
count int
|
||||
totalMs float64
|
||||
maxMs float64
|
||||
recent []float64
|
||||
}
|
||||
epSnapshots := make([]epSnapshot, 0, len(s.perfStats.Endpoints))
|
||||
for path, ep := range s.perfStats.Endpoints {
|
||||
recentCopy := make([]float64, len(ep.Recent))
|
||||
copy(recentCopy, ep.Recent)
|
||||
epSnapshots = append(epSnapshots, epSnapshot{path, ep.Count, ep.TotalMs, ep.MaxMs, recentCopy})
|
||||
}
|
||||
uptimeSec := int(time.Since(s.perfStats.StartedAt).Seconds())
|
||||
totalRequests := s.perfStats.Requests
|
||||
totalMs := s.perfStats.TotalMs
|
||||
slowQueries := make([]SlowQuery, 0)
|
||||
sliceEnd := s.perfStats.SlowQueries
|
||||
if len(sliceEnd) > 20 {
|
||||
sliceEnd = sliceEnd[len(sliceEnd)-20:]
|
||||
}
|
||||
for _, sq := range sliceEnd {
|
||||
slowQueries = append(slowQueries, sq)
|
||||
}
|
||||
s.perfStats.mu.Unlock()
|
||||
|
||||
// Process snapshots outside lock
|
||||
type epEntry struct {
|
||||
path string
|
||||
data *EndpointStatsResp
|
||||
}
|
||||
var entries []epEntry
|
||||
for path, ep := range s.perfStats.Endpoints {
|
||||
sorted := sortedCopy(ep.Recent)
|
||||
for _, snap := range epSnapshots {
|
||||
sorted := sortedCopy(snap.recent)
|
||||
d := &EndpointStatsResp{
|
||||
Count: ep.Count,
|
||||
AvgMs: safeAvg(ep.TotalMs, float64(ep.Count)),
|
||||
Count: snap.count,
|
||||
AvgMs: safeAvg(snap.totalMs, float64(snap.count)),
|
||||
P50Ms: round(percentile(sorted, 0.5), 1),
|
||||
P95Ms: round(percentile(sorted, 0.95), 1),
|
||||
MaxMs: round(ep.MaxMs, 1),
|
||||
MaxMs: round(snap.maxMs, 1),
|
||||
}
|
||||
entries = append(entries, epEntry{path, d})
|
||||
entries = append(entries, epEntry{snap.path, d})
|
||||
}
|
||||
// Sort by total time spent (count * avg) descending, matching Node.js
|
||||
sort.Slice(entries, func(i, j int) bool {
|
||||
@@ -521,22 +558,10 @@ func (s *Server) handlePerf(w http.ResponseWriter, r *http.Request) {
|
||||
sqliteStats = &ss
|
||||
}
|
||||
|
||||
uptimeSec := int(time.Since(s.perfStats.StartedAt).Seconds())
|
||||
|
||||
// Convert slow queries
|
||||
slowQueries := make([]SlowQuery, 0)
|
||||
sliceEnd := s.perfStats.SlowQueries
|
||||
if len(sliceEnd) > 20 {
|
||||
sliceEnd = sliceEnd[len(sliceEnd)-20:]
|
||||
}
|
||||
for _, sq := range sliceEnd {
|
||||
slowQueries = append(slowQueries, sq)
|
||||
}
|
||||
|
||||
writeJSON(w, PerfResponse{
|
||||
Uptime: uptimeSec,
|
||||
TotalRequests: s.perfStats.Requests,
|
||||
AvgMs: safeAvg(s.perfStats.TotalMs, float64(s.perfStats.Requests)),
|
||||
TotalRequests: totalRequests,
|
||||
AvgMs: safeAvg(totalMs, float64(totalRequests)),
|
||||
Endpoints: summary,
|
||||
SlowQueries: slowQueries,
|
||||
Cache: perfCS,
|
||||
@@ -560,7 +585,13 @@ func (s *Server) handlePerf(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
func (s *Server) handlePerfReset(w http.ResponseWriter, r *http.Request) {
|
||||
s.perfStats = NewPerfStats()
|
||||
s.perfStats.mu.Lock()
|
||||
s.perfStats.Requests = 0
|
||||
s.perfStats.TotalMs = 0
|
||||
s.perfStats.Endpoints = make(map[string]*EndpointPerf)
|
||||
s.perfStats.SlowQueries = make([]SlowQuery, 0)
|
||||
s.perfStats.StartedAt = time.Now()
|
||||
s.perfStats.mu.Unlock()
|
||||
writeJSON(w, OkResp{Ok: true})
|
||||
}
|
||||
|
||||
@@ -730,10 +761,11 @@ func (s *Server) handlePacketDetail(w http.ResponseWriter, r *http.Request) {
|
||||
pathHops = []interface{}{}
|
||||
}
|
||||
|
||||
rawHex, _ := packet["raw_hex"].(string)
|
||||
writeJSON(w, PacketDetailResponse{
|
||||
Packet: packet,
|
||||
Path: pathHops,
|
||||
Breakdown: struct{}{},
|
||||
Breakdown: BuildBreakdown(rawHex),
|
||||
ObservationCount: observationCount,
|
||||
Observations: mapSliceToObservations(observations),
|
||||
})
|
||||
@@ -1204,7 +1236,8 @@ func (s *Server) handleAnalyticsHashSizes(w http.ResponseWriter, r *http.Request
|
||||
|
||||
func (s *Server) handleAnalyticsHashCollisions(w http.ResponseWriter, r *http.Request) {
|
||||
if s.store != nil {
|
||||
writeJSON(w, s.store.GetAnalyticsHashCollisions())
|
||||
region := r.URL.Query().Get("region")
|
||||
writeJSON(w, s.store.GetAnalyticsHashCollisions(region))
|
||||
return
|
||||
}
|
||||
writeJSON(w, map[string]interface{}{
|
||||
|
||||
+124
-6
@@ -2178,6 +2178,124 @@ func TestGetNodeHashSizeInfoLatestWins(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetNodeHashSizeInfoIgnoreDirectZeroHop(t *testing.T) {
|
||||
db := setupTestDB(t)
|
||||
seedTestData(t, db)
|
||||
store := NewPacketStore(db, nil)
|
||||
if err := store.Load(); err != nil {
|
||||
t.Fatalf("store.Load failed: %v", err)
|
||||
}
|
||||
|
||||
pk := "dddd111122223333444455556666777788889999aaaabbbbccccddddeeee3333"
|
||||
db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, role) VALUES (?, 'DirIgnore', 'repeater')", pk)
|
||||
|
||||
decoded := `{"name":"DirIgnore","pubKey":"` + pk + `"}`
|
||||
rawFlood2B := "11" + "40" + "aabb" // FLOOD advert, hashSize=2
|
||||
rawDirect0 := "12" + "00" + "aabb" // DIRECT advert, zero-hop (should be ignored)
|
||||
|
||||
payloadType := 4
|
||||
raws := []string{rawFlood2B, rawDirect0, rawFlood2B, rawDirect0, rawFlood2B}
|
||||
for i, raw := range raws {
|
||||
tx := &StoreTx{
|
||||
ID: 9150 + i,
|
||||
RawHex: raw,
|
||||
Hash: "dirignore" + strconv.Itoa(i),
|
||||
FirstSeen: "2024-01-01T0" + strconv.Itoa(i) + ":00:00Z",
|
||||
PayloadType: &payloadType,
|
||||
DecodedJSON: decoded,
|
||||
}
|
||||
store.packets = append(store.packets, tx)
|
||||
store.byPayloadType[4] = append(store.byPayloadType[4], tx)
|
||||
}
|
||||
|
||||
info := store.GetNodeHashSizeInfo()
|
||||
ni := info[pk]
|
||||
if ni == nil {
|
||||
t.Fatal("expected hash info for test node")
|
||||
}
|
||||
if ni.HashSize != 2 {
|
||||
t.Errorf("HashSize=%d, want 2 (direct zero-hop adverts should be ignored)", ni.HashSize)
|
||||
}
|
||||
if ni.Inconsistent {
|
||||
t.Error("expected hash_size_inconsistent=false when direct zero-hop adverts are ignored")
|
||||
}
|
||||
if len(ni.AllSizes) != 1 || !ni.AllSizes[2] {
|
||||
t.Errorf("expected only 2-byte size in AllSizes, got %#v", ni.AllSizes)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetNodeHashSizeInfoOnlyDirectZeroHopIgnored(t *testing.T) {
|
||||
db := setupTestDB(t)
|
||||
seedTestData(t, db)
|
||||
store := NewPacketStore(db, nil)
|
||||
if err := store.Load(); err != nil {
|
||||
t.Fatalf("store.Load failed: %v", err)
|
||||
}
|
||||
|
||||
pk := "eeee111122223333444455556666777788889999aaaabbbbccccddddeeee4444"
|
||||
db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, role) VALUES (?, 'OnlyDirect', 'repeater')", pk)
|
||||
|
||||
decoded := `{"name":"OnlyDirect","pubKey":"` + pk + `"}`
|
||||
rawDirect0 := "12" + "00" + "aabb"
|
||||
payloadType := 4
|
||||
|
||||
tx := &StoreTx{
|
||||
ID: 9160,
|
||||
RawHex: rawDirect0,
|
||||
Hash: "onlydirect0",
|
||||
FirstSeen: "2024-01-01T00:00:00Z",
|
||||
PayloadType: &payloadType,
|
||||
DecodedJSON: decoded,
|
||||
}
|
||||
store.packets = append(store.packets, tx)
|
||||
store.byPayloadType[4] = append(store.byPayloadType[4], tx)
|
||||
|
||||
info := store.GetNodeHashSizeInfo()
|
||||
if ni := info[pk]; ni != nil {
|
||||
t.Errorf("expected nil hash info for direct zero-hop only node, got HashSize=%d", ni.HashSize)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetNodeHashSizeInfoDirectNonZeroHopCounted(t *testing.T) {
|
||||
// A DIRECT advert with non-zero hop count should NOT be skipped —
|
||||
// only zero-hop DIRECT adverts misreport hash size.
|
||||
db := setupTestDB(t)
|
||||
seedTestData(t, db)
|
||||
store := NewPacketStore(db, nil)
|
||||
if err := store.Load(); err != nil {
|
||||
t.Fatalf("store.Load failed: %v", err)
|
||||
}
|
||||
|
||||
pk := "ffff111122223333444455556666777788889999aaaabbbbccccddddeeee5555"
|
||||
db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, role) VALUES (?, 'DirNonZero', 'repeater')", pk)
|
||||
|
||||
decoded := `{"name":"DirNonZero","pubKey":"` + pk + `"}`
|
||||
// DIRECT advert (route type 2 = 0x02 in bits 0-1), path byte 0x41:
|
||||
// upper 2 bits = 01 → hash_size = 2, lower 6 bits = 0x01 → hop count 1 (non-zero)
|
||||
rawDirectNonZero := "12" + "41" + "aabb" // header=0x12 (ADVERT|DIRECT), path=0x41
|
||||
payloadType := 4
|
||||
|
||||
tx := &StoreTx{
|
||||
ID: 9170,
|
||||
RawHex: rawDirectNonZero,
|
||||
Hash: "dirnonzero0",
|
||||
FirstSeen: "2024-01-01T00:00:00Z",
|
||||
PayloadType: &payloadType,
|
||||
DecodedJSON: decoded,
|
||||
}
|
||||
store.packets = append(store.packets, tx)
|
||||
store.byPayloadType[4] = append(store.byPayloadType[4], tx)
|
||||
|
||||
info := store.GetNodeHashSizeInfo()
|
||||
ni := info[pk]
|
||||
if ni == nil {
|
||||
t.Fatal("expected hash info for DIRECT non-zero-hop node — it should NOT be skipped")
|
||||
}
|
||||
if ni.HashSize != 2 {
|
||||
t.Errorf("HashSize=%d, want 2 (DIRECT with hop count > 0 should be counted)", ni.HashSize)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetNodeHashSizeInfoNoAdverts(t *testing.T) {
|
||||
// A node with no ADVERT packets should not appear in hash size info.
|
||||
db := setupTestDB(t)
|
||||
@@ -2680,9 +2798,9 @@ func TestHashCollisionsNoNullArrays(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestHashCollisionsRegionParamIgnored(t *testing.T) {
|
||||
// Issue #417: region param was accepted but ignored.
|
||||
// After fix, the endpoint should work without region and not cache per-region.
|
||||
func TestHashCollisionsRegionParam(t *testing.T) {
|
||||
// Issue #438: region param should be accepted and used for filtering.
|
||||
// With no region observers configured, results should be identical to global.
|
||||
_, router := setupTestServer(t)
|
||||
|
||||
// Request without region
|
||||
@@ -2693,7 +2811,7 @@ func TestHashCollisionsRegionParamIgnored(t *testing.T) {
|
||||
t.Fatalf("expected 200, got %d", w1.Code)
|
||||
}
|
||||
|
||||
// Request with region param (should be ignored, same result)
|
||||
// Request with region param (no observers for this region, so falls back to global)
|
||||
req2 := httptest.NewRequest("GET", "/api/analytics/hash-collisions?region=us-west", nil)
|
||||
w2 := httptest.NewRecorder()
|
||||
router.ServeHTTP(w2, req2)
|
||||
@@ -2701,9 +2819,9 @@ func TestHashCollisionsRegionParamIgnored(t *testing.T) {
|
||||
t.Fatalf("expected 200, got %d", w2.Code)
|
||||
}
|
||||
|
||||
// Both should return identical results
|
||||
// With no region observers configured, both should return identical results
|
||||
if w1.Body.String() != w2.Body.String() {
|
||||
t.Error("responses differ with/without region param — region should be ignored")
|
||||
t.Error("responses differ with/without region param when no region observers configured")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
+74
-9
@@ -80,7 +80,7 @@ type PacketStore struct {
|
||||
rfCache map[string]*cachedResult // region → cached RF result
|
||||
topoCache map[string]*cachedResult // region → cached topology result
|
||||
hashCache map[string]*cachedResult // region → cached hash-sizes result
|
||||
collisionCache *cachedResult // cached hash-collisions result (no region filtering)
|
||||
collisionCache map[string]*cachedResult // cached hash-collisions result keyed by region ("" = global)
|
||||
chanCache map[string]*cachedResult // region → cached channels result
|
||||
distCache map[string]*cachedResult // region → cached distance result
|
||||
subpathCache map[string]*cachedResult // params → cached subpaths result
|
||||
@@ -176,6 +176,7 @@ func NewPacketStore(db *DB, cfg *PacketStoreConfig) *PacketStore {
|
||||
topoCache: make(map[string]*cachedResult),
|
||||
hashCache: make(map[string]*cachedResult),
|
||||
|
||||
collisionCache: make(map[string]*cachedResult),
|
||||
chanCache: make(map[string]*cachedResult),
|
||||
distCache: make(map[string]*cachedResult),
|
||||
subpathCache: make(map[string]*cachedResult),
|
||||
@@ -368,6 +369,11 @@ func (s *PacketStore) indexByNode(tx *StoreTx) {
|
||||
if tx.DecodedJSON == "" {
|
||||
return
|
||||
}
|
||||
// All three target fields ("pubKey", "destPubKey", "srcPubKey") share the
|
||||
// common suffix "ubKey" — skip JSON parse for packets that have none of them.
|
||||
if !strings.Contains(tx.DecodedJSON, "ubKey") {
|
||||
return
|
||||
}
|
||||
var decoded map[string]interface{}
|
||||
if json.Unmarshal([]byte(tx.DecodedJSON), &decoded) != nil {
|
||||
return
|
||||
@@ -696,7 +702,7 @@ func (s *PacketStore) invalidateCachesFor(inv cacheInvalidation) {
|
||||
s.rfCache = make(map[string]*cachedResult)
|
||||
s.topoCache = make(map[string]*cachedResult)
|
||||
s.hashCache = make(map[string]*cachedResult)
|
||||
s.collisionCache = nil
|
||||
s.collisionCache = make(map[string]*cachedResult)
|
||||
s.chanCache = make(map[string]*cachedResult)
|
||||
s.distCache = make(map[string]*cachedResult)
|
||||
s.subpathCache = make(map[string]*cachedResult)
|
||||
@@ -716,7 +722,7 @@ func (s *PacketStore) invalidateCachesFor(inv cacheInvalidation) {
|
||||
}
|
||||
if inv.hasNewTransmissions {
|
||||
s.hashCache = make(map[string]*cachedResult)
|
||||
s.collisionCache = nil
|
||||
s.collisionCache = make(map[string]*cachedResult)
|
||||
}
|
||||
if inv.hasChannelData {
|
||||
s.chanCache = make(map[string]*cachedResult)
|
||||
@@ -4181,20 +4187,20 @@ type hashSizeNodeInfo struct {
|
||||
|
||||
// GetAnalyticsHashCollisions returns pre-computed hash collision analysis.
|
||||
// This moves the O(n²) distance computation from the frontend to the server.
|
||||
func (s *PacketStore) GetAnalyticsHashCollisions() map[string]interface{} {
|
||||
func (s *PacketStore) GetAnalyticsHashCollisions(region string) map[string]interface{} {
|
||||
s.cacheMu.Lock()
|
||||
if s.collisionCache != nil && time.Now().Before(s.collisionCache.expiresAt) {
|
||||
if cached, ok := s.collisionCache[region]; ok && time.Now().Before(cached.expiresAt) {
|
||||
s.cacheHits++
|
||||
s.cacheMu.Unlock()
|
||||
return s.collisionCache.data
|
||||
return cached.data
|
||||
}
|
||||
s.cacheMisses++
|
||||
s.cacheMu.Unlock()
|
||||
|
||||
result := s.computeHashCollisions()
|
||||
result := s.computeHashCollisions(region)
|
||||
|
||||
s.cacheMu.Lock()
|
||||
s.collisionCache = &cachedResult{data: result, expiresAt: time.Now().Add(s.collisionCacheTTL)}
|
||||
s.collisionCache[region] = &cachedResult{data: result, expiresAt: time.Now().Add(s.collisionCacheTTL)}
|
||||
s.cacheMu.Unlock()
|
||||
|
||||
return result
|
||||
@@ -4236,11 +4242,60 @@ type twoByteCellInfo struct {
|
||||
CollisionCount int `json:"collision_count"`
|
||||
}
|
||||
|
||||
func (s *PacketStore) computeHashCollisions() map[string]interface{} {
|
||||
func (s *PacketStore) computeHashCollisions(region string) map[string]interface{} {
|
||||
// Get all nodes from DB
|
||||
nodes := s.getAllNodes()
|
||||
hashInfo := s.GetNodeHashSizeInfo()
|
||||
|
||||
// If region is specified, filter to only nodes seen by regional observers
|
||||
if region != "" {
|
||||
regionObs := s.resolveRegionObservers(region)
|
||||
if regionObs != nil {
|
||||
s.mu.RLock()
|
||||
regionNodePKs := make(map[string]bool)
|
||||
for _, tx := range s.packets {
|
||||
match := false
|
||||
for _, obs := range tx.Observations {
|
||||
if regionObs[obs.ObserverID] {
|
||||
match = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !match {
|
||||
continue
|
||||
}
|
||||
// Collect node public keys from advert packets
|
||||
if tx.DecodedJSON != "" {
|
||||
var d map[string]interface{}
|
||||
if json.Unmarshal([]byte(tx.DecodedJSON), &d) == nil {
|
||||
if pk, ok := d["pubKey"].(string); ok && pk != "" {
|
||||
regionNodePKs[pk] = true
|
||||
}
|
||||
if pk, ok := d["public_key"].(string); ok && pk != "" {
|
||||
regionNodePKs[pk] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
// Include observers themselves as nodes in the region
|
||||
for _, obs := range tx.Observations {
|
||||
if obs.ObserverID != "" {
|
||||
regionNodePKs[obs.ObserverID] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
s.mu.RUnlock()
|
||||
|
||||
// Filter nodes to only those seen in the region
|
||||
filtered := make([]nodeInfo, 0, len(regionNodePKs))
|
||||
for _, n := range nodes {
|
||||
if regionNodePKs[n.PublicKey] {
|
||||
filtered = append(filtered, n)
|
||||
}
|
||||
}
|
||||
nodes = filtered
|
||||
}
|
||||
}
|
||||
|
||||
// Build collision nodes with hash info
|
||||
var allCNodes []collisionNode
|
||||
for _, n := range nodes {
|
||||
@@ -4487,10 +4542,20 @@ func (s *PacketStore) computeNodeHashSizeInfo() map[string]*hashSizeNodeInfo {
|
||||
if len(tx.RawHex) < 4 {
|
||||
continue
|
||||
}
|
||||
header, err := strconv.ParseUint(tx.RawHex[:2], 16, 8)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
routeType := int(header & 0x03)
|
||||
pathByte, err := strconv.ParseUint(tx.RawHex[2:4], 16, 8)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
// DIRECT zero-hop adverts use path byte 0x00 locally and can misreport
|
||||
// multibyte repeater hash mode as 1-byte.
|
||||
if routeType == RouteDirect && (pathByte&0x3F) == 0 {
|
||||
continue
|
||||
}
|
||||
hs := int((pathByte>>6)&0x3) + 1
|
||||
|
||||
var d map[string]interface{}
|
||||
|
||||
+1
-1
@@ -289,7 +289,7 @@ type PacketTimestampsResponse struct {
|
||||
type PacketDetailResponse struct {
|
||||
Packet interface{} `json:"packet"`
|
||||
Path []interface{} `json:"path"`
|
||||
Breakdown interface{} `json:"breakdown"`
|
||||
Breakdown *Breakdown `json:"breakdown"`
|
||||
ObservationCount int `json:"observation_count"`
|
||||
Observations []ObservationResp `json:"observations,omitempty"`
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -40,7 +40,7 @@ STAGING_DATA="${STAGING_DATA_DIR:-$HOME/meshcore-staging-data}"
|
||||
STAGING_COMPOSE_FILE="docker-compose.staging.yml"
|
||||
|
||||
# Build metadata — exported so docker compose build picks them up via args
|
||||
export APP_VERSION=$(node -p "require('./package.json').version" 2>/dev/null || echo "unknown")
|
||||
export APP_VERSION=$(git describe --tags --match "v*" 2>/dev/null || echo "unknown")
|
||||
export GIT_COMMIT=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")
|
||||
export BUILD_TIME=$(date -u +%Y-%m-%dT%H:%M:%SZ)
|
||||
|
||||
@@ -512,7 +512,7 @@ cmd_setup() {
|
||||
|
||||
# Default to latest release tag (instead of staying on master)
|
||||
if ! is_done "version_pin"; then
|
||||
git fetch origin --tags 2>/dev/null || true
|
||||
git fetch origin --tags --force 2>/dev/null || true
|
||||
local latest_tag
|
||||
latest_tag=$(git tag -l 'v*' --sort=-v:refname | head -1)
|
||||
if [ -n "$latest_tag" ]; then
|
||||
@@ -1317,7 +1317,7 @@ cmd_update() {
|
||||
local version="${1:-}"
|
||||
|
||||
info "Fetching latest changes and tags..."
|
||||
git fetch origin --tags
|
||||
git fetch origin --tags --force
|
||||
|
||||
if [ -z "$version" ]; then
|
||||
# No arg: checkout latest release tag
|
||||
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "meshcore-analyzer",
|
||||
"version": "3.2.0",
|
||||
"version": "0.0.0-use-git-tags",
|
||||
"description": "Community-run alternative to the closed-source `analyzer.letsmesh.net`. MQTT packet collection + open-source web analyzer for the Bay Area MeshCore mesh.",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
|
||||
+4
-4
@@ -148,7 +148,7 @@
|
||||
api('/analytics/rf' + sep, { ttl: CLIENT_TTL.analyticsRF }),
|
||||
api('/analytics/topology' + sep, { ttl: CLIENT_TTL.analyticsRF }),
|
||||
api('/analytics/channels' + sep, { ttl: CLIENT_TTL.analyticsRF }),
|
||||
api('/analytics/hash-collisions', { ttl: CLIENT_TTL.analyticsRF }),
|
||||
api('/analytics/hash-collisions' + sep, { ttl: CLIENT_TTL.analyticsRF }),
|
||||
]);
|
||||
_analyticsData = { hashData, rfData, topoData, chanData, collisionData };
|
||||
renderTab(_currentTab);
|
||||
@@ -1488,9 +1488,9 @@
|
||||
for (let i = 0; i < data.nodes.length - 1; i++) {
|
||||
const a = data.nodes[i], b = data.nodes[i+1];
|
||||
if (a.lat && a.lon && b.lat && b.lon && !(a.lat===0&&a.lon===0) && !(b.lat===0&&b.lon===0)) {
|
||||
const dLat = (a.lat - b.lat) * 111;
|
||||
const dLon = (a.lon - b.lon) * 85;
|
||||
const km = Math.sqrt(dLat*dLat + dLon*dLon);
|
||||
const km = window.HopResolver && window.HopResolver.haversineKm
|
||||
? window.HopResolver.haversineKm(a.lat, a.lon, b.lat, b.lon)
|
||||
: (() => { const R=6371, dLat=(b.lat-a.lat)*Math.PI/180, dLon=(b.lon-a.lon)*Math.PI/180, h=Math.sin(dLat/2)**2+Math.cos(a.lat*Math.PI/180)*Math.cos(b.lat*Math.PI/180)*Math.sin(dLon/2)**2; return R*2*Math.atan2(Math.sqrt(h),Math.sqrt(1-h)); })();
|
||||
total += km;
|
||||
const cls = km > 200 ? 'color:var(--status-red);font-weight:bold' : km > 50 ? 'color:var(--status-yellow)' : 'color:var(--status-green)';
|
||||
dists.push(`<div style="padding:2px 0"><span style="${cls}">${km < 1 ? (km*1000).toFixed(0)+'m' : km.toFixed(1)+'km'}</span> <span class="text-muted">${esc(a.name)} → ${esc(b.name)}</span></div>`);
|
||||
|
||||
+5
-2
@@ -274,6 +274,9 @@
|
||||
for (let i = 0; i < str.length; i++) h = ((h << 5) - h + str.charCodeAt(i)) | 0;
|
||||
return Math.abs(h);
|
||||
}
|
||||
function formatHashHex(hash) {
|
||||
return typeof hash === 'number' ? '0x' + hash.toString(16).toUpperCase().padStart(2, '0') : hash;
|
||||
}
|
||||
function getChannelColor(hash) { return CHANNEL_COLORS[hashCode(String(hash)) % CHANNEL_COLORS.length]; }
|
||||
function getSenderColor(name) {
|
||||
const isDark = document.documentElement.getAttribute('data-theme') === 'dark' ||
|
||||
@@ -659,7 +662,7 @@
|
||||
});
|
||||
|
||||
el.innerHTML = sorted.map(ch => {
|
||||
const name = ch.name || `Channel ${ch.hash}`;
|
||||
const name = ch.name || `Channel ${formatHashHex(ch.hash)}`;
|
||||
const color = getChannelColor(ch.hash);
|
||||
const time = ch.lastActivityMs ? formatSecondsAgo(Math.floor((Date.now() - ch.lastActivityMs) / 1000)) : '';
|
||||
const preview = ch.lastSender && ch.lastMessage
|
||||
@@ -688,7 +691,7 @@
|
||||
history.replaceState(null, '', `#/channels/${encodeURIComponent(hash)}`);
|
||||
renderChannelList();
|
||||
const ch = channels.find(c => c.hash === hash);
|
||||
const name = ch?.name || `Channel ${hash}`;
|
||||
const name = ch?.name || `Channel ${formatHashHex(hash)}`;
|
||||
const header = document.getElementById('chHeader');
|
||||
header.querySelector('.ch-header-text').textContent = `${name} — ${ch?.messageCount || 0} messages`;
|
||||
|
||||
|
||||
@@ -203,5 +203,5 @@ window.HopResolver = (function() {
|
||||
return nodesList.length > 0;
|
||||
}
|
||||
|
||||
return { init: init, resolve: resolve, ready: ready };
|
||||
return { init: init, resolve: resolve, ready: ready, haversineKm: haversineKm };
|
||||
})();
|
||||
|
||||
+29
-28
@@ -22,9 +22,9 @@
|
||||
<meta name="twitter:title" content="CoreScope">
|
||||
<meta name="twitter:description" content="Real-time MeshCore LoRa mesh network analyzer — live packet visualization, node tracking, channel decryption, and route analysis.">
|
||||
<meta name="twitter:image" content="https://raw.githubusercontent.com/Kpa-clawbot/corescope/master/public/og-image.png">
|
||||
<link rel="stylesheet" href="style.css?v=1775076186">
|
||||
<link rel="stylesheet" href="home.css?v=1775076186">
|
||||
<link rel="stylesheet" href="live.css?v=1775076186">
|
||||
<link rel="stylesheet" href="style.css?v=__BUST__">
|
||||
<link rel="stylesheet" href="home.css?v=__BUST__">
|
||||
<link rel="stylesheet" href="live.css?v=__BUST__">
|
||||
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css"
|
||||
integrity="sha256-p4NxAoJBhIIN+hmNHrzRCf9tD/miZyoHS5obTRR9BMY="
|
||||
crossorigin="anonymous">
|
||||
@@ -85,30 +85,31 @@
|
||||
<main id="app" role="main"></main>
|
||||
|
||||
<script src="vendor/qrcode.js"></script>
|
||||
<script src="roles.js?v=1775076186"></script>
|
||||
<script src="customize.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="region-filter.js?v=1775076186"></script>
|
||||
<script src="hop-resolver.js?v=1775076186"></script>
|
||||
<script src="hop-display.js?v=1775076186"></script>
|
||||
<script src="app.js?v=1775076186"></script>
|
||||
<script src="home.js?v=1775076186"></script>
|
||||
<script src="packet-filter.js?v=1775076186"></script>
|
||||
<script src="packets.js?v=1775076186"></script>
|
||||
<script src="geo-filter-overlay.js?v=1775076186"></script>
|
||||
<script src="map.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="channels.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="nodes.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="traces.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="analytics.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="audio.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="audio-v1-constellation.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="audio-v2-constellation.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="audio-lab.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="live.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="observers.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="observer-detail.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="compare.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="node-analytics.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="perf.js?v=1775076186" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="roles.js?v=__BUST__"></script>
|
||||
<script src="customize.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="region-filter.js?v=__BUST__"></script>
|
||||
<script src="hop-resolver.js?v=__BUST__"></script>
|
||||
<script src="hop-display.js?v=__BUST__"></script>
|
||||
<script src="app.js?v=__BUST__"></script>
|
||||
<script src="home.js?v=__BUST__"></script>
|
||||
<script src="packet-filter.js?v=__BUST__"></script>
|
||||
<script src="packet-helpers.js?v=__BUST__"></script>
|
||||
<script src="packets.js?v=__BUST__"></script>
|
||||
<script src="geo-filter-overlay.js?v=__BUST__"></script>
|
||||
<script src="map.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="channels.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="nodes.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="traces.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="analytics.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="audio.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="audio-v1-constellation.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="audio-v2-constellation.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="audio-lab.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="live.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="observers.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="observer-detail.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="compare.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="node-analytics.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
<script src="perf.js?v=__BUST__" onerror="console.error('Failed to load:', this.src)"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
+134
-65
@@ -1,6 +1,10 @@
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
// getParsedPath / getParsedDecoded are in shared packet-helpers.js (loaded before this file)
|
||||
var getParsedPath = window.getParsedPath;
|
||||
var getParsedDecoded = window.getParsedDecoded;
|
||||
|
||||
// Status color helpers (read from CSS variables for theme support)
|
||||
function cssVar(name) { return getComputedStyle(document.documentElement).getPropertyValue(name).trim(); }
|
||||
function statusGreen() { return cssVar('--status-green') || '#22c55e'; }
|
||||
@@ -10,6 +14,7 @@
|
||||
let nodeData = {};
|
||||
let packetCount = 0;
|
||||
let activeAnims = 0;
|
||||
const MAX_CONCURRENT_ANIMS = 20;
|
||||
let nodeActivity = {};
|
||||
let recentPaths = [];
|
||||
let showGhostHops = localStorage.getItem('live-ghost-hops') !== 'false';
|
||||
@@ -368,12 +373,17 @@
|
||||
}
|
||||
}
|
||||
|
||||
function updateVCRClock(tsMs) {
|
||||
function vcrFormatTime(tsMs) {
|
||||
const d = new Date(tsMs);
|
||||
const hh = String(d.getHours()).padStart(2, '0');
|
||||
const mm = String(d.getMinutes()).padStart(2, '0');
|
||||
const ss = String(d.getSeconds()).padStart(2, '0');
|
||||
drawLcdText(`${hh}:${mm}:${ss}`, statusGreen());
|
||||
const utc = typeof getTimestampTimezone === 'function' && getTimestampTimezone() === 'utc';
|
||||
const hh = String(utc ? d.getUTCHours() : d.getHours()).padStart(2, '0');
|
||||
const mm = String(utc ? d.getUTCMinutes() : d.getMinutes()).padStart(2, '0');
|
||||
const ss = String(utc ? d.getUTCSeconds() : d.getSeconds()).padStart(2, '0');
|
||||
return `${hh}:${mm}:${ss}`;
|
||||
}
|
||||
|
||||
function updateVCRClock(tsMs) {
|
||||
drawLcdText(vcrFormatTime(tsMs), statusGreen());
|
||||
}
|
||||
|
||||
function updateVCRLcd() {
|
||||
@@ -425,8 +435,8 @@
|
||||
}
|
||||
|
||||
function dbPacketToLive(pkt) {
|
||||
const raw = JSON.parse(pkt.decoded_json || '{}');
|
||||
const hops = JSON.parse(pkt.path_json || '[]');
|
||||
const raw = getParsedDecoded(pkt);
|
||||
const hops = getParsedPath(pkt);
|
||||
const typeName = raw.type || pkt.payload_type_name || 'UNKNOWN';
|
||||
return {
|
||||
id: pkt.id, hash: pkt.hash,
|
||||
@@ -475,8 +485,13 @@
|
||||
}
|
||||
});
|
||||
|
||||
function packetTimestamp(pkt) {
|
||||
return new Date(pkt.timestamp || pkt.created_at || Date.now()).getTime();
|
||||
}
|
||||
if (typeof window !== 'undefined') window._live_packetTimestamp = packetTimestamp;
|
||||
|
||||
function bufferPacket(pkt) {
|
||||
pkt._ts = Date.now();
|
||||
pkt._ts = packetTimestamp(pkt);
|
||||
const entry = { ts: pkt._ts, pkt };
|
||||
VCR.buffer.push(entry);
|
||||
// Keep buffer capped at ~2000 — adjust playhead to avoid stale indices (#63)
|
||||
@@ -1060,8 +1075,7 @@
|
||||
const rect = timelineEl.getBoundingClientRect();
|
||||
const pct = (e.clientX - rect.left) / rect.width;
|
||||
const ts = Date.now() - VCR.timelineScope + pct * VCR.timelineScope;
|
||||
const d = new Date(ts);
|
||||
timeTooltip.textContent = d.toLocaleTimeString([], {hour:'2-digit',minute:'2-digit',second:'2-digit'});
|
||||
timeTooltip.textContent = vcrFormatTime(ts);
|
||||
timeTooltip.style.left = (e.clientX - rect.left) + 'px';
|
||||
timeTooltip.classList.remove('hidden');
|
||||
});
|
||||
@@ -1074,8 +1088,7 @@
|
||||
const rect = timelineEl.getBoundingClientRect();
|
||||
const pct = Math.max(0, Math.min(1, (touch.clientX - rect.left) / rect.width));
|
||||
const ts = Date.now() - VCR.timelineScope + pct * VCR.timelineScope;
|
||||
const d = new Date(ts);
|
||||
timeTooltip.textContent = d.toLocaleTimeString([], {hour:'2-digit',minute:'2-digit',second:'2-digit'});
|
||||
timeTooltip.textContent = vcrFormatTime(ts);
|
||||
timeTooltip.style.left = (touch.clientX - rect.left) + 'px';
|
||||
timeTooltip.classList.remove('hidden');
|
||||
});
|
||||
@@ -1436,7 +1449,7 @@
|
||||
for (const op of group.packets) {
|
||||
let opHops = [];
|
||||
if (op.path_json) {
|
||||
try { opHops = typeof op.path_json === 'string' ? JSON.parse(op.path_json) : op.path_json; } catch {}
|
||||
try { opHops = getParsedPath(op); } catch {}
|
||||
} else if (op.decoded?.path?.hops) {
|
||||
opHops = op.decoded.path.hops;
|
||||
}
|
||||
@@ -1581,6 +1594,21 @@
|
||||
window._livePruneStaleNodes = pruneStaleNodes;
|
||||
window._liveNodeMarkers = function() { return nodeMarkers; };
|
||||
window._liveNodeData = function() { return nodeData; };
|
||||
window._vcrFormatTime = vcrFormatTime;
|
||||
window._liveDbPacketToLive = dbPacketToLive;
|
||||
window._liveExpandToBufferEntries = expandToBufferEntries;
|
||||
window._liveSEG_MAP = SEG_MAP;
|
||||
window._liveBufferPacket = bufferPacket;
|
||||
window._liveVCR = function() { return VCR; };
|
||||
window._liveGetFavoritePubkeys = getFavoritePubkeys;
|
||||
window._livePacketInvolvesFavorite = packetInvolvesFavorite;
|
||||
window._liveIsNodeFavorited = isNodeFavorited;
|
||||
window._liveFormatLiveTimestampHtml = formatLiveTimestampHtml;
|
||||
window._liveResolveHopPositions = resolveHopPositions;
|
||||
window._liveVcrSpeedCycle = vcrSpeedCycle;
|
||||
window._liveVcrPause = vcrPause;
|
||||
window._liveVcrResumeLive = vcrResumeLive;
|
||||
window._liveVcrSetMode = vcrSetMode;
|
||||
|
||||
async function replayRecent() {
|
||||
try {
|
||||
@@ -1705,7 +1733,7 @@
|
||||
for (const fp of packets) {
|
||||
let fpHops = [];
|
||||
if (fp.path_json) {
|
||||
try { fpHops = typeof fp.path_json === 'string' ? JSON.parse(fp.path_json) : fp.path_json; } catch {}
|
||||
try { fpHops = getParsedPath(fp); } catch {}
|
||||
} else if (fp.decoded?.path?.hops) {
|
||||
fpHops = fp.decoded.path.hops;
|
||||
}
|
||||
@@ -1742,7 +1770,7 @@
|
||||
var qp = qd.payload || {};
|
||||
var hops;
|
||||
if (qpkt.path_json) {
|
||||
try { hops = typeof qpkt.path_json === 'string' ? JSON.parse(qpkt.path_json) : qpkt.path_json; } catch (e) { hops = qd.path?.hops || []; }
|
||||
try { hops = getParsedPath(qpkt); } catch (e) { hops = qd.path?.hops || []; }
|
||||
} else {
|
||||
hops = qd.path?.hops || [];
|
||||
}
|
||||
@@ -1843,6 +1871,7 @@
|
||||
|
||||
function animatePath(hopPositions, typeName, color, rawHex, onHop) {
|
||||
if (!animLayer || !pathsLayer) return;
|
||||
if (activeAnims >= MAX_CONCURRENT_ANIMS) return;
|
||||
activeAnims++;
|
||||
document.getElementById('liveAnimCount').textContent = activeAnims;
|
||||
let hopIndex = 0;
|
||||
@@ -1850,9 +1879,11 @@
|
||||
function nextHop() {
|
||||
if (hopIndex >= hopPositions.length) {
|
||||
activeAnims = Math.max(0, activeAnims - 1);
|
||||
document.getElementById('liveAnimCount').textContent = activeAnims;
|
||||
const countEl = document.getElementById('liveAnimCount');
|
||||
if (countEl) countEl.textContent = activeAnims;
|
||||
return;
|
||||
}
|
||||
if (!animLayer) return;
|
||||
// Audio hook: notify per-hop callback
|
||||
if (onHop) try { onHop(hopIndex, hopPositions.length, hopPositions[hopIndex]); } catch (e) {}
|
||||
const hp = hopPositions[hopIndex];
|
||||
@@ -1864,12 +1895,22 @@
|
||||
radius: 3, fillColor: '#94a3b8', fillOpacity: 0.35, color: '#94a3b8', weight: 1, opacity: 0.5
|
||||
}).addTo(animLayer);
|
||||
let pulseUp = true;
|
||||
const pulseTimer = setInterval(() => {
|
||||
if (!animLayer.hasLayer(ghost)) { clearInterval(pulseTimer); return; }
|
||||
ghost.setStyle({ fillOpacity: pulseUp ? 0.6 : 0.25, opacity: pulseUp ? 0.7 : 0.4 });
|
||||
pulseUp = !pulseUp;
|
||||
}, 600);
|
||||
setTimeout(() => { clearInterval(pulseTimer); if (animLayer.hasLayer(ghost)) animLayer.removeLayer(ghost); }, 3000);
|
||||
let lastPulseTime = performance.now();
|
||||
const pulseExpiry = lastPulseTime + 3000;
|
||||
function ghostPulse(now) {
|
||||
if (!animLayer || !animLayer.hasLayer(ghost)) return;
|
||||
if (now >= pulseExpiry) {
|
||||
if (animLayer && animLayer.hasLayer(ghost)) animLayer.removeLayer(ghost);
|
||||
return;
|
||||
}
|
||||
if (now - lastPulseTime >= 600) {
|
||||
lastPulseTime = now;
|
||||
ghost.setStyle({ fillOpacity: pulseUp ? 0.6 : 0.25, opacity: pulseUp ? 0.7 : 0.4 });
|
||||
pulseUp = !pulseUp;
|
||||
}
|
||||
requestAnimationFrame(ghostPulse);
|
||||
}
|
||||
requestAnimationFrame(ghostPulse);
|
||||
}
|
||||
} else {
|
||||
pulseNode(hp.key, hp.pos, typeName);
|
||||
@@ -1913,20 +1954,30 @@
|
||||
}).addTo(animLayer);
|
||||
|
||||
let r = 2, op = 0.9;
|
||||
const iv = setInterval(() => {
|
||||
r += 1.5; op -= 0.03;
|
||||
if (op <= 0) {
|
||||
clearInterval(iv);
|
||||
let lastPulse = performance.now();
|
||||
const pulseStart = lastPulse;
|
||||
function animatePulse(now) {
|
||||
if (now - pulseStart > 2000) {
|
||||
try { animLayer.removeLayer(ring); } catch {}
|
||||
return;
|
||||
}
|
||||
try {
|
||||
ring.setRadius(r);
|
||||
ring.setStyle({ opacity: op, weight: Math.max(0.3, 3 - r * 0.04) });
|
||||
} catch { clearInterval(iv); }
|
||||
}, 26);
|
||||
// Safety cleanup — never let a ring live longer than 2s
|
||||
setTimeout(() => { clearInterval(iv); try { animLayer.removeLayer(ring); } catch {} }, 2000);
|
||||
const elapsed = now - lastPulse;
|
||||
if (elapsed >= 26) {
|
||||
const ticks = Math.min(Math.floor(elapsed / 26), 4);
|
||||
r += 1.5 * ticks; op -= 0.03 * ticks;
|
||||
lastPulse = now;
|
||||
if (op <= 0) {
|
||||
try { animLayer.removeLayer(ring); } catch {}
|
||||
return;
|
||||
}
|
||||
try {
|
||||
ring.setRadius(r);
|
||||
ring.setStyle({ opacity: op, weight: Math.max(0.3, 3 - r * 0.04) });
|
||||
} catch { return; }
|
||||
}
|
||||
requestAnimationFrame(animatePulse);
|
||||
}
|
||||
requestAnimationFrame(animatePulse);
|
||||
|
||||
const baseColor = marker._baseColor || '#6b7280';
|
||||
const baseSize = marker._baseSize || 6;
|
||||
@@ -2239,43 +2290,61 @@
|
||||
radius: 3.5, fillColor: '#fff', fillOpacity: 1, color: color, weight: 1.5
|
||||
}).addTo(animLayer);
|
||||
|
||||
const interval = setInterval(() => {
|
||||
step++;
|
||||
const lat = from[0] + latStep * step;
|
||||
const lon = from[1] + lonStep * step;
|
||||
currentCoords.push([lat, lon]);
|
||||
line.setLatLngs(currentCoords);
|
||||
contrail.setLatLngs(currentCoords);
|
||||
dot.setLatLng([lat, lon]);
|
||||
|
||||
if (step >= steps) {
|
||||
clearInterval(interval);
|
||||
if (animLayer) animLayer.removeLayer(dot);
|
||||
|
||||
recentPaths.push({ line, glowLine: contrail, time: Date.now() });
|
||||
while (recentPaths.length > 5) {
|
||||
const old = recentPaths.shift();
|
||||
if (pathsLayer) { pathsLayer.removeLayer(old.line); pathsLayer.removeLayer(old.glowLine); }
|
||||
let lastStep = performance.now();
|
||||
function animateLine(now) {
|
||||
const elapsed = now - lastStep;
|
||||
if (elapsed >= 33) {
|
||||
const ticks = Math.min(Math.floor(elapsed / 33), 4);
|
||||
lastStep = now;
|
||||
for (let t = 0; t < ticks && step < steps; t++) {
|
||||
step++;
|
||||
const lat = from[0] + latStep * step;
|
||||
const lon = from[1] + lonStep * step;
|
||||
currentCoords.push([lat, lon]);
|
||||
}
|
||||
const lastPt = currentCoords[currentCoords.length - 1];
|
||||
line.setLatLngs(currentCoords);
|
||||
contrail.setLatLngs(currentCoords);
|
||||
dot.setLatLng(lastPt);
|
||||
|
||||
setTimeout(() => {
|
||||
let fadeOp = mainOpacity;
|
||||
const fi = setInterval(() => {
|
||||
fadeOp -= 0.1;
|
||||
if (fadeOp <= 0) {
|
||||
clearInterval(fi);
|
||||
if (pathsLayer) { pathsLayer.removeLayer(line); pathsLayer.removeLayer(contrail); }
|
||||
recentPaths = recentPaths.filter(p => p.line !== line);
|
||||
} else {
|
||||
line.setStyle({ opacity: fadeOp });
|
||||
contrail.setStyle({ opacity: fadeOp * 0.15 });
|
||||
if (step >= steps) {
|
||||
if (animLayer) animLayer.removeLayer(dot);
|
||||
|
||||
recentPaths.push({ line, glowLine: contrail, time: Date.now() });
|
||||
while (recentPaths.length > 5) {
|
||||
const old = recentPaths.shift();
|
||||
if (pathsLayer) { pathsLayer.removeLayer(old.line); pathsLayer.removeLayer(old.glowLine); }
|
||||
}
|
||||
|
||||
setTimeout(() => {
|
||||
let fadeOp = mainOpacity;
|
||||
let lastFade = performance.now();
|
||||
function animateFade(now) {
|
||||
const fadeElapsed = now - lastFade;
|
||||
if (fadeElapsed >= 52) {
|
||||
const fadeTicks = Math.min(Math.floor(fadeElapsed / 52), 4);
|
||||
lastFade = now;
|
||||
fadeOp -= 0.1 * fadeTicks;
|
||||
if (fadeOp <= 0) {
|
||||
if (pathsLayer) { pathsLayer.removeLayer(line); pathsLayer.removeLayer(contrail); }
|
||||
recentPaths = recentPaths.filter(p => p.line !== line);
|
||||
return;
|
||||
}
|
||||
line.setStyle({ opacity: fadeOp });
|
||||
contrail.setStyle({ opacity: fadeOp * 0.15 });
|
||||
}
|
||||
requestAnimationFrame(animateFade);
|
||||
}
|
||||
}, 52);
|
||||
}, 800);
|
||||
requestAnimationFrame(animateFade);
|
||||
}, 800);
|
||||
|
||||
if (onComplete) onComplete();
|
||||
if (onComplete) onComplete();
|
||||
return;
|
||||
}
|
||||
}
|
||||
}, 33);
|
||||
requestAnimationFrame(animateLine);
|
||||
}
|
||||
requestAnimationFrame(animateLine);
|
||||
}
|
||||
|
||||
function showHeatMap() {
|
||||
|
||||
+66
-2
@@ -10,6 +10,8 @@
|
||||
let targetNodeKey = null;
|
||||
let observers = [];
|
||||
let filters = { repeater: true, companion: true, room: true, sensor: true, observer: true, lastHeard: '30d', neighbors: false, clusters: false, hashLabels: localStorage.getItem('meshcore-map-hash-labels') !== 'false', statusFilter: localStorage.getItem('meshcore-map-status-filter') || 'all' };
|
||||
let selectedReferenceNode = null; // pubkey of the reference node for neighbor filtering
|
||||
let neighborPubkeys = null; // Set of pubkeys that are direct neighbors of selected node
|
||||
let wsHandler = null;
|
||||
let heatLayer = null;
|
||||
let geoFilterLayer = null;
|
||||
@@ -108,6 +110,8 @@
|
||||
<fieldset class="mc-section">
|
||||
<legend class="mc-label">Filters</legend>
|
||||
<label for="mcNeighbors"><input type="checkbox" id="mcNeighbors"> Show direct neighbors</label>
|
||||
<div id="mcNeighborRef" style="display:none;font-size:11px;color:var(--text-muted);margin-top:2px;padding-left:20px;">Ref: <span id="mcNeighborRefName">—</span></div>
|
||||
<div id="mcNeighborHint" style="display:none;font-size:11px;color:var(--text-muted);margin-top:2px;padding-left:20px;">Click a node marker to set the reference node</div>
|
||||
</fieldset>
|
||||
<fieldset class="mc-section">
|
||||
<legend class="mc-label">Last Heard</legend>
|
||||
@@ -207,7 +211,19 @@
|
||||
const heatEl = document.getElementById('mcHeatmap');
|
||||
if (localStorage.getItem('meshcore-map-heatmap') === 'true') { heatEl.checked = true; }
|
||||
heatEl.addEventListener('change', e => { localStorage.setItem('meshcore-map-heatmap', e.target.checked); toggleHeatmap(e.target.checked); });
|
||||
document.getElementById('mcNeighbors').addEventListener('change', e => { filters.neighbors = e.target.checked; renderMarkers(); });
|
||||
document.getElementById('mcNeighbors').addEventListener('change', e => {
|
||||
filters.neighbors = e.target.checked;
|
||||
const hintEl = document.getElementById('mcNeighborHint');
|
||||
const refEl = document.getElementById('mcNeighborRef');
|
||||
if (e.target.checked && !selectedReferenceNode) {
|
||||
hintEl.style.display = 'block';
|
||||
refEl.style.display = 'none';
|
||||
} else {
|
||||
hintEl.style.display = 'none';
|
||||
refEl.style.display = selectedReferenceNode ? 'block' : 'none';
|
||||
}
|
||||
renderMarkers();
|
||||
});
|
||||
|
||||
// Hash Labels toggle
|
||||
const hashLabelEl = document.getElementById('mcHashLabels');
|
||||
@@ -646,6 +662,11 @@
|
||||
const status = getNodeStatus(role, lastMs);
|
||||
if (status !== filters.statusFilter) return false;
|
||||
}
|
||||
// Neighbor filter: show only the reference node and its direct neighbors
|
||||
if (filters.neighbors && selectedReferenceNode && neighborPubkeys) {
|
||||
const pk = n.public_key;
|
||||
if (pk !== selectedReferenceNode && !neighborPubkeys.has(pk)) return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
@@ -724,6 +745,43 @@
|
||||
</div>`;
|
||||
}
|
||||
|
||||
async function selectReferenceNode(pubkey, name) {
|
||||
selectedReferenceNode = pubkey;
|
||||
neighborPubkeys = new Set();
|
||||
try {
|
||||
const data = await api('/nodes/' + pubkey + '/paths');
|
||||
const paths = data.paths || [];
|
||||
for (const p of paths) {
|
||||
const hops = p.hops || [];
|
||||
// Find the reference node in the path; direct neighbors are adjacent hops
|
||||
for (let i = 0; i < hops.length; i++) {
|
||||
if (hops[i].pubkey === pubkey) {
|
||||
if (i > 0 && hops[i - 1].pubkey) neighborPubkeys.add(hops[i - 1].pubkey);
|
||||
if (i < hops.length - 1 && hops[i + 1].pubkey) neighborPubkeys.add(hops[i + 1].pubkey);
|
||||
}
|
||||
}
|
||||
// (Redundant block removed — the main loop above already handles first/last hops)
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn('Failed to fetch neighbor paths for', pubkey, '— neighbor filter may be incomplete:', e);
|
||||
neighborPubkeys = new Set();
|
||||
}
|
||||
// Update sidebar UI
|
||||
const refEl = document.getElementById('mcNeighborRef');
|
||||
const refNameEl = document.getElementById('mcNeighborRefName');
|
||||
const hintEl = document.getElementById('mcNeighborHint');
|
||||
if (refEl) { refEl.style.display = 'block'; }
|
||||
if (refNameEl) { refNameEl.textContent = name || pubkey.slice(0, 8); }
|
||||
if (hintEl) { hintEl.style.display = 'none'; }
|
||||
// Auto-enable the neighbors filter
|
||||
filters.neighbors = true;
|
||||
const cb = document.getElementById('mcNeighbors');
|
||||
if (cb) cb.checked = true;
|
||||
renderMarkers();
|
||||
}
|
||||
// Expose for popup onclick
|
||||
window._mapSelectRefNode = selectReferenceNode;
|
||||
|
||||
function buildPopup(node) {
|
||||
const key = node.public_key ? truncate(node.public_key, 16) : '—';
|
||||
const loc = (node.lat && node.lon) ? `${node.lat.toFixed(5)}, ${node.lon.toFixed(5)}` : '—';
|
||||
@@ -749,7 +807,10 @@
|
||||
<dt style="color:var(--text-muted);float:left;clear:left;width:80px;padding:2px 0;">Adverts</dt>
|
||||
<dd style="margin-left:88px;padding:2px 0;">${node.advert_count || 0}</dd>
|
||||
</dl>
|
||||
<div style="margin-top:8px;clear:both;"><a href="#/nodes/${node.public_key}" style="color:var(--accent);font-size:12px;">View Node →</a></div>
|
||||
<div style="margin-top:8px;clear:both;">
|
||||
<a href="#/nodes/${node.public_key}" style="color:var(--accent);font-size:12px;">View Node →</a>
|
||||
${node.public_key ? ` · <a href="#" onclick="event.preventDefault();window._mapSelectRefNode('${safeEsc(node.public_key.replace(/\\/g, '\\\\').replace(/'/g, "\\'").replace(/</g, '\\x3c'))}','${safeEsc((node.name || 'Unknown').replace(/\\/g, '\\\\').replace(/'/g, "\\'").replace(/</g, '\\x3c'))}')" style="color:var(--accent);font-size:12px;">Show Neighbors</a>` : ''}
|
||||
</div>
|
||||
</div>`;
|
||||
}
|
||||
|
||||
@@ -775,6 +836,9 @@
|
||||
routeLayer = null;
|
||||
if (heatLayer) { heatLayer = null; }
|
||||
geoFilterLayer = null;
|
||||
selectedReferenceNode = null;
|
||||
neighborPubkeys = null;
|
||||
delete window._mapSelectRefNode;
|
||||
}
|
||||
|
||||
function toggleHeatmap(on) {
|
||||
|
||||
+42
-2
@@ -228,11 +228,39 @@
|
||||
loadNodes();
|
||||
// Auto-refresh when ADVERT packets arrive via WebSocket (fixes #131)
|
||||
wsHandler = debouncedOnWS(function (msgs) {
|
||||
if (msgs.some(isAdvertMessage)) {
|
||||
_allNodes = null;
|
||||
const advertMsgs = msgs.filter(isAdvertMessage);
|
||||
if (!advertMsgs.length) return;
|
||||
|
||||
if (!_allNodes) {
|
||||
invalidateApiCache('/nodes');
|
||||
loadNodes(true);
|
||||
return;
|
||||
}
|
||||
|
||||
let needReload = false;
|
||||
for (const m of advertMsgs) {
|
||||
const payload = m.data && m.data.decoded && m.data.decoded.payload;
|
||||
const pubKey = payload && (payload.pubKey || payload.public_key);
|
||||
if (!pubKey) { needReload = true; break; }
|
||||
|
||||
const existing = _allNodes.find(n => n.public_key === pubKey);
|
||||
if (existing) {
|
||||
if (payload.name) existing.name = payload.name;
|
||||
if (payload.lat != null) existing.lat = payload.lat;
|
||||
if (payload.lon != null) existing.lon = payload.lon;
|
||||
const ts = m.data.packet && (m.data.packet.timestamp || m.data.packet.first_seen);
|
||||
if (ts) existing.last_seen = ts;
|
||||
} else {
|
||||
needReload = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (needReload) {
|
||||
_allNodes = null;
|
||||
invalidateApiCache('/nodes');
|
||||
}
|
||||
loadNodes(true);
|
||||
}, 5000);
|
||||
}
|
||||
|
||||
@@ -929,4 +957,16 @@
|
||||
|
||||
// Test hooks
|
||||
window._nodesIsAdvertMessage = isAdvertMessage;
|
||||
window._nodesGetAllNodes = function() { return _allNodes; };
|
||||
window._nodesSetAllNodes = function(n) { _allNodes = n; };
|
||||
window._nodesToggleSort = toggleSort;
|
||||
window._nodesSortNodes = sortNodes;
|
||||
window._nodesSortArrow = sortArrow;
|
||||
window._nodesGetSortState = function() { return sortState; };
|
||||
window._nodesSetSortState = function(s) { sortState = s; };
|
||||
window._nodesSyncClaimedToFavorites = syncClaimedToFavorites;
|
||||
window._nodesRenderNodeTimestampHtml = renderNodeTimestampHtml;
|
||||
window._nodesRenderNodeTimestampText = renderNodeTimestampText;
|
||||
window._nodesGetStatusInfo = getStatusInfo;
|
||||
window._nodesGetStatusTooltip = getStatusTooltip;
|
||||
})();
|
||||
|
||||
@@ -0,0 +1,43 @@
|
||||
/* === CoreScope — packet-helpers.js (shared packet utilities) === */
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Cached JSON.parse helpers for packet data (issue #387).
|
||||
* Avoids repeated parsing of path_json / decoded_json on the same packet object.
|
||||
* Results are cached as _parsedPath / _parsedDecoded properties on the packet.
|
||||
*
|
||||
* Handles pre-parsed objects (non-string values) gracefully — returns them as-is.
|
||||
*/
|
||||
|
||||
window.getParsedPath = function getParsedPath(p) {
|
||||
if (p._parsedPath !== undefined) return p._parsedPath;
|
||||
var raw = p.path_json;
|
||||
if (typeof raw !== 'string') {
|
||||
p._parsedPath = Array.isArray(raw) ? raw : [];
|
||||
return p._parsedPath;
|
||||
}
|
||||
try { p._parsedPath = JSON.parse(raw) || []; } catch (e) { p._parsedPath = []; }
|
||||
return p._parsedPath;
|
||||
};
|
||||
|
||||
/**
|
||||
* Clear cached _parsedPath/_parsedDecoded from a packet object.
|
||||
* Must be called after spreading a parent packet into an observation/child,
|
||||
* otherwise the child inherits stale cached values from the parent (issue #504).
|
||||
*/
|
||||
window.clearParsedCache = function clearParsedCache(p) {
|
||||
delete p._parsedPath;
|
||||
delete p._parsedDecoded;
|
||||
return p;
|
||||
};
|
||||
|
||||
window.getParsedDecoded = function getParsedDecoded(p) {
|
||||
if (p._parsedDecoded !== undefined) return p._parsedDecoded;
|
||||
var raw = p.decoded_json;
|
||||
if (typeof raw !== 'string') {
|
||||
p._parsedDecoded = (raw && typeof raw === 'object') ? raw : {};
|
||||
return p._parsedDecoded;
|
||||
}
|
||||
try { p._parsedDecoded = JSON.parse(raw) || {}; } catch (e) { p._parsedDecoded = {}; }
|
||||
return p._parsedDecoded;
|
||||
};
|
||||
+79
-42
@@ -8,7 +8,7 @@
|
||||
// Resolve observer_id to friendly name from loaded observers list
|
||||
function obsName(id) {
|
||||
if (!id) return '—';
|
||||
const o = observers.find(ob => ob.id === id);
|
||||
const o = observerMap.get(id);
|
||||
if (!o) return id;
|
||||
return o.iata ? `${o.name} (${o.iata})` : o.name;
|
||||
}
|
||||
@@ -21,6 +21,7 @@
|
||||
let packetsPaused = false;
|
||||
let pauseBuffer = [];
|
||||
let observers = [];
|
||||
let observerMap = new Map(); // id → observer for O(1) lookups (#383)
|
||||
let regionMap = {};
|
||||
const TYPE_NAMES = { 0:'Request', 1:'Response', 2:'Direct Msg', 3:'ACK', 4:'Advert', 5:'Channel Msg', 7:'Anon Req', 8:'Path', 9:'Trace', 11:'Control' };
|
||||
function typeName(t) { return TYPE_NAMES[t] ?? `Type ${t}`; }
|
||||
@@ -34,9 +35,18 @@
|
||||
let hopNameCache = {};
|
||||
let showHexHashes = localStorage.getItem('meshcore-hex-hashes') === 'true';
|
||||
let filtersBuilt = false;
|
||||
let _renderTimer = null;
|
||||
function scheduleRender() {
|
||||
clearTimeout(_renderTimer);
|
||||
_renderTimer = setTimeout(() => renderTableRows(), 200);
|
||||
}
|
||||
const PANEL_WIDTH_KEY = 'meshcore-panel-width';
|
||||
const PANEL_CLOSE_HTML = '<button class="panel-close-btn" title="Close detail pane (Esc)">✕</button>';
|
||||
|
||||
// getParsedPath / getParsedDecoded are in shared packet-helpers.js (loaded before this file)
|
||||
const getParsedPath = window.getParsedPath;
|
||||
const getParsedDecoded = window.getParsedDecoded;
|
||||
|
||||
// --- Virtual scroll state ---
|
||||
const VSCROLL_ROW_HEIGHT = 36; // estimated row height in px
|
||||
const VSCROLL_BUFFER = 30; // extra rows above/below viewport
|
||||
@@ -259,6 +269,7 @@
|
||||
if (obs) {
|
||||
expandedHashes.add(h);
|
||||
const obsPacket = {...data.packet, observer_id: obs.observer_id, observer_name: obs.observer_name, snr: obs.snr, rssi: obs.rssi, path_json: obs.path_json, timestamp: obs.timestamp, first_seen: obs.timestamp};
|
||||
clearParsedCache(obsPacket);
|
||||
selectPacket(obs.id, h, {packet: obsPacket, breakdown: data.breakdown, observations: data.observations}, obs.id);
|
||||
} else {
|
||||
selectPacket(data.packet.id, h, data);
|
||||
@@ -314,7 +325,7 @@
|
||||
panel.appendChild(content);
|
||||
const pkt = data.packet;
|
||||
try {
|
||||
const hops = JSON.parse(pkt.path_json || '[]');
|
||||
const hops = getParsedPath(pkt);
|
||||
const newHops = hops.filter(h => !(h in hopNameCache));
|
||||
if (newHops.length) await resolveHops(newHops);
|
||||
} catch {}
|
||||
@@ -326,6 +337,7 @@
|
||||
wsHandler = debouncedOnWS(function (msgs) {
|
||||
if (packetsPaused) {
|
||||
pauseBuffer.push(...msgs);
|
||||
if (pauseBuffer.length > 2000) pauseBuffer = pauseBuffer.slice(-2000);
|
||||
const btn = document.getElementById('pktPauseBtn');
|
||||
if (btn) btn.textContent = '▶ ' + pauseBuffer.length;
|
||||
return;
|
||||
@@ -349,7 +361,7 @@
|
||||
if (filters.hash && p.hash !== filters.hash) return false;
|
||||
if (RegionFilter.getRegionParam()) {
|
||||
const selectedRegions = RegionFilter.getRegionParam().split(',');
|
||||
const obs = observers.find(o => o.id === p.observer_id);
|
||||
const obs = observerMap.get(p.observer_id);
|
||||
if (!obs || !selectedRegions.includes(obs.iata)) return false;
|
||||
}
|
||||
if (filters.node && !(p.decoded_json || '').includes(filters.node)) return false;
|
||||
@@ -360,7 +372,7 @@
|
||||
// Resolve any new hops, then update and re-render
|
||||
const newHops = new Set();
|
||||
for (const p of filtered) {
|
||||
try { JSON.parse(p.path_json || '[]').forEach(h => { if (!(h in hopNameCache)) newHops.add(h); }); } catch {}
|
||||
try { getParsedPath(p).forEach(h => { if (!(h in hopNameCache)) newHops.add(h); }); } catch {}
|
||||
}
|
||||
(newHops.size ? resolveHops([...newHops]) : Promise.resolve()).then(() => {
|
||||
if (groupByHash) {
|
||||
@@ -382,6 +394,7 @@
|
||||
// Update expanded children if this group is expanded
|
||||
if (expandedHashes.has(h) && existing._children) {
|
||||
existing._children.unshift(p);
|
||||
if (existing._children.length > 200) existing._children.length = 200;
|
||||
sortGroupChildren(existing);
|
||||
}
|
||||
} else {
|
||||
@@ -402,11 +415,16 @@
|
||||
if (h) hashIndex.set(h, newGroup);
|
||||
}
|
||||
}
|
||||
// Re-sort by latest DESC
|
||||
// Re-sort by latest DESC, then evict oldest beyond the limit
|
||||
packets.sort((a, b) => (b.latest || '').localeCompare(a.latest || ''));
|
||||
if (packets.length > PACKET_LIMIT) {
|
||||
const evicted = packets.splice(PACKET_LIMIT);
|
||||
for (const p of evicted) { if (p.hash) hashIndex.delete(p.hash); }
|
||||
}
|
||||
} else {
|
||||
// Flat mode: prepend
|
||||
// Flat mode: prepend, then evict oldest beyond the limit
|
||||
packets = filtered.concat(packets);
|
||||
if (packets.length > PACKET_LIMIT) packets.length = PACKET_LIMIT;
|
||||
}
|
||||
totalCount += filtered.length;
|
||||
// Debounce WS-triggered renders to avoid rapid full rebuilds
|
||||
@@ -417,6 +435,7 @@
|
||||
}
|
||||
|
||||
function destroy() {
|
||||
clearTimeout(_renderTimer);
|
||||
if (wsHandler) offWS(wsHandler);
|
||||
wsHandler = null;
|
||||
detachVScrollListener();
|
||||
@@ -439,6 +458,7 @@
|
||||
hopNameCache = {};
|
||||
totalCount = 0;
|
||||
observers = [];
|
||||
observerMap = new Map();
|
||||
directPacketId = null;
|
||||
directPacketHash = null;
|
||||
groupByHash = true;
|
||||
@@ -450,6 +470,7 @@
|
||||
try {
|
||||
const data = await api('/observers', { ttl: CLIENT_TTL.observers });
|
||||
observers = data.observers || [];
|
||||
observerMap = new Map(observers.map(o => [o.id, o]));
|
||||
} catch {}
|
||||
}
|
||||
|
||||
@@ -481,7 +502,7 @@
|
||||
await Promise.all(multiObs.map(async (p) => {
|
||||
try {
|
||||
const d = await api(`/packets/${p.hash}`);
|
||||
if (d?.observations) p._children = d.observations.map(o => ({...d.packet, ...o, _isObservation: true}));
|
||||
if (d?.observations) p._children = d.observations.map(o => clearParsedCache({...d.packet, ...o, _isObservation: true}));
|
||||
} catch {}
|
||||
}));
|
||||
// Flatten: replace grouped packets with individual observations
|
||||
@@ -500,7 +521,7 @@
|
||||
// Pre-resolve all path hops to node names
|
||||
const allHops = new Set();
|
||||
for (const p of packets) {
|
||||
try { const path = JSON.parse(p.path_json || '[]'); path.forEach(h => allHops.add(h)); } catch {}
|
||||
try { getParsedPath(p).forEach(h => allHops.add(h)); } catch {}
|
||||
}
|
||||
if (allHops.size) await resolveHops([...allHops]);
|
||||
|
||||
@@ -509,7 +530,7 @@
|
||||
for (const p of packets) {
|
||||
if (!p.observer_id) continue;
|
||||
try {
|
||||
const path = JSON.parse(p.path_json || '[]');
|
||||
const path = getParsedPath(p);
|
||||
const ambiguous = path.filter(h => hopNameCache[h]?.ambiguous);
|
||||
if (ambiguous.length) {
|
||||
if (!hopsByObserver[p.observer_id]) hopsByObserver[p.observer_id] = new Set();
|
||||
@@ -696,7 +717,7 @@
|
||||
obsTrigger.textContent = 'All Observers ▾';
|
||||
} else if (selectedObservers.size === 1) {
|
||||
const id = [...selectedObservers][0];
|
||||
const o = observers.find(x => String(x.id) === id);
|
||||
const o = observerMap.get(id) || observerMap.get(Number(id));
|
||||
obsTrigger.textContent = (o ? (o.name || o.id) : id) + ' ▾';
|
||||
} else {
|
||||
obsTrigger.textContent = selectedObservers.size + ' Observers ▾';
|
||||
@@ -817,7 +838,7 @@
|
||||
try {
|
||||
const data = await api(`/packets/${p.hash}`);
|
||||
if (data?.packet && data.observations) {
|
||||
p._children = data.observations.map(o => ({...data.packet, ...o, _isObservation: true}));
|
||||
p._children = data.observations.map(o => clearParsedCache({...data.packet, ...o, _isObservation: true}));
|
||||
p._fetchedData = data;
|
||||
}
|
||||
} catch {}
|
||||
@@ -830,7 +851,7 @@
|
||||
// Resolve any new hops from updated header paths
|
||||
const newHops = new Set();
|
||||
for (const p of packets) {
|
||||
try { JSON.parse(p.path_json || '[]').forEach(h => { if (!(h in hopNameCache)) newHops.add(h); }); } catch {}
|
||||
try { getParsedPath(p).forEach(h => { if (!(h in hopNameCache)) newHops.add(h); }); } catch {}
|
||||
}
|
||||
if (newHops.size) await resolveHops([...newHops]);
|
||||
renderTableRows();
|
||||
@@ -990,6 +1011,7 @@
|
||||
if (child) {
|
||||
const parentData = group._fetchedData;
|
||||
const obsPacket = parentData ? {...parentData.packet, observer_id: child.observer_id, observer_name: child.observer_name, snr: child.snr, rssi: child.rssi, path_json: child.path_json, timestamp: child.timestamp, first_seen: child.timestamp} : child;
|
||||
if (parentData) { clearParsedCache(obsPacket); }
|
||||
selectPacket(child.id, parentHash, {packet: obsPacket, breakdown: parentData?.breakdown, observations: parentData?.observations}, child.id);
|
||||
}
|
||||
}
|
||||
@@ -1023,7 +1045,7 @@
|
||||
headerPathJson = match.path_json;
|
||||
}
|
||||
}
|
||||
const groupRegion = headerObserverId ? (observers.find(o => o.id === headerObserverId)?.iata || '') : '';
|
||||
const groupRegion = headerObserverId ? (observerMap.get(headerObserverId)?.iata || '') : '';
|
||||
let groupPath = [];
|
||||
try { groupPath = JSON.parse(headerPathJson || '[]'); } catch {}
|
||||
const groupPathStr = renderPath(groupPath, headerObserverId);
|
||||
@@ -1043,7 +1065,7 @@
|
||||
<td class="col-observer">${isSingle ? truncate(obsName(headerObserverId), 16) : truncate(obsName(headerObserverId), 10) + (p.observer_count > 1 ? ' +' + (p.observer_count - 1) : '')}</td>
|
||||
<td class="col-path"><span class="path-hops">${groupPathStr}</span></td>
|
||||
<td class="col-rpt">${p.observation_count > 1 ? '<span class="badge badge-obs" title="Seen ' + p.observation_count + ' times">👁 ' + p.observation_count + '</span>' : (isSingle ? '' : p.count)}</td>
|
||||
<td class="col-details">${getDetailPreview((() => { try { return JSON.parse(p.decoded_json || '{}'); } catch { return {}; } })())}</td>
|
||||
<td class="col-details">${getDetailPreview(getParsedDecoded(p))}</td>
|
||||
</tr>`;
|
||||
if (isExpanded && p._children) {
|
||||
let visibleChildren = p._children;
|
||||
@@ -1055,9 +1077,8 @@
|
||||
const typeClass = payloadTypeColor(c.payload_type);
|
||||
const size = c.raw_hex ? Math.floor(c.raw_hex.length / 2) : 0;
|
||||
const childHashBytes = ((parseInt(c.raw_hex?.slice(2, 4), 16) || 0) >> 6) + 1;
|
||||
const childRegion = c.observer_id ? (observers.find(o => o.id === c.observer_id)?.iata || '') : '';
|
||||
let childPath = [];
|
||||
try { childPath = JSON.parse(c.path_json || '[]'); } catch {}
|
||||
const childRegion = c.observer_id ? (observerMap.get(c.observer_id)?.iata || '') : '';
|
||||
const childPath = getParsedPath(c);
|
||||
const childPathStr = renderPath(childPath, c.observer_id);
|
||||
html += `<tr class="group-child" data-id="${c.id}" data-hash="${c.hash || ''}" data-action="select-observation" data-value="${c.id}" data-parent-hash="${p.hash}" tabindex="0" role="row">
|
||||
<td></td><td class="col-region">${childRegion ? `<span class="badge-region">${childRegion}</span>` : '—'}</td>
|
||||
@@ -1069,7 +1090,7 @@
|
||||
<td class="col-observer">${truncate(obsName(c.observer_id), 16)}</td>
|
||||
<td class="col-path"><span class="path-hops">${childPathStr}</span></td>
|
||||
<td class="col-rpt"></td>
|
||||
<td class="col-details">${getDetailPreview((() => { try { return JSON.parse(c.decoded_json || '{}'); } catch { return {}; } })())}</td>
|
||||
<td class="col-details">${getDetailPreview(getParsedDecoded(c))}</td>
|
||||
</tr>`;
|
||||
}
|
||||
}
|
||||
@@ -1078,10 +1099,9 @@
|
||||
|
||||
// Build HTML for a single flat (ungrouped) packet row
|
||||
function buildFlatRowHtml(p) {
|
||||
let decoded, pathHops = [];
|
||||
try { decoded = JSON.parse(p.decoded_json || '{}'); } catch {}
|
||||
try { pathHops = JSON.parse(p.path_json || '[]') || []; } catch {}
|
||||
const region = p.observer_id ? (observers.find(o => o.id === p.observer_id)?.iata || '') : '';
|
||||
const decoded = getParsedDecoded(p);
|
||||
const pathHops = getParsedPath(p);
|
||||
const region = p.observer_id ? (observerMap.get(p.observer_id)?.iata || '') : '';
|
||||
const typeName = payloadTypeName(p.payload_type);
|
||||
const typeClass = payloadTypeColor(p.payload_type);
|
||||
const size = p.raw_hex ? Math.floor(p.raw_hex.length / 2) : 0;
|
||||
@@ -1131,7 +1151,6 @@
|
||||
}
|
||||
_cumulativeOffsetsCache = offsets;
|
||||
return offsets;
|
||||
return offsets;
|
||||
}
|
||||
|
||||
function renderVisibleRows() {
|
||||
@@ -1399,7 +1418,7 @@
|
||||
// Resolve path hops for detail view
|
||||
const pkt = data.packet;
|
||||
try {
|
||||
const hops = JSON.parse(pkt.path_json || '[]');
|
||||
const hops = getParsedPath(pkt);
|
||||
const newHops = hops.filter(h => !(h in hopNameCache));
|
||||
if (newHops.length) await resolveHops(newHops);
|
||||
} catch {}
|
||||
@@ -1417,10 +1436,8 @@
|
||||
const pkt = data.packet;
|
||||
const breakdown = data.breakdown || {};
|
||||
const ranges = breakdown.ranges || [];
|
||||
let decoded;
|
||||
try { decoded = JSON.parse(pkt.decoded_json); } catch { decoded = {}; }
|
||||
let pathHops;
|
||||
try { pathHops = JSON.parse(pkt.path_json || '[]') || []; } catch { pathHops = []; }
|
||||
const decoded = getParsedDecoded(pkt);
|
||||
const pathHops = getParsedPath(pkt);
|
||||
|
||||
// Resolve sender GPS — from packet directly, or from known node in DB
|
||||
let senderLat = decoded.lat != null ? decoded.lat : (decoded.latitude || null);
|
||||
@@ -1596,10 +1613,8 @@
|
||||
const replayPackets = [];
|
||||
if (obs.length > 1) {
|
||||
for (const o of obs) {
|
||||
let oPath;
|
||||
try { oPath = JSON.parse(o.path_json || '[]'); } catch { oPath = pathHops; }
|
||||
let oDec;
|
||||
try { oDec = JSON.parse(o.decoded_json || '{}'); } catch { oDec = decoded; }
|
||||
const oPath = getParsedPath(o);
|
||||
const oDec = getParsedDecoded(o);
|
||||
replayPackets.push({
|
||||
id: o.id, hash: pkt.hash, raw: o.raw_hex || pkt.raw_hex,
|
||||
_ts: new Date(o.timestamp).getTime(),
|
||||
@@ -1674,7 +1689,7 @@
|
||||
let rows = '';
|
||||
|
||||
// Header section
|
||||
rows += sectionRow('Header');
|
||||
rows += sectionRow('Header', 'section-header');
|
||||
rows += fieldRow(0, 'Header Byte', '0x' + (buf.slice(0, 2) || '??'), `Route: ${routeTypeName(pkt.route_type)}, Payload: ${payloadTypeName(pkt.payload_type)}`);
|
||||
const pathByte0 = parseInt(buf.slice(2, 4), 16);
|
||||
const hashSizeVal = isNaN(pathByte0) ? '?' : ((pathByte0 >> 6) + 1);
|
||||
@@ -1684,7 +1699,7 @@
|
||||
// Transport codes
|
||||
let off = 2;
|
||||
if (pkt.route_type === 0 || pkt.route_type === 3) {
|
||||
rows += sectionRow('Transport Codes');
|
||||
rows += sectionRow('Transport Codes', 'section-transport');
|
||||
rows += fieldRow(off, 'Next Hop', buf.slice(off * 2, (off + 2) * 2), '');
|
||||
rows += fieldRow(off + 2, 'Last Hop', buf.slice((off + 2) * 2, (off + 4) * 2), '');
|
||||
off += 4;
|
||||
@@ -1692,7 +1707,7 @@
|
||||
|
||||
// Path
|
||||
if (pathHops.length > 0) {
|
||||
rows += sectionRow('Path (' + pathHops.length + ' hops)');
|
||||
rows += sectionRow('Path (' + pathHops.length + ' hops)', 'section-path');
|
||||
const pathByte = parseInt(buf.slice(2, 4), 16);
|
||||
const hashSize = (pathByte >> 6) + 1;
|
||||
for (let i = 0; i < pathHops.length; i++) {
|
||||
@@ -1704,7 +1719,7 @@
|
||||
}
|
||||
|
||||
// Payload
|
||||
rows += sectionRow('Payload — ' + payloadTypeName(pkt.payload_type));
|
||||
rows += sectionRow('Payload — ' + payloadTypeName(pkt.payload_type), 'section-payload');
|
||||
|
||||
if (decoded.type === 'ADVERT') {
|
||||
rows += fieldRow(1, 'Advertised Hash Size', hashSizeVal + ' byte' + (hashSizeVal !== 1 ? 's' : ''), 'From path byte 0x' + (buf.slice(2, 4) || '??') + ' — bits 7-6 = ' + (hashSizeVal - 1));
|
||||
@@ -1754,8 +1769,8 @@
|
||||
</table>`;
|
||||
}
|
||||
|
||||
function sectionRow(label) {
|
||||
return `<tr class="section-row"><td colspan="4">${label}</td></tr>`;
|
||||
function sectionRow(label, cls) {
|
||||
return `<tr class="section-row${cls ? ' ' + cls : ''}"><td colspan="4">${label}</td></tr>`;
|
||||
}
|
||||
function fieldRow(offset, name, value, desc) {
|
||||
return `<tr><td class="mono">${offset}</td><td>${name}</td><td class="mono">${value}</td><td class="text-muted">${desc || ''}</td></tr>`;
|
||||
@@ -1901,7 +1916,7 @@
|
||||
let obsSortMode = localStorage.getItem('meshcore-obs-sort') || SORT_OBSERVER;
|
||||
|
||||
function getPathHopCount(c) {
|
||||
try { return JSON.parse(c.path_json || '[]').length; } catch { return 0; }
|
||||
try { return getParsedPath(c).length; } catch { return 0; }
|
||||
}
|
||||
|
||||
function sortGroupChildren(group) {
|
||||
@@ -1966,7 +1981,7 @@
|
||||
if (!pkt) return;
|
||||
const group = packets.find(p => p.hash === hash);
|
||||
if (group && data.observations) {
|
||||
group._children = data.observations.map(o => ({...pkt, ...o, _isObservation: true}));
|
||||
group._children = data.observations.map(o => clearParsedCache({...pkt, ...o, _isObservation: true}));
|
||||
group._fetchedData = data;
|
||||
// Sort children based on current sort mode
|
||||
sortGroupChildren(group);
|
||||
@@ -1974,7 +1989,7 @@
|
||||
// Resolve any new hops from children
|
||||
const childHops = new Set();
|
||||
for (const c of (group?._children || [])) {
|
||||
try { JSON.parse(c.path_json || '[]').forEach(h => childHops.add(h)); } catch {}
|
||||
try { getParsedPath(c).forEach(h => childHops.add(h)); } catch {}
|
||||
}
|
||||
const newHops = [...childHops].filter(h => !(h in hopNameCache));
|
||||
if (newHops.length) await resolveHops(newHops);
|
||||
@@ -2007,6 +2022,28 @@
|
||||
});
|
||||
|
||||
// Standalone packet detail page: #/packet/123 or #/packet/HASH
|
||||
// Expose pure functions for unit testing (vm.createContext pattern)
|
||||
if (typeof window !== 'undefined') {
|
||||
window._packetsTestAPI = {
|
||||
typeName,
|
||||
obsName,
|
||||
getDetailPreview,
|
||||
sortGroupChildren,
|
||||
getPathHopCount,
|
||||
renderDecodedPacket,
|
||||
kv,
|
||||
buildFieldTable,
|
||||
sectionRow,
|
||||
fieldRow,
|
||||
renderTimestampCell,
|
||||
renderPath,
|
||||
_getRowCount,
|
||||
_cumulativeRowOffsets,
|
||||
buildGroupRowHtml,
|
||||
buildFlatRowHtml,
|
||||
};
|
||||
}
|
||||
|
||||
registerPage('packet-detail', {
|
||||
init: async (app, routeParam) => {
|
||||
const param = routeParam;
|
||||
@@ -2016,7 +2053,7 @@
|
||||
const data = await api(`/packets/${param}`);
|
||||
if (!data?.packet) { app.innerHTML = `<div style="max-width:800px;margin:0 auto;padding:40px;text-align:center"><h2>Packet not found</h2><p>Packet ${param} doesn't exist.</p><a href="#/packets">← Back to packets</a></div>`; return; }
|
||||
const hops = [];
|
||||
try { const ph = JSON.parse(data.packet.path_json || '[]'); hops.push(...ph); } catch {}
|
||||
try { hops.push(...getParsedPath(data.packet)); } catch {}
|
||||
const newHops = hops.filter(h => !(h in hopNameCache));
|
||||
if (newHops.length) await resolveHops(newHops);
|
||||
const container = document.createElement('div');
|
||||
|
||||
@@ -375,6 +375,10 @@ a:focus-visible, button:focus-visible, input:focus-visible, select:focus-visible
|
||||
background: var(--section-bg, #eef2ff); font-weight: 700; font-size: 11px;
|
||||
text-transform: uppercase; letter-spacing: .5px; color: var(--accent);
|
||||
}
|
||||
.field-table .section-header td { background: rgba(243,139,168,0.18); }
|
||||
.field-table .section-transport td { background: rgba(137,180,250,0.18); }
|
||||
.field-table .section-path td { background: rgba(166,227,161,0.18); }
|
||||
.field-table .section-payload td { background: rgba(249,226,175,0.18); }
|
||||
|
||||
/* === Path display === */
|
||||
.path-hops {
|
||||
|
||||
@@ -0,0 +1,123 @@
|
||||
/**
|
||||
* test-anim-perf.js — Performance benchmark for animation timer management
|
||||
*
|
||||
* Demonstrates that the rAF + concurrency-cap approach keeps active animation
|
||||
* count bounded, whereas the old setInterval approach accumulated without limit.
|
||||
*
|
||||
* Run: node test-anim-perf.js
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
let passed = 0, failed = 0;
|
||||
function assert(cond, msg) {
|
||||
if (cond) { console.log(` ✅ ${msg}`); passed++; }
|
||||
else { console.log(` ❌ ${msg}`); failed++; }
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Simulate OLD behaviour: setInterval-based, no concurrency cap
|
||||
// ---------------------------------------------------------------------------
|
||||
function simulateOldModel(packetsPerSec, hopsPerPacket, durationSec) {
|
||||
// Each hop spawns 3 intervals (pulse 26ms, line 33ms, fade 52ms).
|
||||
// Pulse lasts ~2s, line ~0.66s, fade ~0.8s+0.4s ≈ 1.2s
|
||||
// At any moment, timers from the last ~2s of packets are still alive.
|
||||
const intervalLifetimes = [2.0, 0.66, 1.2]; // seconds each interval lives
|
||||
let maxConcurrent = 0;
|
||||
// Walk through time in 0.1s steps
|
||||
const dt = 0.1;
|
||||
const spawns = []; // {time, lifetime}
|
||||
for (let t = 0; t < durationSec; t += dt) {
|
||||
// Spawn timers for packets arriving in this window
|
||||
const pktsInWindow = packetsPerSec * dt;
|
||||
for (let p = 0; p < pktsInWindow; p++) {
|
||||
for (let h = 0; h < hopsPerPacket; h++) {
|
||||
for (const lt of intervalLifetimes) {
|
||||
spawns.push({ time: t, lifetime: lt });
|
||||
}
|
||||
}
|
||||
}
|
||||
// Count alive timers
|
||||
const alive = spawns.filter(s => t < s.time + s.lifetime).length;
|
||||
if (alive > maxConcurrent) maxConcurrent = alive;
|
||||
}
|
||||
return maxConcurrent;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Simulate NEW behaviour: rAF + MAX_CONCURRENT_ANIMS cap
|
||||
// ---------------------------------------------------------------------------
|
||||
function simulateNewModel(packetsPerSec, hopsPerPacket, durationSec) {
|
||||
const MAX_CONCURRENT_ANIMS = 20;
|
||||
let activeAnims = 0;
|
||||
let maxConcurrent = 0;
|
||||
const anims = []; // {endTime}
|
||||
const dt = 0.1;
|
||||
for (let t = 0; t < durationSec; t += dt) {
|
||||
// Expire finished animations
|
||||
while (anims.length && anims[0].endTime <= t) {
|
||||
anims.shift();
|
||||
activeAnims--;
|
||||
}
|
||||
// Try to start new animations
|
||||
const pktsInWindow = packetsPerSec * dt;
|
||||
for (let p = 0; p < pktsInWindow; p++) {
|
||||
if (activeAnims >= MAX_CONCURRENT_ANIMS) break; // cap reached — drop
|
||||
activeAnims++;
|
||||
// rAF animation lifetime: longest is pulse ~2s
|
||||
anims.push({ endTime: t + 2.0 });
|
||||
}
|
||||
// Sort by endTime so expiry works
|
||||
anims.sort((a, b) => a.endTime - b.endTime);
|
||||
if (activeAnims > maxConcurrent) maxConcurrent = activeAnims;
|
||||
}
|
||||
return maxConcurrent;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
console.log('\n=== Animation timer accumulation: old vs new ===');
|
||||
|
||||
// Scenario: 5 pkts/sec, 3 hops each, 30 seconds
|
||||
const oldPeak30s = simulateOldModel(5, 3, 30);
|
||||
const newPeak30s = simulateNewModel(5, 3, 30);
|
||||
console.log(` Old model (30s @ 5pkt/s×3hops): peak ${oldPeak30s} concurrent timers`);
|
||||
console.log(` New model (30s @ 5pkt/s×3hops): peak ${newPeak30s} concurrent animations`);
|
||||
assert(oldPeak30s > 100, `old model accumulates >100 timers (got ${oldPeak30s})`);
|
||||
assert(newPeak30s <= 20, `new model stays ≤20 (got ${newPeak30s})`);
|
||||
|
||||
// Scenario: 5 minutes sustained
|
||||
const oldPeak5m = simulateOldModel(5, 3, 300);
|
||||
const newPeak5m = simulateNewModel(5, 3, 300);
|
||||
console.log(` Old model (5min @ 5pkt/s×3hops): peak ${oldPeak5m} concurrent timers`);
|
||||
console.log(` New model (5min @ 5pkt/s×3hops): peak ${newPeak5m} concurrent animations`);
|
||||
assert(oldPeak5m > 100, `old model at 5min still unbounded (got ${oldPeak5m})`);
|
||||
assert(newPeak5m <= 20, `new model at 5min still ≤20 (got ${newPeak5m})`);
|
||||
|
||||
// Scenario: burst — 20 pkts/sec for 10s
|
||||
const oldBurst = simulateOldModel(20, 3, 10);
|
||||
const newBurst = simulateNewModel(20, 3, 10);
|
||||
console.log(` Old model (burst 20pkt/s×3hops, 10s): peak ${oldBurst} concurrent timers`);
|
||||
console.log(` New model (burst 20pkt/s×3hops, 10s): peak ${newBurst} concurrent animations`);
|
||||
assert(oldBurst > 200, `old model under burst >200 timers (got ${oldBurst})`);
|
||||
assert(newBurst <= 20, `new model under burst stays ≤20 (got ${newBurst})`);
|
||||
|
||||
console.log('\n=== drawAnimatedLine frame-drop catch-up ===');
|
||||
|
||||
// Read the source and verify catch-up logic exists
|
||||
const fs = require('fs');
|
||||
const src = fs.readFileSync(__dirname + '/public/live.js', 'utf8');
|
||||
|
||||
// Extract the animateLine function body
|
||||
const lineMatch = src.match(/function animateLine\(now\)\s*\{[\s\S]*?requestAnimationFrame\(animateLine\)/);
|
||||
assert(lineMatch && /Math\.min\(Math\.floor\(elapsed\s*\/\s*33\)/.test(lineMatch[0]),
|
||||
'drawAnimatedLine catches up on frame drops (multi-tick per frame)');
|
||||
|
||||
const fadeMatch = src.match(/function animateFade\(now\)\s*\{[\s\S]*?requestAnimationFrame\(animateFade\)/);
|
||||
assert(fadeMatch && /Math\.min\(Math\.floor\(fadeElapsed\s*\/\s*52\)/.test(fadeMatch[0]),
|
||||
'animateFade catches up on frame drops (multi-tick per frame)');
|
||||
|
||||
console.log(`\n${passed} passed, ${failed} failed\n`);
|
||||
process.exit(failed ? 1 : 0);
|
||||
+1743
-10
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,78 @@
|
||||
/* Unit tests for live.js animation system — verifies rAF migration and concurrency cap */
|
||||
'use strict';
|
||||
const fs = require('fs');
|
||||
const assert = require('assert');
|
||||
|
||||
const src = fs.readFileSync('public/live.js', 'utf8');
|
||||
|
||||
let passed = 0, failed = 0;
|
||||
function test(name, fn) {
|
||||
try { fn(); passed++; console.log(` ✅ ${name}`); }
|
||||
catch (e) { failed++; console.log(` ❌ ${name}: ${e.message}`); }
|
||||
}
|
||||
|
||||
console.log('\n=== Animation interval elimination ===');
|
||||
|
||||
test('pulseNode does not use setInterval', () => {
|
||||
// Extract pulseNode function body
|
||||
const pulseStart = src.indexOf('function pulseNode(');
|
||||
const nextFn = src.indexOf('\n function ', pulseStart + 1);
|
||||
const body = src.substring(pulseStart, nextFn);
|
||||
assert.ok(!body.includes('setInterval'), 'pulseNode still uses setInterval');
|
||||
assert.ok(body.includes('requestAnimationFrame'), 'pulseNode should use requestAnimationFrame');
|
||||
});
|
||||
|
||||
test('drawAnimatedLine does not use setInterval', () => {
|
||||
const drawStart = src.indexOf('function drawAnimatedLine(');
|
||||
const nextFn = src.indexOf('\n function ', drawStart + 1);
|
||||
const body = src.substring(drawStart, nextFn);
|
||||
assert.ok(!body.includes('setInterval'), 'drawAnimatedLine still uses setInterval');
|
||||
assert.ok(body.includes('requestAnimationFrame'), 'drawAnimatedLine should use requestAnimationFrame');
|
||||
});
|
||||
|
||||
test('ghost hop pulse does not use setInterval', () => {
|
||||
// Ghost pulse is inside animatePath
|
||||
const animStart = src.indexOf('function animatePath(');
|
||||
const animEnd = src.indexOf('\n function ', animStart + 1);
|
||||
const body = src.substring(animStart, animEnd);
|
||||
assert.ok(!body.includes('setInterval'), 'animatePath still uses setInterval');
|
||||
});
|
||||
|
||||
console.log('\n=== Concurrency cap ===');
|
||||
|
||||
test('MAX_CONCURRENT_ANIMS is defined', () => {
|
||||
assert.ok(src.includes('MAX_CONCURRENT_ANIMS'), 'MAX_CONCURRENT_ANIMS constant not found');
|
||||
});
|
||||
|
||||
test('MAX_CONCURRENT_ANIMS is set to 20', () => {
|
||||
const match = src.match(/MAX_CONCURRENT_ANIMS\s*=\s*(\d+)/);
|
||||
assert.ok(match, 'Could not parse MAX_CONCURRENT_ANIMS value');
|
||||
assert.strictEqual(parseInt(match[1]), 20);
|
||||
});
|
||||
|
||||
test('animatePath checks MAX_CONCURRENT_ANIMS before proceeding', () => {
|
||||
const animStart = src.indexOf('function animatePath(');
|
||||
// Check that within the first 200 chars of the function, we check the cap
|
||||
const snippet = src.substring(animStart, animStart + 300);
|
||||
assert.ok(snippet.includes('activeAnims >= MAX_CONCURRENT_ANIMS'), 'animatePath should check activeAnims against cap');
|
||||
});
|
||||
|
||||
console.log('\n=== Safety: no stale setInterval in animation functions ===');
|
||||
|
||||
test('no setInterval remains in animation hot path', () => {
|
||||
// The only acceptable setIntervals are the UI ones (timeline, clock, prune, rate counter)
|
||||
// Count total setInterval occurrences
|
||||
const matches = src.match(/setInterval\(/g) || [];
|
||||
// Count known OK ones: _timelineRefreshInterval, _lcdClockInterval, _pruneInterval, _rateCounterInterval
|
||||
const okPatterns = ['_timelineRefreshInterval', '_lcdClockInterval', '_pruneInterval', '_rateCounterInterval'];
|
||||
let okCount = 0;
|
||||
for (const p of okPatterns) {
|
||||
if (src.includes(p + ' = setInterval') || src.includes(p + '= setInterval')) okCount++;
|
||||
}
|
||||
// Allow some non-animation setIntervals (the 4 UI ones above)
|
||||
assert.ok(matches.length <= okCount + 1,
|
||||
`Found ${matches.length} setInterval calls, expected at most ${okCount + 1} (non-animation). Some animation setIntervals may remain.`);
|
||||
});
|
||||
|
||||
console.log(`\n${passed} passed, ${failed} failed\n`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
+853
@@ -0,0 +1,853 @@
|
||||
/* Unit tests for live.js functions (tested via VM sandbox)
|
||||
* Part of #344 — live.js coverage
|
||||
*/
|
||||
'use strict';
|
||||
const vm = require('vm');
|
||||
const fs = require('fs');
|
||||
const assert = require('assert');
|
||||
|
||||
let passed = 0, failed = 0;
|
||||
const pendingTests = [];
|
||||
function test(name, fn) {
|
||||
try {
|
||||
const out = fn();
|
||||
if (out && typeof out.then === 'function') {
|
||||
pendingTests.push(
|
||||
out.then(() => { passed++; console.log(` ✅ ${name}`); })
|
||||
.catch((e) => { failed++; console.log(` ❌ ${name}: ${e.message}`); })
|
||||
);
|
||||
return;
|
||||
}
|
||||
passed++; console.log(` ✅ ${name}`);
|
||||
} catch (e) { failed++; console.log(` ❌ ${name}: ${e.message}`); }
|
||||
}
|
||||
|
||||
// --- Browser-like sandbox ---
|
||||
function makeSandbox() {
|
||||
const ctx = {
|
||||
window: { addEventListener: () => {}, dispatchEvent: () => {}, devicePixelRatio: 1 },
|
||||
document: {
|
||||
readyState: 'complete',
|
||||
createElement: (tag) => ({
|
||||
tagName: tag, id: '', textContent: '', innerHTML: '', style: {},
|
||||
classList: { add() {}, remove() {}, contains() { return false; } },
|
||||
setAttribute() {}, getAttribute() { return null; },
|
||||
addEventListener() {}, focus() {},
|
||||
getContext: () => ({
|
||||
clearRect() {}, fillRect() {}, beginPath() {}, arc() {}, fill() {},
|
||||
scale() {}, fillStyle: '', font: '', fillText() {},
|
||||
}),
|
||||
offsetWidth: 200, offsetHeight: 40, width: 0, height: 0,
|
||||
}),
|
||||
head: { appendChild: () => {} },
|
||||
getElementById: () => null,
|
||||
addEventListener: () => {},
|
||||
querySelectorAll: () => [],
|
||||
querySelector: () => null,
|
||||
createElementNS: () => ({
|
||||
tagName: 'svg', id: '', textContent: '', innerHTML: '', style: {},
|
||||
setAttribute() {}, getAttribute() { return null; },
|
||||
}),
|
||||
documentElement: { getAttribute: () => null, setAttribute: () => {} },
|
||||
body: { appendChild: () => {}, removeChild: () => {}, contains: () => false },
|
||||
hidden: false,
|
||||
},
|
||||
console,
|
||||
Date, Infinity, Math, Array, Object, String, Number, JSON, RegExp,
|
||||
Error, TypeError, Map, Set, Promise, URLSearchParams,
|
||||
parseInt, parseFloat, isNaN, isFinite,
|
||||
encodeURIComponent, decodeURIComponent,
|
||||
setTimeout: () => 0, clearTimeout: () => {},
|
||||
setInterval: () => 0, clearInterval: () => {},
|
||||
fetch: () => Promise.resolve({ json: () => Promise.resolve({}) }),
|
||||
performance: { now: () => Date.now() },
|
||||
requestAnimationFrame: (cb) => setTimeout(cb, 0),
|
||||
cancelAnimationFrame: () => {},
|
||||
localStorage: (() => {
|
||||
const store = {};
|
||||
return {
|
||||
getItem: k => store[k] !== undefined ? store[k] : null,
|
||||
setItem: (k, v) => { store[k] = String(v); },
|
||||
removeItem: k => { delete store[k]; },
|
||||
};
|
||||
})(),
|
||||
location: { hash: '', protocol: 'https:', host: 'localhost' },
|
||||
CustomEvent: class CustomEvent {},
|
||||
addEventListener: () => {},
|
||||
dispatchEvent: () => {},
|
||||
getComputedStyle: () => ({ getPropertyValue: () => '' }),
|
||||
matchMedia: () => ({ matches: false, addEventListener: () => {} }),
|
||||
navigator: {},
|
||||
visualViewport: null,
|
||||
MutationObserver: function() { this.observe = () => {}; this.disconnect = () => {}; },
|
||||
WebSocket: function() { this.close = () => {}; },
|
||||
IATA_COORDS_GEO: {},
|
||||
};
|
||||
vm.createContext(ctx);
|
||||
return ctx;
|
||||
}
|
||||
|
||||
function loadInCtx(ctx, file) {
|
||||
vm.runInContext(fs.readFileSync(file, 'utf8'), ctx);
|
||||
for (const k of Object.keys(ctx.window)) ctx[k] = ctx.window[k];
|
||||
}
|
||||
|
||||
function makeLeafletMock() {
|
||||
return {
|
||||
circleMarker: () => {
|
||||
const m = {
|
||||
addTo() { return m; }, bindTooltip() { return m; }, on() { return m; },
|
||||
setRadius() {}, setStyle() {}, setLatLng() {},
|
||||
getLatLng() { return { lat: 0, lng: 0 }; },
|
||||
_baseColor: '', _baseSize: 5, _glowMarker: null, remove() {},
|
||||
};
|
||||
return m;
|
||||
},
|
||||
polyline: () => { const p = { addTo() { return p; }, setStyle() {}, remove() {} }; return p; },
|
||||
polygon: () => { const p = { addTo() { return p; }, remove() {} }; return p; },
|
||||
map: () => {
|
||||
const m = {
|
||||
setView() { return m; }, addLayer() { return m; }, on() { return m; },
|
||||
getZoom() { return 11; }, getCenter() { return { lat: 37, lng: -122 }; },
|
||||
getBounds() { return { contains: () => true }; }, fitBounds() { return m; },
|
||||
invalidateSize() {}, remove() {}, hasLayer() { return false; }, removeLayer() {},
|
||||
};
|
||||
return m;
|
||||
},
|
||||
layerGroup: () => {
|
||||
const g = {
|
||||
addTo() { return g; }, addLayer() {}, removeLayer() {},
|
||||
clearLayers() {}, hasLayer() { return true; }, eachLayer() {},
|
||||
};
|
||||
return g;
|
||||
},
|
||||
tileLayer: () => ({ addTo() { return this; } }),
|
||||
control: { attribution: () => ({ addTo() {} }) },
|
||||
DomUtil: { addClass() {}, removeClass() {} },
|
||||
};
|
||||
}
|
||||
|
||||
function addLiveGlobals(ctx) {
|
||||
ctx.L = makeLeafletMock();
|
||||
ctx.registerPage = () => {};
|
||||
ctx.onWS = () => {};
|
||||
ctx.offWS = () => {};
|
||||
ctx.connectWS = () => {};
|
||||
ctx.api = () => Promise.resolve([]);
|
||||
ctx.invalidateApiCache = () => {};
|
||||
ctx.favStar = () => '';
|
||||
ctx.bindFavStars = () => {};
|
||||
ctx.getFavorites = () => [];
|
||||
ctx.isFavorite = () => false;
|
||||
ctx.HopResolver = { init() {}, resolve: () => ({}), ready: () => false };
|
||||
ctx.MeshAudio = null;
|
||||
ctx.RegionFilter = { init() {}, getSelected: () => null, onRegionChange: () => {} };
|
||||
}
|
||||
|
||||
function makeLiveSandbox({ withAppJs = false } = {}) {
|
||||
const ctx = makeSandbox();
|
||||
addLiveGlobals(ctx);
|
||||
|
||||
loadInCtx(ctx, 'public/roles.js');
|
||||
if (withAppJs) loadInCtx(ctx, 'public/app.js');
|
||||
try { loadInCtx(ctx, 'public/live.js'); } catch (e) {
|
||||
console.error('live.js load error:', e.message);
|
||||
for (const k of Object.keys(ctx.window)) ctx[k] = ctx.window[k];
|
||||
}
|
||||
return ctx;
|
||||
}
|
||||
|
||||
// ===== dbPacketToLive =====
// Verifies the DB-row → live-packet shape conversion exposed as a test hook.
console.log('\n=== live.js: dbPacketToLive ===');
{
  // Fresh sandbox per section so live.js module state does not leak between suites.
  const ctx = makeLiveSandbox();
  const dbPacketToLive = ctx.window._liveDbPacketToLive;
  assert.ok(dbPacketToLive, '_liveDbPacketToLive must be exposed');

  test('converts basic DB packet to live format', () => {
    const pkt = {
      id: 42, hash: 'abc123',
      raw_hex: 'deadbeef',
      path_json: '["hop1","hop2"]',
      decoded_json: '{"type":"GRP_TXT","text":"hello"}',
      timestamp: '2024-06-15T12:00:00Z',
      snr: 7.5, rssi: -85, observer_name: 'ObsA',
    };
    const result = dbPacketToLive(pkt);
    assert.strictEqual(result.id, 42);
    assert.strictEqual(result.hash, 'abc123');
    assert.strictEqual(result.raw, 'deadbeef');
    assert.strictEqual(result.snr, 7.5);
    assert.strictEqual(result.rssi, -85);
    assert.strictEqual(result.observer, 'ObsA');
    assert.strictEqual(result.decoded.header.payloadTypeName, 'GRP_TXT');
    assert.strictEqual(result.decoded.payload.text, 'hello');
    assert.deepStrictEqual(result.decoded.path.hops, ['hop1', 'hop2']);
    assert.strictEqual(result._ts, new Date('2024-06-15T12:00:00Z').getTime());
  });

  test('handles null decoded_json', () => {
    const pkt = { id: 1, hash: 'x', decoded_json: null, path_json: null, timestamp: '2024-01-01T00:00:00Z' };
    const result = dbPacketToLive(pkt);
    assert.strictEqual(result.decoded.header.payloadTypeName, 'UNKNOWN');
    assert.deepStrictEqual(result.decoded.path.hops, []);
  });

  test('uses payload_type_name as fallback', () => {
    const pkt = { id: 2, hash: 'y', decoded_json: '{}', path_json: '[]', timestamp: '2024-01-01T00:00:00Z', payload_type_name: 'ADVERT' };
    const result = dbPacketToLive(pkt);
    assert.strictEqual(result.decoded.header.payloadTypeName, 'ADVERT');
  });

  test('uses created_at as timestamp fallback', () => {
    const pkt = { id: 3, hash: 'z', decoded_json: '{}', path_json: '[]', created_at: '2024-03-01T06:00:00Z' };
    const result = dbPacketToLive(pkt);
    assert.strictEqual(result._ts, new Date('2024-03-01T06:00:00Z').getTime());
  });
}

// ===== expandToBufferEntries =====
// Verifies expansion of DB packets (with optional per-observer observations)
// into the flat VCR buffer entry list ({ ts, pkt }).
console.log('\n=== live.js: expandToBufferEntries ===');
{
  const ctx = makeLiveSandbox();
  const expand = ctx.window._liveExpandToBufferEntries;
  assert.ok(expand, '_liveExpandToBufferEntries must be exposed');

  test('single packet without observations returns one entry', () => {
    const pkts = [{
      id: 1, hash: 'h1', timestamp: '2024-06-15T12:00:00Z',
      decoded_json: '{"type":"GRP_TXT"}', path_json: '[]',
    }];
    const entries = expand(pkts);
    assert.strictEqual(entries.length, 1);
    assert.strictEqual(entries[0].pkt.id, 1);
    assert.strictEqual(entries[0].ts, new Date('2024-06-15T12:00:00Z').getTime());
  });

  test('packet with observations expands to one entry per observation', () => {
    const pkts = [{
      id: 10, hash: 'h10', timestamp: '2024-06-15T12:00:00Z',
      decoded_json: '{"type":"ADVERT"}', path_json: '[]', raw_hex: 'ff',
      observations: [
        { timestamp: '2024-06-15T12:00:01Z', snr: 5, observer_name: 'O1' },
        { timestamp: '2024-06-15T12:00:02Z', snr: 8, observer_name: 'O2' },
        { timestamp: '2024-06-15T12:00:03Z', snr: 3, observer_name: 'O3' },
      ],
    }];
    const entries = expand(pkts);
    assert.strictEqual(entries.length, 3);
    assert.strictEqual(entries[0].pkt.observer, 'O1');
    assert.strictEqual(entries[1].pkt.observer, 'O2');
    assert.strictEqual(entries[2].pkt.observer, 'O3');
    // All should share the same hash
    assert.strictEqual(entries[0].pkt.hash, 'h10');
    assert.strictEqual(entries[2].pkt.hash, 'h10');
    // Entries should be in chronological order
    assert.ok(entries[0].ts < entries[1].ts, 'entry 0 should be before entry 1');
    assert.ok(entries[1].ts < entries[2].ts, 'entry 1 should be before entry 2');
  });

  test('empty observations array treated as no observations', () => {
    const pkts = [{
      id: 5, hash: 'h5', timestamp: '2024-01-01T00:00:00Z',
      decoded_json: '{}', path_json: '[]', observations: [],
    }];
    const entries = expand(pkts);
    assert.strictEqual(entries.length, 1);
  });

  test('multiple packets expand independently', () => {
    const pkts = [
      { id: 1, hash: 'h1', timestamp: '2024-01-01T00:00:00Z', decoded_json: '{}', path_json: '[]' },
      {
        id: 2, hash: 'h2', timestamp: '2024-01-01T00:00:00Z', decoded_json: '{}', path_json: '[]', raw_hex: 'aa',
        observations: [
          { timestamp: '2024-01-01T00:00:01Z', observer_name: 'X' },
          { timestamp: '2024-01-01T00:00:02Z', observer_name: 'Y' },
        ],
      },
    ];
    const entries = expand(pkts);
    assert.strictEqual(entries.length, 3);
  });
}

// ===== SEG_MAP (7-segment display) =====
// Bitmask map: low 7 bits are the display segments, MSB (0x80) is the colon dot.
console.log('\n=== live.js: SEG_MAP ===');
{
  const ctx = makeLiveSandbox();
  const SEG_MAP = ctx.window._liveSEG_MAP;
  assert.ok(SEG_MAP, '_liveSEG_MAP must be exposed');

  test('all digits 0-9 are mapped', () => {
    for (let i = 0; i <= 9; i++) {
      assert.ok(SEG_MAP[String(i)] !== undefined, `digit ${i} must be in SEG_MAP`);
      assert.ok(SEG_MAP[String(i)] > 0, `digit ${i} must have non-zero segments`);
    }
  });

  test('digit 8 lights all 7 segments and no others', () => {
    // 0x7F = 0b01111111 — all 7 segment bits on, MSB (colon) off
    const val = SEG_MAP['8'];
    assert.strictEqual(val & 0x7F, 0x7F, 'all 7 segment bits should be set');
    assert.strictEqual(val & 0x80, 0, 'colon bit should not be set for a digit');
  });

  test('colon only sets the MSB (dot/colon indicator)', () => {
    const val = SEG_MAP[':'];
    assert.strictEqual(val & 0x80, 0x80, 'MSB (colon bit) should be set');
    assert.strictEqual(val & 0x7F, 0, 'no segment bits should be set for colon');
  });

  test('space lights no segments', () => {
    assert.strictEqual(SEG_MAP[' '], 0x00, 'space should have no bits set');
  });

  test('digit 1 lights fewer segments than digit 8', () => {
    // Behavioral: 1 has fewer segments lit than 8
    const ones = (n) => { let c = 0; while (n) { c += n & 1; n >>= 1; } return c; };
    assert.ok(ones(SEG_MAP['1']) < ones(SEG_MAP['8']),
      'digit 1 should have fewer segment bits than digit 8');
  });

  test('VCR mode letters are mapped with non-zero segments', () => {
    for (const ch of ['P', 'A', 'U', 'S', 'E', 'L', 'I', 'V']) {
      assert.ok(SEG_MAP[ch] !== undefined, `${ch} must be in SEG_MAP`);
      assert.ok(SEG_MAP[ch] > 0, `${ch} must have non-zero segments`);
    }
  });
}

// ===== VCR state machine =====
// NOTE: tests in this section share one sandbox and are order-dependent —
// each test leaves the VCR in a known state for the next one.
console.log('\n=== live.js: VCR state machine ===');
{
  const ctx = makeLiveSandbox();
  const VCR = ctx.window._liveVCR;
  const vcrSetMode = ctx.window._liveVcrSetMode;
  const vcrPause = ctx.window._liveVcrPause;
  const vcrSpeedCycle = ctx.window._liveVcrSpeedCycle;
  assert.ok(VCR, '_liveVCR must be exposed');

  test('VCR initial mode is LIVE', () => {
    assert.strictEqual(VCR().mode, 'LIVE');
  });

  test('vcrSetMode changes mode', () => {
    vcrSetMode('PAUSED');
    assert.strictEqual(VCR().mode, 'PAUSED');
    assert.ok(VCR().frozenNow != null, 'frozenNow should be set when not LIVE');
  });

  test('vcrSetMode LIVE clears frozenNow', () => {
    vcrSetMode('LIVE');
    assert.strictEqual(VCR().mode, 'LIVE');
    assert.strictEqual(VCR().frozenNow, null);
  });

  test('vcrPause stops replay and sets PAUSED', () => {
    vcrSetMode('LIVE');
    vcrPause();
    assert.strictEqual(VCR().mode, 'PAUSED');
    assert.strictEqual(VCR().missedCount, 0);
  });

  test('vcrPause is idempotent', () => {
    vcrPause();
    const frozen1 = VCR().frozenNow;
    assert.strictEqual(VCR().mode, 'PAUSED', 'mode should be PAUSED after first call');
    vcrPause();
    assert.strictEqual(VCR().frozenNow, frozen1);
    assert.strictEqual(VCR().mode, 'PAUSED', 'mode should stay PAUSED after second call');
  });

  test('vcrSpeedCycle cycles through 1,2,4,8', () => {
    vcrSetMode('LIVE');
    VCR().speed = 1;
    vcrSpeedCycle();
    assert.strictEqual(VCR().speed, 2);
    vcrSpeedCycle();
    assert.strictEqual(VCR().speed, 4);
    vcrSpeedCycle();
    assert.strictEqual(VCR().speed, 8);
    vcrSpeedCycle();
    assert.strictEqual(VCR().speed, 1); // wraps around
  });

  const vcrResumeLive = ctx.window._liveVcrResumeLive;
  assert.ok(vcrResumeLive, '_liveVcrResumeLive must be exposed');

  test('vcrResumeLive transitions from PAUSED to LIVE', () => {
    vcrPause();
    assert.strictEqual(VCR().mode, 'PAUSED');
    assert.ok(VCR().frozenNow != null, 'frozenNow should be set when paused');
    vcrResumeLive();
    assert.strictEqual(VCR().mode, 'LIVE');
    assert.strictEqual(VCR().frozenNow, null, 'frozenNow should be cleared');
    assert.strictEqual(VCR().playhead, -1, 'playhead should reset to -1');
    assert.strictEqual(VCR().speed, 1, 'speed should reset to 1');
    assert.strictEqual(VCR().missedCount, 0, 'missedCount should be 0');
  });
}
|
||||
|
||||
// ===== getFavoritePubkeys =====
// Favorites come from two localStorage keys: a plain pubkey array
// ('meshcore-favorites') and owned-node objects ('meshcore-my-nodes').
console.log('\n=== live.js: getFavoritePubkeys ===');
{
  const ctx = makeLiveSandbox();
  const getFavPubkeys = ctx.window._liveGetFavoritePubkeys;
  assert.ok(getFavPubkeys, '_liveGetFavoritePubkeys must be exposed');

  test('returns empty array when no favorites stored', () => {
    ctx.localStorage.removeItem('meshcore-favorites');
    ctx.localStorage.removeItem('meshcore-my-nodes');
    const result = getFavPubkeys();
    assert.ok(Array.isArray(result));
    assert.strictEqual(result.length, 0);
  });

  test('reads from meshcore-favorites', () => {
    ctx.localStorage.setItem('meshcore-favorites', '["pk1","pk2"]');
    ctx.localStorage.removeItem('meshcore-my-nodes');
    const result = getFavPubkeys();
    assert.ok(result.includes('pk1'));
    assert.ok(result.includes('pk2'));
  });

  test('reads from meshcore-my-nodes pubkeys', () => {
    ctx.localStorage.removeItem('meshcore-favorites');
    ctx.localStorage.setItem('meshcore-my-nodes', '[{"pubkey":"mynode1"},{"pubkey":"mynode2"}]');
    const result = getFavPubkeys();
    assert.ok(result.includes('mynode1'));
    assert.ok(result.includes('mynode2'));
  });

  test('merges both sources', () => {
    ctx.localStorage.setItem('meshcore-favorites', '["fav1"]');
    ctx.localStorage.setItem('meshcore-my-nodes', '[{"pubkey":"mine1"}]');
    const result = getFavPubkeys();
    assert.ok(result.includes('fav1'));
    assert.ok(result.includes('mine1'));
    assert.strictEqual(result.length, 2);
  });

  test('handles corrupt localStorage gracefully', () => {
    ctx.localStorage.setItem('meshcore-favorites', 'not json');
    ctx.localStorage.setItem('meshcore-my-nodes', '{bad}');
    const result = getFavPubkeys();
    assert.ok(Array.isArray(result));
    assert.strictEqual(result.length, 0, 'corrupt data should yield empty array');
  });

  test('filters out falsy values', () => {
    ctx.localStorage.setItem('meshcore-favorites', '["pk1",null,"",false,"pk2"]');
    ctx.localStorage.removeItem('meshcore-my-nodes');
    const result = getFavPubkeys();
    assert.ok(!result.includes(null));
    assert.ok(!result.includes(''));
    assert.strictEqual(result.length, 2);
  });
}

// ===== packetInvolvesFavorite =====
console.log('\n=== live.js: packetInvolvesFavorite ===');
{
  const ctx = makeLiveSandbox();
  // Clean localStorage to avoid leakage from prior test sections
  ctx.localStorage.removeItem('meshcore-favorites');
  ctx.localStorage.removeItem('meshcore-my-nodes');
  const involves = ctx.window._livePacketInvolvesFavorite;
  assert.ok(involves, '_livePacketInvolvesFavorite must be exposed');

  test('returns false when no favorites', () => {
    ctx.localStorage.removeItem('meshcore-favorites');
    ctx.localStorage.removeItem('meshcore-my-nodes');
    const pkt = { decoded: { header: {}, payload: { pubKey: 'abc' } } };
    assert.strictEqual(involves(pkt), false);
  });

  test('matches sender pubKey', () => {
    ctx.localStorage.setItem('meshcore-favorites', '["sender123"]');
    const pkt = { decoded: { header: {}, payload: { pubKey: 'sender123' } } };
    assert.strictEqual(involves(pkt), true);
  });

  test('matches hop prefix', () => {
    // Path hops are short hex prefixes of the full favorite pubkey.
    ctx.localStorage.setItem('meshcore-favorites', '["abcdef1234567890"]');
    const pkt = { decoded: { header: {}, payload: {}, path: { hops: ['abcd'] } } };
    assert.strictEqual(involves(pkt), true);
  });

  test('does not match unrelated hop', () => {
    ctx.localStorage.setItem('meshcore-favorites', '["abcdef1234567890"]');
    const pkt = { decoded: { header: {}, payload: {}, path: { hops: ['ffff'] } } };
    assert.strictEqual(involves(pkt), false);
  });

  test('handles missing decoded fields gracefully', () => {
    ctx.localStorage.setItem('meshcore-favorites', '["xyz"]');
    const pkt = {};
    assert.strictEqual(involves(pkt), false);
  });
}

// ===== isNodeFavorited =====
console.log('\n=== live.js: isNodeFavorited ===');
{
  const ctx = makeLiveSandbox();
  // Clean localStorage to avoid leakage from prior test sections
  ctx.localStorage.removeItem('meshcore-favorites');
  ctx.localStorage.removeItem('meshcore-my-nodes');
  const isFav = ctx.window._liveIsNodeFavorited;
  assert.ok(isFav, '_liveIsNodeFavorited must be exposed');

  test('returns true when pubkey is in favorites', () => {
    ctx.localStorage.setItem('meshcore-favorites', '["pk1","pk2"]');
    assert.strictEqual(isFav('pk1'), true);
  });

  test('returns false when pubkey not in favorites', () => {
    ctx.localStorage.setItem('meshcore-favorites', '["pk1"]');
    assert.strictEqual(isFav('pk99'), false);
  });

  test('returns false with empty favorites', () => {
    ctx.localStorage.removeItem('meshcore-favorites');
    ctx.localStorage.removeItem('meshcore-my-nodes');
    assert.strictEqual(isFav('pk1'), false);
  });
}

// ===== formatLiveTimestampHtml =====
// Needs app.js loaded too (shared formatting helpers live there).
console.log('\n=== live.js: formatLiveTimestampHtml ===');
{
  const ctx = makeLiveSandbox({ withAppJs: true });

  const fmt = ctx.window._liveFormatLiveTimestampHtml;
  assert.ok(fmt, '_liveFormatLiveTimestampHtml must be exposed');

  test('formats a recent ISO timestamp', () => {
    const iso = new Date(Date.now() - 30000).toISOString();
    const html = fmt(iso);
    assert.ok(html.includes('timestamp-text'), 'should contain timestamp-text span');
    assert.ok(html.includes('title='), 'should have tooltip');
  });

  test('handles null input', () => {
    const html = fmt(null);
    assert.ok(typeof html === 'string');
    assert.ok(html.includes('—'), 'null input should render em-dash fallback');
  });

  test('handles numeric timestamp', () => {
    const html = fmt(Date.now() - 60000);
    assert.ok(typeof html === 'string');
    assert.ok(html.includes('timestamp-text'), 'numeric timestamp should produce timestamp-text span');
    assert.ok(html.includes('title='), 'numeric timestamp should have tooltip');
  });

  test('future timestamp shows warning icon', () => {
    const future = new Date(Date.now() + 120000).toISOString();
    const html = fmt(future);
    assert.ok(html.includes('timestamp-future-icon'), 'should show future warning');
  });
}
|
||||
|
||||
// ===== resolveHopPositions =====
// Resolves a packet's hop list to map positions using nodeData + HopResolver;
// unknown ("ghost") hops get positions interpolated between known neighbors.
console.log('\n=== live.js: resolveHopPositions ===');
{
  const ctx = makeLiveSandbox();
  const resolve = ctx.window._liveResolveHopPositions;
  // Live references to live.js-internal state, exposed as accessor hooks.
  const nodeData = ctx.window._liveNodeData();
  const nodeMarkers = ctx.window._liveNodeMarkers();
  assert.ok(resolve, '_liveResolveHopPositions must be exposed');

  test('returns empty array for empty hops', () => {
    const result = resolve([], {});
    assert.deepStrictEqual(result, []);
  });

  test('returns sender position when payload has pubKey + coords', () => {
    const payload = { pubKey: 'sender1', name: 'Sender', lat: 37.5, lon: -122.0 };
    // No nodes in nodeData, so hops won't resolve
    const result = resolve([], payload);
    // With empty hops, the function still adds the sender as an anchor point.
    assert.ok(Array.isArray(result), 'should return an array');
    assert.strictEqual(result.length, 1, 'sender coords should produce one anchor position');
    assert.strictEqual(result[0].pos[0], 37.5, 'anchor should use sender lat');
    assert.strictEqual(result[0].pos[1], -122.0, 'anchor should use sender lon');
    assert.strictEqual(result[0].name, 'Sender', 'anchor should use sender name');
    assert.strictEqual(result[0].known, true, 'sender with coords should be marked as known');
  });

  test('resolves known node from nodeData', () => {
    // Add a node to nodeData
    nodeData['nodeA_pubkey'] = { public_key: 'nodeA_pubkey', name: 'NodeA', lat: 37.3, lon: -122.0 };
    nodeData['nodeB_pubkey'] = { public_key: 'nodeB_pubkey', name: 'NodeB', lat: 38.0, lon: -121.0 };
    // Need HopResolver to resolve the hop prefix — set on both ctx and window
    const mockResolver = {
      init() {},
      ready() { return true; },
      resolve(hops) {
        const map = {};
        for (const h of hops) {
          if (h === 'nodeA') map[h] = { name: 'NodeA', pubkey: 'nodeA_pubkey' };
          else if (h === 'nodeB') map[h] = { name: 'NodeB', pubkey: 'nodeB_pubkey' };
          else map[h] = { name: null, pubkey: null };
        }
        return map;
      },
    };
    ctx.HopResolver = mockResolver;
    ctx.window.HopResolver = mockResolver;
    // Need at least 2 known nodes for ghost mode to not filter down
    const result = resolve(['nodeA', 'nodeB'], {});
    assert.ok(result.length >= 2, `expected >= 2 positions, got ${result.length}`);
    const foundA = result.find(r => r.key === 'nodeA_pubkey');
    assert.ok(foundA, 'should resolve nodeA to nodeA_pubkey');
    assert.strictEqual(foundA.pos[0], 37.3);
    assert.strictEqual(foundA.pos[1], -122.0);
    assert.strictEqual(foundA.known, true);
    // Clean up shared nodeData so later tests start from an empty map.
    delete nodeData['nodeA_pubkey'];
    delete nodeData['nodeB_pubkey'];
  });

  test('ghost hops get interpolated positions between known nodes', () => {
    // Set up: two known nodes, one unknown hop between them
    nodeData['n1'] = { public_key: 'n1', name: 'N1', lat: 37.0, lon: -122.0 };
    nodeData['n2'] = { public_key: 'n2', name: 'N2', lat: 38.0, lon: -121.0 };
    const mockResolver = {
      init() {},
      ready() { return true; },
      resolve(hops) {
        const map = {};
        for (const h of hops) {
          if (h === 'h1') map[h] = { name: 'N1', pubkey: 'n1' };
          else if (h === 'h3') map[h] = { name: 'N2', pubkey: 'n2' };
          else map[h] = { name: null, pubkey: null };
        }
        return map;
      },
    };
    ctx.HopResolver = mockResolver;
    ctx.window.HopResolver = mockResolver;
    const result = resolve(['h1', 'h2', 'h3'], {});
    assert.ok(result.length >= 2, `should have at least 2 positions, got ${result.length}`);
    // Check that the ghost hop got an interpolated position
    const ghost = result.find(r => r.ghost);
    assert.ok(ghost, 'ghost hop should be present in resolved positions — if missing, interpolation logic changed');
    assert.ok(ghost.pos[0] > 37.0 && ghost.pos[0] < 38.0, 'ghost lat should be interpolated');
    assert.ok(ghost.pos[1] > -122.0 && ghost.pos[1] < -121.0, 'ghost lon should be interpolated');
    delete nodeData['n1'];
    delete nodeData['n2'];
  });
}

// ===== bufferPacket and VCR buffer management =====
// NOTE: these tests mutate the shared VCR buffer and are order-dependent.
console.log('\n=== live.js: bufferPacket / VCR buffer ===');
{
  const ctx = makeLiveSandbox();
  const bufferPacket = ctx.window._liveBufferPacket;
  const VCR = ctx.window._liveVCR;
  assert.ok(bufferPacket, '_liveBufferPacket must be exposed');

  test('bufferPacket adds entry to VCR buffer', () => {
    const initialLen = VCR().buffer.length;
    const pkt = { hash: 'test1', decoded: { header: { payloadTypeName: 'GRP_TXT' }, payload: {} } };
    bufferPacket(pkt);
    assert.strictEqual(VCR().buffer.length, initialLen + 1);
    const last = VCR().buffer[VCR().buffer.length - 1];
    assert.strictEqual(last.pkt.hash, 'test1');
    assert.ok(last.ts > 0);
  });

  test('bufferPacket sets _ts on packet', () => {
    const pkt = { hash: 'test2', decoded: { header: {}, payload: {} } };
    const before = Date.now();
    bufferPacket(pkt);
    const after = Date.now();
    assert.ok(pkt._ts >= before && pkt._ts <= after, `_ts should be between ${before} and ${after}, got ${pkt._ts}`);
  });

  test('VCR buffer caps at ~2000 entries', () => {
    // Fill buffer past 2000
    VCR().buffer.length = 0;
    for (let i = 0; i < 2100; i++) {
      VCR().buffer.push({ ts: Date.now(), pkt: { hash: 'fill' + i } });
    }
    // Next bufferPacket triggers trim: 2100+1=2101 > 2000 → splice(0, 500) → 1601
    const pkt = { hash: 'overflow', decoded: { header: {}, payload: {} } };
    bufferPacket(pkt);
    assert.strictEqual(VCR().buffer.length, 1601, `buffer should be 2101 - 500 = 1601, got ${VCR().buffer.length}`);
  });

  test('bufferPacket increments missedCount when PAUSED', () => {
    ctx.window._liveVcrSetMode('PAUSED');
    VCR().missedCount = 0;
    const pkt = { hash: 'missed1', decoded: { header: {}, payload: {} } };
    bufferPacket(pkt);
    assert.strictEqual(VCR().missedCount, 1);
    bufferPacket({ hash: 'missed2', decoded: { header: {}, payload: {} } });
    assert.strictEqual(VCR().missedCount, 2);
    // Restore LIVE mode for subsequent tests in this sandbox.
    ctx.window._liveVcrSetMode('LIVE');
  });

  test('bufferPacket handles malformed packet without decoded field', () => {
    const before = VCR().buffer.length;
    // Packet with no decoded field at all — should not throw, and should still be buffered
    bufferPacket({ hash: 'malformed1' });
    assert.strictEqual(VCR().buffer.length, before + 1, 'malformed packet should still be added to buffer');
  });

  test('bufferPacket handles packet with null decoded', () => {
    const before = VCR().buffer.length;
    bufferPacket({ hash: 'malformed2', decoded: null });
    assert.strictEqual(VCR().buffer.length, before + 1, 'packet with null decoded should still be added to buffer');
  });
}

// ===== VCR frozenNow behavior =====
console.log('\n=== live.js: VCR frozenNow ===');
{
  const ctx = makeLiveSandbox();
  const VCR = ctx.window._liveVCR;
  const setMode = ctx.window._liveVcrSetMode;

  test('frozenNow is set on first non-LIVE mode', () => {
    setMode('LIVE');
    assert.strictEqual(VCR().frozenNow, null);
    setMode('PAUSED');
    const t1 = VCR().frozenNow;
    assert.ok(t1 > 0);
    // Should NOT change on subsequent non-LIVE mode changes
    setMode('REPLAY');
    assert.strictEqual(VCR().frozenNow, t1, 'frozenNow should not change if already set');
  });

  test('frozenNow cleared on LIVE', () => {
    setMode('PAUSED');
    assert.ok(VCR().frozenNow != null);
    setMode('LIVE');
    assert.strictEqual(VCR().frozenNow, null);
  });
}
|
||||
|
||||
// ===== Source-level checks for live.js safety guards =====
// NOTE: These src.includes() checks are intentionally brittle — they verify that specific
// safety guards exist in the source code TODAY. They will break on whitespace/rename refactors,
// which is an acceptable tradeoff: a failing test forces the developer to verify the guard
// still exists in its new form. For critical guards (animation limits, null checks), prefer
// behavioral tests where feasible (see bufferPacket and VCR sections above).
console.log('\n=== live.js: source-level safety checks ===');
{
  const src = fs.readFileSync('public/live.js', 'utf8');

  test('renderPacketTree null-checks packets array', () => {
    assert.ok(src.includes('if (!packets || !packets.length) return;'),
      'renderPacketTree must guard null/empty packets');
  });

  test('animatePath guards MAX_CONCURRENT_ANIMS', () => {
    assert.ok(src.includes('if (activeAnims >= MAX_CONCURRENT_ANIMS) return;'),
      'animatePath must respect concurrent animation limit');
  });

  test('animatePath guards null animLayer/pathsLayer', () => {
    assert.ok(src.includes('if (!animLayer || !pathsLayer) return;'),
      'animatePath must guard null layers');
  });

  test('pulseNode guards null animLayer/nodesLayer', () => {
    assert.ok(src.includes('if (!animLayer || !nodesLayer) return;'),
      'pulseNode must guard null layers');
  });

  test('nextHop guards null animLayer', () => {
    assert.ok(src.includes('if (!animLayer) return;'),
      'nextHop must guard null animLayer before drawing');
  });

  test('VCR buffer trim adjusts playhead', () => {
    assert.ok(src.includes('VCR.playhead = Math.max(0, VCR.playhead - trimCount)'),
      'buffer trim must adjust playhead to prevent stale indices');
  });

  test('tab hidden skips animations', () => {
    assert.ok(src.includes('if (_tabHidden)'),
      'bufferPacket should skip animation when tab is hidden');
  });

  test('visibility change clears propagation buffer', () => {
    assert.ok(src.includes('propagationBuffer.clear()'),
      'tab restore should clear propagation buffer');
  });

  test('connectWS has reconnect on close', () => {
    assert.ok(src.includes('ws.onclose = () => setTimeout(connectWS, WS_RECONNECT_MS)'),
      'WebSocket should auto-reconnect on close');
  });

  test('addNodeMarker avoids duplicates', () => {
    assert.ok(src.includes('if (nodeMarkers[n.public_key]) return nodeMarkers[n.public_key]'),
      'addNodeMarker should return existing marker if already exists');
  });

  test('matrix mode saves toggle to localStorage', () => {
    assert.ok(src.includes("localStorage.setItem('live-matrix-mode'"),
      'matrix toggle should persist to localStorage');
  });

  test('matrix rain saves toggle to localStorage', () => {
    assert.ok(src.includes("localStorage.setItem('live-matrix-rain'"),
      'matrix rain toggle should persist to localStorage');
  });

  test('realistic propagation saves toggle to localStorage', () => {
    assert.ok(src.includes("localStorage.setItem('live-realistic-propagation'"),
      'realistic propagation toggle should persist to localStorage');
  });

  test('favorites filter saves toggle to localStorage', () => {
    assert.ok(src.includes("localStorage.setItem('live-favorites-only'"),
      'favorites filter toggle should persist to localStorage');
  });

  test('ghost hops saves toggle to localStorage', () => {
    assert.ok(src.includes("localStorage.setItem('live-ghost-hops'"),
      'ghost hops toggle should persist to localStorage');
  });

  test('clearNodeMarkers resets HopResolver', () => {
    assert.ok(src.includes('if (window.HopResolver) HopResolver.init([])'),
      'clearNodeMarkers should reset HopResolver');
  });

  test('rescaleMarkers reads zoom from map', () => {
    assert.ok(src.includes('const zoom = map.getZoom()'),
      'rescaleMarkers should read current zoom level');
  });

  test('startReplay pre-aggregates by hash', () => {
    assert.ok(src.includes('const hashGroups = new Map()'),
      'startReplay should group buffer entries by hash');
  });

  test('orientation change retries resize with delays', () => {
    assert.ok(src.includes('[50, 200, 500, 1000, 2000].forEach'),
      'orientation change handler should retry resize at multiple intervals');
  });

  test('VCR rewind deduplicates buffer entries by ID', () => {
    assert.ok(src.includes('const existingIds = new Set(VCR.buffer.map(b => b.pkt.id)'),
      'vcrRewind should dedup by packet ID');
  });
}

// ===== SUMMARY =====
// Wait for any async tests registered in pendingTests, then print totals and
// exit non-zero if anything failed (so CI sees the failure).
Promise.allSettled(pendingTests).then(() => {
  console.log(`\n${'═'.repeat(40)}`);
  console.log(`  live.js tests: ${passed} passed, ${failed} failed`);
  console.log(`${'═'.repeat(40)}\n`);
  if (failed > 0) process.exit(1);
}).catch((e) => {
  console.error('Failed waiting for async tests:', e);
  process.exit(1);
});
|
||||
+763
@@ -0,0 +1,763 @@
|
||||
/* Unit tests for packets.js functions (tested via VM sandbox) */
|
||||
'use strict';
|
||||
const vm = require('vm');
|
||||
const fs = require('fs');
|
||||
const assert = require('assert');
|
||||
|
||||
let passed = 0, failed = 0;
|
||||
function test(name, fn) {
|
||||
try {
|
||||
fn();
|
||||
passed++;
|
||||
console.log(` ✅ ${name}`);
|
||||
} catch (e) {
|
||||
failed++;
|
||||
console.log(` ❌ ${name}: ${e.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Build a browser-like sandbox with all deps packets.js needs
|
||||
function makeSandbox() {
|
||||
const registeredPages = {};
|
||||
const ctx = {
|
||||
window: {
|
||||
addEventListener: () => {},
|
||||
removeEventListener: () => {},
|
||||
dispatchEvent: () => {},
|
||||
innerWidth: 1200,
|
||||
PacketFilter: null,
|
||||
},
|
||||
document: {
|
||||
readyState: 'complete',
|
||||
createElement: (tag) => ({
|
||||
tagName: tag.toUpperCase(), id: '', textContent: '', innerHTML: '',
|
||||
className: '', style: {}, appendChild: () => {}, setAttribute: () => {},
|
||||
addEventListener: () => {}, querySelectorAll: () => [], querySelector: () => null,
|
||||
classList: { add: () => {}, remove: () => {}, contains: () => false },
|
||||
}),
|
||||
head: { appendChild: () => {} },
|
||||
getElementById: () => null,
|
||||
addEventListener: () => {},
|
||||
removeEventListener: () => {},
|
||||
querySelectorAll: () => [],
|
||||
querySelector: () => null,
|
||||
body: { appendChild: () => {} },
|
||||
},
|
||||
console,
|
||||
Date,
|
||||
Infinity,
|
||||
Math,
|
||||
Array,
|
||||
Object,
|
||||
String,
|
||||
Number,
|
||||
JSON,
|
||||
RegExp,
|
||||
Error,
|
||||
TypeError,
|
||||
RangeError,
|
||||
parseInt,
|
||||
parseFloat,
|
||||
isNaN,
|
||||
isFinite,
|
||||
encodeURIComponent,
|
||||
decodeURIComponent,
|
||||
setTimeout: () => {},
|
||||
clearTimeout: () => {},
|
||||
setInterval: () => {},
|
||||
clearInterval: () => {},
|
||||
fetch: () => Promise.resolve({ ok: true, json: () => Promise.resolve({}) }),
|
||||
performance: { now: () => Date.now() },
|
||||
localStorage: (() => {
|
||||
const store = {};
|
||||
return {
|
||||
getItem: k => store[k] || null,
|
||||
setItem: (k, v) => { store[k] = String(v); },
|
||||
removeItem: k => { delete store[k]; },
|
||||
};
|
||||
})(),
|
||||
location: { hash: '' },
|
||||
history: { replaceState: () => {} },
|
||||
CustomEvent: class CustomEvent {},
|
||||
Map,
|
||||
Set,
|
||||
Promise,
|
||||
URLSearchParams,
|
||||
addEventListener: () => {},
|
||||
removeEventListener: () => {},
|
||||
dispatchEvent: () => {},
|
||||
requestAnimationFrame: (cb) => setTimeout(cb, 0),
|
||||
_registeredPages: registeredPages,
|
||||
// Stub global functions packets.js depends on
|
||||
registerPage: (name, handler) => { registeredPages[name] = handler; },
|
||||
};
|
||||
vm.createContext(ctx);
|
||||
return ctx;
|
||||
}
|
||||
|
||||
function loadInCtx(ctx, file) {
|
||||
vm.runInContext(fs.readFileSync(file, 'utf8'), ctx, { filename: file });
|
||||
for (const k of Object.keys(ctx.window)) {
|
||||
ctx[k] = ctx.window[k];
|
||||
}
|
||||
}
|
||||
|
||||
// Build a sandbox with packets.js loaded, plus everything it needs:
// roles.js and app.js are loaded first (packets.js expects their
// globals), and HopDisplay is stubbed inline.
function loadPacketsSandbox() {
  const ctx = makeSandbox();
  // Load dependencies first
  for (const dep of ['public/roles.js', 'public/app.js']) {
    loadInCtx(ctx, dep);
  }
  // HopDisplay stub (simpler than loading real file which may have DOM deps)
  vm.runInContext(`
 window.HopDisplay = {
 renderHop: function(h, entry, opts) {
 if (entry && entry.name) return '<span class="hop-named">' + entry.name + '</span>';
 return '<span class="hop-hex">' + h + '</span>';
 },
 _showFromBtn: function() {}
 };
 `, ctx);
  loadInCtx(ctx, 'public/packets.js');
  return ctx;
}
|
||||
|
||||
// ===== TESTS =====
// Each section loads a fresh sandbox so state (e.g. observer maps, sort
// modes) from one section cannot leak into another. The functions under
// test are reached through the window._packetsTestAPI hook that
// packets.js exposes.

console.log('\n=== packets.js: typeName ===');
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('typeName returns known type', () => {
    assert.strictEqual(api.typeName(0), 'Request');
    assert.strictEqual(api.typeName(4), 'Advert');
    assert.strictEqual(api.typeName(5), 'Channel Msg');
  });

  test('typeName returns fallback for unknown', () => {
    assert.strictEqual(api.typeName(99), 'Type 99');
    assert.strictEqual(api.typeName(undefined), 'Type undefined');
  });
}

console.log('\n=== packets.js: obsName ===');
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('obsName returns dash for falsy id', () => {
    assert.strictEqual(api.obsName(null), '—');
    assert.strictEqual(api.obsName(''), '—');
    assert.strictEqual(api.obsName(undefined), '—');
  });

  // Fresh sandbox means the observer map is empty, so any id falls through.
  test('obsName returns id when not in observerMap', () => {
    assert.strictEqual(api.obsName('unknown-id'), 'unknown-id');
  });
}

console.log('\n=== packets.js: kv ===');
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  // Assert on CSS class names and content rather than exact markup, so
  // cosmetic HTML changes don't break the test.
  test('kv produces correct HTML', () => {
    const result = api.kv('Route', 'Direct');
    assert(result.includes('byop-key'));
    assert(result.includes('Route'));
    assert(result.includes('Direct'));
    assert(result.includes('byop-val'));
  });
}

console.log('\n=== packets.js: sectionRow / fieldRow ===');
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('sectionRow produces section HTML', () => {
    const result = api.sectionRow('Header');
    assert(result.includes('section-row'));
    assert(result.includes('Header'));
    assert(result.includes('colspan="4"'));
  });

  test('fieldRow produces field HTML', () => {
    const result = api.fieldRow(0, 'Header Byte', '0xFF', 'some desc');
    assert(result.includes('0'));
    assert(result.includes('Header Byte'));
    assert(result.includes('0xFF'));
    assert(result.includes('some desc'));
    assert(result.includes('mono'));
  });

  test('fieldRow handles empty description', () => {
    const result = api.fieldRow(5, 'Test', 'val', '');
    assert(result.includes('text-muted'));
  });
}
|
||||
|
||||
console.log('\n=== packets.js: getDetailPreview ===');
// getDetailPreview renders a one-line summary for a decoded packet;
// these tests cover each packet type branch plus the fallbacks.
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('getDetailPreview returns empty for null/undefined', () => {
    assert.strictEqual(api.getDetailPreview(null), '');
    assert.strictEqual(api.getDetailPreview(undefined), '');
  });

  test('getDetailPreview handles CHAN type', () => {
    const result = api.getDetailPreview({ type: 'CHAN', text: 'hello world', channel: 'general' });
    assert(result.includes('💬'));
    assert(result.includes('hello world'));
    assert(result.includes('chan-tag'));
    assert(result.includes('general'));
  });

  // 100 chars exceeds the preview limit: output must be elided and must
  // not contain the full original text.
  test('getDetailPreview truncates long CHAN text', () => {
    const longText = 'x'.repeat(100);
    const result = api.getDetailPreview({ type: 'CHAN', text: longText });
    assert(result.includes('…'));
    assert(!result.includes('x'.repeat(100)));
  });

  test('getDetailPreview handles ADVERT type', () => {
    const result = api.getDetailPreview({
      type: 'ADVERT', name: 'TestNode', pubKey: 'abc123',
      flags: { repeater: true }
    });
    assert(result.includes('📡'));
    assert(result.includes('TestNode'));
    assert(result.includes('hop-link'));
  });

  test('getDetailPreview handles ADVERT room', () => {
    const result = api.getDetailPreview({
      type: 'ADVERT', name: 'RoomNode', pubKey: 'abc',
      flags: { room: true }
    });
    assert(result.includes('🏠'));
  });

  test('getDetailPreview handles ADVERT sensor', () => {
    const result = api.getDetailPreview({
      type: 'ADVERT', name: 'Sensor1', pubKey: 'abc',
      flags: { sensor: true }
    });
    assert(result.includes('🌡'));
  });

  // Empty flags object: none of repeater/room/sensor set, so the
  // companion icon is the default.
  test('getDetailPreview handles ADVERT companion (default)', () => {
    const result = api.getDetailPreview({
      type: 'ADVERT', name: 'Comp', pubKey: 'abc',
      flags: {}
    });
    assert(result.includes('📻'));
  });

  test('getDetailPreview handles GRP_TXT with channelHash (no_key)', () => {
    const result = api.getDetailPreview({
      type: 'GRP_TXT', channelHash: 0xAB, decryptionStatus: 'no_key'
    });
    assert(result.includes('🔒'));
    assert(result.includes('0xAB'));
    assert(result.includes('no key'));
  });

  test('getDetailPreview handles GRP_TXT decryption_failed', () => {
    const result = api.getDetailPreview({
      type: 'GRP_TXT', channelHash: 5, decryptionStatus: 'decryption_failed'
    });
    assert(result.includes('decryption failed'));
  });

  test('getDetailPreview handles GRP_TXT with channelHashHex', () => {
    const result = api.getDetailPreview({
      type: 'GRP_TXT', channelHash: 0xFF, channelHashHex: 'FF'
    });
    assert(result.includes('0xFF'));
  });

  test('getDetailPreview handles TXT_MSG', () => {
    const result = api.getDetailPreview({
      type: 'TXT_MSG', srcHash: 'abcdef01', destHash: '12345678'
    });
    assert(result.includes('✉️'));
    assert(result.includes('abcdef01'));
    assert(result.includes('12345678'));
  });

  test('getDetailPreview handles PATH', () => {
    const result = api.getDetailPreview({
      type: 'PATH', srcHash: 'aabb', destHash: 'ccdd'
    });
    assert(result.includes('🔀'));
  });

  test('getDetailPreview handles REQ', () => {
    const result = api.getDetailPreview({
      type: 'REQ', srcHash: 'aa', destHash: 'bb'
    });
    assert(result.includes('🔒'));
    assert(result.includes('aa'));
  });

  test('getDetailPreview handles RESPONSE', () => {
    const result = api.getDetailPreview({
      type: 'RESPONSE', srcHash: 'aa', destHash: 'bb'
    });
    assert(result.includes('🔒'));
  });

  test('getDetailPreview handles ANON_REQ', () => {
    const result = api.getDetailPreview({
      type: 'ANON_REQ', destHash: 'dd'
    });
    assert(result.includes('anon'));
    assert(result.includes('dd'));
  });

  // No recognized `type` field — falls back to rendering bare text.
  test('getDetailPreview handles text fallback', () => {
    const result = api.getDetailPreview({ text: 'some message' });
    assert(result.includes('some message'));
  });

  test('getDetailPreview truncates long text fallback', () => {
    const result = api.getDetailPreview({ text: 'z'.repeat(100) });
    assert(result.includes('…'));
  });

  test('getDetailPreview handles public_key fallback', () => {
    const result = api.getDetailPreview({ public_key: 'abcdef1234567890abcdef' });
    assert(result.includes('📡'));
    assert(result.includes('abcdef1234567890'));
  });

  test('getDetailPreview returns empty for empty decoded', () => {
    assert.strictEqual(api.getDetailPreview({}), '');
  });
}

console.log('\n=== packets.js: getPathHopCount ===');
// getPathHopCount parses a packet's path_json column; every malformed
// input must yield 0 rather than throwing.
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('getPathHopCount with valid path', () => {
    assert.strictEqual(api.getPathHopCount({ path_json: '["a","b","c"]' }), 3);
  });

  test('getPathHopCount with empty path', () => {
    assert.strictEqual(api.getPathHopCount({ path_json: '[]' }), 0);
  });

  test('getPathHopCount with null/missing', () => {
    assert.strictEqual(api.getPathHopCount({}), 0);
    assert.strictEqual(api.getPathHopCount({ path_json: null }), 0);
  });

  test('getPathHopCount with invalid JSON', () => {
    assert.strictEqual(api.getPathHopCount({ path_json: 'not json' }), 0);
  });
}
|
||||
|
||||
console.log('\n=== packets.js: sortGroupChildren ===');
// sortGroupChildren orders a group's child observations and promotes the
// first child's fields onto the group header row.
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('sortGroupChildren handles null/empty gracefully', () => {
    api.sortGroupChildren(null);
    api.sortGroupChildren({});
    api.sortGroupChildren({ _children: [] });
    // No throw
  });

  test('sortGroupChildren default sort groups by observer earliest-first', () => {
    // Need to set obsSortMode — it reads from closure. Default is 'observer'.
    const group = {
      _children: [
        { observer_name: 'B', timestamp: '2024-01-01T02:00:00Z' },
        { observer_name: 'A', timestamp: '2024-01-01T01:00:00Z' },
        { observer_name: 'B', timestamp: '2024-01-01T01:30:00Z' },
      ]
    };
    api.sortGroupChildren(group);
    // A has earliest timestamp, should be first
    assert.strictEqual(group._children[0].observer_name, 'A');
    // Then B entries
    assert.strictEqual(group._children[1].observer_name, 'B');
    assert.strictEqual(group._children[2].observer_name, 'B');
    // B entries should be time-ascending within group
    // (ISO-8601 strings compare correctly with <)
    assert(group._children[1].timestamp < group._children[2].timestamp);
  });

  test('sortGroupChildren updates header from first child', () => {
    const group = {
      observer_id: 'old',
      _children: [
        { observer_name: 'A', observer_id: 'new-id', timestamp: '2024-01-01T01:00:00Z', snr: 10, rssi: -50, path_json: '["x"]', direction: 'rx' },
      ]
    };
    api.sortGroupChildren(group);
    // Header fields must now mirror the (only) child.
    assert.strictEqual(group.observer_id, 'new-id');
    assert.strictEqual(group.snr, 10);
    assert.strictEqual(group.rssi, -50);
    assert.strictEqual(group.path_json, '["x"]');
    assert.strictEqual(group.direction, 'rx');
  });
}

console.log('\n=== packets.js: renderTimestampCell ===');
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('renderTimestampCell produces HTML with timestamp-text', () => {
    const result = api.renderTimestampCell('2024-01-15T10:30:00Z');
    assert(result.includes('timestamp-text'));
  });

  test('renderTimestampCell handles null gracefully', () => {
    const result = api.renderTimestampCell(null);
    // Should not throw, produces some output
    assert(typeof result === 'string');
  });
}

console.log('\n=== packets.js: renderPath ===');
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('renderPath returns dash for empty/null', () => {
    assert.strictEqual(api.renderPath(null, null), '—');
    assert.strictEqual(api.renderPath([], null), '—');
  });

  // Multi-hop paths are joined with arrow separators.
  test('renderPath renders hops with arrows', () => {
    const result = api.renderPath(['aa', 'bb'], null);
    assert(result.includes('arrow'));
    assert(result.includes('aa'));
    assert(result.includes('bb'));
  });

  test('renderPath renders single hop without arrow', () => {
    const result = api.renderPath(['cc'], null);
    assert(result.includes('cc'));
    assert(!result.includes('arrow'));
  });
}
|
||||
|
||||
console.log('\n=== packets.js: renderDecodedPacket ===');
// renderDecodedPacket renders the full decoded-packet view: header
// section, path hops, payload key/values, and the raw hex dump.
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('renderDecodedPacket produces header section', () => {
    const decoded = {
      header: { routeType: 0, payloadType: 4, payloadVersion: 1 },
      payload: { name: 'TestNode' },
      path: { hops: [] }
    };
    const hex = 'aabbccdd'; // 8 hex chars -> "4 bytes"
    const result = api.renderDecodedPacket(decoded, hex);
    assert(result.includes('byop-decoded'));
    assert(result.includes('Header'));
    assert(result.includes('4 bytes'));
  });

  test('renderDecodedPacket renders path hops', () => {
    const decoded = {
      header: { routeType: 0, payloadType: 4 },
      payload: {},
      path: { hops: ['aa', 'bb'] }
    };
    const hex = 'aabbccdd';
    const result = api.renderDecodedPacket(decoded, hex);
    assert(result.includes('Path (2 hops)'));
    assert(result.includes('aa'));
    assert(result.includes('bb'));
  });

  test('renderDecodedPacket renders payload fields', () => {
    const decoded = {
      header: { routeType: 0, payloadType: 5 },
      payload: { channel: 'general', text: 'hello' },
      path: { hops: [] }
    };
    const hex = 'aabb';
    const result = api.renderDecodedPacket(decoded, hex);
    assert(result.includes('channel'));
    assert(result.includes('general'));
    assert(result.includes('hello'));
  });

  // Object-valued payload fields are pretty-printed as JSON in a <pre>.
  test('renderDecodedPacket renders nested objects as JSON', () => {
    const decoded = {
      header: { routeType: 0, payloadType: 0 },
      payload: { flags: { repeater: true } },
      path: { hops: [] }
    };
    const hex = 'aa';
    const result = api.renderDecodedPacket(decoded, hex);
    assert(result.includes('byop-pre'));
    assert(result.includes('repeater'));
  });

  test('renderDecodedPacket skips null payload values', () => {
    const decoded = {
      header: { routeType: 0, payloadType: 0 },
      payload: { a: null, b: undefined, c: 'visible' },
      path: { hops: [] }
    };
    const hex = 'aa';
    const result = api.renderDecodedPacket(decoded, hex);
    assert(result.includes('visible'));
    // null/undefined values should be skipped
    const kvCount = (result.match(/byop-row/g) || []).length;
    // Only 'c' should appear in payload (a and b are null/undefined), plus header fields
    assert(kvCount >= 1);
  });

  test('renderDecodedPacket renders raw hex', () => {
    const decoded = {
      header: { routeType: 0, payloadType: 0 },
      payload: {},
      path: { hops: [] }
    };
    const hex = 'aabbcc';
    const result = api.renderDecodedPacket(decoded, hex);
    // Hex dump is uppercased and space-separated per byte.
    assert(result.includes('AA BB CC'));
    assert(result.includes('byop-hex'));
  });
}

console.log('\n=== packets.js: buildFieldTable ===');
// buildFieldTable renders the byte-level field breakdown table for a
// packet; branches depend on route_type and payload_type.
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('buildFieldTable produces table HTML', () => {
    const pkt = { raw_hex: 'c0400102', route_type: 1, payload_type: 4 };
    const decoded = { type: 'ADVERT', name: 'Node', pubKey: 'abc', flags: { type: 2, hasLocation: false, hasName: true, raw: 0x22 } };
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('field-table'));
    assert(result.includes('Header'));
    assert(result.includes('Header Byte'));
    assert(result.includes('Path Length'));
  });

  test('buildFieldTable handles transport codes (route_type 0)', () => {
    const pkt = { raw_hex: 'c0400102030405060708', route_type: 0, payload_type: 0 };
    const decoded = { destHash: 'aa', srcHash: 'bb', mac: 'cc', encryptedData: 'dd' };
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('Transport Codes'));
    assert(result.includes('Next Hop'));
    assert(result.includes('Last Hop'));
  });

  test('buildFieldTable renders path hops', () => {
    const pkt = { raw_hex: 'c042aabb', route_type: 1, payload_type: 0 };
    const decoded = { destHash: 'xx' };
    const result = api.buildFieldTable(pkt, decoded, ['aa', 'bb'], []);
    assert(result.includes('Path (2 hops)'));
    assert(result.includes('Hop 0'));
    assert(result.includes('Hop 1'));
  });

  test('buildFieldTable renders ADVERT payload', () => {
    const pkt = { raw_hex: 'c040', route_type: 1, payload_type: 4 };
    const decoded = {
      type: 'ADVERT', pubKey: 'abc123', timestamp: 1234567890,
      timestampISO: '2009-02-13T23:31:30Z', signature: 'sig',
      name: 'TestNode',
      flags: { type: 1, hasLocation: true, hasName: true, raw: 0x55 }
    };
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('Public Key'));
    assert(result.includes('Timestamp'));
    assert(result.includes('Signature'));
    assert(result.includes('App Flags'));
    assert(result.includes('Companion'));
    assert(result.includes('Latitude'));
    assert(result.includes('Node Name'));
  });

  test('buildFieldTable renders GRP_TXT payload', () => {
    const pkt = { raw_hex: 'c040', route_type: 1, payload_type: 5 };
    const decoded = { type: 'GRP_TXT', channelHash: 0xAB, mac: 'AABB', encryptedData: 'data', decryptionStatus: 'no_key' };
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('Channel Hash'));
    assert(result.includes('MAC'));
    assert(result.includes('Encrypted Data'));
  });

  test('buildFieldTable renders CHAN payload', () => {
    const pkt = { raw_hex: 'c040', route_type: 1, payload_type: 5 };
    const decoded = { type: 'CHAN', channel: 'general', sender: 'Alice', sender_timestamp: '12:00' };
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('Channel'));
    assert(result.includes('general'));
    assert(result.includes('Sender'));
    assert(result.includes('Sender Time'));
  });

  test('buildFieldTable renders ACK payload', () => {
    const pkt = { raw_hex: 'c040', route_type: 1, payload_type: 3 };
    const decoded = { type: 'ACK', ackChecksum: 'DEADBEEF' };
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('Checksum'));
    assert(result.includes('DEADBEEF'));
  });

  test('buildFieldTable renders destHash-based payload', () => {
    const pkt = { raw_hex: 'c040', route_type: 1, payload_type: 2 };
    const decoded = { destHash: 'DD', srcHash: 'SS', mac: 'MM', encryptedData: 'EE' };
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('Dest Hash'));
    assert(result.includes('Src Hash'));
  });

  test('buildFieldTable renders raw fallback for unknown payload', () => {
    const pkt = { raw_hex: 'c040aabbccdd', route_type: 1, payload_type: 99 };
    const decoded = {};
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('Raw'));
  });

  test('buildFieldTable hash_size calculation', () => {
    // Path byte 0xC0 → bits 7-6 = 3 → hash_size = 4
    const pkt = { raw_hex: '00C0', route_type: 1, payload_type: 0 };
    const decoded = {};
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('hash_size=4'));
  });

  // Deliberately loose assertion: implementations may render zero-length
  // packets in a few acceptable ways.
  test('buildFieldTable handles empty raw_hex', () => {
    const pkt = { raw_hex: '', route_type: 1, payload_type: 0 };
    const decoded = {};
    const result = api.buildFieldTable(pkt, decoded, [], []);
    assert(result.includes('field-table'));
    assert(result.includes('0B') || result.includes('0 bytes') || result.includes('??'));
  });
}
|
||||
|
||||
console.log('\n=== packets.js: _getRowCount ===');
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('_getRowCount returns 1 for ungrouped', () => {
    // _displayGrouped is internal, but when not grouped, should return 1
    // Since we can't easily control _displayGrouped, test the function behavior
    const result = api._getRowCount({ hash: 'abc', _children: [{ observer_id: '1' }] });
    // Default _displayGrouped depends on initialization, but the function should not throw
    assert(typeof result === 'number');
    assert(result >= 1);
  });
}

console.log('\n=== packets.js: buildFlatRowHtml ===');
// buildFlatRowHtml renders one ungrouped packet table row.
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('buildFlatRowHtml produces table row', () => {
    const p = {
      id: 1, hash: 'abc123', timestamp: '2024-01-01T00:00:00Z',
      observer_id: null, raw_hex: 'aabb', payload_type: 4,
      route_type: 1, decoded_json: '{}', path_json: '[]'
    };
    const result = api.buildFlatRowHtml(p);
    assert(result.includes('<tr'));
    assert(result.includes('data-id="1"'));
    assert(result.includes('data-hash="abc123"'));
  });

  test('buildFlatRowHtml calculates size from hex', () => {
    const p = {
      id: 2, hash: 'x', timestamp: '', observer_id: null,
      raw_hex: 'aabbccdd', payload_type: 0, route_type: 0,
      decoded_json: '{}', path_json: '[]'
    };
    const result = api.buildFlatRowHtml(p);
    assert(result.includes('4B')); // 8 hex chars = 4 bytes
  });

  test('buildFlatRowHtml handles missing raw_hex', () => {
    const p = {
      id: 3, hash: 'y', timestamp: '', observer_id: null,
      raw_hex: null, payload_type: 0, route_type: 0,
      decoded_json: '{}', path_json: '[]'
    };
    const result = api.buildFlatRowHtml(p);
    // Null hex must degrade to a zero-byte size, not throw.
    assert(result.includes('0B'));
  });
}

console.log('\n=== packets.js: buildGroupRowHtml ===');
// buildGroupRowHtml renders a grouped (deduplicated-by-hash) packet row;
// multi-observation groups get an expand arrow and header styling.
{
  const ctx = loadPacketsSandbox();
  const api = ctx._packetsTestAPI;

  test('buildGroupRowHtml renders single-count group', () => {
    const p = {
      hash: 'abc', count: 1, latest: '2024-01-01T00:00:00Z',
      observer_id: null, raw_hex: 'aabb', payload_type: 4,
      route_type: 1, decoded_json: '{}', path_json: '[]',
      observation_count: 1, observer_count: 1
    };
    const result = api.buildGroupRowHtml(p);
    assert(result.includes('<tr'));
    assert(result.includes('data-hash="abc"'));
    // Single count: no expand arrow, no group-header class
    assert(!result.includes('group-header'));
  });

  test('buildGroupRowHtml renders multi-count group with expand arrow', () => {
    const p = {
      hash: 'xyz', count: 3, latest: '2024-01-01T00:00:00Z',
      observer_id: null, raw_hex: 'aabbcc', payload_type: 0,
      route_type: 0, decoded_json: '{}', path_json: '[]',
      observation_count: 3, observer_count: 2
    };
    const result = api.buildGroupRowHtml(p);
    assert(result.includes('group-header'));
    assert(result.includes('▶')); // collapsed arrow
  });

  test('buildGroupRowHtml shows observation count badge', () => {
    const p = {
      hash: 'obs', count: 1, latest: '2024-01-01T00:00:00Z',
      observer_id: null, raw_hex: 'aa', payload_type: 0,
      route_type: 0, decoded_json: '{}', path_json: '[]',
      observation_count: 5, observer_count: 1
    };
    const result = api.buildGroupRowHtml(p);
    assert(result.includes('badge-obs'));
    assert(result.includes('👁'));
    assert(result.includes('5'));
  });
}
|
||||
|
||||
console.log('\n=== packets.js: page registration ===');
{
  const ctx = loadPacketsSandbox();
  // registerPage is defined in app.js and stores in its own `pages` closure.
  // We verify via the navigateTo mechanism or by checking the pages object isn't empty.
  // Since we can't easily access the closure, just verify the test API is exposed.
  test('_packetsTestAPI is exposed on window', () => {
    assert(ctx._packetsTestAPI);
    assert(typeof ctx._packetsTestAPI.typeName === 'function');
    assert(typeof ctx._packetsTestAPI.getDetailPreview === 'function');
    assert(typeof ctx._packetsTestAPI.sortGroupChildren === 'function');
    assert(typeof ctx._packetsTestAPI.buildFieldTable === 'function');
  });
}

// ===== SUMMARY =====
// Print totals and exit non-zero on any failure so CI marks the run red.
console.log(`\n${'='.repeat(40)}`);
console.log(`packets.js tests: ${passed} passed, ${failed} failed`);
if (failed > 0) process.exit(1);
|
||||
Reference in New Issue
Block a user