diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 343961f..d1e796b 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -36,7 +36,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
fetch-depth: 0
@@ -54,7 +54,7 @@ jobs:
- name: Set up Go 1.22
if: steps.docs-check.outputs.docs_only != 'true'
- uses: actions/setup-go@v5
+ uses: actions/setup-go@v6
with:
go-version: '1.22'
cache-dependency-path: |
@@ -144,7 +144,7 @@ jobs:
- name: Upload Go coverage badges
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v5
with:
name: go-badges
path: .badges/go-*.json
@@ -156,10 +156,13 @@ jobs:
# ───────────────────────────────────────────────────────────────
node-test:
name: "🧪 Node.js Tests"
- runs-on: self-hosted
+ runs-on: [self-hosted, Linux]
+ defaults:
+ run:
+ shell: bash
steps:
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
fetch-depth: 0
@@ -177,7 +180,7 @@ jobs:
- name: Set up Node.js 22
if: steps.docs-check.outputs.docs_only != 'true'
- uses: actions/setup-node@v4
+ uses: actions/setup-node@v5
with:
node-version: '22'
@@ -307,7 +310,7 @@ jobs:
- name: Upload Node.js test badges
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v5
with:
name: node-badges
path: .badges/
@@ -321,13 +324,13 @@ jobs:
name: "🏗️ Build Docker Image"
if: github.event_name == 'push'
needs: [go-test, node-test]
- runs-on: self-hosted
+ runs-on: [self-hosted, Linux]
steps:
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
- name: Set up Node.js 22
- uses: actions/setup-node@v4
+ uses: actions/setup-node@v5
with:
node-version: '22'
@@ -346,10 +349,10 @@ jobs:
name: "🚀 Deploy Staging"
if: github.event_name == 'push'
needs: [build]
- runs-on: self-hosted
+ runs-on: [self-hosted, Linux]
steps:
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
- name: Start staging on port 82
run: |
@@ -391,21 +394,21 @@ jobs:
name: "📝 Publish Badges & Summary"
if: github.event_name == 'push'
needs: [deploy]
- runs-on: self-hosted
+ runs-on: [self-hosted, Linux]
steps:
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
- name: Download Go coverage badges
continue-on-error: true
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v5
with:
name: go-badges
path: .badges/
- name: Download Node.js test badges
continue-on-error: true
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v5
with:
name: node-badges
path: .badges/
diff --git a/cmd/ingestor/decoder.go b/cmd/ingestor/decoder.go
index 147ec88..708d5b4 100644
--- a/cmd/ingestor/decoder.go
+++ b/cmd/ingestor/decoder.go
@@ -72,8 +72,8 @@ type Header struct {
// TransportCodes are present on TRANSPORT_FLOOD and TRANSPORT_DIRECT routes.
type TransportCodes struct {
- NextHop string `json:"nextHop"`
- LastHop string `json:"lastHop"`
+ Code1 string `json:"code1"`
+ Code2 string `json:"code2"`
}
// Path holds decoded path/hop information.
@@ -92,6 +92,8 @@ type AdvertFlags struct {
Room bool `json:"room"`
Sensor bool `json:"sensor"`
HasLocation bool `json:"hasLocation"`
+ HasFeat1 bool `json:"hasFeat1"`
+ HasFeat2 bool `json:"hasFeat2"`
HasName bool `json:"hasName"`
}
@@ -111,6 +113,8 @@ type Payload struct {
Lat *float64 `json:"lat,omitempty"`
Lon *float64 `json:"lon,omitempty"`
Name string `json:"name,omitempty"`
+ Feat1 *int `json:"feat1,omitempty"`
+ Feat2 *int `json:"feat2,omitempty"`
BatteryMv *int `json:"battery_mv,omitempty"`
TemperatureC *float64 `json:"temperature_c,omitempty"`
ChannelHash int `json:"channelHash,omitempty"`
@@ -123,6 +127,8 @@ type Payload struct {
EphemeralPubKey string `json:"ephemeralPubKey,omitempty"`
PathData string `json:"pathData,omitempty"`
Tag uint32 `json:"tag,omitempty"`
+ AuthCode uint32 `json:"authCode,omitempty"`
+ TraceFlags *int `json:"traceFlags,omitempty"`
RawHex string `json:"raw,omitempty"`
Error string `json:"error,omitempty"`
}
@@ -199,14 +205,13 @@ func decodeEncryptedPayload(typeName string, buf []byte) Payload {
}
func decodeAck(buf []byte) Payload {
- if len(buf) < 6 {
+ if len(buf) < 4 {
return Payload{Type: "ACK", Error: "too short", RawHex: hex.EncodeToString(buf)}
}
+ checksum := binary.LittleEndian.Uint32(buf[0:4])
return Payload{
Type: "ACK",
- DestHash: hex.EncodeToString(buf[0:1]),
- SrcHash: hex.EncodeToString(buf[1:2]),
- ExtraHash: hex.EncodeToString(buf[2:6]),
+ ExtraHash: fmt.Sprintf("%08x", checksum),
}
}
@@ -231,6 +236,8 @@ func decodeAdvert(buf []byte) Payload {
if len(appdata) > 0 {
flags := appdata[0]
advType := int(flags & 0x0F)
+ hasFeat1 := flags&0x20 != 0
+ hasFeat2 := flags&0x40 != 0
p.Flags = &AdvertFlags{
Raw: int(flags),
Type: advType,
@@ -239,6 +246,8 @@ func decodeAdvert(buf []byte) Payload {
Room: advType == 3,
Sensor: advType == 4,
HasLocation: flags&0x10 != 0,
+ HasFeat1: hasFeat1,
+ HasFeat2: hasFeat2,
HasName: flags&0x80 != 0,
}
@@ -252,6 +261,16 @@ func decodeAdvert(buf []byte) Payload {
p.Lon = &lon
off += 8
}
+ if hasFeat1 && len(appdata) >= off+2 {
+ feat1 := int(binary.LittleEndian.Uint16(appdata[off : off+2]))
+ p.Feat1 = &feat1
+ off += 2
+ }
+ if hasFeat2 && len(appdata) >= off+2 {
+ feat2 := int(binary.LittleEndian.Uint16(appdata[off : off+2]))
+ p.Feat2 = &feat2
+ off += 2
+ }
if p.Flags.HasName {
// Find null terminator to separate name from trailing telemetry bytes
nameEnd := len(appdata)
@@ -469,15 +488,22 @@ func decodePathPayload(buf []byte) Payload {
}
func decodeTrace(buf []byte) Payload {
- if len(buf) < 12 {
+ if len(buf) < 9 {
return Payload{Type: "TRACE", Error: "too short", RawHex: hex.EncodeToString(buf)}
}
- return Payload{
- Type: "TRACE",
- DestHash: hex.EncodeToString(buf[5:11]),
- SrcHash: hex.EncodeToString(buf[11:12]),
- Tag: binary.LittleEndian.Uint32(buf[1:5]),
+ tag := binary.LittleEndian.Uint32(buf[0:4])
+ authCode := binary.LittleEndian.Uint32(buf[4:8])
+ flags := int(buf[8])
+ p := Payload{
+ Type: "TRACE",
+ Tag: tag,
+ AuthCode: authCode,
+ TraceFlags: &flags,
}
+ if len(buf) > 9 {
+ p.PathData = hex.EncodeToString(buf[9:])
+ }
+ return p
}
func decodePayload(payloadType int, buf []byte, channelKeys map[string]string) Payload {
@@ -520,8 +546,7 @@ func DecodePacket(hexString string, channelKeys map[string]string) (*DecodedPack
}
header := decodeHeader(buf[0])
- pathByte := buf[1]
- offset := 2
+ offset := 1
var tc *TransportCodes
if isTransportRoute(header.RouteType) {
@@ -529,12 +554,18 @@ func DecodePacket(hexString string, channelKeys map[string]string) (*DecodedPack
return nil, fmt.Errorf("packet too short for transport codes")
}
tc = &TransportCodes{
- NextHop: strings.ToUpper(hex.EncodeToString(buf[offset : offset+2])),
- LastHop: strings.ToUpper(hex.EncodeToString(buf[offset+2 : offset+4])),
+ Code1: strings.ToUpper(hex.EncodeToString(buf[offset : offset+2])),
+ Code2: strings.ToUpper(hex.EncodeToString(buf[offset+2 : offset+4])),
}
offset += 4
}
+ if offset >= len(buf) {
+ return nil, fmt.Errorf("packet too short (no path byte)")
+ }
+ pathByte := buf[offset]
+ offset++
+
path, bytesConsumed := decodePath(pathByte, buf, offset)
offset += bytesConsumed
@@ -562,16 +593,24 @@ func ComputeContentHash(rawHex string) string {
return rawHex
}
- pathByte := buf[1]
+ headerByte := buf[0]
+ offset := 1
+ if isTransportRoute(int(headerByte & 0x03)) {
+ offset += 4
+ }
+ if offset >= len(buf) {
+ if len(rawHex) >= 16 {
+ return rawHex[:16]
+ }
+ return rawHex
+ }
+ pathByte := buf[offset]
+ offset++
hashSize := int((pathByte>>6)&0x3) + 1
hashCount := int(pathByte & 0x3F)
pathBytes := hashSize * hashCount
- headerByte := buf[0]
- payloadStart := 2 + pathBytes
- if isTransportRoute(int(headerByte & 0x03)) {
- payloadStart += 4
- }
+ payloadStart := offset + pathBytes
if payloadStart > len(buf) {
if len(rawHex) >= 16 {
return rawHex[:16]
diff --git a/cmd/ingestor/decoder_test.go b/cmd/ingestor/decoder_test.go
index 8c219dd..51ae989 100644
--- a/cmd/ingestor/decoder_test.go
+++ b/cmd/ingestor/decoder_test.go
@@ -129,7 +129,8 @@ func TestDecodePath3ByteHashes(t *testing.T) {
func TestTransportCodes(t *testing.T) {
// Route type 0 (TRANSPORT_FLOOD) should have transport codes
- hex := "1400" + "AABB" + "CCDD" + "1A" + strings.Repeat("00", 10)
+ // Firmware order: header + transport_codes(4) + path_len + path + payload
+ hex := "14" + "AABB" + "CCDD" + "00" + strings.Repeat("00", 10)
pkt, err := DecodePacket(hex, nil)
if err != nil {
t.Fatal(err)
@@ -140,11 +141,11 @@ func TestTransportCodes(t *testing.T) {
if pkt.TransportCodes == nil {
t.Fatal("transportCodes should not be nil for TRANSPORT_FLOOD")
}
- if pkt.TransportCodes.NextHop != "AABB" {
- t.Errorf("nextHop=%s, want AABB", pkt.TransportCodes.NextHop)
+ if pkt.TransportCodes.Code1 != "AABB" {
+ t.Errorf("code1=%s, want AABB", pkt.TransportCodes.Code1)
}
- if pkt.TransportCodes.LastHop != "CCDD" {
- t.Errorf("lastHop=%s, want CCDD", pkt.TransportCodes.LastHop)
+ if pkt.TransportCodes.Code2 != "CCDD" {
+ t.Errorf("code2=%s, want CCDD", pkt.TransportCodes.Code2)
}
// Route type 1 (FLOOD) should NOT have transport codes
@@ -537,10 +538,11 @@ func TestDecodeTraceShort(t *testing.T) {
func TestDecodeTraceValid(t *testing.T) {
buf := make([]byte, 16)
- buf[0] = 0x00
- buf[1] = 0x01 // tag LE uint32 = 1
- buf[5] = 0xAA // destHash start
- buf[11] = 0xBB
+ // tag(4) + authCode(4) + flags(1) + pathData
+ binary.LittleEndian.PutUint32(buf[0:4], 1) // tag = 1
+ binary.LittleEndian.PutUint32(buf[4:8], 0xDEADBEEF) // authCode
+ buf[8] = 0x02 // flags
+ buf[9] = 0xAA // path data
p := decodeTrace(buf)
if p.Error != "" {
t.Errorf("unexpected error: %s", p.Error)
@@ -548,9 +550,18 @@ func TestDecodeTraceValid(t *testing.T) {
if p.Tag != 1 {
t.Errorf("tag=%d, want 1", p.Tag)
}
+ if p.AuthCode != 0xDEADBEEF {
+ t.Errorf("authCode=%d, want 0xDEADBEEF", p.AuthCode)
+ }
+ if p.TraceFlags == nil || *p.TraceFlags != 2 {
+ t.Errorf("traceFlags=%v, want 2", p.TraceFlags)
+ }
if p.Type != "TRACE" {
t.Errorf("type=%s, want TRACE", p.Type)
}
+ if p.PathData == "" {
+ t.Error("pathData should not be empty")
+ }
}
func TestDecodeAdvertShort(t *testing.T) {
@@ -833,10 +844,9 @@ func TestComputeContentHashShortHex(t *testing.T) {
}
func TestComputeContentHashTransportRoute(t *testing.T) {
- // Route type 0 (TRANSPORT_FLOOD) with no path hops + 4 transport code bytes
- // header=0x14 (TRANSPORT_FLOOD, ADVERT), path=0x00 (0 hops)
- // transport codes = 4 bytes, then payload
- hex := "1400" + "AABBCCDD" + strings.Repeat("EE", 10)
+ // Route type 0 (TRANSPORT_FLOOD) with transport codes then path=0x00 (0 hops)
+ // header=0x14 (TRANSPORT_FLOOD, ADVERT), transport(4), path=0x00
+ hex := "14" + "AABBCCDD" + "00" + strings.Repeat("EE", 10)
hash := ComputeContentHash(hex)
if len(hash) != 16 {
t.Errorf("hash length=%d, want 16", len(hash))
@@ -870,12 +880,10 @@ func TestComputeContentHashPayloadBeyondBufferLongHex(t *testing.T) {
func TestComputeContentHashTransportBeyondBuffer(t *testing.T) {
// Transport route (0x00 = TRANSPORT_FLOOD) with path claiming some bytes
- // total buffer too short for transport codes + path
- // header=0x00, pathByte=0x02 (2 hops, 1-byte hash), then only 2 more bytes
- // payloadStart = 2 + 2 + 4(transport) = 8, but buffer only 6 bytes
- hex := "0002" + "AABB" + strings.Repeat("CC", 6) // 20 chars = 10 bytes
+ // header=0x00, transport(4), pathByte=0x02 (2 hops, 1-byte hash)
+ // offset=1+4+1+2=8, buffer needs to be >= 8
+ hex := "00" + "AABB" + "CCDD" + "02" + strings.Repeat("CC", 6) // 20 chars = 10 bytes
hash := ComputeContentHash(hex)
- // payloadStart = 2 + 2 + 4 = 8, buffer is 10 bytes → should work
if len(hash) != 16 {
t.Errorf("hash length=%d, want 16", len(hash))
}
@@ -913,8 +921,8 @@ func TestDecodePacketWithNewlines(t *testing.T) {
}
func TestDecodePacketTransportRouteTooShort(t *testing.T) {
- // TRANSPORT_FLOOD (route=0) but only 3 bytes total → too short for transport codes
- _, err := DecodePacket("140011", nil)
+ // TRANSPORT_FLOOD (route=0) but only 2 bytes total → too short for transport codes
+ _, err := DecodePacket("1400", nil)
if err == nil {
t.Error("expected error for transport route with too-short buffer")
}
@@ -931,16 +939,19 @@ func TestDecodeAckShort(t *testing.T) {
}
func TestDecodeAckValid(t *testing.T) {
- buf := []byte{0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF}
+ buf := []byte{0xAA, 0xBB, 0xCC, 0xDD}
p := decodeAck(buf)
if p.Error != "" {
t.Errorf("unexpected error: %s", p.Error)
}
- if p.DestHash != "aa" {
- t.Errorf("destHash=%s, want aa", p.DestHash)
+ if p.ExtraHash != "ddccbbaa" {
+ t.Errorf("extraHash=%s, want ddccbbaa", p.ExtraHash)
}
- if p.ExtraHash != "ccddeeff" {
- t.Errorf("extraHash=%s, want ccddeeff", p.ExtraHash)
+ if p.DestHash != "" {
+ t.Errorf("destHash should be empty, got %s", p.DestHash)
+ }
+ if p.SrcHash != "" {
+ t.Errorf("srcHash should be empty, got %s", p.SrcHash)
}
}
diff --git a/cmd/server/coverage_test.go b/cmd/server/coverage_test.go
index 9cd0a71..da089ee 100644
--- a/cmd/server/coverage_test.go
+++ b/cmd/server/coverage_test.go
@@ -1,3714 +1,3717 @@
-package main
-
-import (
- "database/sql"
- "encoding/json"
- "fmt"
- "math"
- "net/http"
- "net/http/httptest"
- "strings"
- "testing"
- "time"
-
- "github.com/gorilla/mux"
- _ "modernc.org/sqlite"
-)
-
-// --- helpers ---
-
-func setupTestDBv2(t *testing.T) *DB {
- t.Helper()
- conn, err := sql.Open("sqlite", ":memory:")
- if err != nil {
- t.Fatal(err)
- }
- schema := `
- CREATE TABLE nodes (
- public_key TEXT PRIMARY KEY, name TEXT, role TEXT,
- lat REAL, lon REAL, last_seen TEXT, first_seen TEXT, advert_count INTEGER DEFAULT 0,
- battery_mv INTEGER, temperature_c REAL
- );
- CREATE TABLE observers (
- id TEXT PRIMARY KEY, name TEXT, iata TEXT, last_seen TEXT, first_seen TEXT,
- packet_count INTEGER DEFAULT 0, model TEXT, firmware TEXT,
- client_version TEXT, radio TEXT, battery_mv INTEGER, uptime_secs INTEGER, noise_floor REAL
- );
- CREATE TABLE transmissions (
- id INTEGER PRIMARY KEY AUTOINCREMENT, raw_hex TEXT NOT NULL,
- hash TEXT NOT NULL UNIQUE, first_seen TEXT NOT NULL,
- route_type INTEGER, payload_type INTEGER, payload_version INTEGER,
- decoded_json TEXT, created_at TEXT DEFAULT (datetime('now'))
- );
- CREATE TABLE observations (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- transmission_id INTEGER NOT NULL REFERENCES transmissions(id),
- observer_id TEXT, observer_name TEXT, direction TEXT,
- snr REAL, rssi REAL, score INTEGER, path_json TEXT, timestamp INTEGER NOT NULL
- );
- `
- if _, err := conn.Exec(schema); err != nil {
- t.Fatal(err)
- }
- return &DB{conn: conn, isV3: false}
-}
-
-func seedV2Data(t *testing.T, db *DB) {
- t.Helper()
- now := time.Now().UTC()
- recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
- epoch := now.Add(-1 * time.Hour).Unix()
-
- db.conn.Exec(`INSERT INTO observers (id, name, iata, last_seen, first_seen, packet_count)
- VALUES ('obs1', 'Obs One', 'SJC', ?, '2026-01-01T00:00:00Z', 100)`, recent)
- db.conn.Exec(`INSERT INTO nodes (public_key, name, role, lat, lon, last_seen, first_seen, advert_count)
- VALUES ('aabbccdd11223344', 'TestRepeater', 'repeater', 37.5, -122.0, ?, '2026-01-01T00:00:00Z', 50)`, recent)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('AABB', 'abc123def4567890', ?, 1, 4, '{"pubKey":"aabbccdd11223344","name":"TestRepeater","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_id, observer_name, snr, rssi, path_json, timestamp)
- VALUES (1, 'obs1', 'Obs One', 12.5, -90, '["aa","bb"]', ?)`, epoch)
-}
-
-func setupNoStoreServer(t *testing.T) (*Server, *mux.Router) {
- t.Helper()
- db := setupTestDB(t)
- seedTestData(t, db)
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- // No store — forces DB fallback paths
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
- return srv, router
-}
-
-// --- detectSchema ---
-
-func TestDetectSchemaV3(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- if !db.isV3 {
- t.Error("expected v3 schema (observer_idx)")
- }
-}
-
-func TestDetectSchemaV2(t *testing.T) {
- db := setupTestDBv2(t)
- defer db.Close()
- db.detectSchema()
- if db.isV3 {
- t.Error("expected v2 schema (observer_id), got v3")
- }
-}
-
-func TestDetectSchemaV2Queries(t *testing.T) {
- db := setupTestDBv2(t)
- defer db.Close()
- seedV2Data(t, db)
-
- // v2 schema should work with QueryPackets
- result, err := db.QueryPackets(PacketQuery{Limit: 50, Order: "DESC"})
- if err != nil {
- t.Fatal(err)
- }
- if result.Total != 1 {
- t.Errorf("expected 1 transmission in v2, got %d", result.Total)
- }
-
- // v2 grouped query
- gResult, err := db.QueryGroupedPackets(PacketQuery{Limit: 50, Order: "DESC"})
- if err != nil {
- t.Fatal(err)
- }
- if gResult.Total != 1 {
- t.Errorf("expected 1 grouped in v2, got %d", gResult.Total)
- }
-
- // v2 GetObserverPacketCounts
- counts := db.GetObserverPacketCounts(0)
- if counts["obs1"] != 1 {
- t.Errorf("expected 1 obs count for obs1, got %d", counts["obs1"])
- }
-
- // v2 QueryMultiNodePackets
- mResult, err := db.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 0, "DESC", "", "")
- if err != nil {
- t.Fatal(err)
- }
- if mResult.Total != 1 {
- t.Errorf("expected 1 multi-node packet in v2, got %d", mResult.Total)
- }
-}
-
-// --- buildPacketWhere ---
-
-func TestBuildPacketWhere(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- tests := []struct {
- name string
- query PacketQuery
- wantWhere int
- }{
- {"empty", PacketQuery{}, 0},
- {"type filter", PacketQuery{Type: intPtr(4)}, 1},
- {"route filter", PacketQuery{Route: intPtr(1)}, 1},
- {"observer filter", PacketQuery{Observer: "obs1"}, 1},
- {"hash filter", PacketQuery{Hash: "ABC123DEF4567890"}, 1},
- {"since filter", PacketQuery{Since: "2025-01-01"}, 1},
- {"until filter", PacketQuery{Until: "2099-01-01"}, 1},
- {"region filter", PacketQuery{Region: "SJC"}, 1},
- {"node filter", PacketQuery{Node: "TestRepeater"}, 1},
- {"all filters", PacketQuery{
- Type: intPtr(4), Route: intPtr(1), Observer: "obs1",
- Hash: "abc123", Since: "2025-01-01", Until: "2099-01-01",
- Region: "SJC", Node: "TestRepeater",
- }, 8},
- }
- for _, tc := range tests {
- t.Run(tc.name, func(t *testing.T) {
- where, args := db.buildPacketWhere(tc.query)
- if len(where) != tc.wantWhere {
- t.Errorf("expected %d where clauses, got %d", tc.wantWhere, len(where))
- }
- if len(where) != len(args) {
- t.Errorf("where count (%d) != args count (%d)", len(where), len(args))
- }
- })
- }
-}
-
-// --- DB.QueryMultiNodePackets ---
-
-func TestDBQueryMultiNodePackets(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- t.Run("empty pubkeys", func(t *testing.T) {
- result, err := db.QueryMultiNodePackets(nil, 50, 0, "DESC", "", "")
- if err != nil {
- t.Fatal(err)
- }
- if result.Total != 0 {
- t.Errorf("expected 0 for empty pubkeys, got %d", result.Total)
- }
- })
-
- t.Run("single pubkey match", func(t *testing.T) {
- result, err := db.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 0, "DESC", "", "")
- if err != nil {
- t.Fatal(err)
- }
- if result.Total < 1 {
- t.Errorf("expected >=1, got %d", result.Total)
- }
- })
-
- t.Run("multiple pubkeys", func(t *testing.T) {
- result, err := db.QueryMultiNodePackets(
- []string{"aabbccdd11223344", "eeff00112233aabb"}, 50, 0, "DESC", "", "")
- if err != nil {
- t.Fatal(err)
- }
- if result.Total < 1 {
- t.Errorf("expected >=1, got %d", result.Total)
- }
- })
-
- t.Run("with time filters", func(t *testing.T) {
- result, err := db.QueryMultiNodePackets(
- []string{"aabbccdd11223344"}, 50, 0, "ASC",
- "2020-01-01T00:00:00Z", "2099-01-01T00:00:00Z")
- if err != nil {
- t.Fatal(err)
- }
- if result.Total < 1 {
- t.Errorf("expected >=1, got %d", result.Total)
- }
- })
-
- t.Run("default limit and order", func(t *testing.T) {
- result, err := db.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 0, 0, "", "", "")
- if err != nil {
- t.Fatal(err)
- }
- if result.Total < 1 {
- t.Errorf("expected >=1, got %d", result.Total)
- }
- })
-
- t.Run("no match", func(t *testing.T) {
- result, err := db.QueryMultiNodePackets([]string{"nonexistent"}, 50, 0, "DESC", "", "")
- if err != nil {
- t.Fatal(err)
- }
- if result.Total != 0 {
- t.Errorf("expected 0, got %d", result.Total)
- }
- })
-}
-
-// --- Store.QueryMultiNodePackets ---
-
-func TestStoreQueryMultiNodePackets(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- t.Run("empty pubkeys", func(t *testing.T) {
- result := store.QueryMultiNodePackets(nil, 50, 0, "DESC", "", "")
- if result.Total != 0 {
- t.Errorf("expected 0, got %d", result.Total)
- }
- })
-
- t.Run("matching pubkey", func(t *testing.T) {
- result := store.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 0, "DESC", "", "")
- if result.Total < 1 {
- t.Errorf("expected >=1, got %d", result.Total)
- }
- })
-
- t.Run("ASC order", func(t *testing.T) {
- result := store.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 0, "ASC", "", "")
- if result.Total < 1 {
- t.Errorf("expected >=1, got %d", result.Total)
- }
- })
-
- t.Run("with since/until", func(t *testing.T) {
- result := store.QueryMultiNodePackets(
- []string{"aabbccdd11223344"}, 50, 0, "DESC",
- "2020-01-01T00:00:00Z", "2099-01-01T00:00:00Z")
- if result.Total < 1 {
- t.Errorf("expected >=1, got %d", result.Total)
- }
- })
-
- t.Run("offset beyond total", func(t *testing.T) {
- result := store.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 9999, "DESC", "", "")
- if len(result.Packets) != 0 {
- t.Errorf("expected 0 packets, got %d", len(result.Packets))
- }
- })
-
- t.Run("default limit", func(t *testing.T) {
- result := store.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 0, 0, "DESC", "", "")
- if result.Total < 1 {
- t.Errorf("expected >=1, got %d", result.Total)
- }
- })
-}
-
-// --- IngestNewFromDB ---
-
-func TestIngestNewFromDB(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- initialMax := store.MaxTransmissionID()
-
- // Insert a new transmission in DB
- now := time.Now().UTC().Format(time.RFC3339)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('EEFF', 'newhash123456abcd', ?, 1, 4, '{"pubKey":"aabbccdd11223344","type":"ADVERT"}')`, now)
- newTxID := 0
- db.conn.QueryRow("SELECT MAX(id) FROM transmissions").Scan(&newTxID)
-
- // Add observation for the new transmission
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (?, 1, 10.0, -92, '["cc"]', ?)`, newTxID, time.Now().Unix())
-
- // Ingest
- broadcastMaps, newMax := store.IngestNewFromDB(initialMax, 100)
- if newMax <= initialMax {
- t.Errorf("expected newMax > %d, got %d", initialMax, newMax)
- }
- if len(broadcastMaps) < 1 {
- t.Errorf("expected >=1 broadcast maps, got %d", len(broadcastMaps))
- }
-
- // Verify broadcast map contains nested "packet" field (fixes #162)
- if len(broadcastMaps) > 0 {
- bm := broadcastMaps[0]
- pkt, ok := bm["packet"]
- if !ok || pkt == nil {
- t.Error("broadcast map missing 'packet' field (required by packets.js)")
- }
- pktMap, ok := pkt.(map[string]interface{})
- if ok {
- for _, field := range []string{"id", "hash", "payload_type", "observer_id"} {
- if _, exists := pktMap[field]; !exists {
- t.Errorf("packet sub-object missing field %q", field)
- }
- }
- }
- // Verify decoded also present at top level (for live.js)
- if _, ok := bm["decoded"]; !ok {
- t.Error("broadcast map missing 'decoded' field (required by live.js)")
- }
- }
-
- // Verify ingested into store
- updatedMax := store.MaxTransmissionID()
- if updatedMax < newMax {
- t.Errorf("store max (%d) should be >= newMax (%d)", updatedMax, newMax)
- }
-
- t.Run("no new data", func(t *testing.T) {
- maps, max := store.IngestNewFromDB(newMax, 100)
- if maps != nil {
- t.Errorf("expected nil for no new data, got %d maps", len(maps))
- }
- if max != newMax {
- t.Errorf("expected same max %d, got %d", newMax, max)
- }
- })
-
- t.Run("default limit", func(t *testing.T) {
- _, _ = store.IngestNewFromDB(newMax, 0)
- })
-}
-
-func TestIngestNewFromDBv2(t *testing.T) {
- db := setupTestDBv2(t)
- defer db.Close()
- seedV2Data(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- initialMax := store.MaxTransmissionID()
-
- now := time.Now().UTC().Format(time.RFC3339)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('EEFF', 'v2newhash12345678', ?, 1, 4, '{"pubKey":"aabbccdd11223344","type":"ADVERT"}')`, now)
- newTxID := 0
- db.conn.QueryRow("SELECT MAX(id) FROM transmissions").Scan(&newTxID)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_id, observer_name, snr, rssi, path_json, timestamp)
- VALUES (?, 'obs1', 'Obs One', 10.0, -92, '["cc"]', ?)`, newTxID, time.Now().Unix())
-
- broadcastMaps, newMax := store.IngestNewFromDB(initialMax, 100)
- if newMax <= initialMax {
- t.Errorf("expected newMax > %d, got %d", initialMax, newMax)
- }
- if len(broadcastMaps) < 1 {
- t.Errorf("expected >=1 broadcast maps, got %d", len(broadcastMaps))
- }
-}
-
-// --- MaxTransmissionID ---
-
-func TestMaxTransmissionID(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- maxID := store.MaxTransmissionID()
- if maxID <= 0 {
- t.Errorf("expected maxID > 0, got %d", maxID)
- }
-
- t.Run("empty store", func(t *testing.T) {
- emptyStore := NewPacketStore(db)
- if emptyStore.MaxTransmissionID() != 0 {
- t.Error("expected 0 for empty store")
- }
- })
-}
-
-// --- Route handler DB fallback (no store) ---
-
-func TestHandleBulkHealthNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/nodes/bulk-health?limit=10", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
-
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- var body []interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- if body == nil {
- t.Fatal("expected array response")
- }
-}
-
-func TestHandleBulkHealthNoStoreMaxLimit(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/nodes/bulk-health?limit=500", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
-
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsRFNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
-
- t.Run("basic", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/analytics/rf", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- if _, ok := body["snr"]; !ok {
- t.Error("expected snr field")
- }
- if _, ok := body["payloadTypes"]; !ok {
- t.Error("expected payloadTypes field")
- }
- })
-
- t.Run("with region", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/analytics/rf?region=SJC", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-}
-
-func TestHandlePacketsNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
-
- t.Run("basic packets", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets?limit=10", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("multi-node", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets?nodes=aabbccdd11223344,eeff00112233aabb", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- if _, ok := body["packets"]; !ok {
- t.Error("expected packets field")
- }
- })
-
- t.Run("grouped", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets?groupByHash=true&limit=10", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-}
-
-func TestHandlePacketsMultiNodeWithStore(t *testing.T) {
- _, router := setupTestServer(t)
- req := httptest.NewRequest("GET", "/api/packets?nodes=aabbccdd11223344&order=asc&limit=10&offset=0", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- if _, ok := body["packets"]; !ok {
- t.Error("expected packets field")
- }
-}
-
-func TestHandlePacketDetailNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
-
- t.Run("by hash", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets/abc123def4567890", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 404 {
- t.Fatalf("expected 404 (no store), got %d: %s", w.Code, w.Body.String())
- }
- })
-
- t.Run("by ID", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets/1", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 404 {
- t.Fatalf("expected 404 (no store), got %d: %s", w.Code, w.Body.String())
- }
- })
-
- t.Run("not found", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets/9999", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 404 {
- t.Fatalf("expected 404, got %d", w.Code)
- }
- })
-
- t.Run("non-numeric non-hash", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets/notahash", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 404 {
- t.Fatalf("expected 404, got %d", w.Code)
- }
- })
-}
-
-func TestHandleAnalyticsChannelsNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- if _, ok := body["activeChannels"]; !ok {
- t.Error("expected activeChannels field")
- }
-}
-
-// --- transmissionsForObserver (byObserver index path) ---
-
-func TestTransmissionsForObserverIndex(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Query packets for an observer — hits the byObserver index
- result := store.QueryPackets(PacketQuery{Limit: 50, Observer: "obs1", Order: "DESC"})
- if result.Total < 1 {
- t.Errorf("expected >=1 packets for obs1, got %d", result.Total)
- }
-
- // Query with observer + type (uses from != nil path in transmissionsForObserver)
- pt := 4
- result2 := store.QueryPackets(PacketQuery{Limit: 50, Observer: "obs1", Type: &pt, Order: "DESC"})
- if result2.Total < 1 {
- t.Errorf("expected >=1 filtered packets, got %d", result2.Total)
- }
-}
-
-// --- GetChannelMessages (dedup, observer, hops paths) ---
-
-func TestGetChannelMessagesFromStore(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Test channel should exist from seed data
- messages, total := store.GetChannelMessages("#test", 100, 0)
- if total < 1 {
- t.Errorf("expected >=1 messages for #test, got %d", total)
- }
- if len(messages) < 1 {
- t.Errorf("expected >=1 message entries, got %d", len(messages))
- }
-
- t.Run("non-existent channel", func(t *testing.T) {
- msgs, total := store.GetChannelMessages("nonexistent", 100, 0)
- if total != 0 || len(msgs) != 0 {
- t.Errorf("expected 0 for nonexistent channel, got %d/%d", total, len(msgs))
- }
- })
-
- t.Run("default limit", func(t *testing.T) {
- _, total := store.GetChannelMessages("#test", 0, 0)
- if total < 1 {
- t.Errorf("expected >=1 with default limit, got %d", total)
- }
- })
-
- t.Run("with offset", func(t *testing.T) {
- _, _ = store.GetChannelMessages("#test", 10, 9999)
- })
-}
-
-func TestGetChannelMessagesDedupe(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
-
- now := time.Now().UTC()
- recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
- epoch := now.Add(-1 * time.Hour).Unix()
-
- seedTestData(t, db)
-
- // Insert a duplicate channel message with the same hash as existing
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('DDEE', 'dupchannelhash1234', ?, 1, 5, '{"type":"CHAN","channel":"#test","text":"Hello: World","sender":"TestUser"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (3, 1, 11.0, -91, '["aa"]', ?)`, epoch)
-
- // Insert another dupe same hash as above (should dedup)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('DDFF', 'dupchannelhash5678', ?, 1, 5, '{"type":"CHAN","channel":"#test","text":"Hello: World","sender":"TestUser"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (4, 2, 9.0, -93, '[]', ?)`, epoch)
-
- store := NewPacketStore(db)
- store.Load()
-
- msgs, total := store.GetChannelMessages("#test", 100, 0)
- // Should have messages, with some deduped
- if total < 1 {
- t.Errorf("expected >=1 total messages, got %d", total)
- }
- _ = msgs
-}
-
-// --- GetChannels ---
-
-func TestGetChannelsFromStore(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- channels := store.GetChannels("")
- if len(channels) < 1 {
- t.Errorf("expected >=1 channel, got %d", len(channels))
- }
-
- t.Run("with region", func(t *testing.T) {
- ch := store.GetChannels("SJC")
- _ = ch
- })
-
- t.Run("non-existent region", func(t *testing.T) {
- ch := store.GetChannels("NONEXIST")
- // Region filter may return 0 or fallback to unfiltered depending on DB content
- _ = ch
- })
-}
-
-// --- resolve (prefixMap) ---
-
-func TestPrefixMapResolve(t *testing.T) {
- nodes := []nodeInfo{
- {PublicKey: "aabbccdd11223344", Name: "NodeA", HasGPS: true, Lat: 37.5, Lon: -122.0},
- {PublicKey: "aabbccdd55667788", Name: "NodeB", HasGPS: false},
- {PublicKey: "eeff0011aabbccdd", Name: "NodeC", HasGPS: true, Lat: 38.0, Lon: -121.0},
- }
- pm := buildPrefixMap(nodes)
-
- t.Run("exact match", func(t *testing.T) {
- n := pm.resolve("aabbccdd11223344")
- if n == nil || n.Name != "NodeA" {
- t.Errorf("expected NodeA, got %v", n)
- }
- })
-
- t.Run("prefix match single", func(t *testing.T) {
- n := pm.resolve("eeff")
- if n == nil || n.Name != "NodeC" {
- t.Errorf("expected NodeC, got %v", n)
- }
- })
-
- t.Run("prefix match multiple — prefer GPS", func(t *testing.T) {
- n := pm.resolve("aabbccdd")
- if n == nil {
- t.Fatal("expected non-nil")
- }
- if !n.HasGPS {
- t.Error("expected GPS-preferred candidate")
- }
- if n.Name != "NodeA" {
- t.Errorf("expected NodeA (has GPS), got %s", n.Name)
- }
- })
-
- t.Run("no match", func(t *testing.T) {
- n := pm.resolve("zzzzz")
- if n != nil {
- t.Errorf("expected nil, got %v", n)
- }
- })
-
- t.Run("multiple candidates no GPS", func(t *testing.T) {
- noGPSNodes := []nodeInfo{
- {PublicKey: "aa11bb22", Name: "X", HasGPS: false},
- {PublicKey: "aa11cc33", Name: "Y", HasGPS: false},
- }
- pm2 := buildPrefixMap(noGPSNodes)
- n := pm2.resolve("aa11")
- if n == nil {
- t.Fatal("expected non-nil")
- }
- // Should return first candidate
- })
-}
-
-// --- pathLen ---
-
-func TestPathLen(t *testing.T) {
- tests := []struct {
- json string
- want int
- }{
- {"", 0},
- {"invalid", 0},
- {`[]`, 0},
- {`["aa"]`, 1},
- {`["aa","bb","cc"]`, 3},
- }
- for _, tc := range tests {
- got := pathLen(tc.json)
- if got != tc.want {
- t.Errorf("pathLen(%q) = %d, want %d", tc.json, got, tc.want)
- }
- }
-}
-
-// --- floatPtrOrNil ---
-
-func TestFloatPtrOrNil(t *testing.T) {
- v := 3.14
- if floatPtrOrNil(&v) != 3.14 {
- t.Error("expected 3.14")
- }
- if floatPtrOrNil(nil) != nil {
- t.Error("expected nil")
- }
-}
-
-// --- nullFloatPtr ---
-
-func TestNullFloatPtr(t *testing.T) {
- valid := sql.NullFloat64{Float64: 2.71, Valid: true}
- p := nullFloatPtr(valid)
- if p == nil || *p != 2.71 {
- t.Errorf("expected 2.71, got %v", p)
- }
- invalid := sql.NullFloat64{Valid: false}
- if nullFloatPtr(invalid) != nil {
- t.Error("expected nil for invalid")
- }
-}
-
-// --- nilIfEmpty ---
-
-func TestNilIfEmpty(t *testing.T) {
- if nilIfEmpty("") != nil {
- t.Error("expected nil for empty")
- }
- if nilIfEmpty("hello") != "hello" {
- t.Error("expected 'hello'")
- }
-}
-
-// --- pickBestObservation ---
-
-func TestPickBestObservation(t *testing.T) {
- t.Run("empty observations", func(t *testing.T) {
- tx := &StoreTx{}
- pickBestObservation(tx)
- if tx.ObserverID != "" {
- t.Error("expected empty observer for no observations")
- }
- })
-
- t.Run("single observation", func(t *testing.T) {
- snr := 10.0
- tx := &StoreTx{
- Observations: []*StoreObs{
- {ObserverID: "obs1", ObserverName: "One", SNR: &snr, PathJSON: `["aa"]`},
- },
- }
- pickBestObservation(tx)
- if tx.ObserverID != "obs1" {
- t.Errorf("expected obs1, got %s", tx.ObserverID)
- }
- })
-
- t.Run("picks longest path", func(t *testing.T) {
- snr1, snr2 := 10.0, 5.0
- tx := &StoreTx{
- Observations: []*StoreObs{
- {ObserverID: "obs1", SNR: &snr1, PathJSON: `["aa"]`},
- {ObserverID: "obs2", SNR: &snr2, PathJSON: `["aa","bb","cc"]`},
- },
- }
- pickBestObservation(tx)
- if tx.ObserverID != "obs2" {
- t.Errorf("expected obs2 (longest path), got %s", tx.ObserverID)
- }
- })
-}
-
-// --- indexByNode ---
-
-func TestIndexByNode(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
-
- t.Run("empty decoded_json", func(t *testing.T) {
- tx := &StoreTx{Hash: "h1"}
- store.indexByNode(tx)
- if len(store.byNode) != 0 {
- t.Error("expected no index entries")
- }
- })
-
- t.Run("valid decoded_json", func(t *testing.T) {
- tx := &StoreTx{
- Hash: "h2",
- DecodedJSON: `{"pubKey":"aabbccdd11223344","destPubKey":"eeff00112233aabb"}`,
- }
- store.indexByNode(tx)
- if len(store.byNode["aabbccdd11223344"]) != 1 {
- t.Error("expected pubKey indexed")
- }
- if len(store.byNode["eeff00112233aabb"]) != 1 {
- t.Error("expected destPubKey indexed")
- }
- })
-
- t.Run("duplicate hash skipped", func(t *testing.T) {
- tx := &StoreTx{
- Hash: "h2",
- DecodedJSON: `{"pubKey":"aabbccdd11223344"}`,
- }
- store.indexByNode(tx)
- // Should not add duplicate
- if len(store.byNode["aabbccdd11223344"]) != 1 {
- t.Errorf("expected 1, got %d", len(store.byNode["aabbccdd11223344"]))
- }
- })
-
- t.Run("invalid json", func(t *testing.T) {
- tx := &StoreTx{Hash: "h3", DecodedJSON: "not json"}
- store.indexByNode(tx)
- // Should not panic or add anything
- })
-}
-
-// --- resolveVersion ---
-
-func TestResolveVersion(t *testing.T) {
- old := Version
- defer func() { Version = old }()
-
- Version = "v1.2.3"
- if resolveVersion() != "v1.2.3" {
- t.Error("expected v1.2.3")
- }
-
- Version = ""
- if resolveVersion() != "unknown" {
- t.Error("expected unknown when empty")
- }
-}
-
-// --- wsOrStatic ---
-
-func TestWsOrStaticNonWebSocket(t *testing.T) {
- hub := NewHub()
- staticHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- w.WriteHeader(200)
- w.Write([]byte("static"))
- })
- handler := wsOrStatic(hub, staticHandler)
-
- req := httptest.NewRequest("GET", "/", nil)
- w := httptest.NewRecorder()
- handler.ServeHTTP(w, req)
-
- if w.Code != 200 {
- t.Errorf("expected 200, got %d", w.Code)
- }
- if w.Body.String() != "static" {
- t.Errorf("expected 'static', got %s", w.Body.String())
- }
-}
-
-// --- Poller.Start ---
-
-func TestPollerStartStop(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- hub := NewHub()
-
- poller := NewPoller(db, hub, 50*time.Millisecond)
- go poller.Start()
- time.Sleep(150 * time.Millisecond)
- poller.Stop()
-}
-
-func TestPollerStartWithStore(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- hub := NewHub()
- store := NewPacketStore(db)
- store.Load()
-
- poller := NewPoller(db, hub, 50*time.Millisecond)
- poller.store = store
- go poller.Start()
-
- // Insert new data while poller running
- now := time.Now().UTC().Format(time.RFC3339)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type)
- VALUES ('FFEE', 'pollerhash12345678', ?, 1, 4)`, now)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES ((SELECT MAX(id) FROM transmissions), 1, 10.0, -92, '[]', ?)`, time.Now().Unix())
-
- time.Sleep(200 * time.Millisecond)
- poller.Stop()
-}
-
-// --- perfMiddleware slow query path ---
-
-func TestPerfMiddlewareSlowQuery(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
-
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- // Add a slow handler
- router.HandleFunc("/api/test-slow", func(w http.ResponseWriter, r *http.Request) {
- time.Sleep(110 * time.Millisecond)
- writeJSON(w, map[string]string{"ok": "true"})
- }).Methods("GET")
-
- req := httptest.NewRequest("GET", "/api/test-slow", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
-
- if len(srv.perfStats.SlowQueries) < 1 {
- t.Error("expected slow query to be recorded")
- }
-}
-
-func TestPerfMiddlewareNonAPIPath(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- // Non-API path should pass through without perf tracking
- router.HandleFunc("/not-api", func(w http.ResponseWriter, r *http.Request) {
- w.WriteHeader(200)
- }).Methods("GET")
-
- initialReqs := srv.perfStats.Requests
- req := httptest.NewRequest("GET", "/not-api", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
-
- if srv.perfStats.Requests != initialReqs {
- t.Error("non-API request should not be tracked")
- }
-}
-
-// --- writeJSON error path ---
-
-func TestWriteJSONErrorPath(t *testing.T) {
- w := httptest.NewRecorder()
- // math.Inf cannot be marshaled to JSON — triggers the error path
- writeJSON(w, math.Inf(1))
- // Should not panic, just log the error
-}
-
-// --- GetObserverPacketCounts ---
-
-func TestGetObserverPacketCountsV3(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- counts := db.GetObserverPacketCounts(0)
- if len(counts) == 0 {
- t.Error("expected some observer counts")
- }
-}
-
-// --- Additional route fallback tests ---
-
-func TestHandleAnalyticsTopologyNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/analytics/topology", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsDistanceNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/analytics/distance", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsHashSizesNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/analytics/hash-sizes", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsSubpathsNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/analytics/subpaths", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsSubpathDetailNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
-
- t.Run("with hops", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/analytics/subpath-detail?hops=aa,bb", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("missing hops", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/analytics/subpath-detail", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("single hop", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/analytics/subpath-detail?hops=aa", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-}
-
-func TestHandleChannelsNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/channels", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleChannelMessagesNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/channels/test/messages", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandlePacketTimestampsNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
-
- t.Run("with since", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets/timestamps?since=2020-01-01T00:00:00Z", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("missing since", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets/timestamps", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 400 {
- t.Fatalf("expected 400, got %d", w.Code)
- }
- })
-}
-
-func TestHandleStatsNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/stats", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleHealthNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/health", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- if body["status"] != "ok" {
- t.Errorf("expected status ok, got %v", body["status"])
- }
-}
-
-// --- buildTransmissionWhere additional coverage ---
-
-func TestBuildTransmissionWhereRFC3339(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- t.Run("RFC3339 since", func(t *testing.T) {
- q := PacketQuery{Since: "2020-01-01T00:00:00Z"}
- where, args := db.buildTransmissionWhere(q)
- if len(where) != 1 {
- t.Errorf("expected 1 clause, got %d", len(where))
- }
- if len(args) != 1 {
- t.Errorf("expected 1 arg, got %d", len(args))
- }
- if !strings.Contains(where[0], "observations") {
- t.Error("expected observations subquery for RFC3339 since")
- }
- })
-
- t.Run("RFC3339 until", func(t *testing.T) {
- q := PacketQuery{Until: "2099-01-01T00:00:00Z"}
- where, args := db.buildTransmissionWhere(q)
- if len(where) != 1 {
- t.Errorf("expected 1 clause, got %d", len(where))
- }
- if len(args) != 1 {
- t.Errorf("expected 1 arg, got %d", len(args))
- }
- })
-
- t.Run("non-RFC3339 since", func(t *testing.T) {
- q := PacketQuery{Since: "2020-01-01"}
- where, _ := db.buildTransmissionWhere(q)
- if len(where) != 1 {
- t.Errorf("expected 1 clause, got %d", len(where))
- }
- if strings.Contains(where[0], "observations") {
- t.Error("expected direct first_seen comparison for non-RFC3339")
- }
- })
-
- t.Run("observer v3", func(t *testing.T) {
- q := PacketQuery{Observer: "obs1"}
- where, _ := db.buildTransmissionWhere(q)
- if len(where) != 1 {
- t.Errorf("expected 1 clause, got %d", len(where))
- }
- if !strings.Contains(where[0], "observer_idx") {
- t.Error("expected observer_idx subquery for v3")
- }
- })
-
- t.Run("region v3", func(t *testing.T) {
- q := PacketQuery{Region: "SJC"}
- where, _ := db.buildTransmissionWhere(q)
- if len(where) != 1 {
- t.Errorf("expected 1 clause, got %d", len(where))
- }
- if !strings.Contains(where[0], "iata") {
- t.Error("expected iata subquery for region")
- }
- })
-}
-
-func TestBuildTransmissionWhereV2(t *testing.T) {
- db := setupTestDBv2(t)
- defer db.Close()
- seedV2Data(t, db)
-
- t.Run("observer v2", func(t *testing.T) {
- q := PacketQuery{Observer: "obs1"}
- where, _ := db.buildTransmissionWhere(q)
- if len(where) != 1 {
- t.Errorf("expected 1 clause, got %d", len(where))
- }
- if !strings.Contains(where[0], "observer_id") {
- t.Error("expected observer_id subquery for v2")
- }
- })
-
- t.Run("region v2", func(t *testing.T) {
- q := PacketQuery{Region: "SJC"}
- where, _ := db.buildTransmissionWhere(q)
- if len(where) != 1 {
- t.Errorf("expected 1 clause, got %d", len(where))
- }
- })
-}
-
-// --- GetMaxTransmissionID (DB) ---
-
-func TestDBGetMaxTransmissionID(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- maxID := db.GetMaxTransmissionID()
- if maxID <= 0 {
- t.Errorf("expected > 0, got %d", maxID)
- }
-}
-
-// --- GetNodeLocations ---
-
-func TestGetNodeLocations(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- locs := db.GetNodeLocations()
- if len(locs) == 0 {
- t.Error("expected some node locations")
- }
- pk := strings.ToLower("aabbccdd11223344")
- if entry, ok := locs[pk]; ok {
- if entry["lat"] == nil {
- t.Error("expected non-nil lat")
- }
- } else {
- t.Error("expected node location for test repeater")
- }
-}
-
-// --- Store edge cases ---
-
-func TestStoreQueryPacketsEdgeCases(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- t.Run("hash filter", func(t *testing.T) {
- result := store.QueryPackets(PacketQuery{Hash: "abc123def4567890", Limit: 50, Order: "DESC"})
- if result.Total != 1 {
- t.Errorf("expected 1, got %d", result.Total)
- }
- })
-
- t.Run("non-existent hash", func(t *testing.T) {
- result := store.QueryPackets(PacketQuery{Hash: "0000000000000000", Limit: 50, Order: "DESC"})
- if result.Total != 0 {
- t.Errorf("expected 0, got %d", result.Total)
- }
- })
-
- t.Run("ASC order", func(t *testing.T) {
- result := store.QueryPackets(PacketQuery{Limit: 50, Order: "ASC"})
- if result.Total < 1 {
- t.Error("expected results")
- }
- })
-
- t.Run("offset beyond end", func(t *testing.T) {
- result := store.QueryPackets(PacketQuery{Limit: 50, Offset: 9999, Order: "DESC"})
- if len(result.Packets) != 0 {
- t.Errorf("expected 0, got %d", len(result.Packets))
- }
- })
-
- t.Run("node filter with index", func(t *testing.T) {
- result := store.QueryPackets(PacketQuery{Node: "aabbccdd11223344", Limit: 50, Order: "DESC"})
- if result.Total < 1 {
- t.Error("expected >=1")
- }
- })
-
- t.Run("route filter", func(t *testing.T) {
- rt := 1
- result := store.QueryPackets(PacketQuery{Route: &rt, Limit: 50, Order: "DESC"})
- if result.Total < 1 {
- t.Error("expected >=1")
- }
- })
-
- t.Run("since filter", func(t *testing.T) {
- result := store.QueryPackets(PacketQuery{Since: "2020-01-01", Limit: 50, Order: "DESC"})
- if result.Total < 1 {
- t.Error("expected >=1")
- }
- })
-
- t.Run("until filter", func(t *testing.T) {
- result := store.QueryPackets(PacketQuery{Until: "2099-01-01", Limit: 50, Order: "DESC"})
- if result.Total < 1 {
- t.Error("expected >=1")
- }
- })
-}
-
-// --- HandlePackets with various options ---
-
-func TestHandlePacketsWithQueryOptions(t *testing.T) {
- _, router := setupTestServer(t)
-
- t.Run("with type filter", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets?type=4&limit=10", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("with route filter", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets?route=1&limit=10", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("expand observations", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets?limit=10&expand=observations", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("ASC order", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets?order=asc&limit=10", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-}
-
-// --- handleObservers and handleObserverDetail ---
-
-func TestHandleObserversNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/observers", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleObserverDetailNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/observers/obs1", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d: %s", w.Code, w.Body.String())
- }
-}
-
-func TestHandleObserverAnalyticsNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/observers/obs1/analytics", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 503 {
- t.Fatalf("expected 503, got %d: %s", w.Code, w.Body.String())
- }
-}
-
-// --- HandleTraces ---
-
-func TestHandleTracesNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/traces/abc123def4567890", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- HandleResolveHops ---
-
-func TestHandleResolveHops(t *testing.T) {
- _, router := setupTestServer(t)
-
- t.Run("empty hops", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/resolve-hops", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("with hops", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/resolve-hops?hops=aabb,eeff", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-}
-
-// --- HandlePerf ---
-
-func TestHandlePerfNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/perf", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- HandleIATACoords ---
-
-func TestHandleIATACoordsNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/iata-coords", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- Conversion helpers ---
-
-func TestStrOrNil(t *testing.T) {
- if strOrNil("") != nil {
- t.Error("expected nil")
- }
- if strOrNil("abc") != "abc" {
- t.Error("expected abc")
- }
-}
-
-func TestIntPtrOrNil(t *testing.T) {
- if intPtrOrNil(nil) != nil {
- t.Error("expected nil")
- }
- v := 42
- if intPtrOrNil(&v) != 42 {
- t.Error("expected 42")
- }
-}
-
-func TestNullIntPtr(t *testing.T) {
- valid := sql.NullInt64{Int64: 7, Valid: true}
- p := nullIntPtr(valid)
- if p == nil || *p != 7 {
- t.Error("expected 7")
- }
- invalid := sql.NullInt64{Valid: false}
- if nullIntPtr(invalid) != nil {
- t.Error("expected nil")
- }
-}
-
-func TestNullStr(t *testing.T) {
- valid := sql.NullString{String: "hello", Valid: true}
- if nullStr(valid) != "hello" {
- t.Error("expected hello")
- }
- invalid := sql.NullString{Valid: false}
- if nullStr(invalid) != nil {
- t.Error("expected nil")
- }
-}
-
-func TestNullStrVal(t *testing.T) {
- valid := sql.NullString{String: "test", Valid: true}
- if nullStrVal(valid) != "test" {
- t.Error("expected test")
- }
- invalid := sql.NullString{Valid: false}
- if nullStrVal(invalid) != "" {
- t.Error("expected empty string")
- }
-}
-
-func TestNullFloat(t *testing.T) {
- valid := sql.NullFloat64{Float64: 1.5, Valid: true}
- if nullFloat(valid) != 1.5 {
- t.Error("expected 1.5")
- }
- invalid := sql.NullFloat64{Valid: false}
- if nullFloat(invalid) != nil {
- t.Error("expected nil")
- }
-}
-
-func TestNullInt(t *testing.T) {
- valid := sql.NullInt64{Int64: 99, Valid: true}
- if nullInt(valid) != 99 {
- t.Error("expected 99")
- }
- invalid := sql.NullInt64{Valid: false}
- if nullInt(invalid) != nil {
- t.Error("expected nil")
- }
-}
-
-// --- resolveCommit ---
-
-func TestResolveCommit(t *testing.T) {
- old := Commit
- defer func() { Commit = old }()
-
- Commit = "abc123"
- if resolveCommit() != "abc123" {
- t.Error("expected abc123")
- }
-
- Commit = ""
- // With no .git-commit file and possibly no git, should return something
- result := resolveCommit()
- if result == "" {
- t.Error("expected non-empty result")
- }
-}
-
-// --- parsePathJSON ---
-
-func TestParsePathJSON(t *testing.T) {
- if parsePathJSON("") != nil {
- t.Error("expected nil for empty")
- }
- if parsePathJSON("[]") != nil {
- t.Error("expected nil for []")
- }
- if parsePathJSON("invalid") != nil {
- t.Error("expected nil for invalid")
- }
- hops := parsePathJSON(`["aa","bb"]`)
- if len(hops) != 2 {
- t.Errorf("expected 2 hops, got %d", len(hops))
- }
-}
-
-// --- Store.GetPerfStoreStats & GetCacheStats ---
-
-func TestStorePerfAndCacheStats(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- stats := store.GetPerfStoreStats()
- if _, ok := stats["totalLoaded"]; !ok {
- t.Error("expected totalLoaded")
- }
-
- cacheStats := store.GetCacheStats()
- if _, ok := cacheStats["size"]; !ok {
- t.Error("expected size")
- }
-}
-
-// --- enrichObs ---
-
-func TestEnrichObs(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Find an observation from the loaded store
- var obs *StoreObs
- for _, o := range store.byObsID {
- obs = o
- break
- }
- if obs == nil {
- t.Skip("no observations loaded")
- }
-
- enriched := store.enrichObs(obs)
- if enriched["observer_id"] == nil {
- t.Error("expected observer_id")
- }
-}
-
-// --- HandleNodeSearch ---
-
-func TestHandleNodeSearch(t *testing.T) {
- _, router := setupTestServer(t)
-
- t.Run("with query", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/search?q=Test", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("empty query", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/search?q=", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-}
-
-// --- HandleNodeDetail ---
-
-func TestHandleNodeDetail(t *testing.T) {
- _, router := setupTestServer(t)
-
- t.Run("existing", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("not found", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/nonexistent12345678", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 404 {
- t.Fatalf("expected 404, got %d", w.Code)
- }
- })
-}
-
-// --- HandleNodeHealth ---
-
-func TestHandleNodeHealth(t *testing.T) {
- _, router := setupTestServer(t)
-
- t.Run("not found", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/nonexistent12345678/health", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 404 {
- t.Fatalf("expected 404, got %d", w.Code)
- }
- })
-}
-
-// --- HandleNodePaths ---
-
-func TestHandleNodePaths(t *testing.T) {
- _, router := setupTestServer(t)
-
- t.Run("existing", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344/paths", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("not found", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/nonexistent12345678/paths", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 404 {
- t.Fatalf("expected 404, got %d", w.Code)
- }
- })
-}
-
-// --- HandleNodeAnalytics ---
-
-func TestHandleNodeAnalytics(t *testing.T) {
- _, router := setupTestServer(t)
-
- t.Run("existing", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344/analytics?days=7", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("not found", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/nonexistent/analytics", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 404 {
- t.Fatalf("expected 404, got %d", w.Code)
- }
- })
-
- t.Run("days bounds", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344/analytics?days=0", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("days max", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344/analytics?days=999", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-}
-
-// --- HandleNetworkStatus ---
-
-func TestHandleNetworkStatus(t *testing.T) {
- _, router := setupTestServer(t)
- req := httptest.NewRequest("GET", "/api/nodes/network-status", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- HandleConfigEndpoints ---
-
-func TestHandleConfigEndpoints(t *testing.T) {
- _, router := setupTestServer(t)
-
- endpoints := []string{
- "/api/config/cache",
- "/api/config/client",
- "/api/config/regions",
- "/api/config/theme",
- "/api/config/map",
- }
- for _, ep := range endpoints {
- t.Run(ep, func(t *testing.T) {
- req := httptest.NewRequest("GET", ep, nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d for %s", w.Code, ep)
- }
- })
- }
-}
-
-// --- HandleAudioLabBuckets ---
-
-func TestHandleAudioLabBuckets(t *testing.T) {
- _, router := setupTestServer(t)
- req := httptest.NewRequest("GET", "/api/audio-lab/buckets", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- // May return 200 or 404 depending on implementation
- if w.Code != 200 {
- // Audio lab might not be fully implemented — just verify it doesn't crash
- }
-}
-
-// --- txToMap ---
-
-func TestTxToMap(t *testing.T) {
- snr := 10.5
- rssi := -90.0
- pt := 4
- rt := 1
- tx := &StoreTx{
- ID: 1,
- RawHex: "AABB",
- Hash: "abc123",
- FirstSeen: "2025-01-01",
- RouteType: &rt,
- PayloadType: &pt,
- DecodedJSON: `{"type":"ADVERT"}`,
- ObservationCount: 2,
- ObserverID: "obs1",
- ObserverName: "Obs One",
- SNR: &snr,
- RSSI: &rssi,
- PathJSON: `["aa"]`,
- Direction: "RX",
- }
- m := txToMap(tx)
- if m["id"] != 1 {
- t.Error("expected id 1")
- }
- if m["hash"] != "abc123" {
- t.Error("expected hash abc123")
- }
- if m["snr"] != 10.5 {
- t.Error("expected snr 10.5")
- }
-}
-
-// --- filterTxSlice ---
-
-func TestFilterTxSlice(t *testing.T) {
- txs := []*StoreTx{
- {ID: 1, Hash: "a"},
- {ID: 2, Hash: "b"},
- {ID: 3, Hash: "a"},
- }
- result := filterTxSlice(txs, func(tx *StoreTx) bool {
- return tx.Hash == "a"
- })
- if len(result) != 2 {
- t.Errorf("expected 2, got %d", len(result))
- }
-}
-
-// --- GetTimestamps ---
-
-func TestStoreGetTimestamps(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- ts := store.GetTimestamps("2000-01-01")
- if len(ts) < 1 {
- t.Error("expected >=1 timestamps")
- }
-}
-
-// Helper
-func intPtr(v int) *int {
- return &v
-}
-
-// setupRichTestDB creates a test DB with richer data including paths, multiple observers, channel data.
-func setupRichTestDB(t *testing.T) *DB {
- t.Helper()
- db := setupTestDB(t)
-
- now := time.Now().UTC()
- recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
- yesterday := now.Add(-24 * time.Hour).Format(time.RFC3339)
- recentEpoch := now.Add(-1 * time.Hour).Unix()
- yesterdayEpoch := now.Add(-24 * time.Hour).Unix()
-
- seedTestData(t, db)
-
- // Add advert packet with raw_hex that has valid header + path bytes for hash size parsing
- // route_type 1 = FLOOD, path byte at position 1 (hex index 2..3)
- // header: 0x01 (route_type=1), path byte: 0x40 (hashSize bits=01 → size 2)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('0140aabbccdd', 'hash_with_path_01', ?, 1, 4, '{"pubKey":"aabbccdd11223344","name":"TestRepeater","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (3, 1, 10.0, -91, '["aabb","ccdd"]', ?)`, recentEpoch)
-
- // Another advert with 3-byte hash size: header 0x01, path byte 0x80 (bits=10 → size 3)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('0180eeff0011', 'hash_with_path_02', ?, 1, 4, '{"pubKey":"eeff00112233aabb","name":"TestCompanion","type":"ADVERT"}')`, yesterday)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (4, 2, 8.5, -94, '["eeff","0011","2233"]', ?)`, yesterdayEpoch)
-
- // Another channel message with different sender for analytics
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('CC01', 'chan_msg_hash_001', ?, 1, 5, '{"type":"CHAN","channel":"#test","text":"User2: Another msg","sender":"User2","channelHash":"abc123"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (5, 1, 14.0, -88, '["aa"]', ?)`, recentEpoch)
-
- return db
-}
-
-// --- Store-backed analytics tests ---
-
-func TestStoreGetBulkHealthWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- results := store.GetBulkHealth(50, "")
- if len(results) == 0 {
- t.Error("expected bulk health results")
- }
- // Check that results have expected structure
- for _, r := range results {
- if _, ok := r["public_key"]; !ok {
- t.Error("expected public_key field")
- }
- if _, ok := r["stats"]; !ok {
- t.Error("expected stats field")
- }
- }
-
- t.Run("with region filter", func(t *testing.T) {
- results := store.GetBulkHealth(50, "SJC")
- _ = results
- })
-}
-
-func TestStoreGetAnalyticsHashSizes(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- result := store.GetAnalyticsHashSizes("")
- if result["total"] == nil {
- t.Error("expected total field")
- }
- dist, ok := result["distribution"].(map[string]int)
- if !ok {
- t.Error("expected distribution map")
- }
- _ = dist
-
- t.Run("with region", func(t *testing.T) {
- r := store.GetAnalyticsHashSizes("SJC")
- _ = r
- })
-}
-
-func TestStoreGetAnalyticsSubpaths(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- result := store.GetAnalyticsSubpaths("", 2, 8, 100)
- if _, ok := result["subpaths"]; !ok {
- t.Error("expected subpaths field")
- }
-
- t.Run("with region", func(t *testing.T) {
- r := store.GetAnalyticsSubpaths("SJC", 2, 4, 50)
- _ = r
- })
-}
-
-func TestSubpathPrecomputedIndex(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- // After Load(), the precomputed index must be populated.
- if len(store.spIndex) == 0 {
- t.Fatal("expected spIndex to be populated after Load()")
- }
- if store.spTotalPaths == 0 {
- t.Fatal("expected spTotalPaths > 0 after Load()")
- }
-
- // The rich test DB has paths ["aa","bb"], ["aabb","ccdd"], and
- // ["eeff","0011","2233"]. That yields 5 unique raw subpaths.
- expectedRaw := map[string]int{
- "aa,bb": 1,
- "aabb,ccdd": 1,
- "eeff,0011": 1,
- "0011,2233": 1,
- "eeff,0011,2233": 1,
- }
- for key, want := range expectedRaw {
- got, ok := store.spIndex[key]
- if !ok {
- t.Errorf("expected spIndex[%q] to exist", key)
- } else if got != want {
- t.Errorf("spIndex[%q] = %d, want %d", key, got, want)
- }
- }
- if store.spTotalPaths != 3 {
- t.Errorf("spTotalPaths = %d, want 3", store.spTotalPaths)
- }
-
- // Fast-path (no region) and slow-path (with region) must return the
- // same shape.
- fast := store.GetAnalyticsSubpaths("", 2, 8, 100)
- slow := store.GetAnalyticsSubpaths("SJC", 2, 4, 50)
- for _, r := range []map[string]interface{}{fast, slow} {
- if _, ok := r["subpaths"]; !ok {
- t.Error("missing subpaths in result")
- }
- if _, ok := r["totalPaths"]; !ok {
- t.Error("missing totalPaths in result")
- }
- }
-
- // Verify fast path totalPaths matches index.
- if tp, ok := fast["totalPaths"].(int); ok && tp != store.spTotalPaths {
- t.Errorf("fast totalPaths=%d, spTotalPaths=%d", tp, store.spTotalPaths)
- }
-}
-
-func TestStoreGetAnalyticsRFCacheHit(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- // First call — cache miss
- result1 := store.GetAnalyticsRF("")
- if result1["totalPackets"] == nil {
- t.Error("expected totalPackets")
- }
-
- // Second call — should hit cache
- result2 := store.GetAnalyticsRF("")
- if result2["totalPackets"] == nil {
- t.Error("expected cached totalPackets")
- }
-
- // Verify cache hit was recorded
- stats := store.GetCacheStats()
- hits, _ := stats["hits"].(int64)
- if hits < 1 {
- t.Error("expected at least 1 cache hit")
- }
-}
-
-func TestStoreGetAnalyticsTopology(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- result := store.GetAnalyticsTopology("")
- if result == nil {
- t.Error("expected non-nil result")
- }
-
- // #155: uniqueNodes must match DB 7-day active count, not hop resolution
- stats, err := db.GetStats()
- if err != nil {
- t.Fatalf("GetStats failed: %v", err)
- }
- un, ok := result["uniqueNodes"].(int)
- if !ok {
- t.Fatalf("uniqueNodes is not int: %T", result["uniqueNodes"])
- }
- if un != stats.TotalNodes {
- t.Errorf("uniqueNodes=%d should match stats totalNodes=%d", un, stats.TotalNodes)
- }
-
- t.Run("with region", func(t *testing.T) {
- r := store.GetAnalyticsTopology("SJC")
- _ = r
- })
-}
-
-func TestStoreGetAnalyticsChannels(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- result := store.GetAnalyticsChannels("")
- if _, ok := result["activeChannels"]; !ok {
- t.Error("expected activeChannels")
- }
- if _, ok := result["topSenders"]; !ok {
- t.Error("expected topSenders")
- }
- if _, ok := result["channelTimeline"]; !ok {
- t.Error("expected channelTimeline")
- }
-
- t.Run("with region", func(t *testing.T) {
- r := store.GetAnalyticsChannels("SJC")
- _ = r
- })
-}
-
-// Regression test for #154: channelHash is a number in decoded JSON from decoder.js,
-// not a string. The Go struct must handle both types correctly.
-func TestStoreGetAnalyticsChannelsNumericHash(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- recent := time.Now().Add(-1 * time.Hour).Format(time.RFC3339)
- recentEpoch := time.Now().Add(-1 * time.Hour).Unix()
-
- // Insert GRP_TXT packets with numeric channelHash (matches decoder.js output)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('DD01', 'grp_num_hash_1', ?, 1, 5, '{"type":"GRP_TXT","channelHash":97,"channelHashHex":"61","decryptionStatus":"no_key"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (4, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('DD02', 'grp_num_hash_2', ?, 1, 5, '{"type":"GRP_TXT","channelHash":42,"channelHashHex":"2A","decryptionStatus":"no_key"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (5, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- // Also a decrypted CHAN with numeric channelHash
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('DD03', 'chan_num_hash_3', ?, 1, 5, '{"type":"CHAN","channel":"general","channelHash":97,"channelHashHex":"61","text":"hello","sender":"Alice"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (6, 1, 12.0, -88, '[]', ?)`, recentEpoch)
-
- store := NewPacketStore(db)
- store.Load()
- result := store.GetAnalyticsChannels("")
-
- channels := result["channels"].([]map[string]interface{})
- if len(channels) < 2 {
- t.Errorf("expected at least 2 channels (hash 97 + hash 42), got %d", len(channels))
- }
-
- // Verify the numeric-hash channels we inserted have proper hashes (not "?")
- found97 := false
- found42 := false
- for _, ch := range channels {
- if ch["hash"] == "97" {
- found97 = true
- }
- if ch["hash"] == "42" {
- found42 = true
- }
- }
- if !found97 {
- t.Error("expected to find channel with hash '97' (numeric channelHash parsing)")
- }
- if !found42 {
- t.Error("expected to find channel with hash '42' (numeric channelHash parsing)")
- }
-
- // Verify the decrypted CHAN channel has the correct name
- foundGeneral := false
- for _, ch := range channels {
- if ch["name"] == "general" {
- foundGeneral = true
- if ch["hash"] != "97" {
- t.Errorf("expected hash '97' for general channel, got %v", ch["hash"])
- }
- }
- }
- if !foundGeneral {
- t.Error("expected to find channel named 'general'")
- }
-}
-
-func TestStoreGetAnalyticsDistance(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- result := store.GetAnalyticsDistance("")
- if result == nil {
- t.Error("expected non-nil result")
- }
-
- t.Run("with region", func(t *testing.T) {
- r := store.GetAnalyticsDistance("SJC")
- _ = r
- })
-}
-
-func TestStoreGetSubpathDetail(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- result := store.GetSubpathDetail([]string{"aabb", "ccdd"})
- if result == nil {
- t.Error("expected non-nil result")
- }
- if _, ok := result["hops"]; !ok {
- t.Error("expected hops field")
- }
-}
-
-// --- Route handlers with store for analytics ---
-
-func TestHandleAnalyticsRFWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- t.Run("basic", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/analytics/rf", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("with region", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/analytics/rf?region=SJC", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-}
-
-func TestHandleBulkHealthWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/nodes/bulk-health?limit=50®ion=SJC", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsSubpathsWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/analytics/subpaths?minLen=2&maxLen=4&limit=50", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsSubpathDetailWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/analytics/subpath-detail?hops=aabb,ccdd", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsDistanceWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/analytics/distance", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsHashSizesWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/analytics/hash-sizes", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsTopologyWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/analytics/topology", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsChannelsWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- GetChannelMessages more paths ---
-
-func TestGetChannelMessagesRichData(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- messages, total := store.GetChannelMessages("#test", 100, 0)
- if total < 2 {
- t.Errorf("expected >=2 messages for #test with rich data, got %d", total)
- }
-
- // Verify message fields
- for _, msg := range messages {
- if _, ok := msg["sender"]; !ok {
- t.Error("expected sender field")
- }
- if _, ok := msg["hops"]; !ok {
- t.Error("expected hops field")
- }
- }
-}
-
-// --- handleObservers with actual data ---
-
-func TestHandleObserversWithData(t *testing.T) {
- _, router := setupTestServer(t)
- req := httptest.NewRequest("GET", "/api/observers", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- observers, ok := body["observers"].([]interface{})
- if !ok || len(observers) == 0 {
- t.Error("expected non-empty observers")
- }
-}
-
-// --- handleChannelMessages with store ---
-
-func TestHandleChannelMessagesWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/channels/%23test/messages?limit=10", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- handleChannels with store ---
-
-func TestHandleChannelsWithStore(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/channels", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- Traces via store path ---
-
-func TestHandleTracesWithStore(t *testing.T) {
- _, router := setupTestServer(t)
- req := httptest.NewRequest("GET", "/api/traces/abc123def4567890", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- Store.GetStoreStats ---
-
-func TestStoreGetStoreStats(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- stats, err := store.GetStoreStats()
- if err != nil {
- t.Fatal(err)
- }
- if stats.TotalTransmissions < 1 {
- t.Error("expected transmissions > 0")
- }
-}
-
-// --- Store.QueryGroupedPackets ---
-
-func TestStoreQueryGroupedPackets(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- result := store.QueryGroupedPackets(PacketQuery{Limit: 50, Order: "DESC"})
- if result.Total < 1 {
- t.Error("expected >=1 grouped packets")
- }
-}
-
-// --- Store.GetPacketByHash / GetPacketByID / GetTransmissionByID ---
-
-func TestStoreGetPacketByHash(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- pkt := store.GetPacketByHash("abc123def4567890")
- if pkt == nil {
- t.Fatal("expected packet")
- }
- if pkt["hash"] != "abc123def4567890" {
- t.Errorf("wrong hash: %v", pkt["hash"])
- }
-
- t.Run("not found", func(t *testing.T) {
- pkt := store.GetPacketByHash("0000000000000000")
- if pkt != nil {
- t.Error("expected nil for not found")
- }
- })
-}
-
-// --- Coverage gap-filling tests ---
-
-func TestResolvePayloadTypeNameUnknown(t *testing.T) {
- // nil → UNKNOWN
- if got := resolvePayloadTypeName(nil); got != "UNKNOWN" {
- t.Errorf("expected UNKNOWN for nil, got %s", got)
- }
- // known type
- pt4 := 4
- if got := resolvePayloadTypeName(&pt4); got != "ADVERT" {
- t.Errorf("expected ADVERT, got %s", got)
- }
- // unknown type → UNK(N) format
- pt99 := 99
- if got := resolvePayloadTypeName(&pt99); got != "UNK(99)" {
- t.Errorf("expected UNK(99), got %s", got)
- }
-}
-
-func TestCacheHitTopology(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- // First call — cache miss
- r1 := store.GetAnalyticsTopology("")
- if r1 == nil {
- t.Fatal("expected topology result")
- }
-
- // Second call — cache hit
- r2 := store.GetAnalyticsTopology("")
- if r2 == nil {
- t.Fatal("expected cached topology result")
- }
-
- stats := store.GetCacheStats()
- hits := stats["hits"].(int64)
- if hits < 1 {
- t.Errorf("expected cache hit, got %d hits", hits)
- }
-}
-
-func TestCacheHitHashSizes(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- r1 := store.GetAnalyticsHashSizes("")
- if r1 == nil {
- t.Fatal("expected hash sizes result")
- }
-
- r2 := store.GetAnalyticsHashSizes("")
- if r2 == nil {
- t.Fatal("expected cached hash sizes result")
- }
-
- stats := store.GetCacheStats()
- hits := stats["hits"].(int64)
- if hits < 1 {
- t.Errorf("expected cache hit, got %d", hits)
- }
-}
-
-func TestCacheHitChannels(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- r1 := store.GetAnalyticsChannels("")
- if r1 == nil {
- t.Fatal("expected channels result")
- }
-
- r2 := store.GetAnalyticsChannels("")
- if r2 == nil {
- t.Fatal("expected cached channels result")
- }
-
- stats := store.GetCacheStats()
- hits := stats["hits"].(int64)
- if hits < 1 {
- t.Errorf("expected cache hit, got %d", hits)
- }
-}
-
-func TestGetChannelMessagesEdgeCases(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- // Channel not found — empty result
- msgs, total := store.GetChannelMessages("nonexistent_channel", 10, 0)
- if total != 0 {
- t.Errorf("expected 0 total for nonexistent channel, got %d", total)
- }
- if len(msgs) != 0 {
- t.Errorf("expected empty msgs, got %d", len(msgs))
- }
-
- // Default limit (0 → 100)
- msgs, _ = store.GetChannelMessages("#test", 0, 0)
- _ = msgs // just exercises the default limit path
-
- // Offset beyond range
- msgs, total = store.GetChannelMessages("#test", 10, 9999)
- if len(msgs) != 0 {
- t.Errorf("expected empty msgs for large offset, got %d", len(msgs))
- }
- if total == 0 {
- t.Error("total should be > 0 even with large offset")
- }
-
- // Negative offset
- msgs, _ = store.GetChannelMessages("#test", 10, -5)
- _ = msgs // exercises the start < 0 path
-}
-
-func TestFilterPacketsEmptyRegion(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Region with no observers → empty result
- results := store.QueryPackets(PacketQuery{Region: "NONEXISTENT", Limit: 100})
- if results.Total != 0 {
- t.Errorf("expected 0 results for nonexistent region, got %d", results.Total)
- }
-}
-
-func TestFilterPacketsSinceUntil(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Since far future → empty
- results := store.QueryPackets(PacketQuery{Since: "2099-01-01T00:00:00Z", Limit: 100})
- if results.Total != 0 {
- t.Errorf("expected 0 results for far future since, got %d", results.Total)
- }
-
- // Until far past → empty
- results = store.QueryPackets(PacketQuery{Until: "2000-01-01T00:00:00Z", Limit: 100})
- if results.Total != 0 {
- t.Errorf("expected 0 results for far past until, got %d", results.Total)
- }
-
- // Route filter
- rt := 1
- results = store.QueryPackets(PacketQuery{Route: &rt, Limit: 100})
- if results.Total == 0 {
- t.Error("expected results for route_type=1 filter")
- }
-}
-
-func TestFilterPacketsHashOnly(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Single hash fast-path — found
- results := store.QueryPackets(PacketQuery{Hash: "abc123def4567890", Limit: 100})
- if results.Total != 1 {
- t.Errorf("expected 1 result for known hash, got %d", results.Total)
- }
-
- // Single hash fast-path — not found
- results = store.QueryPackets(PacketQuery{Hash: "0000000000000000", Limit: 100})
- if results.Total != 0 {
- t.Errorf("expected 0 results for unknown hash, got %d", results.Total)
- }
-}
-
-func TestFilterPacketsObserverWithType(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Observer + type filter (takes non-indexed path)
- pt := 4
- results := store.QueryPackets(PacketQuery{Observer: "obs1", Type: &pt, Limit: 100})
- _ = results // exercises the combined observer+type filter path
-}
-
-func TestFilterPacketsNodeFilter(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Node filter — exercises DecodedJSON containment check
- results := store.QueryPackets(PacketQuery{Node: "aabbccdd11223344", Limit: 100})
- if results.Total == 0 {
- t.Error("expected results for node filter")
- }
-
- // Node filter with hash combined
- results = store.QueryPackets(PacketQuery{Node: "aabbccdd11223344", Hash: "abc123def4567890", Limit: 100})
- _ = results
-}
-
-func TestGetNodeHashSizeInfoEdgeCases(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
-
- now := time.Now().UTC()
- recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
- recentEpoch := now.Add(-1 * time.Hour).Unix()
-
- // Observers
- db.conn.Exec(`INSERT INTO observers (id, name, iata, last_seen, first_seen, packet_count)
- VALUES ('obs1', 'Obs', 'SJC', ?, '2026-01-01T00:00:00Z', 10)`, recent)
-
- // Adverts with various edge cases
- // 1. Valid advert with pubKey
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('0140aabbccdd', 'hs_valid_1', ?, 1, 4, '{"pubKey":"aabbccdd11223344","name":"NodeA","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (1, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- // 2. Short raw_hex (< 4 chars)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('01', 'hs_short_hex', ?, 1, 4, '{"pubKey":"eeff00112233aabb","name":"NodeB","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (2, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- // 3. Invalid hex in path byte position
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('01GGHHII', 'hs_bad_hex', ?, 1, 4, '{"pubKey":"1122334455667788","name":"NodeC","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (3, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- // 4. Invalid JSON
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('0140aabb', 'hs_bad_json', ?, 1, 4, 'not-json')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (4, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- // 5. JSON with public_key field instead of pubKey
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('0180eeff', 'hs_alt_key', ?, 1, 4, '{"public_key":"aabbccdd11223344","name":"NodeA","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (5, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- // 6. JSON with no pubKey at all
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('01C0ffee', 'hs_no_pk', ?, 1, 4, '{"name":"NodeZ","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (6, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- // 7. Empty decoded_json
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('0140bbcc', 'hs_empty_json', ?, 1, 4, '')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (7, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- // 8-10. Multiple adverts for same node with different hash sizes (flip-flop test)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('0140dd01', 'hs_flip_1', ?, 1, 4, '{"pubKey":"ffff000011112222","name":"Flipper","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (8, 1, 10.0, -90, '[]', ?)`, recentEpoch)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('0180dd02', 'hs_flip_2', ?, 1, 4, '{"pubKey":"ffff000011112222","name":"Flipper","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (9, 1, 10.0, -90, '[]', ?)`, recentEpoch)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('0140dd03', 'hs_flip_3', ?, 1, 4, '{"pubKey":"ffff000011112222","name":"Flipper","type":"ADVERT"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (10, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- store := NewPacketStore(db)
- store.Load()
- info := store.GetNodeHashSizeInfo()
-
- // Valid node should be present
- if _, ok := info["aabbccdd11223344"]; !ok {
- t.Error("expected aabbccdd11223344 in hash size info")
- }
-
- // Flipper should have inconsistent flag (2→3→2 = 2 transitions, 2 unique sizes, 3 obs)
- if flipper, ok := info["ffff000011112222"]; ok {
- if len(flipper.AllSizes) < 2 {
- t.Errorf("expected 2+ unique sizes for flipper, got %d", len(flipper.AllSizes))
- }
- if !flipper.Inconsistent {
- t.Error("expected Inconsistent=true for flip-flop node")
- }
- } else {
- t.Error("expected ffff000011112222 in hash size info")
- }
-
- // Bad entries (short hex, bad hex, bad json, no pk) should not corrupt results
- if _, ok := info["eeff00112233aabb"]; ok {
- t.Error("short raw_hex node should not be in results")
- }
- if _, ok := info["1122334455667788"]; ok {
- t.Error("bad hex node should not be in results")
- }
-}
-
-func TestHandleResolveHopsEdgeCases(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- // Empty hops param
- req := httptest.NewRequest("GET", "/api/resolve-hops", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Errorf("expected 200, got %d", w.Code)
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- resolved := body["resolved"].(map[string]interface{})
- if len(resolved) != 0 {
- t.Errorf("expected empty resolved for empty hops, got %d", len(resolved))
- }
-
- // Multiple hops with empty string included
- req = httptest.NewRequest("GET", "/api/resolve-hops?hops=aabb,,eeff", nil)
- w = httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Errorf("expected 200, got %d", w.Code)
- }
- json.Unmarshal(w.Body.Bytes(), &body)
- resolved = body["resolved"].(map[string]interface{})
- // Empty string should be skipped
- if _, ok := resolved[""]; ok {
- t.Error("empty hop should be skipped")
- }
-
- // Nonexistent prefix — zero candidates
- req = httptest.NewRequest("GET", "/api/resolve-hops?hops=nonexistent_prefix_xyz", nil)
- w = httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Errorf("expected 200, got %d", w.Code)
- }
-}
-
-func TestHandleObserversError(t *testing.T) {
- // Use a closed DB to trigger an error from GetObservers
- db := setupTestDB(t)
- seedTestData(t, db)
-
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
- db.Close() // force error after routes registered
-
- req := httptest.NewRequest("GET", "/api/observers", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 500 {
- t.Errorf("expected 500 for closed DB, got %d", w.Code)
- }
-}
-
-func TestHandleAnalyticsChannelsDBFallback(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- // Server with NO store — takes DB fallback path
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Errorf("expected 200, got %d", w.Code)
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- if _, ok := body["activeChannels"]; !ok {
- t.Error("expected activeChannels in DB-fallback response")
- }
- if _, ok := body["channels"]; !ok {
- t.Error("expected channels in DB-fallback response")
- }
-}
-
-func TestGetChannelMessagesDedupeRepeats(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
-
- now := time.Now().UTC()
- recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
- recentEpoch := now.Add(-1 * time.Hour).Unix()
-
- db.conn.Exec(`INSERT INTO observers (id, name, iata, last_seen, first_seen, packet_count)
- VALUES ('obs1', 'Obs1', 'SJC', ?, '2026-01-01T00:00:00Z', 10)`, recent)
- db.conn.Exec(`INSERT INTO observers (id, name, iata, last_seen, first_seen, packet_count)
- VALUES ('obs2', 'Obs2', 'LAX', ?, '2026-01-01T00:00:00Z', 10)`, recent)
-
- // Insert two copies of same CHAN message (same hash, different observers)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('CC01', 'dedup_chan_1', ?, 1, 5, '{"type":"CHAN","channel":"#general","text":"Alice: hello","sender":"Alice"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (1, 1, 12.0, -88, '["aa"]', ?)`, recentEpoch)
-
- // Same sender + hash → different observation (simulates dedup)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('CC02', 'dedup_chan_1', ?, 1, 5, '{"type":"CHAN","channel":"#general","text":"Alice: hello","sender":"Alice"}')`, recent)
- // Note: won't load due to UNIQUE constraint on hash → tests the code path with single tx having multiple obs
-
- // Second different message
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('CC03', 'dedup_chan_2', ?, 1, 5, '{"type":"CHAN","channel":"#general","text":"Bob: world","sender":"Bob"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (2, 2, 10.0, -90, '["bb"]', ?)`, recentEpoch)
-
- // GRP_TXT (not CHAN) — should be skipped by GetChannelMessages
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('DD01', 'grp_msg_hash_1', ?, 1, 5, '{"type":"GRP_TXT","channelHash":"42","text":"encrypted"}')`, recent)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (3, 1, 10.0, -90, '[]', ?)`, recentEpoch)
-
- store := NewPacketStore(db)
- store.Load()
-
- msgs, total := store.GetChannelMessages("#general", 10, 0)
- if total == 0 {
- t.Error("expected messages for #general")
- }
-
- // Check message structure
- for _, msg := range msgs {
- if _, ok := msg["sender"]; !ok {
- t.Error("expected sender field")
- }
- if _, ok := msg["text"]; !ok {
- t.Error("expected text field")
- }
- if _, ok := msg["observers"]; !ok {
- t.Error("expected observers field")
- }
- }
-}
-
-func TestTransmissionsForObserverFromSlice(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Test with from=nil (index path) — for non-existent observer
- result := store.transmissionsForObserver("nonexistent_obs", nil)
- if len(result) != 0 {
- t.Errorf("expected nil/empty for nonexistent observer, got %d", len(result))
- }
-
- // Test with from=non-nil slice (filter path)
- allPackets := store.packets
- result = store.transmissionsForObserver("obs1", allPackets)
- if len(result) == 0 {
- t.Error("expected results for obs1 from filter path")
- }
-}
-
-func TestGetPerfStoreStatsPublicKeyField(t *testing.T) {
- db := setupRichTestDB(t)
- defer db.Close()
- store := NewPacketStore(db)
- store.Load()
-
- stats := store.GetPerfStoreStats()
- indexes := stats["indexes"].(map[string]interface{})
- // advertByObserver should count distinct pubkeys from advert packets
- aboc := indexes["advertByObserver"].(int)
- if aboc == 0 {
- t.Error("expected advertByObserver > 0 for rich test DB")
- }
-}
-
-func TestHandleAudioLabBucketsQueryError(t *testing.T) {
- // Use closed DB to trigger query error
- db := setupTestDB(t)
- seedTestData(t, db)
-
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
- db.Close()
-
- req := httptest.NewRequest("GET", "/api/audio-lab/buckets", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Errorf("expected 200 (empty buckets on error), got %d", w.Code)
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- buckets := body["buckets"].(map[string]interface{})
- if len(buckets) != 0 {
- t.Errorf("expected empty buckets on query error, got %d", len(buckets))
- }
-}
-
-func TestStoreGetTransmissionByID(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- pkt := store.GetTransmissionByID(1)
- if pkt == nil {
- t.Fatal("expected packet")
- }
-
- t.Run("not found", func(t *testing.T) {
- pkt := store.GetTransmissionByID(99999)
- if pkt != nil {
- t.Error("expected nil")
- }
- })
-}
-
-func TestStoreGetPacketByID(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Get an observation ID from the store
- var obsID int
- for id := range store.byObsID {
- obsID = id
- break
- }
- if obsID == 0 {
- t.Skip("no observations")
- }
-
- pkt := store.GetPacketByID(obsID)
- if pkt == nil {
- t.Fatal("expected packet")
- }
-
- t.Run("not found", func(t *testing.T) {
- pkt := store.GetPacketByID(99999)
- if pkt != nil {
- t.Error("expected nil")
- }
- })
-}
-
-// --- Store.GetObservationsForHash ---
-
-func TestStoreGetObservationsForHash(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- obs := store.GetObservationsForHash("abc123def4567890")
- if len(obs) < 1 {
- t.Error("expected >=1 observation")
- }
-
- t.Run("not found", func(t *testing.T) {
- obs := store.GetObservationsForHash("0000000000000000")
- if len(obs) != 0 {
- t.Errorf("expected 0, got %d", len(obs))
- }
- })
-}
-
-// --- Store.GetNewTransmissionsSince ---
-
-func TestStoreGetNewTransmissionsSince(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- txs, err := db.GetNewTransmissionsSince(0, 100)
- if err != nil {
- t.Fatal(err)
- }
- if len(txs) < 1 {
- t.Error("expected >=1 transmission")
- }
-}
-
-// --- HandlePacketDetail with store (by hash, by tx ID, by obs ID) ---
-
-func TestHandlePacketDetailWithStoreAllPaths(t *testing.T) {
- _, router := setupTestServer(t)
-
- t.Run("by hash", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets/abc123def4567890", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d: %s", w.Code, w.Body.String())
- }
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
- if body["observations"] == nil {
- t.Error("expected observations")
- }
- })
-
- t.Run("by tx ID", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets/1", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
- })
-
- t.Run("not found ID", func(t *testing.T) {
- req := httptest.NewRequest("GET", "/api/packets/999999", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 404 {
- t.Fatalf("expected 404, got %d", w.Code)
- }
- })
-}
-
-// --- Additional DB function coverage ---
-
-func TestDBGetNewTransmissionsSince(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- txs, err := db.GetNewTransmissionsSince(0, 100)
- if err != nil {
- t.Fatal(err)
- }
- if len(txs) < 1 {
- t.Error("expected >=1 transmissions")
- }
-}
-
-func TestDBGetNetworkStatus(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- cfg := &Config{}
- ht := cfg.GetHealthThresholds()
- result, err := db.GetNetworkStatus(ht)
- if err != nil {
- t.Fatal(err)
- }
- if result == nil {
- t.Error("expected non-nil result")
- }
-}
-
-func TestDBGetObserverByID(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- obs, err := db.GetObserverByID("obs1")
- if err != nil {
- t.Fatal(err)
- }
- if obs == nil {
- t.Error("expected non-nil observer")
- }
- if obs.ID != "obs1" {
- t.Errorf("expected obs1, got %s", obs.ID)
- }
-
- t.Run("not found", func(t *testing.T) {
- obs, err := db.GetObserverByID("nonexistent")
- if err == nil && obs != nil {
- t.Error("expected nil observer for nonexistent ID")
- }
- // Some implementations return (nil, err) — that's fine too
- })
-}
-
-func TestDBGetTraces(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- traces, err := db.GetTraces("abc123def4567890")
- if err != nil {
- t.Fatal(err)
- }
- _ = traces
-}
-
-// --- DB queries with different filter combos ---
-
-func TestDBQueryPacketsAllFilters(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- pt := 4
- rt := 1
- result, err := db.QueryPackets(PacketQuery{
- Limit: 50,
- Type: &pt,
- Route: &rt,
- Observer: "obs1",
- Hash: "abc123def4567890",
- Since: "2020-01-01",
- Until: "2099-01-01",
- Region: "SJC",
- Node: "TestRepeater",
- Order: "ASC",
- })
- if err != nil {
- t.Fatal(err)
- }
- _ = result
-}
-
-// --- IngestNewFromDB dedup path ---
-
-func TestIngestNewFromDBDuplicateObs(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- initialMax := store.MaxTransmissionID()
-
- // Insert new transmission with same hash as existing (should merge into existing tx)
- now := time.Now().UTC().Format(time.RFC3339)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('AABB', 'dedup_test_hash_01', ?, 1, 4, '{"pubKey":"aabbccdd11223344","type":"ADVERT"}')`, now)
- newTxID := 0
- db.conn.QueryRow("SELECT MAX(id) FROM transmissions").Scan(&newTxID)
-
- // Add observation
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (?, 1, 11.0, -89, '["dd"]', ?)`, newTxID, time.Now().Unix())
- // Add duplicate observation (same observer_id + path_json)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (?, 1, 11.0, -89, '["dd"]', ?)`, newTxID, time.Now().Unix())
-
- _, newMax := store.IngestNewFromDB(initialMax, 100)
- if newMax <= initialMax {
- t.Errorf("expected newMax > %d, got %d", initialMax, newMax)
- }
-}
-
-// --- IngestNewObservations (fixes #174) ---
-
-func TestIngestNewObservations(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- // Get initial observation count for transmission 1 (hash abc123def4567890)
- initialTx := store.byHash["abc123def4567890"]
- if initialTx == nil {
- t.Fatal("expected to find transmission abc123def4567890 in store")
- }
- initialObsCount := initialTx.ObservationCount
- if initialObsCount != 2 {
- t.Fatalf("expected 2 initial observations, got %d", initialObsCount)
- }
-
- // Record the max obs ID after initial load
- maxObsID := db.GetMaxObservationID()
-
- // Simulate a new observation arriving for the existing transmission AFTER
- // the poller has already advanced past its transmission ID
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (1, 2, 5.0, -100, '["aa","bb","cc"]', ?)`, time.Now().Unix())
-
- // Verify IngestNewFromDB does NOT pick up the new observation (tx id hasn't changed)
- txMax := store.MaxTransmissionID()
- _, newTxMax := store.IngestNewFromDB(txMax, 100)
- if initialTx.ObservationCount != initialObsCount {
- t.Errorf("IngestNewFromDB should not have changed obs count, was %d now %d",
- initialObsCount, initialTx.ObservationCount)
- }
- _ = newTxMax
-
- // IngestNewObservations should pick it up
- newObsMax := store.IngestNewObservations(maxObsID, 500)
- if newObsMax <= maxObsID {
- t.Errorf("expected newObsMax > %d, got %d", maxObsID, newObsMax)
- }
- if initialTx.ObservationCount != initialObsCount+1 {
- t.Errorf("expected obs count %d, got %d", initialObsCount+1, initialTx.ObservationCount)
- }
- if len(initialTx.Observations) != initialObsCount+1 {
- t.Errorf("expected %d observations slice len, got %d", initialObsCount+1, len(initialTx.Observations))
- }
-
- // Best observation should have been re-picked (new obs has longer path)
- if initialTx.PathJSON != `["aa","bb","cc"]` {
- t.Errorf("expected best path to be updated to longer path, got %s", initialTx.PathJSON)
- }
-
- t.Run("no new observations", func(t *testing.T) {
- max := store.IngestNewObservations(newObsMax, 500)
- if max != newObsMax {
- t.Errorf("expected same max %d, got %d", newObsMax, max)
- }
- })
-
- t.Run("dedup by observer+path", func(t *testing.T) {
- // Insert duplicate observation (same observer + path as existing)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (1, 1, 12.5, -90, '["aa","bb"]', ?)`, time.Now().Unix())
- prevCount := initialTx.ObservationCount
- newMax2 := store.IngestNewObservations(newObsMax, 500)
- if initialTx.ObservationCount != prevCount {
- t.Errorf("duplicate obs should not increase count, was %d now %d",
- prevCount, initialTx.ObservationCount)
- }
- _ = newMax2
- })
-
- t.Run("default limit", func(t *testing.T) {
- _ = store.IngestNewObservations(newObsMax, 0)
- })
-}
-
-func TestIngestNewObservationsV2(t *testing.T) {
- db := setupTestDBv2(t)
- defer db.Close()
- seedV2Data(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- tx := store.byHash["abc123def4567890"]
- if tx == nil {
- t.Fatal("expected to find transmission in store")
- }
- initialCount := tx.ObservationCount
-
- maxObsID := db.GetMaxObservationID()
-
- // Add new observation for existing transmission
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_id, observer_name, snr, rssi, path_json, timestamp)
- VALUES (1, 'obs2', 'Obs Two', 6.0, -98, '["dd","ee"]', ?)`, time.Now().Unix())
-
- newMax := store.IngestNewObservations(maxObsID, 500)
- if newMax <= maxObsID {
- t.Errorf("expected newMax > %d, got %d", maxObsID, newMax)
- }
- if tx.ObservationCount != initialCount+1 {
- t.Errorf("expected obs count %d, got %d", initialCount+1, tx.ObservationCount)
- }
-}
-
-func TestGetMaxObservationID(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
-
- maxID := db.GetMaxObservationID()
- if maxID != 0 {
- t.Errorf("expected 0 for empty table, got %d", maxID)
- }
-
- seedTestData(t, db)
- maxID = db.GetMaxObservationID()
- if maxID <= 0 {
- t.Errorf("expected positive max obs ID, got %d", maxID)
- }
-}
-
-// --- perfMiddleware with endpoint normalization ---
-
-func TestPerfMiddlewareEndpointNormalization(t *testing.T) {
- _, router := setupTestServer(t)
-
- // Hit a route with a hex hash — should normalize to :id
- req := httptest.NewRequest("GET", "/api/packets/abc123def4567890", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
-
- // The hex id should have been normalized in perf stats
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- handleNodeAnalytics edge cases ---
-
-func TestHandleNodeAnalyticsNameless(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- // Insert a node without a name
- db.conn.Exec(`INSERT INTO nodes (public_key, role, lat, lon, last_seen, first_seen, advert_count)
- VALUES ('nameless_node_pk_1', 'repeater', 37.5, -122.0, ?, '2026-01-01', 1)`,
- time.Now().UTC().Format(time.RFC3339))
-
- cfg := &Config{Port: 3000}
- hub := NewHub()
- srv := NewServer(db, cfg, hub)
- store := NewPacketStore(db)
- store.Load()
- srv.store = store
- router := mux.NewRouter()
- srv.RegisterRoutes(router)
-
- req := httptest.NewRequest("GET", "/api/nodes/nameless_node_pk_1/analytics?days=1", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- if w.Code != 200 {
- t.Fatalf("expected 200, got %d", w.Code)
- }
-}
-
-// --- PerfStats overflow (>100 recent entries) ---
-
-func TestPerfStatsRecentOverflow(t *testing.T) {
- _, router := setupTestServer(t)
- // Hit an endpoint 120 times to overflow the Recent buffer (capped at 100)
- for i := 0; i < 120; i++ {
- req := httptest.NewRequest("GET", fmt.Sprintf("/api/health?i=%d", i), nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- }
-}
-
-// --- handleAudioLabBuckets ---
-
-func TestHandleAudioLabBucketsNoStore(t *testing.T) {
- _, router := setupNoStoreServer(t)
- req := httptest.NewRequest("GET", "/api/audio-lab/buckets", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
- // Just verify no crash
-}
-
-// --- Store region filter paths ---
-
-func TestStoreQueryPacketsRegionFilter(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- result := store.QueryPackets(PacketQuery{Region: "SJC", Limit: 50, Order: "DESC"})
- _ = result
-
- result2 := store.QueryPackets(PacketQuery{Region: "NONEXIST", Limit: 50, Order: "DESC"})
- if result2.Total != 0 {
- t.Errorf("expected 0 for non-existent region, got %d", result2.Total)
- }
-}
-
-// --- DB.GetObserverIdsForRegion ---
-
-func TestDBGetObserverIdsForRegion(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- ids, err := db.GetObserverIdsForRegion("SJC")
- if err != nil {
- t.Fatal(err)
- }
- if len(ids) == 0 {
- t.Error("expected observer IDs for SJC")
- }
-
- ids2, err := db.GetObserverIdsForRegion("NONEXIST")
- if err != nil {
- t.Fatal(err)
- }
- if len(ids2) != 0 {
- t.Errorf("expected 0 for NONEXIST, got %d", len(ids2))
- }
-}
-
-// --- DB.GetDistinctIATAs ---
-
-func TestDBGetDistinctIATAs(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- iatas, err := db.GetDistinctIATAs()
- if err != nil {
- t.Fatal(err)
- }
- if len(iatas) == 0 {
- t.Error("expected at least one IATA code")
- }
-}
-
-// --- DB.SearchNodes ---
-
-func TestDBSearchNodes(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- nodes, err := db.SearchNodes("Test", 10)
- if err != nil {
- t.Fatal(err)
- }
- if len(nodes) == 0 {
- t.Error("expected nodes matching 'Test'")
- }
-}
-
-// --- Ensure non-panic on GetDBSizeStats with path ---
-
-func TestGetDBSizeStatsMemory(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
-
- stats := db.GetDBSizeStats()
- if stats["dbSizeMB"] != float64(0) {
- t.Errorf("expected 0 for in-memory, got %v", stats["dbSizeMB"])
- }
-}
-
-// Regression test for #198: channel messages must include newly ingested packets.
-// byPayloadType must maintain newest-first ordering after IngestNewFromDB so that
-// GetChannelMessages reverse iteration returns the latest messages.
-func TestGetChannelMessagesAfterIngest(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- store := NewPacketStore(db)
- store.Load()
-
- initialMax := store.MaxTransmissionID()
-
- // Get baseline message count
- _, totalBefore := store.GetChannelMessages("#test", 100, 0)
-
- // Insert a new channel message into the DB (newer than anything loaded)
- now := time.Now().UTC()
- nowStr := now.Format(time.RFC3339)
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
- VALUES ('FF01', 'newchannelmsg19800', ?, 1, 5, '{"type":"CHAN","channel":"#test","text":"NewUser: brand new message","sender":"NewUser"}')`, nowStr)
- newTxID := 0
- db.conn.QueryRow("SELECT MAX(id) FROM transmissions").Scan(&newTxID)
- db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
- VALUES (?, 1, 12.0, -88, '[]', ?)`, newTxID, now.Unix())
-
- // Ingest the new data
- _, newMax := store.IngestNewFromDB(initialMax, 100)
- if newMax <= initialMax {
- t.Fatalf("ingest did not advance maxID: %d -> %d", initialMax, newMax)
- }
-
- // GetChannelMessages must now include the new message
- msgs, totalAfter := store.GetChannelMessages("#test", 100, 0)
- if totalAfter <= totalBefore {
- t.Errorf("expected more messages after ingest: before=%d after=%d", totalBefore, totalAfter)
- }
-
- // The newest message (last in the returned slice) must be the one we just inserted
- if len(msgs) == 0 {
- t.Fatal("expected at least one message")
- }
- lastMsg := msgs[len(msgs)-1]
- if lastMsg["text"] != "brand new message" {
- t.Errorf("newest message should be 'brand new message', got %q", lastMsg["text"])
- }
-}
-
+package main
+
+import (
+ "database/sql"
+ "encoding/json"
+ "fmt"
+ "math"
+ "net/http"
+ "net/http/httptest"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gorilla/mux"
+ _ "modernc.org/sqlite"
+)
+
+// --- helpers ---
+
+func setupTestDBv2(t *testing.T) *DB {
+ t.Helper()
+ conn, err := sql.Open("sqlite", ":memory:")
+ if err != nil {
+ t.Fatal(err)
+ }
+ // Force single connection so all goroutines share the same in-memory DB
+ conn.SetMaxOpenConns(1)
+ schema := `
+ CREATE TABLE nodes (
+ public_key TEXT PRIMARY KEY, name TEXT, role TEXT,
+ lat REAL, lon REAL, last_seen TEXT, first_seen TEXT, advert_count INTEGER DEFAULT 0,
+ battery_mv INTEGER, temperature_c REAL
+ );
+ CREATE TABLE observers (
+ id TEXT PRIMARY KEY, name TEXT, iata TEXT, last_seen TEXT, first_seen TEXT,
+ packet_count INTEGER DEFAULT 0, model TEXT, firmware TEXT,
+ client_version TEXT, radio TEXT, battery_mv INTEGER, uptime_secs INTEGER, noise_floor REAL
+ );
+ CREATE TABLE transmissions (
+ id INTEGER PRIMARY KEY AUTOINCREMENT, raw_hex TEXT NOT NULL,
+ hash TEXT NOT NULL UNIQUE, first_seen TEXT NOT NULL,
+ route_type INTEGER, payload_type INTEGER, payload_version INTEGER,
+ decoded_json TEXT, created_at TEXT DEFAULT (datetime('now'))
+ );
+ CREATE TABLE observations (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ transmission_id INTEGER NOT NULL REFERENCES transmissions(id),
+ observer_id TEXT, observer_name TEXT, direction TEXT,
+ snr REAL, rssi REAL, score INTEGER, path_json TEXT, timestamp INTEGER NOT NULL
+ );
+ `
+ if _, err := conn.Exec(schema); err != nil {
+ t.Fatal(err)
+ }
+ return &DB{conn: conn, isV3: false}
+}
+
+func seedV2Data(t *testing.T, db *DB) {
+ t.Helper()
+ now := time.Now().UTC()
+ recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
+ epoch := now.Add(-1 * time.Hour).Unix()
+
+ db.conn.Exec(`INSERT INTO observers (id, name, iata, last_seen, first_seen, packet_count)
+ VALUES ('obs1', 'Obs One', 'SJC', ?, '2026-01-01T00:00:00Z', 100)`, recent)
+ db.conn.Exec(`INSERT INTO nodes (public_key, name, role, lat, lon, last_seen, first_seen, advert_count)
+ VALUES ('aabbccdd11223344', 'TestRepeater', 'repeater', 37.5, -122.0, ?, '2026-01-01T00:00:00Z', 50)`, recent)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('AABB', 'abc123def4567890', ?, 1, 4, '{"pubKey":"aabbccdd11223344","name":"TestRepeater","type":"ADVERT"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_id, observer_name, snr, rssi, path_json, timestamp)
+ VALUES (1, 'obs1', 'Obs One', 12.5, -90, '["aa","bb"]', ?)`, epoch)
+}
+
+func setupNoStoreServer(t *testing.T) (*Server, *mux.Router) {
+ t.Helper()
+ db := setupTestDB(t)
+ seedTestData(t, db)
+ cfg := &Config{Port: 3000}
+ hub := NewHub()
+ srv := NewServer(db, cfg, hub)
+ // No store — forces DB fallback paths
+ router := mux.NewRouter()
+ srv.RegisterRoutes(router)
+ return srv, router
+}
+
+// --- detectSchema ---
+
+func TestDetectSchemaV3(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ if !db.isV3 {
+ t.Error("expected v3 schema (observer_idx)")
+ }
+}
+
+func TestDetectSchemaV2(t *testing.T) {
+ db := setupTestDBv2(t)
+ defer db.Close()
+ db.detectSchema()
+ if db.isV3 {
+ t.Error("expected v2 schema (observer_id), got v3")
+ }
+}
+
+func TestDetectSchemaV2Queries(t *testing.T) {
+ db := setupTestDBv2(t)
+ defer db.Close()
+ seedV2Data(t, db)
+
+ // v2 schema should work with QueryPackets
+ result, err := db.QueryPackets(PacketQuery{Limit: 50, Order: "DESC"})
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result.Total != 1 {
+ t.Errorf("expected 1 transmission in v2, got %d", result.Total)
+ }
+
+ // v2 grouped query
+ gResult, err := db.QueryGroupedPackets(PacketQuery{Limit: 50, Order: "DESC"})
+ if err != nil {
+ t.Fatal(err)
+ }
+ if gResult.Total != 1 {
+ t.Errorf("expected 1 grouped in v2, got %d", gResult.Total)
+ }
+
+ // v2 GetObserverPacketCounts
+ counts := db.GetObserverPacketCounts(0)
+ if counts["obs1"] != 1 {
+ t.Errorf("expected 1 obs count for obs1, got %d", counts["obs1"])
+ }
+
+ // v2 QueryMultiNodePackets
+ mResult, err := db.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 0, "DESC", "", "")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if mResult.Total != 1 {
+ t.Errorf("expected 1 multi-node packet in v2, got %d", mResult.Total)
+ }
+}
+
+// --- buildPacketWhere ---
+
+func TestBuildPacketWhere(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ tests := []struct {
+ name string
+ query PacketQuery
+ wantWhere int
+ }{
+ {"empty", PacketQuery{}, 0},
+ {"type filter", PacketQuery{Type: intPtr(4)}, 1},
+ {"route filter", PacketQuery{Route: intPtr(1)}, 1},
+ {"observer filter", PacketQuery{Observer: "obs1"}, 1},
+ {"hash filter", PacketQuery{Hash: "ABC123DEF4567890"}, 1},
+ {"since filter", PacketQuery{Since: "2025-01-01"}, 1},
+ {"until filter", PacketQuery{Until: "2099-01-01"}, 1},
+ {"region filter", PacketQuery{Region: "SJC"}, 1},
+ {"node filter", PacketQuery{Node: "TestRepeater"}, 1},
+ {"all filters", PacketQuery{
+ Type: intPtr(4), Route: intPtr(1), Observer: "obs1",
+ Hash: "abc123", Since: "2025-01-01", Until: "2099-01-01",
+ Region: "SJC", Node: "TestRepeater",
+ }, 8},
+ }
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ where, args := db.buildPacketWhere(tc.query)
+ if len(where) != tc.wantWhere {
+ t.Errorf("expected %d where clauses, got %d", tc.wantWhere, len(where))
+ }
+ if len(where) != len(args) {
+ t.Errorf("where count (%d) != args count (%d)", len(where), len(args))
+ }
+ })
+ }
+}
+
+// --- DB.QueryMultiNodePackets ---
+
+func TestDBQueryMultiNodePackets(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ t.Run("empty pubkeys", func(t *testing.T) {
+ result, err := db.QueryMultiNodePackets(nil, 50, 0, "DESC", "", "")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result.Total != 0 {
+ t.Errorf("expected 0 for empty pubkeys, got %d", result.Total)
+ }
+ })
+
+ t.Run("single pubkey match", func(t *testing.T) {
+ result, err := db.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 0, "DESC", "", "")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result.Total < 1 {
+ t.Errorf("expected >=1, got %d", result.Total)
+ }
+ })
+
+ t.Run("multiple pubkeys", func(t *testing.T) {
+ result, err := db.QueryMultiNodePackets(
+ []string{"aabbccdd11223344", "eeff00112233aabb"}, 50, 0, "DESC", "", "")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result.Total < 1 {
+ t.Errorf("expected >=1, got %d", result.Total)
+ }
+ })
+
+ t.Run("with time filters", func(t *testing.T) {
+ result, err := db.QueryMultiNodePackets(
+ []string{"aabbccdd11223344"}, 50, 0, "ASC",
+ "2020-01-01T00:00:00Z", "2099-01-01T00:00:00Z")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result.Total < 1 {
+ t.Errorf("expected >=1, got %d", result.Total)
+ }
+ })
+
+ t.Run("default limit and order", func(t *testing.T) {
+ result, err := db.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 0, 0, "", "", "")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result.Total < 1 {
+ t.Errorf("expected >=1, got %d", result.Total)
+ }
+ })
+
+ t.Run("no match", func(t *testing.T) {
+ result, err := db.QueryMultiNodePackets([]string{"nonexistent"}, 50, 0, "DESC", "", "")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result.Total != 0 {
+ t.Errorf("expected 0, got %d", result.Total)
+ }
+ })
+}
+
+// --- Store.QueryMultiNodePackets ---
+
+func TestStoreQueryMultiNodePackets(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ t.Run("empty pubkeys", func(t *testing.T) {
+ result := store.QueryMultiNodePackets(nil, 50, 0, "DESC", "", "")
+ if result.Total != 0 {
+ t.Errorf("expected 0, got %d", result.Total)
+ }
+ })
+
+ t.Run("matching pubkey", func(t *testing.T) {
+ result := store.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 0, "DESC", "", "")
+ if result.Total < 1 {
+ t.Errorf("expected >=1, got %d", result.Total)
+ }
+ })
+
+ t.Run("ASC order", func(t *testing.T) {
+ result := store.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 0, "ASC", "", "")
+ if result.Total < 1 {
+ t.Errorf("expected >=1, got %d", result.Total)
+ }
+ })
+
+ t.Run("with since/until", func(t *testing.T) {
+ result := store.QueryMultiNodePackets(
+ []string{"aabbccdd11223344"}, 50, 0, "DESC",
+ "2020-01-01T00:00:00Z", "2099-01-01T00:00:00Z")
+ if result.Total < 1 {
+ t.Errorf("expected >=1, got %d", result.Total)
+ }
+ })
+
+ t.Run("offset beyond total", func(t *testing.T) {
+ result := store.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 50, 9999, "DESC", "", "")
+ if len(result.Packets) != 0 {
+ t.Errorf("expected 0 packets, got %d", len(result.Packets))
+ }
+ })
+
+ t.Run("default limit", func(t *testing.T) {
+ result := store.QueryMultiNodePackets([]string{"aabbccdd11223344"}, 0, 0, "DESC", "", "")
+ if result.Total < 1 {
+ t.Errorf("expected >=1, got %d", result.Total)
+ }
+ })
+}
+
+// --- IngestNewFromDB ---
+
+func TestIngestNewFromDB(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ initialMax := store.MaxTransmissionID()
+
+ // Insert a new transmission in DB
+ now := time.Now().UTC().Format(time.RFC3339)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('EEFF', 'newhash123456abcd', ?, 1, 4, '{"pubKey":"aabbccdd11223344","type":"ADVERT"}')`, now)
+ newTxID := 0
+ db.conn.QueryRow("SELECT MAX(id) FROM transmissions").Scan(&newTxID)
+
+ // Add observation for the new transmission
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (?, 1, 10.0, -92, '["cc"]', ?)`, newTxID, time.Now().Unix())
+
+ // Ingest
+ broadcastMaps, newMax := store.IngestNewFromDB(initialMax, 100)
+ if newMax <= initialMax {
+ t.Errorf("expected newMax > %d, got %d", initialMax, newMax)
+ }
+ if len(broadcastMaps) < 1 {
+ t.Errorf("expected >=1 broadcast maps, got %d", len(broadcastMaps))
+ }
+
+ // Verify broadcast map contains nested "packet" field (fixes #162)
+ if len(broadcastMaps) > 0 {
+ bm := broadcastMaps[0]
+ pkt, ok := bm["packet"]
+ if !ok || pkt == nil {
+ t.Error("broadcast map missing 'packet' field (required by packets.js)")
+ }
+ pktMap, ok := pkt.(map[string]interface{})
+ if ok {
+ for _, field := range []string{"id", "hash", "payload_type", "observer_id"} {
+ if _, exists := pktMap[field]; !exists {
+ t.Errorf("packet sub-object missing field %q", field)
+ }
+ }
+ }
+ // Verify decoded also present at top level (for live.js)
+ if _, ok := bm["decoded"]; !ok {
+ t.Error("broadcast map missing 'decoded' field (required by live.js)")
+ }
+ }
+
+ // Verify ingested into store
+ updatedMax := store.MaxTransmissionID()
+ if updatedMax < newMax {
+ t.Errorf("store max (%d) should be >= newMax (%d)", updatedMax, newMax)
+ }
+
+ t.Run("no new data", func(t *testing.T) {
+ maps, max := store.IngestNewFromDB(newMax, 100)
+ if maps != nil {
+ t.Errorf("expected nil for no new data, got %d maps", len(maps))
+ }
+ if max != newMax {
+ t.Errorf("expected same max %d, got %d", newMax, max)
+ }
+ })
+
+ t.Run("default limit", func(t *testing.T) {
+ _, _ = store.IngestNewFromDB(newMax, 0)
+ })
+}
+
+func TestIngestNewFromDBv2(t *testing.T) {
+ db := setupTestDBv2(t)
+ defer db.Close()
+ seedV2Data(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ initialMax := store.MaxTransmissionID()
+
+ now := time.Now().UTC().Format(time.RFC3339)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('EEFF', 'v2newhash12345678', ?, 1, 4, '{"pubKey":"aabbccdd11223344","type":"ADVERT"}')`, now)
+ newTxID := 0
+ db.conn.QueryRow("SELECT MAX(id) FROM transmissions").Scan(&newTxID)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_id, observer_name, snr, rssi, path_json, timestamp)
+ VALUES (?, 'obs1', 'Obs One', 10.0, -92, '["cc"]', ?)`, newTxID, time.Now().Unix())
+
+ broadcastMaps, newMax := store.IngestNewFromDB(initialMax, 100)
+ if newMax <= initialMax {
+ t.Errorf("expected newMax > %d, got %d", initialMax, newMax)
+ }
+ if len(broadcastMaps) < 1 {
+ t.Errorf("expected >=1 broadcast maps, got %d", len(broadcastMaps))
+ }
+}
+
+// --- MaxTransmissionID ---
+
+func TestMaxTransmissionID(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ maxID := store.MaxTransmissionID()
+ if maxID <= 0 {
+ t.Errorf("expected maxID > 0, got %d", maxID)
+ }
+
+ t.Run("empty store", func(t *testing.T) {
+ emptyStore := NewPacketStore(db)
+ if emptyStore.MaxTransmissionID() != 0 {
+ t.Error("expected 0 for empty store")
+ }
+ })
+}
+
+// --- Route handler DB fallback (no store) ---
+
+func TestHandleBulkHealthNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/nodes/bulk-health?limit=10", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ var body []interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ if body == nil {
+ t.Fatal("expected array response")
+ }
+}
+
+func TestHandleBulkHealthNoStoreMaxLimit(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/nodes/bulk-health?limit=500", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandleAnalyticsRFNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+
+ t.Run("basic", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/analytics/rf", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ if _, ok := body["snr"]; !ok {
+ t.Error("expected snr field")
+ }
+ if _, ok := body["payloadTypes"]; !ok {
+ t.Error("expected payloadTypes field")
+ }
+ })
+
+ t.Run("with region", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/analytics/rf?region=SJC", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+}
+
+func TestHandlePacketsNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+
+ t.Run("basic packets", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets?limit=10", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+
+ t.Run("multi-node", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets?nodes=aabbccdd11223344,eeff00112233aabb", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ if _, ok := body["packets"]; !ok {
+ t.Error("expected packets field")
+ }
+ })
+
+ t.Run("grouped", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets?groupByHash=true&limit=10", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+}
+
+func TestHandlePacketsMultiNodeWithStore(t *testing.T) {
+ _, router := setupTestServer(t)
+ req := httptest.NewRequest("GET", "/api/packets?nodes=aabbccdd11223344&order=asc&limit=10&offset=0", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ if _, ok := body["packets"]; !ok {
+ t.Error("expected packets field")
+ }
+}
+
+func TestHandlePacketDetailNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+
+ t.Run("by hash", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets/abc123def4567890", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 404 {
+ t.Fatalf("expected 404 (no store), got %d: %s", w.Code, w.Body.String())
+ }
+ })
+
+ t.Run("by ID", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets/1", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 404 {
+ t.Fatalf("expected 404 (no store), got %d: %s", w.Code, w.Body.String())
+ }
+ })
+
+ t.Run("not found", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets/9999", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 404 {
+ t.Fatalf("expected 404, got %d", w.Code)
+ }
+ })
+
+ t.Run("non-numeric non-hash", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets/notahash", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 404 {
+ t.Fatalf("expected 404, got %d", w.Code)
+ }
+ })
+}
+
+func TestHandleAnalyticsChannelsNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ if _, ok := body["activeChannels"]; !ok {
+ t.Error("expected activeChannels field")
+ }
+}
+
+// --- transmissionsForObserver (byObserver index path) ---
+
+func TestTransmissionsForObserverIndex(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Query packets for an observer — hits the byObserver index
+ result := store.QueryPackets(PacketQuery{Limit: 50, Observer: "obs1", Order: "DESC"})
+ if result.Total < 1 {
+ t.Errorf("expected >=1 packets for obs1, got %d", result.Total)
+ }
+
+ // Query with observer + type (uses from != nil path in transmissionsForObserver)
+ pt := 4
+ result2 := store.QueryPackets(PacketQuery{Limit: 50, Observer: "obs1", Type: &pt, Order: "DESC"})
+ if result2.Total < 1 {
+ t.Errorf("expected >=1 filtered packets, got %d", result2.Total)
+ }
+}
+
+// --- GetChannelMessages (dedup, observer, hops paths) ---
+
+func TestGetChannelMessagesFromStore(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Test channel should exist from seed data
+ messages, total := store.GetChannelMessages("#test", 100, 0)
+ if total < 1 {
+ t.Errorf("expected >=1 messages for #test, got %d", total)
+ }
+ if len(messages) < 1 {
+ t.Errorf("expected >=1 message entries, got %d", len(messages))
+ }
+
+ t.Run("non-existent channel", func(t *testing.T) {
+ msgs, total := store.GetChannelMessages("nonexistent", 100, 0)
+ if total != 0 || len(msgs) != 0 {
+ t.Errorf("expected 0 for nonexistent channel, got %d/%d", total, len(msgs))
+ }
+ })
+
+ t.Run("default limit", func(t *testing.T) {
+ _, total := store.GetChannelMessages("#test", 0, 0)
+ if total < 1 {
+ t.Errorf("expected >=1 with default limit, got %d", total)
+ }
+ })
+
+ t.Run("with offset", func(t *testing.T) {
+ _, _ = store.GetChannelMessages("#test", 10, 9999)
+ })
+}
+
+func TestGetChannelMessagesDedupe(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+
+ now := time.Now().UTC()
+ recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
+ epoch := now.Add(-1 * time.Hour).Unix()
+
+ seedTestData(t, db)
+
+ // Insert a #test channel message with a new, distinct hash ('dupchannelhash1234')
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('DDEE', 'dupchannelhash1234', ?, 1, 5, '{"type":"CHAN","channel":"#test","text":"Hello: World","sender":"TestUser"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (3, 1, 11.0, -91, '["aa"]', ?)`, epoch)
+
+ // Insert another message with identical text/sender but a DIFFERENT hash — exercises dedup, which presumably keys on content rather than hash (the hashes here differ)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('DDFF', 'dupchannelhash5678', ?, 1, 5, '{"type":"CHAN","channel":"#test","text":"Hello: World","sender":"TestUser"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (4, 2, 9.0, -93, '[]', ?)`, epoch)
+
+ store := NewPacketStore(db)
+ store.Load()
+
+ msgs, total := store.GetChannelMessages("#test", 100, 0)
+ // Should have messages, with some deduped
+ if total < 1 {
+ t.Errorf("expected >=1 total messages, got %d", total)
+ }
+ _ = msgs
+}
+
+// --- GetChannels ---
+
+func TestGetChannelsFromStore(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ channels := store.GetChannels("")
+ if len(channels) < 1 {
+ t.Errorf("expected >=1 channel, got %d", len(channels))
+ }
+
+ t.Run("with region", func(t *testing.T) {
+ ch := store.GetChannels("SJC")
+ _ = ch
+ })
+
+ t.Run("non-existent region", func(t *testing.T) {
+ ch := store.GetChannels("NONEXIST")
+ // Region filter may return 0 or fallback to unfiltered depending on DB content
+ _ = ch
+ })
+}
+
+// --- resolve (prefixMap) ---
+
+func TestPrefixMapResolve(t *testing.T) {
+ nodes := []nodeInfo{
+ {PublicKey: "aabbccdd11223344", Name: "NodeA", HasGPS: true, Lat: 37.5, Lon: -122.0},
+ {PublicKey: "aabbccdd55667788", Name: "NodeB", HasGPS: false},
+ {PublicKey: "eeff0011aabbccdd", Name: "NodeC", HasGPS: true, Lat: 38.0, Lon: -121.0},
+ }
+ pm := buildPrefixMap(nodes)
+
+ t.Run("exact match", func(t *testing.T) {
+ n := pm.resolve("aabbccdd11223344")
+ if n == nil || n.Name != "NodeA" {
+ t.Errorf("expected NodeA, got %v", n)
+ }
+ })
+
+ t.Run("prefix match single", func(t *testing.T) {
+ n := pm.resolve("eeff")
+ if n == nil || n.Name != "NodeC" {
+ t.Errorf("expected NodeC, got %v", n)
+ }
+ })
+
+ t.Run("prefix match multiple — prefer GPS", func(t *testing.T) {
+ n := pm.resolve("aabbccdd")
+ if n == nil {
+ t.Fatal("expected non-nil")
+ }
+ if !n.HasGPS {
+ t.Error("expected GPS-preferred candidate")
+ }
+ if n.Name != "NodeA" {
+ t.Errorf("expected NodeA (has GPS), got %s", n.Name)
+ }
+ })
+
+ t.Run("no match", func(t *testing.T) {
+ n := pm.resolve("zzzzz")
+ if n != nil {
+ t.Errorf("expected nil, got %v", n)
+ }
+ })
+
+ t.Run("multiple candidates no GPS", func(t *testing.T) {
+ noGPSNodes := []nodeInfo{
+ {PublicKey: "aa11bb22", Name: "X", HasGPS: false},
+ {PublicKey: "aa11cc33", Name: "Y", HasGPS: false},
+ }
+ pm2 := buildPrefixMap(noGPSNodes)
+ n := pm2.resolve("aa11")
+ if n == nil {
+ t.Fatal("expected non-nil")
+ }
+ // Should return first candidate
+ })
+}
+
+// --- pathLen ---
+
+func TestPathLen(t *testing.T) {
+ tests := []struct {
+ json string
+ want int
+ }{
+ {"", 0},
+ {"invalid", 0},
+ {`[]`, 0},
+ {`["aa"]`, 1},
+ {`["aa","bb","cc"]`, 3},
+ }
+ for _, tc := range tests {
+ got := pathLen(tc.json)
+ if got != tc.want {
+ t.Errorf("pathLen(%q) = %d, want %d", tc.json, got, tc.want)
+ }
+ }
+}
+
+// --- floatPtrOrNil ---
+
+func TestFloatPtrOrNil(t *testing.T) {
+ v := 3.14
+ if floatPtrOrNil(&v) != 3.14 {
+ t.Error("expected 3.14")
+ }
+ if floatPtrOrNil(nil) != nil {
+ t.Error("expected nil")
+ }
+}
+
+// --- nullFloatPtr ---
+
+func TestNullFloatPtr(t *testing.T) {
+ valid := sql.NullFloat64{Float64: 2.71, Valid: true}
+ p := nullFloatPtr(valid)
+ if p == nil || *p != 2.71 {
+ t.Errorf("expected 2.71, got %v", p)
+ }
+ invalid := sql.NullFloat64{Valid: false}
+ if nullFloatPtr(invalid) != nil {
+ t.Error("expected nil for invalid")
+ }
+}
+
+// --- nilIfEmpty ---
+
+func TestNilIfEmpty(t *testing.T) {
+ if nilIfEmpty("") != nil {
+ t.Error("expected nil for empty")
+ }
+ if nilIfEmpty("hello") != "hello" {
+ t.Error("expected 'hello'")
+ }
+}
+
+// --- pickBestObservation ---
+
+func TestPickBestObservation(t *testing.T) {
+ t.Run("empty observations", func(t *testing.T) {
+ tx := &StoreTx{}
+ pickBestObservation(tx)
+ if tx.ObserverID != "" {
+ t.Error("expected empty observer for no observations")
+ }
+ })
+
+ t.Run("single observation", func(t *testing.T) {
+ snr := 10.0
+ tx := &StoreTx{
+ Observations: []*StoreObs{
+ {ObserverID: "obs1", ObserverName: "One", SNR: &snr, PathJSON: `["aa"]`},
+ },
+ }
+ pickBestObservation(tx)
+ if tx.ObserverID != "obs1" {
+ t.Errorf("expected obs1, got %s", tx.ObserverID)
+ }
+ })
+
+ t.Run("picks longest path", func(t *testing.T) {
+ snr1, snr2 := 10.0, 5.0
+ tx := &StoreTx{
+ Observations: []*StoreObs{
+ {ObserverID: "obs1", SNR: &snr1, PathJSON: `["aa"]`},
+ {ObserverID: "obs2", SNR: &snr2, PathJSON: `["aa","bb","cc"]`},
+ },
+ }
+ pickBestObservation(tx)
+ if tx.ObserverID != "obs2" {
+ t.Errorf("expected obs2 (longest path), got %s", tx.ObserverID)
+ }
+ })
+}
+
+// --- indexByNode ---
+
+func TestIndexByNode(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ store := NewPacketStore(db)
+
+ t.Run("empty decoded_json", func(t *testing.T) {
+ tx := &StoreTx{Hash: "h1"}
+ store.indexByNode(tx)
+ if len(store.byNode) != 0 {
+ t.Error("expected no index entries")
+ }
+ })
+
+ t.Run("valid decoded_json", func(t *testing.T) {
+ tx := &StoreTx{
+ Hash: "h2",
+ DecodedJSON: `{"pubKey":"aabbccdd11223344","destPubKey":"eeff00112233aabb"}`,
+ }
+ store.indexByNode(tx)
+ if len(store.byNode["aabbccdd11223344"]) != 1 {
+ t.Error("expected pubKey indexed")
+ }
+ if len(store.byNode["eeff00112233aabb"]) != 1 {
+ t.Error("expected destPubKey indexed")
+ }
+ })
+
+ t.Run("duplicate hash skipped", func(t *testing.T) {
+ tx := &StoreTx{
+ Hash: "h2",
+ DecodedJSON: `{"pubKey":"aabbccdd11223344"}`,
+ }
+ store.indexByNode(tx)
+ // Should not add duplicate
+ if len(store.byNode["aabbccdd11223344"]) != 1 {
+ t.Errorf("expected 1, got %d", len(store.byNode["aabbccdd11223344"]))
+ }
+ })
+
+ t.Run("invalid json", func(t *testing.T) {
+ tx := &StoreTx{Hash: "h3", DecodedJSON: "not json"}
+ store.indexByNode(tx)
+ // Should not panic or add anything
+ })
+}
+
+// --- resolveVersion ---
+
+func TestResolveVersion(t *testing.T) {
+ old := Version
+ defer func() { Version = old }()
+
+ Version = "v1.2.3"
+ if resolveVersion() != "v1.2.3" {
+ t.Error("expected v1.2.3")
+ }
+
+ Version = ""
+ if resolveVersion() != "unknown" {
+ t.Error("expected unknown when empty")
+ }
+}
+
+// --- wsOrStatic ---
+
+func TestWsOrStaticNonWebSocket(t *testing.T) {
+ hub := NewHub()
+ staticHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(200)
+ w.Write([]byte("static"))
+ })
+ handler := wsOrStatic(hub, staticHandler)
+
+ req := httptest.NewRequest("GET", "/", nil)
+ w := httptest.NewRecorder()
+ handler.ServeHTTP(w, req)
+
+ if w.Code != 200 {
+ t.Errorf("expected 200, got %d", w.Code)
+ }
+ if w.Body.String() != "static" {
+ t.Errorf("expected 'static', got %s", w.Body.String())
+ }
+}
+
+// --- Poller.Start ---
+
+func TestPollerStartStop(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ hub := NewHub()
+
+ poller := NewPoller(db, hub, 50*time.Millisecond)
+ go poller.Start()
+ time.Sleep(150 * time.Millisecond)
+ poller.Stop()
+}
+
+func TestPollerStartWithStore(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ hub := NewHub()
+ store := NewPacketStore(db)
+ store.Load()
+
+ poller := NewPoller(db, hub, 50*time.Millisecond)
+ poller.store = store
+ go poller.Start()
+
+ // Insert new data while poller running
+ now := time.Now().UTC().Format(time.RFC3339)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type)
+ VALUES ('FFEE', 'pollerhash12345678', ?, 1, 4)`, now)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES ((SELECT MAX(id) FROM transmissions), 1, 10.0, -92, '[]', ?)`, time.Now().Unix())
+
+ time.Sleep(200 * time.Millisecond)
+ poller.Stop()
+}
+
+// --- perfMiddleware slow query path ---
+
+func TestPerfMiddlewareSlowQuery(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ cfg := &Config{Port: 3000}
+ hub := NewHub()
+ srv := NewServer(db, cfg, hub)
+ store := NewPacketStore(db)
+ store.Load()
+ srv.store = store
+
+ router := mux.NewRouter()
+ srv.RegisterRoutes(router)
+
+ // Add a slow handler
+ router.HandleFunc("/api/test-slow", func(w http.ResponseWriter, r *http.Request) {
+ time.Sleep(110 * time.Millisecond)
+ writeJSON(w, map[string]string{"ok": "true"})
+ }).Methods("GET")
+
+ req := httptest.NewRequest("GET", "/api/test-slow", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ if len(srv.perfStats.SlowQueries) < 1 {
+ t.Error("expected slow query to be recorded")
+ }
+}
+
+func TestPerfMiddlewareNonAPIPath(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ cfg := &Config{Port: 3000}
+ hub := NewHub()
+ srv := NewServer(db, cfg, hub)
+ router := mux.NewRouter()
+ srv.RegisterRoutes(router)
+
+ // Non-API path should pass through without perf tracking
+ router.HandleFunc("/not-api", func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(200)
+ }).Methods("GET")
+
+ initialReqs := srv.perfStats.Requests
+ req := httptest.NewRequest("GET", "/not-api", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ if srv.perfStats.Requests != initialReqs {
+ t.Error("non-API request should not be tracked")
+ }
+}
+
+// --- writeJSON error path ---
+
+func TestWriteJSONErrorPath(t *testing.T) {
+ w := httptest.NewRecorder()
+ // math.Inf cannot be marshaled to JSON — triggers the error path
+ writeJSON(w, math.Inf(1))
+ // Should not panic, just log the error
+}
+
+// --- GetObserverPacketCounts ---
+
+func TestGetObserverPacketCountsV3(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ counts := db.GetObserverPacketCounts(0)
+ if len(counts) == 0 {
+ t.Error("expected some observer counts")
+ }
+}
+
+// --- Additional route fallback tests ---
+
+func TestHandleAnalyticsTopologyNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/analytics/topology", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandleAnalyticsDistanceNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/analytics/distance", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandleAnalyticsHashSizesNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/analytics/hash-sizes", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandleAnalyticsSubpathsNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/analytics/subpaths", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandleAnalyticsSubpathDetailNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+
+ t.Run("with hops", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/analytics/subpath-detail?hops=aa,bb", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+
+ t.Run("missing hops", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/analytics/subpath-detail", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+
+ t.Run("single hop", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/analytics/subpath-detail?hops=aa", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+}
+
+func TestHandleChannelsNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/channels", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandleChannelMessagesNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/channels/test/messages", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandlePacketTimestampsNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+
+ t.Run("with since", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets/timestamps?since=2020-01-01T00:00:00Z", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+
+ t.Run("missing since", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets/timestamps", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 400 {
+ t.Fatalf("expected 400, got %d", w.Code)
+ }
+ })
+}
+
+func TestHandleStatsNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/stats", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandleHealthNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/health", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ if body["status"] != "ok" {
+ t.Errorf("expected status ok, got %v", body["status"])
+ }
+}
+
+// --- buildTransmissionWhere additional coverage ---
+
+func TestBuildTransmissionWhereRFC3339(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ t.Run("RFC3339 since", func(t *testing.T) {
+ q := PacketQuery{Since: "2020-01-01T00:00:00Z"}
+ where, args := db.buildTransmissionWhere(q)
+ if len(where) != 1 {
+ t.Errorf("expected 1 clause, got %d", len(where))
+ }
+ if len(args) != 1 {
+ t.Errorf("expected 1 arg, got %d", len(args))
+ }
+ if !strings.Contains(where[0], "observations") {
+ t.Error("expected observations subquery for RFC3339 since")
+ }
+ })
+
+ t.Run("RFC3339 until", func(t *testing.T) {
+ q := PacketQuery{Until: "2099-01-01T00:00:00Z"}
+ where, args := db.buildTransmissionWhere(q)
+ if len(where) != 1 {
+ t.Errorf("expected 1 clause, got %d", len(where))
+ }
+ if len(args) != 1 {
+ t.Errorf("expected 1 arg, got %d", len(args))
+ }
+ })
+
+ t.Run("non-RFC3339 since", func(t *testing.T) {
+ q := PacketQuery{Since: "2020-01-01"}
+ where, _ := db.buildTransmissionWhere(q)
+ if len(where) != 1 {
+ t.Errorf("expected 1 clause, got %d", len(where))
+ }
+ if strings.Contains(where[0], "observations") {
+ t.Error("expected direct first_seen comparison for non-RFC3339")
+ }
+ })
+
+ t.Run("observer v3", func(t *testing.T) {
+ q := PacketQuery{Observer: "obs1"}
+ where, _ := db.buildTransmissionWhere(q)
+ if len(where) != 1 {
+ t.Errorf("expected 1 clause, got %d", len(where))
+ }
+ if !strings.Contains(where[0], "observer_idx") {
+ t.Error("expected observer_idx subquery for v3")
+ }
+ })
+
+ t.Run("region v3", func(t *testing.T) {
+ q := PacketQuery{Region: "SJC"}
+ where, _ := db.buildTransmissionWhere(q)
+ if len(where) != 1 {
+ t.Errorf("expected 1 clause, got %d", len(where))
+ }
+ if !strings.Contains(where[0], "iata") {
+ t.Error("expected iata subquery for region")
+ }
+ })
+}
+
+func TestBuildTransmissionWhereV2(t *testing.T) {
+ db := setupTestDBv2(t)
+ defer db.Close()
+ seedV2Data(t, db)
+
+ t.Run("observer v2", func(t *testing.T) {
+ q := PacketQuery{Observer: "obs1"}
+ where, _ := db.buildTransmissionWhere(q)
+ if len(where) != 1 {
+ t.Errorf("expected 1 clause, got %d", len(where))
+ }
+ if !strings.Contains(where[0], "observer_id") {
+ t.Error("expected observer_id subquery for v2")
+ }
+ })
+
+ t.Run("region v2", func(t *testing.T) {
+ q := PacketQuery{Region: "SJC"}
+ where, _ := db.buildTransmissionWhere(q)
+ if len(where) != 1 {
+ t.Errorf("expected 1 clause, got %d", len(where))
+ }
+ })
+}
+
+// --- GetMaxTransmissionID (DB) ---
+
+func TestDBGetMaxTransmissionID(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ maxID := db.GetMaxTransmissionID()
+ if maxID <= 0 {
+ t.Errorf("expected > 0, got %d", maxID)
+ }
+}
+
+// --- GetNodeLocations ---
+
+func TestGetNodeLocations(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ locs := db.GetNodeLocations()
+ if len(locs) == 0 {
+ t.Error("expected some node locations")
+ }
+ pk := strings.ToLower("aabbccdd11223344")
+ if entry, ok := locs[pk]; ok {
+ if entry["lat"] == nil {
+ t.Error("expected non-nil lat")
+ }
+ } else {
+ t.Error("expected node location for test repeater")
+ }
+}
+
+// --- Store edge cases ---
+
+func TestStoreQueryPacketsEdgeCases(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ t.Run("hash filter", func(t *testing.T) {
+ result := store.QueryPackets(PacketQuery{Hash: "abc123def4567890", Limit: 50, Order: "DESC"})
+ if result.Total != 1 {
+ t.Errorf("expected 1, got %d", result.Total)
+ }
+ })
+
+ t.Run("non-existent hash", func(t *testing.T) {
+ result := store.QueryPackets(PacketQuery{Hash: "0000000000000000", Limit: 50, Order: "DESC"})
+ if result.Total != 0 {
+ t.Errorf("expected 0, got %d", result.Total)
+ }
+ })
+
+ t.Run("ASC order", func(t *testing.T) {
+ result := store.QueryPackets(PacketQuery{Limit: 50, Order: "ASC"})
+ if result.Total < 1 {
+ t.Error("expected results")
+ }
+ })
+
+ t.Run("offset beyond end", func(t *testing.T) {
+ result := store.QueryPackets(PacketQuery{Limit: 50, Offset: 9999, Order: "DESC"})
+ if len(result.Packets) != 0 {
+ t.Errorf("expected 0, got %d", len(result.Packets))
+ }
+ })
+
+ t.Run("node filter with index", func(t *testing.T) {
+ result := store.QueryPackets(PacketQuery{Node: "aabbccdd11223344", Limit: 50, Order: "DESC"})
+ if result.Total < 1 {
+ t.Error("expected >=1")
+ }
+ })
+
+ t.Run("route filter", func(t *testing.T) {
+ rt := 1
+ result := store.QueryPackets(PacketQuery{Route: &rt, Limit: 50, Order: "DESC"})
+ if result.Total < 1 {
+ t.Error("expected >=1")
+ }
+ })
+
+ t.Run("since filter", func(t *testing.T) {
+ result := store.QueryPackets(PacketQuery{Since: "2020-01-01", Limit: 50, Order: "DESC"})
+ if result.Total < 1 {
+ t.Error("expected >=1")
+ }
+ })
+
+ t.Run("until filter", func(t *testing.T) {
+ result := store.QueryPackets(PacketQuery{Until: "2099-01-01", Limit: 50, Order: "DESC"})
+ if result.Total < 1 {
+ t.Error("expected >=1")
+ }
+ })
+}
+
+// --- HandlePackets with various options ---
+
+func TestHandlePacketsWithQueryOptions(t *testing.T) {
+ _, router := setupTestServer(t)
+
+ t.Run("with type filter", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets?type=4&limit=10", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+
+ t.Run("with route filter", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets?route=1&limit=10", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+
+ t.Run("expand observations", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets?limit=10&expand=observations", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+
+ t.Run("ASC order", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets?order=asc&limit=10", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+}
+
+// --- handleObservers and handleObserverDetail ---
+
+func TestHandleObserversNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/observers", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandleObserverDetailNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/observers/obs1", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d: %s", w.Code, w.Body.String())
+ }
+}
+
+// TestHandleObserverAnalyticsNoStore verifies observer analytics responds
+// 503 when no packet store is configured.
+func TestHandleObserverAnalyticsNoStore(t *testing.T) {
+	_, router := setupNoStoreServer(t)
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/observers/obs1/analytics", nil))
+	if rec.Code != 503 {
+		t.Fatalf("expected 503, got %d: %s", rec.Code, rec.Body.String())
+	}
+}
+
+// --- HandleTraces ---
+
+// TestHandleTracesNoStore verifies GET /api/traces/{hash} returns 200
+// without a packet store.
+func TestHandleTracesNoStore(t *testing.T) {
+	_, router := setupNoStoreServer(t)
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/traces/abc123def4567890", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// --- HandleResolveHops ---
+
+// TestHandleResolveHops exercises /api/resolve-hops both with an empty hop
+// list and with an explicit hops parameter; both must return 200.
+func TestHandleResolveHops(t *testing.T) {
+	_, router := setupTestServer(t)
+
+	cases := []struct {
+		name string
+		url  string
+	}{
+		{"empty hops", "/api/resolve-hops"},
+		{"with hops", "/api/resolve-hops?hops=aabb,eeff"},
+	}
+	for _, tc := range cases {
+		t.Run(tc.name, func(t *testing.T) {
+			rec := httptest.NewRecorder()
+			router.ServeHTTP(rec, httptest.NewRequest("GET", tc.url, nil))
+			if rec.Code != 200 {
+				t.Fatalf("expected 200, got %d", rec.Code)
+			}
+		})
+	}
+}
+
+// --- HandlePerf ---
+
+// TestHandlePerfNoStore verifies GET /api/perf returns 200 without a store.
+func TestHandlePerfNoStore(t *testing.T) {
+	_, router := setupNoStoreServer(t)
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/perf", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// --- HandleIATACoords ---
+
+// TestHandleIATACoordsNoStore verifies GET /api/iata-coords returns 200
+// without a packet store.
+func TestHandleIATACoordsNoStore(t *testing.T) {
+	_, router := setupNoStoreServer(t)
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/iata-coords", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// --- Conversion helpers ---
+
+// TestStrOrNil checks the empty-string → nil conversion helper.
+func TestStrOrNil(t *testing.T) {
+	if got := strOrNil(""); got != nil {
+		t.Error("expected nil")
+	}
+	if got := strOrNil("abc"); got != "abc" {
+		t.Error("expected abc")
+	}
+}
+
+// TestIntPtrOrNil checks nil passthrough and pointer dereference.
+func TestIntPtrOrNil(t *testing.T) {
+	if got := intPtrOrNil(nil); got != nil {
+		t.Error("expected nil")
+	}
+	value := 42
+	if got := intPtrOrNil(&value); got != 42 {
+		t.Error("expected 42")
+	}
+}
+
+// TestNullIntPtr checks sql.NullInt64 conversion for both validity states.
+func TestNullIntPtr(t *testing.T) {
+	if p := nullIntPtr(sql.NullInt64{Int64: 7, Valid: true}); p == nil || *p != 7 {
+		t.Error("expected 7")
+	}
+	if p := nullIntPtr(sql.NullInt64{Valid: false}); p != nil {
+		t.Error("expected nil")
+	}
+}
+
+// TestNullStr checks sql.NullString → value/nil conversion.
+func TestNullStr(t *testing.T) {
+	if got := nullStr(sql.NullString{String: "hello", Valid: true}); got != "hello" {
+		t.Error("expected hello")
+	}
+	if got := nullStr(sql.NullString{Valid: false}); got != nil {
+		t.Error("expected nil")
+	}
+}
+
+// TestNullStrVal checks sql.NullString → string conversion; invalid maps to "".
+func TestNullStrVal(t *testing.T) {
+	if got := nullStrVal(sql.NullString{String: "test", Valid: true}); got != "test" {
+		t.Error("expected test")
+	}
+	if got := nullStrVal(sql.NullString{Valid: false}); got != "" {
+		t.Error("expected empty string")
+	}
+}
+
+// TestNullFloat checks sql.NullFloat64 → value/nil conversion.
+func TestNullFloat(t *testing.T) {
+	if got := nullFloat(sql.NullFloat64{Float64: 1.5, Valid: true}); got != 1.5 {
+		t.Error("expected 1.5")
+	}
+	if got := nullFloat(sql.NullFloat64{Valid: false}); got != nil {
+		t.Error("expected nil")
+	}
+}
+
+// TestNullInt checks sql.NullInt64 → value/nil conversion.
+func TestNullInt(t *testing.T) {
+	if got := nullInt(sql.NullInt64{Int64: 99, Valid: true}); got != 99 {
+		t.Error("expected 99")
+	}
+	if got := nullInt(sql.NullInt64{Valid: false}); got != nil {
+		t.Error("expected nil")
+	}
+}
+
+// --- resolveCommit ---
+
+// TestResolveCommit covers both the preset-Commit fast path and the fallback
+// path; the package-level Commit variable is restored afterwards.
+func TestResolveCommit(t *testing.T) {
+	saved := Commit
+	defer func() { Commit = saved }()
+
+	Commit = "abc123"
+	if resolveCommit() != "abc123" {
+		t.Error("expected abc123")
+	}
+
+	// With no .git-commit file and possibly no git, should return something
+	Commit = ""
+	if resolveCommit() == "" {
+		t.Error("expected non-empty result")
+	}
+}
+
+// --- parsePathJSON ---
+
+// TestParsePathJSON checks nil results for empty/[]/invalid input and that a
+// valid two-hop array parses into two hops.
+func TestParsePathJSON(t *testing.T) {
+	if got := parsePathJSON(""); got != nil {
+		t.Error("expected nil for empty")
+	}
+	if got := parsePathJSON("[]"); got != nil {
+		t.Error("expected nil for []")
+	}
+	if got := parsePathJSON("invalid"); got != nil {
+		t.Error("expected nil for invalid")
+	}
+	if hops := parsePathJSON(`["aa","bb"]`); len(hops) != 2 {
+		t.Errorf("expected 2 hops, got %d", len(hops))
+	}
+}
+
+// --- Store.GetPerfStoreStats & GetCacheStats ---
+
+// TestStorePerfAndCacheStats checks that perf-store and cache stats expose
+// their expected top-level keys after Load().
+func TestStorePerfAndCacheStats(t *testing.T) {
+	db := setupTestDB(t)
+	defer db.Close()
+	seedTestData(t, db)
+	store := NewPacketStore(db)
+	store.Load()
+
+	if _, ok := store.GetPerfStoreStats()["totalLoaded"]; !ok {
+		t.Error("expected totalLoaded")
+	}
+	if _, ok := store.GetCacheStats()["size"]; !ok {
+		t.Error("expected size")
+	}
+}
+
+// --- enrichObs ---
+
+// TestEnrichObs picks any loaded observation and verifies enrichment adds an
+// observer_id field; skips when the seed produced no observations.
+func TestEnrichObs(t *testing.T) {
+	db := setupTestDB(t)
+	defer db.Close()
+	seedTestData(t, db)
+	store := NewPacketStore(db)
+	store.Load()
+
+	// Grab an arbitrary observation from the store's index.
+	var sample *StoreObs
+	for _, o := range store.byObsID {
+		sample = o
+		break
+	}
+	if sample == nil {
+		t.Skip("no observations loaded")
+	}
+
+	if enriched := store.enrichObs(sample); enriched["observer_id"] == nil {
+		t.Error("expected observer_id")
+	}
+}
+
+// --- HandleNodeSearch ---
+
+// TestHandleNodeSearch exercises /api/nodes/search with a non-empty and an
+// empty query string; both must return 200.
+func TestHandleNodeSearch(t *testing.T) {
+	_, router := setupTestServer(t)
+
+	t.Run("with query", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/search?q=Test", nil))
+		if rec.Code != 200 {
+			t.Fatalf("expected 200, got %d", rec.Code)
+		}
+	})
+
+	t.Run("empty query", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/search?q=", nil))
+		if rec.Code != 200 {
+			t.Fatalf("expected 200, got %d", rec.Code)
+		}
+	})
+}
+
+// --- HandleNodeDetail ---
+
+// TestHandleNodeDetail checks 200 for a seeded node and 404 for an unknown key.
+func TestHandleNodeDetail(t *testing.T) {
+	_, router := setupTestServer(t)
+
+	t.Run("existing", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344", nil))
+		if rec.Code != 200 {
+			t.Fatalf("expected 200, got %d", rec.Code)
+		}
+	})
+
+	t.Run("not found", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/nonexistent12345678", nil))
+		if rec.Code != 404 {
+			t.Fatalf("expected 404, got %d", rec.Code)
+		}
+	})
+}
+
+// --- HandleNodeHealth ---
+
+// TestHandleNodeHealth checks that health for an unknown node returns 404.
+func TestHandleNodeHealth(t *testing.T) {
+	_, router := setupTestServer(t)
+
+	t.Run("not found", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/nonexistent12345678/health", nil))
+		if rec.Code != 404 {
+			t.Fatalf("expected 404, got %d", rec.Code)
+		}
+	})
+}
+
+// --- HandleNodePaths ---
+
+// TestHandleNodePaths checks 200 for a seeded node's paths and 404 for an
+// unknown node.
+func TestHandleNodePaths(t *testing.T) {
+	_, router := setupTestServer(t)
+
+	t.Run("existing", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344/paths", nil))
+		if rec.Code != 200 {
+			t.Fatalf("expected 200, got %d", rec.Code)
+		}
+	})
+
+	t.Run("not found", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/nonexistent12345678/paths", nil))
+		if rec.Code != 404 {
+			t.Fatalf("expected 404, got %d", rec.Code)
+		}
+	})
+}
+
+// --- HandleNodeAnalytics ---
+
+// TestHandleNodeAnalytics covers a seeded node, an unknown node (404), and
+// the days-parameter clamping at both ends (0 and 999).
+func TestHandleNodeAnalytics(t *testing.T) {
+	_, router := setupTestServer(t)
+
+	t.Run("existing", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344/analytics?days=7", nil))
+		if rec.Code != 200 {
+			t.Fatalf("expected 200, got %d", rec.Code)
+		}
+	})
+
+	t.Run("not found", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/nonexistent/analytics", nil))
+		if rec.Code != 404 {
+			t.Fatalf("expected 404, got %d", rec.Code)
+		}
+	})
+
+	t.Run("days bounds", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344/analytics?days=0", nil))
+		if rec.Code != 200 {
+			t.Fatalf("expected 200, got %d", rec.Code)
+		}
+	})
+
+	t.Run("days max", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344/analytics?days=999", nil))
+		if rec.Code != 200 {
+			t.Fatalf("expected 200, got %d", rec.Code)
+		}
+	})
+}
+
+// --- HandleNetworkStatus ---
+
+// TestHandleNetworkStatus verifies GET /api/nodes/network-status returns 200.
+func TestHandleNetworkStatus(t *testing.T) {
+	_, router := setupTestServer(t)
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/nodes/network-status", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// --- HandleConfigEndpoints ---
+
+// TestHandleConfigEndpoints verifies every /api/config/* endpoint returns 200.
+func TestHandleConfigEndpoints(t *testing.T) {
+	_, router := setupTestServer(t)
+
+	for _, endpoint := range []string{
+		"/api/config/cache",
+		"/api/config/client",
+		"/api/config/regions",
+		"/api/config/theme",
+		"/api/config/map",
+	} {
+		t.Run(endpoint, func(t *testing.T) {
+			rec := httptest.NewRecorder()
+			router.ServeHTTP(rec, httptest.NewRequest("GET", endpoint, nil))
+			if rec.Code != 200 {
+				t.Fatalf("expected 200, got %d for %s", rec.Code, endpoint)
+			}
+		})
+	}
+}
+
+// --- HandleAudioLabBuckets ---
+
+// TestHandleAudioLabBuckets verifies /api/audio-lab/buckets does not crash.
+// The endpoint may legitimately return 200 or 404 depending on whether the
+// audio lab is enabled, so only a server error (5xx) fails the test.
+// (Previously the non-200 branch was empty, so the test asserted nothing.)
+func TestHandleAudioLabBuckets(t *testing.T) {
+	_, router := setupTestServer(t)
+	req := httptest.NewRequest("GET", "/api/audio-lab/buckets", nil)
+	w := httptest.NewRecorder()
+	router.ServeHTTP(w, req)
+	if w.Code >= 500 {
+		t.Fatalf("unexpected server error: %d", w.Code)
+	}
+}
+
+// --- txToMap ---
+
+// TestTxToMap builds a fully-populated StoreTx and checks that txToMap maps
+// id, hash, and snr through to the output map unchanged.
+func TestTxToMap(t *testing.T) {
+	snr := 10.5
+	rssi := -90.0
+	pt := 4
+	rt := 1
+	// Fixture exercises every pointer/optional field of StoreTx at once.
+	tx := &StoreTx{
+		ID:               1,
+		RawHex:           "AABB",
+		Hash:             "abc123",
+		FirstSeen:        "2025-01-01",
+		RouteType:        &rt,
+		PayloadType:      &pt,
+		DecodedJSON:      `{"type":"ADVERT"}`,
+		ObservationCount: 2,
+		ObserverID:       "obs1",
+		ObserverName:     "Obs One",
+		SNR:              &snr,
+		RSSI:             &rssi,
+		PathJSON:         `["aa"]`,
+		Direction:        "RX",
+	}
+	m := txToMap(tx)
+	if m["id"] != 1 {
+		t.Error("expected id 1")
+	}
+	if m["hash"] != "abc123" {
+		t.Error("expected hash abc123")
+	}
+	// Pointer fields should be dereferenced into plain values.
+	if m["snr"] != 10.5 {
+		t.Error("expected snr 10.5")
+	}
+}
+
+// --- filterTxSlice ---
+
+// TestFilterTxSlice checks that the predicate-based filter keeps exactly the
+// matching transmissions.
+func TestFilterTxSlice(t *testing.T) {
+	input := []*StoreTx{
+		{ID: 1, Hash: "a"},
+		{ID: 2, Hash: "b"},
+		{ID: 3, Hash: "a"},
+	}
+	matched := filterTxSlice(input, func(tx *StoreTx) bool { return tx.Hash == "a" })
+	if len(matched) != 2 {
+		t.Errorf("expected 2, got %d", len(matched))
+	}
+}
+
+// --- GetTimestamps ---
+
+// TestStoreGetTimestamps checks that a far-past cutoff returns at least one
+// timestamp from the seeded data.
+func TestStoreGetTimestamps(t *testing.T) {
+	db := setupTestDB(t)
+	defer db.Close()
+	seedTestData(t, db)
+	store := NewPacketStore(db)
+	store.Load()
+
+	if ts := store.GetTimestamps("2000-01-01"); len(ts) < 1 {
+		t.Error("expected >=1 timestamps")
+	}
+}
+
+// intPtr returns a pointer to v — a convenience for building fixtures with
+// optional-int fields.
+func intPtr(v int) *int {
+	return &v
+}
+
+// setupRichTestDB creates a test DB with richer data including paths, multiple observers, channel data.
+// Unlike before, every seed INSERT is checked: a failed insert aborts the test
+// immediately instead of silently leaving the DB partially seeded.
+// NOTE(review): observation rows reference hard-coded transmission_id values
+// (3, 4, 5) that assume seedTestData inserted exactly two transmissions first.
+func setupRichTestDB(t *testing.T) *DB {
+	t.Helper()
+	db := setupTestDB(t)
+
+	now := time.Now().UTC()
+	recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
+	yesterday := now.Add(-24 * time.Hour).Format(time.RFC3339)
+	recentEpoch := now.Add(-1 * time.Hour).Unix()
+	yesterdayEpoch := now.Add(-24 * time.Hour).Unix()
+
+	seedTestData(t, db)
+
+	// mustExec fails the test on any seed-insert error.
+	mustExec := func(query string, args ...interface{}) {
+		t.Helper()
+		if _, err := db.conn.Exec(query, args...); err != nil {
+			t.Fatalf("seed insert failed: %v", err)
+		}
+	}
+
+	// Add advert packet with raw_hex that has valid header + path bytes for hash size parsing
+	// route_type 1 = FLOOD, path byte at position 1 (hex index 2..3)
+	// header: 0x01 (route_type=1), path byte: 0x40 (hashSize bits=01 → size 2)
+	mustExec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+		VALUES ('0140aabbccdd', 'hash_with_path_01', ?, 1, 4, '{"pubKey":"aabbccdd11223344","name":"TestRepeater","type":"ADVERT"}')`, recent)
+	mustExec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+		VALUES (3, 1, 10.0, -91, '["aabb","ccdd"]', ?)`, recentEpoch)
+
+	// Another advert with 3-byte hash size: header 0x01, path byte 0x80 (bits=10 → size 3)
+	mustExec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+		VALUES ('0180eeff0011', 'hash_with_path_02', ?, 1, 4, '{"pubKey":"eeff00112233aabb","name":"TestCompanion","type":"ADVERT"}')`, yesterday)
+	mustExec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+		VALUES (4, 2, 8.5, -94, '["eeff","0011","2233"]', ?)`, yesterdayEpoch)
+
+	// Another channel message with different sender for analytics
+	mustExec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+		VALUES ('CC01', 'chan_msg_hash_001', ?, 1, 5, '{"type":"CHAN","channel":"#test","text":"User2: Another msg","sender":"User2","channelHash":"abc123"}')`, recent)
+	mustExec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+		VALUES (5, 1, 14.0, -88, '["aa"]', ?)`, recentEpoch)
+
+	return db
+}
+
+// --- Store-backed analytics tests ---
+
+// TestStoreGetBulkHealthWithStore checks that bulk health returns rows with
+// public_key and stats fields, and that the region filter runs cleanly.
+func TestStoreGetBulkHealthWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	rows := store.GetBulkHealth(50, "")
+	if len(rows) == 0 {
+		t.Error("expected bulk health results")
+	}
+	// Check that results have expected structure
+	for _, row := range rows {
+		if _, ok := row["public_key"]; !ok {
+			t.Error("expected public_key field")
+		}
+		if _, ok := row["stats"]; !ok {
+			t.Error("expected stats field")
+		}
+	}
+
+	t.Run("with region filter", func(t *testing.T) {
+		_ = store.GetBulkHealth(50, "SJC")
+	})
+}
+
+// TestStoreGetAnalyticsHashSizes checks the hash-size analytics result shape
+// (total field and distribution map) plus the region-filtered path.
+func TestStoreGetAnalyticsHashSizes(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	res := store.GetAnalyticsHashSizes("")
+	if res["total"] == nil {
+		t.Error("expected total field")
+	}
+	if _, ok := res["distribution"].(map[string]int); !ok {
+		t.Error("expected distribution map")
+	}
+
+	t.Run("with region", func(t *testing.T) {
+		_ = store.GetAnalyticsHashSizes("SJC")
+	})
+}
+
+// TestStoreGetAnalyticsSubpaths checks the subpath analytics result contains
+// a subpaths field, for both the unfiltered and region-filtered paths.
+func TestStoreGetAnalyticsSubpaths(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	res := store.GetAnalyticsSubpaths("", 2, 8, 100)
+	if _, ok := res["subpaths"]; !ok {
+		t.Error("expected subpaths field")
+	}
+
+	t.Run("with region", func(t *testing.T) {
+		_ = store.GetAnalyticsSubpaths("SJC", 2, 4, 50)
+	})
+}
+
+// TestSubpathPrecomputedIndex verifies that Load() populates the precomputed
+// subpath index (spIndex / spTotalPaths), pins the exact subpath counts for
+// the rich seed data, and checks that the fast (no region) and slow (region)
+// query paths return the same result shape.
+func TestSubpathPrecomputedIndex(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	// After Load(), the precomputed index must be populated.
+	if len(store.spIndex) == 0 {
+		t.Fatal("expected spIndex to be populated after Load()")
+	}
+	if store.spTotalPaths == 0 {
+		t.Fatal("expected spTotalPaths > 0 after Load()")
+	}
+
+	// The rich test DB has paths ["aa","bb"], ["aabb","ccdd"], and
+	// ["eeff","0011","2233"]. That yields 5 unique raw subpaths.
+	// NOTE: these exact counts are coupled to setupRichTestDB's seed rows.
+	expectedRaw := map[string]int{
+		"aa,bb":          1,
+		"aabb,ccdd":      1,
+		"eeff,0011":      1,
+		"0011,2233":      1,
+		"eeff,0011,2233": 1,
+	}
+	for key, want := range expectedRaw {
+		got, ok := store.spIndex[key]
+		if !ok {
+			t.Errorf("expected spIndex[%q] to exist", key)
+		} else if got != want {
+			t.Errorf("spIndex[%q] = %d, want %d", key, got, want)
+		}
+	}
+	if store.spTotalPaths != 3 {
+		t.Errorf("spTotalPaths = %d, want 3", store.spTotalPaths)
+	}
+
+	// Fast-path (no region) and slow-path (with region) must return the
+	// same shape.
+	fast := store.GetAnalyticsSubpaths("", 2, 8, 100)
+	slow := store.GetAnalyticsSubpaths("SJC", 2, 4, 50)
+	for _, r := range []map[string]interface{}{fast, slow} {
+		if _, ok := r["subpaths"]; !ok {
+			t.Error("missing subpaths in result")
+		}
+		if _, ok := r["totalPaths"]; !ok {
+			t.Error("missing totalPaths in result")
+		}
+	}
+
+	// Verify fast path totalPaths matches index.
+	if tp, ok := fast["totalPaths"].(int); ok && tp != store.spTotalPaths {
+		t.Errorf("fast totalPaths=%d, spTotalPaths=%d", tp, store.spTotalPaths)
+	}
+}
+
+// TestStoreGetAnalyticsRFCacheHit calls GetAnalyticsRF twice (miss, then hit)
+// and verifies the cache recorded at least one hit.
+func TestStoreGetAnalyticsRFCacheHit(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	// First call — cache miss
+	if store.GetAnalyticsRF("")["totalPackets"] == nil {
+		t.Error("expected totalPackets")
+	}
+
+	// Second call — should hit cache
+	if store.GetAnalyticsRF("")["totalPackets"] == nil {
+		t.Error("expected cached totalPackets")
+	}
+
+	// Verify cache hit was recorded
+	if hits, _ := store.GetCacheStats()["hits"].(int64); hits < 1 {
+		t.Error("expected at least 1 cache hit")
+	}
+}
+
+// TestStoreGetAnalyticsTopology checks the topology analytics result and the
+// #155 regression: uniqueNodes must equal the DB's node count, not a count
+// derived from hop resolution.
+func TestStoreGetAnalyticsTopology(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	topo := store.GetAnalyticsTopology("")
+	if topo == nil {
+		t.Error("expected non-nil result")
+	}
+
+	// #155: uniqueNodes must match DB 7-day active count, not hop resolution
+	stats, err := db.GetStats()
+	if err != nil {
+		t.Fatalf("GetStats failed: %v", err)
+	}
+	un, ok := topo["uniqueNodes"].(int)
+	if !ok {
+		t.Fatalf("uniqueNodes is not int: %T", topo["uniqueNodes"])
+	}
+	if un != stats.TotalNodes {
+		t.Errorf("uniqueNodes=%d should match stats totalNodes=%d", un, stats.TotalNodes)
+	}
+
+	t.Run("with region", func(t *testing.T) {
+		_ = store.GetAnalyticsTopology("SJC")
+	})
+}
+
+// TestStoreGetAnalyticsChannels checks the channel analytics result exposes
+// activeChannels, topSenders, and channelTimeline; region path runs cleanly.
+func TestStoreGetAnalyticsChannels(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	res := store.GetAnalyticsChannels("")
+	for _, key := range []string{"activeChannels", "topSenders", "channelTimeline"} {
+		if _, ok := res[key]; !ok {
+			t.Errorf("expected %s", key)
+		}
+	}
+
+	t.Run("with region", func(t *testing.T) {
+		_ = store.GetAnalyticsChannels("SJC")
+	})
+}
+
+// Regression test for #154: channelHash is a number in decoded JSON from decoder.js,
+// not a string. The Go struct must handle both types correctly.
+func TestStoreGetAnalyticsChannelsNumericHash(t *testing.T) {
+	db := setupTestDB(t)
+	defer db.Close()
+	seedTestData(t, db)
+
+	recent := time.Now().Add(-1 * time.Hour).Format(time.RFC3339)
+	recentEpoch := time.Now().Add(-1 * time.Hour).Unix()
+
+	// Insert GRP_TXT packets with numeric channelHash (matches decoder.js output)
+	// NOTE(review): transmission_id values 4-6 assume seedTestData inserted
+	// exactly three transmissions first — confirm against the seed helper.
+	db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+		VALUES ('DD01', 'grp_num_hash_1', ?, 1, 5, '{"type":"GRP_TXT","channelHash":97,"channelHashHex":"61","decryptionStatus":"no_key"}')`, recent)
+	db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+		VALUES (4, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+	db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+		VALUES ('DD02', 'grp_num_hash_2', ?, 1, 5, '{"type":"GRP_TXT","channelHash":42,"channelHashHex":"2A","decryptionStatus":"no_key"}')`, recent)
+	db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+		VALUES (5, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+	// Also a decrypted CHAN with numeric channelHash
+	db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+		VALUES ('DD03', 'chan_num_hash_3', ?, 1, 5, '{"type":"CHAN","channel":"general","channelHash":97,"channelHashHex":"61","text":"hello","sender":"Alice"}')`, recent)
+	db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+		VALUES (6, 1, 12.0, -88, '[]', ?)`, recentEpoch)
+
+	store := NewPacketStore(db)
+	store.Load()
+	result := store.GetAnalyticsChannels("")
+
+	// Use a checked type assertion so a shape change fails the test cleanly
+	// instead of panicking and killing the whole test binary.
+	channels, ok := result["channels"].([]map[string]interface{})
+	if !ok {
+		t.Fatalf("channels has unexpected type %T", result["channels"])
+	}
+	if len(channels) < 2 {
+		t.Errorf("expected at least 2 channels (hash 97 + hash 42), got %d", len(channels))
+	}
+
+	// Verify the numeric-hash channels we inserted have proper hashes (not "?")
+	found97 := false
+	found42 := false
+	for _, ch := range channels {
+		if ch["hash"] == "97" {
+			found97 = true
+		}
+		if ch["hash"] == "42" {
+			found42 = true
+		}
+	}
+	if !found97 {
+		t.Error("expected to find channel with hash '97' (numeric channelHash parsing)")
+	}
+	if !found42 {
+		t.Error("expected to find channel with hash '42' (numeric channelHash parsing)")
+	}
+
+	// Verify the decrypted CHAN channel has the correct name
+	foundGeneral := false
+	for _, ch := range channels {
+		if ch["name"] == "general" {
+			foundGeneral = true
+			if ch["hash"] != "97" {
+				t.Errorf("expected hash '97' for general channel, got %v", ch["hash"])
+			}
+		}
+	}
+	if !foundGeneral {
+		t.Error("expected to find channel named 'general'")
+	}
+}
+
+// TestStoreGetAnalyticsDistance checks distance analytics returns a non-nil
+// result and that the region-filtered path runs cleanly.
+func TestStoreGetAnalyticsDistance(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	if res := store.GetAnalyticsDistance(""); res == nil {
+		t.Error("expected non-nil result")
+	}
+
+	t.Run("with region", func(t *testing.T) {
+		_ = store.GetAnalyticsDistance("SJC")
+	})
+}
+
+// TestStoreGetSubpathDetail checks that a seeded two-hop subpath resolves to
+// a detail map containing a hops field.
+func TestStoreGetSubpathDetail(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	detail := store.GetSubpathDetail([]string{"aabb", "ccdd"})
+	if detail == nil {
+		t.Error("expected non-nil result")
+	}
+	if _, ok := detail["hops"]; !ok {
+		t.Error("expected hops field")
+	}
+}
+
+// --- Route handlers with store for analytics ---
+
+// TestHandleAnalyticsRFWithStore verifies /api/analytics/rf returns 200 on a
+// store-backed server, with and without a region filter.
+func TestHandleAnalyticsRFWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	srv := NewServer(db, &Config{Port: 3000}, NewHub())
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	t.Run("basic", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/analytics/rf", nil))
+		if rec.Code != 200 {
+			t.Fatalf("expected 200, got %d", rec.Code)
+		}
+	})
+
+	t.Run("with region", func(t *testing.T) {
+		rec := httptest.NewRecorder()
+		router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/analytics/rf?region=SJC", nil))
+		if rec.Code != 200 {
+			t.Fatalf("expected 200, got %d", rec.Code)
+		}
+	})
+}
+
+// TestHandleBulkHealthWithStore verifies /api/nodes/bulk-health returns 200
+// on a store-backed server with both limit and region parameters.
+// Fix: the query string previously contained the mojibake "®ion" (an
+// HTML-entity corruption of "&region"), so the region filter was never sent.
+func TestHandleBulkHealthWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	cfg := &Config{Port: 3000}
+	hub := NewHub()
+	srv := NewServer(db, cfg, hub)
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	req := httptest.NewRequest("GET", "/api/nodes/bulk-health?limit=50&region=SJC", nil)
+	w := httptest.NewRecorder()
+	router.ServeHTTP(w, req)
+	if w.Code != 200 {
+		t.Fatalf("expected 200, got %d", w.Code)
+	}
+}
+
+// TestHandleAnalyticsSubpathsWithStore verifies /api/analytics/subpaths
+// returns 200 on a store-backed server.
+func TestHandleAnalyticsSubpathsWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	srv := NewServer(db, &Config{Port: 3000}, NewHub())
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/analytics/subpaths?minLen=2&maxLen=4&limit=50", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// TestHandleAnalyticsSubpathDetailWithStore verifies
+// /api/analytics/subpath-detail returns 200 on a store-backed server.
+func TestHandleAnalyticsSubpathDetailWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	srv := NewServer(db, &Config{Port: 3000}, NewHub())
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/analytics/subpath-detail?hops=aabb,ccdd", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// TestHandleAnalyticsDistanceWithStore verifies /api/analytics/distance
+// returns 200 on a store-backed server.
+func TestHandleAnalyticsDistanceWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	srv := NewServer(db, &Config{Port: 3000}, NewHub())
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/analytics/distance", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// TestHandleAnalyticsHashSizesWithStore verifies /api/analytics/hash-sizes
+// returns 200 on a store-backed server.
+func TestHandleAnalyticsHashSizesWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	srv := NewServer(db, &Config{Port: 3000}, NewHub())
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/analytics/hash-sizes", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// TestHandleAnalyticsTopologyWithStore verifies /api/analytics/topology
+// returns 200 on a store-backed server.
+func TestHandleAnalyticsTopologyWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	srv := NewServer(db, &Config{Port: 3000}, NewHub())
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/analytics/topology", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// TestHandleAnalyticsChannelsWithStore verifies /api/analytics/channels
+// returns 200 on a store-backed server.
+func TestHandleAnalyticsChannelsWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	srv := NewServer(db, &Config{Port: 3000}, NewHub())
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/analytics/channels", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// --- GetChannelMessages more paths ---
+
+// TestGetChannelMessagesRichData checks that the rich seed yields at least
+// two #test messages and that each message carries sender and hops fields.
+func TestGetChannelMessagesRichData(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	msgs, total := store.GetChannelMessages("#test", 100, 0)
+	if total < 2 {
+		t.Errorf("expected >=2 messages for #test with rich data, got %d", total)
+	}
+
+	// Verify message fields
+	for _, m := range msgs {
+		if _, ok := m["sender"]; !ok {
+			t.Error("expected sender field")
+		}
+		if _, ok := m["hops"]; !ok {
+			t.Error("expected hops field")
+		}
+	}
+}
+
+// --- handleObservers with actual data ---
+
+// TestHandleObserversWithData verifies GET /api/observers returns 200 and a
+// non-empty observers array for a seeded server.
+// Fix: the json.Unmarshal error was previously ignored, so a malformed body
+// produced a misleading "expected non-empty observers" failure.
+func TestHandleObserversWithData(t *testing.T) {
+	_, router := setupTestServer(t)
+	req := httptest.NewRequest("GET", "/api/observers", nil)
+	w := httptest.NewRecorder()
+	router.ServeHTTP(w, req)
+	if w.Code != 200 {
+		t.Fatalf("expected 200, got %d", w.Code)
+	}
+	var body map[string]interface{}
+	if err := json.Unmarshal(w.Body.Bytes(), &body); err != nil {
+		t.Fatalf("invalid JSON response: %v", err)
+	}
+	observers, ok := body["observers"].([]interface{})
+	if !ok || len(observers) == 0 {
+		t.Error("expected non-empty observers")
+	}
+}
+
+// --- handleChannelMessages with store ---
+
+// TestHandleChannelMessagesWithStore verifies the URL-encoded channel route
+// /api/channels/%23test/messages returns 200 on a store-backed server.
+func TestHandleChannelMessagesWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	srv := NewServer(db, &Config{Port: 3000}, NewHub())
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/channels/%23test/messages?limit=10", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// --- handleChannels with store ---
+
+// TestHandleChannelsWithStore verifies GET /api/channels returns 200 on a
+// store-backed server.
+func TestHandleChannelsWithStore(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	srv := NewServer(db, &Config{Port: 3000}, NewHub())
+	store := NewPacketStore(db)
+	store.Load()
+	srv.store = store
+	router := mux.NewRouter()
+	srv.RegisterRoutes(router)
+
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/channels", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// --- Traces via store path ---
+
+// TestHandleTracesWithStore verifies GET /api/traces/{hash} returns 200 on
+// the seeded test server.
+func TestHandleTracesWithStore(t *testing.T) {
+	_, router := setupTestServer(t)
+	rec := httptest.NewRecorder()
+	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/traces/abc123def4567890", nil))
+	if rec.Code != 200 {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+}
+
+// --- Store.GetStoreStats ---
+
+// TestStoreGetStoreStats checks that store stats report at least one
+// transmission after seeding.
+func TestStoreGetStoreStats(t *testing.T) {
+	db := setupTestDB(t)
+	defer db.Close()
+	seedTestData(t, db)
+	store := NewPacketStore(db)
+	store.Load()
+
+	ss, err := store.GetStoreStats()
+	if err != nil {
+		t.Fatal(err)
+	}
+	if ss.TotalTransmissions < 1 {
+		t.Error("expected transmissions > 0")
+	}
+}
+
+// --- Store.QueryGroupedPackets ---
+
+// TestStoreQueryGroupedPackets checks a basic grouped query returns at least
+// one result from the seeded data.
+func TestStoreQueryGroupedPackets(t *testing.T) {
+	db := setupTestDB(t)
+	defer db.Close()
+	seedTestData(t, db)
+	store := NewPacketStore(db)
+	store.Load()
+
+	if res := store.QueryGroupedPackets(PacketQuery{Limit: 50, Order: "DESC"}); res.Total < 1 {
+		t.Error("expected >=1 grouped packets")
+	}
+}
+
+// --- Store.GetPacketByHash / GetPacketByID / GetTransmissionByID ---
+
+// TestStoreGetPacketByHash checks hash lookup for a seeded packet and nil
+// for an unknown hash.
+func TestStoreGetPacketByHash(t *testing.T) {
+	db := setupTestDB(t)
+	defer db.Close()
+	seedTestData(t, db)
+	store := NewPacketStore(db)
+	store.Load()
+
+	got := store.GetPacketByHash("abc123def4567890")
+	if got == nil {
+		t.Fatal("expected packet")
+	}
+	if got["hash"] != "abc123def4567890" {
+		t.Errorf("wrong hash: %v", got["hash"])
+	}
+
+	t.Run("not found", func(t *testing.T) {
+		if miss := store.GetPacketByHash("0000000000000000"); miss != nil {
+			t.Error("expected nil for not found")
+		}
+	})
+}
+
+// --- Coverage gap-filling tests ---
+
+// TestResolvePayloadTypeNameUnknown covers the nil, known, and unknown
+// payload-type name resolutions.
+func TestResolvePayloadTypeNameUnknown(t *testing.T) {
+	// nil → UNKNOWN
+	if got := resolvePayloadTypeName(nil); got != "UNKNOWN" {
+		t.Errorf("expected UNKNOWN for nil, got %s", got)
+	}
+	// known type
+	advert := 4
+	if got := resolvePayloadTypeName(&advert); got != "ADVERT" {
+		t.Errorf("expected ADVERT, got %s", got)
+	}
+	// unknown type → UNK(N) format
+	unknown := 99
+	if got := resolvePayloadTypeName(&unknown); got != "UNK(99)" {
+		t.Errorf("expected UNK(99), got %s", got)
+	}
+}
+
+// TestCacheHitTopology calls GetAnalyticsTopology twice and verifies the
+// second call was served from cache.
+// Fix: the "hits" type assertion is now checked (comma-ok, consistent with
+// TestStoreGetAnalyticsRFCacheHit) so a missing/mistyped key fails the test
+// instead of panicking the whole test binary.
+func TestCacheHitTopology(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	// First call — cache miss
+	r1 := store.GetAnalyticsTopology("")
+	if r1 == nil {
+		t.Fatal("expected topology result")
+	}
+
+	// Second call — cache hit
+	r2 := store.GetAnalyticsTopology("")
+	if r2 == nil {
+		t.Fatal("expected cached topology result")
+	}
+
+	stats := store.GetCacheStats()
+	hits, ok := stats["hits"].(int64)
+	if !ok {
+		t.Fatalf("hits has unexpected type %T", stats["hits"])
+	}
+	if hits < 1 {
+		t.Errorf("expected cache hit, got %d hits", hits)
+	}
+}
+
+// TestCacheHitHashSizes calls GetAnalyticsHashSizes twice and verifies the
+// second call was served from cache.
+// Fix: checked "hits" type assertion instead of a panic-prone bare assertion.
+func TestCacheHitHashSizes(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	r1 := store.GetAnalyticsHashSizes("")
+	if r1 == nil {
+		t.Fatal("expected hash sizes result")
+	}
+
+	r2 := store.GetAnalyticsHashSizes("")
+	if r2 == nil {
+		t.Fatal("expected cached hash sizes result")
+	}
+
+	stats := store.GetCacheStats()
+	hits, ok := stats["hits"].(int64)
+	if !ok {
+		t.Fatalf("hits has unexpected type %T", stats["hits"])
+	}
+	if hits < 1 {
+		t.Errorf("expected cache hit, got %d", hits)
+	}
+}
+
+// TestCacheHitChannels calls GetAnalyticsChannels twice and verifies the
+// second call was served from cache.
+// Fix: checked "hits" type assertion instead of a panic-prone bare assertion.
+func TestCacheHitChannels(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	r1 := store.GetAnalyticsChannels("")
+	if r1 == nil {
+		t.Fatal("expected channels result")
+	}
+
+	r2 := store.GetAnalyticsChannels("")
+	if r2 == nil {
+		t.Fatal("expected cached channels result")
+	}
+
+	stats := store.GetCacheStats()
+	hits, ok := stats["hits"].(int64)
+	if !ok {
+		t.Fatalf("hits has unexpected type %T", stats["hits"])
+	}
+	if hits < 1 {
+		t.Errorf("expected cache hit, got %d", hits)
+	}
+}
+
+// TestGetChannelMessagesEdgeCases exercises the pagination edge paths of
+// GetChannelMessages: unknown channel, zero limit, out-of-range offset, and
+// negative offset. Only the first and third cases assert values; the others
+// just prove the code path runs without panicking.
+func TestGetChannelMessagesEdgeCases(t *testing.T) {
+	db := setupRichTestDB(t)
+	defer db.Close()
+	store := NewPacketStore(db)
+	store.Load()
+
+	// Channel not found — empty result
+	msgs, total := store.GetChannelMessages("nonexistent_channel", 10, 0)
+	if total != 0 {
+		t.Errorf("expected 0 total for nonexistent channel, got %d", total)
+	}
+	if len(msgs) != 0 {
+		t.Errorf("expected empty msgs, got %d", len(msgs))
+	}
+
+	// Default limit (0 → 100)
+	msgs, _ = store.GetChannelMessages("#test", 0, 0)
+	_ = msgs // just exercises the default limit path
+
+	// Offset beyond range
+	msgs, total = store.GetChannelMessages("#test", 10, 9999)
+	if len(msgs) != 0 {
+		t.Errorf("expected empty msgs for large offset, got %d", len(msgs))
+	}
+	// total reflects the full message count regardless of pagination window.
+	if total == 0 {
+		t.Error("total should be > 0 even with large offset")
+	}
+
+	// Negative offset
+	msgs, _ = store.GetChannelMessages("#test", 10, -5)
+	_ = msgs // exercises the start < 0 path
+}
+
+func TestFilterPacketsEmptyRegion(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Region with no observers → empty result
+ results := store.QueryPackets(PacketQuery{Region: "NONEXISTENT", Limit: 100})
+ if results.Total != 0 {
+ t.Errorf("expected 0 results for nonexistent region, got %d", results.Total)
+ }
+}
+
+func TestFilterPacketsSinceUntil(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Since far future → empty
+ results := store.QueryPackets(PacketQuery{Since: "2099-01-01T00:00:00Z", Limit: 100})
+ if results.Total != 0 {
+ t.Errorf("expected 0 results for far future since, got %d", results.Total)
+ }
+
+ // Until far past → empty
+ results = store.QueryPackets(PacketQuery{Until: "2000-01-01T00:00:00Z", Limit: 100})
+ if results.Total != 0 {
+ t.Errorf("expected 0 results for far past until, got %d", results.Total)
+ }
+
+ // Route filter
+ rt := 1
+ results = store.QueryPackets(PacketQuery{Route: &rt, Limit: 100})
+ if results.Total == 0 {
+ t.Error("expected results for route_type=1 filter")
+ }
+}
+
+func TestFilterPacketsHashOnly(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Single hash fast-path — found
+ results := store.QueryPackets(PacketQuery{Hash: "abc123def4567890", Limit: 100})
+ if results.Total != 1 {
+ t.Errorf("expected 1 result for known hash, got %d", results.Total)
+ }
+
+ // Single hash fast-path — not found
+ results = store.QueryPackets(PacketQuery{Hash: "0000000000000000", Limit: 100})
+ if results.Total != 0 {
+ t.Errorf("expected 0 results for unknown hash, got %d", results.Total)
+ }
+}
+
+func TestFilterPacketsObserverWithType(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Observer + type filter (takes non-indexed path)
+ pt := 4
+ results := store.QueryPackets(PacketQuery{Observer: "obs1", Type: &pt, Limit: 100})
+ _ = results // exercises the combined observer+type filter path
+}
+
+func TestFilterPacketsNodeFilter(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Node filter — exercises DecodedJSON containment check
+ results := store.QueryPackets(PacketQuery{Node: "aabbccdd11223344", Limit: 100})
+ if results.Total == 0 {
+ t.Error("expected results for node filter")
+ }
+
+ // Node filter with hash combined
+ results = store.QueryPackets(PacketQuery{Node: "aabbccdd11223344", Hash: "abc123def4567890", Limit: 100})
+ _ = results
+}
+
+func TestGetNodeHashSizeInfoEdgeCases(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+
+ now := time.Now().UTC()
+ recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
+ recentEpoch := now.Add(-1 * time.Hour).Unix()
+
+ // Observers
+ db.conn.Exec(`INSERT INTO observers (id, name, iata, last_seen, first_seen, packet_count)
+ VALUES ('obs1', 'Obs', 'SJC', ?, '2026-01-01T00:00:00Z', 10)`, recent)
+
+ // Adverts with various edge cases
+ // 1. Valid advert with pubKey
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('0140aabbccdd', 'hs_valid_1', ?, 1, 4, '{"pubKey":"aabbccdd11223344","name":"NodeA","type":"ADVERT"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (1, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+ // 2. Short raw_hex (< 4 chars)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('01', 'hs_short_hex', ?, 1, 4, '{"pubKey":"eeff00112233aabb","name":"NodeB","type":"ADVERT"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (2, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+ // 3. Invalid hex in path byte position
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('01GGHHII', 'hs_bad_hex', ?, 1, 4, '{"pubKey":"1122334455667788","name":"NodeC","type":"ADVERT"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (3, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+ // 4. Invalid JSON
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('0140aabb', 'hs_bad_json', ?, 1, 4, 'not-json')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (4, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+ // 5. JSON with public_key field instead of pubKey
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('0180eeff', 'hs_alt_key', ?, 1, 4, '{"public_key":"aabbccdd11223344","name":"NodeA","type":"ADVERT"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (5, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+ // 6. JSON with no pubKey at all
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('01C0ffee', 'hs_no_pk', ?, 1, 4, '{"name":"NodeZ","type":"ADVERT"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (6, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+ // 7. Empty decoded_json
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('0140bbcc', 'hs_empty_json', ?, 1, 4, '')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (7, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+ // 8-10. Multiple adverts for same node with different hash sizes (flip-flop test)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('0140dd01', 'hs_flip_1', ?, 1, 4, '{"pubKey":"ffff000011112222","name":"Flipper","type":"ADVERT"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (8, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('0180dd02', 'hs_flip_2', ?, 1, 4, '{"pubKey":"ffff000011112222","name":"Flipper","type":"ADVERT"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (9, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('0140dd03', 'hs_flip_3', ?, 1, 4, '{"pubKey":"ffff000011112222","name":"Flipper","type":"ADVERT"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (10, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+ store := NewPacketStore(db)
+ store.Load()
+ info := store.GetNodeHashSizeInfo()
+
+ // Valid node should be present
+ if _, ok := info["aabbccdd11223344"]; !ok {
+ t.Error("expected aabbccdd11223344 in hash size info")
+ }
+
+ // Flipper should have inconsistent flag (2→3→2 = 2 transitions, 2 unique sizes, 3 obs)
+ if flipper, ok := info["ffff000011112222"]; ok {
+ if len(flipper.AllSizes) < 2 {
+ t.Errorf("expected 2+ unique sizes for flipper, got %d", len(flipper.AllSizes))
+ }
+ if !flipper.Inconsistent {
+ t.Error("expected Inconsistent=true for flip-flop node")
+ }
+ } else {
+ t.Error("expected ffff000011112222 in hash size info")
+ }
+
+ // Bad entries (short hex, bad hex, bad json, no pk) should not corrupt results
+ if _, ok := info["eeff00112233aabb"]; ok {
+ t.Error("short raw_hex node should not be in results")
+ }
+ if _, ok := info["1122334455667788"]; ok {
+ t.Error("bad hex node should not be in results")
+ }
+}
+
+func TestHandleResolveHopsEdgeCases(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ cfg := &Config{Port: 3000}
+ hub := NewHub()
+ srv := NewServer(db, cfg, hub)
+ router := mux.NewRouter()
+ srv.RegisterRoutes(router)
+
+ // Empty hops param
+ req := httptest.NewRequest("GET", "/api/resolve-hops", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Errorf("expected 200, got %d", w.Code)
+ }
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ resolved := body["resolved"].(map[string]interface{})
+ if len(resolved) != 0 {
+ t.Errorf("expected empty resolved for empty hops, got %d", len(resolved))
+ }
+
+ // Multiple hops with empty string included
+ req = httptest.NewRequest("GET", "/api/resolve-hops?hops=aabb,,eeff", nil)
+ w = httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Errorf("expected 200, got %d", w.Code)
+ }
+ json.Unmarshal(w.Body.Bytes(), &body)
+ resolved = body["resolved"].(map[string]interface{})
+ // Empty string should be skipped
+ if _, ok := resolved[""]; ok {
+ t.Error("empty hop should be skipped")
+ }
+
+ // Nonexistent prefix — zero candidates
+ req = httptest.NewRequest("GET", "/api/resolve-hops?hops=nonexistent_prefix_xyz", nil)
+ w = httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Errorf("expected 200, got %d", w.Code)
+ }
+}
+
+func TestHandleObserversError(t *testing.T) {
+ // Use a closed DB to trigger an error from GetObservers
+ db := setupTestDB(t)
+ seedTestData(t, db)
+
+ cfg := &Config{Port: 3000}
+ hub := NewHub()
+ srv := NewServer(db, cfg, hub)
+ router := mux.NewRouter()
+ srv.RegisterRoutes(router)
+ db.Close() // force error after routes registered
+
+ req := httptest.NewRequest("GET", "/api/observers", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 500 {
+ t.Errorf("expected 500 for closed DB, got %d", w.Code)
+ }
+}
+
+func TestHandleAnalyticsChannelsDBFallback(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ // Server with NO store — takes DB fallback path
+ cfg := &Config{Port: 3000}
+ hub := NewHub()
+ srv := NewServer(db, cfg, hub)
+ router := mux.NewRouter()
+ srv.RegisterRoutes(router)
+
+ req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Errorf("expected 200, got %d", w.Code)
+ }
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ if _, ok := body["activeChannels"]; !ok {
+ t.Error("expected activeChannels in DB-fallback response")
+ }
+ if _, ok := body["channels"]; !ok {
+ t.Error("expected channels in DB-fallback response")
+ }
+}
+
+func TestGetChannelMessagesDedupeRepeats(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+
+ now := time.Now().UTC()
+ recent := now.Add(-1 * time.Hour).Format(time.RFC3339)
+ recentEpoch := now.Add(-1 * time.Hour).Unix()
+
+ db.conn.Exec(`INSERT INTO observers (id, name, iata, last_seen, first_seen, packet_count)
+ VALUES ('obs1', 'Obs1', 'SJC', ?, '2026-01-01T00:00:00Z', 10)`, recent)
+ db.conn.Exec(`INSERT INTO observers (id, name, iata, last_seen, first_seen, packet_count)
+ VALUES ('obs2', 'Obs2', 'LAX', ?, '2026-01-01T00:00:00Z', 10)`, recent)
+
+ // Insert two copies of same CHAN message (same hash, different observers)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('CC01', 'dedup_chan_1', ?, 1, 5, '{"type":"CHAN","channel":"#general","text":"Alice: hello","sender":"Alice"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (1, 1, 12.0, -88, '["aa"]', ?)`, recentEpoch)
+
+ // Same sender + hash → different observation (simulates dedup)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('CC02', 'dedup_chan_1', ?, 1, 5, '{"type":"CHAN","channel":"#general","text":"Alice: hello","sender":"Alice"}')`, recent)
+ // Note: won't load due to UNIQUE constraint on hash → tests the code path with single tx having multiple obs
+
+ // Second different message
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('CC03', 'dedup_chan_2', ?, 1, 5, '{"type":"CHAN","channel":"#general","text":"Bob: world","sender":"Bob"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (2, 2, 10.0, -90, '["bb"]', ?)`, recentEpoch)
+
+ // GRP_TXT (not CHAN) — should be skipped by GetChannelMessages
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('DD01', 'grp_msg_hash_1', ?, 1, 5, '{"type":"GRP_TXT","channelHash":"42","text":"encrypted"}')`, recent)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (3, 1, 10.0, -90, '[]', ?)`, recentEpoch)
+
+ store := NewPacketStore(db)
+ store.Load()
+
+ msgs, total := store.GetChannelMessages("#general", 10, 0)
+ if total == 0 {
+ t.Error("expected messages for #general")
+ }
+
+ // Check message structure
+ for _, msg := range msgs {
+ if _, ok := msg["sender"]; !ok {
+ t.Error("expected sender field")
+ }
+ if _, ok := msg["text"]; !ok {
+ t.Error("expected text field")
+ }
+ if _, ok := msg["observers"]; !ok {
+ t.Error("expected observers field")
+ }
+ }
+}
+
+func TestTransmissionsForObserverFromSlice(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Test with from=nil (index path) — for non-existent observer
+ result := store.transmissionsForObserver("nonexistent_obs", nil)
+ if len(result) != 0 {
+ t.Errorf("expected nil/empty for nonexistent observer, got %d", len(result))
+ }
+
+ // Test with from=non-nil slice (filter path)
+ allPackets := store.packets
+ result = store.transmissionsForObserver("obs1", allPackets)
+ if len(result) == 0 {
+ t.Error("expected results for obs1 from filter path")
+ }
+}
+
+func TestGetPerfStoreStatsPublicKeyField(t *testing.T) {
+ db := setupRichTestDB(t)
+ defer db.Close()
+ store := NewPacketStore(db)
+ store.Load()
+
+ stats := store.GetPerfStoreStats()
+ indexes := stats["indexes"].(map[string]interface{})
+ // advertByObserver should count distinct pubkeys from advert packets
+ aboc := indexes["advertByObserver"].(int)
+ if aboc == 0 {
+ t.Error("expected advertByObserver > 0 for rich test DB")
+ }
+}
+
+func TestHandleAudioLabBucketsQueryError(t *testing.T) {
+ // Use closed DB to trigger query error
+ db := setupTestDB(t)
+ seedTestData(t, db)
+
+ cfg := &Config{Port: 3000}
+ hub := NewHub()
+ srv := NewServer(db, cfg, hub)
+ router := mux.NewRouter()
+ srv.RegisterRoutes(router)
+ db.Close()
+
+ req := httptest.NewRequest("GET", "/api/audio-lab/buckets", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Errorf("expected 200 (empty buckets on error), got %d", w.Code)
+ }
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ buckets := body["buckets"].(map[string]interface{})
+ if len(buckets) != 0 {
+ t.Errorf("expected empty buckets on query error, got %d", len(buckets))
+ }
+}
+
+func TestStoreGetTransmissionByID(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ pkt := store.GetTransmissionByID(1)
+ if pkt == nil {
+ t.Fatal("expected packet")
+ }
+
+ t.Run("not found", func(t *testing.T) {
+ pkt := store.GetTransmissionByID(99999)
+ if pkt != nil {
+ t.Error("expected nil")
+ }
+ })
+}
+
+func TestStoreGetPacketByID(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Get an observation ID from the store
+ var obsID int
+ for id := range store.byObsID {
+ obsID = id
+ break
+ }
+ if obsID == 0 {
+ t.Skip("no observations")
+ }
+
+ pkt := store.GetPacketByID(obsID)
+ if pkt == nil {
+ t.Fatal("expected packet")
+ }
+
+ t.Run("not found", func(t *testing.T) {
+ pkt := store.GetPacketByID(99999)
+ if pkt != nil {
+ t.Error("expected nil")
+ }
+ })
+}
+
+// --- Store.GetObservationsForHash ---
+
+func TestStoreGetObservationsForHash(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ obs := store.GetObservationsForHash("abc123def4567890")
+ if len(obs) < 1 {
+ t.Error("expected >=1 observation")
+ }
+
+ t.Run("not found", func(t *testing.T) {
+ obs := store.GetObservationsForHash("0000000000000000")
+ if len(obs) != 0 {
+ t.Errorf("expected 0, got %d", len(obs))
+ }
+ })
+}
+
+// --- Store.GetNewTransmissionsSince ---
+
+func TestStoreGetNewTransmissionsSince(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ txs, err := db.GetNewTransmissionsSince(0, 100)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(txs) < 1 {
+ t.Error("expected >=1 transmission")
+ }
+}
+
+// --- HandlePacketDetail with store (by hash, by tx ID, by obs ID) ---
+
+func TestHandlePacketDetailWithStoreAllPaths(t *testing.T) {
+ _, router := setupTestServer(t)
+
+ t.Run("by hash", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets/abc123def4567890", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d: %s", w.Code, w.Body.String())
+ }
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+ if body["observations"] == nil {
+ t.Error("expected observations")
+ }
+ })
+
+ t.Run("by tx ID", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets/1", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+ })
+
+ t.Run("not found ID", func(t *testing.T) {
+ req := httptest.NewRequest("GET", "/api/packets/999999", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 404 {
+ t.Fatalf("expected 404, got %d", w.Code)
+ }
+ })
+}
+
+// --- Additional DB function coverage ---
+
+func TestDBGetNewTransmissionsSince(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ txs, err := db.GetNewTransmissionsSince(0, 100)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(txs) < 1 {
+ t.Error("expected >=1 transmissions")
+ }
+}
+
+func TestDBGetNetworkStatus(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ cfg := &Config{}
+ ht := cfg.GetHealthThresholds()
+ result, err := db.GetNetworkStatus(ht)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result == nil {
+ t.Error("expected non-nil result")
+ }
+}
+
+func TestDBGetObserverByID(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ obs, err := db.GetObserverByID("obs1")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if obs == nil {
+ t.Error("expected non-nil observer")
+ }
+ if obs.ID != "obs1" {
+ t.Errorf("expected obs1, got %s", obs.ID)
+ }
+
+ t.Run("not found", func(t *testing.T) {
+ obs, err := db.GetObserverByID("nonexistent")
+ if err == nil && obs != nil {
+ t.Error("expected nil observer for nonexistent ID")
+ }
+ // Some implementations return (nil, err) — that's fine too
+ })
+}
+
+func TestDBGetTraces(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ traces, err := db.GetTraces("abc123def4567890")
+ if err != nil {
+ t.Fatal(err)
+ }
+ _ = traces
+}
+
+// --- DB queries with different filter combos ---
+
+func TestDBQueryPacketsAllFilters(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ pt := 4
+ rt := 1
+ result, err := db.QueryPackets(PacketQuery{
+ Limit: 50,
+ Type: &pt,
+ Route: &rt,
+ Observer: "obs1",
+ Hash: "abc123def4567890",
+ Since: "2020-01-01",
+ Until: "2099-01-01",
+ Region: "SJC",
+ Node: "TestRepeater",
+ Order: "ASC",
+ })
+ if err != nil {
+ t.Fatal(err)
+ }
+ _ = result
+}
+
+// --- IngestNewFromDB dedup path ---
+
+func TestIngestNewFromDBDuplicateObs(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ initialMax := store.MaxTransmissionID()
+
+ // Insert new transmission with same hash as existing (should merge into existing tx)
+ now := time.Now().UTC().Format(time.RFC3339)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('AABB', 'dedup_test_hash_01', ?, 1, 4, '{"pubKey":"aabbccdd11223344","type":"ADVERT"}')`, now)
+ newTxID := 0
+ db.conn.QueryRow("SELECT MAX(id) FROM transmissions").Scan(&newTxID)
+
+ // Add observation
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (?, 1, 11.0, -89, '["dd"]', ?)`, newTxID, time.Now().Unix())
+ // Add duplicate observation (same observer_id + path_json)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (?, 1, 11.0, -89, '["dd"]', ?)`, newTxID, time.Now().Unix())
+
+ _, newMax := store.IngestNewFromDB(initialMax, 100)
+ if newMax <= initialMax {
+ t.Errorf("expected newMax > %d, got %d", initialMax, newMax)
+ }
+}
+
+// --- IngestNewObservations (fixes #174) ---
+
+func TestIngestNewObservations(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ // Get initial observation count for transmission 1 (hash abc123def4567890)
+ initialTx := store.byHash["abc123def4567890"]
+ if initialTx == nil {
+ t.Fatal("expected to find transmission abc123def4567890 in store")
+ }
+ initialObsCount := initialTx.ObservationCount
+ if initialObsCount != 2 {
+ t.Fatalf("expected 2 initial observations, got %d", initialObsCount)
+ }
+
+ // Record the max obs ID after initial load
+ maxObsID := db.GetMaxObservationID()
+
+ // Simulate a new observation arriving for the existing transmission AFTER
+ // the poller has already advanced past its transmission ID
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (1, 2, 5.0, -100, '["aa","bb","cc"]', ?)`, time.Now().Unix())
+
+ // Verify IngestNewFromDB does NOT pick up the new observation (tx id hasn't changed)
+ txMax := store.MaxTransmissionID()
+ _, newTxMax := store.IngestNewFromDB(txMax, 100)
+ if initialTx.ObservationCount != initialObsCount {
+ t.Errorf("IngestNewFromDB should not have changed obs count, was %d now %d",
+ initialObsCount, initialTx.ObservationCount)
+ }
+ _ = newTxMax
+
+ // IngestNewObservations should pick it up
+ newObsMaps := store.IngestNewObservations(maxObsID, 500)
+ if len(newObsMaps) != 1 {
+ t.Errorf("expected 1 observation broadcast map, got %d", len(newObsMaps))
+ }
+ if initialTx.ObservationCount != initialObsCount+1 {
+ t.Errorf("expected obs count %d, got %d", initialObsCount+1, initialTx.ObservationCount)
+ }
+ if len(initialTx.Observations) != initialObsCount+1 {
+ t.Errorf("expected %d observations slice len, got %d", initialObsCount+1, len(initialTx.Observations))
+ }
+
+ // Best observation should have been re-picked (new obs has longer path)
+ if initialTx.PathJSON != `["aa","bb","cc"]` {
+ t.Errorf("expected best path to be updated to longer path, got %s", initialTx.PathJSON)
+ }
+
+ t.Run("no new observations", func(t *testing.T) {
+ maps := store.IngestNewObservations(db.GetMaxObservationID(), 500)
+ if maps != nil {
+ t.Errorf("expected nil maps for no new observations, got %d", len(maps))
+ }
+ })
+
+ t.Run("dedup by observer+path", func(t *testing.T) {
+ // Insert duplicate observation (same observer + path as existing)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (1, 1, 12.5, -90, '["aa","bb"]', ?)`, time.Now().Unix())
+ prevCount := initialTx.ObservationCount
+ maps := store.IngestNewObservations(db.GetMaxObservationID()-1, 500)
+ if initialTx.ObservationCount != prevCount {
+ t.Errorf("duplicate obs should not increase count, was %d now %d",
+ prevCount, initialTx.ObservationCount)
+ }
+ if len(maps) != 0 {
+ t.Errorf("expected 0 broadcast maps for duplicate obs, got %d", len(maps))
+ }
+ })
+
+ t.Run("default limit", func(t *testing.T) {
+ _ = store.IngestNewObservations(db.GetMaxObservationID(), 0)
+ })
+}
+
+func TestIngestNewObservationsV2(t *testing.T) {
+ db := setupTestDBv2(t)
+ defer db.Close()
+ seedV2Data(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ tx := store.byHash["abc123def4567890"]
+ if tx == nil {
+ t.Fatal("expected to find transmission in store")
+ }
+ initialCount := tx.ObservationCount
+
+ maxObsID := db.GetMaxObservationID()
+
+ // Add new observation for existing transmission
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_id, observer_name, snr, rssi, path_json, timestamp)
+ VALUES (1, 'obs2', 'Obs Two', 6.0, -98, '["dd","ee"]', ?)`, time.Now().Unix())
+
+ newMaps := store.IngestNewObservations(maxObsID, 500)
+ if len(newMaps) != 1 {
+ t.Errorf("expected 1 observation broadcast map, got %d", len(newMaps))
+ }
+ if tx.ObservationCount != initialCount+1 {
+ t.Errorf("expected obs count %d, got %d", initialCount+1, tx.ObservationCount)
+ }
+}
+
+func TestGetMaxObservationID(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+
+ maxID := db.GetMaxObservationID()
+ if maxID != 0 {
+ t.Errorf("expected 0 for empty table, got %d", maxID)
+ }
+
+ seedTestData(t, db)
+ maxID = db.GetMaxObservationID()
+ if maxID <= 0 {
+ t.Errorf("expected positive max obs ID, got %d", maxID)
+ }
+}
+
+// --- perfMiddleware with endpoint normalization ---
+
+func TestPerfMiddlewareEndpointNormalization(t *testing.T) {
+ _, router := setupTestServer(t)
+
+ // Hit a route with a hex hash — should normalize to :id
+ req := httptest.NewRequest("GET", "/api/packets/abc123def4567890", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ // The hex id should have been normalized in perf stats
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+// --- handleNodeAnalytics edge cases ---
+
+func TestHandleNodeAnalyticsNameless(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ // Insert a node without a name
+ db.conn.Exec(`INSERT INTO nodes (public_key, role, lat, lon, last_seen, first_seen, advert_count)
+ VALUES ('nameless_node_pk_1', 'repeater', 37.5, -122.0, ?, '2026-01-01', 1)`,
+ time.Now().UTC().Format(time.RFC3339))
+
+ cfg := &Config{Port: 3000}
+ hub := NewHub()
+ srv := NewServer(db, cfg, hub)
+ store := NewPacketStore(db)
+ store.Load()
+ srv.store = store
+ router := mux.NewRouter()
+ srv.RegisterRoutes(router)
+
+ req := httptest.NewRequest("GET", "/api/nodes/nameless_node_pk_1/analytics?days=1", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ if w.Code != 200 {
+ t.Fatalf("expected 200, got %d", w.Code)
+ }
+}
+
+// --- PerfStats overflow (>100 recent entries) ---
+
+func TestPerfStatsRecentOverflow(t *testing.T) {
+ _, router := setupTestServer(t)
+ // Hit an endpoint 120 times to overflow the Recent buffer (capped at 100)
+ for i := 0; i < 120; i++ {
+ req := httptest.NewRequest("GET", fmt.Sprintf("/api/health?i=%d", i), nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ }
+}
+
+// --- handleAudioLabBuckets ---
+
+func TestHandleAudioLabBucketsNoStore(t *testing.T) {
+ _, router := setupNoStoreServer(t)
+ req := httptest.NewRequest("GET", "/api/audio-lab/buckets", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+ // Just verify no crash
+}
+
+// --- Store region filter paths ---
+
+func TestStoreQueryPacketsRegionFilter(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ result := store.QueryPackets(PacketQuery{Region: "SJC", Limit: 50, Order: "DESC"})
+ _ = result
+
+ result2 := store.QueryPackets(PacketQuery{Region: "NONEXIST", Limit: 50, Order: "DESC"})
+ if result2.Total != 0 {
+ t.Errorf("expected 0 for non-existent region, got %d", result2.Total)
+ }
+}
+
+// --- DB.GetObserverIdsForRegion ---
+
+func TestDBGetObserverIdsForRegion(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ ids, err := db.GetObserverIdsForRegion("SJC")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(ids) == 0 {
+ t.Error("expected observer IDs for SJC")
+ }
+
+ ids2, err := db.GetObserverIdsForRegion("NONEXIST")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(ids2) != 0 {
+ t.Errorf("expected 0 for NONEXIST, got %d", len(ids2))
+ }
+}
+
+// --- DB.GetDistinctIATAs ---
+
+func TestDBGetDistinctIATAs(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ iatas, err := db.GetDistinctIATAs()
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(iatas) == 0 {
+ t.Error("expected at least one IATA code")
+ }
+}
+
+// --- DB.SearchNodes ---
+
+func TestDBSearchNodes(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ nodes, err := db.SearchNodes("Test", 10)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(nodes) == 0 {
+ t.Error("expected nodes matching 'Test'")
+ }
+}
+
+// --- Ensure non-panic on GetDBSizeStats with path ---
+
+func TestGetDBSizeStatsMemory(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+
+ stats := db.GetDBSizeStats()
+ if stats["dbSizeMB"] != float64(0) {
+ t.Errorf("expected 0 for in-memory, got %v", stats["dbSizeMB"])
+ }
+}
+
+// Regression test for #198: channel messages must include newly ingested packets.
+// byPayloadType must maintain newest-first ordering after IngestNewFromDB so that
+// GetChannelMessages reverse iteration returns the latest messages.
+func TestGetChannelMessagesAfterIngest(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ store.Load()
+
+ initialMax := store.MaxTransmissionID()
+
+ // Get baseline message count
+ _, totalBefore := store.GetChannelMessages("#test", 100, 0)
+
+ // Insert a new channel message into the DB (newer than anything loaded)
+ now := time.Now().UTC()
+ nowStr := now.Format(time.RFC3339)
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('FF01', 'newchannelmsg19800', ?, 1, 5, '{"type":"CHAN","channel":"#test","text":"NewUser: brand new message","sender":"NewUser"}')`, nowStr)
+ newTxID := 0
+ db.conn.QueryRow("SELECT MAX(id) FROM transmissions").Scan(&newTxID)
+ db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (?, 1, 12.0, -88, '[]', ?)`, newTxID, now.Unix())
+
+ // Ingest the new data
+ _, newMax := store.IngestNewFromDB(initialMax, 100)
+ if newMax <= initialMax {
+ t.Fatalf("ingest did not advance maxID: %d -> %d", initialMax, newMax)
+ }
+
+ // GetChannelMessages must now include the new message
+ msgs, totalAfter := store.GetChannelMessages("#test", 100, 0)
+ if totalAfter <= totalBefore {
+ t.Errorf("expected more messages after ingest: before=%d after=%d", totalBefore, totalAfter)
+ }
+
+ // The newest message (last in the returned slice) must be the one we just inserted
+ if len(msgs) == 0 {
+ t.Fatal("expected at least one message")
+ }
+ lastMsg := msgs[len(msgs)-1]
+ if lastMsg["text"] != "brand new message" {
+ t.Errorf("newest message should be 'brand new message', got %q", lastMsg["text"])
+ }
+}
diff --git a/cmd/server/db_test.go b/cmd/server/db_test.go
index 05fa785..d571b8d 100644
--- a/cmd/server/db_test.go
+++ b/cmd/server/db_test.go
@@ -17,6 +17,8 @@ func setupTestDB(t *testing.T) *DB {
if err != nil {
t.Fatal(err)
}
+ // Force single connection so all goroutines share the same in-memory DB
+ conn.SetMaxOpenConns(1)
// Create schema matching MeshCore Analyzer v3
schema := `
diff --git a/cmd/server/decoder.go b/cmd/server/decoder.go
index fd51a7e..8058e02 100644
--- a/cmd/server/decoder.go
+++ b/cmd/server/decoder.go
@@ -54,8 +54,8 @@ type Header struct {
// TransportCodes are present on TRANSPORT_FLOOD and TRANSPORT_DIRECT routes.
type TransportCodes struct {
- NextHop string `json:"nextHop"`
- LastHop string `json:"lastHop"`
+ Code1 string `json:"code1"`
+ Code2 string `json:"code2"`
}
// Path holds decoded path/hop information.
@@ -74,6 +74,8 @@ type AdvertFlags struct {
Room bool `json:"room"`
Sensor bool `json:"sensor"`
HasLocation bool `json:"hasLocation"`
+ HasFeat1 bool `json:"hasFeat1"`
+ HasFeat2 bool `json:"hasFeat2"`
HasName bool `json:"hasName"`
}
@@ -97,6 +99,8 @@ type Payload struct {
EphemeralPubKey string `json:"ephemeralPubKey,omitempty"`
PathData string `json:"pathData,omitempty"`
Tag uint32 `json:"tag,omitempty"`
+ AuthCode uint32 `json:"authCode,omitempty"`
+ TraceFlags *int `json:"traceFlags,omitempty"`
RawHex string `json:"raw,omitempty"`
Error string `json:"error,omitempty"`
}
@@ -173,14 +177,13 @@ func decodeEncryptedPayload(typeName string, buf []byte) Payload {
}
func decodeAck(buf []byte) Payload {
- if len(buf) < 6 {
+ if len(buf) < 4 {
return Payload{Type: "ACK", Error: "too short", RawHex: hex.EncodeToString(buf)}
}
+ checksum := binary.LittleEndian.Uint32(buf[0:4])
return Payload{
Type: "ACK",
- DestHash: hex.EncodeToString(buf[0:1]),
- SrcHash: hex.EncodeToString(buf[1:2]),
- ExtraHash: hex.EncodeToString(buf[2:6]),
+ ExtraHash: fmt.Sprintf("%08x", checksum),
}
}
@@ -205,6 +208,8 @@ func decodeAdvert(buf []byte) Payload {
if len(appdata) > 0 {
flags := appdata[0]
advType := int(flags & 0x0F)
+ hasFeat1 := flags&0x20 != 0
+ hasFeat2 := flags&0x40 != 0
p.Flags = &AdvertFlags{
Raw: int(flags),
Type: advType,
@@ -213,6 +218,8 @@ func decodeAdvert(buf []byte) Payload {
Room: advType == 3,
Sensor: advType == 4,
HasLocation: flags&0x10 != 0,
+ HasFeat1: hasFeat1,
+ HasFeat2: hasFeat2,
HasName: flags&0x80 != 0,
}
@@ -226,6 +233,12 @@ func decodeAdvert(buf []byte) Payload {
p.Lon = &lon
off += 8
}
+ if hasFeat1 && len(appdata) >= off+2 {
+ off += 2 // skip feat1 bytes (reserved for future use)
+ }
+ if hasFeat2 && len(appdata) >= off+2 {
+ off += 2 // skip feat2 bytes (reserved for future use)
+ }
if p.Flags.HasName {
name := string(appdata[off:])
name = strings.TrimRight(name, "\x00")
@@ -276,15 +289,22 @@ func decodePathPayload(buf []byte) Payload {
}
func decodeTrace(buf []byte) Payload {
- if len(buf) < 12 {
+ if len(buf) < 9 {
return Payload{Type: "TRACE", Error: "too short", RawHex: hex.EncodeToString(buf)}
}
- return Payload{
- Type: "TRACE",
- DestHash: hex.EncodeToString(buf[5:11]),
- SrcHash: hex.EncodeToString(buf[11:12]),
- Tag: binary.LittleEndian.Uint32(buf[1:5]),
+ tag := binary.LittleEndian.Uint32(buf[0:4])
+ authCode := binary.LittleEndian.Uint32(buf[4:8])
+ flags := int(buf[8])
+ p := Payload{
+ Type: "TRACE",
+ Tag: tag,
+ AuthCode: authCode,
+ TraceFlags: &flags,
}
+ if len(buf) > 9 {
+ p.PathData = hex.EncodeToString(buf[9:])
+ }
+ return p
}
func decodePayload(payloadType int, buf []byte) Payload {
@@ -327,8 +347,7 @@ func DecodePacket(hexString string) (*DecodedPacket, error) {
}
header := decodeHeader(buf[0])
- pathByte := buf[1]
- offset := 2
+ offset := 1
var tc *TransportCodes
if isTransportRoute(header.RouteType) {
@@ -336,12 +355,18 @@ func DecodePacket(hexString string) (*DecodedPacket, error) {
return nil, fmt.Errorf("packet too short for transport codes")
}
tc = &TransportCodes{
- NextHop: strings.ToUpper(hex.EncodeToString(buf[offset : offset+2])),
- LastHop: strings.ToUpper(hex.EncodeToString(buf[offset+2 : offset+4])),
+ Code1: strings.ToUpper(hex.EncodeToString(buf[offset : offset+2])),
+ Code2: strings.ToUpper(hex.EncodeToString(buf[offset+2 : offset+4])),
}
offset += 4
}
+ if offset >= len(buf) {
+ return nil, fmt.Errorf("packet too short (no path byte)")
+ }
+ pathByte := buf[offset]
+ offset++
+
path, bytesConsumed := decodePath(pathByte, buf, offset)
offset += bytesConsumed
@@ -367,16 +392,24 @@ func ComputeContentHash(rawHex string) string {
return rawHex
}
- pathByte := buf[1]
+ headerByte := buf[0]
+ offset := 1
+ if isTransportRoute(int(headerByte & 0x03)) {
+ offset += 4
+ }
+ if offset >= len(buf) {
+ if len(rawHex) >= 16 {
+ return rawHex[:16]
+ }
+ return rawHex
+ }
+ pathByte := buf[offset]
+ offset++
hashSize := int((pathByte>>6)&0x3) + 1
hashCount := int(pathByte & 0x3F)
pathBytes := hashSize * hashCount
- headerByte := buf[0]
- payloadStart := 2 + pathBytes
- if isTransportRoute(int(headerByte & 0x03)) {
- payloadStart += 4
- }
+ payloadStart := offset + pathBytes
if payloadStart > len(buf) {
if len(rawHex) >= 16 {
return rawHex[:16]
diff --git a/cmd/server/parity_test.go b/cmd/server/parity_test.go
index 270eb31..497d2ea 100644
--- a/cmd/server/parity_test.go
+++ b/cmd/server/parity_test.go
@@ -1,403 +1,506 @@
-package main
-
-// parity_test.go — Golden fixture shape tests.
-// Validates that Go API responses match the shape of Node.js API responses.
-// Shapes were captured from the production Node.js server and stored in
-// testdata/golden/shapes.json.
-
-import (
- "encoding/json"
- "fmt"
- "net/http/httptest"
- "os"
- "path/filepath"
- "runtime"
- "strings"
- "testing"
-)
-
-// shapeSpec describes the expected JSON structure from the Node.js server.
-type shapeSpec struct {
- Type string `json:"type"`
- Keys map[string]shapeSpec `json:"keys,omitempty"`
- ElementShape *shapeSpec `json:"elementShape,omitempty"`
- DynamicKeys bool `json:"dynamicKeys,omitempty"`
- ValueShape *shapeSpec `json:"valueShape,omitempty"`
- RequiredKeys map[string]shapeSpec `json:"requiredKeys,omitempty"`
-}
-
-// loadShapes reads testdata/golden/shapes.json relative to this source file.
-func loadShapes(t *testing.T) map[string]shapeSpec {
- t.Helper()
- _, thisFile, _, _ := runtime.Caller(0)
- dir := filepath.Dir(thisFile)
- data, err := os.ReadFile(filepath.Join(dir, "testdata", "golden", "shapes.json"))
- if err != nil {
- t.Fatalf("cannot load shapes.json: %v", err)
- }
- var shapes map[string]shapeSpec
- if err := json.Unmarshal(data, &shapes); err != nil {
- t.Fatalf("cannot parse shapes.json: %v", err)
- }
- return shapes
-}
-
-// validateShape recursively checks that `actual` matches the expected `spec`.
-// `path` tracks the JSON path for error messages.
-// Returns a list of mismatch descriptions.
-func validateShape(actual interface{}, spec shapeSpec, path string) []string {
- var errs []string
-
- switch spec.Type {
- case "null", "nullable":
- // nullable means: value can be null OR matching type. Accept anything.
- return nil
- case "nullable_number":
- // Can be null or number
- if actual != nil {
- if _, ok := actual.(float64); !ok {
- errs = append(errs, fmt.Sprintf("%s: expected number or null, got %T", path, actual))
- }
- }
- return errs
- case "string":
- if actual == nil {
- errs = append(errs, fmt.Sprintf("%s: expected string, got null", path))
- } else if _, ok := actual.(string); !ok {
- errs = append(errs, fmt.Sprintf("%s: expected string, got %T", path, actual))
- }
- case "number":
- if actual == nil {
- errs = append(errs, fmt.Sprintf("%s: expected number, got null", path))
- } else if _, ok := actual.(float64); !ok {
- errs = append(errs, fmt.Sprintf("%s: expected number, got %T (%v)", path, actual, actual))
- }
- case "boolean":
- if actual == nil {
- errs = append(errs, fmt.Sprintf("%s: expected boolean, got null", path))
- } else if _, ok := actual.(bool); !ok {
- errs = append(errs, fmt.Sprintf("%s: expected boolean, got %T", path, actual))
- }
- case "array":
- if actual == nil {
- errs = append(errs, fmt.Sprintf("%s: expected array, got null (arrays must be [] not null)", path))
- return errs
- }
- arr, ok := actual.([]interface{})
- if !ok {
- errs = append(errs, fmt.Sprintf("%s: expected array, got %T", path, actual))
- return errs
- }
- if spec.ElementShape != nil && len(arr) > 0 {
- errs = append(errs, validateShape(arr[0], *spec.ElementShape, path+"[0]")...)
- }
- case "object":
- if actual == nil {
- errs = append(errs, fmt.Sprintf("%s: expected object, got null", path))
- return errs
- }
- obj, ok := actual.(map[string]interface{})
- if !ok {
- errs = append(errs, fmt.Sprintf("%s: expected object, got %T", path, actual))
- return errs
- }
-
- if spec.DynamicKeys {
- // Object with dynamic keys — validate value shapes
- if spec.ValueShape != nil && len(obj) > 0 {
- for k, v := range obj {
- errs = append(errs, validateShape(v, *spec.ValueShape, path+"."+k)...)
- break // check just one sample
- }
- }
- if spec.RequiredKeys != nil {
- for rk, rs := range spec.RequiredKeys {
- v, exists := obj[rk]
- if !exists {
- errs = append(errs, fmt.Sprintf("%s: missing required key %q in dynamic-key object", path, rk))
- } else {
- errs = append(errs, validateShape(v, rs, path+"."+rk)...)
- }
- }
- }
- } else if spec.Keys != nil {
- // Object with known keys — check each expected key exists and has correct type
- for key, keySpec := range spec.Keys {
- val, exists := obj[key]
- if !exists {
- errs = append(errs, fmt.Sprintf("%s: missing field %q (expected %s)", path, key, keySpec.Type))
- } else {
- errs = append(errs, validateShape(val, keySpec, path+"."+key)...)
- }
- }
- }
- }
-
- return errs
-}
-
-// parityEndpoint defines one endpoint to test for parity.
-type parityEndpoint struct {
- name string // key in shapes.json
- path string // HTTP path to request
-}
-
-func TestParityShapes(t *testing.T) {
- shapes := loadShapes(t)
- _, router := setupTestServer(t)
-
- endpoints := []parityEndpoint{
- {"stats", "/api/stats"},
- {"nodes", "/api/nodes?limit=5"},
- {"packets", "/api/packets?limit=5"},
- {"packets_grouped", "/api/packets?limit=5&groupByHash=true"},
- {"observers", "/api/observers"},
- {"channels", "/api/channels"},
- {"channel_messages", "/api/channels/0000000000000000/messages?limit=5"},
- {"analytics_rf", "/api/analytics/rf?days=7"},
- {"analytics_topology", "/api/analytics/topology?days=7"},
- {"analytics_hash_sizes", "/api/analytics/hash-sizes?days=7"},
- {"analytics_distance", "/api/analytics/distance?days=7"},
- {"analytics_subpaths", "/api/analytics/subpaths?days=7"},
- {"bulk_health", "/api/nodes/bulk-health"},
- {"health", "/api/health"},
- {"perf", "/api/perf"},
- }
-
- for _, ep := range endpoints {
- t.Run("Parity_"+ep.name, func(t *testing.T) {
- spec, ok := shapes[ep.name]
- if !ok {
- t.Fatalf("no shape spec found for %q in shapes.json", ep.name)
- }
-
- req := httptest.NewRequest("GET", ep.path, nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
-
- if w.Code != 200 {
- t.Fatalf("GET %s returned %d, expected 200. Body: %s",
- ep.path, w.Code, w.Body.String())
- }
-
- var body interface{}
- if err := json.Unmarshal(w.Body.Bytes(), &body); err != nil {
- t.Fatalf("GET %s returned invalid JSON: %v\nBody: %s",
- ep.path, err, w.Body.String())
- }
-
- mismatches := validateShape(body, spec, ep.path)
- if len(mismatches) > 0 {
- t.Errorf("Go %s has %d shape mismatches vs Node.js golden:\n %s",
- ep.path, len(mismatches), strings.Join(mismatches, "\n "))
- }
- })
- }
-}
-
-// TestParityNodeDetail tests node detail endpoint shape.
-// Uses a known test node public key from seeded data.
-func TestParityNodeDetail(t *testing.T) {
- shapes := loadShapes(t)
- _, router := setupTestServer(t)
-
- spec, ok := shapes["node_detail"]
- if !ok {
- t.Fatal("no shape spec for node_detail in shapes.json")
- }
-
- req := httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
-
- if w.Code != 200 {
- t.Fatalf("node detail returned %d: %s", w.Code, w.Body.String())
- }
-
- var body interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
-
- mismatches := validateShape(body, spec, "/api/nodes/{pubkey}")
- if len(mismatches) > 0 {
- t.Errorf("Go node detail has %d shape mismatches vs Node.js golden:\n %s",
- len(mismatches), strings.Join(mismatches, "\n "))
- }
-}
-
-// TestParityArraysNotNull verifies that array-typed fields in Go responses are
-// [] (empty array) rather than null. This is a common Go/JSON pitfall where
-// nil slices marshal as null instead of [].
-// Uses shapes.json to know which fields SHOULD be arrays.
-func TestParityArraysNotNull(t *testing.T) {
- shapes := loadShapes(t)
- _, router := setupTestServer(t)
-
- endpoints := []struct {
- name string
- path string
- }{
- {"stats", "/api/stats"},
- {"nodes", "/api/nodes?limit=5"},
- {"packets", "/api/packets?limit=5"},
- {"packets_grouped", "/api/packets?limit=5&groupByHash=true"},
- {"observers", "/api/observers"},
- {"channels", "/api/channels"},
- {"bulk_health", "/api/nodes/bulk-health"},
- {"analytics_rf", "/api/analytics/rf?days=7"},
- {"analytics_topology", "/api/analytics/topology?days=7"},
- {"analytics_hash_sizes", "/api/analytics/hash-sizes?days=7"},
- {"analytics_distance", "/api/analytics/distance?days=7"},
- {"analytics_subpaths", "/api/analytics/subpaths?days=7"},
- }
-
- for _, ep := range endpoints {
- t.Run("NullArrayCheck_"+ep.name, func(t *testing.T) {
- spec, ok := shapes[ep.name]
- if !ok {
- t.Skipf("no shape spec for %s", ep.name)
- }
-
- req := httptest.NewRequest("GET", ep.path, nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
-
- if w.Code != 200 {
- t.Skipf("GET %s returned %d, skipping null-array check", ep.path, w.Code)
- }
-
- var body interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
-
- nullArrays := findNullArrays(body, spec, ep.path)
- if len(nullArrays) > 0 {
- t.Errorf("Go %s has null where [] expected:\n %s\n"+
- "Go nil slices marshal as null — initialize with make() or literal",
- ep.path, strings.Join(nullArrays, "\n "))
- }
- })
- }
-}
-
-// findNullArrays walks JSON data alongside a shape spec and returns paths
-// where the spec says the field should be an array but Go returned null.
-func findNullArrays(actual interface{}, spec shapeSpec, path string) []string {
- var nulls []string
-
- switch spec.Type {
- case "array":
- if actual == nil {
- nulls = append(nulls, fmt.Sprintf("%s: null (should be [])", path))
- } else if arr, ok := actual.([]interface{}); ok && spec.ElementShape != nil {
- for i, elem := range arr {
- nulls = append(nulls, findNullArrays(elem, *spec.ElementShape, fmt.Sprintf("%s[%d]", path, i))...)
- }
- }
- case "object":
- obj, ok := actual.(map[string]interface{})
- if !ok || obj == nil {
- return nulls
- }
- if spec.Keys != nil {
- for key, keySpec := range spec.Keys {
- if val, exists := obj[key]; exists {
- nulls = append(nulls, findNullArrays(val, keySpec, path+"."+key)...)
- } else if keySpec.Type == "array" {
- // Key missing entirely — also a null-array problem
- nulls = append(nulls, fmt.Sprintf("%s.%s: missing (should be [])", path, key))
- }
- }
- }
- if spec.DynamicKeys && spec.ValueShape != nil {
- for k, v := range obj {
- nulls = append(nulls, findNullArrays(v, *spec.ValueShape, path+"."+k)...)
- break // sample one
- }
- }
- }
-
- return nulls
-}
-
-// TestParityHealthEngine verifies Go health endpoint declares engine=go
-// while Node declares engine=node (or omits it). The Go server must always
-// identify itself.
-func TestParityHealthEngine(t *testing.T) {
- _, router := setupTestServer(t)
-
- req := httptest.NewRequest("GET", "/api/health", nil)
- w := httptest.NewRecorder()
- router.ServeHTTP(w, req)
-
- var body map[string]interface{}
- json.Unmarshal(w.Body.Bytes(), &body)
-
- engine, ok := body["engine"]
- if !ok {
- t.Error("health response missing 'engine' field (Go server must include engine=go)")
- } else if engine != "go" {
- t.Errorf("health engine=%v, expected 'go'", engine)
- }
-}
-
-// TestValidateShapeFunction directly tests the shape validator itself.
-func TestValidateShapeFunction(t *testing.T) {
- t.Run("string match", func(t *testing.T) {
- errs := validateShape("hello", shapeSpec{Type: "string"}, "$.x")
- if len(errs) != 0 {
- t.Errorf("unexpected errors: %v", errs)
- }
- })
-
- t.Run("string mismatch", func(t *testing.T) {
- errs := validateShape(42.0, shapeSpec{Type: "string"}, "$.x")
- if len(errs) != 1 {
- t.Errorf("expected 1 error, got %d: %v", len(errs), errs)
- }
- })
-
- t.Run("null array rejected", func(t *testing.T) {
- errs := validateShape(nil, shapeSpec{Type: "array"}, "$.arr")
- if len(errs) != 1 || !strings.Contains(errs[0], "null") {
- t.Errorf("expected null-array error, got: %v", errs)
- }
- })
-
- t.Run("empty array OK", func(t *testing.T) {
- errs := validateShape([]interface{}{}, shapeSpec{Type: "array"}, "$.arr")
- if len(errs) != 0 {
- t.Errorf("unexpected errors for empty array: %v", errs)
- }
- })
-
- t.Run("missing object key", func(t *testing.T) {
- spec := shapeSpec{Type: "object", Keys: map[string]shapeSpec{
- "name": {Type: "string"},
- "age": {Type: "number"},
- }}
- obj := map[string]interface{}{"name": "test"}
- errs := validateShape(obj, spec, "$.user")
- if len(errs) != 1 || !strings.Contains(errs[0], "age") {
- t.Errorf("expected missing age error, got: %v", errs)
- }
- })
-
- t.Run("nullable allows null", func(t *testing.T) {
- errs := validateShape(nil, shapeSpec{Type: "nullable"}, "$.x")
- if len(errs) != 0 {
- t.Errorf("nullable should accept null: %v", errs)
- }
- })
-
- t.Run("dynamic keys validates value shape", func(t *testing.T) {
- spec := shapeSpec{
- Type: "object",
- DynamicKeys: true,
- ValueShape: &shapeSpec{Type: "number"},
- }
- obj := map[string]interface{}{"a": 1.0, "b": 2.0}
- errs := validateShape(obj, spec, "$.dyn")
- if len(errs) != 0 {
- t.Errorf("unexpected errors: %v", errs)
- }
- })
-}
+package main
+
+// parity_test.go — Golden fixture shape tests.
+// Validates that Go API responses match the shape of Node.js API responses.
+// Shapes were captured from the production Node.js server and stored in
+// testdata/golden/shapes.json.
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http/httptest"
+ "os"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strings"
+ "testing"
+ "time"
+)
+
+// shapeSpec describes the expected JSON structure from the Node.js server.
+type shapeSpec struct {
+ Type string `json:"type"`
+ Keys map[string]shapeSpec `json:"keys,omitempty"`
+ ElementShape *shapeSpec `json:"elementShape,omitempty"`
+ DynamicKeys bool `json:"dynamicKeys,omitempty"`
+ ValueShape *shapeSpec `json:"valueShape,omitempty"`
+ RequiredKeys map[string]shapeSpec `json:"requiredKeys,omitempty"`
+}
+
+// loadShapes reads testdata/golden/shapes.json relative to this source file.
+func loadShapes(t *testing.T) map[string]shapeSpec {
+ t.Helper()
+ _, thisFile, _, _ := runtime.Caller(0)
+ dir := filepath.Dir(thisFile)
+ data, err := os.ReadFile(filepath.Join(dir, "testdata", "golden", "shapes.json"))
+ if err != nil {
+ t.Fatalf("cannot load shapes.json: %v", err)
+ }
+ var shapes map[string]shapeSpec
+ if err := json.Unmarshal(data, &shapes); err != nil {
+ t.Fatalf("cannot parse shapes.json: %v", err)
+ }
+ return shapes
+}
+
+// validateShape recursively checks that `actual` matches the expected `spec`.
+// `path` tracks the JSON path for error messages.
+// Returns a list of mismatch descriptions.
+func validateShape(actual interface{}, spec shapeSpec, path string) []string {
+ var errs []string
+
+ switch spec.Type {
+ case "null", "nullable":
+ // nullable means: value can be null OR matching type. Accept anything.
+ return nil
+ case "nullable_number":
+ // Can be null or number
+ if actual != nil {
+ if _, ok := actual.(float64); !ok {
+ errs = append(errs, fmt.Sprintf("%s: expected number or null, got %T", path, actual))
+ }
+ }
+ return errs
+ case "string":
+ if actual == nil {
+ errs = append(errs, fmt.Sprintf("%s: expected string, got null", path))
+ } else if _, ok := actual.(string); !ok {
+ errs = append(errs, fmt.Sprintf("%s: expected string, got %T", path, actual))
+ }
+ case "number":
+ if actual == nil {
+ errs = append(errs, fmt.Sprintf("%s: expected number, got null", path))
+ } else if _, ok := actual.(float64); !ok {
+ errs = append(errs, fmt.Sprintf("%s: expected number, got %T (%v)", path, actual, actual))
+ }
+ case "boolean":
+ if actual == nil {
+ errs = append(errs, fmt.Sprintf("%s: expected boolean, got null", path))
+ } else if _, ok := actual.(bool); !ok {
+ errs = append(errs, fmt.Sprintf("%s: expected boolean, got %T", path, actual))
+ }
+ case "array":
+ if actual == nil {
+ errs = append(errs, fmt.Sprintf("%s: expected array, got null (arrays must be [] not null)", path))
+ return errs
+ }
+ arr, ok := actual.([]interface{})
+ if !ok {
+ errs = append(errs, fmt.Sprintf("%s: expected array, got %T", path, actual))
+ return errs
+ }
+ if spec.ElementShape != nil && len(arr) > 0 {
+ errs = append(errs, validateShape(arr[0], *spec.ElementShape, path+"[0]")...)
+ }
+ case "object":
+ if actual == nil {
+ errs = append(errs, fmt.Sprintf("%s: expected object, got null", path))
+ return errs
+ }
+ obj, ok := actual.(map[string]interface{})
+ if !ok {
+ errs = append(errs, fmt.Sprintf("%s: expected object, got %T", path, actual))
+ return errs
+ }
+
+ if spec.DynamicKeys {
+ // Object with dynamic keys — validate value shapes
+ if spec.ValueShape != nil && len(obj) > 0 {
+ for k, v := range obj {
+ errs = append(errs, validateShape(v, *spec.ValueShape, path+"."+k)...)
+ break // check just one sample
+ }
+ }
+ if spec.RequiredKeys != nil {
+ for rk, rs := range spec.RequiredKeys {
+ v, exists := obj[rk]
+ if !exists {
+ errs = append(errs, fmt.Sprintf("%s: missing required key %q in dynamic-key object", path, rk))
+ } else {
+ errs = append(errs, validateShape(v, rs, path+"."+rk)...)
+ }
+ }
+ }
+ } else if spec.Keys != nil {
+ // Object with known keys — check each expected key exists and has correct type
+ for key, keySpec := range spec.Keys {
+ val, exists := obj[key]
+ if !exists {
+ errs = append(errs, fmt.Sprintf("%s: missing field %q (expected %s)", path, key, keySpec.Type))
+ } else {
+ errs = append(errs, validateShape(val, keySpec, path+"."+key)...)
+ }
+ }
+ }
+ }
+
+ return errs
+}
+
+// parityEndpoint defines one endpoint to test for parity.
+type parityEndpoint struct {
+ name string // key in shapes.json
+ path string // HTTP path to request
+}
+
+func TestParityShapes(t *testing.T) {
+ shapes := loadShapes(t)
+ _, router := setupTestServer(t)
+
+ endpoints := []parityEndpoint{
+ {"stats", "/api/stats"},
+ {"nodes", "/api/nodes?limit=5"},
+ {"packets", "/api/packets?limit=5"},
+ {"packets_grouped", "/api/packets?limit=5&groupByHash=true"},
+ {"observers", "/api/observers"},
+ {"channels", "/api/channels"},
+ {"channel_messages", "/api/channels/0000000000000000/messages?limit=5"},
+ {"analytics_rf", "/api/analytics/rf?days=7"},
+ {"analytics_topology", "/api/analytics/topology?days=7"},
+ {"analytics_hash_sizes", "/api/analytics/hash-sizes?days=7"},
+ {"analytics_distance", "/api/analytics/distance?days=7"},
+ {"analytics_subpaths", "/api/analytics/subpaths?days=7"},
+ {"bulk_health", "/api/nodes/bulk-health"},
+ {"health", "/api/health"},
+ {"perf", "/api/perf"},
+ }
+
+ for _, ep := range endpoints {
+ t.Run("Parity_"+ep.name, func(t *testing.T) {
+ spec, ok := shapes[ep.name]
+ if !ok {
+ t.Fatalf("no shape spec found for %q in shapes.json", ep.name)
+ }
+
+ req := httptest.NewRequest("GET", ep.path, nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ if w.Code != 200 {
+ t.Fatalf("GET %s returned %d, expected 200. Body: %s",
+ ep.path, w.Code, w.Body.String())
+ }
+
+ var body interface{}
+ if err := json.Unmarshal(w.Body.Bytes(), &body); err != nil {
+ t.Fatalf("GET %s returned invalid JSON: %v\nBody: %s",
+ ep.path, err, w.Body.String())
+ }
+
+ mismatches := validateShape(body, spec, ep.path)
+ if len(mismatches) > 0 {
+ t.Errorf("Go %s has %d shape mismatches vs Node.js golden:\n %s",
+ ep.path, len(mismatches), strings.Join(mismatches, "\n "))
+ }
+ })
+ }
+}
+
+// TestParityNodeDetail tests node detail endpoint shape.
+// Uses a known test node public key from seeded data.
+func TestParityNodeDetail(t *testing.T) {
+ shapes := loadShapes(t)
+ _, router := setupTestServer(t)
+
+ spec, ok := shapes["node_detail"]
+ if !ok {
+ t.Fatal("no shape spec for node_detail in shapes.json")
+ }
+
+ req := httptest.NewRequest("GET", "/api/nodes/aabbccdd11223344", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ if w.Code != 200 {
+ t.Fatalf("node detail returned %d: %s", w.Code, w.Body.String())
+ }
+
+ var body interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+
+ mismatches := validateShape(body, spec, "/api/nodes/{pubkey}")
+ if len(mismatches) > 0 {
+ t.Errorf("Go node detail has %d shape mismatches vs Node.js golden:\n %s",
+ len(mismatches), strings.Join(mismatches, "\n "))
+ }
+}
+
+// TestParityArraysNotNull verifies that array-typed fields in Go responses are
+// [] (empty array) rather than null. This is a common Go/JSON pitfall where
+// nil slices marshal as null instead of [].
+// Uses shapes.json to know which fields SHOULD be arrays.
+func TestParityArraysNotNull(t *testing.T) {
+ shapes := loadShapes(t)
+ _, router := setupTestServer(t)
+
+ endpoints := []struct {
+ name string
+ path string
+ }{
+ {"stats", "/api/stats"},
+ {"nodes", "/api/nodes?limit=5"},
+ {"packets", "/api/packets?limit=5"},
+ {"packets_grouped", "/api/packets?limit=5&groupByHash=true"},
+ {"observers", "/api/observers"},
+ {"channels", "/api/channels"},
+ {"bulk_health", "/api/nodes/bulk-health"},
+ {"analytics_rf", "/api/analytics/rf?days=7"},
+ {"analytics_topology", "/api/analytics/topology?days=7"},
+ {"analytics_hash_sizes", "/api/analytics/hash-sizes?days=7"},
+ {"analytics_distance", "/api/analytics/distance?days=7"},
+ {"analytics_subpaths", "/api/analytics/subpaths?days=7"},
+ }
+
+ for _, ep := range endpoints {
+ t.Run("NullArrayCheck_"+ep.name, func(t *testing.T) {
+ spec, ok := shapes[ep.name]
+ if !ok {
+ t.Skipf("no shape spec for %s", ep.name)
+ }
+
+ req := httptest.NewRequest("GET", ep.path, nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ if w.Code != 200 {
+ t.Skipf("GET %s returned %d, skipping null-array check", ep.path, w.Code)
+ }
+
+ var body interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+
+ nullArrays := findNullArrays(body, spec, ep.path)
+ if len(nullArrays) > 0 {
+ t.Errorf("Go %s has null where [] expected:\n %s\n"+
+ "Go nil slices marshal as null — initialize with make() or literal",
+ ep.path, strings.Join(nullArrays, "\n "))
+ }
+ })
+ }
+}
+
+// findNullArrays walks JSON data alongside a shape spec and returns paths
+// where the spec says the field should be an array but Go returned null.
+func findNullArrays(actual interface{}, spec shapeSpec, path string) []string {
+ var nulls []string
+
+ switch spec.Type {
+ case "array":
+ if actual == nil {
+ nulls = append(nulls, fmt.Sprintf("%s: null (should be [])", path))
+ } else if arr, ok := actual.([]interface{}); ok && spec.ElementShape != nil {
+ for i, elem := range arr {
+ nulls = append(nulls, findNullArrays(elem, *spec.ElementShape, fmt.Sprintf("%s[%d]", path, i))...)
+ }
+ }
+ case "object":
+ obj, ok := actual.(map[string]interface{})
+ if !ok || obj == nil {
+ return nulls
+ }
+ if spec.Keys != nil {
+ for key, keySpec := range spec.Keys {
+ if val, exists := obj[key]; exists {
+ nulls = append(nulls, findNullArrays(val, keySpec, path+"."+key)...)
+ } else if keySpec.Type == "array" {
+ // Key missing entirely — also a null-array problem
+ nulls = append(nulls, fmt.Sprintf("%s.%s: missing (should be [])", path, key))
+ }
+ }
+ }
+ if spec.DynamicKeys && spec.ValueShape != nil {
+ for k, v := range obj {
+ nulls = append(nulls, findNullArrays(v, *spec.ValueShape, path+"."+k)...)
+ break // sample one
+ }
+ }
+ }
+
+ return nulls
+}
+
+// TestParityHealthEngine verifies Go health endpoint declares engine=go
+// while Node declares engine=node (or omits it). The Go server must always
+// identify itself.
+func TestParityHealthEngine(t *testing.T) {
+ _, router := setupTestServer(t)
+
+ req := httptest.NewRequest("GET", "/api/health", nil)
+ w := httptest.NewRecorder()
+ router.ServeHTTP(w, req)
+
+ var body map[string]interface{}
+ json.Unmarshal(w.Body.Bytes(), &body)
+
+ engine, ok := body["engine"]
+ if !ok {
+ t.Error("health response missing 'engine' field (Go server must include engine=go)")
+ } else if engine != "go" {
+ t.Errorf("health engine=%v, expected 'go'", engine)
+ }
+}
+
+// TestValidateShapeFunction directly tests the shape validator itself.
+func TestValidateShapeFunction(t *testing.T) {
+ t.Run("string match", func(t *testing.T) {
+ errs := validateShape("hello", shapeSpec{Type: "string"}, "$.x")
+ if len(errs) != 0 {
+ t.Errorf("unexpected errors: %v", errs)
+ }
+ })
+
+ t.Run("string mismatch", func(t *testing.T) {
+ errs := validateShape(42.0, shapeSpec{Type: "string"}, "$.x")
+ if len(errs) != 1 {
+ t.Errorf("expected 1 error, got %d: %v", len(errs), errs)
+ }
+ })
+
+ t.Run("null array rejected", func(t *testing.T) {
+ errs := validateShape(nil, shapeSpec{Type: "array"}, "$.arr")
+ if len(errs) != 1 || !strings.Contains(errs[0], "null") {
+ t.Errorf("expected null-array error, got: %v", errs)
+ }
+ })
+
+ t.Run("empty array OK", func(t *testing.T) {
+ errs := validateShape([]interface{}{}, shapeSpec{Type: "array"}, "$.arr")
+ if len(errs) != 0 {
+ t.Errorf("unexpected errors for empty array: %v", errs)
+ }
+ })
+
+ t.Run("missing object key", func(t *testing.T) {
+ spec := shapeSpec{Type: "object", Keys: map[string]shapeSpec{
+ "name": {Type: "string"},
+ "age": {Type: "number"},
+ }}
+ obj := map[string]interface{}{"name": "test"}
+ errs := validateShape(obj, spec, "$.user")
+ if len(errs) != 1 || !strings.Contains(errs[0], "age") {
+ t.Errorf("expected missing age error, got: %v", errs)
+ }
+ })
+
+ t.Run("nullable allows null", func(t *testing.T) {
+ errs := validateShape(nil, shapeSpec{Type: "nullable"}, "$.x")
+ if len(errs) != 0 {
+ t.Errorf("nullable should accept null: %v", errs)
+ }
+ })
+
+ t.Run("dynamic keys validates value shape", func(t *testing.T) {
+ spec := shapeSpec{
+ Type: "object",
+ DynamicKeys: true,
+ ValueShape: &shapeSpec{Type: "number"},
+ }
+ obj := map[string]interface{}{"a": 1.0, "b": 2.0}
+ errs := validateShape(obj, spec, "$.dyn")
+ if len(errs) != 0 {
+ t.Errorf("unexpected errors: %v", errs)
+ }
+ })
+}
+
+func TestParityWSMultiObserverGolden(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ hub := NewHub()
+ store := NewPacketStore(db)
+ if err := store.Load(); err != nil {
+ t.Fatalf("store load failed: %v", err)
+ }
+
+ poller := NewPoller(db, hub, 50*time.Millisecond)
+ poller.store = store
+
+ client := &Client{send: make(chan []byte, 256)}
+ hub.Register(client)
+ defer hub.Unregister(client)
+
+ go poller.Start()
+ defer poller.Stop()
+
+ // Wait for poller to initialize its lastID/lastObsID cursors before
+ // inserting new data; otherwise the poller may snapshot a lastID that
+ // already includes the test data and never broadcast it.
+ time.Sleep(100 * time.Millisecond)
+
+ now := time.Now().UTC().Format(time.RFC3339)
+ if _, err := db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('BEEF', 'goldenstarburst237', ?, 1, 4, '{"pubKey":"aabbccdd11223344","type":"ADVERT"}')`, now); err != nil {
+ t.Fatalf("insert tx failed: %v", err)
+ }
+ var txID int
+ if err := db.conn.QueryRow(`SELECT id FROM transmissions WHERE hash='goldenstarburst237'`).Scan(&txID); err != nil {
+ t.Fatalf("query tx id failed: %v", err)
+ }
+ ts := time.Now().Unix()
+ if _, err := db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (?, 1, 11.0, -88, '["p1"]', ?),
+ (?, 2, 9.0, -92, '["p1","p2"]', ?),
+ (?, 1, 7.0, -96, '["p1","p2","p3"]', ?)`,
+ txID, ts, txID, ts+1, txID, ts+2); err != nil {
+ t.Fatalf("insert obs failed: %v", err)
+ }
+
+ type golden struct {
+ Hash string
+ Count int
+ Paths []string
+ ObserverIDs []string
+ }
+ expected := golden{
+ Hash: "goldenstarburst237",
+ Count: 3,
+ Paths: []string{`["p1"]`, `["p1","p2"]`, `["p1","p2","p3"]`},
+ ObserverIDs: []string{"obs1", "obs2"},
+ }
+
+ gotPaths := make([]string, 0, expected.Count)
+ gotObservers := make(map[string]bool)
+ deadline := time.After(2 * time.Second)
+ for len(gotPaths) < expected.Count {
+ select {
+ case raw := <-client.send:
+ var msg map[string]interface{}
+ if err := json.Unmarshal(raw, &msg); err != nil {
+ t.Fatalf("unmarshal ws message failed: %v", err)
+ }
+ if msg["type"] != "packet" {
+ continue
+ }
+ data, _ := msg["data"].(map[string]interface{})
+ if data == nil || data["hash"] != expected.Hash {
+ continue
+ }
+ if path, ok := data["path_json"].(string); ok {
+ gotPaths = append(gotPaths, path)
+ }
+ if oid, ok := data["observer_id"].(string); ok && oid != "" {
+ gotObservers[oid] = true
+ }
+ case <-deadline:
+ t.Fatalf("timed out waiting for %d ws messages, got %d", expected.Count, len(gotPaths))
+ }
+ }
+
+ sort.Strings(gotPaths)
+ sort.Strings(expected.Paths)
+ if len(gotPaths) != len(expected.Paths) {
+ t.Fatalf("path count mismatch: got %d want %d", len(gotPaths), len(expected.Paths))
+ }
+ for i := range expected.Paths {
+ if gotPaths[i] != expected.Paths[i] {
+ t.Fatalf("path mismatch at %d: got %q want %q", i, gotPaths[i], expected.Paths[i])
+ }
+ }
+ for _, oid := range expected.ObserverIDs {
+ if !gotObservers[oid] {
+ t.Fatalf("missing expected observer %q in ws messages", oid)
+ }
+ }
+}
diff --git a/cmd/server/store.go b/cmd/server/store.go
index d8ce1bc..05d40d3 100644
--- a/cmd/server/store.go
+++ b/cmd/server/store.go
@@ -1039,7 +1039,7 @@ func (s *PacketStore) IngestNewFromDB(sinceID, limit int) ([]map[string]interfac
}
}
- // Build broadcast maps (same shape as Node.js WS broadcast)
+ // Build broadcast maps (same shape as Node.js WS broadcast), one per observation.
result := make([]map[string]interface{}, 0, len(broadcastOrder))
for _, txID := range broadcastOrder {
tx := broadcastTxs[txID]
@@ -1055,32 +1055,34 @@ func (s *PacketStore) IngestNewFromDB(sinceID, limit int) ([]map[string]interfac
decoded["payload"] = payload
}
}
- // Build the nested packet object (packets.js checks m.data.packet)
- pkt := map[string]interface{}{
- "id": tx.ID,
- "raw_hex": strOrNil(tx.RawHex),
- "hash": strOrNil(tx.Hash),
- "first_seen": strOrNil(tx.FirstSeen),
- "timestamp": strOrNil(tx.FirstSeen),
- "route_type": intPtrOrNil(tx.RouteType),
- "payload_type": intPtrOrNil(tx.PayloadType),
- "decoded_json": strOrNil(tx.DecodedJSON),
- "observer_id": strOrNil(tx.ObserverID),
- "observer_name": strOrNil(tx.ObserverName),
- "snr": floatPtrOrNil(tx.SNR),
- "rssi": floatPtrOrNil(tx.RSSI),
- "path_json": strOrNil(tx.PathJSON),
- "direction": strOrNil(tx.Direction),
- "observation_count": tx.ObservationCount,
+ for _, obs := range tx.Observations {
+ // Build the nested packet object (packets.js checks m.data.packet)
+ pkt := map[string]interface{}{
+ "id": tx.ID,
+ "raw_hex": strOrNil(tx.RawHex),
+ "hash": strOrNil(tx.Hash),
+ "first_seen": strOrNil(tx.FirstSeen),
+ "timestamp": strOrNil(tx.FirstSeen),
+ "route_type": intPtrOrNil(tx.RouteType),
+ "payload_type": intPtrOrNil(tx.PayloadType),
+ "decoded_json": strOrNil(tx.DecodedJSON),
+ "observer_id": strOrNil(obs.ObserverID),
+ "observer_name": strOrNil(obs.ObserverName),
+ "snr": floatPtrOrNil(obs.SNR),
+ "rssi": floatPtrOrNil(obs.RSSI),
+ "path_json": strOrNil(obs.PathJSON),
+ "direction": strOrNil(obs.Direction),
+ "observation_count": tx.ObservationCount,
+ }
+ // Broadcast map: top-level fields for live.js + nested packet for packets.js
+ broadcastMap := make(map[string]interface{}, len(pkt)+2)
+ for k, v := range pkt {
+ broadcastMap[k] = v
+ }
+ broadcastMap["decoded"] = decoded
+ broadcastMap["packet"] = pkt
+ result = append(result, broadcastMap)
}
- // Broadcast map: top-level fields for live.js + nested packet for packets.js
- broadcastMap := make(map[string]interface{}, len(pkt)+2)
- for k, v := range pkt {
- broadcastMap[k] = v
- }
- broadcastMap["decoded"] = decoded
- broadcastMap["packet"] = pkt
- result = append(result, broadcastMap)
}
// Invalidate analytics caches since new data was ingested
@@ -1101,7 +1103,7 @@ func (s *PacketStore) IngestNewFromDB(sinceID, limit int) ([]map[string]interfac
// IngestNewObservations loads new observations for transmissions already in the
// store. This catches observations that arrive after IngestNewFromDB has already
// advanced past the transmission's ID (fixes #174).
-func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) int {
+func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) []map[string]interface{} {
if limit <= 0 {
limit = 500
}
@@ -1127,7 +1129,7 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) int {
rows, err := s.db.conn.Query(querySQL, sinceObsID, limit)
if err != nil {
log.Printf("[store] ingest observations query error: %v", err)
- return sinceObsID
+ return nil
}
defer rows.Close()
@@ -1170,20 +1172,16 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) int {
}
if len(obsRows) == 0 {
- return sinceObsID
+ return nil
}
s.mu.Lock()
defer s.mu.Unlock()
- newMaxObsID := sinceObsID
updatedTxs := make(map[int]*StoreTx)
+ broadcastMaps := make([]map[string]interface{}, 0, len(obsRows))
for _, r := range obsRows {
- if r.obsID > newMaxObsID {
- newMaxObsID = r.obsID
- }
-
// Already ingested (e.g. by IngestNewFromDB in same cycle)
if _, exists := s.byObsID[r.obsID]; exists {
continue
@@ -1226,6 +1224,43 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) int {
}
s.totalObs++
updatedTxs[r.txID] = tx
+
+ decoded := map[string]interface{}{
+ "header": map[string]interface{}{
+ "payloadTypeName": resolvePayloadTypeName(tx.PayloadType),
+ },
+ }
+ if tx.DecodedJSON != "" {
+ var payload map[string]interface{}
+ if json.Unmarshal([]byte(tx.DecodedJSON), &payload) == nil {
+ decoded["payload"] = payload
+ }
+ }
+
+ pkt := map[string]interface{}{
+ "id": tx.ID,
+ "raw_hex": strOrNil(tx.RawHex),
+ "hash": strOrNil(tx.Hash),
+ "first_seen": strOrNil(tx.FirstSeen),
+ "timestamp": strOrNil(tx.FirstSeen),
+ "route_type": intPtrOrNil(tx.RouteType),
+ "payload_type": intPtrOrNil(tx.PayloadType),
+ "decoded_json": strOrNil(tx.DecodedJSON),
+ "observer_id": strOrNil(obs.ObserverID),
+ "observer_name": strOrNil(obs.ObserverName),
+ "snr": floatPtrOrNil(obs.SNR),
+ "rssi": floatPtrOrNil(obs.RSSI),
+ "path_json": strOrNil(obs.PathJSON),
+ "direction": strOrNil(obs.Direction),
+ "observation_count": tx.ObservationCount,
+ }
+ broadcastMap := make(map[string]interface{}, len(pkt)+2)
+ for k, v := range pkt {
+ broadcastMap[k] = v
+ }
+ broadcastMap["decoded"] = decoded
+ broadcastMap["packet"] = pkt
+ broadcastMaps = append(broadcastMaps, broadcastMap)
}
// Re-pick best observation for updated transmissions and update subpath index
@@ -1280,7 +1315,7 @@ func (s *PacketStore) IngestNewObservations(sinceObsID, limit int) int {
// analytics caches cleared; no per-cycle log to avoid stdout overhead
}
- return newMaxObsID
+ return broadcastMaps
}
// MaxTransmissionID returns the highest transmission ID in the store.
diff --git a/cmd/server/websocket.go b/cmd/server/websocket.go
index d8cb19d..e4696bc 100644
--- a/cmd/server/websocket.go
+++ b/cmd/server/websocket.go
@@ -1,229 +1,245 @@
-package main
-
-import (
- "encoding/json"
- "log"
- "net/http"
- "strings"
- "sync"
- "time"
-
- "github.com/gorilla/websocket"
-)
-
-var upgrader = websocket.Upgrader{
- ReadBufferSize: 1024,
- WriteBufferSize: 4096,
- CheckOrigin: func(r *http.Request) bool { return true },
-}
-
-// Hub manages WebSocket clients and broadcasts.
-type Hub struct {
- mu sync.RWMutex
- clients map[*Client]bool
-}
-
-// Client is a single WebSocket connection.
-type Client struct {
- conn *websocket.Conn
- send chan []byte
-}
-
-func NewHub() *Hub {
- return &Hub{
- clients: make(map[*Client]bool),
- }
-}
-
-func (h *Hub) ClientCount() int {
- h.mu.RLock()
- defer h.mu.RUnlock()
- return len(h.clients)
-}
-
-func (h *Hub) Register(c *Client) {
- h.mu.Lock()
- h.clients[c] = true
- h.mu.Unlock()
- log.Printf("[ws] client connected (%d total)", h.ClientCount())
-}
-
-func (h *Hub) Unregister(c *Client) {
- h.mu.Lock()
- if _, ok := h.clients[c]; ok {
- delete(h.clients, c)
- close(c.send)
- }
- h.mu.Unlock()
- log.Printf("[ws] client disconnected (%d total)", h.ClientCount())
-}
-
-// Broadcast sends a message to all connected clients.
-func (h *Hub) Broadcast(msg interface{}) {
- data, err := json.Marshal(msg)
- if err != nil {
- log.Printf("[ws] marshal error: %v", err)
- return
- }
- h.mu.RLock()
- defer h.mu.RUnlock()
- for c := range h.clients {
- select {
- case c.send <- data:
- default:
- // Client buffer full — drop
- }
- }
-}
-
-// ServeWS handles the WebSocket upgrade and runs the client.
-func (h *Hub) ServeWS(w http.ResponseWriter, r *http.Request) {
- conn, err := upgrader.Upgrade(w, r, nil)
- if err != nil {
- log.Printf("[ws] upgrade error: %v", err)
- return
- }
-
- client := &Client{
- conn: conn,
- send: make(chan []byte, 256),
- }
- h.Register(client)
-
- go client.writePump()
- go client.readPump(h)
-}
-
-// wsOrStatic upgrades WebSocket requests at any path, serves static files otherwise.
-func wsOrStatic(hub *Hub, static http.Handler) http.Handler {
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- if strings.EqualFold(r.Header.Get("Upgrade"), "websocket") {
- hub.ServeWS(w, r)
- return
- }
- static.ServeHTTP(w, r)
- })
-}
-
-func (c *Client) readPump(hub *Hub) {
- defer func() {
- hub.Unregister(c)
- c.conn.Close()
- }()
- c.conn.SetReadLimit(512)
- c.conn.SetReadDeadline(time.Now().Add(60 * time.Second))
- c.conn.SetPongHandler(func(string) error {
- c.conn.SetReadDeadline(time.Now().Add(60 * time.Second))
- return nil
- })
- for {
- _, _, err := c.conn.ReadMessage()
- if err != nil {
- break
- }
- }
-}
-
-func (c *Client) writePump() {
- ticker := time.NewTicker(30 * time.Second)
- defer func() {
- ticker.Stop()
- c.conn.Close()
- }()
- for {
- select {
- case message, ok := <-c.send:
- c.conn.SetWriteDeadline(time.Now().Add(10 * time.Second))
- if !ok {
- c.conn.WriteMessage(websocket.CloseMessage, []byte{})
- return
- }
- if err := c.conn.WriteMessage(websocket.TextMessage, message); err != nil {
- return
- }
- case <-ticker.C:
- c.conn.SetWriteDeadline(time.Now().Add(10 * time.Second))
- if err := c.conn.WriteMessage(websocket.PingMessage, nil); err != nil {
- return
- }
- }
- }
-}
-
-// Poller watches for new transmissions in SQLite and broadcasts them.
-type Poller struct {
- db *DB
- hub *Hub
- store *PacketStore // optional: if set, new transmissions are ingested into memory
- interval time.Duration
- stop chan struct{}
-}
-
-func NewPoller(db *DB, hub *Hub, interval time.Duration) *Poller {
- return &Poller{db: db, hub: hub, interval: interval, stop: make(chan struct{})}
-}
-
-func (p *Poller) Start() {
- lastID := p.db.GetMaxTransmissionID()
- lastObsID := p.db.GetMaxObservationID()
- log.Printf("[poller] starting from transmission ID %d, obs ID %d, interval %v", lastID, lastObsID, p.interval)
-
- ticker := time.NewTicker(p.interval)
- defer ticker.Stop()
-
- for {
- select {
- case <-ticker.C:
- if p.store != nil {
- // Ingest new transmissions into in-memory store and broadcast
- newTxs, newMax := p.store.IngestNewFromDB(lastID, 100)
- if newMax > lastID {
- lastID = newMax
- }
- // Ingest new observations for existing transmissions (fixes #174)
- newObsMax := p.store.IngestNewObservations(lastObsID, 500)
- if newObsMax > lastObsID {
- lastObsID = newObsMax
- }
- if len(newTxs) > 0 {
- log.Printf("[broadcast] sending %d packets to %d clients (lastID now %d)", len(newTxs), p.hub.ClientCount(), lastID)
- }
- for _, tx := range newTxs {
- p.hub.Broadcast(WSMessage{
- Type: "packet",
- Data: tx,
- })
- }
- } else {
- // Fallback: direct DB query (used when store is nil, e.g. tests)
- newTxs, err := p.db.GetNewTransmissionsSince(lastID, 100)
- if err != nil {
- log.Printf("[poller] error: %v", err)
- continue
- }
- for _, tx := range newTxs {
- id, _ := tx["id"].(int)
- if id > lastID {
- lastID = id
- }
- // Copy packet fields for the nested packet (avoids circular ref)
- pkt := make(map[string]interface{}, len(tx))
- for k, v := range tx {
- pkt[k] = v
- }
- tx["packet"] = pkt
- p.hub.Broadcast(WSMessage{
- Type: "packet",
- Data: tx,
- })
- }
- }
- case <-p.stop:
- return
- }
- }
-}
-
-func (p *Poller) Stop() {
- close(p.stop)
-}
+package main
+
+import (
+ "encoding/json"
+ "log"
+ "net/http"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/gorilla/websocket"
+)
+
+var upgrader = websocket.Upgrader{
+ ReadBufferSize: 1024,
+ WriteBufferSize: 4096,
+ CheckOrigin: func(r *http.Request) bool { return true },
+}
+
+// Hub manages WebSocket clients and broadcasts.
+type Hub struct {
+ mu sync.RWMutex
+ clients map[*Client]bool
+}
+
+// Client is a single WebSocket connection.
+type Client struct {
+ conn *websocket.Conn
+ send chan []byte
+}
+
+func NewHub() *Hub {
+ return &Hub{
+ clients: make(map[*Client]bool),
+ }
+}
+
+func (h *Hub) ClientCount() int {
+ h.mu.RLock()
+ defer h.mu.RUnlock()
+ return len(h.clients)
+}
+
+func (h *Hub) Register(c *Client) {
+ h.mu.Lock()
+ h.clients[c] = true
+ h.mu.Unlock()
+ log.Printf("[ws] client connected (%d total)", h.ClientCount())
+}
+
+func (h *Hub) Unregister(c *Client) {
+ h.mu.Lock()
+ if _, ok := h.clients[c]; ok {
+ delete(h.clients, c)
+ close(c.send)
+ }
+ h.mu.Unlock()
+ log.Printf("[ws] client disconnected (%d total)", h.ClientCount())
+}
+
+// Broadcast sends a message to all connected clients.
+func (h *Hub) Broadcast(msg interface{}) {
+ data, err := json.Marshal(msg)
+ if err != nil {
+ log.Printf("[ws] marshal error: %v", err)
+ return
+ }
+ h.mu.RLock()
+ defer h.mu.RUnlock()
+ for c := range h.clients {
+ select {
+ case c.send <- data:
+ default:
+ // Client buffer full — drop
+ }
+ }
+}
+
+// ServeWS handles the WebSocket upgrade and runs the client.
+func (h *Hub) ServeWS(w http.ResponseWriter, r *http.Request) {
+ conn, err := upgrader.Upgrade(w, r, nil)
+ if err != nil {
+ log.Printf("[ws] upgrade error: %v", err)
+ return
+ }
+
+ client := &Client{
+ conn: conn,
+ send: make(chan []byte, 256),
+ }
+ h.Register(client)
+
+ go client.writePump()
+ go client.readPump(h)
+}
+
+// wsOrStatic upgrades WebSocket requests at any path, serves static files otherwise.
+func wsOrStatic(hub *Hub, static http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if strings.EqualFold(r.Header.Get("Upgrade"), "websocket") {
+ hub.ServeWS(w, r)
+ return
+ }
+ static.ServeHTTP(w, r)
+ })
+}
+
+func (c *Client) readPump(hub *Hub) {
+ defer func() {
+ hub.Unregister(c)
+ c.conn.Close()
+ }()
+ c.conn.SetReadLimit(512)
+ c.conn.SetReadDeadline(time.Now().Add(60 * time.Second))
+ c.conn.SetPongHandler(func(string) error {
+ c.conn.SetReadDeadline(time.Now().Add(60 * time.Second))
+ return nil
+ })
+ for {
+ _, _, err := c.conn.ReadMessage()
+ if err != nil {
+ break
+ }
+ }
+}
+
+func (c *Client) writePump() {
+ ticker := time.NewTicker(30 * time.Second)
+ defer func() {
+ ticker.Stop()
+ c.conn.Close()
+ }()
+ for {
+ select {
+ case message, ok := <-c.send:
+ c.conn.SetWriteDeadline(time.Now().Add(10 * time.Second))
+ if !ok {
+ c.conn.WriteMessage(websocket.CloseMessage, []byte{})
+ return
+ }
+ if err := c.conn.WriteMessage(websocket.TextMessage, message); err != nil {
+ return
+ }
+ case <-ticker.C:
+ c.conn.SetWriteDeadline(time.Now().Add(10 * time.Second))
+ if err := c.conn.WriteMessage(websocket.PingMessage, nil); err != nil {
+ return
+ }
+ }
+ }
+}
+
+// Poller watches for new transmissions in SQLite and broadcasts them.
+type Poller struct {
+ db *DB
+ hub *Hub
+ store *PacketStore // optional: if set, new transmissions are ingested into memory
+ interval time.Duration
+ stop chan struct{}
+}
+
+func NewPoller(db *DB, hub *Hub, interval time.Duration) *Poller {
+ return &Poller{db: db, hub: hub, interval: interval, stop: make(chan struct{})}
+}
+
+func (p *Poller) Start() {
+ lastID := p.db.GetMaxTransmissionID()
+ lastObsID := p.db.GetMaxObservationID()
+ log.Printf("[poller] starting from transmission ID %d, obs ID %d, interval %v", lastID, lastObsID, p.interval)
+
+ ticker := time.NewTicker(p.interval)
+ defer ticker.Stop()
+
+ for {
+ select {
+ case <-ticker.C:
+ if p.store != nil {
+ // Ingest new transmissions into in-memory store and broadcast
+ newTxs, newMax := p.store.IngestNewFromDB(lastID, 100)
+ if newMax > lastID {
+ lastID = newMax
+ }
+ // Ingest new observations for existing transmissions (fixes #174)
+ nextObsID := lastObsID
+ if err := p.db.conn.QueryRow(`
+ SELECT COALESCE(MAX(id), ?) FROM (
+ SELECT id FROM observations
+ WHERE id > ?
+ ORDER BY id ASC
+ LIMIT 500
+ )`, lastObsID, lastObsID).Scan(&nextObsID); err != nil {
+ nextObsID = lastObsID
+ }
+ newObs := p.store.IngestNewObservations(lastObsID, 500)
+ if nextObsID > lastObsID {
+ lastObsID = nextObsID
+ }
+ if len(newTxs) > 0 {
+ log.Printf("[broadcast] sending %d packets to %d clients (lastID now %d)", len(newTxs), p.hub.ClientCount(), lastID)
+ }
+ for _, tx := range newTxs {
+ p.hub.Broadcast(WSMessage{
+ Type: "packet",
+ Data: tx,
+ })
+ }
+ for _, obs := range newObs {
+ p.hub.Broadcast(WSMessage{
+ Type: "packet",
+ Data: obs,
+ })
+ }
+ } else {
+ // Fallback: direct DB query (used when store is nil, e.g. tests)
+ newTxs, err := p.db.GetNewTransmissionsSince(lastID, 100)
+ if err != nil {
+ log.Printf("[poller] error: %v", err)
+ continue
+ }
+ for _, tx := range newTxs {
+ id, _ := tx["id"].(int)
+ if id > lastID {
+ lastID = id
+ }
+ // Copy packet fields for the nested packet (avoids circular ref)
+ pkt := make(map[string]interface{}, len(tx))
+ for k, v := range tx {
+ pkt[k] = v
+ }
+ tx["packet"] = pkt
+ p.hub.Broadcast(WSMessage{
+ Type: "packet",
+ Data: tx,
+ })
+ }
+ }
+ case <-p.stop:
+ return
+ }
+ }
+}
+
+func (p *Poller) Stop() {
+ close(p.stop)
+}
diff --git a/cmd/server/websocket_test.go b/cmd/server/websocket_test.go
index 0ae6988..22b68d8 100644
--- a/cmd/server/websocket_test.go
+++ b/cmd/server/websocket_test.go
@@ -1,275 +1,415 @@
-package main
-
-import (
- "encoding/json"
- "net/http"
- "net/http/httptest"
- "testing"
- "time"
-
- "github.com/gorilla/websocket"
-)
-
-func TestHubBroadcast(t *testing.T) {
- hub := NewHub()
-
- if hub.ClientCount() != 0 {
- t.Errorf("expected 0 clients, got %d", hub.ClientCount())
- }
-
- // Create a test server with WebSocket endpoint
- srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- hub.ServeWS(w, r)
- }))
- defer srv.Close()
-
- // Connect a WebSocket client
- wsURL := "ws" + srv.URL[4:] // replace http with ws
- conn, _, err := websocket.DefaultDialer.Dial(wsURL, nil)
- if err != nil {
- t.Fatalf("dial error: %v", err)
- }
- defer conn.Close()
-
- // Wait for registration
- time.Sleep(50 * time.Millisecond)
-
- if hub.ClientCount() != 1 {
- t.Errorf("expected 1 client, got %d", hub.ClientCount())
- }
-
- // Broadcast a message
- hub.Broadcast(map[string]interface{}{
- "type": "packet",
- "data": map[string]interface{}{"id": 1, "hash": "test123"},
- })
-
- // Read the message
- conn.SetReadDeadline(time.Now().Add(2 * time.Second))
- _, msg, err := conn.ReadMessage()
- if err != nil {
- t.Fatalf("read error: %v", err)
- }
- if len(msg) == 0 {
- t.Error("expected non-empty message")
- }
-
- // Disconnect
- conn.Close()
- time.Sleep(100 * time.Millisecond)
-}
-
-func TestPollerCreation(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- hub := NewHub()
-
- poller := NewPoller(db, hub, 100*time.Millisecond)
- if poller == nil {
- t.Fatal("expected poller")
- }
-
- // Start and stop
- go poller.Start()
- time.Sleep(200 * time.Millisecond)
- poller.Stop()
-}
-
-func TestHubMultipleClients(t *testing.T) {
- hub := NewHub()
-
- srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- hub.ServeWS(w, r)
- }))
- defer srv.Close()
-
- wsURL := "ws" + srv.URL[4:]
-
- // Connect two clients
- conn1, _, err := websocket.DefaultDialer.Dial(wsURL, nil)
- if err != nil {
- t.Fatalf("dial error: %v", err)
- }
- defer conn1.Close()
-
- conn2, _, err := websocket.DefaultDialer.Dial(wsURL, nil)
- if err != nil {
- t.Fatalf("dial error: %v", err)
- }
- defer conn2.Close()
-
- time.Sleep(100 * time.Millisecond)
-
- if hub.ClientCount() != 2 {
- t.Errorf("expected 2 clients, got %d", hub.ClientCount())
- }
-
- // Broadcast and both should receive
- hub.Broadcast(map[string]interface{}{"type": "test", "data": "hello"})
-
- conn1.SetReadDeadline(time.Now().Add(2 * time.Second))
- _, msg1, err := conn1.ReadMessage()
- if err != nil {
- t.Fatalf("conn1 read error: %v", err)
- }
- if len(msg1) == 0 {
- t.Error("expected non-empty message on conn1")
- }
-
- conn2.SetReadDeadline(time.Now().Add(2 * time.Second))
- _, msg2, err := conn2.ReadMessage()
- if err != nil {
- t.Fatalf("conn2 read error: %v", err)
- }
- if len(msg2) == 0 {
- t.Error("expected non-empty message on conn2")
- }
-
- // Disconnect one
- conn1.Close()
- time.Sleep(100 * time.Millisecond)
-
- // Remaining client should still work
- hub.Broadcast(map[string]interface{}{"type": "test2"})
-
- conn2.SetReadDeadline(time.Now().Add(2 * time.Second))
- _, msg3, err := conn2.ReadMessage()
- if err != nil {
- t.Fatalf("conn2 read error after disconnect: %v", err)
- }
- if len(msg3) == 0 {
- t.Error("expected non-empty message")
- }
-}
-
-func TestBroadcastFullBuffer(t *testing.T) {
- hub := NewHub()
-
- // Create a client with tiny buffer (1)
- client := &Client{
- send: make(chan []byte, 1),
- }
- hub.mu.Lock()
- hub.clients[client] = true
- hub.mu.Unlock()
-
- // Fill the buffer
- client.send <- []byte("first")
-
- // This broadcast should drop the message (buffer full)
- hub.Broadcast(map[string]interface{}{"type": "dropped"})
-
- // Channel should still only have the first message
- select {
- case msg := <-client.send:
- if string(msg) != "first" {
- t.Errorf("expected 'first', got %s", string(msg))
- }
- default:
- t.Error("expected message in channel")
- }
-
- // Clean up
- hub.mu.Lock()
- delete(hub.clients, client)
- hub.mu.Unlock()
-}
-
-func TestBroadcastMarshalError(t *testing.T) {
- hub := NewHub()
-
- // Marshal error: functions can't be marshaled to JSON
- hub.Broadcast(map[string]interface{}{"bad": func() {}})
- // Should not panic — just log and return
-}
-
-func TestPollerBroadcastsNewData(t *testing.T) {
- db := setupTestDB(t)
- defer db.Close()
- seedTestData(t, db)
- hub := NewHub()
-
- // Create a client to receive broadcasts
- client := &Client{
- send: make(chan []byte, 256),
- }
- hub.mu.Lock()
- hub.clients[client] = true
- hub.mu.Unlock()
-
- poller := NewPoller(db, hub, 50*time.Millisecond)
- go poller.Start()
-
- // Insert new data to trigger broadcast
- db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type)
- VALUES ('EEFF', 'newhash123456789', '2026-01-16T10:00:00Z', 1, 4)`)
-
- time.Sleep(200 * time.Millisecond)
- poller.Stop()
-
- // Check if client received broadcast with packet field (fixes #162)
- select {
- case msg := <-client.send:
- if len(msg) == 0 {
- t.Error("expected non-empty broadcast message")
- }
- var parsed map[string]interface{}
- if err := json.Unmarshal(msg, &parsed); err != nil {
- t.Fatalf("failed to parse broadcast: %v", err)
- }
- if parsed["type"] != "packet" {
- t.Errorf("expected type=packet, got %v", parsed["type"])
- }
- data, ok := parsed["data"].(map[string]interface{})
- if !ok {
- t.Fatal("expected data to be an object")
- }
- // packets.js filters on m.data.packet — must exist
- pkt, ok := data["packet"]
- if !ok || pkt == nil {
- t.Error("expected data.packet to exist (required by packets.js WS handler)")
- }
- pktMap, ok := pkt.(map[string]interface{})
- if !ok {
- t.Fatal("expected data.packet to be an object")
- }
- // Verify key fields exist in nested packet (timestamp required by packets.js)
- for _, field := range []string{"id", "hash", "payload_type", "timestamp"} {
- if _, exists := pktMap[field]; !exists {
- t.Errorf("expected data.packet.%s to exist", field)
- }
- }
- default:
- // Might not have received due to timing
- }
-
- // Clean up
- hub.mu.Lock()
- delete(hub.clients, client)
- hub.mu.Unlock()
-}
-
-func TestHubRegisterUnregister(t *testing.T) {
- hub := NewHub()
-
- client := &Client{
- send: make(chan []byte, 256),
- }
-
- hub.Register(client)
- if hub.ClientCount() != 1 {
- t.Errorf("expected 1 client after register, got %d", hub.ClientCount())
- }
-
- hub.Unregister(client)
- if hub.ClientCount() != 0 {
- t.Errorf("expected 0 clients after unregister, got %d", hub.ClientCount())
- }
-
- // Unregister again should be safe
- hub.Unregister(client)
- if hub.ClientCount() != 0 {
- t.Errorf("expected 0 clients, got %d", hub.ClientCount())
- }
-}
+package main
+
+import (
+ "encoding/json"
+ "net/http"
+ "net/http/httptest"
+ "sort"
+ "testing"
+ "time"
+
+ "github.com/gorilla/websocket"
+)
+
+func TestHubBroadcast(t *testing.T) {
+ hub := NewHub()
+
+ if hub.ClientCount() != 0 {
+ t.Errorf("expected 0 clients, got %d", hub.ClientCount())
+ }
+
+ // Create a test server with WebSocket endpoint
+ srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ hub.ServeWS(w, r)
+ }))
+ defer srv.Close()
+
+ // Connect a WebSocket client
+ wsURL := "ws" + srv.URL[4:] // replace http with ws
+ conn, _, err := websocket.DefaultDialer.Dial(wsURL, nil)
+ if err != nil {
+ t.Fatalf("dial error: %v", err)
+ }
+ defer conn.Close()
+
+ // Wait for registration
+ time.Sleep(50 * time.Millisecond)
+
+ if hub.ClientCount() != 1 {
+ t.Errorf("expected 1 client, got %d", hub.ClientCount())
+ }
+
+ // Broadcast a message
+ hub.Broadcast(map[string]interface{}{
+ "type": "packet",
+ "data": map[string]interface{}{"id": 1, "hash": "test123"},
+ })
+
+ // Read the message
+ conn.SetReadDeadline(time.Now().Add(2 * time.Second))
+ _, msg, err := conn.ReadMessage()
+ if err != nil {
+ t.Fatalf("read error: %v", err)
+ }
+ if len(msg) == 0 {
+ t.Error("expected non-empty message")
+ }
+
+ // Disconnect
+ conn.Close()
+ time.Sleep(100 * time.Millisecond)
+}
+
+func TestPollerCreation(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ hub := NewHub()
+
+ poller := NewPoller(db, hub, 100*time.Millisecond)
+ if poller == nil {
+ t.Fatal("expected poller")
+ }
+
+ // Start and stop
+ go poller.Start()
+ time.Sleep(200 * time.Millisecond)
+ poller.Stop()
+}
+
+func TestHubMultipleClients(t *testing.T) {
+ hub := NewHub()
+
+ srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ hub.ServeWS(w, r)
+ }))
+ defer srv.Close()
+
+ wsURL := "ws" + srv.URL[4:]
+
+ // Connect two clients
+ conn1, _, err := websocket.DefaultDialer.Dial(wsURL, nil)
+ if err != nil {
+ t.Fatalf("dial error: %v", err)
+ }
+ defer conn1.Close()
+
+ conn2, _, err := websocket.DefaultDialer.Dial(wsURL, nil)
+ if err != nil {
+ t.Fatalf("dial error: %v", err)
+ }
+ defer conn2.Close()
+
+ time.Sleep(100 * time.Millisecond)
+
+ if hub.ClientCount() != 2 {
+ t.Errorf("expected 2 clients, got %d", hub.ClientCount())
+ }
+
+ // Broadcast and both should receive
+ hub.Broadcast(map[string]interface{}{"type": "test", "data": "hello"})
+
+ conn1.SetReadDeadline(time.Now().Add(2 * time.Second))
+ _, msg1, err := conn1.ReadMessage()
+ if err != nil {
+ t.Fatalf("conn1 read error: %v", err)
+ }
+ if len(msg1) == 0 {
+ t.Error("expected non-empty message on conn1")
+ }
+
+ conn2.SetReadDeadline(time.Now().Add(2 * time.Second))
+ _, msg2, err := conn2.ReadMessage()
+ if err != nil {
+ t.Fatalf("conn2 read error: %v", err)
+ }
+ if len(msg2) == 0 {
+ t.Error("expected non-empty message on conn2")
+ }
+
+ // Disconnect one
+ conn1.Close()
+ time.Sleep(100 * time.Millisecond)
+
+ // Remaining client should still work
+ hub.Broadcast(map[string]interface{}{"type": "test2"})
+
+ conn2.SetReadDeadline(time.Now().Add(2 * time.Second))
+ _, msg3, err := conn2.ReadMessage()
+ if err != nil {
+ t.Fatalf("conn2 read error after disconnect: %v", err)
+ }
+ if len(msg3) == 0 {
+ t.Error("expected non-empty message")
+ }
+}
+
+func TestBroadcastFullBuffer(t *testing.T) {
+ hub := NewHub()
+
+ // Create a client with tiny buffer (1)
+ client := &Client{
+ send: make(chan []byte, 1),
+ }
+ hub.mu.Lock()
+ hub.clients[client] = true
+ hub.mu.Unlock()
+
+ // Fill the buffer
+ client.send <- []byte("first")
+
+ // This broadcast should drop the message (buffer full)
+ hub.Broadcast(map[string]interface{}{"type": "dropped"})
+
+ // Channel should still only have the first message
+ select {
+ case msg := <-client.send:
+ if string(msg) != "first" {
+ t.Errorf("expected 'first', got %s", string(msg))
+ }
+ default:
+ t.Error("expected message in channel")
+ }
+
+ // Clean up
+ hub.mu.Lock()
+ delete(hub.clients, client)
+ hub.mu.Unlock()
+}
+
+func TestBroadcastMarshalError(t *testing.T) {
+ hub := NewHub()
+
+ // Marshal error: functions can't be marshaled to JSON
+ hub.Broadcast(map[string]interface{}{"bad": func() {}})
+ // Should not panic — just log and return
+}
+
+func TestPollerBroadcastsNewData(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ hub := NewHub()
+
+ // Create a client to receive broadcasts
+ client := &Client{
+ send: make(chan []byte, 256),
+ }
+ hub.mu.Lock()
+ hub.clients[client] = true
+ hub.mu.Unlock()
+
+ poller := NewPoller(db, hub, 50*time.Millisecond)
+ go poller.Start()
+
+ // Insert new data to trigger broadcast
+ db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type)
+ VALUES ('EEFF', 'newhash123456789', '2026-01-16T10:00:00Z', 1, 4)`)
+
+ time.Sleep(200 * time.Millisecond)
+ poller.Stop()
+
+ // Check if client received broadcast with packet field (fixes #162)
+ select {
+ case msg := <-client.send:
+ if len(msg) == 0 {
+ t.Error("expected non-empty broadcast message")
+ }
+ var parsed map[string]interface{}
+ if err := json.Unmarshal(msg, &parsed); err != nil {
+ t.Fatalf("failed to parse broadcast: %v", err)
+ }
+ if parsed["type"] != "packet" {
+ t.Errorf("expected type=packet, got %v", parsed["type"])
+ }
+ data, ok := parsed["data"].(map[string]interface{})
+ if !ok {
+ t.Fatal("expected data to be an object")
+ }
+ // packets.js filters on m.data.packet — must exist
+ pkt, ok := data["packet"]
+ if !ok || pkt == nil {
+ t.Error("expected data.packet to exist (required by packets.js WS handler)")
+ }
+ pktMap, ok := pkt.(map[string]interface{})
+ if !ok {
+ t.Fatal("expected data.packet to be an object")
+ }
+ // Verify key fields exist in nested packet (timestamp required by packets.js)
+ for _, field := range []string{"id", "hash", "payload_type", "timestamp"} {
+ if _, exists := pktMap[field]; !exists {
+ t.Errorf("expected data.packet.%s to exist", field)
+ }
+ }
+ default:
+ // Might not have received due to timing
+ }
+
+ // Clean up
+ hub.mu.Lock()
+ delete(hub.clients, client)
+ hub.mu.Unlock()
+}
+
+func TestPollerBroadcastsMultipleObservations(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ hub := NewHub()
+
+ client := &Client{
+ send: make(chan []byte, 256),
+ }
+ hub.mu.Lock()
+ hub.clients[client] = true
+ hub.mu.Unlock()
+ defer func() {
+ hub.mu.Lock()
+ delete(hub.clients, client)
+ hub.mu.Unlock()
+ }()
+
+ poller := NewPoller(db, hub, 50*time.Millisecond)
+ store := NewPacketStore(db)
+ if err := store.Load(); err != nil {
+ t.Fatalf("store load failed: %v", err)
+ }
+ poller.store = store
+ go poller.Start()
+ defer poller.Stop()
+
+ // Wait for poller to initialize its lastID/lastObsID cursors before
+ // inserting new data; otherwise the poller may snapshot a lastID that
+ // already includes the test data and never broadcast it.
+ time.Sleep(100 * time.Millisecond)
+
+ now := time.Now().UTC().Format(time.RFC3339)
+ if _, err := db.conn.Exec(`INSERT INTO transmissions (raw_hex, hash, first_seen, route_type, payload_type, decoded_json)
+ VALUES ('FACE', 'starbursthash237a', ?, 1, 4, '{"pubKey":"aabbccdd11223344","type":"ADVERT"}')`, now); err != nil {
+ t.Fatalf("insert tx failed: %v", err)
+ }
+ var txID int
+ if err := db.conn.QueryRow(`SELECT id FROM transmissions WHERE hash='starbursthash237a'`).Scan(&txID); err != nil {
+ t.Fatalf("query tx id failed: %v", err)
+ }
+ ts := time.Now().Unix()
+ if _, err := db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (?, 1, 14.0, -82, '["aa"]', ?),
+ (?, 2, 10.5, -90, '["aa","bb"]', ?),
+ (?, 1, 7.0, -96, '["aa","bb","cc"]', ?)`,
+ txID, ts, txID, ts+1, txID, ts+2); err != nil {
+ t.Fatalf("insert observations failed: %v", err)
+ }
+
+ deadline := time.After(2 * time.Second)
+ var dataMsgs []map[string]interface{}
+ for len(dataMsgs) < 3 {
+ select {
+ case raw := <-client.send:
+ var parsed map[string]interface{}
+ if err := json.Unmarshal(raw, &parsed); err != nil {
+ t.Fatalf("unmarshal ws msg failed: %v", err)
+ }
+ if parsed["type"] != "packet" {
+ continue
+ }
+ data, ok := parsed["data"].(map[string]interface{})
+ if !ok {
+ continue
+ }
+ if data["hash"] == "starbursthash237a" {
+ dataMsgs = append(dataMsgs, data)
+ }
+ case <-deadline:
+ t.Fatalf("timed out waiting for 3 observation broadcasts, got %d", len(dataMsgs))
+ }
+ }
+
+ if len(dataMsgs) != 3 {
+ t.Fatalf("expected 3 messages, got %d", len(dataMsgs))
+ }
+
+ paths := make([]string, 0, 3)
+ observers := make(map[string]bool)
+ for _, m := range dataMsgs {
+ hash, _ := m["hash"].(string)
+ if hash != "starbursthash237a" {
+ t.Fatalf("unexpected hash %q", hash)
+ }
+ p, _ := m["path_json"].(string)
+ paths = append(paths, p)
+ if oid, ok := m["observer_id"].(string); ok && oid != "" {
+ observers[oid] = true
+ }
+ }
+ sort.Strings(paths)
+ wantPaths := []string{`["aa","bb","cc"]`, `["aa","bb"]`, `["aa"]`}
+ sort.Strings(wantPaths)
+ for i := range wantPaths {
+ if paths[i] != wantPaths[i] {
+ t.Fatalf("path mismatch at %d: got %q want %q", i, paths[i], wantPaths[i])
+ }
+ }
+ if len(observers) < 2 {
+ t.Fatalf("expected observations from >=2 observers, got %d", len(observers))
+ }
+}
+
+func TestIngestNewObservationsBroadcast(t *testing.T) {
+ db := setupTestDB(t)
+ defer db.Close()
+ seedTestData(t, db)
+ store := NewPacketStore(db)
+ if err := store.Load(); err != nil {
+ t.Fatalf("store load failed: %v", err)
+ }
+
+ maxObs := db.GetMaxObservationID()
+ now := time.Now().Unix()
+ if _, err := db.conn.Exec(`INSERT INTO observations (transmission_id, observer_idx, snr, rssi, path_json, timestamp)
+ VALUES (1, 2, 6.0, -100, '["aa","zz"]', ?),
+ (1, 1, 5.0, -101, '["aa","yy"]', ?)`, now, now+1); err != nil {
+ t.Fatalf("insert new observations failed: %v", err)
+ }
+
+ maps := store.IngestNewObservations(maxObs, 500)
+ if len(maps) != 2 {
+ t.Fatalf("expected 2 broadcast maps, got %d", len(maps))
+ }
+ for _, m := range maps {
+ if m["hash"] != "abc123def4567890" {
+ t.Fatalf("unexpected hash in map: %v", m["hash"])
+ }
+ path, ok := m["path_json"].(string)
+ if !ok || path == "" {
+ t.Fatalf("missing path_json in map: %#v", m)
+ }
+ if _, ok := m["observer_id"]; !ok {
+ t.Fatalf("missing observer_id in map: %#v", m)
+ }
+ }
+}
+
+func TestHubRegisterUnregister(t *testing.T) {
+ hub := NewHub()
+
+ client := &Client{
+ send: make(chan []byte, 256),
+ }
+
+ hub.Register(client)
+ if hub.ClientCount() != 1 {
+ t.Errorf("expected 1 client after register, got %d", hub.ClientCount())
+ }
+
+ hub.Unregister(client)
+ if hub.ClientCount() != 0 {
+ t.Errorf("expected 0 clients after unregister, got %d", hub.ClientCount())
+ }
+
+ // Unregister again should be safe
+ hub.Unregister(client)
+ if hub.ClientCount() != 0 {
+ t.Errorf("expected 0 clients, got %d", hub.ClientCount())
+ }
+}
diff --git a/decoder.js b/decoder.js
index 16270de..cf26786 100644
--- a/decoder.js
+++ b/decoder.js
@@ -2,8 +2,8 @@
* MeshCore Packet Decoder
* Custom implementation — does NOT use meshcore-decoder library (known path_length bug).
*
- * Packet layout:
- * [header(1)] [pathLength(1)] [transportCodes?] [path hops] [payload...]
+ * Packet layout (per firmware docs/packet_format.md):
+ * [header(1)] [transportCodes?(4)] [pathLength(1)] [path hops] [payload...]
*
* Header byte (LSB first):
* bits 1-0: routeType (0=TRANSPORT_FLOOD, 1=FLOOD, 2=DIRECT, 3=TRANSPORT_DIRECT)
@@ -42,7 +42,7 @@ const PAYLOAD_TYPES = {
0x0F: 'RAW_CUSTOM',
};
-// Route types that carry transport codes (nextHop + lastHop, 2 bytes each)
+// Route types that carry transport codes (2x uint16_t, 4 bytes total)
const TRANSPORT_ROUTES = new Set([0, 3]); // TRANSPORT_FLOOD, TRANSPORT_DIRECT
// --- Header parsing ---
@@ -94,13 +94,11 @@ function decodeEncryptedPayload(buf) {
};
}
-/** ACK: dest(1) + src(1) + ack_hash(4) (per Mesh.cpp) */
+/** ACK: checksum(4) — CRC of message timestamp + text + sender pubkey (per Mesh.cpp createAck) */
function decodeAck(buf) {
- if (buf.length < 6) return { error: 'too short', raw: buf.toString('hex') };
+ if (buf.length < 4) return { error: 'too short', raw: buf.toString('hex') };
return {
- destHash: buf.subarray(0, 1).toString('hex'),
- srcHash: buf.subarray(1, 2).toString('hex'),
- extraHash: buf.subarray(2, 6).toString('hex'),
+ ackChecksum: buf.subarray(0, 4).toString('hex'),
};
}
@@ -125,6 +123,8 @@ function decodeAdvert(buf) {
room: advType === 3,
sensor: advType === 4,
hasLocation: !!(flags & 0x10),
+ hasFeat1: !!(flags & 0x20),
+ hasFeat2: !!(flags & 0x40),
hasName: !!(flags & 0x80),
};
@@ -134,6 +134,14 @@ function decodeAdvert(buf) {
result.lon = appdata.readInt32LE(off + 4) / 1e6;
off += 8;
}
+ if (result.flags.hasFeat1 && appdata.length >= off + 2) {
+ result.feat1 = appdata.readUInt16LE(off);
+ off += 2;
+ }
+ if (result.flags.hasFeat2 && appdata.length >= off + 2) {
+ result.feat2 = appdata.readUInt16LE(off);
+ off += 2;
+ }
if (result.flags.hasName) {
// Find null terminator to separate name from trailing telemetry bytes
let nameEnd = appdata.length;
@@ -231,7 +239,7 @@ function decodeGrpTxt(buf, channelKeys) {
return { type: 'GRP_TXT', channelHash, channelHashHex, decryptionStatus: 'no_key', mac, encryptedData };
}
-/** ANON_REQ: dest(6) + ephemeral_pubkey(32) + MAC(4) + encrypted */
+/** ANON_REQ: dest(1) + ephemeral_pubkey(32) + MAC(2) + encrypted */
function decodeAnonReq(buf) {
if (buf.length < 35) return { error: 'too short', raw: buf.toString('hex') };
return {
@@ -242,7 +250,7 @@ function decodeAnonReq(buf) {
};
}
-/** PATH: dest(6) + src(6) + MAC(4) + path_data */
+/** PATH: dest(1) + src(1) + MAC(2) + path_data */
function decodePath_payload(buf) {
if (buf.length < 4) return { error: 'too short', raw: buf.toString('hex') };
return {
@@ -253,14 +261,14 @@ function decodePath_payload(buf) {
};
}
-/** TRACE: flags(1) + tag(4) + dest(6) + src(1) */
+/** TRACE: tag(4) + authCode(4) + flags(1) + pathData (per Mesh.cpp onRecvPacket TRACE) */
function decodeTrace(buf) {
- if (buf.length < 12) return { error: 'too short', raw: buf.toString('hex') };
+ if (buf.length < 9) return { error: 'too short', raw: buf.toString('hex') };
return {
- flags: buf[0],
- tag: buf.readUInt32LE(1),
- destHash: buf.subarray(5, 11).toString('hex'),
- srcHash: buf.subarray(11, 12).toString('hex'),
+ tag: buf.readUInt32LE(0),
+ authCode: buf.subarray(4, 8).toString('hex'),
+ flags: buf[8],
+ pathData: buf.subarray(9).toString('hex'),
};
}
@@ -289,20 +297,22 @@ function decodePacket(hexString, channelKeys) {
if (buf.length < 2) throw new Error('Packet too short (need at least header + pathLength)');
const header = decodeHeader(buf[0]);
- const pathByte = buf[1];
- let offset = 2;
+ let offset = 1;
- // Transport codes for TRANSPORT_FLOOD / TRANSPORT_DIRECT
+ // Transport codes for TRANSPORT_FLOOD / TRANSPORT_DIRECT — BEFORE path_length per spec
let transportCodes = null;
if (TRANSPORT_ROUTES.has(header.routeType)) {
if (buf.length < offset + 4) throw new Error('Packet too short for transport codes');
transportCodes = {
- nextHop: buf.subarray(offset, offset + 2).toString('hex').toUpperCase(),
- lastHop: buf.subarray(offset + 2, offset + 4).toString('hex').toUpperCase(),
+ code1: buf.subarray(offset, offset + 2).toString('hex').toUpperCase(),
+ code2: buf.subarray(offset + 2, offset + 4).toString('hex').toUpperCase(),
};
offset += 4;
}
+ // Path length byte — AFTER transport codes per spec
+ const pathByte = buf[offset++];
+
// Path
const path = decodePath(pathByte, buf, offset);
offset += path.bytesConsumed;
@@ -386,7 +396,7 @@ module.exports = { decodePacket, validateAdvert, hasNonPrintableChars, ROUTE_TYP
// --- Tests ---
if (require.main === module) {
- console.log('=== Test 1: ADVERT, FLOOD, 5 hops (2-byte hashes), "Test Repeater" ===');
+ console.log('=== Test 1: ADVERT, FLOOD, 5 hops (2-byte hashes), "Kpa Roof Solar" ===');
const pkt1 = decodePacket(
'11451000D818206D3AAC152C8A91F89957E6D30CA51F36E28790228971C473B755F244F718754CF5EE4A2FD58D944466E42CDED140C66D0CC590183E32BAF40F112BE8F3F2BDF6012B4B2793C52F1D36F69EE054D9A05593286F78453E56C0EC4A3EB95DDA2A7543FCCC00B939CACC009278603902FC12BCF84B706120526F6F6620536F6C6172'
);
@@ -402,7 +412,7 @@ if (require.main === module) {
assert(pkt1.path.hops[0] === '1000', 'first hop should be 1000');
assert(pkt1.path.hops[1] === 'D818', 'second hop should be D818');
assert(pkt1.transportCodes === null, 'FLOOD has no transport codes');
- assert(pkt1.payload.name === 'Test Repeater', 'name should be "Test Repeater"');
+ assert(pkt1.payload.name === 'Kpa Roof Solar', 'name should be "Kpa Roof Solar"');
console.log('✅ Test 1 passed\n');
console.log('=== Test 2: ADVERT, FLOOD, 0 hops (zero-path) ===');
diff --git a/public/index.html b/public/index.html
index 05e25af..a6cb97d 100644
--- a/public/index.html
+++ b/public/index.html
@@ -22,9 +22,9 @@
-
-
-
+
+
+
@@ -81,29 +81,29 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+