fix(go): channels null arrays + hash size enrichment on nodes

- Fix #148: channels endpoint returned null for msgLengths when no
  decrypted messages exist. Initialize msgLengths as make([]int, 0)
  in store path and guard channels slice in DB fallback path.

- Fix #149: nodes endpoint always returned hash_size=null and
  hash_size_inconsistent=false. Add GetNodeHashSizeInfo() to
  PacketStore that scans advert packets to compute per-node hash
  size, flip-flop detection, and sizes_seen. Enrich nodes in both
  handleNodes and handleNodeDetail with computed hash data.

fixes #148, fixes #149

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
Kpa-clawbot
2026-03-27 11:21:41 -07:00
parent 407c49e017
commit 5bb5bea444
3 changed files with 299 additions and 3 deletions

View File

@@ -678,6 +678,14 @@ func (s *Server) handleNodes(w http.ResponseWriter, r *http.Request) {
writeError(w, 500, err.Error())
return
}
if s.store != nil {
hashInfo := s.store.GetNodeHashSizeInfo()
for _, node := range nodes {
if pk, ok := node["public_key"].(string); ok {
EnrichNodeWithHashSize(node, hashInfo[pk])
}
}
}
writeJSON(w, map[string]interface{}{"nodes": nodes, "total": total, "counts": counts})
}
@@ -703,6 +711,11 @@ func (s *Server) handleNodeDetail(w http.ResponseWriter, r *http.Request) {
return
}
if s.store != nil {
hashInfo := s.store.GetNodeHashSizeInfo()
EnrichNodeWithHashSize(node, hashInfo[pubkey])
}
name := ""
if n, ok := node["name"]; ok && n != nil {
name = fmt.Sprintf("%v", n)
@@ -1073,8 +1086,10 @@ func (s *Server) handleAnalyticsChannels(w http.ResponseWriter, r *http.Request)
writeJSON(w, s.store.GetAnalyticsChannels(region))
return
}
var channels []map[string]interface{}
channels, _ = s.db.GetChannels()
channels, _ := s.db.GetChannels()
if channels == nil {
channels = make([]map[string]interface{}, 0)
}
writeJSON(w, map[string]interface{}{
"activeChannels": len(channels),
"decryptable": len(channels),

View File

@@ -4,6 +4,7 @@ import (
"encoding/json"
"net/http"
"net/http/httptest"
"strconv"
"testing"
"github.com/gorilla/mux"
@@ -1742,6 +1743,197 @@ func TestHandlerErrorBulkHealth(t *testing.T) {
}
}
// TestAnalyticsChannelsNoNullArrays verifies that the analytics/channels
// endpoint serializes each array-valued field as a JSON array (possibly
// empty), never as null, so frontend consumers can iterate without guards.
func TestAnalyticsChannelsNoNullArrays(t *testing.T) {
	_, router := setupTestServer(t)

	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, httptest.NewRequest("GET", "/api/analytics/channels", nil))
	if rec.Code != 200 {
		t.Fatalf("expected 200, got %d", rec.Code)
	}

	var payload map[string]interface{}
	if err := json.Unmarshal([]byte(rec.Body.String()), &payload); err != nil {
		t.Fatalf("invalid JSON: %v", err)
	}

	for _, key := range []string{"channels", "topSenders", "channelTimeline", "msgLengths"} {
		v, present := payload[key]
		switch {
		case !present:
			t.Errorf("missing field %q", key)
		case v == nil:
			t.Errorf("field %q is null, expected empty array []", key)
		default:
			if _, isArr := v.([]interface{}); !isArr {
				t.Errorf("field %q is not an array, got %T", key, v)
			}
		}
	}
}
// TestAnalyticsChannelsNoStoreFallbackNoNulls exercises the DB fallback
// path of the analytics/channels endpoint (server constructed without a
// packet store) and verifies that no array field serializes as JSON null.
func TestAnalyticsChannelsNoStoreFallbackNoNulls(t *testing.T) {
	db := setupTestDB(t)
	seedTestData(t, db)
	cfg := &Config{Port: 3000}
	hub := NewHub()
	srv := NewServer(db, cfg, hub)
	router := mux.NewRouter()
	srv.RegisterRoutes(router)

	req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
	w := httptest.NewRecorder()
	router.ServeHTTP(w, req)
	if w.Code != 200 {
		t.Fatalf("expected 200, got %d", w.Code)
	}

	var body map[string]interface{}
	// Fix: the decode error was previously discarded, so a malformed
	// response body would leave body nil and the test would still pass.
	if err := json.Unmarshal(w.Body.Bytes(), &body); err != nil {
		t.Fatalf("invalid JSON: %v", err)
	}
	arrayFields := []string{"channels", "topSenders", "channelTimeline", "msgLengths"}
	for _, field := range arrayFields {
		if body[field] == nil {
			t.Errorf("field %q is null in DB fallback, expected []", field)
		}
	}
}
// TestNodeHashSizeEnrichment covers EnrichNodeWithHashSize across three
// cases: nil info (defaults untouched), multi-size info (full enrichment
// with hash_sizes_seen), and single-size info (no hash_sizes_seen key).
func TestNodeHashSizeEnrichment(t *testing.T) {
	// freshNode builds the default node map each subtest mutates.
	freshNode := func() map[string]interface{} {
		return map[string]interface{}{
			"public_key":             "abc123",
			"hash_size":              nil,
			"hash_size_inconsistent": false,
		}
	}

	t.Run("nil info leaves defaults", func(t *testing.T) {
		n := freshNode()
		EnrichNodeWithHashSize(n, nil)
		if n["hash_size"] != nil {
			t.Error("expected hash_size to remain nil with nil info")
		}
	})

	t.Run("enriches with computed data", func(t *testing.T) {
		n := freshNode()
		EnrichNodeWithHashSize(n, &hashSizeNodeInfo{
			HashSize:     2,
			AllSizes:     map[int]bool{1: true, 2: true},
			Seq:          []int{1, 2, 1, 2},
			Inconsistent: true,
		})
		if n["hash_size"] != 2 {
			t.Errorf("expected hash_size 2, got %v", n["hash_size"])
		}
		if n["hash_size_inconsistent"] != true {
			t.Error("expected hash_size_inconsistent true")
		}
		seen, ok := n["hash_sizes_seen"].([]int)
		if !ok {
			t.Fatal("expected hash_sizes_seen to be []int")
		}
		if len(seen) != 2 || seen[0] != 1 || seen[1] != 2 {
			t.Errorf("expected [1,2], got %v", seen)
		}
	})

	t.Run("single size omits sizes_seen", func(t *testing.T) {
		n := freshNode()
		EnrichNodeWithHashSize(n, &hashSizeNodeInfo{
			HashSize: 3,
			AllSizes: map[int]bool{3: true},
			Seq:      []int{3, 3, 3},
		})
		if n["hash_size"] != 3 {
			t.Errorf("expected hash_size 3, got %v", n["hash_size"])
		}
		if n["hash_size_inconsistent"] != false {
			t.Error("expected hash_size_inconsistent false")
		}
		if _, present := n["hash_sizes_seen"]; present {
			t.Error("hash_sizes_seen should not be set for single size")
		}
	})
}
// TestGetNodeHashSizeInfoFlipFlop seeds one node with advert packets whose
// path byte alternates between hash size 1 (0x00) and 2 (0x40), then
// verifies GetNodeHashSizeInfo records both sizes and flags the flip-flop.
func TestGetNodeHashSizeInfoFlipFlop(t *testing.T) {
	db := setupTestDB(t)
	seedTestData(t, db)
	store := NewPacketStore(db)
	store.Load()

	pk := "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
	// Fix: the Exec error was previously ignored; a failed insert would
	// silently invalidate everything the test asserts afterwards.
	if _, err := db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, role) VALUES (?, 'TestNode', 'repeater')", pk); err != nil {
		t.Fatalf("seeding test node: %v", err)
	}

	decoded := `{"name":"TestNode","pubKey":"` + pk + `"}`
	// Byte 2-3 of the raw hex is the path byte; its top two bits encode
	// hash size minus one, so 0x00 → size 1 and 0x40 → size 2.
	raw1 := "04" + "00" + "aabb"
	raw2 := "04" + "40" + "aabb"
	payloadType := 4
	for i := 0; i < 3; i++ {
		rawHex := raw1
		if i%2 == 1 {
			rawHex = raw2
		}
		tx := &StoreTx{
			ID:          9000 + i,
			RawHex:      rawHex,
			Hash:        "testhash" + strconv.Itoa(i),
			FirstSeen:   "2024-01-01T00:00:00Z",
			PayloadType: &payloadType,
			DecodedJSON: decoded,
		}
		store.packets = append(store.packets, tx)
		store.byPayloadType[4] = append(store.byPayloadType[4], tx)
	}

	info := store.GetNodeHashSizeInfo()
	ni := info[pk]
	if ni == nil {
		t.Fatal("expected hash info for test node")
	}
	if len(ni.AllSizes) != 2 {
		t.Errorf("expected 2 unique sizes, got %d", len(ni.AllSizes))
	}
	if !ni.Inconsistent {
		t.Error("expected inconsistent flag to be true for flip-flop pattern")
	}
}
// TestAnalyticsHashSizesNoNullArrays verifies the analytics/hash-sizes
// endpoint never serializes its array fields as JSON null.
func TestAnalyticsHashSizesNoNullArrays(t *testing.T) {
	_, router := setupTestServer(t)
	req := httptest.NewRequest("GET", "/api/analytics/hash-sizes", nil)
	w := httptest.NewRecorder()
	router.ServeHTTP(w, req)
	if w.Code != 200 {
		t.Fatalf("expected 200, got %d", w.Code)
	}
	var body map[string]interface{}
	// Fix: the decode error was previously discarded, which could turn a
	// malformed response into a false pass.
	if err := json.Unmarshal(w.Body.Bytes(), &body); err != nil {
		t.Fatalf("invalid JSON: %v", err)
	}
	arrayFields := []string{"hourly", "topHops", "multiByteNodes"}
	for _, field := range arrayFields {
		if body[field] == nil {
			t.Errorf("field %q is null, expected []", field)
		}
	}
}
func min(a, b int) int {
if a < b {
return a

View File

@@ -1321,7 +1321,7 @@ func (s *PacketStore) GetAnalyticsChannels(region string) map[string]interface{}
channelMap := map[string]*chanInfo{}
senderCounts := map[string]int{}
var msgLengths []int
msgLengths := make([]int, 0)
timeline := map[string]int{} // hour|channelName → count
grpTxts := s.byPayloadType[5]
@@ -2946,6 +2946,95 @@ func (s *PacketStore) GetAnalyticsHashSizes(region string) map[string]interface{
}
}
// hashSizeNodeInfo holds per-node hash size tracking data.
type hashSizeNodeInfo struct {
	HashSize     int          // last size observed while scanning adverts (1-4, from the path byte's top two bits)
	AllSizes     map[int]bool // set of every distinct hash size observed for the node
	Seq          []int        // sizes in advert scan order; used for flip-flop (transition) detection
	Inconsistent bool         // true when the node alternated sizes (>=3 observations, >=2 sizes, >=2 transitions)
}
// GetNodeHashSizeInfo scans advert packets to compute per-node hash size data.
// It derives each packet's hash size from the top two bits of the path byte
// (second byte of the raw hex), groups observations by the node's public key
// taken from the decoded JSON, and flags nodes whose size flip-flops.
func (s *PacketStore) GetNodeHashSizeInfo() map[string]*hashSizeNodeInfo {
	s.mu.RLock()
	defer s.mu.RUnlock()

	result := make(map[string]*hashSizeNodeInfo)
	for _, pkt := range s.byPayloadType[4] {
		// Skip packets with no raw bytes, no decode, or too short to
		// contain a path byte.
		if pkt.RawHex == "" || pkt.DecodedJSON == "" || len(pkt.RawHex) < 4 {
			continue
		}
		pathByte, err := strconv.ParseUint(pkt.RawHex[2:4], 16, 8)
		if err != nil {
			continue
		}
		size := int((pathByte>>6)&0x3) + 1

		var fields map[string]interface{}
		if json.Unmarshal([]byte(pkt.DecodedJSON), &fields) != nil {
			continue
		}
		key := ""
		if v, isStr := fields["pubKey"].(string); isStr {
			key = v
		} else if v, isStr := fields["public_key"].(string); isStr {
			key = v
		}
		if key == "" {
			continue
		}

		entry, found := result[key]
		if !found {
			entry = &hashSizeNodeInfo{AllSizes: make(map[int]bool)}
			result[key] = entry
		}
		entry.HashSize = size
		entry.AllSizes[size] = true
		entry.Seq = append(entry.Seq, size)
	}

	// Flag flip-flop (inconsistent) nodes: at least 3 observations, at
	// least 2 distinct sizes, and at least 2 transitions in the sequence.
	for _, entry := range result {
		if len(entry.Seq) < 3 || len(entry.AllSizes) < 2 {
			continue
		}
		flips := 0
		for i := 1; i < len(entry.Seq); i++ {
			if entry.Seq[i] != entry.Seq[i-1] {
				flips++
			}
		}
		entry.Inconsistent = flips >= 2
	}
	return result
}
// EnrichNodeWithHashSize populates hash_size, hash_size_inconsistent, and
// hash_sizes_seen on a node map using precomputed hash size info. A nil
// info leaves the node untouched; hash_sizes_seen is only set when more
// than one distinct size was observed.
func EnrichNodeWithHashSize(node map[string]interface{}, info *hashSizeNodeInfo) {
	if info == nil {
		return
	}
	node["hash_size"] = info.HashSize
	node["hash_size_inconsistent"] = info.Inconsistent
	if len(info.AllSizes) < 2 {
		return
	}
	seen := make([]int, 0, len(info.AllSizes))
	for size := range info.AllSizes {
		seen = append(seen, size)
	}
	sort.Ints(seen)
	node["hash_sizes_seen"] = seen
}
// --- Bulk Health (in-memory) ---
func (s *PacketStore) GetBulkHealth(limit int, region string) []map[string]interface{} {