Compare commits

...

6 Commits

Author SHA1 Message Date
Kpa-clawbot
1e8405d8b8 Fix Docker Go build workdirs for replace paths
Align builder-stage directories with the repo layout so that the `replace github.com/corescope => ../..` directive in cmd/server and cmd/ingestor resolves to /build, where the root go.mod is copied.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-03-30 23:29:50 -07:00
Kpa-clawbot
b75bd650e9 fix(docker): copy shared module for go mod replace in builder stages
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-03-30 21:40:40 -07:00
Kpa-clawbot
152c62b5a8 fix(go): normalize go.mod replace paths for linux CI
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-03-30 21:10:44 -07:00
Kpa-clawbot
9333d9b151 fix: unprotect decode route and update auth tests
- make POST /api/decode public while keeping /api/perf/reset and POST /api/packets protected

- update API key tests to verify decode works without key and protected endpoints still block

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-03-30 21:03:00 -07:00
Kpa-clawbot
295fa0e6ee Fix #304: unblock decode endpoint and consolidate decoder
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-03-30 21:03:00 -07:00
Kpa-clawbot
0f41f8daf2 Fix channels region filtering for messages and WS
Refs #280

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-03-30 21:03:00 -07:00
14 changed files with 2691 additions and 3496 deletions

View File

@@ -9,6 +9,8 @@ ARG BUILD_TIME=unknown
# Build server
WORKDIR /build/server
COPY cmd/server/go.mod cmd/server/go.sum ./
COPY go.mod /build/go.mod
COPY internal/decoder/ /build/internal/decoder/
RUN go mod download
COPY cmd/server/ ./
RUN go build -ldflags "-X main.Version=${APP_VERSION} -X main.Commit=${GIT_COMMIT} -X main.BuildTime=${BUILD_TIME}" -o /corescope-server .
@@ -16,6 +18,8 @@ RUN go build -ldflags "-X main.Version=${APP_VERSION} -X main.Commit=${GIT_COMMI
# Build ingestor
WORKDIR /build/ingestor
COPY cmd/ingestor/go.mod cmd/ingestor/go.sum ./
COPY go.mod /build/go.mod
COPY internal/decoder/ /build/internal/decoder/
RUN go mod download
COPY cmd/ingestor/ ./
RUN go build -o /corescope-ingestor .

View File

@@ -7,14 +7,20 @@ ARG GIT_COMMIT=unknown
ARG BUILD_TIME=unknown
# Build server
WORKDIR /build/server
WORKDIR /build
COPY go.mod ./go.mod
COPY internal/decoder/ ./internal/decoder/
WORKDIR /build/cmd/server
COPY cmd/server/go.mod cmd/server/go.sum ./
RUN go mod download
COPY cmd/server/ ./
RUN go build -ldflags "-X main.Version=${APP_VERSION} -X main.Commit=${GIT_COMMIT} -X main.BuildTime=${BUILD_TIME}" -o /corescope-server .
# Build ingestor
WORKDIR /build/ingestor
WORKDIR /build
COPY go.mod ./go.mod
COPY internal/decoder/ ./internal/decoder/
WORKDIR /build/cmd/ingestor
COPY cmd/ingestor/go.mod cmd/ingestor/go.sum ./
RUN go mod download
COPY cmd/ingestor/ ./

View File

@@ -1,739 +1,55 @@
package main
import (
"crypto/aes"
"crypto/hmac"
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"encoding/json"
"fmt"
"math"
"strings"
"unicode/utf8"
)
// Route type constants (header bits 1-0).
const (
	RouteTransportFlood  = 0
	RouteFlood           = 1
	RouteDirect          = 2
	RouteTransportDirect = 3
)

// Payload type constants (header bits 5-2).
const (
	PayloadREQ        = 0x00
	PayloadRESPONSE   = 0x01
	PayloadTXT_MSG    = 0x02
	PayloadACK        = 0x03
	PayloadADVERT     = 0x04
	PayloadGRP_TXT    = 0x05
	PayloadGRP_DATA   = 0x06
	PayloadANON_REQ   = 0x07
	PayloadPATH       = 0x08
	PayloadTRACE      = 0x09
	PayloadMULTIPART  = 0x0A
	PayloadCONTROL    = 0x0B
	PayloadRAW_CUSTOM = 0x0F
)

// routeTypeNames maps route type values to their display names.
var routeTypeNames = map[int]string{
	0: "TRANSPORT_FLOOD",
	1: "FLOOD",
	2: "DIRECT",
	3: "TRANSPORT_DIRECT",
}

// payloadTypeNames maps payload type values to their display names.
var payloadTypeNames = map[int]string{
	0x00: "REQ",
	0x01: "RESPONSE",
	0x02: "TXT_MSG",
	0x03: "ACK",
	0x04: "ADVERT",
	0x05: "GRP_TXT",
	0x06: "GRP_DATA",
	0x07: "ANON_REQ",
	0x08: "PATH",
	0x09: "TRACE",
	0x0A: "MULTIPART",
	0x0B: "CONTROL",
	0x0F: "RAW_CUSTOM",
}
// Header is the decoded packet header.
type Header struct {
	RouteType       int    `json:"routeType"`       // header bits 1-0
	RouteTypeName   string `json:"routeTypeName"`   // display name, "UNKNOWN" if unmapped
	PayloadType     int    `json:"payloadType"`     // header bits 5-2
	PayloadTypeName string `json:"payloadTypeName"` // display name, "UNKNOWN" if unmapped
	PayloadVersion  int    `json:"payloadVersion"`  // header bits 7-6
}

// TransportCodes are present on TRANSPORT_FLOOD and TRANSPORT_DIRECT routes.
type TransportCodes struct {
	Code1 string `json:"code1"` // uppercase hex, 2 bytes
	Code2 string `json:"code2"` // uppercase hex, 2 bytes
}

// Path holds decoded path/hop information.
type Path struct {
	HashSize  int      `json:"hashSize"`  // bytes per hop hash (1-4)
	HashCount int      `json:"hashCount"` // declared hop count
	Hops      []string `json:"hops"`      // uppercase hex hop hashes
}

// AdvertFlags holds decoded advert flag bits.
type AdvertFlags struct {
	Raw         int  `json:"raw"`
	Type        int  `json:"type"` // low nibble of the flags byte
	Chat        bool `json:"chat"`
	Repeater    bool `json:"repeater"`
	Room        bool `json:"room"`
	Sensor      bool `json:"sensor"`
	HasLocation bool `json:"hasLocation"`
	HasFeat1    bool `json:"hasFeat1"`
	HasFeat2    bool `json:"hasFeat2"`
	HasName     bool `json:"hasName"`
}

// Payload is a generic decoded payload. Fields are populated depending on type.
type Payload struct {
	Type string `json:"type"`
	// Envelope fields shared by the encrypted payload kinds.
	DestHash      string `json:"destHash,omitempty"`
	SrcHash       string `json:"srcHash,omitempty"`
	MAC           string `json:"mac,omitempty"`
	EncryptedData string `json:"encryptedData,omitempty"`
	ExtraHash     string `json:"extraHash,omitempty"`
	// ADVERT fields.
	PubKey       string       `json:"pubKey,omitempty"`
	Timestamp    uint32       `json:"timestamp,omitempty"`
	TimestampISO string       `json:"timestampISO,omitempty"`
	Signature    string       `json:"signature,omitempty"`
	Flags        *AdvertFlags `json:"flags,omitempty"`
	Lat          *float64     `json:"lat,omitempty"`
	Lon          *float64     `json:"lon,omitempty"`
	Name         string       `json:"name,omitempty"`
	Feat1        *int         `json:"feat1,omitempty"`
	Feat2        *int         `json:"feat2,omitempty"`
	BatteryMv    *int         `json:"battery_mv,omitempty"`
	TemperatureC *float64     `json:"temperature_c,omitempty"`
	// GRP_TXT / channel fields.
	ChannelHash      int    `json:"channelHash,omitempty"`
	ChannelHashHex   string `json:"channelHashHex,omitempty"`
	DecryptionStatus string `json:"decryptionStatus,omitempty"`
	Channel          string `json:"channel,omitempty"`
	Text             string `json:"text,omitempty"`
	Sender           string `json:"sender,omitempty"`
	SenderTimestamp  uint32 `json:"sender_timestamp,omitempty"`
	// ANON_REQ / PATH / TRACE fields.
	EphemeralPubKey string `json:"ephemeralPubKey,omitempty"`
	PathData        string `json:"pathData,omitempty"`
	Tag             uint32 `json:"tag,omitempty"`
	AuthCode        uint32 `json:"authCode,omitempty"`
	TraceFlags      *int   `json:"traceFlags,omitempty"`
	RawHex          string `json:"raw,omitempty"`
	Error           string `json:"error,omitempty"`
}

// DecodedPacket is the full decoded result.
type DecodedPacket struct {
	Header         Header          `json:"header"`
	TransportCodes *TransportCodes `json:"transportCodes"` // nil on non-transport routes
	Path           Path            `json:"path"`
	Payload        Payload         `json:"payload"`
	Raw            string          `json:"raw"` // uppercase hex of the whole packet
}
// decodeHeader unpacks the single header byte into route type (bits 1-0),
// payload type (bits 5-2) and payload version (bits 7-6). Unmapped route or
// payload values get the name "UNKNOWN".
func decodeHeader(b byte) Header {
	route := int(b & 0x03)
	ptype := int(b>>2) & 0x0F
	h := Header{
		RouteType:       route,
		PayloadType:     ptype,
		PayloadVersion:  int(b>>6) & 0x03,
		RouteTypeName:   "UNKNOWN",
		PayloadTypeName: "UNKNOWN",
	}
	if name := routeTypeNames[route]; name != "" {
		h.RouteTypeName = name
	}
	if name := payloadTypeNames[ptype]; name != "" {
		h.PayloadTypeName = name
	}
	return h
}
// decodePath decodes the path byte (hash size in bits 7-6, hop count in
// bits 5-0) and extracts up to hashCount hop hashes from buf starting at
// offset. The second return value is the number of path bytes the packet
// DECLARES (hashSize*hashCount), even if buf is too short to hold them all;
// hops beyond the end of buf are silently dropped.
func decodePath(pathByte byte, buf []byte, offset int) (Path, int) {
	size := int(pathByte>>6) + 1
	count := int(pathByte & 0x3F)
	p := Path{
		HashSize:  size,
		HashCount: count,
		Hops:      make([]string, 0, count),
	}
	for i := 0; i < count; i++ {
		lo := offset + i*size
		hi := lo + size
		if hi > len(buf) {
			break // truncated packet: keep the hops decoded so far
		}
		p.Hops = append(p.Hops, strings.ToUpper(hex.EncodeToString(buf[lo:hi])))
	}
	return p, size * count
}
// isTransportRoute reports whether the route type carries transport codes.
func isTransportRoute(routeType int) bool {
	switch routeType {
	case RouteTransportFlood, RouteTransportDirect:
		return true
	}
	return false
}
// decodeEncryptedPayload decodes the layout shared by REQ, RESPONSE and
// TXT_MSG payloads: destHash(1) srcHash(1) mac(2) ciphertext(rest).
func decodeEncryptedPayload(typeName string, buf []byte) Payload {
	if len(buf) < 4 {
		return Payload{Type: typeName, Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	p := Payload{Type: typeName}
	p.DestHash = hex.EncodeToString(buf[:1])
	p.SrcHash = hex.EncodeToString(buf[1:2])
	p.MAC = hex.EncodeToString(buf[2:4])
	p.EncryptedData = hex.EncodeToString(buf[4:])
	return p
}
// decodeAck decodes an ACK payload: a 4-byte little-endian checksum,
// reported as zero-padded hex in ExtraHash.
func decodeAck(buf []byte) Payload {
	if len(buf) < 4 {
		return Payload{Type: "ACK", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	return Payload{
		Type:      "ACK",
		ExtraHash: fmt.Sprintf("%08x", binary.LittleEndian.Uint32(buf[:4])),
	}
}
// decodeAdvert decodes an ADVERT payload:
//
//	pubKey(32) timestamp(4 LE) signature(64) appdata(rest)
//
// appdata begins with a flags byte followed by optional location (8 bytes),
// feature words (2 bytes each), a null-terminated name, and — for sensor
// nodes — telemetry bytes (battery mV + temperature).
//
// Fix: TimestampISO was built with fmt.Sprintf("%s", epochToISO(...)); the
// Sprintf wrapper around an already-string value was redundant (staticcheck
// S1025) and has been removed. No behavior change.
func decodeAdvert(buf []byte) Payload {
	if len(buf) < 100 {
		return Payload{Type: "ADVERT", Error: "too short for advert", RawHex: hex.EncodeToString(buf)}
	}
	pubKey := hex.EncodeToString(buf[0:32])
	timestamp := binary.LittleEndian.Uint32(buf[32:36])
	signature := hex.EncodeToString(buf[36:100])
	appdata := buf[100:]
	p := Payload{
		Type:         "ADVERT",
		PubKey:       pubKey,
		Timestamp:    timestamp,
		TimestampISO: epochToISO(timestamp),
		Signature:    signature,
	}
	if len(appdata) > 0 {
		flags := appdata[0]
		advType := int(flags & 0x0F)
		hasFeat1 := flags&0x20 != 0
		hasFeat2 := flags&0x40 != 0
		p.Flags = &AdvertFlags{
			Raw:         int(flags),
			Type:        advType,
			Chat:        advType == 1,
			Repeater:    advType == 2,
			Room:        advType == 3,
			Sensor:      advType == 4,
			HasLocation: flags&0x10 != 0,
			HasFeat1:    hasFeat1,
			HasFeat2:    hasFeat2,
			HasName:     flags&0x80 != 0,
		}
		off := 1
		if p.Flags.HasLocation && len(appdata) >= off+8 {
			// Coordinates are signed micro-degrees, little-endian.
			latRaw := int32(binary.LittleEndian.Uint32(appdata[off : off+4]))
			lonRaw := int32(binary.LittleEndian.Uint32(appdata[off+4 : off+8]))
			lat := float64(latRaw) / 1e6
			lon := float64(lonRaw) / 1e6
			p.Lat = &lat
			p.Lon = &lon
			off += 8
		}
		if hasFeat1 && len(appdata) >= off+2 {
			feat1 := int(binary.LittleEndian.Uint16(appdata[off : off+2]))
			p.Feat1 = &feat1
			off += 2
		}
		if hasFeat2 && len(appdata) >= off+2 {
			feat2 := int(binary.LittleEndian.Uint16(appdata[off : off+2]))
			p.Feat2 = &feat2
			off += 2
		}
		if p.Flags.HasName {
			// Find null terminator to separate name from trailing telemetry bytes.
			nameEnd := len(appdata)
			for i := off; i < len(appdata); i++ {
				if appdata[i] == 0x00 {
					nameEnd = i
					break
				}
			}
			p.Name = sanitizeName(string(appdata[off:nameEnd]))
			off = nameEnd
			// Skip null terminator(s).
			for off < len(appdata) && appdata[off] == 0x00 {
				off++
			}
		}
		// Telemetry bytes after name: battery_mv(2 LE) + temperature_c(2 LE, signed, /100).
		// Only sensor nodes (advType=4) carry telemetry bytes.
		if p.Flags.Sensor && off+4 <= len(appdata) {
			batteryMv := int(binary.LittleEndian.Uint16(appdata[off : off+2]))
			tempRaw := int16(binary.LittleEndian.Uint16(appdata[off+2 : off+4]))
			tempC := float64(tempRaw) / 100.0
			if batteryMv > 0 && batteryMv <= 10000 {
				p.BatteryMv = &batteryMv
			}
			// Raw int16 / 100 → °C; accept -50°C to 100°C (raw: -5000 to 10000).
			if tempRaw >= -5000 && tempRaw <= 10000 {
				p.TemperatureC = &tempC
			}
		}
	}
	return p
}
// channelDecryptResult holds the decrypted channel message fields.
type channelDecryptResult struct {
	Timestamp uint32 // sender timestamp, Unix epoch seconds (little-endian in plaintext)
	Flags     byte   // raw flags byte following the timestamp
	Sender    string // parsed from a leading "sender: " prefix, empty if absent
	Message   string // message body (without the sender prefix, if one was parsed)
}
// countNonPrintable returns how many runes in s are control characters
// below 0x20 (excluding '\n' and '\t') or the UTF-8 replacement rune.
func countNonPrintable(s string) int {
	n := 0
	for _, r := range s {
		switch {
		case r == '\n' || r == '\t':
			// allowed whitespace — never counted
		case r < 0x20 || r == utf8.RuneError:
			n++
		}
	}
	return n
}
// decryptChannelMessage implements MeshCore channel decryption:
// HMAC-SHA256 MAC verification followed by AES-128-ECB decryption.
//
// ciphertextHex and macHex come from a GRP_TXT payload; channelKeyHex is the
// 16-byte (32 hex char) channel key. On success it returns the decrypted
// timestamp, flags byte, optional sender and message text; any validation
// failure returns a nil result and an error.
func decryptChannelMessage(ciphertextHex, macHex, channelKeyHex string) (*channelDecryptResult, error) {
	channelKey, err := hex.DecodeString(channelKeyHex)
	if err != nil || len(channelKey) != 16 {
		return nil, fmt.Errorf("invalid channel key")
	}
	macBytes, err := hex.DecodeString(macHex)
	if err != nil || len(macBytes) != 2 {
		return nil, fmt.Errorf("invalid MAC")
	}
	ciphertext, err := hex.DecodeString(ciphertextHex)
	if err != nil || len(ciphertext) == 0 {
		return nil, fmt.Errorf("invalid ciphertext")
	}
	// 32-byte channel secret: 16-byte key + 16 zero bytes
	channelSecret := make([]byte, 32)
	copy(channelSecret, channelKey)
	// Verify HMAC-SHA256 (first 2 bytes must match provided MAC).
	// Only two digest bytes travel on the wire, so only two are compared.
	h := hmac.New(sha256.New, channelSecret)
	h.Write(ciphertext)
	calculatedMac := h.Sum(nil)
	if calculatedMac[0] != macBytes[0] || calculatedMac[1] != macBytes[1] {
		return nil, fmt.Errorf("MAC verification failed")
	}
	// AES-128-ECB decrypt (block-by-block, no padding)
	if len(ciphertext)%aes.BlockSize != 0 {
		return nil, fmt.Errorf("ciphertext not aligned to AES block size")
	}
	block, err := aes.NewCipher(channelKey)
	if err != nil {
		return nil, fmt.Errorf("AES cipher: %w", err)
	}
	plaintext := make([]byte, len(ciphertext))
	for i := 0; i < len(ciphertext); i += aes.BlockSize {
		block.Decrypt(plaintext[i:i+aes.BlockSize], ciphertext[i:i+aes.BlockSize])
	}
	// Parse: timestamp(4 LE) + flags(1) + message(UTF-8, null-terminated)
	if len(plaintext) < 5 {
		return nil, fmt.Errorf("decrypted content too short")
	}
	timestamp := binary.LittleEndian.Uint32(plaintext[0:4])
	flags := plaintext[4]
	messageText := string(plaintext[5:])
	// Trim at the first NUL; everything after is block padding.
	if idx := strings.IndexByte(messageText, 0); idx >= 0 {
		messageText = messageText[:idx]
	}
	// Validate decrypted text is printable UTF-8 (not binary garbage);
	// output failing this check is rejected as a failed decryption.
	if !utf8.ValidString(messageText) || countNonPrintable(messageText) > 2 {
		return nil, fmt.Errorf("decrypted text contains non-printable characters")
	}
	result := &channelDecryptResult{Timestamp: timestamp, Flags: flags}
	// Parse "sender: message" format. The prefix is accepted only within the
	// first 50 characters and only if it contains none of ':', '[' or ']'.
	colonIdx := strings.Index(messageText, ": ")
	if colonIdx > 0 && colonIdx < 50 {
		potentialSender := messageText[:colonIdx]
		if !strings.ContainsAny(potentialSender, ":[]") {
			result.Sender = potentialSender
			result.Message = messageText[colonIdx+2:]
		} else {
			result.Message = messageText
		}
	} else {
		result.Message = messageText
	}
	return result, nil
}
// decodeGrpTxt decodes a GRP_TXT payload: channelHash(1) mac(2) ciphertext.
// When channel keys are configured it tries each key in turn; the first one
// whose MAC verifies produces a decrypted CHAN payload. Otherwise the raw
// envelope is returned with a DecryptionStatus of "no_key" or
// "decryption_failed".
func decodeGrpTxt(buf []byte, channelKeys map[string]string) Payload {
	if len(buf) < 3 {
		return Payload{Type: "GRP_TXT", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	hash := int(buf[0])
	hashHex := fmt.Sprintf("%02X", buf[0])
	macHex := hex.EncodeToString(buf[1:3])
	cipherHex := hex.EncodeToString(buf[3:])

	undecrypted := Payload{
		Type:           "GRP_TXT",
		ChannelHash:    hash,
		ChannelHashHex: hashHex,
		MAC:            macHex,
		EncryptedData:  cipherHex,
	}
	// Match Node.js: only attempt decryption when keys exist and the
	// encrypted data is at least 5 bytes (10 hex chars).
	if len(channelKeys) == 0 || len(cipherHex) < 10 {
		undecrypted.DecryptionStatus = "no_key"
		return undecrypted
	}
	for name, key := range channelKeys {
		res, err := decryptChannelMessage(cipherHex, macHex, key)
		if err != nil {
			continue
		}
		text := res.Message
		if res.Sender != "" && res.Message != "" {
			text = res.Sender + ": " + res.Message
		}
		return Payload{
			Type:             "CHAN",
			Channel:          name,
			ChannelHash:      hash,
			ChannelHashHex:   hashHex,
			DecryptionStatus: "decrypted",
			Sender:           res.Sender,
			Text:             text,
			SenderTimestamp:  res.Timestamp,
		}
	}
	undecrypted.DecryptionStatus = "decryption_failed"
	return undecrypted
}
// decodeAnonReq decodes an ANON_REQ payload:
// destHash(1) ephemeralPubKey(32) mac(2) ciphertext(rest).
func decodeAnonReq(buf []byte) Payload {
	if len(buf) < 35 {
		return Payload{Type: "ANON_REQ", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	p := Payload{Type: "ANON_REQ"}
	p.DestHash = hex.EncodeToString(buf[:1])
	p.EphemeralPubKey = hex.EncodeToString(buf[1:33])
	p.MAC = hex.EncodeToString(buf[33:35])
	p.EncryptedData = hex.EncodeToString(buf[35:])
	return p
}
// decodePathPayload decodes a PATH payload:
// destHash(1) srcHash(1) mac(2) pathData(rest).
func decodePathPayload(buf []byte) Payload {
	if len(buf) < 4 {
		return Payload{Type: "PATH", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	p := Payload{Type: "PATH"}
	p.DestHash = hex.EncodeToString(buf[:1])
	p.SrcHash = hex.EncodeToString(buf[1:2])
	p.MAC = hex.EncodeToString(buf[2:4])
	p.PathData = hex.EncodeToString(buf[4:])
	return p
}
// decodeTrace decodes a TRACE payload:
// tag(4 LE) authCode(4 LE) flags(1) pathData(rest, optional).
func decodeTrace(buf []byte) Payload {
	if len(buf) < 9 {
		return Payload{Type: "TRACE", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	traceFlags := int(buf[8])
	p := Payload{
		Type:       "TRACE",
		Tag:        binary.LittleEndian.Uint32(buf[0:4]),
		AuthCode:   binary.LittleEndian.Uint32(buf[4:8]),
		TraceFlags: &traceFlags,
	}
	if rest := buf[9:]; len(rest) > 0 {
		p.PathData = hex.EncodeToString(rest)
	}
	return p
}
// decodePayload dispatches on payload type to the matching decoder.
// channelKeys is only consulted for GRP_TXT payloads.
func decodePayload(payloadType int, buf []byte, channelKeys map[string]string) Payload {
	switch payloadType {
	case PayloadREQ, PayloadRESPONSE, PayloadTXT_MSG:
		// These three share the same encrypted envelope layout; the type
		// name comes straight from the display-name table.
		return decodeEncryptedPayload(payloadTypeNames[payloadType], buf)
	case PayloadACK:
		return decodeAck(buf)
	case PayloadADVERT:
		return decodeAdvert(buf)
	case PayloadGRP_TXT:
		return decodeGrpTxt(buf, channelKeys)
	case PayloadANON_REQ:
		return decodeAnonReq(buf)
	case PayloadPATH:
		return decodePathPayload(buf)
	case PayloadTRACE:
		return decodeTrace(buf)
	}
	return Payload{Type: "UNKNOWN", RawHex: hex.EncodeToString(buf)}
}
// DecodePacket decodes a hex-encoded MeshCore packet. Whitespace (spaces,
// newlines, carriage returns) in the input is ignored. channelKeys, when
// non-empty, enables GRP_TXT decryption attempts.
//
// Fix: the path byte can declare more hops than the packet actually
// contains; previously offset was advanced past len(buf) and the
// buf[offset:] slice below panicked on such malformed packets. We now
// return an error instead.
func DecodePacket(hexString string, channelKeys map[string]string) (*DecodedPacket, error) {
	hexString = strings.ReplaceAll(hexString, " ", "")
	hexString = strings.ReplaceAll(hexString, "\n", "")
	hexString = strings.ReplaceAll(hexString, "\r", "")
	buf, err := hex.DecodeString(hexString)
	if err != nil {
		return nil, fmt.Errorf("invalid hex: %w", err)
	}
	if len(buf) < 2 {
		return nil, fmt.Errorf("packet too short (need at least header + pathLength)")
	}
	header := decodeHeader(buf[0])
	offset := 1
	var tc *TransportCodes
	if isTransportRoute(header.RouteType) {
		if len(buf) < offset+4 {
			return nil, fmt.Errorf("packet too short for transport codes")
		}
		tc = &TransportCodes{
			Code1: strings.ToUpper(hex.EncodeToString(buf[offset : offset+2])),
			Code2: strings.ToUpper(hex.EncodeToString(buf[offset+2 : offset+4])),
		}
		offset += 4
	}
	if offset >= len(buf) {
		return nil, fmt.Errorf("packet too short (no path byte)")
	}
	pathByte := buf[offset]
	offset++
	path, bytesConsumed := decodePath(pathByte, buf, offset)
	offset += bytesConsumed
	// decodePath returns the DECLARED path length; clamp-check it so a
	// malformed path byte cannot push offset past the end of the buffer.
	if offset > len(buf) {
		return nil, fmt.Errorf("packet too short for declared path (%d hops of %d bytes)", path.HashCount, path.HashSize)
	}
	payloadBuf := buf[offset:]
	payload := decodePayload(header.PayloadType, payloadBuf, channelKeys)
	// TRACE packets store hop IDs in the payload (buf[9:]) rather than the header
	// path field. The header path byte still encodes hashSize in bits 6-7, which
	// we use to split the payload path data into individual hop prefixes.
	if header.PayloadType == PayloadTRACE && payload.PathData != "" {
		pathBytes, err := hex.DecodeString(payload.PathData)
		if err == nil && path.HashSize > 0 {
			hops := make([]string, 0, len(pathBytes)/path.HashSize)
			for i := 0; i+path.HashSize <= len(pathBytes); i += path.HashSize {
				hops = append(hops, strings.ToUpper(hex.EncodeToString(pathBytes[i:i+path.HashSize])))
			}
			path.Hops = hops
			path.HashCount = len(hops)
		}
	}
	return &DecodedPacket{
		Header:         header,
		TransportCodes: tc,
		Path:           path,
		Payload:        payload,
		Raw:            strings.ToUpper(hexString),
	}, nil
}
// ComputeContentHash computes the SHA-256-based content hash (first 16 hex
// chars). It hashes the header byte + payload, skipping transport codes and
// path bytes, producing a path-independent identifier for the same
// transmission. Inputs that cannot be parsed fall back to the first 16
// characters of the raw hex string.
func ComputeContentHash(rawHex string) string {
	fallback := func() string {
		if len(rawHex) >= 16 {
			return rawHex[:16]
		}
		return rawHex
	}
	buf, err := hex.DecodeString(rawHex)
	if err != nil || len(buf) < 2 {
		return fallback()
	}
	headerByte := buf[0]
	offset := 1
	if isTransportRoute(int(headerByte & 0x03)) {
		offset += 4 // skip transport codes
	}
	if offset >= len(buf) {
		return fallback()
	}
	pathByte := buf[offset]
	offset++
	hashSize := int((pathByte>>6)&0x3) + 1
	skip := hashSize * int(pathByte&0x3F)
	payloadStart := offset + skip
	if payloadStart > len(buf) {
		return fallback()
	}
	h := sha256.New()
	h.Write([]byte{headerByte})
	h.Write(buf[payloadStart:])
	return hex.EncodeToString(h.Sum(nil))[:16]
}
// PayloadJSON serializes the payload to JSON for DB storage, returning "{}"
// if marshalling fails.
func PayloadJSON(p *Payload) string {
	if b, err := json.Marshal(p); err == nil {
		return string(b)
	}
	return "{}"
}
// ValidateAdvert checks decoded advert data before DB insertion. It returns
// false plus a human-readable reason when the advert should be rejected.
func ValidateAdvert(p *Payload) (bool, string) {
	if p == nil {
		return false, "null advert"
	}
	if p.Error != "" {
		return false, p.Error
	}
	if len(p.PubKey) < 16 {
		return false, fmt.Sprintf("pubkey too short (%d hex chars)", len(p.PubKey))
	}
	// A pubkey consisting solely of '0' characters is rejected.
	if strings.Trim(p.PubKey, "0") == "" {
		return false, "pubkey is all zeros"
	}
	if p.Lat != nil && (math.IsInf(*p.Lat, 0) || math.IsNaN(*p.Lat) || *p.Lat < -90 || *p.Lat > 90) {
		return false, fmt.Sprintf("invalid lat: %f", *p.Lat)
	}
	if p.Lon != nil && (math.IsInf(*p.Lon, 0) || math.IsNaN(*p.Lon) || *p.Lon < -180 || *p.Lon > 180) {
		return false, fmt.Sprintf("invalid lon: %f", *p.Lon)
	}
	if p.Name != "" {
		for _, c := range p.Name {
			// Control characters other than tab/newline/CR, plus DEL.
			if c <= 0x08 || c == 0x0b || c == 0x0c || (c >= 0x0e && c <= 0x1f) || c == 0x7f {
				return false, "name contains control characters"
			}
		}
		// NOTE(review): len counts bytes, not runes, despite the message
		// saying "chars" — multibyte names hit this limit sooner.
		if len(p.Name) > 64 {
			return false, fmt.Sprintf("name too long (%d chars)", len(p.Name))
		}
	}
	if p.Flags != nil {
		role := advertRole(p.Flags)
		switch role {
		case "repeater", "companion", "room", "sensor":
			// known role — accepted
		default:
			return false, fmt.Sprintf("unknown role: %s", role)
		}
	}
	return true, ""
}
// sanitizeName drops non-printable runes — control characters below 0x20
// other than tab and newline, plus DEL (0x7f) — keeping everything else.
func sanitizeName(s string) string {
	out := make([]rune, 0, len(s))
	for _, r := range s {
		switch {
		case r == '\t', r == '\n':
			out = append(out, r)
		case r >= 0x20 && r != 0x7f:
			out = append(out, r)
		}
	}
	return string(out)
}
// advertRole maps decoded advert flags to a role string; Repeater wins over
// Room, which wins over Sensor, and anything else is a companion.
func advertRole(f *AdvertFlags) string {
	switch {
	case f.Repeater:
		return "repeater"
	case f.Room:
		return "room"
	case f.Sensor:
		return "sensor"
	default:
		return "companion"
	}
}
// epochToISO formats a Unix-epoch value as an ISO-8601 UTC timestamp with
// millisecond precision, e.g. "2026-03-30T21:03:00.000Z".
func epochToISO(epoch uint32) string {
	// unixTime is presumably a package-level helper wrapping time.Unix
	// (defined outside this file) — TODO confirm it treats epoch as seconds.
	t := unixTime(int64(epoch))
	return t.UTC().Format("2006-01-02T15:04:05.000Z")
}
package main
import dec "github.com/corescope/internal/decoder"
// Re-exported route and payload type constants, forwarded from the shared
// decoder package so existing code in this package keeps compiling.
const (
	RouteTransportFlood  = dec.RouteTransportFlood
	RouteFlood           = dec.RouteFlood
	RouteDirect          = dec.RouteDirect
	RouteTransportDirect = dec.RouteTransportDirect

	PayloadREQ        = dec.PayloadREQ
	PayloadRESPONSE   = dec.PayloadRESPONSE
	PayloadTXT_MSG    = dec.PayloadTXT_MSG
	PayloadACK        = dec.PayloadACK
	PayloadADVERT     = dec.PayloadADVERT
	PayloadGRP_TXT    = dec.PayloadGRP_TXT
	PayloadGRP_DATA   = dec.PayloadGRP_DATA
	PayloadANON_REQ   = dec.PayloadANON_REQ
	PayloadPATH       = dec.PayloadPATH
	PayloadTRACE      = dec.PayloadTRACE
	PayloadMULTIPART  = dec.PayloadMULTIPART
	PayloadCONTROL    = dec.PayloadCONTROL
	PayloadRAW_CUSTOM = dec.PayloadRAW_CUSTOM
)
// Type aliases re-exporting the shared decoder types under their old names.
type Header = dec.Header
type TransportCodes = dec.TransportCodes
type Path = dec.Path
type AdvertFlags = dec.AdvertFlags
type Payload = dec.Payload
type DecodedPacket = dec.DecodedPacket
// DecodePacket forwards to the shared decoder package.
func DecodePacket(hexString string, channelKeys map[string]string) (*DecodedPacket, error) {
	return dec.DecodePacket(hexString, channelKeys)
}

// ComputeContentHash forwards to the shared decoder package.
func ComputeContentHash(rawHex string) string {
	return dec.ComputeContentHash(rawHex)
}

// PayloadJSON forwards to the shared decoder package.
func PayloadJSON(p *Payload) string {
	return dec.PayloadJSON(p)
}

// ValidateAdvert forwards to the shared decoder package.
func ValidateAdvert(p *Payload) (bool, string) {
	return dec.ValidateAdvert(p)
}

// advertRole forwards to the shared decoder package's exported AdvertRole.
func advertRole(f *AdvertFlags) string {
	return dec.AdvertRole(f)
}

// epochToISO forwards to the shared decoder package's exported EpochToISO.
func epochToISO(epoch uint32) string {
	return dec.EpochToISO(epoch)
}

View File

@@ -3,6 +3,7 @@ module github.com/corescope/ingestor
go 1.22
require (
github.com/corescope v0.0.0
github.com/eclipse/paho.mqtt.golang v1.5.0
modernc.org/sqlite v1.34.5
)
@@ -21,3 +22,5 @@ require (
modernc.org/mathutil v1.6.0 // indirect
modernc.org/memory v1.8.0 // indirect
)
replace github.com/corescope => ../..

View File

@@ -1,537 +1,47 @@
package main
import (
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"encoding/json"
"fmt"
"math"
"strings"
"time"
)
// Route type constants (header bits 1-0).
const (
	RouteTransportFlood  = 0
	RouteFlood           = 1
	RouteDirect          = 2
	RouteTransportDirect = 3
)

// Payload type constants (header bits 5-2).
const (
	PayloadREQ        = 0x00
	PayloadRESPONSE   = 0x01
	PayloadTXT_MSG    = 0x02
	PayloadACK        = 0x03
	PayloadADVERT     = 0x04
	PayloadGRP_TXT    = 0x05
	PayloadGRP_DATA   = 0x06
	PayloadANON_REQ   = 0x07
	PayloadPATH       = 0x08
	PayloadTRACE      = 0x09
	PayloadMULTIPART  = 0x0A
	PayloadCONTROL    = 0x0B
	PayloadRAW_CUSTOM = 0x0F
)

// routeTypeNames maps route type values to their display names.
var routeTypeNames = map[int]string{
	0: "TRANSPORT_FLOOD",
	1: "FLOOD",
	2: "DIRECT",
	3: "TRANSPORT_DIRECT",
}
// Header is the decoded packet header.
type Header struct {
	RouteType       int    `json:"routeType"`       // header bits 1-0
	RouteTypeName   string `json:"routeTypeName"`   // display name, "UNKNOWN" if unmapped
	PayloadType     int    `json:"payloadType"`     // header bits 5-2
	PayloadTypeName string `json:"payloadTypeName"` // display name, "UNKNOWN" if unmapped
	PayloadVersion  int    `json:"payloadVersion"`  // header bits 7-6
}

// TransportCodes are present on TRANSPORT_FLOOD and TRANSPORT_DIRECT routes.
type TransportCodes struct {
	Code1 string `json:"code1"` // uppercase hex, 2 bytes
	Code2 string `json:"code2"` // uppercase hex, 2 bytes
}

// Path holds decoded path/hop information.
type Path struct {
	HashSize  int      `json:"hashSize"`  // bytes per hop hash (1-4)
	HashCount int      `json:"hashCount"` // declared hop count
	Hops      []string `json:"hops"`      // uppercase hex hop hashes
}

// AdvertFlags holds decoded advert flag bits.
type AdvertFlags struct {
	Raw         int  `json:"raw"`
	Type        int  `json:"type"` // low nibble of the flags byte
	Chat        bool `json:"chat"`
	Repeater    bool `json:"repeater"`
	Room        bool `json:"room"`
	Sensor      bool `json:"sensor"`
	HasLocation bool `json:"hasLocation"`
	HasFeat1    bool `json:"hasFeat1"`
	HasFeat2    bool `json:"hasFeat2"`
	HasName     bool `json:"hasName"`
}

// Payload is a generic decoded payload. Fields are populated depending on type.
type Payload struct {
	Type          string       `json:"type"`
	DestHash      string       `json:"destHash,omitempty"`
	SrcHash       string       `json:"srcHash,omitempty"`
	MAC           string       `json:"mac,omitempty"`
	EncryptedData string       `json:"encryptedData,omitempty"`
	ExtraHash     string       `json:"extraHash,omitempty"`
	PubKey        string       `json:"pubKey,omitempty"`
	Timestamp     uint32       `json:"timestamp,omitempty"`
	TimestampISO  string       `json:"timestampISO,omitempty"`
	Signature     string       `json:"signature,omitempty"`
	Flags         *AdvertFlags `json:"flags,omitempty"`
	Lat           *float64     `json:"lat,omitempty"`
	Lon           *float64     `json:"lon,omitempty"`
	Name          string       `json:"name,omitempty"`
	ChannelHash   int          `json:"channelHash,omitempty"`
	EphemeralPubKey string     `json:"ephemeralPubKey,omitempty"`
	PathData      string       `json:"pathData,omitempty"`
	Tag           uint32       `json:"tag,omitempty"`
	AuthCode      uint32       `json:"authCode,omitempty"`
	TraceFlags    *int         `json:"traceFlags,omitempty"`
	RawHex        string       `json:"raw,omitempty"`
	Error         string       `json:"error,omitempty"`
}

// DecodedPacket is the full decoded result.
type DecodedPacket struct {
	Header         Header          `json:"header"`
	TransportCodes *TransportCodes `json:"transportCodes"` // nil on non-transport routes
	Path           Path            `json:"path"`
	Payload        Payload         `json:"payload"`
	Raw            string          `json:"raw"` // uppercase hex of the whole packet
}
// decodeHeader unpacks the single header byte into route type (bits 1-0),
// payload type (bits 5-2) and payload version (bits 7-6). Unmapped route or
// payload values get the name "UNKNOWN".
func decodeHeader(b byte) Header {
	route := int(b & 0x03)
	ptype := int(b>>2) & 0x0F
	h := Header{
		RouteType:       route,
		PayloadType:     ptype,
		PayloadVersion:  int(b>>6) & 0x03,
		RouteTypeName:   "UNKNOWN",
		PayloadTypeName: "UNKNOWN",
	}
	if name := routeTypeNames[route]; name != "" {
		h.RouteTypeName = name
	}
	if name := payloadTypeNames[ptype]; name != "" {
		h.PayloadTypeName = name
	}
	return h
}
// decodePath decodes the path byte (hash size in bits 7-6, hop count in
// bits 5-0) and extracts up to hashCount hop hashes from buf starting at
// offset. The second return value is the number of path bytes the packet
// DECLARES (hashSize*hashCount), even if buf is too short to hold them all.
func decodePath(pathByte byte, buf []byte, offset int) (Path, int) {
	size := int(pathByte>>6) + 1
	count := int(pathByte & 0x3F)
	p := Path{
		HashSize:  size,
		HashCount: count,
		Hops:      make([]string, 0, count),
	}
	for i := 0; i < count; i++ {
		lo := offset + i*size
		hi := lo + size
		if hi > len(buf) {
			break // truncated packet: keep the hops decoded so far
		}
		p.Hops = append(p.Hops, strings.ToUpper(hex.EncodeToString(buf[lo:hi])))
	}
	return p, size * count
}
// isTransportRoute reports whether the route type carries transport codes.
func isTransportRoute(routeType int) bool {
	switch routeType {
	case RouteTransportFlood, RouteTransportDirect:
		return true
	}
	return false
}
// decodeEncryptedPayload decodes the layout shared by REQ, RESPONSE and
// TXT_MSG payloads: destHash(1) srcHash(1) mac(2) ciphertext(rest).
func decodeEncryptedPayload(typeName string, buf []byte) Payload {
	if len(buf) < 4 {
		return Payload{Type: typeName, Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	p := Payload{Type: typeName}
	p.DestHash = hex.EncodeToString(buf[:1])
	p.SrcHash = hex.EncodeToString(buf[1:2])
	p.MAC = hex.EncodeToString(buf[2:4])
	p.EncryptedData = hex.EncodeToString(buf[4:])
	return p
}
// decodeAck decodes an ACK payload: a 4-byte little-endian checksum,
// reported as zero-padded hex in ExtraHash.
func decodeAck(buf []byte) Payload {
	if len(buf) < 4 {
		return Payload{Type: "ACK", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	return Payload{
		Type:      "ACK",
		ExtraHash: fmt.Sprintf("%08x", binary.LittleEndian.Uint32(buf[:4])),
	}
}
// decodeAdvert decodes an ADVERT payload:
//
//	pubKey(32) timestamp(4 LE) signature(64) appdata(rest)
//
// appdata begins with a flags byte followed by optional location (8 bytes),
// feature words (2 bytes each, skipped here) and a null-terminated name.
//
// Fix: TimestampISO was built with fmt.Sprintf("%s", epochToISO(...)); the
// Sprintf wrapper around an already-string value was redundant (staticcheck
// S1025) and has been removed. No behavior change.
func decodeAdvert(buf []byte) Payload {
	if len(buf) < 100 {
		return Payload{Type: "ADVERT", Error: "too short for advert", RawHex: hex.EncodeToString(buf)}
	}
	pubKey := hex.EncodeToString(buf[0:32])
	timestamp := binary.LittleEndian.Uint32(buf[32:36])
	signature := hex.EncodeToString(buf[36:100])
	appdata := buf[100:]
	p := Payload{
		Type:         "ADVERT",
		PubKey:       pubKey,
		Timestamp:    timestamp,
		TimestampISO: epochToISO(timestamp),
		Signature:    signature,
	}
	if len(appdata) > 0 {
		flags := appdata[0]
		advType := int(flags & 0x0F)
		hasFeat1 := flags&0x20 != 0
		hasFeat2 := flags&0x40 != 0
		p.Flags = &AdvertFlags{
			Raw:         int(flags),
			Type:        advType,
			Chat:        advType == 1,
			Repeater:    advType == 2,
			Room:        advType == 3,
			Sensor:      advType == 4,
			HasLocation: flags&0x10 != 0,
			HasFeat1:    hasFeat1,
			HasFeat2:    hasFeat2,
			HasName:     flags&0x80 != 0,
		}
		off := 1
		if p.Flags.HasLocation && len(appdata) >= off+8 {
			// Coordinates are signed micro-degrees, little-endian.
			latRaw := int32(binary.LittleEndian.Uint32(appdata[off : off+4]))
			lonRaw := int32(binary.LittleEndian.Uint32(appdata[off+4 : off+8]))
			lat := float64(latRaw) / 1e6
			lon := float64(lonRaw) / 1e6
			p.Lat = &lat
			p.Lon = &lon
			off += 8
		}
		if hasFeat1 && len(appdata) >= off+2 {
			off += 2 // skip feat1 bytes (reserved for future use)
		}
		if hasFeat2 && len(appdata) >= off+2 {
			off += 2 // skip feat2 bytes (reserved for future use)
		}
		if p.Flags.HasName {
			name := string(appdata[off:])
			name = strings.TrimRight(name, "\x00")
			p.Name = sanitizeName(name)
		}
	}
	return p
}
// decodeGrpTxt decodes a GRP_TXT payload: channelHash(1) mac(2) ciphertext.
// This variant performs no decryption; the ciphertext is returned as hex.
func decodeGrpTxt(buf []byte) Payload {
	if len(buf) < 3 {
		return Payload{Type: "GRP_TXT", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	p := Payload{Type: "GRP_TXT"}
	p.ChannelHash = int(buf[0])
	p.MAC = hex.EncodeToString(buf[1:3])
	p.EncryptedData = hex.EncodeToString(buf[3:])
	return p
}
// decodeAnonReq decodes an ANON_REQ payload:
// destHash(1) ephemeralPubKey(32) mac(2) ciphertext(rest).
func decodeAnonReq(buf []byte) Payload {
	if len(buf) < 35 {
		return Payload{Type: "ANON_REQ", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	p := Payload{Type: "ANON_REQ"}
	p.DestHash = hex.EncodeToString(buf[:1])
	p.EphemeralPubKey = hex.EncodeToString(buf[1:33])
	p.MAC = hex.EncodeToString(buf[33:35])
	p.EncryptedData = hex.EncodeToString(buf[35:])
	return p
}
// decodePathPayload decodes a PATH payload:
// destHash(1) srcHash(1) mac(2) pathData(rest).
func decodePathPayload(buf []byte) Payload {
	if len(buf) < 4 {
		return Payload{Type: "PATH", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	p := Payload{Type: "PATH"}
	p.DestHash = hex.EncodeToString(buf[:1])
	p.SrcHash = hex.EncodeToString(buf[1:2])
	p.MAC = hex.EncodeToString(buf[2:4])
	p.PathData = hex.EncodeToString(buf[4:])
	return p
}
// decodeTrace decodes a TRACE payload:
// tag(4 LE) authCode(4 LE) flags(1) pathData(rest, optional).
func decodeTrace(buf []byte) Payload {
	if len(buf) < 9 {
		return Payload{Type: "TRACE", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	traceFlags := int(buf[8])
	p := Payload{
		Type:       "TRACE",
		Tag:        binary.LittleEndian.Uint32(buf[0:4]),
		AuthCode:   binary.LittleEndian.Uint32(buf[4:8]),
		TraceFlags: &traceFlags,
	}
	if rest := buf[9:]; len(rest) > 0 {
		p.PathData = hex.EncodeToString(rest)
	}
	return p
}
// decodePayload dispatches on payload type to the matching decoder.
func decodePayload(payloadType int, buf []byte) Payload {
	// REQ, RESPONSE and TXT_MSG share the same encrypted envelope layout.
	var encName string
	switch payloadType {
	case PayloadREQ:
		encName = "REQ"
	case PayloadRESPONSE:
		encName = "RESPONSE"
	case PayloadTXT_MSG:
		encName = "TXT_MSG"
	}
	if encName != "" {
		return decodeEncryptedPayload(encName, buf)
	}
	switch payloadType {
	case PayloadACK:
		return decodeAck(buf)
	case PayloadADVERT:
		return decodeAdvert(buf)
	case PayloadGRP_TXT:
		return decodeGrpTxt(buf)
	case PayloadANON_REQ:
		return decodeAnonReq(buf)
	case PayloadPATH:
		return decodePathPayload(buf)
	case PayloadTRACE:
		return decodeTrace(buf)
	}
	return Payload{Type: "UNKNOWN", RawHex: hex.EncodeToString(buf)}
}
// DecodePacket decodes a hex-encoded MeshCore packet. Whitespace (spaces,
// newlines, carriage returns) in the input is ignored.
//
// Fix: the path byte can declare more hops than the packet actually
// contains; previously offset was advanced past len(buf) and the
// buf[offset:] slice below panicked on such malformed packets. We now
// return an error instead.
func DecodePacket(hexString string) (*DecodedPacket, error) {
	hexString = strings.ReplaceAll(hexString, " ", "")
	hexString = strings.ReplaceAll(hexString, "\n", "")
	hexString = strings.ReplaceAll(hexString, "\r", "")
	buf, err := hex.DecodeString(hexString)
	if err != nil {
		return nil, fmt.Errorf("invalid hex: %w", err)
	}
	if len(buf) < 2 {
		return nil, fmt.Errorf("packet too short (need at least header + pathLength)")
	}
	header := decodeHeader(buf[0])
	offset := 1
	var tc *TransportCodes
	if isTransportRoute(header.RouteType) {
		if len(buf) < offset+4 {
			return nil, fmt.Errorf("packet too short for transport codes")
		}
		tc = &TransportCodes{
			Code1: strings.ToUpper(hex.EncodeToString(buf[offset : offset+2])),
			Code2: strings.ToUpper(hex.EncodeToString(buf[offset+2 : offset+4])),
		}
		offset += 4
	}
	if offset >= len(buf) {
		return nil, fmt.Errorf("packet too short (no path byte)")
	}
	pathByte := buf[offset]
	offset++
	path, bytesConsumed := decodePath(pathByte, buf, offset)
	offset += bytesConsumed
	// decodePath returns the DECLARED path length; clamp-check it so a
	// malformed path byte cannot push offset past the end of the buffer.
	if offset > len(buf) {
		return nil, fmt.Errorf("packet too short for declared path (%d hops of %d bytes)", path.HashCount, path.HashSize)
	}
	payloadBuf := buf[offset:]
	payload := decodePayload(header.PayloadType, payloadBuf)
	// TRACE packets store hop IDs in the payload (buf[9:]) rather than the header
	// path field. The header path byte still encodes hashSize in bits 6-7, which
	// we use to split the payload path data into individual hop prefixes.
	if header.PayloadType == PayloadTRACE && payload.PathData != "" {
		pathBytes, err := hex.DecodeString(payload.PathData)
		if err == nil && path.HashSize > 0 {
			hops := make([]string, 0, len(pathBytes)/path.HashSize)
			for i := 0; i+path.HashSize <= len(pathBytes); i += path.HashSize {
				hops = append(hops, strings.ToUpper(hex.EncodeToString(pathBytes[i:i+path.HashSize])))
			}
			path.Hops = hops
			path.HashCount = len(hops)
		}
	}
	return &DecodedPacket{
		Header:         header,
		TransportCodes: tc,
		Path:           path,
		Payload:        payload,
		Raw:            strings.ToUpper(hexString),
	}, nil
}
// ComputeContentHash computes the SHA-256-based content hash (first 16 hex chars).
//
// The hash covers the header byte plus the payload bytes, deliberately
// skipping transport codes and the path so the same content relayed over a
// different route hashes identically. Inputs that cannot be parsed as a
// packet fall back to a prefix of the raw hex string.
func ComputeContentHash(rawHex string) string {
	buf, err := hex.DecodeString(rawHex)
	if err != nil || len(buf) < 2 {
		return contentHashFallback(rawHex)
	}
	headerByte := buf[0]
	offset := 1
	if isTransportRoute(int(headerByte & 0x03)) {
		offset += 4 // skip the two 2-byte transport codes
	}
	if offset >= len(buf) {
		return contentHashFallback(rawHex)
	}
	pathByte := buf[offset]
	offset++
	// Path byte: bits 6-7 encode hashSize-1, bits 0-5 the hop count.
	hashSize := int((pathByte>>6)&0x3) + 1
	hashCount := int(pathByte & 0x3F)
	payloadStart := offset + hashSize*hashCount
	if payloadStart > len(buf) {
		return contentHashFallback(rawHex)
	}
	toHash := append([]byte{headerByte}, buf[payloadStart:]...)
	h := sha256.Sum256(toHash)
	return hex.EncodeToString(h[:])[:16]
}

// contentHashFallback returns the first 16 characters of the raw hex (or the
// whole string when shorter) for inputs that are not a decodable packet.
func contentHashFallback(rawHex string) string {
	if len(rawHex) >= 16 {
		return rawHex[:16]
	}
	return rawHex
}
// PayloadJSON serializes the payload to JSON for DB storage.
// A marshalling failure yields the empty JSON object instead of an error.
func PayloadJSON(p *Payload) string {
	if b, err := json.Marshal(p); err == nil {
		return string(b)
	}
	return "{}"
}
// ValidateAdvert checks decoded advert data before DB insertion.
// It returns (false, reason) when the advert must be rejected: nil/errored
// payload, short or all-zero pubkey, out-of-range coordinates, a name with
// control characters or over 64 bytes, or an unrecognized role flag.
func ValidateAdvert(p *Payload) (bool, string) {
	if p == nil || p.Error != "" {
		reason := "null advert"
		if p != nil {
			reason = p.Error
		}
		return false, reason
	}
	pk := p.PubKey
	if len(pk) < 16 {
		return false, fmt.Sprintf("pubkey too short (%d hex chars)", len(pk))
	}
	// pk is non-empty here, so trimming every '0' leaves "" iff all zeros.
	if strings.Trim(pk, "0") == "" {
		return false, "pubkey is all zeros"
	}
	if p.Lat != nil {
		if math.IsInf(*p.Lat, 0) || math.IsNaN(*p.Lat) || *p.Lat < -90 || *p.Lat > 90 {
			return false, fmt.Sprintf("invalid lat: %f", *p.Lat)
		}
	}
	if p.Lon != nil {
		if math.IsInf(*p.Lon, 0) || math.IsNaN(*p.Lon) || *p.Lon < -180 || *p.Lon > 180 {
			return false, fmt.Sprintf("invalid lon: %f", *p.Lon)
		}
	}
	if p.Name != "" {
		for _, c := range p.Name {
			// Reject C0 controls (except \t, \n, \r) and DEL. Runes from a
			// string range are never negative, so no lower bound is needed
			// (the previous `c >= 0x00` was always true).
			if c <= 0x08 || c == 0x0b || c == 0x0c || (c >= 0x0e && c <= 0x1f) || c == 0x7f {
				return false, "name contains control characters"
			}
		}
		// len counts bytes, not runes; the message now says so.
		if len(p.Name) > 64 {
			return false, fmt.Sprintf("name too long (%d bytes)", len(p.Name))
		}
	}
	if p.Flags != nil {
		role := advertRole(p.Flags)
		switch role {
		case "repeater", "companion", "room", "sensor":
			// known role — OK
		default:
			return false, fmt.Sprintf("unknown role: %s", role)
		}
	}
	return true, ""
}
// sanitizeName strips non-printable characters (< 0x20 except tab/newline) and DEL.
func sanitizeName(s string) string {
	return strings.Map(func(r rune) rune {
		if r == '\t' || r == '\n' || (r >= 0x20 && r != 0x7f) {
			return r
		}
		return -1 // drop the rune
	}, s)
}
// advertRole maps advert flag bits to a role string; repeater wins over room,
// room over sensor, and anything else is a companion.
func advertRole(f *AdvertFlags) string {
	switch {
	case f.Repeater:
		return "repeater"
	case f.Room:
		return "room"
	case f.Sensor:
		return "sensor"
	default:
		return "companion"
	}
}
// epochToISO renders a Unix epoch (seconds) as a UTC ISO-8601 timestamp with
// millisecond precision, e.g. "2024-01-02T03:04:05.000Z".
func epochToISO(epoch uint32) string {
	return time.Unix(int64(epoch), 0).UTC().Format("2006-01-02T15:04:05.000Z")
}
package main
import dec "github.com/corescope/internal/decoder"
// Route and payload type constants re-exported from the shared decoder so
// existing cmd/server code keeps compiling after the decoder moved into
// the internal/decoder package.
const (
	RouteTransportFlood  = dec.RouteTransportFlood
	RouteFlood           = dec.RouteFlood
	RouteDirect          = dec.RouteDirect
	RouteTransportDirect = dec.RouteTransportDirect

	PayloadREQ        = dec.PayloadREQ
	PayloadRESPONSE   = dec.PayloadRESPONSE
	PayloadTXT_MSG    = dec.PayloadTXT_MSG
	PayloadACK        = dec.PayloadACK
	PayloadADVERT     = dec.PayloadADVERT
	PayloadGRP_TXT    = dec.PayloadGRP_TXT
	PayloadGRP_DATA   = dec.PayloadGRP_DATA
	PayloadANON_REQ   = dec.PayloadANON_REQ
	PayloadPATH       = dec.PayloadPATH
	PayloadTRACE      = dec.PayloadTRACE
	PayloadMULTIPART  = dec.PayloadMULTIPART
	PayloadCONTROL    = dec.PayloadCONTROL
	PayloadRAW_CUSTOM = dec.PayloadRAW_CUSTOM
)

// Type aliases (not defined types) into the shared decoder package, so
// values flow across the package boundary without conversion.
type Header = dec.Header
type TransportCodes = dec.TransportCodes
type Path = dec.Path
type AdvertFlags = dec.AdvertFlags
type Payload = dec.Payload
type DecodedPacket = dec.DecodedPacket
// DecodePacket decodes a hex-encoded MeshCore packet by delegating to the
// shared decoder. The second argument (nil here) is presumably optional
// key material for payload decryption — NOTE(review): confirm against the
// internal/decoder DecodePacket signature.
func DecodePacket(hexString string) (*DecodedPacket, error) {
	return dec.DecodePacket(hexString, nil)
}

// ComputeContentHash delegates to the shared decoder's content-hash helper.
func ComputeContentHash(rawHex string) string {
	return dec.ComputeContentHash(rawHex)
}

// PayloadJSON delegates payload-to-JSON serialization to the shared decoder.
func PayloadJSON(p *Payload) string {
	return dec.PayloadJSON(p)
}

// ValidateAdvert delegates advert validation to the shared decoder.
func ValidateAdvert(p *Payload) (bool, string) {
	return dec.ValidateAdvert(p)
}

View File

@@ -3,6 +3,7 @@ module github.com/corescope/server
go 1.22
require (
github.com/corescope v0.0.0
github.com/gorilla/mux v1.8.1
github.com/gorilla/websocket v1.5.3
modernc.org/sqlite v1.34.5
@@ -19,3 +20,5 @@ require (
modernc.org/mathutil v1.6.0 // indirect
modernc.org/memory v1.8.0 // indirect
)
replace github.com/corescope => ../..

View File

@@ -30,13 +30,13 @@ type Server struct {
buildTime string
// Cached runtime.MemStats to avoid stop-the-world pauses on every health check
memStatsMu sync.Mutex
memStatsCache runtime.MemStats
memStatsMu sync.Mutex
memStatsCache runtime.MemStats
memStatsCachedAt time.Time
// Cached /api/stats response — recomputed at most once every 10s
statsMu sync.Mutex
statsCache *StatsResponse
statsMu sync.Mutex
statsCache *StatsResponse
statsCachedAt time.Time
}
@@ -117,7 +117,7 @@ func (s *Server) RegisterRoutes(r *mux.Router) {
r.Handle("/api/packets", s.requireAPIKey(http.HandlerFunc(s.handlePostPacket))).Methods("POST")
// Decode endpoint
r.Handle("/api/decode", s.requireAPIKey(http.HandlerFunc(s.handleDecode))).Methods("POST")
r.HandleFunc("/api/decode", s.handleDecode).Methods("POST")
// Node endpoints — fixed routes BEFORE parameterized
r.HandleFunc("/api/nodes/search", s.handleNodeSearch).Methods("GET")
@@ -1181,7 +1181,7 @@ func (s *Server) handleAnalyticsHashSizes(w http.ResponseWriter, r *http.Request
return
}
writeJSON(w, map[string]interface{}{
"total": 0,
"total": 0,
"distribution": map[string]int{"1": 0, "2": 0, "3": 0},
"distributionByRepeaters": map[string]int{"1": 0, "2": 0, "3": 0},
"hourly": []HashSizeHourly{},
@@ -1352,12 +1352,12 @@ func (s *Server) handleObservers(w http.ResponseWriter, r *http.Request) {
ID: o.ID, Name: o.Name, IATA: o.IATA,
LastSeen: o.LastSeen, FirstSeen: o.FirstSeen,
PacketCount: o.PacketCount,
Model: o.Model, Firmware: o.Firmware,
Model: o.Model, Firmware: o.Firmware,
ClientVersion: o.ClientVersion, Radio: o.Radio,
BatteryMv: o.BatteryMv, UptimeSecs: o.UptimeSecs,
NoiseFloor: o.NoiseFloor,
NoiseFloor: o.NoiseFloor,
PacketsLastHour: plh,
Lat: lat, Lon: lon, NodeRole: nodeRole,
Lat: lat, Lon: lon, NodeRole: nodeRole,
})
}
writeJSON(w, ObserverListResponse{
@@ -1386,10 +1386,10 @@ func (s *Server) handleObserverDetail(w http.ResponseWriter, r *http.Request) {
ID: obs.ID, Name: obs.Name, IATA: obs.IATA,
LastSeen: obs.LastSeen, FirstSeen: obs.FirstSeen,
PacketCount: obs.PacketCount,
Model: obs.Model, Firmware: obs.Firmware,
Model: obs.Model, Firmware: obs.Firmware,
ClientVersion: obs.ClientVersion, Radio: obs.Radio,
BatteryMv: obs.BatteryMv, UptimeSecs: obs.UptimeSecs,
NoiseFloor: obs.NoiseFloor,
NoiseFloor: obs.NoiseFloor,
PacketsLastHour: plh,
})
}

View File

@@ -48,7 +48,7 @@ func setupTestServerWithAPIKey(t *testing.T, apiKey string) (*Server, *mux.Route
func TestWriteEndpointsRequireAPIKey(t *testing.T) {
_, router := setupTestServerWithAPIKey(t, "test-secret")
t.Run("missing key returns 401", func(t *testing.T) {
t.Run("perf reset missing key returns 401", func(t *testing.T) {
req := httptest.NewRequest("POST", "/api/perf/reset", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
@@ -62,10 +62,9 @@ func TestWriteEndpointsRequireAPIKey(t *testing.T) {
}
})
t.Run("wrong key returns 401", func(t *testing.T) {
req := httptest.NewRequest("POST", "/api/decode", bytes.NewBufferString(`{"hex":"0200"}`))
t.Run("packets post missing key returns 401", func(t *testing.T) {
req := httptest.NewRequest("POST", "/api/packets", bytes.NewBufferString(`{"raw":"0200"}`))
req.Header.Set("Content-Type", "application/json")
req.Header.Set("X-API-Key", "wrong-secret")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
if w.Code != http.StatusUnauthorized {
@@ -73,10 +72,9 @@ func TestWriteEndpointsRequireAPIKey(t *testing.T) {
}
})
t.Run("correct key passes", func(t *testing.T) {
t.Run("decode succeeds without key", func(t *testing.T) {
req := httptest.NewRequest("POST", "/api/decode", bytes.NewBufferString(`{"hex":"0200"}`))
req.Header.Set("Content-Type", "application/json")
req.Header.Set("X-API-Key", "test-secret")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
if w.Code != http.StatusOK {
@@ -88,13 +86,34 @@ func TestWriteEndpointsRequireAPIKey(t *testing.T) {
func TestWriteEndpointsBlockWhenAPIKeyEmpty(t *testing.T) {
_, router := setupTestServerWithAPIKey(t, "")
req := httptest.NewRequest("POST", "/api/decode", bytes.NewBufferString(`{"hex":"0200"}`))
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
if w.Code != http.StatusForbidden {
t.Fatalf("expected 403 with empty apiKey, got %d (body: %s)", w.Code, w.Body.String())
}
t.Run("perf reset blocked when api key unset", func(t *testing.T) {
req := httptest.NewRequest("POST", "/api/perf/reset", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
if w.Code != http.StatusForbidden {
t.Fatalf("expected 403 with empty apiKey, got %d (body: %s)", w.Code, w.Body.String())
}
})
t.Run("packets post blocked when api key unset", func(t *testing.T) {
req := httptest.NewRequest("POST", "/api/packets", bytes.NewBufferString(`{"raw":"0200"}`))
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
if w.Code != http.StatusForbidden {
t.Fatalf("expected 403 with empty apiKey, got %d (body: %s)", w.Code, w.Body.String())
}
})
t.Run("decode remains open when api key unset", func(t *testing.T) {
req := httptest.NewRequest("POST", "/api/decode", bytes.NewBufferString(`{"hex":"0200"}`))
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
if w.Code != http.StatusOK {
t.Fatalf("expected 200, got %d (body: %s)", w.Code, w.Body.String())
}
})
}
func TestHealthEndpoint(t *testing.T) {
@@ -1622,7 +1641,6 @@ func TestHandlerErrorPaths(t *testing.T) {
router := mux.NewRouter()
srv.RegisterRoutes(router)
t.Run("stats error", func(t *testing.T) {
db.conn.Exec("DROP TABLE IF EXISTS transmissions")
req := httptest.NewRequest("GET", "/api/stats", nil)
@@ -1843,240 +1861,239 @@ func TestHandlerErrorBulkHealth(t *testing.T) {
}
}
func TestAnalyticsChannelsNoNullArrays(t *testing.T) {
_, router := setupTestServer(t)
req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
_, router := setupTestServer(t)
req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
if w.Code != 200 {
t.Fatalf("expected 200, got %d", w.Code)
}
if w.Code != 200 {
t.Fatalf("expected 200, got %d", w.Code)
}
raw := w.Body.String()
var body map[string]interface{}
if err := json.Unmarshal([]byte(raw), &body); err != nil {
t.Fatalf("invalid JSON: %v", err)
}
raw := w.Body.String()
var body map[string]interface{}
if err := json.Unmarshal([]byte(raw), &body); err != nil {
t.Fatalf("invalid JSON: %v", err)
}
arrayFields := []string{"channels", "topSenders", "channelTimeline", "msgLengths"}
for _, field := range arrayFields {
val, exists := body[field]
if !exists {
t.Errorf("missing field %q", field)
continue
}
if val == nil {
t.Errorf("field %q is null, expected empty array []", field)
continue
}
if _, ok := val.([]interface{}); !ok {
t.Errorf("field %q is not an array, got %T", field, val)
}
}
arrayFields := []string{"channels", "topSenders", "channelTimeline", "msgLengths"}
for _, field := range arrayFields {
val, exists := body[field]
if !exists {
t.Errorf("missing field %q", field)
continue
}
if val == nil {
t.Errorf("field %q is null, expected empty array []", field)
continue
}
if _, ok := val.([]interface{}); !ok {
t.Errorf("field %q is not an array, got %T", field, val)
}
}
}
func TestAnalyticsChannelsNoStoreFallbackNoNulls(t *testing.T) {
db := setupTestDB(t)
seedTestData(t, db)
cfg := &Config{Port: 3000}
hub := NewHub()
srv := NewServer(db, cfg, hub)
router := mux.NewRouter()
srv.RegisterRoutes(router)
db := setupTestDB(t)
seedTestData(t, db)
cfg := &Config{Port: 3000}
hub := NewHub()
srv := NewServer(db, cfg, hub)
router := mux.NewRouter()
srv.RegisterRoutes(router)
req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
req := httptest.NewRequest("GET", "/api/analytics/channels", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
if w.Code != 200 {
t.Fatalf("expected 200, got %d", w.Code)
}
if w.Code != 200 {
t.Fatalf("expected 200, got %d", w.Code)
}
var body map[string]interface{}
json.Unmarshal(w.Body.Bytes(), &body)
var body map[string]interface{}
json.Unmarshal(w.Body.Bytes(), &body)
arrayFields := []string{"channels", "topSenders", "channelTimeline", "msgLengths"}
for _, field := range arrayFields {
if body[field] == nil {
t.Errorf("field %q is null in DB fallback, expected []", field)
}
}
arrayFields := []string{"channels", "topSenders", "channelTimeline", "msgLengths"}
for _, field := range arrayFields {
if body[field] == nil {
t.Errorf("field %q is null in DB fallback, expected []", field)
}
}
}
func TestNodeHashSizeEnrichment(t *testing.T) {
t.Run("nil info leaves defaults", func(t *testing.T) {
node := map[string]interface{}{
"public_key": "abc123",
"hash_size": nil,
"hash_size_inconsistent": false,
}
EnrichNodeWithHashSize(node, nil)
if node["hash_size"] != nil {
t.Error("expected hash_size to remain nil with nil info")
}
})
t.Run("nil info leaves defaults", func(t *testing.T) {
node := map[string]interface{}{
"public_key": "abc123",
"hash_size": nil,
"hash_size_inconsistent": false,
}
EnrichNodeWithHashSize(node, nil)
if node["hash_size"] != nil {
t.Error("expected hash_size to remain nil with nil info")
}
})
t.Run("enriches with computed data", func(t *testing.T) {
node := map[string]interface{}{
"public_key": "abc123",
"hash_size": nil,
"hash_size_inconsistent": false,
}
info := &hashSizeNodeInfo{
HashSize: 2,
AllSizes: map[int]bool{1: true, 2: true},
Seq: []int{1, 2, 1, 2},
Inconsistent: true,
}
EnrichNodeWithHashSize(node, info)
if node["hash_size"] != 2 {
t.Errorf("expected hash_size 2, got %v", node["hash_size"])
}
if node["hash_size_inconsistent"] != true {
t.Error("expected hash_size_inconsistent true")
}
sizes, ok := node["hash_sizes_seen"].([]int)
if !ok {
t.Fatal("expected hash_sizes_seen to be []int")
}
if len(sizes) != 2 || sizes[0] != 1 || sizes[1] != 2 {
t.Errorf("expected [1,2], got %v", sizes)
}
})
t.Run("enriches with computed data", func(t *testing.T) {
node := map[string]interface{}{
"public_key": "abc123",
"hash_size": nil,
"hash_size_inconsistent": false,
}
info := &hashSizeNodeInfo{
HashSize: 2,
AllSizes: map[int]bool{1: true, 2: true},
Seq: []int{1, 2, 1, 2},
Inconsistent: true,
}
EnrichNodeWithHashSize(node, info)
if node["hash_size"] != 2 {
t.Errorf("expected hash_size 2, got %v", node["hash_size"])
}
if node["hash_size_inconsistent"] != true {
t.Error("expected hash_size_inconsistent true")
}
sizes, ok := node["hash_sizes_seen"].([]int)
if !ok {
t.Fatal("expected hash_sizes_seen to be []int")
}
if len(sizes) != 2 || sizes[0] != 1 || sizes[1] != 2 {
t.Errorf("expected [1,2], got %v", sizes)
}
})
t.Run("single size omits sizes_seen", func(t *testing.T) {
node := map[string]interface{}{
"public_key": "abc123",
"hash_size": nil,
"hash_size_inconsistent": false,
}
info := &hashSizeNodeInfo{
HashSize: 3,
AllSizes: map[int]bool{3: true},
Seq: []int{3, 3, 3},
}
EnrichNodeWithHashSize(node, info)
if node["hash_size"] != 3 {
t.Errorf("expected hash_size 3, got %v", node["hash_size"])
}
if node["hash_size_inconsistent"] != false {
t.Error("expected hash_size_inconsistent false")
}
if _, exists := node["hash_sizes_seen"]; exists {
t.Error("hash_sizes_seen should not be set for single size")
}
})
t.Run("single size omits sizes_seen", func(t *testing.T) {
node := map[string]interface{}{
"public_key": "abc123",
"hash_size": nil,
"hash_size_inconsistent": false,
}
info := &hashSizeNodeInfo{
HashSize: 3,
AllSizes: map[int]bool{3: true},
Seq: []int{3, 3, 3},
}
EnrichNodeWithHashSize(node, info)
if node["hash_size"] != 3 {
t.Errorf("expected hash_size 3, got %v", node["hash_size"])
}
if node["hash_size_inconsistent"] != false {
t.Error("expected hash_size_inconsistent false")
}
if _, exists := node["hash_sizes_seen"]; exists {
t.Error("hash_sizes_seen should not be set for single size")
}
})
}
func TestGetNodeHashSizeInfoFlipFlop(t *testing.T) {
db := setupTestDB(t)
seedTestData(t, db)
store := NewPacketStore(db, nil)
if err := store.Load(); err != nil {
t.Fatalf("store.Load failed: %v", err)
}
db := setupTestDB(t)
seedTestData(t, db)
store := NewPacketStore(db, nil)
if err := store.Load(); err != nil {
t.Fatalf("store.Load failed: %v", err)
}
pk := "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, role) VALUES (?, 'TestNode', 'repeater')", pk)
pk := "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, role) VALUES (?, 'TestNode', 'repeater')", pk)
decoded := `{"name":"TestNode","pubKey":"` + pk + `"}`
raw1 := "04" + "00" + "aabb"
raw2 := "04" + "40" + "aabb"
decoded := `{"name":"TestNode","pubKey":"` + pk + `"}`
raw1 := "04" + "00" + "aabb"
raw2 := "04" + "40" + "aabb"
payloadType := 4
for i := 0; i < 3; i++ {
rawHex := raw1
if i%2 == 1 {
rawHex = raw2
}
tx := &StoreTx{
ID: 9000 + i,
RawHex: rawHex,
Hash: "testhash" + strconv.Itoa(i),
FirstSeen: "2024-01-01T00:00:00Z",
PayloadType: &payloadType,
DecodedJSON: decoded,
}
store.packets = append(store.packets, tx)
store.byPayloadType[4] = append(store.byPayloadType[4], tx)
}
payloadType := 4
for i := 0; i < 3; i++ {
rawHex := raw1
if i%2 == 1 {
rawHex = raw2
}
tx := &StoreTx{
ID: 9000 + i,
RawHex: rawHex,
Hash: "testhash" + strconv.Itoa(i),
FirstSeen: "2024-01-01T00:00:00Z",
PayloadType: &payloadType,
DecodedJSON: decoded,
}
store.packets = append(store.packets, tx)
store.byPayloadType[4] = append(store.byPayloadType[4], tx)
}
info := store.GetNodeHashSizeInfo()
ni := info[pk]
if ni == nil {
t.Fatal("expected hash info for test node")
}
if len(ni.AllSizes) != 2 {
t.Errorf("expected 2 unique sizes, got %d", len(ni.AllSizes))
}
if !ni.Inconsistent {
t.Error("expected inconsistent flag to be true for flip-flop pattern")
}
info := store.GetNodeHashSizeInfo()
ni := info[pk]
if ni == nil {
t.Fatal("expected hash info for test node")
}
if len(ni.AllSizes) != 2 {
t.Errorf("expected 2 unique sizes, got %d", len(ni.AllSizes))
}
if !ni.Inconsistent {
t.Error("expected inconsistent flag to be true for flip-flop pattern")
}
}
func TestGetNodeHashSizeInfoDominant(t *testing.T) {
// A node that sends mostly 2-byte adverts but occasionally 1-byte (pathByte=0x00
// on direct sends) should report HashSize=2, not 1.
db := setupTestDB(t)
seedTestData(t, db)
store := NewPacketStore(db, nil)
if err := store.Load(); err != nil {
t.Fatalf("store.Load failed: %v", err)
}
pk := "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, role) VALUES (?, 'Repeater2B', 'repeater')", pk)
decoded := `{"name":"Repeater2B","pubKey":"` + pk + `"}`
raw1byte := "04" + "00" + "aabb" // pathByte=0x00 → hashSize=1 (direct send, no hops)
raw2byte := "04" + "40" + "aabb" // pathByte=0x40 → hashSize=2
payloadType := 4
// 1 packet with hashSize=1, 4 packets with hashSize=2
raws := []string{raw1byte, raw2byte, raw2byte, raw2byte, raw2byte}
for i, raw := range raws {
tx := &StoreTx{
ID: 8000 + i,
RawHex: raw,
Hash: "dominant" + strconv.Itoa(i),
FirstSeen: "2024-01-01T00:00:00Z",
PayloadType: &payloadType,
DecodedJSON: decoded,
// A node that sends mostly 2-byte adverts but occasionally 1-byte (pathByte=0x00
// on direct sends) should report HashSize=2, not 1.
db := setupTestDB(t)
seedTestData(t, db)
store := NewPacketStore(db, nil)
if err := store.Load(); err != nil {
t.Fatalf("store.Load failed: %v", err)
}
store.packets = append(store.packets, tx)
store.byPayloadType[4] = append(store.byPayloadType[4], tx)
}
info := store.GetNodeHashSizeInfo()
ni := info[pk]
if ni == nil {
t.Fatal("expected hash info for test node")
}
if ni.HashSize != 2 {
t.Errorf("HashSize=%d, want 2 (dominant size should win over occasional 1-byte)", ni.HashSize)
}
pk := "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
db.conn.Exec("INSERT OR IGNORE INTO nodes (public_key, name, role) VALUES (?, 'Repeater2B', 'repeater')", pk)
decoded := `{"name":"Repeater2B","pubKey":"` + pk + `"}`
raw1byte := "04" + "00" + "aabb" // pathByte=0x00 → hashSize=1 (direct send, no hops)
raw2byte := "04" + "40" + "aabb" // pathByte=0x40 → hashSize=2
payloadType := 4
// 1 packet with hashSize=1, 4 packets with hashSize=2
raws := []string{raw1byte, raw2byte, raw2byte, raw2byte, raw2byte}
for i, raw := range raws {
tx := &StoreTx{
ID: 8000 + i,
RawHex: raw,
Hash: "dominant" + strconv.Itoa(i),
FirstSeen: "2024-01-01T00:00:00Z",
PayloadType: &payloadType,
DecodedJSON: decoded,
}
store.packets = append(store.packets, tx)
store.byPayloadType[4] = append(store.byPayloadType[4], tx)
}
info := store.GetNodeHashSizeInfo()
ni := info[pk]
if ni == nil {
t.Fatal("expected hash info for test node")
}
if ni.HashSize != 2 {
t.Errorf("HashSize=%d, want 2 (dominant size should win over occasional 1-byte)", ni.HashSize)
}
}
func TestAnalyticsHashSizesNoNullArrays(t *testing.T) {
_, router := setupTestServer(t)
req := httptest.NewRequest("GET", "/api/analytics/hash-sizes", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
_, router := setupTestServer(t)
req := httptest.NewRequest("GET", "/api/analytics/hash-sizes", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
if w.Code != 200 {
t.Fatalf("expected 200, got %d", w.Code)
}
if w.Code != 200 {
t.Fatalf("expected 200, got %d", w.Code)
}
var body map[string]interface{}
json.Unmarshal(w.Body.Bytes(), &body)
var body map[string]interface{}
json.Unmarshal(w.Body.Bytes(), &body)
arrayFields := []string{"hourly", "topHops", "multiByteNodes"}
for _, field := range arrayFields {
if body[field] == nil {
t.Errorf("field %q is null, expected []", field)
}
arrayFields := []string{"hourly", "topHops", "multiByteNodes"}
for _, field := range arrayFields {
if body[field] == nil {
t.Errorf("field %q is null, expected []", field)
}
}
}
func TestObserverAnalyticsNoStore(t *testing.T) {

3
go.mod Normal file
View File

@@ -0,0 +1,3 @@
module github.com/corescope
go 1.22

740
internal/decoder/decoder.go Normal file
View File

@@ -0,0 +1,740 @@
package decoder
import (
"crypto/aes"
"crypto/hmac"
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"encoding/json"
"fmt"
"math"
"strings"
"time"
"unicode/utf8"
)
// Route type constants (header bits 1-0).
// Transport variants carry two 2-byte transport codes after the header byte.
const (
	RouteTransportFlood  = 0
	RouteFlood           = 1
	RouteDirect          = 2
	RouteTransportDirect = 3
)

// Payload type constants (header bits 5-2).
// Note the gap: 0x0C-0x0E are unassigned; 0x0F is RAW_CUSTOM.
const (
	PayloadREQ        = 0x00
	PayloadRESPONSE   = 0x01
	PayloadTXT_MSG    = 0x02
	PayloadACK        = 0x03
	PayloadADVERT     = 0x04
	PayloadGRP_TXT    = 0x05
	PayloadGRP_DATA   = 0x06
	PayloadANON_REQ   = 0x07
	PayloadPATH       = 0x08
	PayloadTRACE      = 0x09
	PayloadMULTIPART  = 0x0A
	PayloadCONTROL    = 0x0B
	PayloadRAW_CUSTOM = 0x0F
)
// routeTypeNames maps route type values to display names used in decoded output.
var routeTypeNames = map[int]string{
	0: "TRANSPORT_FLOOD",
	1: "FLOOD",
	2: "DIRECT",
	3: "TRANSPORT_DIRECT",
}

// payloadTypeNames maps payload type values to display names used in decoded output.
var payloadTypeNames = map[int]string{
	0x00: "REQ",
	0x01: "RESPONSE",
	0x02: "TXT_MSG",
	0x03: "ACK",
	0x04: "ADVERT",
	0x05: "GRP_TXT",
	0x06: "GRP_DATA",
	0x07: "ANON_REQ",
	0x08: "PATH",
	0x09: "TRACE",
	0x0A: "MULTIPART",
	0x0B: "CONTROL",
	0x0F: "RAW_CUSTOM",
}
// Header is the decoded packet header (one byte on the wire): route type in
// bits 0-1, payload type in bits 2-5, payload version in bits 6-7.
type Header struct {
	RouteType       int    `json:"routeType"`
	RouteTypeName   string `json:"routeTypeName"`
	PayloadType     int    `json:"payloadType"`
	PayloadTypeName string `json:"payloadTypeName"`
	PayloadVersion  int    `json:"payloadVersion"`
}

// TransportCodes are present on TRANSPORT_FLOOD and TRANSPORT_DIRECT routes.
// Each code is 2 bytes, rendered as uppercase hex.
type TransportCodes struct {
	Code1 string `json:"code1"`
	Code2 string `json:"code2"`
}

// Path holds decoded path/hop information. HashSize is bytes per hop hash
// (1-4); Hops are uppercase hex strings of that width.
type Path struct {
	HashSize  int      `json:"hashSize"`
	HashCount int      `json:"hashCount"`
	Hops      []string `json:"hops"`
}

// AdvertFlags holds decoded advert flag bits from the first appdata byte:
// low nibble is the node type, high bits gate optional fields.
type AdvertFlags struct {
	Raw         int  `json:"raw"`
	Type        int  `json:"type"`
	Chat        bool `json:"chat"`
	Repeater    bool `json:"repeater"`
	Room        bool `json:"room"`
	Sensor      bool `json:"sensor"`
	HasLocation bool `json:"hasLocation"`
	HasFeat1    bool `json:"hasFeat1"`
	HasFeat2    bool `json:"hasFeat2"`
	HasName     bool `json:"hasName"`
}

// Payload is a generic decoded payload. Fields are populated depending on
// type (all are omitempty); Error is set when the payload could not be
// parsed, with RawHex preserving the undecoded bytes.
type Payload struct {
	Type string `json:"type"`
	// Encrypted point-to-point fields (REQ/RESPONSE/TXT_MSG/ANON_REQ/PATH).
	DestHash      string `json:"destHash,omitempty"`
	SrcHash       string `json:"srcHash,omitempty"`
	MAC           string `json:"mac,omitempty"`
	EncryptedData string `json:"encryptedData,omitempty"`
	ExtraHash     string `json:"extraHash,omitempty"`
	// ADVERT fields.
	PubKey       string       `json:"pubKey,omitempty"`
	Timestamp    uint32       `json:"timestamp,omitempty"`
	TimestampISO string       `json:"timestampISO,omitempty"`
	Signature    string       `json:"signature,omitempty"`
	Flags        *AdvertFlags `json:"flags,omitempty"`
	Lat          *float64     `json:"lat,omitempty"`
	Lon          *float64     `json:"lon,omitempty"`
	Name         string       `json:"name,omitempty"`
	Feat1        *int         `json:"feat1,omitempty"`
	Feat2        *int         `json:"feat2,omitempty"`
	BatteryMv    *int         `json:"battery_mv,omitempty"`
	TemperatureC *float64     `json:"temperature_c,omitempty"`
	// Channel (GRP_TXT) fields.
	ChannelHash      int    `json:"channelHash,omitempty"`
	ChannelHashHex   string `json:"channelHashHex,omitempty"`
	DecryptionStatus string `json:"decryptionStatus,omitempty"`
	Channel          string `json:"channel,omitempty"`
	Text             string `json:"text,omitempty"`
	Sender           string `json:"sender,omitempty"`
	SenderTimestamp  uint32 `json:"sender_timestamp,omitempty"`
	EphemeralPubKey  string `json:"ephemeralPubKey,omitempty"`
	// PATH/TRACE fields.
	PathData   string `json:"pathData,omitempty"`
	Tag        uint32 `json:"tag,omitempty"`
	AuthCode   uint32 `json:"authCode,omitempty"`
	TraceFlags *int   `json:"traceFlags,omitempty"`
	RawHex     string `json:"raw,omitempty"`
	Error      string `json:"error,omitempty"`
}

// DecodedPacket is the full decoded result. TransportCodes is nil on
// non-transport routes; Raw is the uppercase hex of the whole packet.
type DecodedPacket struct {
	Header         Header          `json:"header"`
	TransportCodes *TransportCodes `json:"transportCodes"`
	Path           Path            `json:"path"`
	Payload        Payload         `json:"payload"`
	Raw            string          `json:"raw"`
}
// decodeHeader splits the single header byte into route type (bits 0-1),
// payload type (bits 2-5) and payload version (bits 6-7), attaching display
// names ("UNKNOWN" when the value has no registered name).
func decodeHeader(b byte) Header {
	h := Header{
		RouteType:      int(b & 0x03),
		PayloadType:    int((b >> 2) & 0x0F),
		PayloadVersion: int((b >> 6) & 0x03),
	}
	h.RouteTypeName = routeTypeNames[h.RouteType]
	h.PayloadTypeName = payloadTypeNames[h.PayloadType]
	if h.RouteTypeName == "" {
		h.RouteTypeName = "UNKNOWN"
	}
	if h.PayloadTypeName == "" {
		h.PayloadTypeName = "UNKNOWN"
	}
	return h
}
// decodePath decodes the path byte and the hop list starting at buf[offset].
// Bits 6-7 of pathByte encode hashSize-1 (1-4 bytes per hop) and bits 0-5 the
// hop count. The second return value is the number of path bytes consumed.
//
// Fix: the consumed count is clamped to the bytes actually present. The old
// code returned hashSize*hashCount unconditionally, so a truncated packet
// made callers advance their offset past len(buf) and panic when slicing the
// payload (buf[offset:]). Well-formed packets are unaffected.
func decodePath(pathByte byte, buf []byte, offset int) (Path, int) {
	hashSize := int(pathByte>>6) + 1
	hashCount := int(pathByte & 0x3F)
	totalBytes := hashSize * hashCount
	avail := len(buf) - offset
	if avail < 0 {
		avail = 0
	}
	consumed := totalBytes
	if consumed > avail {
		consumed = avail
	}
	hops := make([]string, 0, hashCount)
	for i := 0; i < hashCount; i++ {
		start := offset + i*hashSize
		end := start + hashSize
		if end > len(buf) {
			break // truncated hop list: keep only the complete hops
		}
		hops = append(hops, strings.ToUpper(hex.EncodeToString(buf[start:end])))
	}
	return Path{
		HashSize:  hashSize,
		HashCount: hashCount,
		Hops:      hops,
	}, consumed
}
// isTransportRoute reports whether the route type carries transport codes.
func isTransportRoute(routeType int) bool {
	switch routeType {
	case RouteTransportFlood, RouteTransportDirect:
		return true
	default:
		return false
	}
}
// decodeEncryptedPayload parses the shared encrypted point-to-point layout:
// dest hash (1 byte), src hash (1 byte), MAC (2 bytes), ciphertext.
// typeName labels the result (REQ, RESPONSE, or TXT_MSG).
func decodeEncryptedPayload(typeName string, buf []byte) Payload {
	const headerLen = 4
	if len(buf) < headerLen {
		return Payload{Type: typeName, Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	out := Payload{Type: typeName}
	out.DestHash = hex.EncodeToString(buf[:1])
	out.SrcHash = hex.EncodeToString(buf[1:2])
	out.MAC = hex.EncodeToString(buf[2:headerLen])
	out.EncryptedData = hex.EncodeToString(buf[headerLen:])
	return out
}
// decodeAck parses an ACK payload: a 4-byte checksum in little-endian wire
// order, rendered as 8 hex digits of the decoded integer (i.e. byte-reversed
// relative to the on-air order — NOTE(review): confirm this matches how ACK
// hashes are compared elsewhere).
func decodeAck(buf []byte) Payload {
	const ackLen = 4
	if len(buf) < ackLen {
		return Payload{Type: "ACK", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	return Payload{
		Type:      "ACK",
		ExtraHash: fmt.Sprintf("%08x", binary.LittleEndian.Uint32(buf[:ackLen])),
	}
}
// decodeAdvert parses an ADVERT payload: pubkey (32 bytes), timestamp
// (4 bytes LE), signature (64 bytes), then optional appdata — a flags byte
// followed by location / feature / name / telemetry fields gated by the
// individual flag bits.
//
// Fix: TimestampISO was built with fmt.Sprintf("%s", EpochToISO(...)), a
// no-op wrapper around a value that is already a string (staticcheck S1025);
// the helper is now called directly.
func decodeAdvert(buf []byte) Payload {
	if len(buf) < 100 {
		return Payload{Type: "ADVERT", Error: "too short for advert", RawHex: hex.EncodeToString(buf)}
	}
	pubKey := hex.EncodeToString(buf[0:32])
	timestamp := binary.LittleEndian.Uint32(buf[32:36])
	signature := hex.EncodeToString(buf[36:100])
	appdata := buf[100:]
	p := Payload{
		Type:         "ADVERT",
		PubKey:       pubKey,
		Timestamp:    timestamp,
		TimestampISO: EpochToISO(timestamp),
		Signature:    signature,
	}
	if len(appdata) > 0 {
		flags := appdata[0]
		advType := int(flags & 0x0F)
		hasFeat1 := flags&0x20 != 0
		hasFeat2 := flags&0x40 != 0
		p.Flags = &AdvertFlags{
			Raw:         int(flags),
			Type:        advType,
			Chat:        advType == 1,
			Repeater:    advType == 2,
			Room:        advType == 3,
			Sensor:      advType == 4,
			HasLocation: flags&0x10 != 0,
			HasFeat1:    hasFeat1,
			HasFeat2:    hasFeat2,
			HasName:     flags&0x80 != 0,
		}
		off := 1
		// Location: lat/lon as signed micro-degrees, 4 bytes LE each.
		if p.Flags.HasLocation && len(appdata) >= off+8 {
			latRaw := int32(binary.LittleEndian.Uint32(appdata[off : off+4]))
			lonRaw := int32(binary.LittleEndian.Uint32(appdata[off+4 : off+8]))
			lat := float64(latRaw) / 1e6
			lon := float64(lonRaw) / 1e6
			p.Lat = &lat
			p.Lon = &lon
			off += 8
		}
		if hasFeat1 && len(appdata) >= off+2 {
			feat1 := int(binary.LittleEndian.Uint16(appdata[off : off+2]))
			p.Feat1 = &feat1
			off += 2
		}
		if hasFeat2 && len(appdata) >= off+2 {
			feat2 := int(binary.LittleEndian.Uint16(appdata[off : off+2]))
			p.Feat2 = &feat2
			off += 2
		}
		if p.Flags.HasName {
			// Find null terminator to separate name from trailing telemetry bytes
			nameEnd := len(appdata)
			for i := off; i < len(appdata); i++ {
				if appdata[i] == 0x00 {
					nameEnd = i
					break
				}
			}
			name := string(appdata[off:nameEnd])
			name = sanitizeName(name)
			p.Name = name
			off = nameEnd
			// Skip null terminator(s)
			for off < len(appdata) && appdata[off] == 0x00 {
				off++
			}
		}
		// Telemetry bytes after name: battery_mv(2 LE) + temperature_c(2 LE, signed, /100)
		// Only sensor nodes (advType=4) carry telemetry bytes.
		if p.Flags.Sensor && off+4 <= len(appdata) {
			batteryMv := int(binary.LittleEndian.Uint16(appdata[off : off+2]))
			tempRaw := int16(binary.LittleEndian.Uint16(appdata[off+2 : off+4]))
			tempC := float64(tempRaw) / 100.0
			if batteryMv > 0 && batteryMv <= 10000 {
				p.BatteryMv = &batteryMv
			}
			// Raw int16 / 100 → °C; accept -50°C to 100°C (raw: -5000 to 10000)
			if tempRaw >= -5000 && tempRaw <= 10000 {
				p.TemperatureC = &tempC
			}
		}
	}
	return p
}
// channelDecryptResult holds the decrypted channel message fields.
type channelDecryptResult struct {
	Timestamp uint32 // first 4 plaintext bytes, little-endian; presumably Unix epoch seconds — confirm
	Flags     byte   // fifth plaintext byte
	Sender    string // sender name, when present — set by the caller parsing the message text
	Message   string // decrypted message text
}
// countNonPrintable counts characters that are non-printable (< 0x20 except \n, \t).
// Runes decoded as utf8.RuneError (invalid UTF-8 or a literal U+FFFD) count too.
func countNonPrintable(s string) int {
	n := 0
	for _, r := range s {
		switch {
		case r == '\n' || r == '\t':
			// allowed whitespace — not counted
		case r < 0x20, r == utf8.RuneError:
			n++
		}
	}
	return n
}
// decryptChannelMessage implements MeshCore channel decryption:
// HMAC-SHA256 MAC verification (truncated to 2 bytes) followed by
// AES-128-ECB decryption and plaintext parsing.
//
// All three inputs are hex strings: the ciphertext, the 2-byte MAC, and
// the 16-byte channel key. On success it returns the parsed timestamp,
// flags, and message text (split into Sender/Message when the plaintext
// has a "sender: message" shape); otherwise a descriptive error.
func decryptChannelMessage(ciphertextHex, macHex, channelKeyHex string) (*channelDecryptResult, error) {
	channelKey, err := hex.DecodeString(channelKeyHex)
	if err != nil || len(channelKey) != 16 {
		return nil, fmt.Errorf("invalid channel key")
	}
	macBytes, err := hex.DecodeString(macHex)
	if err != nil || len(macBytes) != 2 {
		return nil, fmt.Errorf("invalid MAC")
	}
	ciphertext, err := hex.DecodeString(ciphertextHex)
	if err != nil || len(ciphertext) == 0 {
		return nil, fmt.Errorf("invalid ciphertext")
	}
	// 32-byte channel secret: 16-byte key + 16 zero bytes.
	channelSecret := make([]byte, 32)
	copy(channelSecret, channelKey)
	// Verify HMAC-SHA256; only the first 2 bytes travel on the wire.
	// hmac.Equal provides a constant-time comparison (the original code
	// compared bytes directly).
	h := hmac.New(sha256.New, channelSecret)
	h.Write(ciphertext)
	if !hmac.Equal(h.Sum(nil)[:2], macBytes) {
		return nil, fmt.Errorf("MAC verification failed")
	}
	// AES-128-ECB decrypt (block-by-block, no padding).
	if len(ciphertext)%aes.BlockSize != 0 {
		return nil, fmt.Errorf("ciphertext not aligned to AES block size")
	}
	block, err := aes.NewCipher(channelKey)
	if err != nil {
		return nil, fmt.Errorf("AES cipher: %w", err)
	}
	plaintext := make([]byte, len(ciphertext))
	for i := 0; i < len(ciphertext); i += aes.BlockSize {
		block.Decrypt(plaintext[i:i+aes.BlockSize], ciphertext[i:i+aes.BlockSize])
	}
	// Parse: timestamp(4 LE) + flags(1) + message (UTF-8, null-terminated).
	if len(plaintext) < 5 {
		return nil, fmt.Errorf("decrypted content too short")
	}
	timestamp := binary.LittleEndian.Uint32(plaintext[0:4])
	flags := plaintext[4]
	messageText := string(plaintext[5:])
	if idx := strings.IndexByte(messageText, 0); idx >= 0 {
		messageText = messageText[:idx]
	}
	// Reject binary garbage (e.g. a wrong key that happened to pass the
	// 2-byte MAC check): text must be valid UTF-8 with at most 2
	// non-printable runes.
	if !utf8.ValidString(messageText) || countNonPrintable(messageText) > 2 {
		return nil, fmt.Errorf("decrypted text contains non-printable characters")
	}
	result := &channelDecryptResult{Timestamp: timestamp, Flags: flags}
	// Split a leading "sender: message" prefix: colon within the first
	// 50 bytes, and the sender part free of ':', '[' and ']'.
	colonIdx := strings.Index(messageText, ": ")
	if colonIdx > 0 && colonIdx < 50 && !strings.ContainsAny(messageText[:colonIdx], ":[]") {
		result.Sender = messageText[:colonIdx]
		result.Message = messageText[colonIdx+2:]
	} else {
		result.Message = messageText
	}
	return result, nil
}
// decodeGrpTxt decodes a GRP_TXT (channel message) payload.
// Wire layout: channelHash(1) + MAC(2) + encrypted blob. Every configured
// channel key is tried in turn; the first successful decrypt wins.
// NOTE(review): channelKeys is a map, so trial order is randomized — if two
// keys could decrypt the same blob, the reported channel name may vary
// between calls. Confirm whether deterministic ordering matters upstream.
func decodeGrpTxt(buf []byte, channelKeys map[string]string) Payload {
	if len(buf) < 3 {
		return Payload{Type: "GRP_TXT", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	channelHash := int(buf[0])
	channelHashHex := fmt.Sprintf("%02X", buf[0])
	mac := hex.EncodeToString(buf[1:3])
	encryptedData := hex.EncodeToString(buf[3:])
	// Mirror the Node.js decoder: only attempt decryption when the
	// encrypted portion is at least 5 bytes (10 hex characters).
	if len(channelKeys) == 0 || len(encryptedData) < 10 {
		return Payload{
			Type:             "GRP_TXT",
			ChannelHash:      channelHash,
			ChannelHashHex:   channelHashHex,
			DecryptionStatus: "no_key",
			MAC:              mac,
			EncryptedData:    encryptedData,
		}
	}
	for name, key := range channelKeys {
		result, err := decryptChannelMessage(encryptedData, mac, key)
		if err != nil {
			continue
		}
		// Present as "sender: message" when both parts are known.
		text := result.Message
		if result.Sender != "" && result.Message != "" {
			text = result.Sender + ": " + result.Message
		}
		return Payload{
			Type:             "CHAN",
			Channel:          name,
			ChannelHash:      channelHash,
			ChannelHashHex:   channelHashHex,
			DecryptionStatus: "decrypted",
			Sender:           result.Sender,
			Text:             text,
			SenderTimestamp:  result.Timestamp,
		}
	}
	// Keys were available but none produced a valid plaintext.
	return Payload{
		Type:             "GRP_TXT",
		ChannelHash:      channelHash,
		ChannelHashHex:   channelHashHex,
		DecryptionStatus: "decryption_failed",
		MAC:              mac,
		EncryptedData:    encryptedData,
	}
}
// decodeAnonReq decodes an ANON_REQ payload:
// destHash(1) + ephemeral public key(32) + MAC(2) + encrypted data.
func decodeAnonReq(buf []byte) Payload {
	if len(buf) < 35 {
		return Payload{Type: "ANON_REQ", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	p := Payload{Type: "ANON_REQ"}
	p.DestHash = hex.EncodeToString(buf[:1])
	p.EphemeralPubKey = hex.EncodeToString(buf[1:33])
	p.MAC = hex.EncodeToString(buf[33:35])
	p.EncryptedData = hex.EncodeToString(buf[35:])
	return p
}
// decodePathPayload decodes a PATH payload:
// destHash(1) + srcHash(1) + MAC(2) + raw path data.
func decodePathPayload(buf []byte) Payload {
	if len(buf) < 4 {
		return Payload{Type: "PATH", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	p := Payload{Type: "PATH"}
	p.DestHash = hex.EncodeToString(buf[:1])
	p.SrcHash = hex.EncodeToString(buf[1:2])
	p.MAC = hex.EncodeToString(buf[2:4])
	p.PathData = hex.EncodeToString(buf[4:])
	return p
}
// decodeTrace decodes a TRACE payload:
// tag(4 LE) + authCode(4 LE) + flags(1) + optional hop path data.
func decodeTrace(buf []byte) Payload {
	if len(buf) < 9 {
		return Payload{Type: "TRACE", Error: "too short", RawHex: hex.EncodeToString(buf)}
	}
	flags := int(buf[8])
	p := Payload{
		Type:       "TRACE",
		Tag:        binary.LittleEndian.Uint32(buf[0:4]),
		AuthCode:   binary.LittleEndian.Uint32(buf[4:8]),
		TraceFlags: &flags,
	}
	// Everything after the fixed 9-byte prefix is hop path data.
	if rest := buf[9:]; len(rest) > 0 {
		p.PathData = hex.EncodeToString(rest)
	}
	return p
}
// decodePayload dispatches a raw payload buffer to the decoder for the
// given payload type. channelKeys is consulted only for GRP_TXT (channel)
// messages; unrecognized types come back as raw hex with Type "UNKNOWN".
func decodePayload(payloadType int, buf []byte, channelKeys map[string]string) Payload {
	switch payloadType {
	case PayloadADVERT:
		return decodeAdvert(buf)
	case PayloadACK:
		return decodeAck(buf)
	case PayloadGRP_TXT:
		return decodeGrpTxt(buf, channelKeys)
	// The three encrypted payload kinds share one decoder; only the
	// type label differs.
	case PayloadTXT_MSG:
		return decodeEncryptedPayload("TXT_MSG", buf)
	case PayloadREQ:
		return decodeEncryptedPayload("REQ", buf)
	case PayloadRESPONSE:
		return decodeEncryptedPayload("RESPONSE", buf)
	case PayloadANON_REQ:
		return decodeAnonReq(buf)
	case PayloadPATH:
		return decodePathPayload(buf)
	case PayloadTRACE:
		return decodeTrace(buf)
	}
	return Payload{Type: "UNKNOWN", RawHex: hex.EncodeToString(buf)}
}
// DecodePacket decodes a hex-encoded MeshCore packet into its header,
// optional transport codes, routing path, and typed payload.
//
// hexString may contain spaces and newlines (stripped before decoding).
// channelKeys maps channel names to hex AES keys used for GRP_TXT
// decryption. Returns an error for malformed hex or truncated packets.
func DecodePacket(hexString string, channelKeys map[string]string) (*DecodedPacket, error) {
	// Tolerate whitespace from pasted hex dumps.
	hexString = strings.ReplaceAll(hexString, " ", "")
	hexString = strings.ReplaceAll(hexString, "\n", "")
	hexString = strings.ReplaceAll(hexString, "\r", "")
	buf, err := hex.DecodeString(hexString)
	if err != nil {
		return nil, fmt.Errorf("invalid hex: %w", err)
	}
	if len(buf) < 2 {
		return nil, fmt.Errorf("packet too short (need at least header + pathLength)")
	}
	// Byte 0 is the packed header (route type, payload type, version).
	header := decodeHeader(buf[0])
	offset := 1
	// Transport-routed packets carry two 2-byte transport codes before
	// the path byte; direct routes skip straight to the path.
	var tc *TransportCodes
	if isTransportRoute(header.RouteType) {
		if len(buf) < offset+4 {
			return nil, fmt.Errorf("packet too short for transport codes")
		}
		tc = &TransportCodes{
			Code1: strings.ToUpper(hex.EncodeToString(buf[offset : offset+2])),
			Code2: strings.ToUpper(hex.EncodeToString(buf[offset+2 : offset+4])),
		}
		offset += 4
	}
	if offset >= len(buf) {
		return nil, fmt.Errorf("packet too short (no path byte)")
	}
	// The path byte encodes hash size (bits 6-7) and hop count (bits 0-5);
	// decodePath consumes the hop hashes that follow it.
	pathByte := buf[offset]
	offset++
	path, bytesConsumed := decodePath(pathByte, buf, offset)
	offset += bytesConsumed
	// Everything after the path is the payload for this packet's type.
	payloadBuf := buf[offset:]
	payload := decodePayload(header.PayloadType, payloadBuf, channelKeys)
	// TRACE packets store hop IDs in the payload (buf[9:]) rather than the header
	// path field. The header path byte still encodes hashSize in bits 6-7, which
	// we use to split the payload path data into individual hop prefixes.
	if header.PayloadType == PayloadTRACE && payload.PathData != "" {
		pathBytes, err := hex.DecodeString(payload.PathData)
		if err == nil && path.HashSize > 0 {
			hops := make([]string, 0, len(pathBytes)/path.HashSize)
			for i := 0; i+path.HashSize <= len(pathBytes); i += path.HashSize {
				hops = append(hops, strings.ToUpper(hex.EncodeToString(pathBytes[i:i+path.HashSize])))
			}
			path.Hops = hops
			path.HashCount = len(hops)
		}
	}
	return &DecodedPacket{
		Header:         header,
		TransportCodes: tc,
		Path:           path,
		Payload:        payload,
		Raw:            strings.ToUpper(hexString),
	}, nil
}
// ComputeContentHash computes the SHA-256-based content hash (first 16 hex
// chars). It hashes the header byte + payload — skipping transport codes
// and path bytes — so the same transmission heard over different routes
// yields the same identifier.
//
// On any structural problem (bad hex, truncated packet) it falls back to a
// 16-char prefix of the raw hex string rather than failing.
func ComputeContentHash(rawHex string) string {
	// fallback is the degraded identifier used when the packet cannot be
	// parsed: the first 16 chars of rawHex, or all of it when shorter.
	// (The original duplicated this three-line snippet four times.)
	fallback := func() string {
		if len(rawHex) >= 16 {
			return rawHex[:16]
		}
		return rawHex
	}
	buf, err := hex.DecodeString(rawHex)
	if err != nil || len(buf) < 2 {
		return fallback()
	}
	headerByte := buf[0]
	offset := 1
	// Route type lives in the low 2 bits of the header; transport routes
	// carry 4 extra bytes of transport codes before the path byte.
	if isTransportRoute(int(headerByte & 0x03)) {
		offset += 4
	}
	if offset >= len(buf) {
		return fallback()
	}
	pathByte := buf[offset]
	offset++
	hashSize := int((pathByte>>6)&0x3) + 1 // bits 6-7: per-hop hash size minus one
	hashCount := int(pathByte & 0x3F)      // bits 0-5: number of hops
	payloadStart := offset + hashSize*hashCount
	if payloadStart > len(buf) {
		return fallback()
	}
	// Hash header byte + payload only (path-independent).
	toHash := append([]byte{headerByte}, buf[payloadStart:]...)
	h := sha256.Sum256(toHash)
	return hex.EncodeToString(h[:])[:16]
}
// PayloadJSON serializes the payload to JSON for DB storage, returning
// the literal "{}" when marshalling fails.
func PayloadJSON(p *Payload) string {
	if b, err := json.Marshal(p); err == nil {
		return string(b)
	}
	return "{}"
}
// ValidateAdvert checks decoded advert data before DB insertion.
// It returns false plus a human-readable reason at the first failed check,
// or (true, "") when the advert is acceptable.
func ValidateAdvert(p *Payload) (bool, string) {
	if p == nil {
		return false, "null advert"
	}
	if p.Error != "" {
		return false, p.Error
	}
	// Public key must be at least 16 hex chars and not all zeros.
	if len(p.PubKey) < 16 {
		return false, fmt.Sprintf("pubkey too short (%d hex chars)", len(p.PubKey))
	}
	if strings.Trim(p.PubKey, "0") == "" {
		return false, "pubkey is all zeros"
	}
	// Coordinates, when present, must be finite and within WGS84 bounds.
	if p.Lat != nil && (math.IsInf(*p.Lat, 0) || math.IsNaN(*p.Lat) || *p.Lat < -90 || *p.Lat > 90) {
		return false, fmt.Sprintf("invalid lat: %f", *p.Lat)
	}
	if p.Lon != nil && (math.IsInf(*p.Lon, 0) || math.IsNaN(*p.Lon) || *p.Lon < -180 || *p.Lon > 180) {
		return false, fmt.Sprintf("invalid lon: %f", *p.Lon)
	}
	if p.Name != "" {
		// Reject C0 control characters (except \t, \n, \r) and DEL.
		for _, c := range p.Name {
			if c <= 0x08 || c == 0x0b || c == 0x0c || (c >= 0x0e && c <= 0x1f) || c == 0x7f {
				return false, "name contains control characters"
			}
		}
		// Length is measured in bytes, matching the original check.
		if len(p.Name) > 64 {
			return false, fmt.Sprintf("name too long (%d chars)", len(p.Name))
		}
	}
	// When flags are present, the derived role must be one we recognize.
	if p.Flags != nil {
		switch role := AdvertRole(p.Flags); role {
		case "repeater", "companion", "room", "sensor":
			// known role — OK
		default:
			return false, fmt.Sprintf("unknown role: %s", role)
		}
	}
	return true, ""
}
// sanitizeName strips non-printable characters (anything below 0x20
// except tab and newline) and DEL (0x7f) from a node name.
func sanitizeName(s string) string {
	var out strings.Builder
	out.Grow(len(s))
	for _, r := range s {
		printable := r >= 0x20 && r != 0x7f
		if printable || r == '\t' || r == '\n' {
			out.WriteRune(r)
		}
	}
	return out.String()
}
// AdvertRole maps advert flags to a node role string. Precedence follows
// the flag checks: repeater, then room, then sensor; anything else is a
// companion.
func AdvertRole(f *AdvertFlags) string {
	switch {
	case f.Repeater:
		return "repeater"
	case f.Room:
		return "room"
	case f.Sensor:
		return "sensor"
	default:
		return "companion"
	}
}
// EpochToISO formats a Unix epoch (seconds) as an ISO-8601 UTC string
// with millisecond precision, e.g. "2024-01-02T03:04:05.000Z".
func EpochToISO(epoch uint32) string {
	const layout = "2006-01-02T15:04:05.000Z"
	return time.Unix(int64(epoch), 0).UTC().Format(layout)
}

View File

@@ -9,9 +9,7 @@
let autoScroll = true;
let nodeCache = {};
let selectedNode = null;
let observerIataById = {};
let observerIataByName = {};
let messageRequestId = 0;
let observerIataMap = {};
var _nodeCacheTTL = 5 * 60 * 1000; // 5 minutes
function getSelectedRegionsSnapshot() {
@@ -19,28 +17,11 @@
return rp ? rp.split(',').filter(Boolean) : null;
}
function normalizeObserverNameKey(name) {
if (!name) return '';
return String(name).trim().toLowerCase();
}
function shouldProcessWSMessageForRegion(msg, selectedRegions, observerRegionsById, observerRegionsByName) {
function shouldProcessWSMessageForRegion(msg, selectedRegions, observerRegions) {
if (!selectedRegions || !selectedRegions.length) return true;
if (observerRegionsById && observerRegionsById.byId) {
observerRegionsByName = observerRegionsById.byName || {};
observerRegionsById = observerRegionsById.byId || {};
}
observerRegionsById = observerRegionsById || {};
observerRegionsByName = observerRegionsByName || {};
var observerId = msg?.data?.packet?.observer_id || msg?.data?.observer_id || null;
var observerRegion = observerId ? observerRegionsById[observerId] : null;
if (!observerRegion) {
var observerName = msg?.data?.packet?.observer_name || msg?.data?.observer_name || msg?.data?.observer || null;
var observerNameKey = normalizeObserverNameKey(observerName);
if (observerName) observerRegion = observerRegionsByName[observerName];
if (!observerRegion && observerNameKey) observerRegion = observerRegionsByName[observerNameKey];
}
if (!observerId) return false;
var observerRegion = observerRegions[observerId];
if (!observerRegion) return false;
return selectedRegions.indexOf(observerRegion) !== -1;
}
@@ -49,53 +30,17 @@
try {
var data = await api('/observers', { ttl: CLIENT_TTL.observers });
var list = data && data.observers ? data.observers : [];
var byId = {};
var byName = {};
var map = {};
for (var i = 0; i < list.length; i++) {
var o = list[i];
var id = o.id || o.observer_id;
var name = o.name || o.observer_name;
if (!o.iata) continue;
if (id) byId[id] = o.iata;
if (name) {
byName[name] = o.iata;
var key = normalizeObserverNameKey(name);
if (key) byName[key] = o.iata;
}
if (!id || !o.iata) continue;
map[id] = o.iata;
}
observerIataById = byId;
observerIataByName = byName;
observerIataMap = map;
} catch {}
}
function beginMessageRequest(hash, regionParam) {
return { id: ++messageRequestId, hash: hash, regionParam: regionParam || '' };
}
function isStaleMessageRequest(req) {
if (!req) return true;
var currentRegion = RegionFilter.getRegionParam() || '';
if (req.id !== messageRequestId) return true;
if (selectedHash !== req.hash) return true;
if (currentRegion !== req.regionParam) return true;
return false;
}
function reconcileSelectionAfterChannelRefresh() {
if (!selectedHash || channels.some(ch => ch.hash === selectedHash)) return false;
selectedHash = null;
messages = [];
history.replaceState(null, '', '#/channels');
renderChannelList();
const header = document.getElementById('chHeader');
if (header) header.querySelector('.ch-header-text').textContent = 'Select a channel';
const msgEl = document.getElementById('chMessages');
if (msgEl) msgEl.innerHTML = '<div class="ch-empty">Choose a channel from the sidebar to view messages</div>';
document.querySelector('.ch-layout')?.classList.remove('ch-show-main');
document.getElementById('chScrollBtn')?.classList.add('hidden');
return true;
}
async function lookupNode(name) {
var cached = nodeCache[name];
if (cached !== undefined) {
@@ -473,7 +418,8 @@
}
});
function processWSBatch(msgs, selectedRegions) {
wsHandler = debouncedOnWS(function (msgs) {
var selectedRegions = getSelectedRegionsSnapshot();
var dominated = msgs.filter(function (m) {
return m.type === 'message' || (m.type === 'packet' && m.data?.decoded?.header?.payloadTypeName === 'GRP_TXT');
});
@@ -485,7 +431,7 @@
for (var i = 0; i < dominated.length; i++) {
var m = dominated[i];
if (!shouldProcessWSMessageForRegion(m, selectedRegions, observerIataById, observerIataByName)) continue;
if (!shouldProcessWSMessageForRegion(m, selectedRegions, observerIataMap)) continue;
var payload = m.data?.decoded?.payload;
if (!payload) continue;
@@ -586,18 +532,7 @@
if (liveEl) liveEl.textContent = 'New message received';
}
}
}
function handleWSBatch(msgs) {
var selectedRegions = getSelectedRegionsSnapshot();
processWSBatch(msgs, selectedRegions);
}
wsHandler = debouncedOnWS(function (msgs) {
handleWSBatch(msgs);
});
window._channelsHandleWSBatchForTest = handleWSBatch;
window._channelsProcessWSBatchForTest = processWSBatch;
// Tick relative timestamps every 1s — iterates channels array, updates DOM text only
timeAgoTimer = setInterval(function () {
@@ -639,7 +574,6 @@
return ch;
}).sort((a, b) => (b.lastActivityMs || 0) - (a.lastActivityMs || 0));
renderChannelList();
reconcileSelectionAfterChannelRefresh();
} catch (e) {
if (!silent) {
const el = document.getElementById('chList');
@@ -682,8 +616,6 @@
}
async function selectChannel(hash) {
const rp = RegionFilter.getRegionParam() || '';
const request = beginMessageRequest(hash, rp);
selectedHash = hash;
history.replaceState(null, '', `#/channels/${encodeURIComponent(hash)}`);
renderChannelList();
@@ -699,9 +631,9 @@
msgEl.innerHTML = '<div class="ch-loading">Loading messages…</div>';
try {
const rp = RegionFilter.getRegionParam();
const regionQs = rp ? '&region=' + encodeURIComponent(rp) : '';
const data = await api(`/channels/${encodeURIComponent(hash)}/messages?limit=200${regionQs}`, { ttl: CLIENT_TTL.channelMessages });
if (isStaleMessageRequest(request)) return;
messages = data.messages || [];
if (messages.length === 0 && rp) {
msgEl.innerHTML = '<div class="ch-empty">Channel not available in selected region</div>';
@@ -710,7 +642,6 @@
scrollToBottom();
}
} catch (e) {
if (isStaleMessageRequest(request)) return;
msgEl.innerHTML = `<div class="ch-empty">Failed to load messages: ${e.message}</div>`;
}
}
@@ -722,12 +653,9 @@
if (!msgEl) return;
const wasAtBottom = msgEl.scrollHeight - msgEl.scrollTop - msgEl.clientHeight < 60;
try {
const requestHash = selectedHash;
const rp = RegionFilter.getRegionParam() || '';
const request = beginMessageRequest(requestHash, rp);
const rp = RegionFilter.getRegionParam();
const regionQs = rp ? '&region=' + encodeURIComponent(rp) : '';
const data = await api(`/channels/${encodeURIComponent(requestHash)}/messages?limit=200${regionQs}`, { ttl: CLIENT_TTL.channelMessages, bust: !!opts.forceNoCache });
if (isStaleMessageRequest(request)) return;
const data = await api(`/channels/${encodeURIComponent(selectedHash)}/messages?limit=200${regionQs}`, { ttl: CLIENT_TTL.channelMessages, bust: !!opts.forceNoCache });
const newMsgs = data.messages || [];
if (opts.regionSwitch && rp && newMsgs.length === 0) {
messages = [];
@@ -790,25 +718,6 @@
if (msgEl) { msgEl.scrollTop = msgEl.scrollHeight; autoScroll = true; document.getElementById('chScrollBtn')?.classList.add('hidden'); }
}
window._channelsSetStateForTest = function (state) {
if (!state) return;
if (Array.isArray(state.channels)) channels = state.channels;
if (Array.isArray(state.messages)) messages = state.messages;
if (Object.prototype.hasOwnProperty.call(state, 'selectedHash')) selectedHash = state.selectedHash;
};
window._channelsSetObserverRegionsForTest = function (byId, byName) {
observerIataById = byId || {};
observerIataByName = byName || {};
};
window._channelsSelectChannelForTest = selectChannel;
window._channelsRefreshMessagesForTest = refreshMessages;
window._channelsLoadChannelsForTest = loadChannels;
window._channelsBeginMessageRequestForTest = beginMessageRequest;
window._channelsIsStaleMessageRequestForTest = isStaleMessageRequest;
window._channelsReconcileSelectionForTest = reconcileSelectionAfterChannelRefresh;
window._channelsGetStateForTest = function () {
return { channels: channels, messages: messages, selectedHash: selectedHash };
};
window._channelsShouldProcessWSMessageForRegion = shouldProcessWSMessageForRegion;
registerPage('channels', { init, destroy });
})();

View File

@@ -22,9 +22,9 @@
<meta name="twitter:title" content="CoreScope">
<meta name="twitter:description" content="Real-time MeshCore LoRa mesh network analyzer — live packet visualization, node tracking, channel decryption, and route analysis.">
<meta name="twitter:image" content="https://raw.githubusercontent.com/Kpa-clawbot/corescope/master/public/og-image.png">
<link rel="stylesheet" href="style.css?v=1774926567">
<link rel="stylesheet" href="home.css?v=1774926567">
<link rel="stylesheet" href="live.css?v=1774926567">
<link rel="stylesheet" href="style.css?v=1774925610">
<link rel="stylesheet" href="home.css?v=1774925610">
<link rel="stylesheet" href="live.css?v=1774925610">
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css"
integrity="sha256-p4NxAoJBhIIN+hmNHrzRCf9tD/miZyoHS5obTRR9BMY="
crossorigin="anonymous">
@@ -81,31 +81,31 @@
<main id="app" role="main"></main>
<script src="vendor/qrcode.js"></script>
<script src="roles.js?v=1774926567"></script>
<script src="customize.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="region-filter.js?v=1774926567"></script>
<script src="hop-resolver.js?v=1774926567"></script>
<script src="hop-display.js?v=1774926567"></script>
<script src="app.js?v=1774926567"></script>
<script src="home.js?v=1774926567"></script>
<script src="packet-filter.js?v=1774926567"></script>
<script src="packets.js?v=1774926567"></script>
<script src="geo-filter-overlay.js?v=1774926567"></script>
<script src="map.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="channels.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="nodes.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="traces.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="analytics.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio-v1-constellation.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio-v2-constellation.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio-lab.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="live.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="observers.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="observer-detail.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="compare.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="node-analytics.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="perf.js?v=1774926567" onerror="console.error('Failed to load:', this.src)"></script>
<script src="roles.js?v=1774925610"></script>
<script src="customize.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="region-filter.js?v=1774925610"></script>
<script src="hop-resolver.js?v=1774925610"></script>
<script src="hop-display.js?v=1774925610"></script>
<script src="app.js?v=1774925610"></script>
<script src="home.js?v=1774925610"></script>
<script src="packet-filter.js?v=1774925610"></script>
<script src="packets.js?v=1774925610"></script>
<script src="geo-filter-overlay.js?v=1774925610"></script>
<script src="map.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="channels.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="nodes.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="traces.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="analytics.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio-v1-constellation.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio-v2-constellation.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="audio-lab.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="live.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="observers.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="observer-detail.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="compare.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="node-analytics.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
<script src="perf.js?v=1774925610" onerror="console.error('Failed to load:', this.src)"></script>
</body>
</html>

View File

@@ -1968,327 +1968,11 @@ console.log('\n=== channels.js: shouldProcessWSMessageForRegion ===');
assert.strictEqual(shouldProcess(msg, ['SJC'], { obs1: 'SJC' }), false);
});
test('falls back to observer_name mapping when observer_id is missing', () => {
const msg = { data: { packet: { observer_name: 'Observer Alpha' } } };
assert.strictEqual(shouldProcess(msg, ['SJC'], { obs1: 'LAX' }, { 'Observer Alpha': 'SJC' }), true);
});
test('drops message when observer region lookup missing', () => {
const msg = { data: { packet: { observer_id: 'obs9' } } };
assert.strictEqual(shouldProcess(msg, ['SJC'], { obs1: 'SJC' }), false);
});
}
console.log('\n=== channels.js: WS batch + region snapshot integration ===');
{
function makeChannelsWsSandbox(regionParam) {
const ctx = makeSandbox();
const dom = {};
function makeEl(id) {
if (dom[id]) return dom[id];
dom[id] = {
id,
innerHTML: '',
textContent: '',
value: '',
scrollTop: 0,
scrollHeight: 100,
clientHeight: 80,
style: {},
dataset: {},
classList: { add() {}, remove() {}, toggle() {}, contains() { return false; } },
addEventListener() {},
removeEventListener() {},
querySelector() { return null; },
querySelectorAll() { return []; },
getBoundingClientRect() { return { left: 0, bottom: 0, width: 0 }; },
setAttribute() {},
removeAttribute() {},
focus() {},
};
return dom[id];
}
const headerText = { textContent: '' };
makeEl('chHeader').querySelector = (sel) => (sel === '.ch-header-text' ? headerText : null);
makeEl('chMessages');
makeEl('chList');
makeEl('chScrollBtn');
makeEl('chAriaLive');
makeEl('chBackBtn');
makeEl('chRegionFilter');
const appEl = {
innerHTML: '',
querySelector(sel) {
if (sel === '.ch-sidebar' || sel === '.ch-sidebar-resize' || sel === '.ch-main') return makeEl(sel);
if (sel === '.ch-layout') return { classList: { add() {}, remove() {}, contains() { return false; } } };
return makeEl(sel);
},
addEventListener() {},
};
ctx.document.getElementById = makeEl;
ctx.document.querySelector = (sel) => {
if (sel === '.ch-layout') return { classList: { add() {}, remove() {}, contains() { return false; } } };
return null;
};
ctx.document.querySelectorAll = () => [];
ctx.document.addEventListener = () => {};
ctx.document.removeEventListener = () => {};
ctx.document.documentElement = { getAttribute: () => null, setAttribute: () => {} };
ctx.document.body = { appendChild() {}, removeChild() {}, contains() { return false; } };
ctx.history = { replaceState() {} };
ctx.matchMedia = () => ({ matches: false });
ctx.window.matchMedia = ctx.matchMedia;
ctx.MutationObserver = function () { this.observe = () => {}; this.disconnect = () => {}; };
ctx.RegionFilter = {
init() {},
onChange() { return () => {}; },
offChange() {},
getRegionParam() { return regionParam || ''; },
};
ctx.debouncedOnWS = (fn) => fn;
ctx.onWS = () => {};
ctx.offWS = () => {};
ctx.api = (path) => {
if (path.indexOf('/observers') === 0) return Promise.resolve({ observers: [] });
if (path.indexOf('/channels') === 0) return Promise.resolve({ channels: [] });
return Promise.resolve({ messages: [] });
};
ctx.CLIENT_TTL = { observers: 120000, channels: 15000, channelMessages: 10000, nodeDetail: 10000 };
ctx.ROLE_EMOJI = {};
ctx.ROLE_LABELS = {};
ctx.timeAgo = () => '1m ago';
ctx.registerPage = (name, handlers) => { ctx._pageHandlers = handlers; };
ctx.btoa = (s) => Buffer.from(String(s), 'utf8').toString('base64');
ctx.atob = (s) => Buffer.from(String(s), 'base64').toString('utf8');
loadInCtx(ctx, 'public/channels.js');
ctx._pageHandlers.init(appEl);
return { ctx, dom };
}
test('WS batch respects region snapshot and observer_name fallback', () => {
const env = makeChannelsWsSandbox('SJC');
env.ctx.window._channelsSetObserverRegionsForTest({ obs1: 'SJC' }, { 'Observer Beta': 'SJC' });
env.ctx.window._channelsSetStateForTest({
selectedHash: 'general',
channels: [{ hash: 'general', name: 'general', messageCount: 0, lastActivityMs: 0 }],
messages: [],
});
env.ctx.window._channelsHandleWSBatchForTest([
{
type: 'packet',
data: {
hash: 'hash1',
decoded: { header: { payloadTypeName: 'GRP_TXT' }, payload: { channel: 'general', text: 'Alice: hello world' } },
packet: { observer_name: 'Observer Beta' },
},
},
{
type: 'packet',
data: {
hash: 'hash2',
decoded: { header: { payloadTypeName: 'GRP_TXT' }, payload: { channel: 'general', text: 'Bob: dropped' } },
packet: { observer_name: 'Observer Zeta' },
},
},
]);
const state = env.ctx.window._channelsGetStateForTest();
assert.strictEqual(state.messages.length, 1, 'only matching-region message should be appended');
assert.strictEqual(state.messages[0].sender, 'Alice');
assert.strictEqual(state.channels[0].messageCount, 1, 'channel count increments only for accepted message');
});
test('stale selectChannel response is discarded after region change', async () => {
const ctx = makeSandbox();
const dom = {};
function makeEl(id) {
if (dom[id]) return dom[id];
dom[id] = {
id,
innerHTML: '',
textContent: '',
value: '',
scrollTop: 0,
scrollHeight: 100,
clientHeight: 80,
style: {},
dataset: {},
classList: { add() {}, remove() {}, toggle() {}, contains() { return false; } },
addEventListener() {},
removeEventListener() {},
querySelector() { return null; },
querySelectorAll() { return []; },
getBoundingClientRect() { return { left: 0, bottom: 0, width: 0 }; },
setAttribute() {},
removeAttribute() {},
focus() {},
};
return dom[id];
}
const headerText = { textContent: '' };
makeEl('chHeader').querySelector = (sel) => (sel === '.ch-header-text' ? headerText : null);
makeEl('chMessages');
makeEl('chList');
makeEl('chScrollBtn');
makeEl('chAriaLive');
makeEl('chBackBtn');
makeEl('chRegionFilter');
const appEl = {
innerHTML: '',
querySelector(sel) {
if (sel === '.ch-sidebar' || sel === '.ch-sidebar-resize' || sel === '.ch-main') return makeEl(sel);
if (sel === '.ch-layout') return { classList: { add() {}, remove() {}, contains() { return false; } } };
return makeEl(sel);
},
addEventListener() {},
};
let region = 'SJC';
let resolver = null;
ctx.document.getElementById = makeEl;
ctx.document.querySelector = (sel) => {
if (sel === '.ch-layout') return { classList: { add() {}, remove() {}, contains() { return false; } } };
return null;
};
ctx.document.querySelectorAll = () => [];
ctx.document.addEventListener = () => {};
ctx.document.removeEventListener = () => {};
ctx.document.documentElement = { getAttribute: () => null, setAttribute: () => {} };
ctx.document.body = { appendChild() {}, removeChild() {}, contains() { return false; } };
ctx.history = { replaceState() {} };
ctx.matchMedia = () => ({ matches: false });
ctx.window.matchMedia = ctx.matchMedia;
ctx.MutationObserver = function () { this.observe = () => {}; this.disconnect = () => {}; };
ctx.RegionFilter = { init() {}, onChange() { return () => {}; }, offChange() {}, getRegionParam() { return region; } };
ctx.debouncedOnWS = (fn) => fn;
ctx.onWS = () => {};
ctx.offWS = () => {};
ctx.api = (path) => {
if (path.indexOf('/observers') === 0) return Promise.resolve({ observers: [] });
if (path.indexOf('/channels?') === 0 || path === '/channels') return Promise.resolve({ channels: [{ hash: 'general', name: 'general', messageCount: 2, lastActivity: null }] });
if (path.indexOf('/channels/general/messages') === 0) {
return new Promise((resolve) => { resolver = resolve; });
}
return Promise.resolve({ messages: [] });
};
ctx.CLIENT_TTL = { observers: 120000, channels: 15000, channelMessages: 10000, nodeDetail: 10000 };
ctx.ROLE_EMOJI = {};
ctx.ROLE_LABELS = {};
ctx.timeAgo = () => '1m ago';
ctx.registerPage = (name, handlers) => { ctx._pageHandlers = handlers; };
ctx.btoa = (s) => Buffer.from(String(s), 'utf8').toString('base64');
ctx.atob = (s) => Buffer.from(String(s), 'base64').toString('utf8');
loadInCtx(ctx, 'public/channels.js');
ctx._pageHandlers.init(appEl);
await Promise.resolve();
const selectPromise = ctx.window._channelsSelectChannelForTest('general');
region = 'LAX';
ctx.window._channelsBeginMessageRequestForTest('other', 'LAX');
resolver({ messages: [{ sender: 'Alice', text: 'stale', timestamp: '2025-01-01T00:00:00Z' }] });
await selectPromise;
const state = ctx.window._channelsGetStateForTest();
assert.strictEqual(state.selectedHash, 'general', 'stale select response must not clear or overwrite selection');
assert.strictEqual(state.messages.length, 0, 'stale response must be discarded');
});
// Regression test: when a region change removes the currently selected channel
// from the channel list, reconciliation must clear the selection and route the
// hash back to the channels root. The sandbox stubs out the DOM, history,
// RegionFilter, and the api() layer so public/channels.js runs in isolation.
test('loadChannels clears selected hash when channel no longer exists in region', async () => {
  const ctx = makeSandbox();
  const dom = {};
  // Memoizing element factory: returns the same stub per id so handlers and
  // assertions observe shared state (scroll positions, innerHTML, etc.).
  function makeEl(id) {
    if (dom[id]) return dom[id];
    dom[id] = {
      id,
      innerHTML: '',
      textContent: '',
      value: '',
      scrollTop: 0,
      scrollHeight: 100,
      clientHeight: 80,
      style: {},
      dataset: {},
      classList: { add() {}, remove() {}, toggle() {}, contains() { return false; } },
      addEventListener() {},
      removeEventListener() {},
      querySelector() { return null; },
      querySelectorAll() { return []; },
      getBoundingClientRect() { return { left: 0, bottom: 0, width: 0 }; },
      setAttribute() {},
      removeAttribute() {},
      focus() {},
    };
    return dom[id];
  }
  const headerText = { textContent: '' };
  // The header element exposes a nested .ch-header-text node the page updates.
  makeEl('chHeader').querySelector = (sel) => (sel === '.ch-header-text' ? headerText : null);
  makeEl('chMessages');
  makeEl('chList');
  makeEl('chScrollBtn');
  makeEl('chAriaLive');
  makeEl('chBackBtn');
  makeEl('chRegionFilter');
  const appEl = {
    innerHTML: '',
    querySelector(sel) {
      if (sel === '.ch-sidebar' || sel === '.ch-sidebar-resize' || sel === '.ch-main') return makeEl(sel);
      if (sel === '.ch-layout') return { classList: { add() {}, remove() {}, contains() { return false; } } };
      return makeEl(sel);
    },
    addEventListener() {},
  };
  const historyCalls = [];
  let channelCall = 0;
  ctx.document.getElementById = makeEl;
  ctx.document.querySelector = (sel) => {
    if (sel === '.ch-layout') return { classList: { add() {}, remove() {}, contains() { return false; } } };
    return null;
  };
  ctx.document.querySelectorAll = () => [];
  ctx.document.addEventListener = () => {};
  ctx.document.removeEventListener = () => {};
  ctx.document.documentElement = { getAttribute: () => null, setAttribute: () => {} };
  ctx.document.body = { appendChild() {}, removeChild() {}, contains() { return false; } };
  // Record hash rewrites so we can assert the page routed back to #/channels.
  ctx.history = { replaceState(_a, _b, url) { historyCalls.push(url); } };
  ctx.matchMedia = () => ({ matches: false });
  ctx.window.matchMedia = ctx.matchMedia;
  ctx.MutationObserver = function () { this.observe = () => {}; this.disconnect = () => {}; };
  ctx.RegionFilter = { init() {}, onChange() { return () => {}; }, offChange() {}, getRegionParam() { return 'SJC'; } };
  ctx.debouncedOnWS = (fn) => fn;
  ctx.onWS = () => {};
  ctx.offWS = () => {};
  ctx.api = (path) => {
    if (path.indexOf('/observers') === 0) return Promise.resolve({ observers: [] });
    // Check the messages route BEFORE the channel-list route: a bare
    // '/channels' prefix test would also match '/channels/general/messages',
    // making this branch unreachable and polluting channelCall with
    // message fetches (mirrors the guarded match used by the sibling test).
    if (path.indexOf('/channels/general/messages') === 0) return Promise.resolve({ messages: [{ sender: 'Alice', text: 'hi', timestamp: '2025-01-01T00:00:00Z' }] });
    if (path.indexOf('/channels?') === 0 || path === '/channels') {
      channelCall++;
      // First list load contains 'general'; the refresh simulates a region
      // update in which 'general' has disappeared.
      if (channelCall === 1) return Promise.resolve({ channels: [{ hash: 'general', name: 'general', messageCount: 1, lastActivity: null }] });
      return Promise.resolve({ channels: [{ hash: 'newchan', name: 'newchan', messageCount: 1, lastActivity: null }] });
    }
    return Promise.resolve({ messages: [] });
  };
  ctx.CLIENT_TTL = { observers: 120000, channels: 15000, channelMessages: 10000, nodeDetail: 10000 };
  ctx.ROLE_EMOJI = {};
  ctx.ROLE_LABELS = {};
  ctx.timeAgo = () => '1m ago';
  ctx.registerPage = (name, handlers) => { ctx._pageHandlers = handlers; };
  ctx.btoa = (s) => Buffer.from(String(s), 'utf8').toString('base64');
  ctx.atob = (s) => Buffer.from(String(s), 'base64').toString('utf8');
  loadInCtx(ctx, 'public/channels.js');
  ctx._pageHandlers.init(appEl);
  await Promise.resolve();
  // Select 'general', force a channel refresh (now missing 'general'),
  // then reconcile — the selection must be dropped.
  await ctx.window._channelsSelectChannelForTest('general');
  await ctx.window._channelsLoadChannelsForTest(true);
  ctx.window._channelsReconcileSelectionForTest();
  const state = ctx.window._channelsGetStateForTest();
  assert.strictEqual(state.selectedHash, null, 'selection should clear when channel disappears after region update');
  assert.ok(historyCalls.includes('#/channels'), 'should route back to channels root');
});
}
// ===== SUMMARY =====
// Final tally emitted after all suites run: a horizontal rule followed by the
// pass/fail counts accumulated in the shared `passed`/`failed` counters.
console.log(`\n${'═'.repeat(40)}`);
console.log(` Frontend helpers: ${passed} passed, ${failed} failed`);