protocol, file descriptions, more cryptography, handshake encoding, etc.

This commit is contained in:
Evgeny Poberezkin
2026-01-31 21:42:26 +00:00
parent 4a4f719bfb
commit b6c4c8faee
12 changed files with 2962 additions and 11 deletions

View File

@@ -577,7 +577,7 @@ describe "crypto/padding" $ do
**Test execution:** Tests live in `tests/XFTPWebTests.hs` in the simplexmq repo, skipped by default (require compiled TS project path). Run with:
```bash
cabal test --test-option=--match="/XFTP Web Client/"
cabal test --ghc-options -O0 --test-option=--match="/XFTP Web Client/"
```
**Random inputs:** Haskell tests can use QuickCheck to generate random inputs each run, not just hardcoded values. This catches edge cases that fixed test vectors miss.
@@ -1019,7 +1019,7 @@ Download orchestration — the top-level flow.
**Development workflow:**
1. Implement `encodeWord16` in `src/protocol/encoding.ts`
2. Run `cabal test --test-option=--match="/XFTP Web Client/encoding/encodeWord16"`
2. Run `cabal test --ghc-options -O0 --test-option=--match="/XFTP Web Client/encoding/encodeWord16"`
3. If it fails: Haskell says `expected 002a, got 2a00` → immediately know it's an endianness bug
4. Fix → rerun → passes → move to `encodeWord32`
5. Repeat until all per-function tests pass

View File

@@ -150,7 +150,7 @@ main = do
describe "XFTP file description" fileDescriptionTests
describe "XFTP CLI" xftpCLITests
describe "XFTP agent" xftpAgentTests
xftpWebTests
describe "XFTP Web Client" xftpWebTests
describe "XRCP" remoteControlTests
describe "Server CLIs" cliTests

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,94 @@
// File-level encryption/decryption matching Simplex.FileTransfer.Crypto.
// Operates on in-memory Uint8Array (no file I/O needed for browser).
import {Decoder, concatBytes, encodeInt64, encodeString, decodeString, encodeMaybe, decodeMaybe} from "../protocol/encoding.js"
import {sbInit, sbEncryptChunk, sbDecryptTailTag, sbAuth} from "./secretbox.js"
const AUTH_TAG_SIZE = 16n // Poly1305 auth tag length in bytes (bigint, used in bigint size arithmetic)
// ── FileHeader ──────────────────────────────────────────────────
// Plaintext header stored (encrypted) at the start of the file payload.
export interface FileHeader {
  fileName: string // original file name
  fileExtra: string | null // optional extra metadata; null encodes as Haskell Nothing
}
// Serialize a FileHeader. Matches Haskell smpEncode (fileName, fileExtra):
// SMP-encoded fileName followed by Maybe-encoded fileExtra.
export function encodeFileHeader(hdr: FileHeader): Uint8Array {
  const nameBytes = encodeString(hdr.fileName)
  const extraBytes = encodeMaybe(encodeString, hdr.fileExtra)
  return concatBytes(nameBytes, extraBytes)
}
// Parse a FileHeader from decrypted content; only the first 1024 bytes
// may contain the header. Returns the header and the remaining bytes
// (the actual file content).
export function parseFileHeader(data: Uint8Array): {header: FileHeader, rest: Uint8Array} {
  const d = new Decoder(data.subarray(0, Math.min(1024, data.length)))
  const header: FileHeader = {
    fileName: decodeString(d),
    fileExtra: decodeMaybe(decodeString, d)
  }
  return {header, rest: data.subarray(d.offset())}
}
// ── Encryption (FileTransfer.Crypto:encryptFile) ────────────────
// Encrypt file content with streaming XSalsa20-Poly1305
// (FileTransfer.Crypto:encryptFile).
// Output format: encrypted(Int64 fileSize | fileHdr | source | '#' padding) | 16-byte auth tag
//
// source   — raw file content
// fileHdr  — pre-encoded FileHeader bytes (from encodeFileHeader)
// key      — 32-byte symmetric key
// nonce    — 24-byte nonce
// fileSize — must equal BigInt(fileHdr.length + source.length)
// encSize  — total output size (including 16-byte auth tag)
//
// Throws if fileSize does not match the actual payload length, or if
// encSize leaves no room for the length prefix, payload and auth tag.
export function encryptFile(
  source: Uint8Array,
  fileHdr: Uint8Array,
  key: Uint8Array,
  nonce: Uint8Array,
  fileSize: bigint,
  encSize: bigint
): Uint8Array {
  // Enforce the documented contract so a wrong fileSize fails fast
  // instead of producing an undecryptable blob.
  if (fileSize !== BigInt(fileHdr.length + source.length)) {
    throw new Error("encryptFile: fileSize does not match header + content length")
  }
  const state = sbInit(key, nonce)
  const lenStr = encodeInt64(fileSize) // 8-byte encoded length prefix
  // Padding fills the fixed encSize: total minus tag, payload and 8-byte prefix.
  const padLen = Number(encSize - AUTH_TAG_SIZE - fileSize - 8n)
  if (padLen < 0) throw new Error("encryptFile: encSize too small")
  const hdr = sbEncryptChunk(state, concatBytes(lenStr, fileHdr))
  const encSource = sbEncryptChunk(state, source)
  const padding = new Uint8Array(padLen)
  padding.fill(0x23) // '#'
  const encPad = sbEncryptChunk(state, padding)
  const tag = sbAuth(state)
  return concatBytes(hdr, encSource, encPad, tag)
}
// ── Decryption (FileTransfer.Crypto:decryptChunks) ──────────────
// Decrypt one or more XFTP chunks into a FileHeader and file content.
// Chunks are concatenated, then decrypted as a single stream.
//
// encSize — total encrypted size (including 16-byte auth tag)
// chunks  — downloaded XFTP chunk data (concatenated = full encrypted file)
// key     — 32-byte symmetric key
// nonce   — 24-byte nonce
//
// Throws on empty input, size mismatch, or auth tag failure.
export function decryptChunks(
  encSize: bigint,
  chunks: Uint8Array[],
  key: Uint8Array,
  nonce: Uint8Array
): {header: FileHeader, content: Uint8Array} {
  if (chunks.length === 0) throw new Error("decryptChunks: empty chunks")
  const data = chunks.length === 1 ? chunks[0] : concatBytes(...chunks)
  // Check total size up front: a mismatch would otherwise surface as a
  // misleading "invalid auth tag" error from the decryptor.
  if (BigInt(data.length) !== encSize) {
    throw new Error("decryptChunks: total chunks length does not match encSize")
  }
  const paddedLen = encSize - AUTH_TAG_SIZE
  const {valid, content} = sbDecryptTailTag(key, nonce, paddedLen, data)
  if (!valid) throw new Error("decryptChunks: invalid auth tag")
  const {header, rest} = parseFileHeader(content)
  return {header, content: rest}
}

View File

@@ -0,0 +1,219 @@
// Streaming XSalsa20-Poly1305 — Simplex.Messaging.Crypto / Crypto.Lazy
//
// Libsodium-wrappers-sumo does not expose crypto_stream_xsalsa20_xor_ic,
// so the Salsa20/20 stream cipher core is implemented here.
// HSalsa20 uses libsodium's crypto_core_hsalsa20.
// Poly1305 uses libsodium's streaming crypto_onetimeauth_* API.
import sodium, {StateAddress} from "libsodium-wrappers-sumo"
import {concatBytes} from "../protocol/encoding.js"
import {pad, unPad, padLazy, unPadLazy} from "./padding.js"
// crypto_core_hsalsa20 exists at runtime but is missing from
// @types/libsodium-wrappers-sumo, so widen the module type locally.
// The cast is type-only — no runtime wrapper is created.
const _sodium = sodium as unknown as {
  crypto_core_hsalsa20(input: Uint8Array, key: Uint8Array, constant?: Uint8Array): Uint8Array
} & typeof sodium
// ── Salsa20/20 stream cipher core ───────────────────────────────
// Read a 32-bit little-endian unsigned integer from buf at byte offset off.
function readU32LE(buf: Uint8Array, off: number): number {
  let v = 0
  for (let i = 3; i >= 0; i--) v = ((v << 8) | buf[off + i]) >>> 0
  return v
}
// Write val as a 32-bit little-endian unsigned integer into buf at off.
function writeU32LE(buf: Uint8Array, off: number, val: number): void {
  for (let i = 0; i < 4; i++) buf[off + i] = (val >>> (8 * i)) & 0xff
}
// Rotate a 32-bit unsigned value left by n bits.
function rotl32(v: number, n: number): number {
  const shifted = (v << n) >>> 0
  const carried = v >>> (32 - n)
  return (shifted | carried) >>> 0
}
// Salsa20 sigma constants: "expa", "nd 3", "2-by", "te k" as LE words.
const SIGMA_0 = 0x61707865
const SIGMA_1 = 0x3320646e
const SIGMA_2 = 0x79622d32
const SIGMA_3 = 0x6b206574
// Compute one 64-byte Salsa20/20 keystream block.
// key     — 32-byte (sub)key
// nonce8  — 8-byte nonce (last 8 bytes of the XSalsa20 nonce)
// counter — block counter; only the low 32 bits are used (state word 9
//           is fixed to 0), sufficient for < 2^32 blocks (256 GiB).
function salsa20Block(key: Uint8Array, nonce8: Uint8Array, counter: number): Uint8Array {
  // Load key words.
  const k0 = readU32LE(key, 0), k1 = readU32LE(key, 4)
  const k2 = readU32LE(key, 8), k3 = readU32LE(key, 12)
  const k4 = readU32LE(key, 16), k5 = readU32LE(key, 20)
  const k6 = readU32LE(key, 24), k7 = readU32LE(key, 28)
  const n0 = readU32LE(nonce8, 0), n1 = readU32LE(nonce8, 4)
  // Initial state: sigma constants on the diagonal, key, nonce, counter.
  const s0 = SIGMA_0, s1 = k0, s2 = k1, s3 = k2
  const s4 = k3, s5 = SIGMA_1, s6 = n0, s7 = n1
  const s8 = counter >>> 0, s9 = 0, s10 = SIGMA_2, s11 = k4
  const s12 = k5, s13 = k6, s14 = k7, s15 = SIGMA_3
  let x0 = s0, x1 = s1, x2 = s2, x3 = s3
  let x4 = s4, x5 = s5, x6 = s6, x7 = s7
  let x8 = s8, x9 = s9, x10 = s10, x11 = s11
  let x12 = s12, x13 = s13, x14 = s14, x15 = s15
  // 10 double rounds = 20 rounds total (Salsa20/20).
  for (let i = 0; i < 10; i++) {
    // Column round
    x4 ^= rotl32((x0 + x12) >>> 0, 7); x8 ^= rotl32((x4 + x0) >>> 0, 9)
    x12 ^= rotl32((x8 + x4) >>> 0, 13); x0 ^= rotl32((x12 + x8) >>> 0, 18)
    x9 ^= rotl32((x5 + x1) >>> 0, 7); x13 ^= rotl32((x9 + x5) >>> 0, 9)
    x1 ^= rotl32((x13 + x9) >>> 0, 13); x5 ^= rotl32((x1 + x13) >>> 0, 18)
    x14 ^= rotl32((x10 + x6) >>> 0, 7); x2 ^= rotl32((x14 + x10) >>> 0, 9)
    x6 ^= rotl32((x2 + x14) >>> 0, 13); x10 ^= rotl32((x6 + x2) >>> 0, 18)
    x3 ^= rotl32((x15 + x11) >>> 0, 7); x7 ^= rotl32((x3 + x15) >>> 0, 9)
    x11 ^= rotl32((x7 + x3) >>> 0, 13); x15 ^= rotl32((x11 + x7) >>> 0, 18)
    // Row round
    x1 ^= rotl32((x0 + x3) >>> 0, 7); x2 ^= rotl32((x1 + x0) >>> 0, 9)
    x3 ^= rotl32((x2 + x1) >>> 0, 13); x0 ^= rotl32((x3 + x2) >>> 0, 18)
    x6 ^= rotl32((x5 + x4) >>> 0, 7); x7 ^= rotl32((x6 + x5) >>> 0, 9)
    x4 ^= rotl32((x7 + x6) >>> 0, 13); x5 ^= rotl32((x4 + x7) >>> 0, 18)
    x11 ^= rotl32((x10 + x9) >>> 0, 7); x8 ^= rotl32((x11 + x10) >>> 0, 9)
    x9 ^= rotl32((x8 + x11) >>> 0, 13); x10 ^= rotl32((x9 + x8) >>> 0, 18)
    x12 ^= rotl32((x15 + x14) >>> 0, 7); x13 ^= rotl32((x12 + x15) >>> 0, 9)
    x14 ^= rotl32((x13 + x12) >>> 0, 13); x15 ^= rotl32((x14 + x13) >>> 0, 18)
  }
  // Output: working state added to initial state, serialized little-endian.
  const out = new Uint8Array(64)
  writeU32LE(out, 0, (x0 + s0) >>> 0); writeU32LE(out, 4, (x1 + s1) >>> 0)
  writeU32LE(out, 8, (x2 + s2) >>> 0); writeU32LE(out, 12, (x3 + s3) >>> 0)
  writeU32LE(out, 16, (x4 + s4) >>> 0); writeU32LE(out, 20, (x5 + s5) >>> 0)
  writeU32LE(out, 24, (x6 + s6) >>> 0); writeU32LE(out, 28, (x7 + s7) >>> 0)
  writeU32LE(out, 32, (x8 + s8) >>> 0); writeU32LE(out, 36, (x9 + s9) >>> 0)
  writeU32LE(out, 40, (x10 + s10) >>> 0); writeU32LE(out, 44, (x11 + s11) >>> 0)
  writeU32LE(out, 48, (x12 + s12) >>> 0); writeU32LE(out, 52, (x13 + s13) >>> 0)
  writeU32LE(out, 56, (x14 + s14) >>> 0); writeU32LE(out, 60, (x15 + s15) >>> 0)
  return out
}
// ── Streaming state ─────────────────────────────────────────────
// Streaming XSalsa20-Poly1305 state.
export interface SbState {
  _subkey: Uint8Array // 32-byte Salsa20 subkey (double HSalsa20 of key/nonce)
  _nonce8: Uint8Array // last 8 bytes of the 24-byte XSalsa20 nonce
  _counter: number // next Salsa20 block counter
  _ksBuf: Uint8Array // current keystream block
  _ksOff: number // offset of the next unused keystream byte in _ksBuf
  _authState: StateAddress // libsodium streaming Poly1305 state
}
// Initialize a streaming secretbox state from a 32-byte key and 24-byte nonce.
export function sbInit(key: Uint8Array, nonce: Uint8Array): SbState {
  // Double HSalsa20 cascade matching Haskell cryptonite XSalsa20 (Crypto.hs:xSalsa20):
  // subkey1 = HSalsa20(key, zeros16)
  // subkey  = HSalsa20(subkey1, nonce[0:16])
  // keystream = Salsa20(subkey, nonce[16:24])
  const zeros16 = new Uint8Array(16)
  const subkey1 = _sodium.crypto_core_hsalsa20(zeros16, key)
  const subkey = _sodium.crypto_core_hsalsa20(nonce.subarray(0, 16), subkey1)
  const nonce8 = new Uint8Array(nonce.subarray(16, 24))
  // Keystream block 0: first 32 bytes key Poly1305, remaining 32 bytes
  // start the encryption keystream (NaCl secretbox layout).
  const block0 = salsa20Block(subkey, nonce8, 0)
  const poly1305Key = block0.subarray(0, 32)
  const ksBuf = new Uint8Array(block0.subarray(32))
  const authState = sodium.crypto_onetimeauth_init(poly1305Key)
  return {_subkey: subkey, _nonce8: nonce8, _counter: 1, _ksBuf: ksBuf, _ksOff: 0, _authState: authState}
}
// crypto_box variant of sbInit: the DH shared secret is used directly
// as the secretbox key.
export function cbInit(dhSecret: Uint8Array, nonce: Uint8Array): SbState {
  return sbInit(dhSecret, nonce)
}
// Encrypt one chunk: XOR with keystream, then absorb the resulting
// ciphertext into the Poly1305 state (encrypt-then-MAC).
export function sbEncryptChunk(state: SbState, chunk: Uint8Array): Uint8Array {
  const cipher = xorKeystream(state, chunk)
  sodium.crypto_onetimeauth_update(state._authState, cipher)
  return cipher
}
// Decrypt one chunk: absorb the ciphertext into Poly1305 first, then
// XOR with keystream (mirror of sbEncryptChunk).
export function sbDecryptChunk(state: SbState, chunk: Uint8Array): Uint8Array {
  sodium.crypto_onetimeauth_update(state._authState, chunk)
  return xorKeystream(state, chunk)
}
// Finalize the Poly1305 MAC and return the 16-byte auth tag.
export function sbAuth(state: SbState): Uint8Array {
  return sodium.crypto_onetimeauth_final(state._authState)
}
// ── High-level: tail tag (tag appended) ─────────────────────────
// Pad, encrypt and append the 16-byte auth tag (tag-appended format).
export function sbEncryptTailTag(
  key: Uint8Array, nonce: Uint8Array,
  data: Uint8Array, len: bigint, padLen: bigint
): Uint8Array {
  const padded = padLazy(data, len, padLen)
  const st = sbInit(key, nonce)
  const encrypted = sbEncryptChunk(st, padded)
  return concatBytes(encrypted, sbAuth(st))
}
// Decrypt data in tag-appended format: first paddedLen bytes are the
// ciphertext, the remainder is the 16-byte auth tag. Returns the
// unpadded content plus a validity flag for the tag.
export function sbDecryptTailTag(
  key: Uint8Array, nonce: Uint8Array,
  paddedLen: bigint, data: Uint8Array
): {valid: boolean; content: Uint8Array} {
  const split = Number(paddedLen)
  const st = sbInit(key, nonce)
  const plaintext = sbDecryptChunk(st, data.subarray(0, split))
  const computedTag = sbAuth(st)
  const providedTag = data.subarray(split)
  const valid = providedTag.length === 16 && constantTimeEqual(providedTag, computedTag)
  return {valid, content: unPadLazy(plaintext)}
}
// ── Tag-prepended secretbox (Haskell Crypto.hs:cryptoBox) ───────
// NaCl-style secretbox with the tag PREPENDED (Haskell Crypto.hs:cryptoBox).
export function cryptoBox(key: Uint8Array, nonce: Uint8Array, msg: Uint8Array): Uint8Array {
  const st = sbInit(key, nonce)
  const encrypted = sbEncryptChunk(st, msg)
  const authTag = sbAuth(st)
  return concatBytes(authTag, encrypted)
}
// Pad the message to padLen, then seal it as a tag-prepended crypto_box.
export function cbEncrypt(
  dhSecret: Uint8Array, nonce: Uint8Array,
  msg: Uint8Array, padLen: number
): Uint8Array {
  const padded = pad(msg, padLen)
  return cryptoBox(dhSecret, nonce, padded)
}
// Open a tag-prepended crypto_box packet: verify the 16-byte tag, then
// return the unpadded plaintext. Throws on authentication failure.
export function cbDecrypt(
  dhSecret: Uint8Array, nonce: Uint8Array,
  packet: Uint8Array
): Uint8Array {
  const st = sbInit(dhSecret, nonce)
  const plaintext = sbDecryptChunk(st, packet.subarray(16))
  const expectedTag = sbAuth(st)
  const providedTag = packet.subarray(0, 16)
  if (!constantTimeEqual(providedTag, expectedTag)) throw new Error("secretbox: authentication failed")
  return unPad(plaintext)
}
// ── Internal ────────────────────────────────────────────────────
// XOR data with the Salsa20 keystream, advancing the stream state.
// Leftover keystream bytes in _ksBuf are consumed first; new 64-byte
// blocks are generated on demand with the incrementing block counter.
function xorKeystream(state: SbState, data: Uint8Array): Uint8Array {
  const result = new Uint8Array(data.length)
  let off = 0
  while (off < data.length) {
    if (state._ksOff >= state._ksBuf.length) {
      // Current keystream block exhausted — generate the next one.
      state._ksBuf = salsa20Block(state._subkey, state._nonce8, state._counter++)
      state._ksOff = 0
    }
    const available = state._ksBuf.length - state._ksOff
    const needed = data.length - off
    const n = Math.min(available, needed)
    for (let i = 0; i < n; i++) {
      result[off + i] = data[off + i] ^ state._ksBuf[state._ksOff + i]
    }
    state._ksOff += n
    off += n
  }
  return result
}
// Timing-safe byte comparison: accumulate XOR differences instead of
// returning early on the first mismatch.
function constantTimeEqual(a: Uint8Array, b: Uint8Array): boolean {
  if (a.length !== b.length) return false
  let acc = 0
  a.forEach((byte, i) => { acc |= byte ^ b[i] })
  return acc === 0
}

75
xftp-web/src/download.ts Normal file
View File

@@ -0,0 +1,75 @@
// XFTP download pipeline — integration of protocol + crypto layers.
//
// Ties together: DH key exchange (keys), transport decryption (client),
// file-level decryption (file), chunk sizing (chunks), digest verification.
//
// Usage:
// 1. Parse FileDescription from YAML (description.ts)
// 2. For each chunk replica:
// a. generateX25519KeyPair() → ephemeral DH keypair
// b. encodeFGET(dhPub) → FGET command
// c. encodeAuthTransmission(...) → padded block (send to server)
// d. decodeTransmission(responseBlock) → raw response
// e. decodeResponse(raw) → FRFile { rcvDhKey, nonce }
// f. processFileResponse(rcvPrivKey, rcvDhKey) → dhSecret
// g. decryptReceivedChunk(dhSecret, nonce, encData, digest) → plaintext
// 3. processDownloadedFile(fd, plaintextChunks) → { header, content }
import {dh} from "./crypto/keys.js"
import {sha256} from "./crypto/digest.js"
import {decryptChunks, type FileHeader} from "./crypto/file.js"
import {decryptTransportChunk} from "./protocol/client.js"
import type {FileDescription} from "./protocol/description.js"
// ── Process FRFile response ─────────────────────────────────────
// Derive transport decryption secret from FRFile response parameters.
// Uses DH(serverDhKey, recipientPrivKey) to produce the shared secret.
export function processFileResponse(
  recipientPrivKey: Uint8Array, // Ephemeral X25519 private key (32 bytes)
  serverDhKey: Uint8Array, // rcvDhKey from FRFile response (32 bytes)
): Uint8Array {
  // dh(publicKey, privateKey) — see crypto/keys.
  return dh(serverDhKey, recipientPrivKey)
}
// ── Decrypt a single received chunk ─────────────────────────────
// Decrypt one transport-encrypted chunk and verify its SHA-256 digest
// (skipped when expectedDigest is null). Throws on auth tag or digest
// verification failure.
export function decryptReceivedChunk(
  dhSecret: Uint8Array,
  cbNonce: Uint8Array,
  encData: Uint8Array,
  expectedDigest: Uint8Array | null
): Uint8Array {
  const {valid, content} = decryptTransportChunk(dhSecret, cbNonce, encData)
  if (!valid) throw new Error("transport auth tag verification failed")
  if (expectedDigest !== null && !digestEqual(sha256(content), expectedDigest)) {
    throw new Error("chunk digest mismatch")
  }
  return content
}
// ── Full download pipeline ──────────────────────────────────────
// Process downloaded file: concatenate transport-decrypted chunks,
// then file-level decrypt using key/nonce from the file description.
// Returns parsed FileHeader and file content.
// NOTE(review): fd.size is passed as encSize (total size including the
// auth tag) — confirm the description stores the encrypted size.
export function processDownloadedFile(
  fd: FileDescription,
  plaintextChunks: Uint8Array[]
): {header: FileHeader, content: Uint8Array} {
  return decryptChunks(BigInt(fd.size), plaintextChunks, fd.key, fd.nonce)
}
// ── Internal ────────────────────────────────────────────────────
// Constant-time byte comparison for digest checks.
function digestEqual(a: Uint8Array, b: Uint8Array): boolean {
  if (a.length !== b.length) return false
  let acc = 0
  for (const [i, v] of a.entries()) acc |= v ^ b[i]
  return acc === 0
}

View File

@@ -0,0 +1,86 @@
// XFTP chunk sizing — Simplex.FileTransfer.Chunks + Client
//
// Computes chunk sizes for file uploads, chunk specifications with offsets,
// and per-chunk SHA-256 digests.
import {kb, mb} from "./description.js"
import {sha256} from "../crypto/digest.js"
// ── Chunk size constants (Simplex.FileTransfer.Chunks) ──────────
// Allowed server chunk sizes, smallest to largest.
export const chunkSize0 = kb(64) // 65536
export const chunkSize1 = kb(256) // 262144
export const chunkSize2 = mb(1) // 1048576
export const chunkSize3 = mb(4) // 4194304
export const serverChunkSizes = [chunkSize0, chunkSize1, chunkSize2, chunkSize3]
// ── Size constants ──────────────────────────────────────────────
export const fileSizeLen = 8 // 64-bit file size prefix (padLazy)
export const authTagSize = 16 // Poly1305 authentication tag
// ── Chunk sizing (Simplex.FileTransfer.Client.prepareChunkSizes) ─
// Three quarters of sz, rounded down (threshold for "almost full" chunks).
function size34(sz: number): number {
  return Math.floor((sz * 3) / 4)
}
// Compute the list of chunk sizes for a payload
// (Simplex.FileTransfer.Client.prepareChunkSizes): pick the adjacent
// (small, big) size tier for the payload, fill with whole big chunks,
// then finish the remainder with one big chunk (when it is more than
// 3/4 full) or several small chunks (rounded up).
export function prepareChunkSizes(payloadSize: number): number[] {
  let smallSize = chunkSize0
  let bigSize = chunkSize1
  if (payloadSize > size34(chunkSize3)) {
    smallSize = chunkSize2; bigSize = chunkSize3
  } else if (payloadSize > size34(chunkSize2)) {
    smallSize = chunkSize1; bigSize = chunkSize2
  }
  const sizes: number[] = []
  let remaining = payloadSize
  // Whole big chunks first.
  while (remaining >= bigSize) {
    sizes.push(bigSize)
    remaining -= bigSize
  }
  if (remaining > 0) {
    if (remaining > size34(bigSize)) {
      // Remainder nearly fills a big chunk — use a single big chunk.
      sizes.push(bigSize)
    } else {
      // Round the remainder up to whole small chunks.
      const n = Math.ceil(remaining / smallSize)
      for (let i = 0; i < n; i++) sizes.push(smallSize)
    }
  }
  return sizes
}
// Find the smallest server chunk size that fits the payload; null when
// the payload exceeds the largest chunk size. Matches Haskell singleChunkSize.
export function singleChunkSize(payloadSize: number): number | null {
  const fit = serverChunkSizes.find(sz => payloadSize <= sz)
  return fit ?? null
}
// ── Chunk specs ─────────────────────────────────────────────────
// Byte range of one chunk within the encrypted file.
export interface ChunkSpec {
  chunkOffset: number
  chunkSize: number
}
// Generate chunk specifications with cumulative byte offsets.
// Matches Haskell prepareChunkSpecs (without filePath).
export function prepareChunkSpecs(chunkSizes: number[]): ChunkSpec[] {
  let offset = 0
  return chunkSizes.map(size => {
    const spec = {chunkOffset: offset, chunkSize: size}
    offset += size
    return spec
  })
}
// ── Chunk digest ────────────────────────────────────────────────
// SHA-256 digest of one chunk, used for per-chunk integrity checks.
export function getChunkDigest(chunk: Uint8Array): Uint8Array {
  return sha256(chunk)
}

View File

@@ -0,0 +1,95 @@
// XFTP client protocol operations — Simplex.FileTransfer.Client + Crypto
//
// CbAuthenticator-based command authentication and transport-level
// chunk encryption/decryption for XFTP downloads.
import {concatBytes} from "./encoding.js"
import {dh} from "../crypto/keys.js"
import {sha512} from "../crypto/digest.js"
import {
cbInit, sbEncryptChunk, sbDecryptChunk, sbAuth, cryptoBox
} from "../crypto/secretbox.js"
// ── Constants ───────────────────────────────────────────────────
export const cbAuthenticatorSize = 80 // SHA512 hash (64) + auth tag (16), as produced by cbAuthenticate
// ── CbAuthenticator (Crypto.hs:cbAuthenticate) ─────────────────
// Create a crypto_box authenticator for a message: sha512(msg) encrypted
// with NaCl crypto_box under DH(peerPubKey, ownPrivKey).
// Returns 80 bytes: 16-byte tag prepended to the 64-byte encrypted hash.
export function cbAuthenticate(
  peerPubKey: Uint8Array,
  ownPrivKey: Uint8Array,
  nonce: Uint8Array,
  msg: Uint8Array
): Uint8Array {
  const sharedSecret = dh(peerPubKey, ownPrivKey)
  const msgHash = sha512(msg)
  return cryptoBox(sharedSecret, nonce, msgHash)
}
// Verify a crypto_box authenticator for a message: decrypt it with
// DH(peerPubKey, ownPrivKey), check the auth tag, then compare the
// decrypted hash against sha512(msg). All comparisons are timing-safe.
export function cbVerify(
  peerPubKey: Uint8Array,
  ownPrivKey: Uint8Array,
  nonce: Uint8Array,
  authenticator: Uint8Array,
  msg: Uint8Array
): boolean {
  if (authenticator.length !== cbAuthenticatorSize) return false
  const sharedSecret = dh(peerPubKey, ownPrivKey)
  const state = cbInit(sharedSecret, nonce)
  const decryptedHash = sbDecryptChunk(state, authenticator.subarray(16))
  const expectedTag = sbAuth(state)
  if (!constantTimeEqual(authenticator.subarray(0, 16), expectedTag)) return false
  return constantTimeEqual(decryptedHash, sha512(msg))
}
// ── Transport-level chunk encryption/decryption ─────────────────
// Encrypt a chunk for transport (tag-appended format), matching
// sendEncFile in FileTransfer.Transport: the ciphertext is produced via
// sbEncryptChunk, then the 16-byte auth tag is appended.
export function encryptTransportChunk(
  dhSecret: Uint8Array,
  cbNonce: Uint8Array,
  plainData: Uint8Array
): Uint8Array {
  const state = cbInit(dhSecret, cbNonce)
  const encrypted = sbEncryptChunk(state, plainData)
  return concatBytes(encrypted, sbAuth(state))
}
// Decrypt a transport-encrypted chunk (tag-appended format), matching
// receiveEncFile / receiveSbFile in FileTransfer.Transport: decrypt the
// ciphertext via sbDecryptChunk, then verify the trailing 16-byte tag.
export function decryptTransportChunk(
  dhSecret: Uint8Array,
  cbNonce: Uint8Array,
  encData: Uint8Array
): {valid: boolean, content: Uint8Array} {
  if (encData.length < 16) return {valid: false, content: new Uint8Array(0)}
  const tagStart = encData.length - 16
  const state = cbInit(dhSecret, cbNonce)
  const content = sbDecryptChunk(state, encData.subarray(0, tagStart))
  const computedTag = sbAuth(state)
  const valid = constantTimeEqual(encData.subarray(tagStart), computedTag)
  return {valid, content}
}
// ── Internal ────────────────────────────────────────────────────
// Timing-safe byte comparison: fold XOR differences instead of exiting
// on the first mismatch.
function constantTimeEqual(a: Uint8Array, b: Uint8Array): boolean {
  if (a.length !== b.length) return false
  return a.reduce((acc, byte, i) => acc | (byte ^ b[i]), 0) === 0
}

View File

@@ -0,0 +1,158 @@
// Protocol commands and responses — Simplex.FileTransfer.Protocol
//
// Commands (client -> server): FNEW, FADD, FPUT, FDEL, FGET, FACK, PING
// Responses (server -> client): SIDS, RIDS, FILE, OK, ERR, PONG
import {
Decoder, concatBytes,
encodeBytes, decodeBytes,
encodeWord32,
encodeNonEmpty, decodeNonEmpty,
encodeMaybe
} from "./encoding.js"
import {decodePubKeyX25519} from "../crypto/keys.js"
// ── Types ─────────────────────────────────────────────────────────
// File registration info sent with FNEW.
export interface FileInfo {
  sndKey: Uint8Array // DER-encoded Ed25519 public key (44 bytes)
  size: number // Word32
  digest: Uint8Array // SHA-256 digest (32 bytes)
}
// Nested command error carried by ERR CMD responses.
export type CommandError = "UNKNOWN" | "SYNTAX" | "PROHIBITED" | "NO_AUTH" | "HAS_AUTH" | "NO_ENTITY"
// Server error variants carried by ERR responses (see decodeXFTPError).
export type XFTPErrorType =
  | {type: "BLOCK"} | {type: "SESSION"} | {type: "HANDSHAKE"}
  | {type: "CMD", cmdErr: CommandError}
  | {type: "AUTH"}
  | {type: "BLOCKED", blockInfo: string}
  | {type: "SIZE"} | {type: "QUOTA"} | {type: "DIGEST"} | {type: "CRYPTO"}
  | {type: "NO_FILE"} | {type: "HAS_FILE"} | {type: "FILE_IO"}
  | {type: "TIMEOUT"} | {type: "INTERNAL"}
// Decoded server responses; tags in comments are the wire tags.
export type FileResponse =
  | {type: "FRSndIds", senderId: Uint8Array, recipientIds: Uint8Array[]} // SIDS
  | {type: "FRRcvIds", recipientIds: Uint8Array[]} // RIDS
  | {type: "FRFile", rcvDhKey: Uint8Array, nonce: Uint8Array} // FILE
  | {type: "FROk"} // OK
  | {type: "FRErr", err: XFTPErrorType} // ERR
  | {type: "FRPong"} // PONG
// ── FileInfo encoding ─────────────────────────────────────────────
// smpEncode FileInfo {sndKey, size, digest} = smpEncode (sndKey, size, digest)
export function encodeFileInfo(fi: FileInfo): Uint8Array {
  const parts = [encodeBytes(fi.sndKey), encodeWord32(fi.size), encodeBytes(fi.digest)]
  return concatBytes(...parts)
}
// ── Command encoding (encodeProtocol) ─────────────────────────────
const SPACE = new Uint8Array([0x20])
// Encode an ASCII command tag to bytes (one byte per UTF-16 code unit,
// truncated to 8 bits — tags are plain ASCII).
function ascii(s: string): Uint8Array {
  return Uint8Array.from({length: s.length}, (_, i) => s.charCodeAt(i))
}
// FNEW: register a new file (FileInfo + recipient keys + optional basic auth).
export function encodeFNEW(file: FileInfo, rcvKeys: Uint8Array[], auth: Uint8Array | null): Uint8Array {
  const keysPart = encodeNonEmpty(encodeBytes, rcvKeys)
  const authPart = encodeMaybe(encodeBytes, auth)
  return concatBytes(ascii("FNEW"), SPACE, encodeFileInfo(file), keysPart, authPart)
}
// FADD: add recipient keys to an existing file.
export function encodeFADD(rcvKeys: Uint8Array[]): Uint8Array {
  return concatBytes(ascii("FADD"), SPACE, encodeNonEmpty(encodeBytes, rcvKeys))
}
// Parameterless commands encode as their bare tag.
export function encodeFPUT(): Uint8Array { return ascii("FPUT") }
export function encodeFDEL(): Uint8Array { return ascii("FDEL") }
// FGET carries the recipient's ephemeral X25519 DH public key.
export function encodeFGET(rcvDhKey: Uint8Array): Uint8Array {
  return concatBytes(ascii("FGET"), SPACE, encodeBytes(rcvDhKey))
}
export function encodeFACK(): Uint8Array { return ascii("FACK") }
export function encodePING(): Uint8Array { return ascii("PING") }
// ── Response decoding ─────────────────────────────────────────────
// Read an ASCII tag: consume bytes up to (not including) the next space
// or newline, returning them as a string.
function readTag(d: Decoder): string {
  let tag = ""
  while (d.remaining() > 0) {
    const b = d.buf[d.offset()]
    if (b === 0x20 || b === 0x0a) break
    tag += String.fromCharCode(b)
    d.anyByte()
  }
  return tag
}
// Consume exactly one space separator; throws if the next byte differs.
function readSpace(d: Decoder): void {
  if (d.anyByte() !== 0x20) throw new Error("expected space")
}
// Decode a CommandError tag; "NO_QUEUE" is accepted as a legacy alias
// of "NO_ENTITY".
function decodeCommandError(s: string): CommandError {
  if (s === "UNKNOWN" || s === "SYNTAX" || s === "PROHIBITED" || s === "NO_AUTH" || s === "HAS_AUTH" || s === "NO_ENTITY") return s
  if (s === "NO_QUEUE") return "NO_ENTITY"
  throw new Error("bad CommandError: " + s)
}
// Parse an XFTPErrorType; the decoder is positioned at the error tag.
// Throws on unknown tags.
export function decodeXFTPError(d: Decoder): XFTPErrorType {
  const s = readTag(d)
  switch (s) {
    case "BLOCK": return {type: "BLOCK"}
    case "SESSION": return {type: "SESSION"}
    case "HANDSHAKE": return {type: "HANDSHAKE"}
    // CMD carries a nested command-error tag after a space.
    case "CMD": { readSpace(d); return {type: "CMD", cmdErr: decodeCommandError(readTag(d))} }
    case "AUTH": return {type: "AUTH"}
    // BLOCKED carries free-form block-info text in the remainder.
    case "BLOCKED": {
      readSpace(d)
      const rest = d.takeAll()
      let info = ""
      for (let i = 0; i < rest.length; i++) info += String.fromCharCode(rest[i])
      return {type: "BLOCKED", blockInfo: info}
    }
    case "SIZE": return {type: "SIZE"}
    case "QUOTA": return {type: "QUOTA"}
    case "DIGEST": return {type: "DIGEST"}
    case "CRYPTO": return {type: "CRYPTO"}
    case "NO_FILE": return {type: "NO_FILE"}
    case "HAS_FILE": return {type: "HAS_FILE"}
    case "FILE_IO": return {type: "FILE_IO"}
    case "TIMEOUT": return {type: "TIMEOUT"}
    case "INTERNAL": return {type: "INTERNAL"}
    default: throw new Error("bad XFTPErrorType: " + s)
  }
}
// Parse a raw server response into a FileResponse.
// Throws on unknown tags or malformed payloads.
export function decodeResponse(data: Uint8Array): FileResponse {
  const d = new Decoder(data)
  const tagStr = readTag(d)
  switch (tagStr) {
    case "SIDS": {
      readSpace(d)
      return {type: "FRSndIds", senderId: decodeBytes(d), recipientIds: decodeNonEmpty(decodeBytes, d)}
    }
    case "RIDS": {
      readSpace(d)
      return {type: "FRRcvIds", recipientIds: decodeNonEmpty(decodeBytes, d)}
    }
    case "FILE": {
      readSpace(d)
      const rcvDhKey = decodePubKeyX25519(decodeBytes(d))
      const nonce = d.take(24) // CbNonce for transport decryption
      return {type: "FRFile", rcvDhKey, nonce}
    }
    case "OK": return {type: "FROk"}
    case "ERR": { readSpace(d); return {type: "FRErr", err: decodeXFTPError(d)} }
    case "PONG": return {type: "FRPong"}
    default: throw new Error("unknown response: " + tagStr)
  }
}

View File

@@ -0,0 +1,363 @@
// XFTP file description encoding/decoding — Simplex.FileTransfer.Description
//
// Handles YAML-encoded file descriptions matching Haskell Data.Yaml output format.
// Base64url encoding matches Haskell Data.ByteString.Base64.URL.encode (with padding).
// ── Base64url (RFC 4648 §5) with '=' padding ───────────────────
// Base64url (RFC 4648 §5) alphabet and reverse lookup table.
const B64URL = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"
const B64_DECODE = new Uint8Array(128)
B64_DECODE.fill(0xff) // 0xff marks characters outside the alphabet
for (let i = 0; i < 64; i++) B64_DECODE[B64URL.charCodeAt(i)] = i
// Encode bytes as base64url WITH '=' padding, matching Haskell
// Data.ByteString.Base64.URL.encode.
export function base64urlEncode(data: Uint8Array): string {
  let result = ""
  const len = data.length
  let i = 0
  for (; i + 2 < len; i += 3) {
    const b0 = data[i], b1 = data[i + 1], b2 = data[i + 2]
    result += B64URL[b0 >>> 2]
    result += B64URL[((b0 & 3) << 4) | (b1 >>> 4)]
    result += B64URL[((b1 & 15) << 2) | (b2 >>> 6)]
    result += B64URL[b2 & 63]
  }
  if (i < len) {
    const b0 = data[i]
    result += B64URL[b0 >>> 2]
    if (i + 1 < len) {
      const b1 = data[i + 1]
      result += B64URL[((b0 & 3) << 4) | (b1 >>> 4)]
      result += B64URL[(b1 & 15) << 2]
      result += "="
    } else {
      result += B64URL[(b0 & 3) << 4]
      result += "=="
    }
  }
  return result
}
// Decode a base64url string ('=' padding optional).
// Throws on characters outside the base64url alphabet — previously such
// input silently decoded to garbage through the 0xff sentinel.
export function base64urlDecode(s: string): Uint8Array {
  let end = s.length
  while (end > 0 && s.charCodeAt(end - 1) === 0x3d) end-- // strip '='
  const n = end
  // Validated lookup: rejects non-ASCII and non-alphabet characters.
  const dec = (i: number): number => {
    const c = s.charCodeAt(i)
    const v = c < 128 ? B64_DECODE[c] : 0xff
    if (v === 0xff) throw new Error("base64urlDecode: invalid character at " + i)
    return v
  }
  const out = new Uint8Array((n * 3) >>> 2)
  let j = 0, i = 0
  for (; i + 3 < n; i += 4) {
    const a = dec(i), b = dec(i + 1)
    const c = dec(i + 2), d = dec(i + 3)
    out[j++] = (a << 2) | (b >>> 4)
    out[j++] = ((b & 15) << 4) | (c >>> 2)
    out[j++] = ((c & 3) << 6) | d
  }
  if (n - i >= 2) {
    const a = dec(i), b = dec(i + 1)
    out[j++] = (a << 2) | (b >>> 4)
    if (n - i >= 3) {
      const c = dec(i + 2)
      out[j++] = ((b & 15) << 4) | (c >>> 2)
    }
  }
  return out
}
// ── FileSize encoding/decoding ──────────────────────────────────
// Size unit helpers (bytes).
export function kb(n: number): number { return n * 1024 }
export function mb(n: number): number { return n * 1048576 }
export function gb(n: number): number { return n * 1073741824 }
// Render a byte count with the largest exactly-dividing unit suffix:
// plain number when not a multiple of 1024, else "Nkb"/"Nmb"/"Ngb".
// NOTE(review): 0 renders as "0gb" (it divides every unit) — confirm
// this matches the Haskell FileSize encoder byte-for-byte.
export function encodeFileSize(bytes: number): string {
  if (bytes % 1024 !== 0) return String(bytes)
  const ks = bytes / 1024
  if (ks % 1024 !== 0) return ks + "kb"
  const ms = ks / 1024
  if (ms % 1024 !== 0) return ms + "mb"
  return ms / 1024 + "gb"
}
// Parse a size with optional kb/mb/gb suffix (parseInt stops at the suffix).
export function decodeFileSize(s: string): number {
  const n = parseInt(s)
  if (s.endsWith("gb")) return n * 1073741824
  if (s.endsWith("mb")) return n * 1048576
  if (s.endsWith("kb")) return n * 1024
  return n
}
// ── Types ───────────────────────────────────────────────────────
// Which party's view of the file this description encodes.
export type FileParty = "recipient" | "sender"
export interface FileDescription {
  party: FileParty
  size: number // total file size in bytes
  digest: Uint8Array // SHA-256 file digest
  key: Uint8Array // SbKey (32 bytes)
  nonce: Uint8Array // CbNonce (24 bytes)
  chunkSize: number // default chunk size in bytes
  chunks: FileChunk[]
  redirect: RedirectFileInfo | null // set when this description redirects to another file
}
export interface RedirectFileInfo {
  size: number
  digest: Uint8Array
}
export interface FileChunk {
  chunkNo: number
  chunkSize: number
  digest: Uint8Array
  replicas: FileChunkReplica[]
}
export interface FileChunkReplica {
  server: string // XFTPServer URI (e.g. "xftp://abc=@example.com")
  replicaId: Uint8Array
  replicaKey: Uint8Array // DER-encoded private key
}
// ── Internal: flat server replica ───────────────────────────────
// One replica row in a per-server YAML chunk list; digest and
// non-default chunkSize are carried only on a chunk's first replica
// (see unfoldChunksToReplicas).
interface FileServerReplica {
  chunkNo: number
  server: string
  replicaId: Uint8Array
  replicaKey: Uint8Array
  digest: Uint8Array | null
  chunkSize: number | null
}
// ── Server replica colon-separated format ───────────────────────
// Render a replica as the colon-separated wire form:
// chunkNo:replicaId:replicaKey[:digest][:chunkSize]
function encodeServerReplica(r: FileServerReplica): string {
  const parts = [String(r.chunkNo), base64urlEncode(r.replicaId), base64urlEncode(r.replicaKey)]
  if (r.digest !== null) parts.push(base64urlEncode(r.digest))
  if (r.chunkSize !== null) parts.push(encodeFileSize(r.chunkSize))
  return parts.join(":")
}
// Parse a colon-separated replica row (see encodeServerReplica).
// Throws on fewer than 3 fields or a non-numeric chunk number —
// previously a malformed chunkNo propagated silently as NaN.
function decodeServerReplica(server: string, s: string): FileServerReplica {
  const parts = s.split(":")
  if (parts.length < 3) throw new Error("invalid server replica: " + s)
  const chunkNo = parseInt(parts[0], 10)
  if (!Number.isInteger(chunkNo)) throw new Error("invalid server replica: " + s)
  return {
    chunkNo,
    server,
    replicaId: base64urlDecode(parts[1]),
    replicaKey: base64urlDecode(parts[2]),
    digest: parts.length >= 4 ? base64urlDecode(parts[3]) : null,
    chunkSize: parts.length >= 5 ? decodeFileSize(parts[4]) : null
  }
}
// ── Unfold chunks to flat replicas ──────────────────────────────
// Flatten chunks into one replica row each; a chunk's digest and its
// non-default size are recorded only on its first replica.
function unfoldChunksToReplicas(defChunkSize: number, chunks: FileChunk[]): FileServerReplica[] {
  return chunks.flatMap(c =>
    c.replicas.map((r, idx) => ({
      chunkNo: c.chunkNo,
      server: r.server,
      replicaId: r.replicaId,
      replicaKey: r.replicaKey,
      digest: idx === 0 ? c.digest : null,
      chunkSize: c.chunkSize !== defChunkSize && idx === 0 ? c.chunkSize : null
    }))
  )
}
// ── Group replicas by server (for YAML encoding) ────────────────
// Flatten the chunk tree, sort by server URI, then merge consecutive
// replicas sharing a server into one {server, chunks} group.
function encodeFileReplicas(
  defChunkSize: number, chunks: FileChunk[]
): {server: string, chunks: string[]}[] {
  const flat = unfoldChunksToReplicas(defChunkSize, chunks)
  // Sort by server URI string (matches Haskell Ord for ProtocolServer when
  // all servers share the same scheme and keyHash — true for typical use).
  flat.sort((a, b) => a.server < b.server ? -1 : a.server > b.server ? 1 : 0)
  const groups: {server: string, chunks: string[]}[] = []
  for (const r of flat) {
    const last = groups[groups.length - 1]
    if (last !== undefined && last.server === r.server) {
      last.chunks.push(encodeServerReplica(r))
    } else {
      groups.push({server: r.server, chunks: [encodeServerReplica(r)]})
    }
  }
  return groups
}
// ── Fold flat replicas back into FileChunks ─────────────────────
// Byte-wise equality of two arrays (early exit; not constant-time —
// used only for comparing public chunk digests).
function bytesEqual(a: Uint8Array, b: Uint8Array): boolean {
  return a.length === b.length && a.every((v, i) => v === b[i])
}
// Rebuild FileChunks from flat replicas: first collect per-chunk sizes
// and digests (validating that all replicas of a chunk agree), then
// group replicas by chunk number and return chunks sorted ascending.
function foldReplicasToChunks(defChunkSize: number, replicas: FileServerReplica[]): FileChunk[] {
  const sizeByChunk = new Map<number, number>()
  const digestByChunk = new Map<number, Uint8Array>()
  for (const r of replicas) {
    if (r.chunkSize !== null) {
      const seen = sizeByChunk.get(r.chunkNo)
      if (seen !== undefined && seen !== r.chunkSize)
        throw new Error("different size in chunk replicas")
      sizeByChunk.set(r.chunkNo, r.chunkSize)
    }
    if (r.digest !== null) {
      const seen = digestByChunk.get(r.chunkNo)
      if (seen !== undefined && !bytesEqual(seen, r.digest))
        throw new Error("different digest in chunk replicas")
      digestByChunk.set(r.chunkNo, r.digest)
    }
  }
  const byNo = new Map<number, FileChunk>()
  for (const r of replicas) {
    const replica = {server: r.server, replicaId: r.replicaId, replicaKey: r.replicaKey}
    const chunk = byNo.get(r.chunkNo)
    if (chunk !== undefined) {
      chunk.replicas.push(replica)
    } else {
      // Every chunk must have a digest on at least one of its replicas.
      const digest = digestByChunk.get(r.chunkNo)
      if (!digest) throw new Error("no digest for chunk")
      byNo.set(r.chunkNo, {
        chunkNo: r.chunkNo,
        chunkSize: sizeByChunk.get(r.chunkNo) ?? defChunkSize,
        digest,
        replicas: [replica]
      })
    }
  }
  return [...byNo.values()].sort((a, b) => a.chunkNo - b.chunkNo)
}
// ── YAML encoding (matching Data.Yaml key ordering) ─────────────
// Serialize a FileDescription in the exact layout Data.Yaml / libyaml
// produces: top-level keys in alphabetical order, replicas grouped per
// server, two-space nested indentation. Intended to be byte-compatible
// with the Haskell encoder.
export function encodeFileDescription(fd: FileDescription): string {
  const out: string[] = [
    "chunkSize: " + encodeFileSize(fd.chunkSize),
    "digest: " + base64urlEncode(fd.digest),
    "key: " + base64urlEncode(fd.key),
    "nonce: " + base64urlEncode(fd.nonce),
    "party: " + fd.party
  ]
  if (fd.redirect !== null) {
    out.push("redirect:")
    out.push("  digest: " + base64urlEncode(fd.redirect.digest))
    out.push("  size: " + fd.redirect.size)
  }
  out.push("replicas:")
  for (const g of encodeFileReplicas(fd.chunkSize, fd.chunks)) {
    out.push("- chunks:")
    for (const c of g.chunks) out.push("  - " + c)
    out.push("  server: " + g.server)
  }
  out.push("size: " + encodeFileSize(fd.size))
  return out.join("\n") + "\n"
}
// ── YAML decoding ───────────────────────────────────────────────
// Parse the restricted YAML subset produced by encodeFileDescription.
// This is a hand-rolled line scanner, not a general YAML parser: it only
// understands the exact layout the encoder emits (top-level "key: value"
// scalars, a nested "redirect:" mapping, and a "replicas:" list of
// {chunks, server} items).
export function decodeFileDescription(yaml: string): FileDescription {
  const lines = yaml.split("\n")
  const topLevel: Record<string, string> = {}
  const replicaGroups: {server: string, chunks: string[]}[] = []
  let redirect: RedirectFileInfo | null = null
  let i = 0
  while (i < lines.length) {
    const line = lines[i]
    if (line.length === 0) { i++; continue }
    if (line === "replicas:") {
      // Each list item starts with "- "; parseReplicaItem consumes the
      // whole item and returns the index of the first unread line.
      i++
      while (i < lines.length && lines[i].startsWith("- ")) {
        const group = {server: "", chunks: [] as string[]}
        i = parseReplicaItem(lines, i, group)
        replicaGroups.push(group)
      }
    } else if (line === "redirect:") {
      // Nested mapping: consume indented "digest:" / "size:" lines.
      i++
      let digestStr = "", sizeStr = ""
      while (i < lines.length && lines[i].startsWith("  ")) {
        const kv = lines[i].substring(2)
        const ci = kv.indexOf(": ")
        if (ci >= 0) {
          const k = kv.substring(0, ci), v = kv.substring(ci + 2)
          if (k === "digest") digestStr = v
          if (k === "size") sizeStr = v
        }
        i++
      }
      redirect = {size: parseInt(sizeStr), digest: base64urlDecode(digestStr)}
    } else {
      // Plain top-level scalar: "key: value".
      const ci = line.indexOf(": ")
      if (ci >= 0) topLevel[line.substring(0, ci)] = line.substring(ci + 2)
      i++
    }
  }
  const chunkSize = decodeFileSize(topLevel["chunkSize"])
  const serverReplicas: FileServerReplica[] = []
  for (const g of replicaGroups) {
    for (const c of g.chunks) serverReplicas.push(decodeServerReplica(g.server, c))
  }
  return {
    // NOTE(review): party is cast, not validated against "recipient"/"sender".
    party: topLevel["party"] as FileParty,
    size: decodeFileSize(topLevel["size"]),
    digest: base64urlDecode(topLevel["digest"]),
    key: base64urlDecode(topLevel["key"]),
    nonce: base64urlDecode(topLevel["nonce"]),
    chunkSize,
    chunks: foldReplicasToChunks(chunkSize, serverReplicas),
    redirect
  }
}
// Parse one "- ..." list item of the replicas section, filling `group`.
// Returns the index of the first line after the item.
function parseReplicaItem(
  lines: string[], startIdx: number, group: {server: string, chunks: string[]}
): number {
  // The first field shares the line with the "- " list marker.
  let i = parseReplicaField(lines[startIdx].substring(2), lines, startIdx + 1, group)
  // Remaining fields are indented under the item (but not new "- " items).
  while (i < lines.length && lines[i].startsWith("  ") && !lines[i].startsWith("- ")) {
    i = parseReplicaField(lines[i].substring(2), lines, i + 1, group)
  }
  return i
}
// Parse a single field of a replica group into `group`.
// For scalar fields ("server: ...") returns nextIdx unchanged; for
// "chunks:" it consumes the following "  - ..." lines and returns the
// index of the first line after them.
function parseReplicaField(
  entry: string, lines: string[], nextIdx: number,
  group: {server: string, chunks: string[]}
): number {
  if (entry.startsWith("chunks:")) {
    let i = nextIdx
    while (i < lines.length && lines[i].startsWith("  - ")) {
      group.chunks.push(lines[i].substring(4))
      i++
    }
    return i
  }
  const sep = entry.indexOf(": ")
  if (sep >= 0 && entry.substring(0, sep) === "server") {
    group.server = entry.substring(sep + 2)
  }
  return nextIdx
}
// ── Validation ──────────────────────────────────────────────────
// Structural sanity checks on a parsed description: chunk numbers must
// be 1..n in order, and chunk sizes must sum to the declared file size.
// Returns an error message, or null when valid.
export function validateFileDescription(fd: FileDescription): string | null {
  if (fd.chunks.some((c, i) => c.chunkNo !== i + 1)) return "chunk numbers are not sequential"
  const total = fd.chunks.reduce((acc, c) => acc + c.chunkSize, 0)
  return total === fd.size ? null : "chunks total size is different than file size"
}
// Separator line (32 '#' characters + newline) used between encoded
// file descriptions.
export const fdSeparator = "################################\n"

View File

@@ -0,0 +1,155 @@
// XFTP handshake encoding/decoding — Simplex.FileTransfer.Transport
//
// Handles XFTP client/server handshake messages and version negotiation.
import {
Decoder, concatBytes,
encodeWord16, decodeWord16,
encodeBytes, decodeBytes,
decodeLarge, decodeNonEmpty
} from "./encoding.js"
import {sha256} from "../crypto/digest.js"
import {decodePubKeyX25519} from "../crypto/keys.js"
import {blockPad, blockUnpad, XFTP_BLOCK_SIZE} from "./transmission.js"
// ── Version types ──────────────────────────────────────────────────
// Inclusive protocol version range; both bounds are Word16 values.
export interface VersionRange {
  minVersion: number // Word16
  maxVersion: number // Word16
}
// Encode version range as two big-endian Word16s (min, then max).
// Matches Haskell: smpEncode (VRange v1 v2) = smpEncode (v1, v2)
export function encodeVersionRange(vr: VersionRange): Uint8Array {
  const lo = encodeWord16(vr.minVersion)
  const hi = encodeWord16(vr.maxVersion)
  return concatBytes(lo, hi)
}
// Decode a version range (two big-endian Word16s), rejecting inverted bounds.
export function decodeVersionRange(d: Decoder): VersionRange {
  const vr = {minVersion: decodeWord16(d), maxVersion: decodeWord16(d)}
  if (vr.minVersion > vr.maxVersion) throw new Error("invalid version range: min > max")
  return vr
}
// Version negotiation: intersection of two version ranges, or null when
// the ranges do not overlap. Matches Haskell compatibleVRange.
export function compatibleVRange(a: VersionRange, b: VersionRange): VersionRange | null {
  const minVersion = Math.max(a.minVersion, b.minVersion)
  const maxVersion = Math.min(a.maxVersion, b.maxVersion)
  return minVersion <= maxVersion ? {minVersion, maxVersion} : null
}
// ── Client handshake ───────────────────────────────────────────────
// Client handshake payload: the negotiated version plus the CA
// fingerprint the client pins (see encodeClientHandshake).
export interface XFTPClientHandshake {
  xftpVersion: number // Word16 — negotiated version
  keyHash: Uint8Array // SHA-256 CA certificate fingerprint (32 bytes)
}
// Encode and pad client handshake to XFTP_BLOCK_SIZE.
// Wire format: pad(smpEncode (xftpVersion, keyHash), 16384)
export function encodeClientHandshake(ch: XFTPClientHandshake): Uint8Array {
  const version = encodeWord16(ch.xftpVersion)
  const hash = encodeBytes(ch.keyHash)
  return blockPad(concatBytes(version, hash), XFTP_BLOCK_SIZE)
}
// ── Server handshake ───────────────────────────────────────────────
// Server handshake payload: offered version range, session id, and the
// certificate chain plus signed key blob (produced by decodeServerHandshake).
export interface XFTPServerHandshake {
  xftpVersionRange: VersionRange
  sessionId: Uint8Array
  certChainDer: Uint8Array[] // raw DER certificate blobs (NonEmpty)
  signedKeyDer: Uint8Array // raw DER SignedExact blob
}
// Decode a padded server handshake block.
// Wire format: unpad(block) → (versionRange, sessionId, certChainPubKey)
// where certChainPubKey = (NonEmpty Large certChain, Large signedKey).
export function decodeServerHandshake(block: Uint8Array): XFTPServerHandshake {
  const d = new Decoder(blockUnpad(block))
  const xftpVersionRange = decodeVersionRange(d)
  const sessionId = decodeBytes(d)
  // CertChainPubKey: smpEncode (encodeCertChain certChain, SignedObject signedPubKey).
  // Any bytes remaining after these fields are Tail — deliberately left
  // unread for forward compatibility.
  return {
    xftpVersionRange,
    sessionId,
    certChainDer: decodeNonEmpty(decodeLarge, d),
    signedKeyDer: decodeLarge(d)
  }
}
// ── Certificate utilities ──────────────────────────────────────────
// SHA-256 fingerprint of the CA certificate (last cert in the chain).
// Matches Haskell: XV.getFingerprint ca X.HashSHA256
export function caFingerprint(certChainDer: Uint8Array[]): Uint8Array {
  if (certChainDer.length < 2) throw new Error("caFingerprint: need at least 2 certs (leaf + CA)")
  const ca = certChainDer[certChainDer.length - 1]
  return sha256(ca)
}
// ── SignedExact DER parsing ────────────────────────────────────────
// Parsed components of an X.509 SignedExact structure, as produced by
// extractSignedKey below.
export interface SignedKey {
  objectDer: Uint8Array // raw DER of the signed object (SubjectPublicKeyInfo)
  dhKey: Uint8Array // extracted 32-byte X25519 public key
  algorithm: Uint8Array // AlgorithmIdentifier DER bytes
  signature: Uint8Array // raw Ed25519 signature bytes (64 bytes)
}
// Parse an ASN.1 DER definite length (short and long form).
// Short form: a single byte < 0x80 is the length. Long form: 0x80|n
// followed by n big-endian length bytes; n in 1..4 is supported.
// Uses exact integer arithmetic instead of `<<`: JS bitwise operators
// work on 32-bit signed integers, so a 4-byte length >= 2^31 would
// overflow to a negative number with `(len << 8) | byte`.
function derLength(d: Decoder): number {
  const first = d.anyByte()
  if (first < 0x80) return first
  const numBytes = first & 0x7f
  if (numBytes === 0 || numBytes > 4) throw new Error("DER: unsupported length encoding")
  let len = 0
  for (let i = 0; i < numBytes; i++) {
    len = len * 256 + d.anyByte()
  }
  return len
}
// Read a complete TLV element, returning its full DER bytes
// (tag + length octets + value) as a view into the decoder's buffer.
function derElement(d: Decoder): Uint8Array {
  const begin = d.offset()
  d.anyByte() // consume tag
  const contentLen = derLength(d)
  d.take(contentLen) // consume value
  return d.buf.subarray(begin, d.offset())
}
// Extract components from a SignedExact X.PubKey DER structure.
// ASN.1 layout:
//   SEQUENCE {
//     SubjectPublicKeyInfo (SEQUENCE) — the signed object
//     AlgorithmIdentifier (SEQUENCE)  — signature algorithm
//     BIT STRING                      — signature
//   }
// Throws on any structural mismatch.
export function extractSignedKey(signedDer: Uint8Array): SignedKey {
  const outer = new Decoder(signedDer)
  const outerTag = outer.anyByte()
  if (outerTag !== 0x30) throw new Error("SignedExact: expected SEQUENCE tag 0x30, got 0x" + outerTag.toString(16))
  derLength(outer) // consume total content length
  // First element: SubjectPublicKeyInfo
  const objectDer = derElement(outer)
  // Second element: AlgorithmIdentifier
  const algorithm = derElement(outer)
  // Third element: BIT STRING (signature)
  const sigTag = outer.anyByte()
  if (sigTag !== 0x03) throw new Error("SignedExact: expected BIT STRING tag 0x03, got 0x" + sigTag.toString(16))
  const sigLen = derLength(outer)
  // A BIT STRING's content starts with an unused-bits count byte, so its
  // length must be at least 1; guard here so malformed input cannot make
  // take(sigLen - 1) below request a negative number of bytes.
  if (sigLen < 1) throw new Error("SignedExact: empty BIT STRING")
  const unusedBits = outer.anyByte()
  if (unusedBits !== 0) throw new Error("SignedExact: expected 0 unused bits in signature")
  const signature = outer.take(sigLen - 1)
  // Extract X25519 key from SubjectPublicKeyInfo
  const dhKey = decodePubKeyX25519(objectDer)
  return {objectDer, dhKey, algorithm, signature}
}

View File

@@ -0,0 +1,121 @@
// XFTP transmission framing — Simplex.Messaging.Transport + FileTransfer.Protocol
//
// Handles block-level pad/unpad, batch encoding, and Ed25519 auth signing.
import {
Decoder, concatBytes,
encodeBytes, decodeBytes,
encodeLarge, decodeLarge
} from "./encoding.js"
import {sign} from "../crypto/keys.js"
// ── Constants ─────────────────────────────────────────────────────
// Fixed padded block size (bytes) for XFTP transmissions (see blockPad).
export const XFTP_BLOCK_SIZE = 16384
// Protocol versions (FileTransfer.Transport) — names mirror the Haskell
// constants; the feature each version introduced is implied by its name.
export const initialXFTPVersion = 1
export const authCmdsXFTPVersion = 2
export const blockedFilesXFTPVersion = 3
// Highest version this client speaks.
export const currentXFTPVersion = 3
// ── Block-level pad/unpad (Crypto.hs:pad/unPad, strict ByteString) ──
// Pad msg to exactly blockSize bytes: 2-byte big-endian length prefix,
// the message, then '#' (0x23) fill. Throws when msg cannot fit.
export function blockPad(msg: Uint8Array, blockSize: number = XFTP_BLOCK_SIZE): Uint8Array {
  const msgLen = msg.length
  if (blockSize - msgLen - 2 < 0) throw new Error("blockPad: message too large for block")
  const block = new Uint8Array(blockSize).fill(0x23) // '#' everywhere first…
  block[0] = (msgLen >>> 8) & 0xff // …then overwrite the length prefix
  block[1] = msgLen & 0xff
  block.set(msg, 2) // …and the message itself
  return block
}
// Reverse of blockPad: read the 2-byte big-endian length prefix and
// return that many following bytes (a view, not a copy). The '#'
// padding bytes themselves are not verified.
export function blockUnpad(block: Uint8Array): Uint8Array {
  if (block.length < 2) throw new Error("blockUnpad: too short")
  const msgLen = block[0] * 256 + block[1]
  if (msgLen + 2 > block.length) throw new Error("blockUnpad: invalid length")
  return block.subarray(2, msgLen + 2)
}
// ── Transmission encoding (client -> server) ──────────────────────
// Encode an authenticated XFTP command as a padded block.
// Matches xftpEncodeAuthTransmission (implySessId = True).
//
// sessionId:  TLS session ID (typically 32 bytes)
// corrId:     correlation ID (ByteString)
// entityId:   file entity ID (ByteString, empty for FNEW/PING)
// cmdBytes:   encoded command (from encodeFNEW, encodeFGET, etc.)
// privateKey: Ed25519 private key (64-byte libsodium format)
export function encodeAuthTransmission(
  sessionId: Uint8Array,
  corrId: Uint8Array,
  entityId: Uint8Array,
  cmdBytes: Uint8Array,
  privateKey: Uint8Array
): Uint8Array {
  // t' = encodeTransmission_ v t = smpEncode (corrId, entityId) <> cmdBytes
  const transmission = concatBytes(encodeBytes(corrId), encodeBytes(entityId), cmdBytes)
  // Signature covers smpEncode sessionId <> t' (implySessId = True); the
  // sessionId itself is implied and never sent on the wire. Ed25519 sign
  // (nonce is ignored for Ed25519 in Haskell sign').
  const signature = sign(privateKey, concatBytes(encodeBytes(sessionId), transmission))
  // tEncodeAuth False (Just (TASignature sig, Nothing)) = smpEncode (signatureBytes sig)
  // tEncode False (auth, tToSend) = authenticator <> tToSend, tToSend = t'
  const body = concatBytes(encodeBytes(signature), transmission)
  // tEncodeBatch1 False = \x01 batch marker + Large length prefix, then pad.
  const batch = concatBytes(Uint8Array.of(1), encodeLarge(body))
  return blockPad(batch)
}
// Encode an unsigned XFTP command (e.g. PING) as a padded block.
// Matches xftpEncodeTransmission (implySessId = True).
export function encodeTransmission(
  corrId: Uint8Array,
  entityId: Uint8Array,
  cmdBytes: Uint8Array
): Uint8Array {
  const transmission = concatBytes(encodeBytes(corrId), encodeBytes(entityId), cmdBytes)
  // No auth: tEncodeAuth False Nothing = smpEncode B.empty = \x00
  const body = concatBytes(encodeBytes(new Uint8Array(0)), transmission)
  const batch = concatBytes(Uint8Array.of(1), encodeLarge(body))
  return blockPad(batch)
}
// ── Transmission decoding (server -> client) ──────────────────────
// Raw parts of a decoded server transmission; `command` is the
// still-encoded response body (see decodeTransmission).
export interface DecodedTransmission {
  corrId: Uint8Array
  entityId: Uint8Array
  command: Uint8Array
}
// Decode a server response block into raw parts.
// Call decodeResponse(command) from commands.ts to parse the response.
// Matches xftpDecodeTClient (implySessId = True).
export function decodeTransmission(block: Uint8Array): DecodedTransmission {
  // unPad, then read the batch count (a single response must be 1).
  const d = new Decoder(blockUnpad(block))
  const batchCount = d.anyByte()
  if (batchCount !== 1) throw new Error("decodeTransmission: expected batch count 1, got " + batchCount)
  // The transmission itself is Large-encoded inside the batch.
  const td = new Decoder(decodeLarge(d))
  decodeBytes(td) // authenticator — empty in server responses, skipped
  const corrId = decodeBytes(td)
  const entityId = decodeBytes(td)
  // Whatever remains is the encoded response command.
  return {corrId, entityId, command: td.takeAll()}
}