diff --git a/rfcs/2026-01-30-send-file-page.md b/rfcs/2026-01-30-send-file-page.md index aefaac903..28556b3d0 100644 --- a/rfcs/2026-01-30-send-file-page.md +++ b/rfcs/2026-01-30-send-file-page.md @@ -577,7 +577,7 @@ describe "crypto/padding" $ do **Test execution:** Tests live in `tests/XFTPWebTests.hs` in the simplexmq repo, skipped by default (require compiled TS project path). Run with: ```bash -cabal test --test-option=--match="/XFTP Web Client/" +cabal test --ghc-options -O0 --test-option=--match="/XFTP Web Client/" ``` **Random inputs:** Haskell tests can use QuickCheck to generate random inputs each run, not just hardcoded values. This catches edge cases that fixed test vectors miss. @@ -1019,7 +1019,7 @@ Download orchestration — the top-level flow. **Development workflow:** 1. Implement `encodeWord16` in `src/protocol/encoding.ts` -2. Run `cabal test --test-option=--match="/XFTP Web Client/encoding/encodeWord16"` +2. Run `cabal test --ghc-options -O0 --test-option=--match="/XFTP Web Client/encoding/encodeWord16"` 3. If it fails: Haskell says `expected 002a, got 2a00` → immediately know it's an endianness bug 4. Fix → rerun → passes → move to `encodeWord32` 5. 
Repeat until all per-function tests pass diff --git a/tests/Test.hs b/tests/Test.hs index e62531f8c..dcc5de3fb 100644 --- a/tests/Test.hs +++ b/tests/Test.hs @@ -150,7 +150,7 @@ main = do describe "XFTP file description" fileDescriptionTests describe "XFTP CLI" xftpCLITests describe "XFTP agent" xftpAgentTests - xftpWebTests + describe "XFTP Web Client" xftpWebTests describe "XRCP" remoteControlTests describe "Server CLIs" cliTests diff --git a/tests/XFTPWebTests.hs b/tests/XFTPWebTests.hs index 738916b22..fbf828063 100644 --- a/tests/XFTPWebTests.hs +++ b/tests/XFTPWebTests.hs @@ -9,19 +9,25 @@ -- Run: cabal test --test-option=--match="/XFTP Web Client/" module XFTPWebTests (xftpWebTests) where +import Control.Concurrent (forkIO, newEmptyMVar, putMVar, takeMVar) +import Control.Monad (when) +import Crypto.Error (throwCryptoError) +import qualified Crypto.PubKey.Curve25519 as X25519 +import qualified Crypto.PubKey.Ed25519 as Ed25519 +import qualified Data.ByteArray as BA import qualified Data.ByteString as B import qualified Data.ByteString.Lazy as LB import Data.Int (Int64) import Data.List (intercalate) import qualified Data.List.NonEmpty as NE import Data.Word (Word16, Word32) -import Crypto.Error (throwCryptoError) -import qualified Crypto.PubKey.Curve25519 as X25519 -import qualified Crypto.PubKey.Ed25519 as Ed25519 -import qualified Data.ByteArray as BA +import Simplex.FileTransfer.Client (prepareChunkSizes) +import Simplex.FileTransfer.Description (FileSize (..)) +import Simplex.FileTransfer.Types (FileHeader (..)) import qualified Simplex.Messaging.Crypto as C import qualified Simplex.Messaging.Crypto.Lazy as LC import Simplex.Messaging.Encoding +import Simplex.Messaging.Encoding.String (strEncode) import System.Directory (doesDirectoryExist) import System.Exit (ExitCode (..)) import System.Process (CreateProcess (..), StdStream (..), createProcess, proc, waitForProcess) @@ -34,15 +40,21 @@ xftpWebDir = "xftp-web" -- | Run an inline ES module script via 
node, return stdout as ByteString. callNode :: String -> IO B.ByteString callNode script = do - (_, Just hout, _, ph) <- + (_, Just hout, Just herr, ph) <- createProcess (proc "node" ["--input-type=module", "-e", script]) { std_out = CreatePipe, + std_err = CreatePipe, cwd = Just xftpWebDir } + errVar <- newEmptyMVar + _ <- forkIO $ B.hGetContents herr >>= putMVar errVar out <- B.hGetContents hout + err <- takeMVar errVar ec <- waitForProcess ph - ec `shouldBe` ExitSuccess + when (ec /= ExitSuccess) $ + expectationFailure $ + "node " <> show ec <> "\nstderr: " <> map (toEnum . fromIntegral) (B.unpack err) pure out -- | Format a ByteString as a JS Uint8Array constructor. @@ -50,7 +62,7 @@ jsUint8 :: B.ByteString -> String jsUint8 bs = "new Uint8Array([" <> intercalate "," (map show (B.unpack bs)) <> "])" -- Import helpers for inline scripts. -impEnc, impPad, impDig, impKey :: String +impEnc, impPad, impDig, impKey, impSb :: String impEnc = "import * as E from './dist/protocol/encoding.js';" impPad = "import * as P from './dist/crypto/padding.js';" impDig = @@ -61,13 +73,66 @@ impKey = "import sodium from 'libsodium-wrappers-sumo';" <> "import * as K from './dist/crypto/keys.js';" <> "await sodium.ready;" +impSb = + "import sodium from 'libsodium-wrappers-sumo';" + <> "import * as S from './dist/crypto/secretbox.js';" + <> "await sodium.ready;" +impFile :: String +impFile = + "import sodium from 'libsodium-wrappers-sumo';" + <> "import * as F from './dist/crypto/file.js';" + <> "await sodium.ready;" +impCmd :: String +impCmd = + "import sodium from 'libsodium-wrappers-sumo';" + <> "import * as E from './dist/protocol/encoding.js';" + <> "import * as Cmd from './dist/protocol/commands.js';" + <> "await sodium.ready;" +impTx :: String +impTx = + "import sodium from 'libsodium-wrappers-sumo';" + <> "import * as E from './dist/protocol/encoding.js';" + <> "import * as K from './dist/crypto/keys.js';" + <> "import * as Tx from './dist/protocol/transmission.js';" + <> 
"await sodium.ready;" +impHs :: String +impHs = + "import sodium from 'libsodium-wrappers-sumo';" + <> "import * as E from './dist/protocol/encoding.js';" + <> "import * as K from './dist/crypto/keys.js';" + <> "import * as Hs from './dist/protocol/handshake.js';" + <> "await sodium.ready;" +impDesc :: String +impDesc = "import * as Desc from './dist/protocol/description.js';" +impChk :: String +impChk = + "import sodium from 'libsodium-wrappers-sumo';" + <> "import * as Desc from './dist/protocol/description.js';" + <> "import * as Chk from './dist/protocol/chunks.js';" + <> "await sodium.ready;" +impCli :: String +impCli = + "import sodium from 'libsodium-wrappers-sumo';" + <> "import * as K from './dist/crypto/keys.js';" + <> "import * as Cli from './dist/protocol/client.js';" + <> "await sodium.ready;" +impDl :: String +impDl = + "import sodium from 'libsodium-wrappers-sumo';" + <> "import * as K from './dist/crypto/keys.js';" + <> "import * as F from './dist/crypto/file.js';" + <> "import * as Cli from './dist/protocol/client.js';" + <> "import * as Dl from './dist/download.js';" + <> "import * as Cmd from './dist/protocol/commands.js';" + <> "import * as Tx from './dist/protocol/transmission.js';" + <> "await sodium.ready;" -- | Wrap expression in process.stdout.write(Buffer.from(...)). 
jsOut :: String -> String jsOut expr = "process.stdout.write(Buffer.from(" <> expr <> "));" xftpWebTests :: Spec -xftpWebTests = describe "XFTP Web Client" $ do +xftpWebTests = do distExists <- runIO $ doesDirectoryExist (xftpWebDir <> "/dist") if distExists then do @@ -75,6 +140,15 @@ xftpWebTests = describe "XFTP Web Client" $ do tsPaddingTests tsDigestTests tsKeyTests + tsSecretboxTests + tsFileCryptoTests + tsCommandTests + tsTransmissionTests + tsHandshakeTests + tsDescriptionTests + tsChunkTests + tsClientTests + tsDownloadTests else it "skipped (run 'cd xftp-web && npm install && npm run build' first)" $ pendingWith "TS project not compiled" @@ -630,3 +704,1514 @@ tsKeyTests = describe "crypto/keys" $ do <> ");" <> jsOut "K.keyHash(der)" actual `shouldBe` expectedHash + +-- ── crypto/secretbox ────────────────────────────────────────────── + +tsSecretboxTests :: Spec +tsSecretboxTests = describe "crypto/secretbox" $ do + let key32 = B.pack [1 .. 32] + nonce24 = B.pack [1 .. 24] + cbNonceVal = C.cbNonce nonce24 + sbKeyVal = C.unsafeSbKey key32 + + describe "NaCl secretbox (tag prepended)" $ do + it "cbEncrypt matches Haskell sbEncrypt_" $ do + let msg = "hello NaCl secretbox" :: B.ByteString + paddedLen = 256 :: Int + hsResult = either (error . show) id $ C.sbEncrypt_ key32 cbNonceVal msg paddedLen + tsResult <- + callNode $ + impSb <> jsOut ("S.cbEncrypt(" <> jsUint8 key32 <> "," <> jsUint8 nonce24 <> "," <> jsUint8 msg <> "," <> show paddedLen <> ")") + tsResult `shouldBe` hsResult + + it "Haskell sbEncrypt_ -> TS cbDecrypt" $ do + let msg = "cross-language decrypt" :: B.ByteString + paddedLen = 128 :: Int + cipher = either (error . 
show) id $ C.sbEncrypt_ key32 cbNonceVal msg paddedLen + tsResult <- + callNode $ + impSb <> jsOut ("S.cbDecrypt(" <> jsUint8 key32 <> "," <> jsUint8 nonce24 <> "," <> jsUint8 cipher <> ")") + tsResult `shouldBe` msg + + it "TS cbEncrypt -> Haskell sbDecrypt_" $ do + let msg = "ts-to-haskell NaCl" :: B.ByteString + paddedLen = 64 :: Int + tsCipher <- + callNode $ + impSb <> jsOut ("S.cbEncrypt(" <> jsUint8 key32 <> "," <> jsUint8 nonce24 <> "," <> jsUint8 msg <> "," <> show paddedLen <> ")") + let hsResult = either (error . show) id $ C.sbDecrypt_ key32 cbNonceVal tsCipher + hsResult `shouldBe` msg + + describe "streaming tail-tag" $ do + it "sbEncryptTailTag matches Haskell" $ do + let msg = "hello streaming" :: B.ByteString + msgLen = fromIntegral (B.length msg) :: Int64 + paddedLen = 64 :: Int64 + hsResult = + either (error . show) id $ + LC.sbEncryptTailTag sbKeyVal cbNonceVal (LB.fromStrict msg) msgLen paddedLen + tsResult <- + callNode $ + impSb + <> jsOut + ( "S.sbEncryptTailTag(" + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> "," + <> jsUint8 msg + <> "," + <> show msgLen + <> "n," + <> show paddedLen + <> "n)" + ) + tsResult `shouldBe` LB.toStrict hsResult + + it "Haskell encrypt -> TS decrypt (tail tag)" $ do + let msg = "haskell-to-ts streaming" :: B.ByteString + msgLen = fromIntegral (B.length msg) :: Int64 + paddedLen = 128 :: Int64 + cipher = + either (error . show) id $ + LC.sbEncryptTailTag sbKeyVal cbNonceVal (LB.fromStrict msg) msgLen paddedLen + tsResult <- + callNode $ + impSb + <> "const r = S.sbDecryptTailTag(" + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> "," + <> show paddedLen + <> "n," + <> jsUint8 (LB.toStrict cipher) + <> ");" + <> jsOut "new Uint8Array([r.valid ? 
1 : 0, ...r.content])" + let (validByte, content) = B.splitAt 1 tsResult + validByte `shouldBe` B.pack [1] + content `shouldBe` msg + + it "TS encrypt -> Haskell decrypt (tail tag)" $ do + let msg = "ts-to-haskell streaming" :: B.ByteString + msgLen = fromIntegral (B.length msg) :: Int64 + paddedLen = 64 :: Int64 + tsCipher <- + callNode $ + impSb + <> jsOut + ( "S.sbEncryptTailTag(" + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> "," + <> jsUint8 msg + <> "," + <> show msgLen + <> "n," + <> show paddedLen + <> "n)" + ) + let (valid, plaintext) = + either (error . show) id $ + LC.sbDecryptTailTag sbKeyVal cbNonceVal paddedLen (LB.fromStrict tsCipher) + valid `shouldBe` True + LB.toStrict plaintext `shouldBe` msg + + it "tag tampering detection" $ do + let msg = "tamper test" :: B.ByteString + msgLen = fromIntegral (B.length msg) :: Int64 + paddedLen = 64 :: Int64 + tsResult <- + callNode $ + impSb + <> "const enc = S.sbEncryptTailTag(" + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> "," + <> jsUint8 msg + <> "," + <> show msgLen + <> "n," + <> show paddedLen + <> "n);" + <> "enc[enc.length - 1] ^= 1;" + <> "const r = S.sbDecryptTailTag(" + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> "," + <> show paddedLen + <> "n, enc);" + <> jsOut "new Uint8Array([r.valid ? 
1 : 0])" + tsResult `shouldBe` B.pack [0] + + describe "internal consistency" $ do + it "streaming matches NaCl secretbox (TS-only)" $ do + let msg = "salsa20 validation" :: B.ByteString + msgLen = fromIntegral (B.length msg) :: Int64 + paddedLen = 64 :: Int64 + tsResult <- + callNode $ + impPad + <> impSb + <> "const msg = " + <> jsUint8 msg + <> ";" + <> "const key = " + <> jsUint8 key32 + <> ";" + <> "const nonce = " + <> jsUint8 nonce24 + <> ";" + <> "const padded = P.padLazy(msg, " + <> show msgLen + <> "n, " + <> show paddedLen + <> "n);" + <> "const nacl = S.cryptoBox(key, nonce, padded);" + <> "const stream = S.sbEncryptTailTag(key, nonce, msg, " + <> show msgLen + <> "n, " + <> show paddedLen + <> "n);" + <> "const naclTag = nacl.subarray(0, 16);" + <> "const naclCipher = nacl.subarray(16);" + <> "const streamCipher = stream.subarray(0, " + <> show paddedLen + <> ");" + <> "const streamTag = stream.subarray(" + <> show paddedLen + <> ");" + <> "const cipherMatch = naclCipher.length === streamCipher.length && naclCipher.every((b,i) => b === streamCipher[i]);" + <> "const tagMatch = naclTag.length === streamTag.length && naclTag.every((b,i) => b === streamTag[i]);" + <> jsOut "new Uint8Array([cipherMatch ? 1 : 0, tagMatch ? 1 : 0])" + tsResult `shouldBe` B.pack [1, 1] + + it "multi-chunk matches single-shot (TS-only)" $ do + let msg = B.pack [1 .. 
200] + tsResult <- + callNode $ + impSb + <> "const key = " + <> jsUint8 key32 + <> ";" + <> "const nonce = " + <> jsUint8 nonce24 + <> ";" + <> "const msg = " + <> jsUint8 msg + <> ";" + <> "const st1 = S.sbInit(key, nonce);" + <> "const c1 = S.sbEncryptChunk(st1, msg);" + <> "const t1 = S.sbAuth(st1);" + <> "const st2 = S.sbInit(key, nonce);" + <> "const parts = [msg.subarray(0,50), msg.subarray(50,100), msg.subarray(100,150), msg.subarray(150)];" + <> "const c2parts = parts.map(p => S.sbEncryptChunk(st2, p));" + <> "const c2 = new Uint8Array(200); let off = 0; c2parts.forEach(p => { c2.set(p, off); off += p.length; });" + <> "const t2 = S.sbAuth(st2);" + <> "const cipherMatch = c1.length === c2.length && c1.every((b,i) => b === c2[i]);" + <> "const tagMatch = t1.length === t2.length && t1.every((b,i) => b === t2[i]);" + <> jsOut "new Uint8Array([cipherMatch ? 1 : 0, tagMatch ? 1 : 0])" + tsResult `shouldBe` B.pack [1, 1] + +-- ── crypto/file ───────────────────────────────────────────────── + +tsFileCryptoTests :: Spec +tsFileCryptoTests = describe "crypto/file" $ do + let key32 = B.pack [1 .. 32] + nonce24 = B.pack [1 .. 
24] + cbNonceVal = C.cbNonce nonce24 + sbKeyVal = C.unsafeSbKey key32 + + describe "FileHeader encoding" $ do + it "encodeFileHeader matches Haskell" $ do + let hdr = FileHeader "test.txt" Nothing + hsEncoded = smpEncode hdr + tsEncoded <- callNode $ impFile <> jsOut "F.encodeFileHeader({fileName: 'test.txt', fileExtra: null})" + tsEncoded `shouldBe` hsEncoded + + it "encodeFileHeader with fileExtra" $ do + let hdr = FileHeader "document.pdf" (Just "v2") + hsEncoded = smpEncode hdr + tsEncoded <- callNode $ impFile <> jsOut "F.encodeFileHeader({fileName: 'document.pdf', fileExtra: 'v2'})" + tsEncoded `shouldBe` hsEncoded + + it "Haskell encode -> TS parseFileHeader" $ do + let hdr = FileHeader "photo.jpg" (Just "extra") + encoded = smpEncode hdr + trailing = B.pack [10, 20, 30, 40, 50] + input = encoded <> trailing + tsResult <- + callNode $ + impFile + <> "const r = F.parseFileHeader(" + <> jsUint8 input + <> ");" + <> "const hdrBytes = F.encodeFileHeader(r.header);" + <> jsOut "new Uint8Array([...hdrBytes, ...r.rest])" + tsResult `shouldBe` input + + describe "file encryption" $ do + it "encryptFile matches Haskell" $ do + let source = "Hello, this is test file content!" :: B.ByteString + hdr = FileHeader "test.txt" Nothing + fileHdr = smpEncode hdr + fileSize' = fromIntegral (B.length fileHdr + B.length source) :: Int64 + encSize = 256 :: Int64 + sb = either (error . 
show) id $ LC.sbInit sbKeyVal cbNonceVal + lenStr = smpEncode fileSize' + (hdrEnc, sb1) = LC.sbEncryptChunk sb (lenStr <> fileHdr) + (srcEnc, sb2) = LC.sbEncryptChunk sb1 source + padLen = encSize - 16 - fileSize' - 8 + padding = B.replicate (fromIntegral padLen) 0x23 + (padEnc, sb3) = LC.sbEncryptChunk sb2 padding + tag = BA.convert (LC.sbAuth sb3) :: B.ByteString + hsEncrypted = B.concat [hdrEnc, srcEnc, padEnc, tag] + tsEncrypted <- + callNode $ + impFile + <> "const source = " + <> jsUint8 source + <> ";" + <> "const fileHdr = F.encodeFileHeader({fileName: 'test.txt', fileExtra: null});" + <> jsOut + ( "F.encryptFile(source, fileHdr, " + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> "," + <> show fileSize' + <> "n," + <> show encSize + <> "n)" + ) + tsEncrypted `shouldBe` hsEncrypted + + it "Haskell encrypt -> TS decryptChunks" $ do + let source = "cross-language file test data" :: B.ByteString + hdr = FileHeader "data.bin" (Just "meta") + fileHdr = smpEncode hdr + fileSize' = fromIntegral (B.length fileHdr + B.length source) :: Int64 + encSize = 128 :: Int64 + sb = either (error . 
show) id $ LC.sbInit sbKeyVal cbNonceVal + lenStr = smpEncode fileSize' + (hdrEnc, sb1) = LC.sbEncryptChunk sb (lenStr <> fileHdr) + (srcEnc, sb2) = LC.sbEncryptChunk sb1 source + padLen = encSize - 16 - fileSize' - 8 + padding = B.replicate (fromIntegral padLen) 0x23 + (padEnc, sb3) = LC.sbEncryptChunk sb2 padding + tag = BA.convert (LC.sbAuth sb3) :: B.ByteString + encrypted = B.concat [hdrEnc, srcEnc, padEnc, tag] + tsResult <- + callNode $ + impFile + <> "const r = F.decryptChunks(" + <> show encSize + <> "n, [" + <> jsUint8 encrypted + <> "], " + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> ");" + <> "const hdrBytes = F.encodeFileHeader(r.header);" + <> jsOut "new Uint8Array([...hdrBytes, ...r.content])" + tsResult `shouldBe` (fileHdr <> source) + + it "TS encryptFile -> Haskell decrypt" $ do + let source = "ts-to-haskell file" :: B.ByteString + hdr = FileHeader "note.txt" Nothing + fileHdr = smpEncode hdr + fileSize' = fromIntegral (B.length fileHdr + B.length source) :: Int64 + encSize = 128 :: Int64 + paddedLen = encSize - 16 + tsEncrypted <- + callNode $ + impFile + <> "const source = " + <> jsUint8 source + <> ";" + <> "const fileHdr = F.encodeFileHeader({fileName: 'note.txt', fileExtra: null});" + <> jsOut + ( "F.encryptFile(source, fileHdr, " + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> "," + <> show fileSize' + <> "n," + <> show encSize + <> "n)" + ) + let (valid, plaintext) = + either (error . show) id $ + LC.sbDecryptTailTag sbKeyVal cbNonceVal paddedLen (LB.fromStrict tsEncrypted) + valid `shouldBe` True + LB.toStrict plaintext `shouldBe` (fileHdr <> source) + + it "multi-chunk decrypt" $ do + let source = "multi-chunk file content" :: B.ByteString + hdr = FileHeader "multi.bin" Nothing + fileHdr = smpEncode hdr + fileSize' = fromIntegral (B.length fileHdr + B.length source) :: Int64 + encSize = 128 :: Int64 + sb = either (error . 
show) id $ LC.sbInit sbKeyVal cbNonceVal + lenStr = smpEncode fileSize' + (hdrEnc, sb1) = LC.sbEncryptChunk sb (lenStr <> fileHdr) + (srcEnc, sb2) = LC.sbEncryptChunk sb1 source + padLen = encSize - 16 - fileSize' - 8 + padding = B.replicate (fromIntegral padLen) 0x23 + (padEnc, sb3) = LC.sbEncryptChunk sb2 padding + tag = BA.convert (LC.sbAuth sb3) :: B.ByteString + encrypted = B.concat [hdrEnc, srcEnc, padEnc, tag] + (chunk1, rest) = B.splitAt 50 encrypted + (chunk2, chunk3) = B.splitAt 50 rest + tsResult <- + callNode $ + impFile + <> "const r = F.decryptChunks(" + <> show encSize + <> "n, [" + <> jsUint8 chunk1 + <> "," + <> jsUint8 chunk2 + <> "," + <> jsUint8 chunk3 + <> "], " + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> ");" + <> "const hdrBytes = F.encodeFileHeader(r.header);" + <> jsOut "new Uint8Array([...hdrBytes, ...r.content])" + tsResult `shouldBe` (fileHdr <> source) + + it "auth tag tampering detection" $ do + let source = "tamper detection file" :: B.ByteString + hdr = FileHeader "secret.dat" Nothing + fileHdr = smpEncode hdr + fileSize' = fromIntegral (B.length fileHdr + B.length source) :: Int64 + encSize = 128 :: Int64 + sb = either (error . 
show) id $ LC.sbInit sbKeyVal cbNonceVal + lenStr = smpEncode fileSize' + (hdrEnc, sb1) = LC.sbEncryptChunk sb (lenStr <> fileHdr) + (srcEnc, sb2) = LC.sbEncryptChunk sb1 source + padLen = encSize - 16 - fileSize' - 8 + padding = B.replicate (fromIntegral padLen) 0x23 + (padEnc, sb3) = LC.sbEncryptChunk sb2 padding + tag = BA.convert (LC.sbAuth sb3) :: B.ByteString + encrypted = B.concat [hdrEnc, srcEnc, padEnc, tag] + tsResult <- + callNode $ + impFile + <> "const enc = " + <> jsUint8 encrypted + <> ";" + <> "enc[enc.length - 1] ^= 1;" + <> "let ok = 0;" + <> "try { F.decryptChunks(" + <> show encSize + <> "n, [enc], " + <> jsUint8 key32 + <> "," + <> jsUint8 nonce24 + <> "); ok = 1; } catch(e) { ok = 0; }" + <> jsOut "new Uint8Array([ok])" + tsResult `shouldBe` B.pack [0] + +-- ── protocol/commands ──────────────────────────────────────────── + +tsCommandTests :: Spec +tsCommandTests = describe "protocol/commands" $ do + let sndKey = B.pack [1 .. 8] + rcvKey1 = B.pack [11 .. 18] + rcvKey2 = B.pack [21 .. 28] + digest = B.pack [31 .. 38] + size32 = 12345 :: Word32 + authKey = B.pack [41 .. 48] + dhKey = B.pack [51 .. 
58] + + describe "encode" $ do + it "encodeFileInfo" $ do + let expected = smpEncode sndKey <> smpEncode size32 <> smpEncode digest + tsResult <- + callNode $ + impCmd + <> "const fi = {sndKey: " + <> jsUint8 sndKey + <> ", size: " + <> show size32 + <> ", digest: " + <> jsUint8 digest + <> "};" + <> jsOut "Cmd.encodeFileInfo(fi)" + tsResult `shouldBe` expected + + it "encodeFNEW with auth" $ do + let fileInfo = smpEncode sndKey <> smpEncode size32 <> smpEncode digest + rcvKeys = smpEncodeList [rcvKey1, rcvKey2] + auth = B.singleton 0x31 <> smpEncode authKey + expected = "FNEW " <> fileInfo <> rcvKeys <> auth + tsResult <- + callNode $ + impCmd + <> "const fi = {sndKey: " + <> jsUint8 sndKey + <> ", size: " + <> show size32 + <> ", digest: " + <> jsUint8 digest + <> "};" + <> "const rks = [" + <> jsUint8 rcvKey1 + <> "," + <> jsUint8 rcvKey2 + <> "];" + <> jsOut ("Cmd.encodeFNEW(fi, rks, " <> jsUint8 authKey <> ")") + tsResult `shouldBe` expected + + it "encodeFNEW without auth" $ do + let fileInfo = smpEncode sndKey <> smpEncode size32 <> smpEncode digest + rcvKeys = smpEncodeList [rcvKey1] + expected = "FNEW " <> fileInfo <> rcvKeys <> "0" + tsResult <- + callNode $ + impCmd + <> "const fi = {sndKey: " + <> jsUint8 sndKey + <> ", size: " + <> show size32 + <> ", digest: " + <> jsUint8 digest + <> "};" + <> "const rks = [" + <> jsUint8 rcvKey1 + <> "];" + <> jsOut "Cmd.encodeFNEW(fi, rks, null)" + tsResult `shouldBe` expected + + it "encodeFADD" $ do + let expected = "FADD " <> smpEncodeList [rcvKey1, rcvKey2] + tsResult <- + callNode $ + impCmd + <> jsOut ("Cmd.encodeFADD([" <> jsUint8 rcvKey1 <> "," <> jsUint8 rcvKey2 <> "])") + tsResult `shouldBe` expected + + it "encodeFPUT" $ do + tsResult <- callNode $ impCmd <> jsOut "Cmd.encodeFPUT()" + tsResult `shouldBe` "FPUT" + + it "encodeFDEL" $ do + tsResult <- callNode $ impCmd <> jsOut "Cmd.encodeFDEL()" + tsResult `shouldBe` "FDEL" + + it "encodeFGET" $ do + let expected = "FGET " <> smpEncode dhKey + tsResult <- 
+ callNode $ + impCmd <> jsOut ("Cmd.encodeFGET(" <> jsUint8 dhKey <> ")") + tsResult `shouldBe` expected + + it "encodeFACK" $ do + tsResult <- callNode $ impCmd <> jsOut "Cmd.encodeFACK()" + tsResult `shouldBe` "FACK" + + it "encodePING" $ do + tsResult <- callNode $ impCmd <> jsOut "Cmd.encodePING()" + tsResult `shouldBe` "PING" + + describe "decode" $ do + it "decodeResponse OK" $ do + tsResult <- + callNode $ + impCmd + <> "const r = Cmd.decodeResponse(" + <> jsUint8 ("OK" :: B.ByteString) + <> ");" + <> jsOut "new Uint8Array([r.type === 'FROk' ? 1 : 0])" + tsResult `shouldBe` B.pack [1] + + it "decodeResponse PONG" $ do + tsResult <- + callNode $ + impCmd + <> "const r = Cmd.decodeResponse(" + <> jsUint8 ("PONG" :: B.ByteString) + <> ");" + <> jsOut "new Uint8Array([r.type === 'FRPong' ? 1 : 0])" + tsResult `shouldBe` B.pack [1] + + it "decodeResponse ERR AUTH" $ do + tsResult <- + callNode $ + impCmd + <> "const r = Cmd.decodeResponse(" + <> jsUint8 ("ERR AUTH" :: B.ByteString) + <> ");" + <> jsOut "new Uint8Array([r.type === 'FRErr' && r.err.type === 'AUTH' ? 1 : 0])" + tsResult `shouldBe` B.pack [1] + + it "decodeResponse ERR CMD SYNTAX" $ do + tsResult <- + callNode $ + impCmd + <> "const r = Cmd.decodeResponse(" + <> jsUint8 ("ERR CMD SYNTAX" :: B.ByteString) + <> ");" + <> jsOut "new Uint8Array([r.type === 'FRErr' && r.err.type === 'CMD' && r.err.cmdErr === 'SYNTAX' ? 1 : 0])" + tsResult `shouldBe` B.pack [1] + + it "decodeResponse SIDS" $ do + let senderId = B.pack [1 .. 24] + rId1 = B.pack [25 .. 48] + rId2 = B.pack [49 .. 72] + sidsBytes = "SIDS " <> smpEncode senderId <> smpEncodeList [rId1, rId2] + tsResult <- + callNode $ + impCmd + <> "const r = Cmd.decodeResponse(" + <> jsUint8 sidsBytes + <> ");" + <> "if (r.type !== 'FRSndIds') throw new Error('wrong type');" + <> jsOut "E.concatBytes(r.senderId, ...r.recipientIds)" + tsResult `shouldBe` (senderId <> rId1 <> rId2) + + it "decodeResponse RIDS" $ do + let rId1 = B.pack [1 .. 
16] + rId2 = B.pack [17 .. 32] + ridsBytes = "RIDS " <> smpEncodeList [rId1, rId2] + tsResult <- + callNode $ + impCmd + <> "const r = Cmd.decodeResponse(" + <> jsUint8 ridsBytes + <> ");" + <> "if (r.type !== 'FRRcvIds') throw new Error('wrong type');" + <> jsOut "E.concatBytes(...r.recipientIds)" + tsResult `shouldBe` (rId1 <> rId2) + + it "decodeResponse FILE" $ do + let rawPub = B.pack [1 .. 32] + x25519Der = B.pack [0x30, 0x2a, 0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x6e, 0x03, 0x21, 0x00] + derKey = x25519Der <> rawPub + nonce = B.pack [201 .. 224] + fileBytes = "FILE " <> smpEncode derKey <> nonce + tsResult <- + callNode $ + impCmd + <> "const r = Cmd.decodeResponse(" + <> jsUint8 fileBytes + <> ");" + <> "if (r.type !== 'FRFile') throw new Error('wrong type: ' + r.type);" + <> jsOut "E.concatBytes(r.rcvDhKey, r.nonce)" + tsResult `shouldBe` (rawPub <> nonce) + +-- ── protocol/transmission ────────────────────────────────────────── + +tsTransmissionTests :: Spec +tsTransmissionTests = describe "protocol/transmission" $ do + describe "blockPad / blockUnpad" $ do + it "blockPad matches C.pad" $ do + let msg = "hello pad test" :: B.ByteString + blockSize = 256 :: Int + hsPadded = either (error . show) id $ C.pad msg blockSize + tsPadded <- + callNode $ + impTx <> jsOut ("Tx.blockPad(" <> jsUint8 msg <> ", " <> show blockSize <> ")") + tsPadded `shouldBe` hsPadded + + it "Haskell C.pad -> TS blockUnpad" $ do + let msg = "cross-language unpad" :: B.ByteString + blockSize = 128 :: Int + hsPadded = either (error . show) id $ C.pad msg blockSize + tsResult <- + callNode $ + impTx <> jsOut ("Tx.blockUnpad(" <> jsUint8 hsPadded <> ")") + tsResult `shouldBe` msg + + it "TS blockPad -> Haskell C.unPad" $ do + let msg = "ts-to-haskell pad" :: B.ByteString + blockSize = 128 :: Int + tsPadded <- + callNode $ + impTx <> jsOut ("Tx.blockPad(" <> jsUint8 msg <> ", " <> show blockSize <> ")") + let hsResult = either (error . 
show) id $ C.unPad tsPadded + hsResult `shouldBe` msg + + describe "transmission encoding" $ do + it "encodeTransmission unsigned (PING)" $ do + let corrId = "abc" :: B.ByteString + entityId = "" :: B.ByteString + cmdBytes = "PING" :: B.ByteString + tInner = smpEncode corrId <> smpEncode entityId <> cmdBytes + authenticator = smpEncode ("" :: B.ByteString) + encoded = authenticator <> tInner + batch = B.singleton 1 <> smpEncode (Large encoded) + expected = either (error . show) id $ C.pad batch 16384 + tsResult <- + callNode $ + impTx + <> jsOut + ( "Tx.encodeTransmission(" + <> jsUint8 corrId + <> ", " + <> jsUint8 entityId + <> ", " + <> jsUint8 cmdBytes + <> ")" + ) + tsResult `shouldBe` expected + + it "encodeAuthTransmission signed" $ do + let seed = B.pack [1 .. 32] + sk = throwCryptoError $ Ed25519.secretKey seed + pk = Ed25519.toPublic sk + sessionId = B.pack [101 .. 132] + corrId = "xyz" :: B.ByteString + entityId = B.pack [1 .. 24] + cmdBytes = "FPUT" :: B.ByteString + tInner = smpEncode corrId <> smpEncode entityId <> cmdBytes + tForAuth = smpEncode sessionId <> tInner + sig = Ed25519.sign sk pk tForAuth + rawSig = BA.convert sig :: B.ByteString + authenticator = smpEncode rawSig + encoded = authenticator <> tInner + batch = B.singleton 1 <> smpEncode (Large encoded) + expected = either (error . show) id $ C.pad batch 16384 + tsResult <- + callNode $ + impTx + <> "const kp = K.ed25519KeyPairFromSeed(" + <> jsUint8 seed + <> ");" + <> jsOut + ( "Tx.encodeAuthTransmission(" + <> jsUint8 sessionId + <> ", " + <> jsUint8 corrId + <> ", " + <> jsUint8 entityId + <> ", " + <> jsUint8 cmdBytes + <> ", kp.privateKey)" + ) + tsResult `shouldBe` expected + + it "decodeTransmission" $ do + let corrId = "r01" :: B.ByteString + entityId = B.pack [1 .. 
16] + cmdBytes = "OK" :: B.ByteString + tInner = smpEncode corrId <> smpEncode entityId <> cmdBytes + authenticator = smpEncode ("" :: B.ByteString) + encoded = authenticator <> tInner + batch = B.singleton 1 <> smpEncode (Large encoded) + block = either (error . show) id $ C.pad batch 256 + tsResult <- + callNode $ + impTx + <> "const t = Tx.decodeTransmission(" + <> jsUint8 block + <> ");" + <> jsOut "E.concatBytes(t.corrId, t.entityId, t.command)" + tsResult `shouldBe` (corrId <> entityId <> cmdBytes) + +-- ── protocol/handshake ──────────────────────────────────────────── + +tsHandshakeTests :: Spec +tsHandshakeTests = describe "protocol/handshake" $ do + describe "version range" $ do + it "encodeVersionRange" $ do + let expected = smpEncode (1 :: Word16) <> smpEncode (3 :: Word16) + tsResult <- + callNode $ + impHs + <> jsOut "Hs.encodeVersionRange({minVersion: 1, maxVersion: 3})" + tsResult `shouldBe` expected + + it "decodeVersionRange" $ do + let vrBytes = smpEncode (2 :: Word16) <> smpEncode (5 :: Word16) + tsResult <- + callNode $ + impHs + <> "const d = new E.Decoder(" <> jsUint8 vrBytes <> ");" + <> "const vr = Hs.decodeVersionRange(d);" + <> jsOut "E.concatBytes(E.encodeWord16(vr.minVersion), E.encodeWord16(vr.maxVersion))" + tsResult `shouldBe` vrBytes + + it "compatibleVRange (compatible)" $ do + -- intersection of [1,3] and [2,5] = [2,3] + let expected = smpEncode (2 :: Word16) <> smpEncode (3 :: Word16) + tsResult <- + callNode $ + impHs + <> "const r = Hs.compatibleVRange({minVersion:1,maxVersion:3},{minVersion:2,maxVersion:5});" + <> "if (!r) throw new Error('expected compatible');" + <> jsOut "Hs.encodeVersionRange(r)" + tsResult `shouldBe` expected + + it "compatibleVRange (incompatible)" $ do + tsResult <- + callNode $ + impHs + <> "const r = Hs.compatibleVRange({minVersion:1,maxVersion:2},{minVersion:3,maxVersion:5});" + <> jsOut "new Uint8Array([r === null ? 
1 : 0])" + tsResult `shouldBe` B.pack [1] + + describe "client handshake" $ do + it "encodeClientHandshake" $ do + let kh = B.pack [1 .. 32] + body = smpEncode (3 :: Word16) <> smpEncode kh + expected = either (error . show) id $ C.pad body 16384 + tsResult <- + callNode $ + impHs + <> jsOut ("Hs.encodeClientHandshake({xftpVersion:3,keyHash:" <> jsUint8 kh <> "})") + tsResult `shouldBe` expected + + describe "server handshake" $ do + it "decodeServerHandshake" $ do + let sessId = B.pack [1 .. 32] + cert1 = B.pack [101 .. 200] -- 100 bytes + cert2 = B.pack [201 .. 232] -- 32 bytes + signedKeyBytes = B.pack [1 .. 120] + -- Encode server handshake body matching Haskell wire format: + -- smpEncode (versionRange, sessionId, certChainPubKey) + -- where certChainPubKey = (NonEmpty Large certChain, Large signedKey) + body = + smpEncode (1 :: Word16) <> smpEncode (3 :: Word16) + <> smpEncode sessId + <> smpEncode (NE.fromList [Large cert1, Large cert2]) + <> smpEncode (Large signedKeyBytes) + serverBlock = either (error . show) id $ C.pad body 16384 + tsResult <- + callNode $ + impHs + <> "const hs = Hs.decodeServerHandshake(" <> jsUint8 serverBlock <> ");" + <> jsOut + ( "E.concatBytes(" + <> "E.encodeWord16(hs.xftpVersionRange.minVersion)," + <> "E.encodeWord16(hs.xftpVersionRange.maxVersion)," + <> "hs.sessionId," + <> "...hs.certChainDer," + <> "hs.signedKeyDer)" + ) + -- Expected: vmin(2) + vmax(2) + sessId(32) + cert1(100) + cert2(32) + signedKey(120) = 288 bytes + tsResult + `shouldBe` ( smpEncode (1 :: Word16) <> smpEncode (3 :: Word16) + <> sessId + <> cert1 + <> cert2 + <> signedKeyBytes + ) + + describe "certificate utilities" $ do + it "caFingerprint" $ do + let cert1 = B.pack [101 .. 200] + cert2 = B.pack [201 .. 
232] + expected = C.sha256Hash cert2 + tsResult <- + callNode $ + impHs + <> "const chain = [" <> jsUint8 cert1 <> "," <> jsUint8 cert2 <> "];" + <> jsOut "Hs.caFingerprint(chain)" + tsResult `shouldBe` expected + + describe "SignedExact parsing" $ do + it "extractSignedKey" $ do + -- Generate signing key (Ed25519) + let signSeed = B.pack [1 .. 32] + signSk = throwCryptoError $ Ed25519.secretKey signSeed + signPk = Ed25519.toPublic signSk + signPkRaw = BA.convert signPk :: B.ByteString + -- Generate DH key (X25519) + dhSeed = B.pack [41 .. 72] + dhSk = throwCryptoError $ X25519.secretKey dhSeed + dhPk = X25519.toPublic dhSk + dhPkRaw = BA.convert dhPk :: B.ByteString + -- SubjectPublicKeyInfo DER for X25519 (44 bytes) + x25519Prefix = B.pack [0x30, 0x2a, 0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x6e, 0x03, 0x21, 0x00] + spkiDer = x25519Prefix <> dhPkRaw + -- Sign the SPKI with Ed25519 + sig = Ed25519.sign signSk signPk spkiDer + sigRaw = BA.convert sig :: B.ByteString + -- AlgorithmIdentifier for Ed25519 (7 bytes) + algId = B.pack [0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x70] + -- BIT STRING wrapper (3 + 64 = 67 bytes) + bitString = B.pack [0x03, 0x41, 0x00] <> sigRaw + -- Outer SEQUENCE: content = 44 + 7 + 67 = 118 = 0x76 + content = spkiDer <> algId <> bitString + signedExactDer = B.pack [0x30, 0x76] <> content + tsResult <- + callNode $ + impHs + <> "const sk = Hs.extractSignedKey(" <> jsUint8 signedExactDer <> ");" + <> jsOut "E.concatBytes(sk.dhKey, sk.signature)" + -- dhKey (32) + signature (64) = 96 bytes + tsResult `shouldBe` (dhPkRaw <> sigRaw) + + it "extractSignedKey signature verifies" $ do + let signSeed = B.pack [1 .. 32] + signSk = throwCryptoError $ Ed25519.secretKey signSeed + signPk = Ed25519.toPublic signSk + signPkRaw = BA.convert signPk :: B.ByteString + dhSeed = B.pack [41 .. 
72] + dhSk = throwCryptoError $ X25519.secretKey dhSeed + dhPk = X25519.toPublic dhSk + dhPkRaw = BA.convert dhPk :: B.ByteString + x25519Prefix = B.pack [0x30, 0x2a, 0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x6e, 0x03, 0x21, 0x00] + spkiDer = x25519Prefix <> dhPkRaw + sig = Ed25519.sign signSk signPk spkiDer + sigRaw = BA.convert sig :: B.ByteString + algId = B.pack [0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x70] + bitString = B.pack [0x03, 0x41, 0x00] <> sigRaw + content = spkiDer <> algId <> bitString + signedExactDer = B.pack [0x30, 0x76] <> content + tsResult <- + callNode $ + impHs + <> "const sk = Hs.extractSignedKey(" <> jsUint8 signedExactDer <> ");" + <> "const ok = K.verify(" <> jsUint8 signPkRaw <> ", sk.signature, sk.objectDer);" + <> jsOut "new Uint8Array([ok ? 1 : 0])" + tsResult `shouldBe` B.pack [1] + +-- ── protocol/description ────────────────────────────────────────── + +tsDescriptionTests :: Spec +tsDescriptionTests = describe "protocol/description" $ do + describe "base64url" $ do + it "encode matches Haskell strEncode" $ do + let bs = B.pack [0 .. 31] + tsResult <- + callNode $ + impDesc + <> jsOut ("new TextEncoder().encode(Desc.base64urlEncode(" <> jsUint8 bs <> "))") + tsResult `shouldBe` strEncode bs + + it "decode recovers original" $ do + let bs = B.pack [0 .. 31] + encoded = strEncode bs + tsResult <- + callNode $ + impDesc + <> "const s = new TextDecoder().decode(" <> jsUint8 encoded <> ");" + <> jsOut "Desc.base64urlDecode(s)" + tsResult `shouldBe` bs + + it "round-trip 256 bytes" $ do + let bs = B.pack [0 .. 255] + tsResult <- + callNode $ + impDesc + <> "const data = " <> jsUint8 bs <> ";" + <> "const encoded = Desc.base64urlEncode(data);" + <> jsOut "Desc.base64urlDecode(encoded)" + tsResult `shouldBe` bs + + describe "FileSize" $ do + it "encodeFileSize" $ do + let sizes = [500, 1024, 2048, 1048576, 8388608, 1073741824, 27262976 :: Int64] + expected = B.intercalate "," $ map (strEncode . 
FileSize) sizes + tsResult <- + callNode $ + impDesc + <> "const sizes = [500, 1024, 2048, 1048576, 8388608, 1073741824, 27262976];" + <> jsOut "new TextEncoder().encode(sizes.map(Desc.encodeFileSize).join(','))" + tsResult `shouldBe` expected + + it "decodeFileSize" $ do + tsResult <- + callNode $ + impDesc + <> "const strs = ['500','1kb','2kb','1mb','8mb','1gb'];" + <> jsOut "new TextEncoder().encode(strs.map(s => String(Desc.decodeFileSize(s))).join(','))" + tsResult `shouldBe` "500,1024,2048,1048576,8388608,1073741824" + + describe "FileDescription" $ do + it "fixture YAML round-trip" $ do + fixture <- B.readFile "tests/fixtures/file_description.yaml" + tsResult <- + callNode $ + impDesc + <> "const yaml = new TextDecoder().decode(" <> jsUint8 fixture <> ");" + <> "const fd = Desc.decodeFileDescription(yaml);" + <> "const reEncoded = Desc.encodeFileDescription(fd);" + <> jsOut "new TextEncoder().encode(reEncoded)" + tsResult `shouldBe` fixture + + it "fixture parsed structure" $ do + fixture <- B.readFile "tests/fixtures/file_description.yaml" + tsResult <- + callNode $ + impDesc + <> "const yaml = new TextDecoder().decode(" <> jsUint8 fixture <> ");" + <> "const fd = Desc.decodeFileDescription(yaml);" + <> "const r = [" + <> "fd.party," + <> "String(fd.size)," + <> "String(fd.chunkSize)," + <> "String(fd.chunks.length)," + <> "String(fd.chunks[0].replicas.length)," + <> "String(fd.chunks[3].chunkSize)," + <> "fd.redirect === null ? 
'null' : 'redirect'" + <> "].join(',');" + <> jsOut "new TextEncoder().encode(r)" + tsResult `shouldBe` "recipient,27262976,8388608,4,2,2097152,null" + + it "encode with redirect round-trips" $ do + tsResult <- + callNode $ + impDesc + <> "const fd = {" + <> " party: 'sender'," + <> " size: 1024," + <> " digest: new Uint8Array([1,2,3])," + <> " key: new Uint8Array(32)," + <> " nonce: new Uint8Array(24)," + <> " chunkSize: 1024," + <> " chunks: [{chunkNo: 1, chunkSize: 1024, digest: new Uint8Array([4,5,6])," + <> " replicas: [{server: 'xftp://abc=@example.com', replicaId: new Uint8Array([7,8,9])," + <> " replicaKey: new Uint8Array([10,11,12])}]}]," + <> " redirect: {size: 512, digest: new Uint8Array([13,14,15])}" + <> "};" + <> "const yaml = Desc.encodeFileDescription(fd);" + <> "const fd2 = Desc.decodeFileDescription(yaml);" + <> "const r = [" + <> "fd2.party," + <> "String(fd2.redirect !== null)," + <> "String(fd2.redirect?.size)," + <> "Desc.base64urlEncode(fd2.redirect?.digest || new Uint8Array())" + <> "].join(',');" + <> jsOut "new TextEncoder().encode(r)" + tsResult `shouldBe` "sender,true,512,DQ4P" + + it "fdSeparator" $ do + tsResult <- + callNode $ + impDesc + <> jsOut "new TextEncoder().encode(Desc.fdSeparator)" + tsResult `shouldBe` "################################\n" + + describe "validation" $ do + it "valid description" $ do + fixture <- B.readFile "tests/fixtures/file_description.yaml" + tsResult <- + callNode $ + impDesc + <> "const yaml = new TextDecoder().decode(" <> jsUint8 fixture <> ");" + <> "const fd = Desc.decodeFileDescription(yaml);" + <> "const r = Desc.validateFileDescription(fd);" + <> jsOut "new TextEncoder().encode(r === null ? 
'ok' : r)" + tsResult `shouldBe` "ok" + + it "non-sequential chunks" $ do + fixture <- B.readFile "tests/fixtures/file_description.yaml" + tsResult <- + callNode $ + impDesc + <> "const yaml = new TextDecoder().decode(" <> jsUint8 fixture <> ");" + <> "const fd = Desc.decodeFileDescription(yaml);" + <> "fd.chunks[1].chunkNo = 5;" + <> "const r = Desc.validateFileDescription(fd);" + <> jsOut "new TextEncoder().encode(r || 'ok')" + tsResult `shouldBe` "chunk numbers are not sequential" + + it "mismatched size" $ do + fixture <- B.readFile "tests/fixtures/file_description.yaml" + tsResult <- + callNode $ + impDesc + <> "const yaml = new TextDecoder().decode(" <> jsUint8 fixture <> ");" + <> "const fd = Desc.decodeFileDescription(yaml);" + <> "fd.size = 999;" + <> "const r = Desc.validateFileDescription(fd);" + <> jsOut "new TextEncoder().encode(r || 'ok')" + tsResult `shouldBe` "chunks total size is different than file size" + +-- ── protocol/chunks ─────────────────────────────────────────────── + +tsChunkTests :: Spec +tsChunkTests = describe "protocol/chunks" $ do + describe "prepareChunkSizes" $ do + it "matches Haskell for various sizes" $ do + let sizes = [100, 65536, 130000, 200000, 500000, 800000, 5000000, 27262976 :: Int64] + hsResults = map prepareChunkSizes sizes + expected = B.intercalate "|" $ map (\cs -> B.intercalate "," $ map (strEncode . 
FileSize) cs) hsResults + tsResult <- + callNode $ + impChk + <> "const sizes = [100, 65536, 130000, 200000, 500000, 800000, 5000000, 27262976];" + <> "const results = sizes.map(s => Chk.prepareChunkSizes(s).map(Desc.encodeFileSize).join(','));" + <> jsOut "new TextEncoder().encode(results.join('|'))" + tsResult `shouldBe` expected + + it "zero size" $ do + tsResult <- + callNode $ + impChk + <> jsOut "new TextEncoder().encode(Chk.prepareChunkSizes(0).join(','))" + tsResult `shouldBe` "" + + describe "singleChunkSize" $ do + it "finds smallest fitting chunk size" $ do + tsResult <- + callNode $ + impChk + <> "const sizes = [100, 65536, 262144, 300000, 1048576, 4194304, 5000000];" + <> "const results = sizes.map(s => {" + <> " const r = Chk.singleChunkSize(s);" + <> " return r === null ? 'null' : Desc.encodeFileSize(r);" + <> "});" + <> jsOut "new TextEncoder().encode(results.join(','))" + tsResult `shouldBe` "64kb,64kb,256kb,1mb,1mb,4mb,null" + + describe "prepareChunkSpecs" $ do + it "generates correct offsets" $ do + tsResult <- + callNode $ + impChk + <> "const specs = Chk.prepareChunkSpecs([4194304, 4194304, 1048576]);" + <> "const r = specs.map(s => s.chunkOffset + ':' + s.chunkSize).join(',');" + <> jsOut "new TextEncoder().encode(r)" + tsResult `shouldBe` "0:4194304,4194304:4194304,8388608:1048576" + + describe "getChunkDigest" $ do + it "matches Haskell sha256Hash" $ do + let chunk = B.pack [0 .. 
63] + expected = C.sha256Hash chunk + tsResult <- + callNode $ + impChk + <> jsOut ("Chk.getChunkDigest(" <> jsUint8 chunk <> ")") + tsResult `shouldBe` expected + + describe "constants" $ do + it "serverChunkSizes" $ do + tsResult <- + callNode $ + impChk + <> jsOut "new TextEncoder().encode(Chk.serverChunkSizes.map(Desc.encodeFileSize).join(','))" + tsResult `shouldBe` "64kb,256kb,1mb,4mb" + + it "fileSizeLen and authTagSize" $ do + tsResult <- + callNode $ + impChk + <> jsOut "new TextEncoder().encode(Chk.fileSizeLen + ',' + Chk.authTagSize)" + tsResult `shouldBe` "8,16" + +-- ── protocol/client ───────────────────────────────────────────── + +tsClientTests :: Spec +tsClientTests = describe "protocol/client" $ do + -- Fixed X25519 key pairs for deterministic tests + let privARaw = B.pack [1 .. 32] + privA = throwCryptoError $ X25519.secretKey privARaw + pubA = X25519.toPublic privA + pubARaw = BA.convert pubA :: B.ByteString + privBRaw = B.pack [33 .. 64] + privB = throwCryptoError $ X25519.secretKey privBRaw + pubB = X25519.toPublic privB + pubBRaw = BA.convert pubB :: B.ByteString + nonce24 = B.pack [0 .. 
23] + + describe "cbAuthenticate" $ do + it "matches Haskell output" $ do + let msg = "hello world authenticator test" + C.CbAuthenticator expected = + C.cbAuthenticate + (C.PublicKeyX25519 pubA) + (C.PrivateKeyX25519 privB) + (C.cbNonce nonce24) + msg + tsResult <- + callNode $ + impCli + <> "const auth = Cli.cbAuthenticate(" + <> jsUint8 pubARaw <> "," <> jsUint8 privBRaw <> "," + <> jsUint8 nonce24 <> "," <> jsUint8 msg <> ");" + <> jsOut "auth" + tsResult `shouldBe` expected + + it "is 80 bytes" $ do + let msg = "size test" + C.CbAuthenticator expected = + C.cbAuthenticate + (C.PublicKeyX25519 pubA) + (C.PrivateKeyX25519 privB) + (C.cbNonce nonce24) + msg + B.length expected `shouldBe` 80 + + describe "cbVerify" $ do + it "validates Haskell authenticator" $ do + let msg = "test message for verify" + C.CbAuthenticator authBytes_ = + C.cbAuthenticate + (C.PublicKeyX25519 pubA) + (C.PrivateKeyX25519 privB) + (C.cbNonce nonce24) + msg + tsResult <- + callNode $ + impCli + <> "const valid = Cli.cbVerify(" + <> jsUint8 pubBRaw <> "," <> jsUint8 privARaw <> "," + <> jsUint8 nonce24 <> "," <> jsUint8 authBytes_ <> "," + <> jsUint8 msg <> ");" + <> jsOut "new Uint8Array([valid ? 1 : 0])" + tsResult `shouldBe` B.pack [1] + + it "rejects wrong message" $ do + let msg = "correct message" + wrongMsg = "wrong message" + C.CbAuthenticator authBytes_ = + C.cbAuthenticate + (C.PublicKeyX25519 pubA) + (C.PrivateKeyX25519 privB) + (C.cbNonce nonce24) + msg + tsResult <- + callNode $ + impCli + <> "const valid = Cli.cbVerify(" + <> jsUint8 pubBRaw <> "," <> jsUint8 privARaw <> "," + <> jsUint8 nonce24 <> "," <> jsUint8 authBytes_ <> "," + <> jsUint8 wrongMsg <> ");" + <> jsOut "new Uint8Array([valid ? 
1 : 0])" + tsResult `shouldBe` B.pack [0] + + it "round-trip: TS authenticate, Haskell verify" $ do + let msg = "round trip test" + tsAuth <- + callNode $ + impCli + <> "const auth = Cli.cbAuthenticate(" + <> jsUint8 pubARaw <> "," <> jsUint8 privBRaw <> "," + <> jsUint8 nonce24 <> "," <> jsUint8 msg <> ");" + <> jsOut "auth" + let hsValid = + C.cbVerify + (C.PublicKeyX25519 pubB) + (C.PrivateKeyX25519 privA) + (C.cbNonce nonce24) + (C.CbAuthenticator tsAuth) + msg + hsValid `shouldBe` True + + describe "transport chunk encryption" $ do + let dhSecret = C.dh' (C.PublicKeyX25519 pubA) (C.PrivateKeyX25519 privB) + dhSecretBytes = case dhSecret of C.DhSecretX25519 k -> BA.convert k :: B.ByteString + + it "encryptTransportChunk matches Haskell" $ do + let plaintext = B.pack [100 .. 199] + state0 = either (error . show) id $ LC.cbInit dhSecret (C.cbNonce nonce24) + (cipher, state1) = LC.sbEncryptChunk state0 plaintext + tag = BA.convert $ LC.sbAuth state1 :: B.ByteString + expected = cipher <> tag + tsResult <- + callNode $ + impCli + <> "const enc = Cli.encryptTransportChunk(" + <> jsUint8 dhSecretBytes <> "," + <> jsUint8 nonce24 <> "," + <> jsUint8 plaintext <> ");" + <> jsOut "enc" + tsResult `shouldBe` expected + + it "decryptTransportChunk decrypts Haskell-encrypted data" $ do + let plaintext = B.pack ([200 .. 255] <> [0 .. 99]) + state0 = either (error . 
show) id $ LC.cbInit dhSecret (C.cbNonce nonce24) + (cipher, state1) = LC.sbEncryptChunk state0 plaintext + tag = BA.convert $ LC.sbAuth state1 :: B.ByteString + encData = cipher <> tag + tsResult <- + callNode $ + impCli + <> "const r = Cli.decryptTransportChunk(" + <> jsUint8 dhSecretBytes <> "," + <> jsUint8 nonce24 <> "," + <> jsUint8 encData <> ");" + <> "if (!r.valid) throw new Error('invalid');" + <> jsOut "r.content" + tsResult `shouldBe` plaintext + + it "round-trip encrypt then decrypt" $ do + let plaintext = B.pack [42, 42, 42, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + tsResult <- + callNode $ + impCli + <> "const plain = " <> jsUint8 plaintext <> ";" + <> "const enc = Cli.encryptTransportChunk(" + <> jsUint8 dhSecretBytes <> "," <> jsUint8 nonce24 <> ",plain);" + <> "const r = Cli.decryptTransportChunk(" + <> jsUint8 dhSecretBytes <> "," <> jsUint8 nonce24 <> ",enc);" + <> "if (!r.valid) throw new Error('invalid');" + <> jsOut "r.content" + tsResult `shouldBe` plaintext + + it "rejects tampered ciphertext" $ do + let plaintext = B.pack [10 .. 40] + tsResult <- + callNode $ + impCli + <> "const enc = Cli.encryptTransportChunk(" + <> jsUint8 dhSecretBytes <> "," <> jsUint8 nonce24 <> "," + <> jsUint8 plaintext <> ");" + <> "enc[0] ^= 0xff;" + <> "const r = Cli.decryptTransportChunk(" + <> jsUint8 dhSecretBytes <> "," <> jsUint8 nonce24 <> ",enc);" + <> jsOut "new Uint8Array([r.valid ? 1 : 0])" + tsResult `shouldBe` B.pack [0] + + describe "constants" $ do + it "cbAuthenticatorSize" $ do + tsResult <- + callNode $ + impCli <> jsOut "new TextEncoder().encode(String(Cli.cbAuthenticatorSize))" + tsResult `shouldBe` "80" + +-- ── download (integration) ────────────────────────────────────────── + +tsDownloadTests :: Spec +tsDownloadTests = describe "download" $ do + -- Fixed X25519 key pairs (same as client tests) + let privARaw = B.pack [1 .. 
32] + privA = throwCryptoError $ X25519.secretKey privARaw + pubA = X25519.toPublic privA + pubARaw = BA.convert pubA :: B.ByteString + privBRaw = B.pack [33 .. 64] + privB = throwCryptoError $ X25519.secretKey privBRaw + pubB = X25519.toPublic privB + pubBRaw = BA.convert pubB :: B.ByteString + nonce24 = B.pack [0 .. 23] + -- File-level key/nonce (different from transport) + fileKey32 = B.pack [1 .. 32] + fileNonce24 = B.pack [1 .. 24] + fileCbNonce = C.cbNonce fileNonce24 + fileSbKey = C.unsafeSbKey fileKey32 + + describe "processFileResponse" $ do + it "derives DH secret matching Haskell" $ do + -- Simulate: client has privA, server sends pubB + let hsDhSecret = C.dh' (C.PublicKeyX25519 pubB) (C.PrivateKeyX25519 privA) + hsDhBytes = case hsDhSecret of C.DhSecretX25519 k -> BA.convert k :: B.ByteString + tsDhSecret <- + callNode $ + impDl + <> "const dh = Dl.processFileResponse(" + <> jsUint8 privARaw <> "," <> jsUint8 pubBRaw <> ");" + <> jsOut "dh" + tsDhSecret `shouldBe` hsDhBytes + + describe "decryptReceivedChunk" $ do + it "transport decrypt with digest verification" $ do + -- Haskell: transport-encrypt a chunk + let dhSecret = C.dh' (C.PublicKeyX25519 pubA) (C.PrivateKeyX25519 privB) + dhSecretBytes = case dhSecret of C.DhSecretX25519 k -> BA.convert k :: B.ByteString + chunkData = B.pack [50 .. 149] + chunkDigest = C.sha256Hash chunkData + state0 = either (error . 
show) id $ LC.cbInit dhSecret (C.cbNonce nonce24) + (cipher, state1) = LC.sbEncryptChunk state0 chunkData + tag = BA.convert (LC.sbAuth state1) :: B.ByteString + encData = cipher <> tag + tsResult <- + callNode $ + impDl + <> "const r = Dl.decryptReceivedChunk(" + <> jsUint8 dhSecretBytes <> "," + <> jsUint8 nonce24 <> "," + <> jsUint8 encData <> "," + <> jsUint8 chunkDigest <> ");" + <> jsOut "r" + tsResult `shouldBe` chunkData + + it "rejects wrong digest" $ do + let dhSecret = C.dh' (C.PublicKeyX25519 pubA) (C.PrivateKeyX25519 privB) + dhSecretBytes = case dhSecret of C.DhSecretX25519 k -> BA.convert k :: B.ByteString + chunkData = B.pack [50 .. 149] + wrongDigest = B.replicate 32 0xff + state0 = either (error . show) id $ LC.cbInit dhSecret (C.cbNonce nonce24) + (cipher, state1) = LC.sbEncryptChunk state0 chunkData + tag = BA.convert (LC.sbAuth state1) :: B.ByteString + encData = cipher <> tag + tsResult <- + callNode $ + impDl + <> "let ok = false; try { Dl.decryptReceivedChunk(" + <> jsUint8 dhSecretBytes <> "," + <> jsUint8 nonce24 <> "," + <> jsUint8 encData <> "," + <> jsUint8 wrongDigest <> "); } catch(e) { ok = e.message.includes('digest'); }" + <> jsOut "new Uint8Array([ok ? 1 : 0])" + tsResult `shouldBe` B.pack [1] + + it "allows null digest (skip verification)" $ do + let dhSecret = C.dh' (C.PublicKeyX25519 pubA) (C.PrivateKeyX25519 privB) + dhSecretBytes = case dhSecret of C.DhSecretX25519 k -> BA.convert k :: B.ByteString + chunkData = B.pack [10 .. 50] + state0 = either (error . 
show) id $ LC.cbInit dhSecret (C.cbNonce nonce24) + (cipher, state1) = LC.sbEncryptChunk state0 chunkData + tag = BA.convert (LC.sbAuth state1) :: B.ByteString + encData = cipher <> tag + tsResult <- + callNode $ + impDl + <> "const r = Dl.decryptReceivedChunk(" + <> jsUint8 dhSecretBytes <> "," + <> jsUint8 nonce24 <> "," + <> jsUint8 encData <> ",null);" + <> jsOut "r" + tsResult `shouldBe` chunkData + + describe "full pipeline" $ do + it "Haskell file-encrypt + transport-encrypt -> TS transport-decrypt + file-decrypt" $ do + -- Step 1: file-level encryption (matches Haskell encryptFile) + let source = "Integration test: full download pipeline!" :: B.ByteString + hdr = FileHeader "pipeline.txt" Nothing + fileHdr = smpEncode hdr + fileSize' = fromIntegral (B.length fileHdr + B.length source) :: Int64 + encSize = 256 :: Int64 + sb = either (error . show) id $ LC.sbInit fileSbKey fileCbNonce + lenStr = smpEncode fileSize' + (hdrEnc, sb1) = LC.sbEncryptChunk sb (lenStr <> fileHdr) + (srcEnc, sb2) = LC.sbEncryptChunk sb1 source + padLen = encSize - 16 - fileSize' - 8 + padding = B.replicate (fromIntegral padLen) 0x23 + (padEnc, sb3) = LC.sbEncryptChunk sb2 padding + fileTag = BA.convert (LC.sbAuth sb3) :: B.ByteString + fileEncrypted = B.concat [hdrEnc, srcEnc, padEnc, fileTag] + -- Step 2: transport-level encryption (simulates server sending chunk) + let dhSecret = C.dh' (C.PublicKeyX25519 pubA) (C.PrivateKeyX25519 privB) + dhSecretBytes = case dhSecret of C.DhSecretX25519 k -> BA.convert k :: B.ByteString + ts0 = either (error . 
show) id $ LC.cbInit dhSecret (C.cbNonce nonce24) + (transportCipher, ts1) = LC.sbEncryptChunk ts0 fileEncrypted + transportTag = BA.convert (LC.sbAuth ts1) :: B.ByteString + transportEncData = transportCipher <> transportTag + -- Step 3: TS decrypts transport, then file-level + tsResult <- + callNode $ + impDl + <> "const chunk = Dl.decryptReceivedChunk(" + <> jsUint8 dhSecretBytes <> "," + <> jsUint8 nonce24 <> "," + <> jsUint8 transportEncData <> ",null);" + <> "const r = F.decryptChunks(" + <> show encSize <> "n,[chunk]," + <> jsUint8 fileKey32 <> "," + <> jsUint8 fileNonce24 <> ");" + <> "const hdrBytes = F.encodeFileHeader(r.header);" + <> jsOut "new Uint8Array([...hdrBytes, ...r.content])" + tsResult `shouldBe` (fileHdr <> source) + + it "multi-chunk file: Haskell encrypt -> TS decrypt" $ do + -- File content that spans two chunks when file-encrypted + let source = B.pack (take 200 $ cycle [0 .. 255]) + hdr = FileHeader "multi.bin" Nothing + fileHdr = smpEncode hdr + fileSize' = fromIntegral (B.length fileHdr + B.length source) :: Int64 + encSize = 512 :: Int64 + sb = either (error . 
show) id $ LC.sbInit fileSbKey fileCbNonce + lenStr = smpEncode fileSize' + (hdrEnc, sb1) = LC.sbEncryptChunk sb (lenStr <> fileHdr) + (srcEnc, sb2) = LC.sbEncryptChunk sb1 source + padLen = encSize - 16 - fileSize' - 8 + padding = B.replicate (fromIntegral padLen) 0x23 + (padEnc, sb3) = LC.sbEncryptChunk sb2 padding + fileTag = BA.convert (LC.sbAuth sb3) :: B.ByteString + fileEncrypted = B.concat [hdrEnc, srcEnc, padEnc, fileTag] + -- Split file-encrypted data into two "chunks" and transport-encrypt each + let splitPt = B.length fileEncrypted `div` 2 + fileChunk1 = B.take splitPt fileEncrypted + fileChunk2 = B.drop splitPt fileEncrypted + -- Transport encrypt chunk 1 (with separate DH / nonce per chunk) + dhSecret1 = C.dh' (C.PublicKeyX25519 pubA) (C.PrivateKeyX25519 privB) + dhSecret1Bytes = case dhSecret1 of C.DhSecretX25519 k -> BA.convert k :: B.ByteString + nonce1 = nonce24 + t1s0 = either (error . show) id $ LC.cbInit dhSecret1 (C.cbNonce nonce1) + (t1cipher, t1s1) = LC.sbEncryptChunk t1s0 fileChunk1 + t1tag = BA.convert (LC.sbAuth t1s1) :: B.ByteString + transportEnc1 = t1cipher <> t1tag + -- Transport encrypt chunk 2 (different nonce) + nonce2 = B.pack [24 .. 47] + dhSecret2 = C.dh' (C.PublicKeyX25519 pubB) (C.PrivateKeyX25519 privA) + dhSecret2Bytes = case dhSecret2 of C.DhSecretX25519 k -> BA.convert k :: B.ByteString + t2s0 = either (error . 
show) id $ LC.cbInit dhSecret2 (C.cbNonce nonce2) + (t2cipher, t2s1) = LC.sbEncryptChunk t2s0 fileChunk2 + t2tag = BA.convert (LC.sbAuth t2s1) :: B.ByteString + transportEnc2 = t2cipher <> t2tag + -- TS: transport-decrypt each chunk, then file-level decrypt the concatenation + tsResult <- + callNode $ + impDl + <> "const c1 = Dl.decryptReceivedChunk(" + <> jsUint8 dhSecret1Bytes <> "," <> jsUint8 nonce1 <> "," + <> jsUint8 transportEnc1 <> ",null);" + <> "const c2 = Dl.decryptReceivedChunk(" + <> jsUint8 dhSecret2Bytes <> "," <> jsUint8 nonce2 <> "," + <> jsUint8 transportEnc2 <> ",null);" + <> "const r = F.decryptChunks(" + <> show encSize <> "n,[c1,c2]," + <> jsUint8 fileKey32 <> "," + <> jsUint8 fileNonce24 <> ");" + <> "const hdrBytes = F.encodeFileHeader(r.header);" + <> jsOut "new Uint8Array([...hdrBytes, ...r.content])" + tsResult `shouldBe` (fileHdr <> source) + + describe "FGET + FRFile round-trip" $ do + it "encode FGET -> decode FRFile -> process -> transport decrypt" $ do + -- Client side: generate FGET command + let dhSecret = C.dh' (C.PublicKeyX25519 pubA) (C.PrivateKeyX25519 privB) + chunkData = "FGET round-trip test data" :: B.ByteString + state0 = either (error . 
show) id $ LC.cbInit dhSecret (C.cbNonce nonce24) + (cipher, state1) = LC.sbEncryptChunk state0 chunkData + tag = BA.convert (LC.sbAuth state1) :: B.ByteString + encData = cipher <> tag + -- Simulate server response: FILE + -- Server sends pubA (client has privB to do DH) + serverPubDer = C.encodePubKey (C.PublicKeyX25519 pubA) + fileResponseBytes = "FILE " <> smpEncode serverPubDer <> nonce24 + -- TS: parse FRFile response, derive DH secret, decrypt transport chunk + tsResult <- + callNode $ + impDl + <> "const resp = Cmd.decodeResponse(" + <> jsUint8 fileResponseBytes <> ");" + <> "if (resp.type !== 'FRFile') throw new Error('expected FRFile');" + <> "const dhSecret = Dl.processFileResponse(" + <> jsUint8 privBRaw <> ",resp.rcvDhKey);" + <> "const r = Dl.decryptReceivedChunk(dhSecret," + <> "resp.nonce," <> jsUint8 encData <> ",null);" + <> jsOut "r" + tsResult `shouldBe` chunkData + + describe "processDownloadedFile" $ do + it "decrypts file from transport-decrypted chunks" $ do + let source = "processDownloadedFile test" :: B.ByteString + hdr = FileHeader "download.txt" (Just "v1") + fileHdr = smpEncode hdr + fileSize' = fromIntegral (B.length fileHdr + B.length source) :: Int64 + encSize = 256 :: Int64 + sb = either (error . 
show) id $ LC.sbInit fileSbKey fileCbNonce + lenStr = smpEncode fileSize' + (hdrEnc, sb1) = LC.sbEncryptChunk sb (lenStr <> fileHdr) + (srcEnc, sb2) = LC.sbEncryptChunk sb1 source + padLen = encSize - 16 - fileSize' - 8 + padding = B.replicate (fromIntegral padLen) 0x23 + (padEnc, sb3) = LC.sbEncryptChunk sb2 padding + fileTag = BA.convert (LC.sbAuth sb3) :: B.ByteString + fileEncrypted = B.concat [hdrEnc, srcEnc, padEnc, fileTag] + -- TS: call processDownloadedFile with a minimal FileDescription-like object + tsResult <- + callNode $ + impDl + <> "const fd = {size: " <> show encSize <> "," + <> "key: " <> jsUint8 fileKey32 <> "," + <> "nonce: " <> jsUint8 fileNonce24 <> "};" + <> "const r = Dl.processDownloadedFile(fd, [" + <> jsUint8 fileEncrypted <> "]);" + <> "const hdrBytes = F.encodeFileHeader(r.header);" + <> jsOut "new Uint8Array([...hdrBytes, ...r.content])" + tsResult `shouldBe` (fileHdr <> source) diff --git a/xftp-web/src/crypto/file.ts b/xftp-web/src/crypto/file.ts new file mode 100644 index 000000000..e4e9bb3bc --- /dev/null +++ b/xftp-web/src/crypto/file.ts @@ -0,0 +1,94 @@ +// File-level encryption/decryption matching Simplex.FileTransfer.Crypto. +// Operates on in-memory Uint8Array (no file I/O needed for browser). 
+ +import {Decoder, concatBytes, encodeInt64, encodeString, decodeString, encodeMaybe, decodeMaybe} from "../protocol/encoding.js" +import {sbInit, sbEncryptChunk, sbDecryptTailTag, sbAuth} from "./secretbox.js" + +const AUTH_TAG_SIZE = 16n + +// ── FileHeader ────────────────────────────────────────────────── + +export interface FileHeader { + fileName: string + fileExtra: string | null +} + +// Encoding matches Haskell: smpEncode (fileName, fileExtra) +// = smpEncode fileName <> smpEncode fileExtra +// = encodeString(fileName) + encodeMaybe(encodeString, fileExtra) +export function encodeFileHeader(hdr: FileHeader): Uint8Array { + return concatBytes( + encodeString(hdr.fileName), + encodeMaybe(encodeString, hdr.fileExtra) + ) +} + +// Parse FileHeader from decrypted content (first 1024 bytes examined). +// Returns the parsed header and remaining bytes (file content). +export function parseFileHeader(data: Uint8Array): {header: FileHeader, rest: Uint8Array} { + const hdrLen = Math.min(1024, data.length) + const d = new Decoder(data.subarray(0, hdrLen)) + const fileName = decodeString(d) + const fileExtra = decodeMaybe(decodeString, d) + const consumed = d.offset() + return { + header: {fileName, fileExtra}, + rest: data.subarray(consumed) + } +} + +// ── Encryption (FileTransfer.Crypto:encryptFile) ──────────────── + +// Encrypt file content with streaming XSalsa20-Poly1305. 
+// Output format: encrypted(Int64 fileSize | fileHdr | source | '#' padding) | 16-byte auth tag +// +// source — raw file content +// fileHdr — pre-encoded FileHeader bytes (from encodeFileHeader) +// key — 32-byte symmetric key +// nonce — 24-byte nonce +// fileSize — BigInt(fileHdr.length + source.length) +// encSize — total output size (including 16-byte auth tag) +export function encryptFile( + source: Uint8Array, + fileHdr: Uint8Array, + key: Uint8Array, + nonce: Uint8Array, + fileSize: bigint, + encSize: bigint +): Uint8Array { + const state = sbInit(key, nonce) + const lenStr = encodeInt64(fileSize) + const padLen = Number(encSize - AUTH_TAG_SIZE - fileSize - 8n) + if (padLen < 0) throw new Error("encryptFile: encSize too small") + const hdr = sbEncryptChunk(state, concatBytes(lenStr, fileHdr)) + const encSource = sbEncryptChunk(state, source) + const padding = new Uint8Array(padLen) + padding.fill(0x23) // '#' + const encPad = sbEncryptChunk(state, padding) + const tag = sbAuth(state) + return concatBytes(hdr, encSource, encPad, tag) +} + +// ── Decryption (FileTransfer.Crypto:decryptChunks) ────────────── + +// Decrypt one or more XFTP chunks into a FileHeader and file content. +// Chunks are concatenated, then decrypted as a single stream. +// +// encSize — total encrypted size (including 16-byte auth tag) +// chunks — downloaded XFTP chunk data (concatenated = full encrypted file) +// key — 32-byte symmetric key +// nonce — 24-byte nonce +export function decryptChunks( + encSize: bigint, + chunks: Uint8Array[], + key: Uint8Array, + nonce: Uint8Array +): {header: FileHeader, content: Uint8Array} { + if (chunks.length === 0) throw new Error("decryptChunks: empty chunks") + const paddedLen = encSize - AUTH_TAG_SIZE + const data = chunks.length === 1 ? 
chunks[0] : concatBytes(...chunks) + const {valid, content} = sbDecryptTailTag(key, nonce, paddedLen, data) + if (!valid) throw new Error("decryptChunks: invalid auth tag") + const {header, rest} = parseFileHeader(content) + return {header, content: rest} +} diff --git a/xftp-web/src/crypto/secretbox.ts b/xftp-web/src/crypto/secretbox.ts new file mode 100644 index 000000000..48ca94da0 --- /dev/null +++ b/xftp-web/src/crypto/secretbox.ts @@ -0,0 +1,219 @@ +// Streaming XSalsa20-Poly1305 — Simplex.Messaging.Crypto / Crypto.Lazy +// +// Libsodium-wrappers-sumo does not expose crypto_stream_xsalsa20_xor_ic, +// so the Salsa20/20 stream cipher core is implemented here. +// HSalsa20 uses libsodium's crypto_core_hsalsa20. +// Poly1305 uses libsodium's streaming crypto_onetimeauth_* API. + +import sodium, {StateAddress} from "libsodium-wrappers-sumo" +import {concatBytes} from "../protocol/encoding.js" +import {pad, unPad, padLazy, unPadLazy} from "./padding.js" + +// crypto_core_hsalsa20 exists at runtime but is missing from @types/libsodium-wrappers-sumo +const _sodium = sodium as unknown as { + crypto_core_hsalsa20(input: Uint8Array, key: Uint8Array, constant?: Uint8Array): Uint8Array +} & typeof sodium + +// ── Salsa20/20 stream cipher core ─────────────────────────────── + +function readU32LE(buf: Uint8Array, off: number): number { + return ((buf[off] | (buf[off + 1] << 8) | (buf[off + 2] << 16) | (buf[off + 3] << 24)) >>> 0) +} + +function writeU32LE(buf: Uint8Array, off: number, val: number): void { + buf[off] = val & 0xff + buf[off + 1] = (val >>> 8) & 0xff + buf[off + 2] = (val >>> 16) & 0xff + buf[off + 3] = (val >>> 24) & 0xff +} + +function rotl32(v: number, n: number): number { + return ((v << n) | (v >>> (32 - n))) >>> 0 +} + +const SIGMA_0 = 0x61707865 +const SIGMA_1 = 0x3320646e +const SIGMA_2 = 0x79622d32 +const SIGMA_3 = 0x6b206574 + +function salsa20Block(key: Uint8Array, nonce8: Uint8Array, counter: number): Uint8Array { + const k0 = readU32LE(key, 0), 
k1 = readU32LE(key, 4) + const k2 = readU32LE(key, 8), k3 = readU32LE(key, 12) + const k4 = readU32LE(key, 16), k5 = readU32LE(key, 20) + const k6 = readU32LE(key, 24), k7 = readU32LE(key, 28) + const n0 = readU32LE(nonce8, 0), n1 = readU32LE(nonce8, 4) + + const s0 = SIGMA_0, s1 = k0, s2 = k1, s3 = k2 + const s4 = k3, s5 = SIGMA_1, s6 = n0, s7 = n1 + const s8 = counter >>> 0, s9 = 0, s10 = SIGMA_2, s11 = k4 + const s12 = k5, s13 = k6, s14 = k7, s15 = SIGMA_3 + + let x0 = s0, x1 = s1, x2 = s2, x3 = s3 + let x4 = s4, x5 = s5, x6 = s6, x7 = s7 + let x8 = s8, x9 = s9, x10 = s10, x11 = s11 + let x12 = s12, x13 = s13, x14 = s14, x15 = s15 + + for (let i = 0; i < 10; i++) { + // Column round + x4 ^= rotl32((x0 + x12) >>> 0, 7); x8 ^= rotl32((x4 + x0) >>> 0, 9) + x12 ^= rotl32((x8 + x4) >>> 0, 13); x0 ^= rotl32((x12 + x8) >>> 0, 18) + x9 ^= rotl32((x5 + x1) >>> 0, 7); x13 ^= rotl32((x9 + x5) >>> 0, 9) + x1 ^= rotl32((x13 + x9) >>> 0, 13); x5 ^= rotl32((x1 + x13) >>> 0, 18) + x14 ^= rotl32((x10 + x6) >>> 0, 7); x2 ^= rotl32((x14 + x10) >>> 0, 9) + x6 ^= rotl32((x2 + x14) >>> 0, 13); x10 ^= rotl32((x6 + x2) >>> 0, 18) + x3 ^= rotl32((x15 + x11) >>> 0, 7); x7 ^= rotl32((x3 + x15) >>> 0, 9) + x11 ^= rotl32((x7 + x3) >>> 0, 13); x15 ^= rotl32((x11 + x7) >>> 0, 18) + // Row round + x1 ^= rotl32((x0 + x3) >>> 0, 7); x2 ^= rotl32((x1 + x0) >>> 0, 9) + x3 ^= rotl32((x2 + x1) >>> 0, 13); x0 ^= rotl32((x3 + x2) >>> 0, 18) + x6 ^= rotl32((x5 + x4) >>> 0, 7); x7 ^= rotl32((x6 + x5) >>> 0, 9) + x4 ^= rotl32((x7 + x6) >>> 0, 13); x5 ^= rotl32((x4 + x7) >>> 0, 18) + x11 ^= rotl32((x10 + x9) >>> 0, 7); x8 ^= rotl32((x11 + x10) >>> 0, 9) + x9 ^= rotl32((x8 + x11) >>> 0, 13); x10 ^= rotl32((x9 + x8) >>> 0, 18) + x12 ^= rotl32((x15 + x14) >>> 0, 7); x13 ^= rotl32((x12 + x15) >>> 0, 9) + x14 ^= rotl32((x13 + x12) >>> 0, 13); x15 ^= rotl32((x14 + x13) >>> 0, 18) + } + + const out = new Uint8Array(64) + writeU32LE(out, 0, (x0 + s0) >>> 0); writeU32LE(out, 4, (x1 + s1) >>> 0) + writeU32LE(out, 
8, (x2 + s2) >>> 0); writeU32LE(out, 12, (x3 + s3) >>> 0) + writeU32LE(out, 16, (x4 + s4) >>> 0); writeU32LE(out, 20, (x5 + s5) >>> 0) + writeU32LE(out, 24, (x6 + s6) >>> 0); writeU32LE(out, 28, (x7 + s7) >>> 0) + writeU32LE(out, 32, (x8 + s8) >>> 0); writeU32LE(out, 36, (x9 + s9) >>> 0) + writeU32LE(out, 40, (x10 + s10) >>> 0); writeU32LE(out, 44, (x11 + s11) >>> 0) + writeU32LE(out, 48, (x12 + s12) >>> 0); writeU32LE(out, 52, (x13 + s13) >>> 0) + writeU32LE(out, 56, (x14 + s14) >>> 0); writeU32LE(out, 60, (x15 + s15) >>> 0) + return out +} + +// ── Streaming state ───────────────────────────────────────────── + +export interface SbState { + _subkey: Uint8Array + _nonce8: Uint8Array + _counter: number + _ksBuf: Uint8Array + _ksOff: number + _authState: StateAddress +} + +export function sbInit(key: Uint8Array, nonce: Uint8Array): SbState { + // Double HSalsa20 cascade matching Haskell cryptonite XSalsa20 (Crypto.hs:xSalsa20): + // subkey1 = HSalsa20(key, zeros16) + // subkey2 = HSalsa20(subkey1, nonce[0:16]) + // keystream = Salsa20(subkey2, nonce[16:24]) + const zeros16 = new Uint8Array(16) + const subkey1 = _sodium.crypto_core_hsalsa20(zeros16, key) + const subkey = _sodium.crypto_core_hsalsa20(nonce.subarray(0, 16), subkey1) + const nonce8 = new Uint8Array(nonce.subarray(16, 24)) + const block0 = salsa20Block(subkey, nonce8, 0) + const poly1305Key = block0.subarray(0, 32) + const ksBuf = new Uint8Array(block0.subarray(32)) + const authState = sodium.crypto_onetimeauth_init(poly1305Key) + return {_subkey: subkey, _nonce8: nonce8, _counter: 1, _ksBuf: ksBuf, _ksOff: 0, _authState: authState} +} + +export function cbInit(dhSecret: Uint8Array, nonce: Uint8Array): SbState { + return sbInit(dhSecret, nonce) +} + +export function sbEncryptChunk(state: SbState, chunk: Uint8Array): Uint8Array { + const cipher = xorKeystream(state, chunk) + sodium.crypto_onetimeauth_update(state._authState, cipher) + return cipher +} + +export function sbDecryptChunk(state: SbState, 
chunk: Uint8Array): Uint8Array { + sodium.crypto_onetimeauth_update(state._authState, chunk) + return xorKeystream(state, chunk) +} + +export function sbAuth(state: SbState): Uint8Array { + return sodium.crypto_onetimeauth_final(state._authState) +} + +// ── High-level: tail tag (tag appended) ───────────────────────── + +export function sbEncryptTailTag( + key: Uint8Array, nonce: Uint8Array, + data: Uint8Array, len: bigint, padLen: bigint +): Uint8Array { + const padded = padLazy(data, len, padLen) + const state = sbInit(key, nonce) + const cipher = sbEncryptChunk(state, padded) + const tag = sbAuth(state) + return concatBytes(cipher, tag) +} + +export function sbDecryptTailTag( + key: Uint8Array, nonce: Uint8Array, + paddedLen: bigint, data: Uint8Array +): {valid: boolean; content: Uint8Array} { + const pLen = Number(paddedLen) + const cipher = data.subarray(0, pLen) + const providedTag = data.subarray(pLen) + const state = sbInit(key, nonce) + const plaintext = sbDecryptChunk(state, cipher) + const computedTag = sbAuth(state) + const valid = providedTag.length === 16 && constantTimeEqual(providedTag, computedTag) + const content = unPadLazy(plaintext) + return {valid, content} +} + +// ── Tag-prepended secretbox (Haskell Crypto.hs:cryptoBox) ─────── + +export function cryptoBox(key: Uint8Array, nonce: Uint8Array, msg: Uint8Array): Uint8Array { + const state = sbInit(key, nonce) + const cipher = sbEncryptChunk(state, msg) + const tag = sbAuth(state) + return concatBytes(tag, cipher) +} + +export function cbEncrypt( + dhSecret: Uint8Array, nonce: Uint8Array, + msg: Uint8Array, padLen: number +): Uint8Array { + return cryptoBox(dhSecret, nonce, pad(msg, padLen)) +} + +export function cbDecrypt( + dhSecret: Uint8Array, nonce: Uint8Array, + packet: Uint8Array +): Uint8Array { + const tag = packet.subarray(0, 16) + const cipher = packet.subarray(16) + const state = sbInit(dhSecret, nonce) + const plaintext = sbDecryptChunk(state, cipher) + const computedTag = 
sbAuth(state) + if (!constantTimeEqual(tag, computedTag)) throw new Error("secretbox: authentication failed") + return unPad(plaintext) +} + +// ── Internal ──────────────────────────────────────────────────── + +function xorKeystream(state: SbState, data: Uint8Array): Uint8Array { + const result = new Uint8Array(data.length) + let off = 0 + while (off < data.length) { + if (state._ksOff >= state._ksBuf.length) { + state._ksBuf = salsa20Block(state._subkey, state._nonce8, state._counter++) + state._ksOff = 0 + } + const available = state._ksBuf.length - state._ksOff + const needed = data.length - off + const n = Math.min(available, needed) + for (let i = 0; i < n; i++) { + result[off + i] = data[off + i] ^ state._ksBuf[state._ksOff + i] + } + state._ksOff += n + off += n + } + return result +} + +function constantTimeEqual(a: Uint8Array, b: Uint8Array): boolean { + if (a.length !== b.length) return false + let diff = 0 + for (let i = 0; i < a.length; i++) diff |= a[i] ^ b[i] + return diff === 0 +} diff --git a/xftp-web/src/download.ts b/xftp-web/src/download.ts new file mode 100644 index 000000000..35c4395b1 --- /dev/null +++ b/xftp-web/src/download.ts @@ -0,0 +1,75 @@ +// XFTP download pipeline — integration of protocol + crypto layers. +// +// Ties together: DH key exchange (keys), transport decryption (client), +// file-level decryption (file), chunk sizing (chunks), digest verification. +// +// Usage: +// 1. Parse FileDescription from YAML (description.ts) +// 2. For each chunk replica: +// a. generateX25519KeyPair() → ephemeral DH keypair +// b. encodeFGET(dhPub) → FGET command +// c. encodeAuthTransmission(...) → padded block (send to server) +// d. decodeTransmission(responseBlock) → raw response +// e. decodeResponse(raw) → FRFile { rcvDhKey, nonce } +// f. processFileResponse(rcvPrivKey, rcvDhKey) → dhSecret +// g. decryptReceivedChunk(dhSecret, nonce, encData, digest) → plaintext +// 3. 
processDownloadedFile(fd, plaintextChunks) → { header, content } + +import {dh} from "./crypto/keys.js" +import {sha256} from "./crypto/digest.js" +import {decryptChunks, type FileHeader} from "./crypto/file.js" +import {decryptTransportChunk} from "./protocol/client.js" +import type {FileDescription} from "./protocol/description.js" + +// ── Process FRFile response ───────────────────────────────────── + +// Derive transport decryption secret from FRFile response parameters. +// Uses DH(serverDhKey, recipientPrivKey) to produce shared secret. +export function processFileResponse( + recipientPrivKey: Uint8Array, // Ephemeral X25519 private key (32 bytes) + serverDhKey: Uint8Array, // rcvDhKey from FRFile response (32 bytes) +): Uint8Array { + return dh(serverDhKey, recipientPrivKey) +} + +// ── Decrypt a single received chunk ───────────────────────────── + +// Decrypt transport-encrypted chunk data and verify SHA-256 digest. +// Returns decrypted content or throws on auth tag / digest failure. +export function decryptReceivedChunk( + dhSecret: Uint8Array, + cbNonce: Uint8Array, + encData: Uint8Array, + expectedDigest: Uint8Array | null +): Uint8Array { + const {valid, content} = decryptTransportChunk(dhSecret, cbNonce, encData) + if (!valid) throw new Error("transport auth tag verification failed") + if (expectedDigest !== null) { + const actual = sha256(content) + if (!digestEqual(actual, expectedDigest)) { + throw new Error("chunk digest mismatch") + } + } + return content +} + +// ── Full download pipeline ────────────────────────────────────── + +// Process downloaded file: concatenate transport-decrypted chunks, +// then file-level decrypt using key/nonce from file description. +// Returns parsed FileHeader and file content. 
+export function processDownloadedFile( + fd: FileDescription, + plaintextChunks: Uint8Array[] +): {header: FileHeader, content: Uint8Array} { + return decryptChunks(BigInt(fd.size), plaintextChunks, fd.key, fd.nonce) +} + +// ── Internal ──────────────────────────────────────────────────── + +function digestEqual(a: Uint8Array, b: Uint8Array): boolean { + if (a.length !== b.length) return false + let diff = 0 + for (let i = 0; i < a.length; i++) diff |= a[i] ^ b[i] + return diff === 0 +} diff --git a/xftp-web/src/protocol/chunks.ts b/xftp-web/src/protocol/chunks.ts new file mode 100644 index 000000000..db2bbe763 --- /dev/null +++ b/xftp-web/src/protocol/chunks.ts @@ -0,0 +1,86 @@ +// XFTP chunk sizing — Simplex.FileTransfer.Chunks + Client +// +// Computes chunk sizes for file uploads, chunk specifications with offsets, +// and per-chunk SHA-256 digests. + +import {kb, mb} from "./description.js" +import {sha256} from "../crypto/digest.js" + +// ── Chunk size constants (Simplex.FileTransfer.Chunks) ────────── + +export const chunkSize0 = kb(64) // 65536 +export const chunkSize1 = kb(256) // 262144 +export const chunkSize2 = mb(1) // 1048576 +export const chunkSize3 = mb(4) // 4194304 + +export const serverChunkSizes = [chunkSize0, chunkSize1, chunkSize2, chunkSize3] + +// ── Size constants ────────────────────────────────────────────── + +export const fileSizeLen = 8 // 64-bit file size prefix (padLazy) +export const authTagSize = 16 // Poly1305 authentication tag + +// ── Chunk sizing (Simplex.FileTransfer.Client.prepareChunkSizes) ─ + +function size34(sz: number): number { + return Math.floor((sz * 3) / 4) +} + +export function prepareChunkSizes(payloadSize: number): number[] { + let smallSize: number, bigSize: number + if (payloadSize > size34(chunkSize3)) { + smallSize = chunkSize2; bigSize = chunkSize3 + } else if (payloadSize > size34(chunkSize2)) { + smallSize = chunkSize1; bigSize = chunkSize2 + } else { + smallSize = chunkSize0; bigSize = chunkSize1 + } + 
function prepareSizes(size: number): number[] { + if (size === 0) return [] + if (size >= bigSize) { + const n1 = Math.floor(size / bigSize) + const remSz = size % bigSize + return new Array(n1).fill(bigSize).concat(prepareSizes(remSz)) + } + if (size > size34(bigSize)) return [bigSize] + const n2 = Math.floor(size / smallSize) + const remSz2 = size % smallSize + return new Array(remSz2 === 0 ? n2 : n2 + 1).fill(smallSize) + } + return prepareSizes(payloadSize) +} + +// Find the smallest server chunk size that fits the payload. +// Returns null if payload exceeds the largest chunk size. +// Matches Haskell singleChunkSize. +export function singleChunkSize(payloadSize: number): number | null { + for (const sz of serverChunkSizes) { + if (payloadSize <= sz) return sz + } + return null +} + +// ── Chunk specs ───────────────────────────────────────────────── + +export interface ChunkSpec { + chunkOffset: number + chunkSize: number +} + +// Generate chunk specifications with byte offsets. +// Matches Haskell prepareChunkSpecs (without filePath). +export function prepareChunkSpecs(chunkSizes: number[]): ChunkSpec[] { + const specs: ChunkSpec[] = [] + let offset = 0 + for (const size of chunkSizes) { + specs.push({chunkOffset: offset, chunkSize: size}) + offset += size + } + return specs +} + +// ── Chunk digest ──────────────────────────────────────────────── + +export function getChunkDigest(chunk: Uint8Array): Uint8Array { + return sha256(chunk) +} diff --git a/xftp-web/src/protocol/client.ts b/xftp-web/src/protocol/client.ts new file mode 100644 index 000000000..e5a3c2d70 --- /dev/null +++ b/xftp-web/src/protocol/client.ts @@ -0,0 +1,95 @@ +// XFTP client protocol operations — Simplex.FileTransfer.Client + Crypto +// +// CbAuthenticator-based command authentication and transport-level +// chunk encryption/decryption for XFTP downloads. 
+ +import {concatBytes} from "./encoding.js" +import {dh} from "../crypto/keys.js" +import {sha512} from "../crypto/digest.js" +import { + cbInit, sbEncryptChunk, sbDecryptChunk, sbAuth, cryptoBox +} from "../crypto/secretbox.js" + +// ── Constants ─────────────────────────────────────────────────── + +export const cbAuthenticatorSize = 80 // SHA512 (64) + authTag (16) + +// ── CbAuthenticator (Crypto.hs:cbAuthenticate) ───────────────── + +// Create crypto_box authenticator for a message. +// Encrypts sha512(msg) with NaCl crypto_box using DH(peerPubKey, ownPrivKey). +// Returns 80 bytes (16-byte tag prepended + 64-byte encrypted hash). +export function cbAuthenticate( + peerPubKey: Uint8Array, + ownPrivKey: Uint8Array, + nonce: Uint8Array, + msg: Uint8Array +): Uint8Array { + const dhSecret = dh(peerPubKey, ownPrivKey) + const hash = sha512(msg) + return cryptoBox(dhSecret, nonce, hash) +} + +// Verify crypto_box authenticator for a message. +// Decrypts authenticator with DH(peerPubKey, ownPrivKey), checks against sha512(msg). +export function cbVerify( + peerPubKey: Uint8Array, + ownPrivKey: Uint8Array, + nonce: Uint8Array, + authenticator: Uint8Array, + msg: Uint8Array +): boolean { + if (authenticator.length !== cbAuthenticatorSize) return false + const dhSecret = dh(peerPubKey, ownPrivKey) + const tag = authenticator.subarray(0, 16) + const cipher = authenticator.subarray(16) + const state = cbInit(dhSecret, nonce) + const plaintext = sbDecryptChunk(state, cipher) + const computedTag = sbAuth(state) + if (!constantTimeEqual(tag, computedTag)) return false + const expectedHash = sha512(msg) + return constantTimeEqual(plaintext, expectedHash) +} + +// ── Transport-level chunk encryption/decryption ───────────────── + +// Encrypt a chunk for transport (tag-appended format). +// Matches sendEncFile in FileTransfer.Transport: +// ciphertext streamed via sbEncryptChunk, then 16-byte auth tag appended. 
+export function encryptTransportChunk( + dhSecret: Uint8Array, + cbNonce: Uint8Array, + plainData: Uint8Array +): Uint8Array { + const state = cbInit(dhSecret, cbNonce) + const cipher = sbEncryptChunk(state, plainData) + const tag = sbAuth(state) + return concatBytes(cipher, tag) +} + +// Decrypt a transport-encrypted chunk (tag-appended format). +// Matches receiveEncFile / receiveSbFile in FileTransfer.Transport: +// ciphertext decrypted via sbDecryptChunk, then 16-byte auth tag verified. +export function decryptTransportChunk( + dhSecret: Uint8Array, + cbNonce: Uint8Array, + encData: Uint8Array +): {valid: boolean, content: Uint8Array} { + if (encData.length < 16) return {valid: false, content: new Uint8Array(0)} + const cipher = encData.subarray(0, encData.length - 16) + const providedTag = encData.subarray(encData.length - 16) + const state = cbInit(dhSecret, cbNonce) + const plaintext = sbDecryptChunk(state, cipher) + const computedTag = sbAuth(state) + const valid = constantTimeEqual(providedTag, computedTag) + return {valid, content: plaintext} +} + +// ── Internal ──────────────────────────────────────────────────── + +function constantTimeEqual(a: Uint8Array, b: Uint8Array): boolean { + if (a.length !== b.length) return false + let diff = 0 + for (let i = 0; i < a.length; i++) diff |= a[i] ^ b[i] + return diff === 0 +} diff --git a/xftp-web/src/protocol/commands.ts b/xftp-web/src/protocol/commands.ts new file mode 100644 index 000000000..9f5d56d4f --- /dev/null +++ b/xftp-web/src/protocol/commands.ts @@ -0,0 +1,158 @@ +// Protocol commands and responses — Simplex.FileTransfer.Protocol +// +// Commands (client -> server): FNEW, FADD, FPUT, FDEL, FGET, FACK, PING +// Responses (server -> client): SIDS, RIDS, FILE, OK, ERR, PONG + +import { + Decoder, concatBytes, + encodeBytes, decodeBytes, + encodeWord32, + encodeNonEmpty, decodeNonEmpty, + encodeMaybe +} from "./encoding.js" +import {decodePubKeyX25519} from "../crypto/keys.js" + +// ── Types 
───────────────────────────────────────────────────────── + +export interface FileInfo { + sndKey: Uint8Array // DER-encoded Ed25519 public key (44 bytes) + size: number // Word32 + digest: Uint8Array // SHA-256 digest (32 bytes) +} + +export type CommandError = "UNKNOWN" | "SYNTAX" | "PROHIBITED" | "NO_AUTH" | "HAS_AUTH" | "NO_ENTITY" + +export type XFTPErrorType = + | {type: "BLOCK"} | {type: "SESSION"} | {type: "HANDSHAKE"} + | {type: "CMD", cmdErr: CommandError} + | {type: "AUTH"} + | {type: "BLOCKED", blockInfo: string} + | {type: "SIZE"} | {type: "QUOTA"} | {type: "DIGEST"} | {type: "CRYPTO"} + | {type: "NO_FILE"} | {type: "HAS_FILE"} | {type: "FILE_IO"} + | {type: "TIMEOUT"} | {type: "INTERNAL"} + +export type FileResponse = + | {type: "FRSndIds", senderId: Uint8Array, recipientIds: Uint8Array[]} + | {type: "FRRcvIds", recipientIds: Uint8Array[]} + | {type: "FRFile", rcvDhKey: Uint8Array, nonce: Uint8Array} + | {type: "FROk"} + | {type: "FRErr", err: XFTPErrorType} + | {type: "FRPong"} + +// ── FileInfo encoding ───────────────────────────────────────────── + +// smpEncode FileInfo {sndKey, size, digest} = smpEncode (sndKey, size, digest) +export function encodeFileInfo(fi: FileInfo): Uint8Array { + return concatBytes(encodeBytes(fi.sndKey), encodeWord32(fi.size), encodeBytes(fi.digest)) +} + +// ── Command encoding (encodeProtocol) ───────────────────────────── + +const SPACE = new Uint8Array([0x20]) + +function ascii(s: string): Uint8Array { + const buf = new Uint8Array(s.length) + for (let i = 0; i < s.length; i++) buf[i] = s.charCodeAt(i) + return buf +} + +export function encodeFNEW(file: FileInfo, rcvKeys: Uint8Array[], auth: Uint8Array | null): Uint8Array { + return concatBytes( + ascii("FNEW"), SPACE, + encodeFileInfo(file), + encodeNonEmpty(encodeBytes, rcvKeys), + encodeMaybe(encodeBytes, auth) + ) +} + +export function encodeFADD(rcvKeys: Uint8Array[]): Uint8Array { + return concatBytes(ascii("FADD"), SPACE, encodeNonEmpty(encodeBytes, rcvKeys)) 
+} + +export function encodeFPUT(): Uint8Array { return ascii("FPUT") } + +export function encodeFDEL(): Uint8Array { return ascii("FDEL") } + +export function encodeFGET(rcvDhKey: Uint8Array): Uint8Array { + return concatBytes(ascii("FGET"), SPACE, encodeBytes(rcvDhKey)) +} + +export function encodeFACK(): Uint8Array { return ascii("FACK") } + +export function encodePING(): Uint8Array { return ascii("PING") } + +// ── Response decoding ───────────────────────────────────────────── + +function readTag(d: Decoder): string { + const start = d.offset() + while (d.remaining() > 0) { + if (d.buf[d.offset()] === 0x20 || d.buf[d.offset()] === 0x0a) break + d.anyByte() + } + let s = "" + for (let i = start; i < d.offset(); i++) s += String.fromCharCode(d.buf[i]) + return s +} + +function readSpace(d: Decoder): void { + if (d.anyByte() !== 0x20) throw new Error("expected space") +} + +function decodeCommandError(s: string): CommandError { + if (s === "UNKNOWN" || s === "SYNTAX" || s === "PROHIBITED" || s === "NO_AUTH" || s === "HAS_AUTH" || s === "NO_ENTITY") return s + if (s === "NO_QUEUE") return "NO_ENTITY" + throw new Error("bad CommandError: " + s) +} + +export function decodeXFTPError(d: Decoder): XFTPErrorType { + const s = readTag(d) + switch (s) { + case "BLOCK": return {type: "BLOCK"} + case "SESSION": return {type: "SESSION"} + case "HANDSHAKE": return {type: "HANDSHAKE"} + case "CMD": { readSpace(d); return {type: "CMD", cmdErr: decodeCommandError(readTag(d))} } + case "AUTH": return {type: "AUTH"} + case "BLOCKED": { + readSpace(d) + const rest = d.takeAll() + let info = "" + for (let i = 0; i < rest.length; i++) info += String.fromCharCode(rest[i]) + return {type: "BLOCKED", blockInfo: info} + } + case "SIZE": return {type: "SIZE"} + case "QUOTA": return {type: "QUOTA"} + case "DIGEST": return {type: "DIGEST"} + case "CRYPTO": return {type: "CRYPTO"} + case "NO_FILE": return {type: "NO_FILE"} + case "HAS_FILE": return {type: "HAS_FILE"} + case "FILE_IO": 
return {type: "FILE_IO"} + case "TIMEOUT": return {type: "TIMEOUT"} + case "INTERNAL": return {type: "INTERNAL"} + default: throw new Error("bad XFTPErrorType: " + s) + } +} + +export function decodeResponse(data: Uint8Array): FileResponse { + const d = new Decoder(data) + const tagStr = readTag(d) + switch (tagStr) { + case "SIDS": { + readSpace(d) + return {type: "FRSndIds", senderId: decodeBytes(d), recipientIds: decodeNonEmpty(decodeBytes, d)} + } + case "RIDS": { + readSpace(d) + return {type: "FRRcvIds", recipientIds: decodeNonEmpty(decodeBytes, d)} + } + case "FILE": { + readSpace(d) + const rcvDhKey = decodePubKeyX25519(decodeBytes(d)) + const nonce = d.take(24) + return {type: "FRFile", rcvDhKey, nonce} + } + case "OK": return {type: "FROk"} + case "ERR": { readSpace(d); return {type: "FRErr", err: decodeXFTPError(d)} } + case "PONG": return {type: "FRPong"} + default: throw new Error("unknown response: " + tagStr) + } +} diff --git a/xftp-web/src/protocol/description.ts b/xftp-web/src/protocol/description.ts new file mode 100644 index 000000000..00afd2bd9 --- /dev/null +++ b/xftp-web/src/protocol/description.ts @@ -0,0 +1,363 @@ +// XFTP file description encoding/decoding — Simplex.FileTransfer.Description +// +// Handles YAML-encoded file descriptions matching Haskell Data.Yaml output format. +// Base64url encoding matches Haskell Data.ByteString.Base64.URL.encode (with padding). 
+ +// ── Base64url (RFC 4648 §5) with '=' padding ─────────────────── + +const B64URL = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_" +const B64_DECODE = new Uint8Array(128) +B64_DECODE.fill(0xff) +for (let i = 0; i < 64; i++) B64_DECODE[B64URL.charCodeAt(i)] = i + +export function base64urlEncode(data: Uint8Array): string { + let result = "" + const len = data.length + let i = 0 + for (; i + 2 < len; i += 3) { + const b0 = data[i], b1 = data[i + 1], b2 = data[i + 2] + result += B64URL[b0 >>> 2] + result += B64URL[((b0 & 3) << 4) | (b1 >>> 4)] + result += B64URL[((b1 & 15) << 2) | (b2 >>> 6)] + result += B64URL[b2 & 63] + } + if (i < len) { + const b0 = data[i] + result += B64URL[b0 >>> 2] + if (i + 1 < len) { + const b1 = data[i + 1] + result += B64URL[((b0 & 3) << 4) | (b1 >>> 4)] + result += B64URL[(b1 & 15) << 2] + result += "=" + } else { + result += B64URL[(b0 & 3) << 4] + result += "==" + } + } + return result +} + +export function base64urlDecode(s: string): Uint8Array { + let end = s.length + while (end > 0 && s.charCodeAt(end - 1) === 0x3d) end-- // strip '=' + const n = end + const out = new Uint8Array((n * 3) >>> 2) + let j = 0, i = 0 + for (; i + 3 < n; i += 4) { + const a = B64_DECODE[s.charCodeAt(i)], b = B64_DECODE[s.charCodeAt(i + 1)] + const c = B64_DECODE[s.charCodeAt(i + 2)], d = B64_DECODE[s.charCodeAt(i + 3)] + out[j++] = (a << 2) | (b >>> 4) + out[j++] = ((b & 15) << 4) | (c >>> 2) + out[j++] = ((c & 3) << 6) | d + } + if (n - i >= 2) { + const a = B64_DECODE[s.charCodeAt(i)], b = B64_DECODE[s.charCodeAt(i + 1)] + out[j++] = (a << 2) | (b >>> 4) + if (n - i >= 3) { + const c = B64_DECODE[s.charCodeAt(i + 2)] + out[j++] = ((b & 15) << 4) | (c >>> 2) + } + } + return out +} + +// ── FileSize encoding/decoding ────────────────────────────────── + +export const kb = (n: number): number => n * 1024 +export const mb = (n: number): number => n * 1048576 +export const gb = (n: number): number => n * 1073741824 + +export function 
encodeFileSize(bytes: number): string { + const ks = Math.floor(bytes / 1024) + if (bytes % 1024 !== 0) return String(bytes) + const ms = Math.floor(ks / 1024) + if (ks % 1024 !== 0) return ks + "kb" + const gs = Math.floor(ms / 1024) + if (ms % 1024 !== 0) return ms + "mb" + return gs + "gb" +} + +export function decodeFileSize(s: string): number { + if (s.endsWith("gb")) return parseInt(s) * 1073741824 + if (s.endsWith("mb")) return parseInt(s) * 1048576 + if (s.endsWith("kb")) return parseInt(s) * 1024 + return parseInt(s) +} + +// ── Types ─────────────────────────────────────────────────────── + +export type FileParty = "recipient" | "sender" + +export interface FileDescription { + party: FileParty + size: number // total file size in bytes + digest: Uint8Array // SHA-256 file digest + key: Uint8Array // SbKey (32 bytes) + nonce: Uint8Array // CbNonce (24 bytes) + chunkSize: number // default chunk size in bytes + chunks: FileChunk[] + redirect: RedirectFileInfo | null +} + +export interface RedirectFileInfo { + size: number + digest: Uint8Array +} + +export interface FileChunk { + chunkNo: number + chunkSize: number + digest: Uint8Array + replicas: FileChunkReplica[] +} + +export interface FileChunkReplica { + server: string // XFTPServer URI (e.g. 
"xftp://abc=@example.com") + replicaId: Uint8Array + replicaKey: Uint8Array // DER-encoded private key +} + +// ── Internal: flat server replica ─────────────────────────────── + +interface FileServerReplica { + chunkNo: number + server: string + replicaId: Uint8Array + replicaKey: Uint8Array + digest: Uint8Array | null + chunkSize: number | null +} + +// ── Server replica colon-separated format ─────────────────────── + +function encodeServerReplica(r: FileServerReplica): string { + let s = r.chunkNo + ":" + base64urlEncode(r.replicaId) + ":" + base64urlEncode(r.replicaKey) + if (r.digest !== null) s += ":" + base64urlEncode(r.digest) + if (r.chunkSize !== null) s += ":" + encodeFileSize(r.chunkSize) + return s +} + +function decodeServerReplica(server: string, s: string): FileServerReplica { + const parts = s.split(":") + if (parts.length < 3) throw new Error("invalid server replica: " + s) + return { + chunkNo: parseInt(parts[0]), + server, + replicaId: base64urlDecode(parts[1]), + replicaKey: base64urlDecode(parts[2]), + digest: parts.length >= 4 ? base64urlDecode(parts[3]) : null, + chunkSize: parts.length >= 5 ? decodeFileSize(parts[4]) : null + } +} + +// ── Unfold chunks to flat replicas ────────────────────────────── + +function unfoldChunksToReplicas(defChunkSize: number, chunks: FileChunk[]): FileServerReplica[] { + const result: FileServerReplica[] = [] + for (const c of chunks) { + c.replicas.forEach((r, idx) => { + result.push({ + chunkNo: c.chunkNo, + server: r.server, + replicaId: r.replicaId, + replicaKey: r.replicaKey, + digest: idx === 0 ? c.digest : null, + chunkSize: c.chunkSize !== defChunkSize && idx === 0 ? 
c.chunkSize : null + }) + }) + } + return result +} + +// ── Group replicas by server (for YAML encoding) ──────────────── + +function encodeFileReplicas( + defChunkSize: number, chunks: FileChunk[] +): {server: string, chunks: string[]}[] { + const flat = unfoldChunksToReplicas(defChunkSize, chunks) + // Sort by server URI string (matches Haskell Ord for ProtocolServer when + // all servers share the same scheme and keyHash — true for typical use). + flat.sort((a, b) => a.server < b.server ? -1 : a.server > b.server ? 1 : 0) + const groups: {server: string, chunks: string[]}[] = [] + for (const r of flat) { + if (groups.length === 0 || groups[groups.length - 1].server !== r.server) { + groups.push({server: r.server, chunks: [encodeServerReplica(r)]}) + } else { + groups[groups.length - 1].chunks.push(encodeServerReplica(r)) + } + } + return groups +} + +// ── Fold flat replicas back into FileChunks ───────────────────── + +function bytesEqual(a: Uint8Array, b: Uint8Array): boolean { + if (a.length !== b.length) return false + for (let i = 0; i < a.length; i++) if (a[i] !== b[i]) return false + return true +} + +function foldReplicasToChunks(defChunkSize: number, replicas: FileServerReplica[]): FileChunk[] { + const sizes = new Map() + const digests = new Map() + for (const r of replicas) { + if (r.chunkSize !== null) { + const existing = sizes.get(r.chunkNo) + if (existing !== undefined && existing !== r.chunkSize) + throw new Error("different size in chunk replicas") + sizes.set(r.chunkNo, r.chunkSize) + } + if (r.digest !== null) { + const existing = digests.get(r.chunkNo) + if (existing !== undefined && !bytesEqual(existing, r.digest)) + throw new Error("different digest in chunk replicas") + digests.set(r.chunkNo, r.digest) + } + } + const chunkMap = new Map() + for (const r of replicas) { + const existing = chunkMap.get(r.chunkNo) + if (existing) { + existing.replicas.push({server: r.server, replicaId: r.replicaId, replicaKey: r.replicaKey}) + } else { + 
const digest = digests.get(r.chunkNo) + if (!digest) throw new Error("no digest for chunk") + chunkMap.set(r.chunkNo, { + chunkNo: r.chunkNo, + chunkSize: sizes.get(r.chunkNo) ?? defChunkSize, + digest, + replicas: [{server: r.server, replicaId: r.replicaId, replicaKey: r.replicaKey}] + }) + } + } + return Array.from(chunkMap.values()).sort((a, b) => a.chunkNo - b.chunkNo) +} + +// ── YAML encoding (matching Data.Yaml key ordering) ───────────── + +export function encodeFileDescription(fd: FileDescription): string { + const lines: string[] = [] + // Top-level keys in alphabetical order (matching Data.Yaml / libyaml) + lines.push("chunkSize: " + encodeFileSize(fd.chunkSize)) + lines.push("digest: " + base64urlEncode(fd.digest)) + lines.push("key: " + base64urlEncode(fd.key)) + lines.push("nonce: " + base64urlEncode(fd.nonce)) + lines.push("party: " + fd.party) + if (fd.redirect !== null) { + lines.push("redirect:") + lines.push(" digest: " + base64urlEncode(fd.redirect.digest)) + lines.push(" size: " + fd.redirect.size) + } + const groups = encodeFileReplicas(fd.chunkSize, fd.chunks) + lines.push("replicas:") + for (const g of groups) { + lines.push("- chunks:") + for (const c of g.chunks) { + lines.push(" - " + c) + } + lines.push(" server: " + g.server) + } + lines.push("size: " + encodeFileSize(fd.size)) + return lines.join("\n") + "\n" +} + +// ── YAML decoding ─────────────────────────────────────────────── + +export function decodeFileDescription(yaml: string): FileDescription { + const lines = yaml.split("\n") + const topLevel: Record<string, string> = {} + const replicaGroups: {server: string, chunks: string[]}[] = [] + let redirect: RedirectFileInfo | null = null + let i = 0 + while (i < lines.length) { + const line = lines[i] + if (line.length === 0) { i++; continue } + if (line === "replicas:") { + i++ + while (i < lines.length && lines[i].startsWith("- ")) { + const group = {server: "", chunks: [] as string[]} + i = parseReplicaItem(lines, i, group) + 
replicaGroups.push(group) + } + } else if (line === "redirect:") { + i++ + let digestStr = "", sizeStr = "" + while (i < lines.length && lines[i].startsWith(" ")) { + const kv = lines[i].substring(2) + const ci = kv.indexOf(": ") + if (ci >= 0) { + const k = kv.substring(0, ci), v = kv.substring(ci + 2) + if (k === "digest") digestStr = v + if (k === "size") sizeStr = v + } + i++ + } + redirect = {size: parseInt(sizeStr), digest: base64urlDecode(digestStr)} + } else { + const ci = line.indexOf(": ") + if (ci >= 0) topLevel[line.substring(0, ci)] = line.substring(ci + 2) + i++ + } + } + const chunkSize = decodeFileSize(topLevel["chunkSize"]) + const serverReplicas: FileServerReplica[] = [] + for (const g of replicaGroups) { + for (const c of g.chunks) serverReplicas.push(decodeServerReplica(g.server, c)) + } + return { + party: topLevel["party"] as FileParty, + size: decodeFileSize(topLevel["size"]), + digest: base64urlDecode(topLevel["digest"]), + key: base64urlDecode(topLevel["key"]), + nonce: base64urlDecode(topLevel["nonce"]), + chunkSize, + chunks: foldReplicasToChunks(chunkSize, serverReplicas), + redirect + } +} + +function parseReplicaItem( + lines: string[], startIdx: number, group: {server: string, chunks: string[]} +): number { + let i = startIdx + const first = lines[i].substring(2) // strip "- " prefix + i = parseReplicaField(first, lines, i + 1, group) + while (i < lines.length && lines[i].startsWith(" ") && !lines[i].startsWith("- ")) { + i = parseReplicaField(lines[i].substring(2), lines, i + 1, group) + } + return i +} + +function parseReplicaField( + entry: string, lines: string[], nextIdx: number, + group: {server: string, chunks: string[]} +): number { + if (entry === "chunks:" || entry.startsWith("chunks:")) { + let i = nextIdx + while (i < lines.length && lines[i].startsWith(" - ")) { + group.chunks.push(lines[i].substring(4)) + i++ + } + return i + } + const ci = entry.indexOf(": ") + if (ci >= 0) { + const k = entry.substring(0, ci), v = 
entry.substring(ci + 2) + if (k === "server") group.server = v + } + return nextIdx +} + +// ── Validation ────────────────────────────────────────────────── + +export function validateFileDescription(fd: FileDescription): string | null { + for (let i = 0; i < fd.chunks.length; i++) { + if (fd.chunks[i].chunkNo !== i + 1) return "chunk numbers are not sequential" + } + let total = 0 + for (const c of fd.chunks) total += c.chunkSize + if (total !== fd.size) return "chunks total size is different than file size" + return null +} + +export const fdSeparator = "################################\n" diff --git a/xftp-web/src/protocol/handshake.ts b/xftp-web/src/protocol/handshake.ts new file mode 100644 index 000000000..4be04815c --- /dev/null +++ b/xftp-web/src/protocol/handshake.ts @@ -0,0 +1,155 @@ +// XFTP handshake encoding/decoding — Simplex.FileTransfer.Transport +// +// Handles XFTP client/server handshake messages and version negotiation. + +import { + Decoder, concatBytes, + encodeWord16, decodeWord16, + encodeBytes, decodeBytes, + decodeLarge, decodeNonEmpty +} from "./encoding.js" +import {sha256} from "../crypto/digest.js" +import {decodePubKeyX25519} from "../crypto/keys.js" +import {blockPad, blockUnpad, XFTP_BLOCK_SIZE} from "./transmission.js" + +// ── Version types ────────────────────────────────────────────────── + +export interface VersionRange { + minVersion: number // Word16 + maxVersion: number // Word16 +} + +// Encode version range as two big-endian Word16s. 
+// Matches Haskell: smpEncode (VRange v1 v2) = smpEncode (v1, v2) +export function encodeVersionRange(vr: VersionRange): Uint8Array { + return concatBytes(encodeWord16(vr.minVersion), encodeWord16(vr.maxVersion)) +} + +export function decodeVersionRange(d: Decoder): VersionRange { + const minVersion = decodeWord16(d) + const maxVersion = decodeWord16(d) + if (minVersion > maxVersion) throw new Error("invalid version range: min > max") + return {minVersion, maxVersion} +} + +// Version negotiation: intersection of two version ranges, or null if incompatible. +// Matches Haskell compatibleVRange. +export function compatibleVRange(a: VersionRange, b: VersionRange): VersionRange | null { + const min = Math.max(a.minVersion, b.minVersion) + const max = Math.min(a.maxVersion, b.maxVersion) + if (min > max) return null + return {minVersion: min, maxVersion: max} +} + +// ── Client handshake ─────────────────────────────────────────────── + +export interface XFTPClientHandshake { + xftpVersion: number // Word16 — negotiated version + keyHash: Uint8Array // SHA-256 CA certificate fingerprint (32 bytes) +} + +// Encode and pad client handshake to XFTP_BLOCK_SIZE. +// Wire format: pad(smpEncode (xftpVersion, keyHash), 16384) +export function encodeClientHandshake(ch: XFTPClientHandshake): Uint8Array { + const body = concatBytes(encodeWord16(ch.xftpVersion), encodeBytes(ch.keyHash)) + return blockPad(body, XFTP_BLOCK_SIZE) +} + +// ── Server handshake ─────────────────────────────────────────────── + +export interface XFTPServerHandshake { + xftpVersionRange: VersionRange + sessionId: Uint8Array + certChainDer: Uint8Array[] // raw DER certificate blobs (NonEmpty) + signedKeyDer: Uint8Array // raw DER SignedExact blob +} + +// Decode padded server handshake block. +// Wire format: unpad(block) → (versionRange, sessionId, certChainPubKey) +// where certChainPubKey = (NonEmpty Large certChain, Large signedKey) +// Trailing bytes (Tail) are ignored for forward compatibility. 
+export function decodeServerHandshake(block: Uint8Array): XFTPServerHandshake { + const raw = blockUnpad(block) + const d = new Decoder(raw) + const xftpVersionRange = decodeVersionRange(d) + const sessionId = decodeBytes(d) + // CertChainPubKey: smpEncode (encodeCertChain certChain, SignedObject signedPubKey) + const certChainDer = decodeNonEmpty(decodeLarge, d) + const signedKeyDer = decodeLarge(d) + // Remaining bytes are Tail (ignored for forward compatibility) + return {xftpVersionRange, sessionId, certChainDer, signedKeyDer} +} + +// ── Certificate utilities ────────────────────────────────────────── + +// SHA-256 fingerprint of the CA certificate (last cert in chain). +// Matches Haskell: XV.getFingerprint ca X.HashSHA256 +export function caFingerprint(certChainDer: Uint8Array[]): Uint8Array { + if (certChainDer.length < 2) throw new Error("caFingerprint: need at least 2 certs (leaf + CA)") + return sha256(certChainDer[certChainDer.length - 1]) +} + +// ── SignedExact DER parsing ──────────────────────────────────────── + +// Parsed components of an X.509 SignedExact structure. +export interface SignedKey { + objectDer: Uint8Array // raw DER of the signed object (SubjectPublicKeyInfo) + dhKey: Uint8Array // extracted 32-byte X25519 public key + algorithm: Uint8Array // AlgorithmIdentifier DER bytes + signature: Uint8Array // raw Ed25519 signature bytes (64 bytes) +} + +// Parse ASN.1 DER length (short and long form). +function derLength(d: Decoder): number { + const first = d.anyByte() + if (first < 0x80) return first + const numBytes = first & 0x7f + if (numBytes === 0 || numBytes > 4) throw new Error("DER: unsupported length encoding") + let len = 0 + for (let i = 0; i < numBytes; i++) { + len = (len << 8) | d.anyByte() + } + return len +} + +// Read a complete TLV element, returning the full DER bytes (tag + length + value). 
+function derElement(d: Decoder): Uint8Array { + const start = d.offset() + d.anyByte() // tag + const len = derLength(d) + d.take(len) // value + return d.buf.subarray(start, d.offset()) +} + +// Extract components from a SignedExact X.PubKey DER structure. +// ASN.1 layout: +// SEQUENCE { +// SubjectPublicKeyInfo (SEQUENCE) — the signed object +// AlgorithmIdentifier (SEQUENCE) — signature algorithm +// BIT STRING — signature +// } +export function extractSignedKey(signedDer: Uint8Array): SignedKey { + const outer = new Decoder(signedDer) + const outerTag = outer.anyByte() + if (outerTag !== 0x30) throw new Error("SignedExact: expected SEQUENCE tag 0x30, got 0x" + outerTag.toString(16)) + derLength(outer) // consume total content length + + // First element: SubjectPublicKeyInfo + const objectDer = derElement(outer) + + // Second element: AlgorithmIdentifier + const algorithm = derElement(outer) + + // Third element: BIT STRING (signature) + const sigTag = outer.anyByte() + if (sigTag !== 0x03) throw new Error("SignedExact: expected BIT STRING tag 0x03, got 0x" + sigTag.toString(16)) + const sigLen = derLength(outer) + const unusedBits = outer.anyByte() + if (unusedBits !== 0) throw new Error("SignedExact: expected 0 unused bits in signature") + const signature = outer.take(sigLen - 1) + + // Extract X25519 key from SubjectPublicKeyInfo + const dhKey = decodePubKeyX25519(objectDer) + + return {objectDer, dhKey, algorithm, signature} +} diff --git a/xftp-web/src/protocol/transmission.ts b/xftp-web/src/protocol/transmission.ts new file mode 100644 index 000000000..6e7f95453 --- /dev/null +++ b/xftp-web/src/protocol/transmission.ts @@ -0,0 +1,121 @@ +// XFTP transmission framing — Simplex.Messaging.Transport + FileTransfer.Protocol +// +// Handles block-level pad/unpad, batch encoding, and Ed25519 auth signing. 
+ +import { + Decoder, concatBytes, + encodeBytes, decodeBytes, + encodeLarge, decodeLarge +} from "./encoding.js" +import {sign} from "../crypto/keys.js" + +// ── Constants ───────────────────────────────────────────────────── + +export const XFTP_BLOCK_SIZE = 16384 + +// Protocol versions (FileTransfer.Transport) +export const initialXFTPVersion = 1 +export const authCmdsXFTPVersion = 2 +export const blockedFilesXFTPVersion = 3 +export const currentXFTPVersion = 3 + +// ── Block-level pad/unpad (Crypto.hs:pad/unPad, strict ByteString) ── + +export function blockPad(msg: Uint8Array, blockSize: number = XFTP_BLOCK_SIZE): Uint8Array { + const len = msg.length + const padLen = blockSize - len - 2 + if (padLen < 0) throw new Error("blockPad: message too large for block") + const result = new Uint8Array(blockSize) + result[0] = (len >>> 8) & 0xff + result[1] = len & 0xff + result.set(msg, 2) + result.fill(0x23, 2 + len) // '#' padding + return result +} + +export function blockUnpad(block: Uint8Array): Uint8Array { + if (block.length < 2) throw new Error("blockUnpad: too short") + const len = (block[0] << 8) | block[1] + if (2 + len > block.length) throw new Error("blockUnpad: invalid length") + return block.subarray(2, 2 + len) +} + +// ── Transmission encoding (client -> server) ────────────────────── + +// Encode an authenticated XFTP command as a padded block. +// Matches xftpEncodeAuthTransmission (implySessId = True). +// +// sessionId: TLS session ID (typically 32 bytes) +// corrId: correlation ID (ByteString) +// entityId: file entity ID (ByteString, empty for FNEW/PING) +// cmdBytes: encoded command (from encodeFNEW, encodeFGET, etc.) 
+// privateKey: Ed25519 private key (64-byte libsodium format) +export function encodeAuthTransmission( + sessionId: Uint8Array, + corrId: Uint8Array, + entityId: Uint8Array, + cmdBytes: Uint8Array, + privateKey: Uint8Array +): Uint8Array { + // t' = encodeTransmission_ v t = smpEncode (corrId, entityId) <> cmdBytes + const tInner = concatBytes(encodeBytes(corrId), encodeBytes(entityId), cmdBytes) + // tForAuth = smpEncode sessionId <> t' (implySessId = True) + const tForAuth = concatBytes(encodeBytes(sessionId), tInner) + // Ed25519 sign (nonce ignored for Ed25519 in Haskell sign') + const signature = sign(privateKey, tForAuth) + // tEncodeAuth False (Just (TASignature sig, Nothing)) = smpEncode (signatureBytes sig) + const authenticator = encodeBytes(signature) + // tEncode False (auth, tToSend) = authenticator <> tToSend + // tToSend = t' (since implySessId = True, no sessionId in wire) + const encoded = concatBytes(authenticator, tInner) + // tEncodeBatch1 False = \x01 + encodeLarge(encoded) + const batch = concatBytes(new Uint8Array([1]), encodeLarge(encoded)) + // pad to blockSize + return blockPad(batch) +} + +// Encode an unsigned XFTP command (e.g. PING) as a padded block. +// Matches xftpEncodeTransmission (implySessId = True). +export function encodeTransmission( + corrId: Uint8Array, + entityId: Uint8Array, + cmdBytes: Uint8Array +): Uint8Array { + const tInner = concatBytes(encodeBytes(corrId), encodeBytes(entityId), cmdBytes) + // No auth: tEncodeAuth False Nothing = smpEncode B.empty = \x00 + const authenticator = encodeBytes(new Uint8Array(0)) + const encoded = concatBytes(authenticator, tInner) + const batch = concatBytes(new Uint8Array([1]), encodeLarge(encoded)) + return blockPad(batch) +} + +// ── Transmission decoding (server -> client) ────────────────────── + +export interface DecodedTransmission { + corrId: Uint8Array + entityId: Uint8Array + command: Uint8Array +} + +// Decode a server response block into raw parts. 
+// Call decodeResponse(command) from commands.ts to parse the response. +// Matches xftpDecodeTClient (implySessId = True). +export function decodeTransmission(block: Uint8Array): DecodedTransmission { + // unPad + const raw = blockUnpad(block) + const d = new Decoder(raw) + // Read batch count (must be 1) + const count = d.anyByte() + if (count !== 1) throw new Error("decodeTransmission: expected batch count 1, got " + count) + // Read Large-encoded transmission + const transmission = decodeLarge(d) + const td = new Decoder(transmission) + // Skip authenticator (server responses have empty auth) + decodeBytes(td) + // Read corrId and entityId + const corrId = decodeBytes(td) + const entityId = decodeBytes(td) + // Remaining bytes are the response command + const command = td.takeAll() + return {corrId, entityId, command} +}