From 82445ec8d5cb969eb2d13b0ac851431ec85a936a Mon Sep 17 00:00:00 2001
From: Evgeny Poberezkin <2769109+epoberezkin@users.noreply.github.com>
Date: Mon, 16 May 2022 19:27:58 +0100
Subject: [PATCH] android: refactor webrtc calls, compress webrtc session info,
make compatible with Safari (with flag) (#642)
* use simplex.chat relay
* update webrtc settings
* WebRTCView to use command/response types
* compress WebRTC session descriptions, simple web UI for calls
* update webrtc ui
* use web worker in desktop browser
* use RTCRtpScriptTransform in safari
* update android type
* refactor
* add await
---
.../android/app/src/main/assets/www/call.html | 1 +
apps/android/app/src/main/assets/www/call.js | 958 +++++++-------
.../app/src/main/assets/www/lz-string.min.js | 1 +
.../java/chat/simplex/app/model/SimpleXAPI.kt | 6 +-
.../chat/simplex/app/views/call/CallView.kt | 211 ++--
.../chat/simplex/app/views/call/WebRTC.kt | 35 +-
.../app/views/usersettings/SettingsView.kt | 4 +-
apps/ios/Shared/Model/Shared/CallTypes.swift | 4 +-
apps/ios/Shared/Model/SimpleXAPI.swift | 8 +-
apps/ios/Shared/Views/Call/WebRTC.swift | 122 +-
apps/ios/Shared/Views/Call/WebRTCView.swift | 139 ++-
apps/ios/Shared/Views/Chat/ChatView.swift | 2 +-
.../Views/UserSettings/SettingsView.swift | 10 +-
packages/simplex-chat-webrtc/copy | 4 +
packages/simplex-chat-webrtc/package.json | 4 +
packages/simplex-chat-webrtc/src/call.html | 1 +
packages/simplex-chat-webrtc/src/call.ts | 1099 +++++++++--------
packages/simplex-chat-webrtc/src/ui.js | 115 ++
packages/simplex-chat-webrtc/src/webcall.html | 51 +
packages/simplex-chat-webrtc/tsconfig.json | 3 +-
src/Simplex/Chat/Call.hs | 6 +-
tests/ChatTests.hs | 2 +-
22 files changed, 1612 insertions(+), 1174 deletions(-)
create mode 100644 apps/android/app/src/main/assets/www/lz-string.min.js
create mode 100644 packages/simplex-chat-webrtc/src/ui.js
create mode 100644 packages/simplex-chat-webrtc/src/webcall.html
diff --git a/apps/android/app/src/main/assets/www/call.html b/apps/android/app/src/main/assets/www/call.html
index fd3019e8eb..a188d38394 100644
--- a/apps/android/app/src/main/assets/www/call.html
+++ b/apps/android/app/src/main/assets/www/call.html
@@ -2,6 +2,7 @@
+
diff --git a/apps/android/app/src/main/assets/www/call.js b/apps/android/app/src/main/assets/www/call.js
index e74864b9dd..b5381b3cd9 100644
--- a/apps/android/app/src/main/assets/www/call.js
+++ b/apps/android/app/src/main/assets/www/call.js
@@ -6,464 +6,536 @@ var CallMediaType;
CallMediaType["Audio"] = "audio";
CallMediaType["Video"] = "video";
})(CallMediaType || (CallMediaType = {}));
-const keyAlgorithm = {
- name: "AES-GCM",
- length: 256,
-};
-const keyUsages = ["encrypt", "decrypt"];
-let activeCall;
-const IV_LENGTH = 12;
-const initialPlainTextRequired = {
- key: 10,
- delta: 3,
- undefined: 1,
-};
-function defaultCallConfig(encodedInsertableStreams) {
- return {
- peerConnectionConfig: {
- iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
- iceCandidatePoolSize: 10,
- encodedInsertableStreams,
- },
- iceCandidates: {
- delay: 2000,
- extrasInterval: 2000,
- extrasTimeout: 8000,
- },
- };
-}
-async function initializeCall(config, mediaType, aesKey) {
- const conn = new RTCPeerConnection(config.peerConnectionConfig);
- const remoteStream = new MediaStream();
- const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType));
- await setUpMediaStreams(conn, localStream, remoteStream, aesKey);
- conn.addEventListener("connectionstatechange", connectionStateChange);
- const iceCandidates = new Promise((resolve, _) => {
- let candidates = [];
- let resolved = false;
- let extrasInterval;
- let extrasTimeout;
- const delay = setTimeout(() => {
- if (!resolved) {
- resolveIceCandidates();
- extrasInterval = setInterval(() => {
- sendIceCandidates();
- }, config.iceCandidates.extrasInterval);
- extrasTimeout = setTimeout(() => {
- clearInterval(extrasInterval);
- sendIceCandidates();
- }, config.iceCandidates.extrasTimeout);
- }
- }, config.iceCandidates.delay);
- conn.onicecandidate = ({ candidate: c }) => c && candidates.push(c);
- conn.onicegatheringstatechange = () => {
- if (conn.iceGatheringState == "complete") {
- if (resolved) {
- if (extrasInterval)
- clearInterval(extrasInterval);
- if (extrasTimeout)
- clearTimeout(extrasTimeout);
- sendIceCandidates();
- }
- else {
+// for debugging
+// var sendMessageToNative = ({resp}: WVApiMessage) => console.log(JSON.stringify({command: resp}))
+var sendMessageToNative = (msg) => console.log(JSON.stringify(msg));
+// Global object with cryptographic/encoding functions
+const callCrypto = callCryptoFunction();
+var TransformOperation;
+(function (TransformOperation) {
+ TransformOperation["Encrypt"] = "encrypt";
+ TransformOperation["Decrypt"] = "decrypt";
+})(TransformOperation || (TransformOperation = {}));
+;
+(function () {
+ let activeCall;
+ function defaultCallConfig(encodedInsertableStreams) {
+ return {
+ peerConnectionConfig: {
+ iceServers: [
+ { urls: "stun:stun.simplex.chat:5349" },
+ // {urls: "turn:turn.simplex.chat:5349", username: "private", credential: "yleob6AVkiNI87hpR94Z"},
+ ],
+ iceCandidatePoolSize: 10,
+ encodedInsertableStreams,
+ // iceTransportPolicy: "relay",
+ },
+ iceCandidates: {
+ delay: 2000,
+ extrasInterval: 2000,
+ extrasTimeout: 8000,
+ },
+ };
+ }
+ async function initializeCall(config, mediaType, aesKey, useWorker) {
+ const conn = new RTCPeerConnection(config.peerConnectionConfig);
+ const remoteStream = new MediaStream();
+ const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType));
+ await setUpMediaStreams(conn, localStream, remoteStream, aesKey, useWorker);
+ conn.addEventListener("connectionstatechange", connectionStateChange);
+ const iceCandidates = new Promise((resolve, _) => {
+ let candidates = [];
+ let resolved = false;
+ let extrasInterval;
+ let extrasTimeout;
+ const delay = setTimeout(() => {
+ if (!resolved) {
resolveIceCandidates();
+ extrasInterval = setInterval(() => {
+ sendIceCandidates();
+ }, config.iceCandidates.extrasInterval);
+ extrasTimeout = setTimeout(() => {
+ clearInterval(extrasInterval);
+ sendIceCandidates();
+ }, config.iceCandidates.extrasTimeout);
}
+ }, config.iceCandidates.delay);
+ conn.onicecandidate = ({ candidate: c }) => c && candidates.push(c);
+ conn.onicegatheringstatechange = () => {
+ if (conn.iceGatheringState == "complete") {
+ if (resolved) {
+ if (extrasInterval)
+ clearInterval(extrasInterval);
+ if (extrasTimeout)
+ clearTimeout(extrasTimeout);
+ sendIceCandidates();
+ }
+ else {
+ resolveIceCandidates();
+ }
+ }
+ };
+ function resolveIceCandidates() {
+ if (delay)
+ clearTimeout(delay);
+ resolved = true;
+ const iceCandidates = serialize(candidates);
+ candidates = [];
+ resolve(iceCandidates);
+ }
+ function sendIceCandidates() {
+ if (candidates.length === 0)
+ return;
+ const iceCandidates = serialize(candidates);
+ candidates = [];
+ sendMessageToNative({ resp: { type: "ice", iceCandidates } });
+ }
+ });
+ return { connection: conn, iceCandidates, localMedia: mediaType, localStream };
+ function connectionStateChange() {
+ sendMessageToNative({
+ resp: {
+ type: "connection",
+ state: {
+ connectionState: conn.connectionState,
+ iceConnectionState: conn.iceConnectionState,
+ iceGatheringState: conn.iceGatheringState,
+ signalingState: conn.signalingState,
+ },
+ },
+ });
+ if (conn.connectionState == "disconnected" || conn.connectionState == "failed") {
+ conn.removeEventListener("connectionstatechange", connectionStateChange);
+ sendMessageToNative({ resp: { type: "ended" } });
+ conn.close();
+ activeCall = undefined;
+ resetVideoElements();
+ }
+ }
+ }
+ function serialize(x) {
+ return LZString.compressToBase64(JSON.stringify(x));
+ }
+ function parse(s) {
+ return JSON.parse(LZString.decompressFromBase64(s));
+ }
+ Object.defineProperty(window, "processCommand", { value: processCommand });
+ async function processCommand(body) {
+ const { corrId, command } = body;
+ const pc = activeCall === null || activeCall === void 0 ? void 0 : activeCall.connection;
+ let resp;
+ try {
+ switch (command.type) {
+ case "capabilities":
+ const encryption = supportsInsertableStreams(command.useWorker);
+ resp = { type: "capabilities", capabilities: { encryption } };
+ break;
+ case "start":
+ console.log("starting call");
+ if (activeCall) {
+ resp = { type: "error", message: "start: call already started" };
+ }
+ else if (!supportsInsertableStreams(command.useWorker) && command.aesKey) {
+ resp = { type: "error", message: "start: encryption is not supported" };
+ }
+ else {
+ const { media, useWorker } = command;
+ const encryption = supportsInsertableStreams(useWorker);
+ const aesKey = encryption ? command.aesKey : undefined;
+ activeCall = await initializeCall(defaultCallConfig(encryption && !!aesKey), media, aesKey, useWorker);
+ const pc = activeCall.connection;
+ const offer = await pc.createOffer();
+ await pc.setLocalDescription(offer);
+ // for debugging, returning the command for callee to use
+ // resp = {
+ // type: "offer",
+ // offer: serialize(offer),
+ // iceCandidates: await activeCall.iceCandidates,
+ // media,
+ // aesKey,
+ // }
+ resp = {
+ type: "offer",
+ offer: serialize(offer),
+ iceCandidates: await activeCall.iceCandidates,
+ capabilities: { encryption },
+ };
+ }
+ break;
+ case "offer":
+ if (activeCall) {
+ resp = { type: "error", message: "accept: call already started" };
+ }
+ else if (!supportsInsertableStreams(command.useWorker) && command.aesKey) {
+ resp = { type: "error", message: "accept: encryption is not supported" };
+ }
+ else {
+ const offer = parse(command.offer);
+ const remoteIceCandidates = parse(command.iceCandidates);
+ const { media, aesKey, useWorker } = command;
+ activeCall = await initializeCall(defaultCallConfig(!!aesKey), media, aesKey, useWorker);
+ const pc = activeCall.connection;
+ await pc.setRemoteDescription(new RTCSessionDescription(offer));
+ const answer = await pc.createAnswer();
+ await pc.setLocalDescription(answer);
+ addIceCandidates(pc, remoteIceCandidates);
+ // same as command for caller to use
+ resp = {
+ type: "answer",
+ answer: serialize(answer),
+ iceCandidates: await activeCall.iceCandidates,
+ };
+ }
+ break;
+ case "answer":
+ if (!pc) {
+ resp = { type: "error", message: "answer: call not started" };
+ }
+ else if (!pc.localDescription) {
+ resp = { type: "error", message: "answer: local description is not set" };
+ }
+ else if (pc.currentRemoteDescription) {
+ resp = { type: "error", message: "answer: remote description already set" };
+ }
+ else {
+ const answer = parse(command.answer);
+ const remoteIceCandidates = parse(command.iceCandidates);
+ await pc.setRemoteDescription(new RTCSessionDescription(answer));
+ addIceCandidates(pc, remoteIceCandidates);
+ resp = { type: "ok" };
+ }
+ break;
+ case "ice":
+ if (pc) {
+ const remoteIceCandidates = parse(command.iceCandidates);
+ addIceCandidates(pc, remoteIceCandidates);
+ resp = { type: "ok" };
+ }
+ else {
+ resp = { type: "error", message: "ice: call not started" };
+ }
+ break;
+ case "media":
+ if (!activeCall) {
+ resp = { type: "error", message: "media: call not started" };
+ }
+ else if (activeCall.localMedia == CallMediaType.Audio && command.media == CallMediaType.Video) {
+ resp = { type: "error", message: "media: no video" };
+ }
+ else {
+ enableMedia(activeCall.localStream, command.media, command.enable);
+ resp = { type: "ok" };
+ }
+ break;
+ case "end":
+ if (pc) {
+ pc.close();
+ activeCall = undefined;
+ resetVideoElements();
+ resp = { type: "ok" };
+ }
+ else {
+ resp = { type: "error", message: "end: call not started" };
+ }
+ break;
+ default:
+ resp = { type: "error", message: "unknown command" };
+ break;
+ }
+ }
+ catch (e) {
+ resp = { type: "error", message: e.message };
+ }
+ const apiResp = { corrId, resp, command };
+ sendMessageToNative(apiResp);
+ return apiResp;
+ }
+ function addIceCandidates(conn, iceCandidates) {
+ for (const c of iceCandidates) {
+ conn.addIceCandidate(new RTCIceCandidate(c));
+ }
+ }
+ async function setUpMediaStreams(pc, localStream, remoteStream, aesKey, useWorker) {
+ var _a;
+ const videos = getVideoElements();
+ if (!videos)
+ throw Error("no video elements");
+ let key;
+ let worker;
+ if (aesKey) {
+ key = await callCrypto.decodeAesKey(aesKey);
+ if (useWorker) {
+ const workerCode = `const callCrypto = (${callCryptoFunction.toString()})(); (${workerFunction.toString()})()`;
+ worker = new Worker(URL.createObjectURL(new Blob([workerCode], { type: "text/javascript" })));
+ }
+ }
+ for (const track of localStream.getTracks()) {
+ pc.addTrack(track, localStream);
+ }
+ if (aesKey && key) {
+ console.log("set up encryption for sending");
+ for (const sender of pc.getSenders()) {
+ setupPeerTransform(TransformOperation.Encrypt, sender, worker, aesKey, key);
+ }
+ }
+    // Pull tracks from the remote stream as they arrive and add them to the remoteStream video
+ pc.ontrack = (event) => {
+ if (aesKey && key) {
+ console.log("set up decryption for receiving");
+ setupPeerTransform(TransformOperation.Decrypt, event.receiver, worker, aesKey, key);
+ }
+ remoteStream.addTrack(event.track);
+ };
+ // We assume VP8 encoding in the decode/encode stages to get the initial
+ // bytes to pass as plaintext so we enforce that here.
+    // VP8 is supported by all browsers that support webrtc.
+ // Use of VP8 by default may also reduce depacketisation issues.
+ // We do not encrypt the first couple of bytes of the payload so that the
+ // video elements can work by determining video keyframes and the opus mode
+ // being used. This appears to be necessary for any video feed at all.
+ // For VP8 this is the content described in
+ // https://tools.ietf.org/html/rfc6386#section-9.1
+ // which is 10 bytes for key frames and 3 bytes for delta frames.
+ // For opus (where encodedFrame.type is not set) this is the TOC byte from
+ // https://tools.ietf.org/html/rfc6716#section-3.1
+ const capabilities = RTCRtpSender.getCapabilities("video");
+ if (capabilities) {
+ const { codecs } = capabilities;
+ const selectedCodecIndex = codecs.findIndex((c) => c.mimeType === "video/VP8");
+ const selectedCodec = codecs[selectedCodecIndex];
+ codecs.splice(selectedCodecIndex, 1);
+ codecs.unshift(selectedCodec);
+ for (const t of pc.getTransceivers()) {
+ if (((_a = t.sender.track) === null || _a === void 0 ? void 0 : _a.kind) === "video") {
+ t.setCodecPreferences(codecs);
+ }
+ }
+ }
+ // setupVideoElement(videos.local)
+ // setupVideoElement(videos.remote)
+ videos.local.srcObject = localStream;
+ videos.remote.srcObject = remoteStream;
+ }
+ function setupPeerTransform(operation, peer, worker, aesKey, key) {
+ if (worker && "RTCRtpScriptTransform" in window) {
+ console.log(`${operation} with worker & RTCRtpScriptTransform`);
+ peer.transform = new RTCRtpScriptTransform(worker, { operation, aesKey });
+ }
+ else if ("createEncodedStreams" in peer) {
+ const { readable, writable } = peer.createEncodedStreams();
+ if (worker) {
+ console.log(`${operation} with worker`);
+ worker.postMessage({ operation, readable, writable, aesKey }, [readable, writable]);
+ }
+ else {
+ console.log(`${operation} without worker`);
+ const transform = callCrypto.transformFrame[operation](key);
+ readable.pipeThrough(new TransformStream({ transform })).pipeTo(writable);
+ }
+ }
+ else {
+ console.log(`no ${operation}`);
+ }
+ }
+ function callMediaConstraints(mediaType) {
+ switch (mediaType) {
+ case CallMediaType.Audio:
+ return { audio: true, video: false };
+ case CallMediaType.Video:
+ return {
+ audio: true,
+ video: {
+ frameRate: 24,
+ width: {
+ min: 480,
+ ideal: 720,
+ max: 1280,
+ },
+ aspectRatio: 1.33,
+ },
+ };
+ }
+ }
+ function supportsInsertableStreams(useWorker) {
+ return (("createEncodedStreams" in RTCRtpSender.prototype && "createEncodedStreams" in RTCRtpReceiver.prototype) ||
+ (!!useWorker && "RTCRtpScriptTransform" in window));
+ }
+ function resetVideoElements() {
+ const videos = getVideoElements();
+ if (!videos)
+ return;
+ videos.local.srcObject = null;
+ videos.remote.srcObject = null;
+ }
+ function getVideoElements() {
+ const local = document.getElementById("local-video-stream");
+ const remote = document.getElementById("remote-video-stream");
+ if (!(local && remote && local instanceof HTMLMediaElement && remote instanceof HTMLMediaElement))
+ return;
+ return { local, remote };
+ }
+ // function setupVideoElement(video: HTMLElement) {
+ // // TODO use display: none
+ // video.style.opacity = "0"
+ // video.onplaying = () => {
+ // video.style.opacity = "1"
+ // }
+ // }
+ function enableMedia(s, media, enable) {
+ const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks();
+ for (const t of tracks)
+ t.enabled = enable;
+ }
+})();
+// Cryptography function - it is loaded both in the main window and in worker context (if the worker is used)
+function callCryptoFunction() {
+ const initialPlainTextRequired = {
+ key: 10,
+ delta: 3,
+ };
+ const IV_LENGTH = 12;
+ function encryptFrame(key) {
+ return async (frame, controller) => {
+ const data = new Uint8Array(frame.data);
+ const n = initialPlainTextRequired[frame.type] || 1;
+ const iv = randomIV();
+ const initial = data.subarray(0, n);
+ const plaintext = data.subarray(n, data.byteLength);
+ try {
+ const ciphertext = await crypto.subtle.encrypt({ name: "AES-GCM", iv: iv.buffer }, key, plaintext);
+ frame.data = concatN(initial, new Uint8Array(ciphertext), iv).buffer;
+ controller.enqueue(frame);
+ }
+ catch (e) {
+ console.log(`encryption error ${e}`);
+ throw e;
}
};
- function resolveIceCandidates() {
- if (delay)
- clearTimeout(delay);
- resolved = true;
- const iceCandidates = candidates.map((c) => JSON.stringify(c));
- candidates = [];
- resolve(iceCandidates);
- }
- function sendIceCandidates() {
- if (candidates.length === 0)
- return;
- const iceCandidates = candidates.map((c) => JSON.stringify(c));
- candidates = [];
- sendMessageToNative({ resp: { type: "ice", iceCandidates } });
- }
- });
- return { connection: conn, iceCandidates, localMedia: mediaType, localStream };
- function connectionStateChange() {
- sendMessageToNative({
- resp: {
- type: "connection",
- state: {
- connectionState: conn.connectionState,
- iceConnectionState: conn.iceConnectionState,
- iceGatheringState: conn.iceGatheringState,
- signalingState: conn.signalingState,
- },
- },
- });
- if (conn.connectionState == "disconnected" || conn.connectionState == "failed") {
- conn.removeEventListener("connectionstatechange", connectionStateChange);
- sendMessageToNative({ resp: { type: "ended" } });
- conn.close();
- activeCall = undefined;
- resetVideoElements();
- }
}
-}
-var sendMessageToNative = (msg) => console.log(JSON.stringify(msg));
-async function processCommand(body) {
- const { corrId, command } = body;
- const pc = activeCall === null || activeCall === void 0 ? void 0 : activeCall.connection;
- let resp;
- try {
- switch (command.type) {
- case "capabilities":
- const encryption = supportsInsertableStreams();
- resp = { type: "capabilities", capabilities: { encryption } };
- break;
- case "start":
- console.log("starting call");
- if (activeCall) {
- resp = { type: "error", message: "start: call already started" };
- }
- else if (!supportsInsertableStreams() && command.aesKey) {
- resp = { type: "error", message: "start: encryption is not supported" };
- }
- else {
- const encryption = supportsInsertableStreams();
- const { media, aesKey } = command;
- activeCall = await initializeCall(defaultCallConfig(encryption && !!aesKey), media, encryption ? aesKey : undefined);
- const pc = activeCall.connection;
- const offer = await pc.createOffer();
- await pc.setLocalDescription(offer);
- // for debugging, returning the command for callee to use
- // resp = {type: "accept", offer: JSON.stringify(offer), iceCandidates: await iceCandidates, media, aesKey}
- resp = {
- type: "offer",
- offer: JSON.stringify(offer),
- iceCandidates: await activeCall.iceCandidates,
- capabilities: { encryption },
- };
- }
- break;
- case "accept":
- if (activeCall) {
- resp = { type: "error", message: "accept: call already started" };
- }
- else if (!supportsInsertableStreams() && command.aesKey) {
- resp = { type: "error", message: "accept: encryption is not supported" };
- }
- else {
- const offer = JSON.parse(command.offer);
- const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c));
- activeCall = await initializeCall(defaultCallConfig(!!command.aesKey), command.media, command.aesKey);
- const pc = activeCall.connection;
- await pc.setRemoteDescription(new RTCSessionDescription(offer));
- const answer = await pc.createAnswer();
- await pc.setLocalDescription(answer);
- addIceCandidates(pc, remoteIceCandidates);
- // same as command for caller to use
- resp = {
- type: "answer",
- answer: JSON.stringify(answer),
- iceCandidates: await activeCall.iceCandidates,
- };
- }
- break;
- case "answer":
- if (!pc) {
- resp = { type: "error", message: "answer: call not started" };
- }
- else if (!pc.localDescription) {
- resp = { type: "error", message: "answer: local description is not set" };
- }
- else if (pc.currentRemoteDescription) {
- resp = { type: "error", message: "answer: remote description already set" };
- }
- else {
- const answer = JSON.parse(command.answer);
- const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c));
- await pc.setRemoteDescription(new RTCSessionDescription(answer));
- addIceCandidates(pc, remoteIceCandidates);
- resp = { type: "ok" };
- }
- break;
- case "ice":
- if (pc) {
- const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c));
- addIceCandidates(pc, remoteIceCandidates);
- resp = { type: "ok" };
- }
- else {
- resp = { type: "error", message: "ice: call not started" };
- }
- break;
- case "media":
- if (!activeCall) {
- resp = { type: "error", message: "media: call not started" };
- }
- else if (activeCall.localMedia == CallMediaType.Audio && command.media == CallMediaType.Video) {
- resp = { type: "error", message: "media: no video" };
- }
- else {
- enableMedia(activeCall.localStream, command.media, command.enable);
- resp = { type: "ok" };
- }
- break;
- case "end":
- if (pc) {
- pc.close();
- activeCall = undefined;
- resetVideoElements();
- resp = { type: "ok" };
- }
- else {
- resp = { type: "error", message: "end: call not started" };
- }
- break;
- default:
- resp = { type: "error", message: "unknown command" };
- break;
- }
- }
- catch (e) {
- resp = { type: "error", message: e.message };
- }
- const apiResp = { corrId, resp, command };
- sendMessageToNative(apiResp);
- return apiResp;
-}
-function addIceCandidates(conn, iceCandidates) {
- for (const c of iceCandidates) {
- conn.addIceCandidate(new RTCIceCandidate(c));
- }
-}
-async function setUpMediaStreams(pc, localStream, remoteStream, aesKey) {
- var _a;
- const videos = getVideoElements();
- if (!videos)
- throw Error("no video elements");
- let key;
- if (aesKey) {
- const keyData = decodeBase64(encodeAscii(aesKey));
- if (keyData)
- key = await crypto.subtle.importKey("raw", keyData, keyAlgorithm, false, keyUsages);
- }
- for (const track of localStream.getTracks()) {
- pc.addTrack(track, localStream);
- }
- if (key) {
- console.log("set up encryption for sending");
- for (const sender of pc.getSenders()) {
- setupPeerTransform(sender, encodeFunction(key));
- }
- }
- // Pull tracks from remote stream as they arrive add them to remoteStream video
- pc.ontrack = (event) => {
- if (key) {
- console.log("set up decryption for receiving");
- setupPeerTransform(event.receiver, decodeFunction(key));
- }
- for (const track of event.streams[0].getTracks()) {
- remoteStream.addTrack(track);
- }
- };
- // We assume VP8 encoding in the decode/encode stages to get the initial
- // bytes to pass as plaintext so we enforce that here.
- // VP8 is supported by all supports of webrtc.
- // Use of VP8 by default may also reduce depacketisation issues.
- // We do not encrypt the first couple of bytes of the payload so that the
- // video elements can work by determining video keyframes and the opus mode
- // being used. This appears to be necessary for any video feed at all.
- // For VP8 this is the content described in
- // https://tools.ietf.org/html/rfc6386#section-9.1
- // which is 10 bytes for key frames and 3 bytes for delta frames.
- // For opus (where encodedFrame.type is not set) this is the TOC byte from
- // https://tools.ietf.org/html/rfc6716#section-3.1
- const capabilities = RTCRtpSender.getCapabilities("video");
- if (capabilities) {
- const { codecs } = capabilities;
- const selectedCodecIndex = codecs.findIndex((c) => c.mimeType === "video/VP8");
- const selectedCodec = codecs[selectedCodecIndex];
- codecs.splice(selectedCodecIndex, 1);
- codecs.unshift(selectedCodec);
- for (const t of pc.getTransceivers()) {
- if (((_a = t.sender.track) === null || _a === void 0 ? void 0 : _a.kind) === "video") {
- t.setCodecPreferences(codecs);
+ function decryptFrame(key) {
+ return async (frame, controller) => {
+ const data = new Uint8Array(frame.data);
+ const n = initialPlainTextRequired[frame.type] || 1;
+ const initial = data.subarray(0, n);
+ const ciphertext = data.subarray(n, data.byteLength - IV_LENGTH);
+ const iv = data.subarray(data.byteLength - IV_LENGTH, data.byteLength);
+ try {
+ const plaintext = await crypto.subtle.decrypt({ name: "AES-GCM", iv }, key, ciphertext);
+ frame.data = concatN(initial, new Uint8Array(plaintext)).buffer;
+ controller.enqueue(frame);
}
- }
+ catch (e) {
+ console.log(`decryption error ${e}`);
+ throw e;
+ }
+ };
}
- // setupVideoElement(videos.local)
- // setupVideoElement(videos.remote)
- videos.local.srcObject = localStream;
- videos.remote.srcObject = remoteStream;
-}
-function callMediaConstraints(mediaType) {
- switch (mediaType) {
- case CallMediaType.Audio:
- return { audio: true, video: false };
- case CallMediaType.Video:
- return {
- audio: true,
- video: {
- frameRate: 24,
- width: {
- min: 480,
- ideal: 720,
- max: 1280,
- },
- aspectRatio: 1.33,
- },
- };
+ function decodeAesKey(aesKey) {
+ const keyData = callCrypto.decodeBase64(callCrypto.encodeAscii(aesKey));
+ return crypto.subtle.importKey("raw", keyData, { name: "AES-GCM", length: 256 }, false, ["encrypt", "decrypt"]);
}
-}
-function supportsInsertableStreams() {
- return "createEncodedStreams" in RTCRtpSender.prototype && "createEncodedStreams" in RTCRtpReceiver.prototype;
-}
-function resetVideoElements() {
- const videos = getVideoElements();
- if (!videos)
- return;
- videos.local.srcObject = null;
- videos.remote.srcObject = null;
-}
-function getVideoElements() {
- const local = document.getElementById("local-video-stream");
- const remote = document.getElementById("remote-video-stream");
- if (!(local && remote && local instanceof HTMLMediaElement && remote instanceof HTMLMediaElement))
- return;
- return { local, remote };
-}
-// function setupVideoElement(video: HTMLElement) {
-// // TODO use display: none
-// video.style.opacity = "0"
-// video.onplaying = () => {
-// video.style.opacity = "1"
-// }
-// }
-function enableMedia(s, media, enable) {
- const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks();
- for (const t of tracks)
- t.enabled = enable;
-}
-/* Stream Transforms */
-function setupPeerTransform(peer, transform) {
- const streams = peer.createEncodedStreams();
- streams.readable.pipeThrough(new TransformStream({ transform })).pipeTo(streams.writable);
-}
-/* Cryptography */
-function encodeFunction(key) {
- return async (frame, controller) => {
- const data = new Uint8Array(frame.data);
- const n = frame instanceof RTCEncodedVideoFrame ? initialPlainTextRequired[frame.type] : 0;
- const iv = randomIV();
- const initial = data.subarray(0, n);
- const plaintext = data.subarray(n, data.byteLength);
- try {
- const ciphertext = await crypto.subtle.encrypt({ name: "AES-GCM", iv: iv.buffer }, key, plaintext);
- frame.data = concatN(initial, new Uint8Array(ciphertext), iv).buffer;
- controller.enqueue(frame);
- }
- catch (e) {
- console.log(`encryption error ${e}`);
- throw e;
- }
- };
-}
-function decodeFunction(key) {
- return async (frame, controller) => {
- const data = new Uint8Array(frame.data);
- const n = frame instanceof RTCEncodedVideoFrame ? initialPlainTextRequired[frame.type] : 0;
- const initial = data.subarray(0, n);
- const ciphertext = data.subarray(n, data.byteLength - IV_LENGTH);
- const iv = data.subarray(data.byteLength - IV_LENGTH, data.byteLength);
- try {
- const plaintext = await crypto.subtle.decrypt({ name: "AES-GCM", iv }, key, ciphertext);
- frame.data = concatN(initial, new Uint8Array(plaintext)).buffer;
- controller.enqueue(frame);
- }
- catch (e) {
- console.log(`decryption error ${e}`);
- throw e;
- }
- };
-}
-class RTCEncodedVideoFrame {
- constructor(type, data) {
- this.type = type;
- this.data = data;
+ function concatN(...bs) {
+ const a = new Uint8Array(bs.reduce((size, b) => size + b.byteLength, 0));
+ bs.reduce((offset, b) => {
+ a.set(b, offset);
+ return offset + b.byteLength;
+ }, 0);
+ return a;
}
-}
-function randomIV() {
- return crypto.getRandomValues(new Uint8Array(IV_LENGTH));
-}
-const char_equal = "=".charCodeAt(0);
-function concatN(...bs) {
- const a = new Uint8Array(bs.reduce((size, b) => size + b.byteLength, 0));
- bs.reduce((offset, b) => {
- a.set(b, offset);
- return offset + b.byteLength;
- }, 0);
- return a;
-}
-function encodeAscii(s) {
- const a = new Uint8Array(s.length);
- let i = s.length;
- while (i--)
- a[i] = s.charCodeAt(i);
- return a;
-}
-function decodeAscii(a) {
- let s = "";
- for (let i = 0; i < a.length; i++)
- s += String.fromCharCode(a[i]);
- return s;
-}
-const base64chars = new Uint8Array("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("").map((c) => c.charCodeAt(0)));
-const base64lookup = new Array(256);
-base64chars.forEach((c, i) => (base64lookup[c] = i));
-function encodeBase64(a) {
- const len = a.length;
- const b64len = Math.ceil(len / 3) * 4;
- const b64 = new Uint8Array(b64len);
- let j = 0;
- for (let i = 0; i < len; i += 3) {
- b64[j++] = base64chars[a[i] >> 2];
- b64[j++] = base64chars[((a[i] & 3) << 4) | (a[i + 1] >> 4)];
- b64[j++] = base64chars[((a[i + 1] & 15) << 2) | (a[i + 2] >> 6)];
- b64[j++] = base64chars[a[i + 2] & 63];
+ function randomIV() {
+ return crypto.getRandomValues(new Uint8Array(IV_LENGTH));
}
- if (len % 3)
- b64[b64len - 1] = char_equal;
- if (len % 3 === 1)
- b64[b64len - 2] = char_equal;
- return b64;
-}
-function decodeBase64(b64) {
- let len = b64.length;
- if (len % 4)
- return;
- let bLen = (len * 3) / 4;
- if (b64[len - 1] === char_equal) {
- len--;
- bLen--;
+ const base64chars = new Uint8Array("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("").map((c) => c.charCodeAt(0)));
+ const base64lookup = new Array(256);
+ base64chars.forEach((c, i) => (base64lookup[c] = i));
+ const char_equal = "=".charCodeAt(0);
+ function encodeAscii(s) {
+ const a = new Uint8Array(s.length);
+ let i = s.length;
+ while (i--)
+ a[i] = s.charCodeAt(i);
+ return a;
+ }
+ function decodeAscii(a) {
+ let s = "";
+ for (let i = 0; i < a.length; i++)
+ s += String.fromCharCode(a[i]);
+ return s;
+ }
+ function encodeBase64(a) {
+ const len = a.length;
+ const b64len = Math.ceil(len / 3) * 4;
+ const b64 = new Uint8Array(b64len);
+ let j = 0;
+ for (let i = 0; i < len; i += 3) {
+ b64[j++] = base64chars[a[i] >> 2];
+ b64[j++] = base64chars[((a[i] & 3) << 4) | (a[i + 1] >> 4)];
+ b64[j++] = base64chars[((a[i + 1] & 15) << 2) | (a[i + 2] >> 6)];
+ b64[j++] = base64chars[a[i + 2] & 63];
+ }
+ if (len % 3)
+ b64[b64len - 1] = char_equal;
+ if (len % 3 === 1)
+ b64[b64len - 2] = char_equal;
+ return b64;
+ }
+ function decodeBase64(b64) {
+ let len = b64.length;
+ if (len % 4)
+ return;
+ let bLen = (len * 3) / 4;
if (b64[len - 1] === char_equal) {
len--;
bLen--;
+ if (b64[len - 1] === char_equal) {
+ len--;
+ bLen--;
+ }
}
+ const bytes = new Uint8Array(bLen);
+ let i = 0;
+ let pos = 0;
+ while (i < len) {
+ const enc1 = base64lookup[b64[i++]];
+ const enc2 = i < len ? base64lookup[b64[i++]] : 0;
+ const enc3 = i < len ? base64lookup[b64[i++]] : 0;
+ const enc4 = i < len ? base64lookup[b64[i++]] : 0;
+ if (enc1 === undefined || enc2 === undefined || enc3 === undefined || enc4 === undefined)
+ return;
+ bytes[pos++] = (enc1 << 2) | (enc2 >> 4);
+ bytes[pos++] = ((enc2 & 15) << 4) | (enc3 >> 2);
+ bytes[pos++] = ((enc3 & 3) << 6) | (enc4 & 63);
+ }
+ return bytes;
}
- const bytes = new Uint8Array(bLen);
- let i = 0;
- let pos = 0;
- while (i < len) {
- const enc1 = base64lookup[b64[i++]];
- const enc2 = i < len ? base64lookup[b64[i++]] : 0;
- const enc3 = i < len ? base64lookup[b64[i++]] : 0;
- const enc4 = i < len ? base64lookup[b64[i++]] : 0;
- if (enc1 === undefined || enc2 === undefined || enc3 === undefined || enc4 === undefined)
- return;
- bytes[pos++] = (enc1 << 2) | (enc2 >> 4);
- bytes[pos++] = ((enc2 & 15) << 4) | (enc3 >> 2);
- bytes[pos++] = ((enc3 & 3) << 6) | (enc4 & 63);
+ return {
+ transformFrame: { encrypt: encryptFrame, decrypt: decryptFrame },
+ decodeAesKey,
+ encodeAscii,
+ decodeAscii,
+ encodeBase64,
+ decodeBase64,
+ };
+}
+// If the worker is used for decryption, this function code (as string) is used to load the worker via Blob
+// We have to use worker optionally, as it crashes in Android web view, regardless of how it is loaded
+function workerFunction() {
+ // encryption with createEncodedStreams support
+ self.addEventListener("message", async ({ data }) => {
+ await setupTransform(data);
+ });
+ // encryption using RTCRtpScriptTransform.
+ if ("RTCTransformEvent" in self) {
+ self.addEventListener("rtctransform", async ({ transformer }) => {
+ const { operation, aesKey } = transformer.options;
+ const { readable, writable } = transformer;
+ await setupTransform({ operation, aesKey, readable, writable });
+ });
+ }
+ async function setupTransform({ operation, aesKey, readable, writable }) {
+ const key = await callCrypto.decodeAesKey(aesKey);
+ const transform = callCrypto.transformFrame[operation](key);
+ readable.pipeThrough(new TransformStream({ transform })).pipeTo(writable);
}
- return bytes;
}
//# sourceMappingURL=call.js.map
\ No newline at end of file
diff --git a/apps/android/app/src/main/assets/www/lz-string.min.js b/apps/android/app/src/main/assets/www/lz-string.min.js
new file mode 100644
index 0000000000..2d1900a0d3
--- /dev/null
+++ b/apps/android/app/src/main/assets/www/lz-string.min.js
@@ -0,0 +1 @@
+var LZString=function(){function o(o,r){if(!t[o]){t[o]={};for(var n=0;ne;e++){var s=r.charCodeAt(e);n[2*e]=s>>>8,n[2*e+1]=s%256}return n},decompressFromUint8Array:function(o){if(null===o||void 0===o)return i.decompress(o);for(var n=new Array(o.length/2),e=0,t=n.length;t>e;e++)n[e]=256*o[2*e]+o[2*e+1];var s=[];return n.forEach(function(o){s.push(r(o))}),i.decompress(s.join(""))},compressToEncodedURIComponent:function(o){return null==o?"":i._compress(o,6,function(o){return e.charAt(o)})},decompressFromEncodedURIComponent:function(r){return null==r?"":""==r?null:(r=r.replace(/ /g,"+"),i._decompress(r.length,32,function(n){return o(e,r.charAt(n))}))},compress:function(o){return i._compress(o,16,function(o){return r(o)})},_compress:function(o,r,n){if(null==o)return"";var e,t,i,s={},p={},u="",c="",a="",l=2,f=3,h=2,d=[],m=0,v=0;for(i=0;ie;e++)m<<=1,v==r-1?(v=0,d.push(n(m)),m=0):v++;for(t=a.charCodeAt(0),e=0;8>e;e++)m=m<<1|1&t,v==r-1?(v=0,d.push(n(m)),m=0):v++,t>>=1}else{for(t=1,e=0;h>e;e++)m=m<<1|t,v==r-1?(v=0,d.push(n(m)),m=0):v++,t=0;for(t=a.charCodeAt(0),e=0;16>e;e++)m=m<<1|1&t,v==r-1?(v=0,d.push(n(m)),m=0):v++,t>>=1}l--,0==l&&(l=Math.pow(2,h),h++),delete p[a]}else for(t=s[a],e=0;h>e;e++)m=m<<1|1&t,v==r-1?(v=0,d.push(n(m)),m=0):v++,t>>=1;l--,0==l&&(l=Math.pow(2,h),h++),s[c]=f++,a=String(u)}if(""!==a){if(Object.prototype.hasOwnProperty.call(p,a)){if(a.charCodeAt(0)<256){for(e=0;h>e;e++)m<<=1,v==r-1?(v=0,d.push(n(m)),m=0):v++;for(t=a.charCodeAt(0),e=0;8>e;e++)m=m<<1|1&t,v==r-1?(v=0,d.push(n(m)),m=0):v++,t>>=1}else{for(t=1,e=0;h>e;e++)m=m<<1|t,v==r-1?(v=0,d.push(n(m)),m=0):v++,t=0;for(t=a.charCodeAt(0),e=0;16>e;e++)m=m<<1|1&t,v==r-1?(v=0,d.push(n(m)),m=0):v++,t>>=1}l--,0==l&&(l=Math.pow(2,h),h++),delete p[a]}else for(t=s[a],e=0;h>e;e++)m=m<<1|1&t,v==r-1?(v=0,d.push(n(m)),m=0):v++,t>>=1;l--,0==l&&(l=Math.pow(2,h),h++)}for(t=2,e=0;h>e;e++)m=m<<1|1&t,v==r-1?(v=0,d.push(n(m)),m=0):v++,t>>=1;for(;;){if(m<<=1,v==r-1){d.push(n(m));break}v++}return 
d.join("")},decompress:function(o){return null==o?"":""==o?null:i._decompress(o.length,32768,function(r){return o.charCodeAt(r)})},_decompress:function(o,n,e){var t,i,s,p,u,c,a,l,f=[],h=4,d=4,m=3,v="",w=[],A={val:e(0),position:n,index:1};for(i=0;3>i;i+=1)f[i]=i;for(p=0,c=Math.pow(2,2),a=1;a!=c;)u=A.val&A.position,A.position>>=1,0==A.position&&(A.position=n,A.val=e(A.index++)),p|=(u>0?1:0)*a,a<<=1;switch(t=p){case 0:for(p=0,c=Math.pow(2,8),a=1;a!=c;)u=A.val&A.position,A.position>>=1,0==A.position&&(A.position=n,A.val=e(A.index++)),p|=(u>0?1:0)*a,a<<=1;l=r(p);break;case 1:for(p=0,c=Math.pow(2,16),a=1;a!=c;)u=A.val&A.position,A.position>>=1,0==A.position&&(A.position=n,A.val=e(A.index++)),p|=(u>0?1:0)*a,a<<=1;l=r(p);break;case 2:return""}for(f[3]=l,s=l,w.push(l);;){if(A.index>o)return"";for(p=0,c=Math.pow(2,m),a=1;a!=c;)u=A.val&A.position,A.position>>=1,0==A.position&&(A.position=n,A.val=e(A.index++)),p|=(u>0?1:0)*a,a<<=1;switch(l=p){case 0:for(p=0,c=Math.pow(2,8),a=1;a!=c;)u=A.val&A.position,A.position>>=1,0==A.position&&(A.position=n,A.val=e(A.index++)),p|=(u>0?1:0)*a,a<<=1;f[d++]=r(p),l=d-1,h--;break;case 1:for(p=0,c=Math.pow(2,16),a=1;a!=c;)u=A.val&A.position,A.position>>=1,0==A.position&&(A.position=n,A.val=e(A.index++)),p|=(u>0?1:0)*a,a<<=1;f[d++]=r(p),l=d-1,h--;break;case 2:return w.join("")}if(0==h&&(h=Math.pow(2,m),m++),f[l])v=f[l];else{if(l!==d)return null;v=s+s.charAt(0)}w.push(v),f[d++]=s+v.charAt(0),h--,s=v,0==h&&(h=Math.pow(2,m),m++)}}};return i}();"function"==typeof define&&define.amd?define(function(){return LZString}):"undefined"!=typeof module&&null!=module&&(module.exports=LZString);
diff --git a/apps/android/app/src/main/java/chat/simplex/app/model/SimpleXAPI.kt b/apps/android/app/src/main/java/chat/simplex/app/model/SimpleXAPI.kt
index daa0e62041..3078cb4d70 100644
--- a/apps/android/app/src/main/java/chat/simplex/app/model/SimpleXAPI.kt
+++ b/apps/android/app/src/main/java/chat/simplex/app/model/SimpleXAPI.kt
@@ -338,20 +338,20 @@ open class ChatController(private val ctrl: ChatCtrl, private val ntfManager: Nt
return r is CR.CmdOk
}
- suspend fun apiSendCallOffer(contact: Contact, rtcSession: String, rtcIceCandidates: List, media: CallMediaType, capabilities: CallCapabilities): Boolean {
+ suspend fun apiSendCallOffer(contact: Contact, rtcSession: String, rtcIceCandidates: String, media: CallMediaType, capabilities: CallCapabilities): Boolean {
val webRtcSession = WebRTCSession(rtcSession, rtcIceCandidates)
val callOffer = WebRTCCallOffer(CallType(media, capabilities), webRtcSession)
val r = sendCmd(CC.ApiSendCallOffer(contact, callOffer))
return r is CR.CmdOk
}
- suspend fun apiSendCallAnswer(contact: Contact, rtcSession: String, rtcIceCandidates: List): Boolean {
+ suspend fun apiSendCallAnswer(contact: Contact, rtcSession: String, rtcIceCandidates: String): Boolean {
val answer = WebRTCSession(rtcSession, rtcIceCandidates)
val r = sendCmd(CC.ApiSendCallAnswer(contact, answer))
return r is CR.CmdOk
}
- suspend fun apiSendCallExtraInfo(contact: Contact, rtcIceCandidates: List): Boolean {
+ suspend fun apiSendCallExtraInfo(contact: Contact, rtcIceCandidates: String): Boolean {
val extraInfo = WebRTCExtraInfo(rtcIceCandidates)
val r = sendCmd(CC.ApiSendCallExtraInfo(contact, extraInfo))
return r is CR.CmdOk
diff --git a/apps/android/app/src/main/java/chat/simplex/app/views/call/CallView.kt b/apps/android/app/src/main/java/chat/simplex/app/views/call/CallView.kt
index b85c6b59b7..4740a74745 100644
--- a/apps/android/app/src/main/java/chat/simplex/app/views/call/CallView.kt
+++ b/apps/android/app/src/main/java/chat/simplex/app/views/call/CallView.kt
@@ -1,7 +1,6 @@
package chat.simplex.app.views.call
import android.Manifest
-import android.annotation.SuppressLint
import android.content.ClipData
import android.content.ClipboardManager
import android.util.Log
@@ -24,16 +23,67 @@ import androidx.lifecycle.LifecycleEventObserver
import androidx.webkit.WebViewAssetLoader
import androidx.webkit.WebViewClientCompat
import chat.simplex.app.TAG
+import chat.simplex.app.model.json
import chat.simplex.app.views.helpers.TextEditor
import com.google.accompanist.permissions.rememberMultiplePermissionsState
+import kotlinx.serialization.decodeFromString
+import kotlinx.serialization.encodeToString
-@SuppressLint("SetJavaScriptEnabled")
@Composable
fun VideoCallView(close: () -> Unit) {
+ val callCommand = remember { mutableStateOf(null)}
+ val commandText = remember { mutableStateOf("{\"command\": {\"type\": \"start\", \"media\": \"video\", \"aesKey\": \"FwW+t6UbnwHoapYOfN4mUBUuqR7UtvYWxW16iBqM29U=\"}}") }
+ val clipboard = ContextCompat.getSystemService(LocalContext.current, ClipboardManager::class.java)
+
BackHandler(onBack = close)
+ Column(
+ horizontalAlignment = Alignment.CenterHorizontally,
+ verticalArrangement = Arrangement.spacedBy(12.dp),
+ modifier = Modifier
+ .background(MaterialTheme.colors.background)
+ .fillMaxSize()
+ ) {
+ WebRTCView(callCommand) { resp ->
+ // for debugging
+ // commandText.value = resp
+ commandText.value = json.encodeToString(resp)
+ }
+
+ TextEditor(Modifier.height(180.dp), text = commandText)
+
+ Row(
+ Modifier
+ .fillMaxWidth()
+ .padding(bottom = 6.dp),
+ horizontalArrangement = Arrangement.SpaceBetween
+ ) {
+ Button(onClick = {
+ val clip: ClipData = ClipData.newPlainText("js command", commandText.value)
+ clipboard?.setPrimaryClip(clip)
+ }) { Text("Copy") }
+ Button(onClick = {
+ try {
+ val apiCall: WVAPICall = json.decodeFromString(commandText.value)
+ commandText.value = ""
+ println("sending: ${commandText.value}")
+ callCommand.value = apiCall.command
+ } catch(e: Error) {
+ println("error parsing command: ${commandText.value}")
+ println(e)
+ }
+ }) { Text("Send") }
+ Button(onClick = {
+ commandText.value = ""
+ }) { Text("Clear") }
+ }
+ }
+}
+
+@Composable
+// for debugging
+// fun WebRTCView(callCommand: MutableState, onResponse: (String) -> Unit) {
+fun WebRTCView(callCommand: MutableState, onResponse: (WVAPIMessage) -> Unit) {
lateinit var wv: WebView
- val context = LocalContext.current
- val clipboard = ContextCompat.getSystemService(context, ClipboardManager::class.java)
val permissionsState = rememberMultiplePermissionsState(
permissions = listOf(
Manifest.permission.CAMERA,
@@ -42,6 +92,10 @@ fun VideoCallView(close: () -> Unit) {
Manifest.permission.INTERNET
)
)
+ fun processCommand(cmd: WCallCommand) {
+ val apiCall = WVAPICall(command = cmd)
+ wv.evaluateJavascript("processCommand(${json.encodeToString(apiCall)})", null)
+ }
val lifecycleOwner = LocalLifecycleOwner.current
DisposableEffect(lifecycleOwner) {
val observer = LifecycleEventObserver { _, event ->
@@ -52,96 +106,83 @@ fun VideoCallView(close: () -> Unit) {
lifecycleOwner.lifecycle.addObserver(observer)
onDispose {
- wv.evaluateJavascript("endCall()", null)
+ processCommand(WCallCommand.End())
lifecycleOwner.lifecycle.removeObserver(observer)
}
}
- val localContext = LocalContext.current
- val commandToShow = remember { mutableStateOf("processCommand({command: {type: 'start', media: 'video'}})") } //, aesKey: 'FwW+t6UbnwHoapYOfN4mUBUuqR7UtvYWxW16iBqM29U='})") }
+ LaunchedEffect(callCommand.value) {
+ val cmd = callCommand.value
+ if (cmd != null) {
+ callCommand.value = null
+ processCommand(cmd)
+ }
+ }
+
val assetLoader = WebViewAssetLoader.Builder()
- .addPathHandler("/assets/www/", WebViewAssetLoader.AssetsPathHandler(localContext))
+ .addPathHandler("/assets/www/", WebViewAssetLoader.AssetsPathHandler(LocalContext.current))
.build()
- Column(
- horizontalAlignment = Alignment.CenterHorizontally,
- verticalArrangement = Arrangement.spacedBy(12.dp),
- modifier = Modifier
- .background(MaterialTheme.colors.background)
- .fillMaxSize()
- ) {
- if (permissionsState.allPermissionsGranted) {
- Box(
- Modifier
- .fillMaxWidth()
- .aspectRatio(ratio = 1F)
- ) {
- AndroidView(
- factory = { AndroidViewContext ->
- WebView(AndroidViewContext).apply {
- layoutParams = ViewGroup.LayoutParams(
- ViewGroup.LayoutParams.MATCH_PARENT,
- ViewGroup.LayoutParams.MATCH_PARENT,
- )
- this.webChromeClient = object: WebChromeClient() {
- override fun onPermissionRequest(request: PermissionRequest) {
- if (request.origin.toString().startsWith("file:/")) {
- request.grant(request.resources)
- } else {
- Log.d(TAG, "Permission request from webview denied.")
- request.deny()
- }
- }
-
- override fun onConsoleMessage(consoleMessage: ConsoleMessage?): Boolean {
- val rtnValue = super.onConsoleMessage(consoleMessage)
- val msg = consoleMessage?.message() as String
- if (msg.startsWith("{")) {
- commandToShow.value = "processCommand($msg)"
- }
- return rtnValue
- }
- }
- this.webViewClient = LocalContentWebViewClient(assetLoader)
- this.clearHistory()
- this.clearCache(true)
-// this.addJavascriptInterface(JavascriptInterface(), "Android")
- val webViewSettings = this.settings
- webViewSettings.allowFileAccess = true
- webViewSettings.allowContentAccess = true
- webViewSettings.javaScriptEnabled = true
- webViewSettings.mediaPlaybackRequiresUserGesture = false
- webViewSettings.cacheMode = WebSettings.LOAD_NO_CACHE
- this.loadUrl("file:android_asset/www/call.html")
- }
- }
- ) {
- wv = it
- }
- }
- } else {
- Text("NEED PERMISSIONS")
- }
-
- TextEditor(Modifier.height(180.dp), text = commandToShow)
-
- Row(
+ if (permissionsState.allPermissionsGranted) {
+ Box(
Modifier
.fillMaxWidth()
- .padding(bottom = 6.dp),
- horizontalArrangement = Arrangement.SpaceBetween
+ .aspectRatio(ratio = 1F)
) {
- Button( onClick = {
- val clip: ClipData = ClipData.newPlainText("js command", commandToShow.value)
- clipboard?.setPrimaryClip(clip)
- }) {Text("Copy")}
- Button( onClick = {
- println("sending: ${commandToShow.value}")
- wv.evaluateJavascript(commandToShow.value, null)
- commandToShow.value = ""
- }) {Text("Send")}
- Button( onClick = {
- commandToShow.value = ""
- }) {Text("Clear")}
+ AndroidView(
+ factory = { AndroidViewContext ->
+ WebView(AndroidViewContext).apply {
+ layoutParams = ViewGroup.LayoutParams(
+ ViewGroup.LayoutParams.MATCH_PARENT,
+ ViewGroup.LayoutParams.MATCH_PARENT,
+ )
+ this.webChromeClient = object: WebChromeClient() {
+ override fun onPermissionRequest(request: PermissionRequest) {
+ if (request.origin.toString().startsWith("file:/")) {
+ request.grant(request.resources)
+ } else {
+ Log.d(TAG, "Permission request from webview denied.")
+ request.deny()
+ }
+ }
+ }
+ this.webViewClient = LocalContentWebViewClient(assetLoader)
+ this.clearHistory()
+ this.clearCache(true)
+ this.addJavascriptInterface(WebRTCInterface(onResponse), "WebRTCInterface")
+ val webViewSettings = this.settings
+ webViewSettings.allowFileAccess = true
+ webViewSettings.allowContentAccess = true
+ webViewSettings.javaScriptEnabled = true
+ webViewSettings.mediaPlaybackRequiresUserGesture = false
+ webViewSettings.allowFileAccessFromFileURLs = true;
+ webViewSettings.cacheMode = WebSettings.LOAD_NO_CACHE
+ this.loadUrl("file:android_asset/www/call.html")
+ }
+ }
+ ) {
+ wv = it
+ // for debugging
+ // wv.evaluateJavascript("sendMessageToNative = ({resp}) => WebRTCInterface.postMessage(JSON.stringify({command: resp}))", null)
+ wv.evaluateJavascript("sendMessageToNative = (msg) => WebRTCInterface.postMessage(JSON.stringify(msg))", null)
+ }
+ }
+ } else {
+ Text("NEED PERMISSIONS")
+ }
+}
+
+// for debugging
+// class WebRTCInterface(private val onResponse: (String) -> Unit) {
+class WebRTCInterface(private val onResponse: (WVAPIMessage) -> Unit) {
+ @JavascriptInterface
+ fun postMessage(message: String) {
+ Log.d(TAG, "WebRTCInterface.postMessage")
+ try {
+ // for debugging
+ // onResponse(message)
+ onResponse(json.decodeFromString(message))
+ } catch (e: Error) {
+ Log.e(TAG, "failed parsing WebView message: $message")
}
}
}
diff --git a/apps/android/app/src/main/java/chat/simplex/app/views/call/WebRTC.kt b/apps/android/app/src/main/java/chat/simplex/app/views/call/WebRTC.kt
index 6484c1d42a..53b3ec4086 100644
--- a/apps/android/app/src/main/java/chat/simplex/app/views/call/WebRTC.kt
+++ b/apps/android/app/src/main/java/chat/simplex/app/views/call/WebRTC.kt
@@ -40,15 +40,15 @@ enum class CallState {
}
@Serializable class WVAPICall(val corrId: Int? = null, val command: WCallCommand)
-@Serializable class WVAPIMessage(val corrId: Int? = null, val resp: WCallResponse, val command: WCallCommand?)
+@Serializable class WVAPIMessage(val corrId: Int? = null, val resp: WCallResponse, val command: WCallCommand? = null)
@Serializable
sealed class WCallCommand {
@Serializable @SerialName("capabilities") class Capabilities(): WCallCommand()
@Serializable @SerialName("start") class Start(val media: CallMediaType, val aesKey: String? = null): WCallCommand()
- @Serializable @SerialName("accept") class Accept(val offer: String, val iceCandidates: List, val media: CallMediaType, val aesKey: String? = null): WCallCommand()
- @Serializable @SerialName("answer") class Answer (val answer: String, val iceCandidates: List): WCallCommand()
- @Serializable @SerialName("ice") class Ice(val iceCandidates: List): WCallCommand()
+ @Serializable @SerialName("offer") class Accept(val offer: String, val iceCandidates: String, val media: CallMediaType, val aesKey: String? = null): WCallCommand()
+ @Serializable @SerialName("answer") class Answer (val answer: String, val iceCandidates: String): WCallCommand()
+ @Serializable @SerialName("ice") class Ice(val iceCandidates: String): WCallCommand()
@Serializable @SerialName("media") class Media(val media: CallMediaType, val enable: Boolean): WCallCommand()
@Serializable @SerialName("end") class End(): WCallCommand()
}
@@ -56,11 +56,11 @@ sealed class WCallCommand {
@Serializable
sealed class WCallResponse {
@Serializable @SerialName("capabilities") class Capabilities(val capabilities: CallCapabilities): WCallResponse()
- @Serializable @SerialName("offer") class Offer(val offer: String, val iceCandidates: List): WCallResponse()
+ @Serializable @SerialName("offer") class Offer(val offer: String, val iceCandidates: String): WCallResponse()
// TODO remove accept, it is needed for debugging
- @Serializable @SerialName("accept") class Accept(val offer: String, val iceCandidates: List, val media: CallMediaType, val aesKey: String? = null): WCallResponse()
- @Serializable @SerialName("answer") class Answer(val answer: String, val iceCandidates: List): WCallResponse()
- @Serializable @SerialName("ice") class Ice(val iceCandidates: List): WCallResponse()
+ @Serializable @SerialName("accept") class Accept(val offer: String, val iceCandidates: String, val media: CallMediaType, val aesKey: String? = null): WCallResponse()
+ @Serializable @SerialName("answer") class Answer(val answer: String, val iceCandidates: String): WCallResponse()
+ @Serializable @SerialName("ice") class Ice(val iceCandidates: String): WCallResponse()
@Serializable @SerialName("connection") class Connection(val state: ConnectionState): WCallResponse()
@Serializable @SerialName("ended") class Ended(): WCallResponse()
@Serializable @SerialName("ok") class Ok(): WCallResponse()
@@ -69,22 +69,23 @@ sealed class WCallResponse {
}
@Serializable class WebRTCCallOffer(val callType: CallType, val rtcSession: WebRTCSession)
-@Serializable class WebRTCSession(val rtcSession: String, val rtcIceCandidates: List)
-@Serializable class WebRTCExtraInfo(val rtcIceCandidates: List)
+@Serializable class WebRTCSession(val rtcSession: String, val rtcIceCandidates: String)
+@Serializable class WebRTCExtraInfo(val rtcIceCandidates: String)
@Serializable class CallType(val media: CallMediaType, val capabilities: CallCapabilities)
@Serializable class CallInvitation(val peerMedia: CallMediaType?, val sharedKey: String?)
@Serializable class CallCapabilities(val encryption: Boolean)
-enum class WebRTCCallStatus(val status: String) {
- Connected("connected"),
- Disconnected("disconnected"),
- Failed("failed")
+@Serializable
+enum class WebRTCCallStatus {
+ @SerialName("connected") Connected,
+ @SerialName("disconnected") Disconnected,
+ @SerialName("failed") Failed
}
@Serializable
-enum class CallMediaType(val media: String) {
- Video("video"),
- Audio("audio")
+enum class CallMediaType {
+ @SerialName("video") Video,
+ @SerialName("audio") Audio
}
@Serializable
diff --git a/apps/android/app/src/main/java/chat/simplex/app/views/usersettings/SettingsView.kt b/apps/android/app/src/main/java/chat/simplex/app/views/usersettings/SettingsView.kt
index b09b6e7484..acc37f9e06 100644
--- a/apps/android/app/src/main/java/chat/simplex/app/views/usersettings/SettingsView.kt
+++ b/apps/android/app/src/main/java/chat/simplex/app/views/usersettings/SettingsView.kt
@@ -208,8 +208,8 @@ fun SettingsLayout(
Text(annotatedStringResource(R.string.install_simplex_chat_for_terminal))
}
Divider(Modifier.padding(horizontal = 8.dp))
-// SettingsSectionView(showVideoChatPrototype) {
- SettingsSectionView() {
+ SettingsSectionView(showVideoChatPrototype) {
+// SettingsSectionView() {
Text("v${BuildConfig.VERSION_NAME} (${BuildConfig.VERSION_CODE})")
}
}
diff --git a/apps/ios/Shared/Model/Shared/CallTypes.swift b/apps/ios/Shared/Model/Shared/CallTypes.swift
index 355cb15983..ad59e1cc36 100644
--- a/apps/ios/Shared/Model/Shared/CallTypes.swift
+++ b/apps/ios/Shared/Model/Shared/CallTypes.swift
@@ -15,11 +15,11 @@ struct WebRTCCallOffer: Encodable {
struct WebRTCSession: Codable {
var rtcSession: String
- var rtcIceCandidates: [String]
+ var rtcIceCandidates: String
}
struct WebRTCExtraInfo: Codable {
- var rtcIceCandidates: [String]
+ var rtcIceCandidates: String
}
struct CallInvitation {
diff --git a/apps/ios/Shared/Model/SimpleXAPI.swift b/apps/ios/Shared/Model/SimpleXAPI.swift
index b6573f1bbb..8a1ad8e928 100644
--- a/apps/ios/Shared/Model/SimpleXAPI.swift
+++ b/apps/ios/Shared/Model/SimpleXAPI.swift
@@ -382,18 +382,18 @@ func apiRejectCall(_ contact: Contact) async throws {
try await sendCommandOkResp(.apiRejectCall(contact: contact))
}
-func apiSendCallOffer(_ contact: Contact, _ rtcSession: String, _ rtcIceCandidates: [String], media: CallMediaType, capabilities: CallCapabilities) async throws {
+func apiSendCallOffer(_ contact: Contact, _ rtcSession: String, _ rtcIceCandidates: String, media: CallMediaType, capabilities: CallCapabilities) async throws {
let webRtcSession = WebRTCSession(rtcSession: rtcSession, rtcIceCandidates: rtcIceCandidates)
let callOffer = WebRTCCallOffer(callType: CallType(media: media, capabilities: capabilities), rtcSession: webRtcSession)
try await sendCommandOkResp(.apiSendCallOffer(contact: contact, callOffer: callOffer))
}
-func apiSendCallAnswer(_ contact: Contact, _ rtcSession: String, _ rtcIceCandidates: [String]) async throws {
+func apiSendCallAnswer(_ contact: Contact, _ rtcSession: String, _ rtcIceCandidates: String) async throws {
let answer = WebRTCSession(rtcSession: rtcSession, rtcIceCandidates: rtcIceCandidates)
try await sendCommandOkResp(.apiSendCallAnswer(contact: contact, answer: answer))
}
-func apiSendCallExtraInfo(_ contact: Contact, _ rtcIceCandidates: [String]) async throws {
+func apiSendCallExtraInfo(_ contact: Contact, _ rtcIceCandidates: String) async throws {
let extraInfo = WebRTCExtraInfo(rtcIceCandidates: rtcIceCandidates)
try await sendCommandOkResp(.apiSendCallExtraInfo(contact: contact, extraInfo: extraInfo))
}
@@ -624,7 +624,7 @@ func processReceivedMsg(_ res: ChatResponse) {
// TODO check encryption is compatible
withCall(contact) { call in
m.activeCall = call.copy(callState: .offerReceived, peerMedia: callType.media, sharedKey: sharedKey)
- m.callCommand = .accept(offer: offer.rtcSession, iceCandidates: offer.rtcIceCandidates, media: callType.media, aesKey: sharedKey)
+ m.callCommand = .offer(offer: offer.rtcSession, iceCandidates: offer.rtcIceCandidates, media: callType.media, aesKey: sharedKey)
}
case let .callAnswer(contact, answer):
withCall(contact) { call in
diff --git a/apps/ios/Shared/Views/Call/WebRTC.swift b/apps/ios/Shared/Views/Call/WebRTC.swift
index ec3842bb07..41845d72aa 100644
--- a/apps/ios/Shared/Views/Call/WebRTC.swift
+++ b/apps/ios/Shared/Views/Call/WebRTC.swift
@@ -97,18 +97,18 @@ struct WVAPICall: Encodable {
var command: WCallCommand
}
-struct WVAPIMessage: Equatable, Decodable {
+struct WVAPIMessage: Equatable, Decodable, Encodable {
var corrId: Int?
var resp: WCallResponse
var command: WCallCommand?
}
enum WCallCommand: Equatable, Encodable, Decodable {
- case capabilities
- case start(media: CallMediaType, aesKey: String? = nil)
- case accept(offer: String, iceCandidates: [String], media: CallMediaType, aesKey: String? = nil)
- case answer(answer: String, iceCandidates: [String])
- case ice(iceCandidates: [String])
+ case capabilities(useWorker: Bool? = nil)
+ case start(media: CallMediaType, aesKey: String? = nil, useWorker: Bool? = nil)
+ case offer(offer: String, iceCandidates: String, media: CallMediaType, aesKey: String? = nil, useWorker: Bool? = nil)
+ case answer(answer: String, iceCandidates: String)
+ case ice(iceCandidates: String)
case media(media: CallMediaType, enable: Bool)
case end
@@ -116,6 +116,7 @@ enum WCallCommand: Equatable, Encodable, Decodable {
case type
case media
case aesKey
+ case useWorker
case offer
case answer
case iceCandidates
@@ -127,7 +128,7 @@ enum WCallCommand: Equatable, Encodable, Decodable {
switch self {
case .capabilities: return "capabilities"
case .start: return "start"
- case .accept: return "accept"
+ case .offer: return "offer"
case .answer: return "answer"
case .ice: return "ice"
case .media: return "media"
@@ -139,18 +140,21 @@ enum WCallCommand: Equatable, Encodable, Decodable {
func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
switch self {
- case .capabilities:
+ case let .capabilities(useWorker):
try container.encode("capabilities", forKey: .type)
- case let .start(media, aesKey):
+ try container.encode(useWorker, forKey: .useWorker)
+ case let .start(media, aesKey, useWorker):
try container.encode("start", forKey: .type)
try container.encode(media, forKey: .media)
try container.encode(aesKey, forKey: .aesKey)
- case let .accept(offer, iceCandidates, media, aesKey):
+ try container.encode(useWorker, forKey: .useWorker)
+ case let .offer(offer, iceCandidates, media, aesKey, useWorker):
try container.encode("accept", forKey: .type)
try container.encode(offer, forKey: .offer)
try container.encode(iceCandidates, forKey: .iceCandidates)
try container.encode(media, forKey: .media)
try container.encode(aesKey, forKey: .aesKey)
+ try container.encode(useWorker, forKey: .useWorker)
case let .answer(answer, iceCandidates):
try container.encode("answer", forKey: .type)
try container.encode(answer, forKey: .answer)
@@ -172,23 +176,26 @@ enum WCallCommand: Equatable, Encodable, Decodable {
let type = try container.decode(String.self, forKey: CodingKeys.type)
switch type {
case "capabilities":
- self = .capabilities
+ let useWorker = try container.decode((Bool?).self, forKey: CodingKeys.useWorker)
+ self = .capabilities(useWorker: useWorker)
case "start":
let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
let aesKey = try? container.decode(String.self, forKey: CodingKeys.aesKey)
- self = .start(media: media, aesKey: aesKey)
- case "accept":
+ let useWorker = try container.decode((Bool?).self, forKey: CodingKeys.useWorker)
+ self = .start(media: media, aesKey: aesKey, useWorker: useWorker)
+ case "offer":
let offer = try container.decode(String.self, forKey: CodingKeys.offer)
- let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
+ let iceCandidates = try container.decode(String.self, forKey: CodingKeys.iceCandidates)
let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
let aesKey = try? container.decode(String.self, forKey: CodingKeys.aesKey)
- self = .accept(offer: offer, iceCandidates: iceCandidates, media: media, aesKey: aesKey)
+ let useWorker = try container.decode((Bool?).self, forKey: CodingKeys.useWorker)
+ self = .offer(offer: offer, iceCandidates: iceCandidates, media: media, aesKey: aesKey, useWorker: useWorker)
case "answer":
let answer = try container.decode(String.self, forKey: CodingKeys.answer)
- let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
+ let iceCandidates = try container.decode(String.self, forKey: CodingKeys.iceCandidates)
self = .answer(answer: answer, iceCandidates: iceCandidates)
case "ice":
- let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
+ let iceCandidates = try container.decode(String.self, forKey: CodingKeys.iceCandidates)
self = .ice(iceCandidates: iceCandidates)
case "media":
let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
@@ -205,11 +212,11 @@ enum WCallCommand: Equatable, Encodable, Decodable {
enum WCallResponse: Equatable, Decodable {
case capabilities(capabilities: CallCapabilities)
- case offer(offer: String, iceCandidates: [String], capabilities: CallCapabilities)
+ case offer(offer: String, iceCandidates: String, capabilities: CallCapabilities)
// TODO remove accept, it is needed for debugging
-// case accept(offer: String, iceCandidates: [String], media: CallMediaType, aesKey: String? = nil)
- case answer(answer: String, iceCandidates: [String])
- case ice(iceCandidates: [String])
+// case offer(offer: String, iceCandidates: [String], media: CallMediaType, aesKey: String? = nil)
+ case answer(answer: String, iceCandidates: String)
+ case ice(iceCandidates: String)
case connection(state: ConnectionState)
case ended
case ok
@@ -234,7 +241,6 @@ enum WCallResponse: Equatable, Decodable {
switch self {
case .capabilities: return("capabilities")
case .offer: return("offer")
-// case .accept: return("accept")
case .answer: return("answer (TODO remove)")
case .ice: return("ice")
case .connection: return("connection")
@@ -256,15 +262,15 @@ enum WCallResponse: Equatable, Decodable {
self = .capabilities(capabilities: capabilities)
case "offer":
let offer = try container.decode(String.self, forKey: CodingKeys.offer)
- let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
+ let iceCandidates = try container.decode(String.self, forKey: CodingKeys.iceCandidates)
let capabilities = try container.decode(CallCapabilities.self, forKey: CodingKeys.capabilities)
self = .offer(offer: offer, iceCandidates: iceCandidates, capabilities: capabilities)
case "answer":
let answer = try container.decode(String.self, forKey: CodingKeys.answer)
- let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
+ let iceCandidates = try container.decode(String.self, forKey: CodingKeys.iceCandidates)
self = .answer(answer: answer, iceCandidates: iceCandidates)
case "ice":
- let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
+ let iceCandidates = try container.decode(String.self, forKey: CodingKeys.iceCandidates)
self = .ice(iceCandidates: iceCandidates)
case "connection":
let state = try container.decode(ConnectionState.self, forKey: CodingKeys.state)
@@ -286,39 +292,39 @@ enum WCallResponse: Equatable, Decodable {
}
// This protocol is for debugging
-//extension WCallResponse: Encodable {
-// func encode(to encoder: Encoder) throws {
-// var container = encoder.container(keyedBy: CodingKeys.self)
-// switch self {
-// case .capabilities:
-// try container.encode("capabilities", forKey: .type)
-// case let .offer(offer, iceCandidates, capabilities):
-// try container.encode("offer", forKey: .type)
-// try container.encode(offer, forKey: .offer)
-// try container.encode(iceCandidates, forKey: .iceCandidates)
-// try container.encode(capabilities, forKey: .capabilities)
-// case let .answer(answer, iceCandidates):
-// try container.encode("answer", forKey: .type)
-// try container.encode(answer, forKey: .answer)
-// try container.encode(iceCandidates, forKey: .iceCandidates)
-// case let .ice(iceCandidates):
-// try container.encode("ice", forKey: .type)
-// try container.encode(iceCandidates, forKey: .iceCandidates)
-// case let .connection(state):
-// try container.encode("connection", forKey: .type)
-// try container.encode(state, forKey: .state)
-// case .ended:
-// try container.encode("ended", forKey: .type)
-// case .ok:
-// try container.encode("ok", forKey: .type)
-// case let .error(message):
-// try container.encode("error", forKey: .type)
-// try container.encode(message, forKey: .message)
-// case let .invalid(type):
-// try container.encode(type, forKey: .type)
-// }
-// }
-//}
+extension WCallResponse: Encodable {
+ func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ switch self {
+ case .capabilities:
+ try container.encode("capabilities", forKey: .type)
+ case let .offer(offer, iceCandidates, capabilities):
+ try container.encode("offer", forKey: .type)
+ try container.encode(offer, forKey: .offer)
+ try container.encode(iceCandidates, forKey: .iceCandidates)
+ try container.encode(capabilities, forKey: .capabilities)
+ case let .answer(answer, iceCandidates):
+ try container.encode("answer", forKey: .type)
+ try container.encode(answer, forKey: .answer)
+ try container.encode(iceCandidates, forKey: .iceCandidates)
+ case let .ice(iceCandidates):
+ try container.encode("ice", forKey: .type)
+ try container.encode(iceCandidates, forKey: .iceCandidates)
+ case let .connection(state):
+ try container.encode("connection", forKey: .type)
+ try container.encode(state, forKey: .state)
+ case .ended:
+ try container.encode("ended", forKey: .type)
+ case .ok:
+ try container.encode("ok", forKey: .type)
+ case let .error(message):
+ try container.encode("error", forKey: .type)
+ try container.encode(message, forKey: .message)
+ case let .invalid(type):
+ try container.encode(type, forKey: .type)
+ }
+ }
+}
struct ConnectionState: Codable, Equatable {
var connectionState: String
diff --git a/apps/ios/Shared/Views/Call/WebRTCView.swift b/apps/ios/Shared/Views/Call/WebRTCView.swift
index 1dad35e67c..39970f2513 100644
--- a/apps/ios/Shared/Views/Call/WebRTCView.swift
+++ b/apps/ios/Shared/Views/Call/WebRTCView.swift
@@ -31,6 +31,7 @@ class WebRTCCoordinator: NSObject, WKNavigationDelegate, WKScriptMessageHandler
didReceive message: WKScriptMessage
) {
logger.debug("WebRTCCoordinator.userContentController")
+ logger.debug("\(String(describing: message.body as? String))")
if let msgStr = message.body as? String,
let msg: WVAPIMessage = decodeJSON(msgStr) {
webViewMsg.wrappedValue = msg
@@ -90,74 +91,76 @@ struct WebRTCView: UIViewRepresentable {
}
}
-//struct CallViewDebug: View {
-// @State private var coordinator: WebRTCCoordinator? = nil
-// @State private var commandStr = ""
-// @State private var webViewReady: Bool = false
-// @State private var webViewMsg: WVAPIMessage? = nil
-// @FocusState private var keyboardVisible: Bool
-//
-// var body: some View {
-// VStack(spacing: 30) {
-// WebRTCView(coordinator: $coordinator, webViewReady: $webViewReady, webViewMsg: $webViewMsg).frame(maxHeight: 260)
-// .onChange(of: webViewMsg) { _ in
-// if let resp = webViewMsg {
-// commandStr = encodeJSON(resp)
-// }
-// }
-// TextEditor(text: $commandStr)
-// .focused($keyboardVisible)
-// .disableAutocorrection(true)
-// .textInputAutocapitalization(.never)
-// .padding(.horizontal, 5)
-// .padding(.top, 2)
-// .frame(height: 112)
-// .overlay(
-// RoundedRectangle(cornerRadius: 10)
-// .strokeBorder(.secondary, lineWidth: 0.3, antialiased: true)
-// )
-// HStack(spacing: 20) {
-// Button("Copy") {
-// UIPasteboard.general.string = commandStr
-// }
-// Button("Paste") {
-// commandStr = UIPasteboard.general.string ?? ""
-// }
-// Button("Clear") {
-// commandStr = ""
-// }
-// Button("Send") {
-// if let c = coordinator,
-// let command: WCallCommand = decodeJSON(commandStr) {
-// c.sendCommand(command: command)
-// }
-// }
-// }
-// HStack(spacing: 20) {
-// Button("Capabilities") {
-//
-// }
-// Button("Start") {
-// if let c = coordinator {
-// c.sendCommand(command: .start(media: .video))
-// }
-// }
-// Button("Accept") {
-//
-// }
-// Button("Answer") {
-//
-// }
-// Button("ICE") {
-//
-// }
-// Button("End") {
-//
-// }
-// }
-// }
-// }
-//}
+struct CallViewDebug: View {
+ @State private var coordinator: WebRTCCoordinator? = nil
+ @State private var commandStr = ""
+ @State private var webViewReady: Bool = false
+ @State private var webViewMsg: WVAPIMessage? = nil
+ @FocusState private var keyboardVisible: Bool
+
+ var body: some View {
+ VStack(spacing: 30) {
+ WebRTCView(coordinator: $coordinator, webViewReady: $webViewReady, webViewMsg: $webViewMsg).frame(maxHeight: 260)
+ .onChange(of: webViewMsg) { _ in
+ if let resp = webViewMsg {
+ commandStr = encodeJSON(resp)
+ }
+ }
+ TextEditor(text: $commandStr)
+ .focused($keyboardVisible)
+ .disableAutocorrection(true)
+ .textInputAutocapitalization(.never)
+ .padding(.horizontal, 5)
+ .padding(.top, 2)
+ .frame(height: 112)
+ .overlay(
+ RoundedRectangle(cornerRadius: 10)
+ .strokeBorder(.secondary, lineWidth: 0.3, antialiased: true)
+ )
+ HStack(spacing: 20) {
+ Button("Copy") {
+ UIPasteboard.general.string = commandStr
+ }
+ Button("Paste") {
+ commandStr = UIPasteboard.general.string ?? ""
+ }
+ Button("Clear") {
+ commandStr = ""
+ }
+ Button("Send") {
+ if let c = coordinator,
+ let command: WCallCommand = decodeJSON(commandStr) {
+ c.sendCommand(command: command)
+ }
+ }
+ }
+ HStack(spacing: 20) {
+ Button("Capabilities") {
+ if let c = coordinator {
+ c.sendCommand(command: .capabilities(useWorker: true))
+ }
+ }
+ Button("Start") {
+ if let c = coordinator {
+ c.sendCommand(command: .start(media: .video))
+ }
+ }
+ Button("Accept") {
+
+ }
+ Button("Answer") {
+
+ }
+ Button("ICE") {
+
+ }
+ Button("End") {
+
+ }
+ }
+ }
+ }
+}
//
//struct CallViewDebug_Previews: PreviewProvider {
// static var previews: some View {
diff --git a/apps/ios/Shared/Views/Chat/ChatView.swift b/apps/ios/Shared/Views/Chat/ChatView.swift
index f9c313863a..10e21774c2 100644
--- a/apps/ios/Shared/Views/Chat/ChatView.swift
+++ b/apps/ios/Shared/Views/Chat/ChatView.swift
@@ -126,7 +126,7 @@ struct ChatView: View {
localMedia: media
)
showCallView = true
- chatModel.callCommand = .capabilities
+ chatModel.callCommand = .capabilities(useWorker: true)
} label: {
Image(systemName: imageName)
}
diff --git a/apps/ios/Shared/Views/UserSettings/SettingsView.swift b/apps/ios/Shared/Views/UserSettings/SettingsView.swift
index 05af85086b..3119c3a77d 100644
--- a/apps/ios/Shared/Views/UserSettings/SettingsView.swift
+++ b/apps/ios/Shared/Views/UserSettings/SettingsView.swift
@@ -126,12 +126,12 @@ struct SettingsView: View {
// notificationsToggle(token)
// }
// }
-// NavigationLink {
-// CallViewDebug()
-// .frame(maxHeight: .infinity, alignment: .top)
-// } label: {
+ NavigationLink {
+ CallViewDebug()
+ .frame(maxHeight: .infinity, alignment: .top)
+ } label: {
Text("v\(appVersion ?? "?") (\(appBuild ?? "?"))")
-// }
+ }
}
}
.navigationTitle("Your settings")
diff --git a/packages/simplex-chat-webrtc/copy b/packages/simplex-chat-webrtc/copy
index 9175480911..1b766264fb 100755
--- a/packages/simplex-chat-webrtc/copy
+++ b/packages/simplex-chat-webrtc/copy
@@ -3,8 +3,12 @@
# it can be tested in the browser from dist folder
cp ./src/call.html ./dist/call.html
cp ./src/style.css ./dist/style.css
+cp ./node_modules/lz-string/libs/lz-string.min.js ./dist/lz-string.min.js
+cp ./src/webcall.html ./dist/webcall.html
+cp ./src/ui.js ./dist/ui.js
# copy to android app
cp ./src/call.html ../../apps/android/app/src/main/assets/www/call.html
cp ./src/style.css ../../apps/android/app/src/main/assets/www/style.css
cp ./dist/call.js ../../apps/android/app/src/main/assets/www/call.js
+cp ./node_modules/lz-string/libs/lz-string.min.js ../../apps/android/app/src/main/assets/www/lz-string.min.js
diff --git a/packages/simplex-chat-webrtc/package.json b/packages/simplex-chat-webrtc/package.json
index 222c0c0a56..86fe6c0fb0 100644
--- a/packages/simplex-chat-webrtc/package.json
+++ b/packages/simplex-chat-webrtc/package.json
@@ -14,6 +14,7 @@
"author": "",
"license": "AGPL-3.0-or-later",
"devDependencies": {
+ "@types/lz-string": "^1.3.34",
"husky": "^7.0.4",
"lint-staged": "^12.4.1",
"prettier": "^2.6.2",
@@ -21,5 +22,8 @@
},
"lint-staged": {
"**/*": "prettier --write --ignore-unknown"
+ },
+ "dependencies": {
+ "lz-string": "^1.4.4"
}
}
diff --git a/packages/simplex-chat-webrtc/src/call.html b/packages/simplex-chat-webrtc/src/call.html
index fd3019e8eb..a188d38394 100644
--- a/packages/simplex-chat-webrtc/src/call.html
+++ b/packages/simplex-chat-webrtc/src/call.html
@@ -2,6 +2,7 @@
+
diff --git a/packages/simplex-chat-webrtc/src/call.ts b/packages/simplex-chat-webrtc/src/call.ts
index 5c0678b6e2..b5fa306870 100644
--- a/packages/simplex-chat-webrtc/src/call.ts
+++ b/packages/simplex-chat-webrtc/src/call.ts
@@ -1,11 +1,6 @@
// Inspired by
// https://github.com/webrtc/samples/blob/gh-pages/src/content/insertable-streams/endtoend-encryption
-interface WVAPICall {
- corrId?: number
- command: WCallCommand
-}
-
interface WVApiMessage {
corrId?: number
resp: WCallResponse
@@ -14,9 +9,18 @@ interface WVApiMessage {
type WCallCommand = WCCapabilities | WCStartCall | WCAcceptOffer | WCallAnswer | WCallIceCandidates | WCEnableMedia | WCEndCall
-type WCallResponse = WRCapabilities | WCallOffer | WCallAnswer | WCallIceCandidates | WRConnection | WRCallEnded | WROk | WRError
+type WCallResponse =
+ | WRCapabilities
+ | WCallOffer
+ | WCallAnswer
+ | WCallIceCandidates
+ | WRConnection
+ | WRCallEnded
+ | WROk
+ | WRError
+ | WCAcceptOffer
-type WCallCommandTag = "capabilities" | "start" | "accept" | "answer" | "ice" | "media" | "end"
+type WCallCommandTag = "capabilities" | "start" | "offer" | "answer" | "ice" | "media" | "end"
type WCallResponseTag = "capabilities" | "offer" | "answer" | "ice" | "connection" | "ended" | "ok" | "error"
@@ -35,12 +39,14 @@ interface IWCallResponse {
interface WCCapabilities extends IWCallCommand {
type: "capabilities"
+ useWorker?: boolean
}
interface WCStartCall extends IWCallCommand {
type: "start"
media: CallMediaType
aesKey?: string
+ useWorker?: boolean
}
interface WCEndCall extends IWCallCommand {
@@ -48,29 +54,30 @@ interface WCEndCall extends IWCallCommand {
}
interface WCAcceptOffer extends IWCallCommand {
- type: "accept"
+ type: "offer"
offer: string // JSON string for RTCSessionDescriptionInit
- iceCandidates: string[] // JSON strings for RTCIceCandidateInit
+ iceCandidates: string // JSON strings for RTCIceCandidateInit
media: CallMediaType
aesKey?: string
+ useWorker?: boolean
}
interface WCallOffer extends IWCallResponse {
type: "offer"
offer: string // JSON string for RTCSessionDescriptionInit
- iceCandidates: string[] // JSON strings for RTCIceCandidateInit
+ iceCandidates: string // JSON strings for RTCIceCandidateInit[]
capabilities: CallCapabilities
}
interface WCallAnswer extends IWCallCommand, IWCallResponse {
type: "answer"
answer: string // JSON string for RTCSessionDescriptionInit
- iceCandidates: string[] // JSON strings for RTCIceCandidateInit
+ iceCandidates: string // JSON strings for RTCIceCandidateInit[]
}
interface WCallIceCandidates extends IWCallCommand, IWCallResponse {
type: "ice"
- iceCandidates: string[] // JSON strings for RTCIceCandidateInit
+ iceCandidates: string // JSON strings for RTCIceCandidateInit[]
}
interface WCEnableMedia extends IWCallCommand {
@@ -111,513 +118,643 @@ interface WRError extends IWCallResponse {
message: string
}
-type RTCRtpSenderWithEncryption = RTCRtpSender & {
- createEncodedStreams: () => TransformStream
-}
-
-type RTCRtpReceiverWithEncryption = RTCRtpReceiver & {
- createEncodedStreams: () => TransformStream
-}
-
-type RTCConfigurationWithEncryption = RTCConfiguration & {
- encodedInsertableStreams: boolean
-}
-
-const keyAlgorithm: AesKeyAlgorithm = {
- name: "AES-GCM",
- length: 256,
-}
-
-const keyUsages: KeyUsage[] = ["encrypt", "decrypt"]
-
-let activeCall: Call | undefined
-
-const IV_LENGTH = 12
-
-const initialPlainTextRequired = {
- key: 10,
- delta: 3,
- undefined: 1,
-}
-
-interface Call {
- connection: RTCPeerConnection
- iceCandidates: Promise<string[]> // JSON strings for RTCIceCandidate
- localMedia: CallMediaType
- localStream: MediaStream
-}
-
-interface CallConfig {
- peerConnectionConfig: RTCConfigurationWithEncryption
- iceCandidates: {
- delay: number
- extrasInterval: number
- extrasTimeout: number
- }
-}
-
-function defaultCallConfig(encodedInsertableStreams: boolean): CallConfig {
- return {
- peerConnectionConfig: {
- iceServers: [{urls: ["stun:stun.l.google.com:19302"]}],
- iceCandidatePoolSize: 10,
- encodedInsertableStreams,
- },
- iceCandidates: {
- delay: 2000,
- extrasInterval: 2000,
- extrasTimeout: 8000,
- },
- }
-}
-
-async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesKey?: string): Promise<Call> {
- const conn = new RTCPeerConnection(config.peerConnectionConfig)
- const remoteStream = new MediaStream()
- const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType))
- await setUpMediaStreams(conn, localStream, remoteStream, aesKey)
- conn.addEventListener("connectionstatechange", connectionStateChange)
- const iceCandidates = new Promise<string[]>((resolve, _) => {
- let candidates: RTCIceCandidate[] = []
- let resolved = false
- let extrasInterval: number | undefined
- let extrasTimeout: number | undefined
- const delay = setTimeout(() => {
- if (!resolved) {
- resolveIceCandidates()
- extrasInterval = setInterval(() => {
- sendIceCandidates()
- }, config.iceCandidates.extrasInterval)
- extrasTimeout = setTimeout(() => {
- clearInterval(extrasInterval)
- sendIceCandidates()
- }, config.iceCandidates.extrasTimeout)
- }
- }, config.iceCandidates.delay)
-
- conn.onicecandidate = ({candidate: c}) => c && candidates.push(c)
- conn.onicegatheringstatechange = () => {
- if (conn.iceGatheringState == "complete") {
- if (resolved) {
- if (extrasInterval) clearInterval(extrasInterval)
- if (extrasTimeout) clearTimeout(extrasTimeout)
- sendIceCandidates()
- } else {
- resolveIceCandidates()
- }
- }
- }
-
- function resolveIceCandidates() {
- if (delay) clearTimeout(delay)
- resolved = true
- const iceCandidates = candidates.map((c) => JSON.stringify(c))
- candidates = []
- resolve(iceCandidates)
- }
-
- function sendIceCandidates() {
- if (candidates.length === 0) return
- const iceCandidates = candidates.map((c) => JSON.stringify(c))
- candidates = []
- sendMessageToNative({resp: {type: "ice", iceCandidates}})
- }
- })
-
- return {connection: conn, iceCandidates, localMedia: mediaType, localStream}
-
- function connectionStateChange() {
- sendMessageToNative({
- resp: {
- type: "connection",
- state: {
- connectionState: conn.connectionState,
- iceConnectionState: conn.iceConnectionState,
- iceGatheringState: conn.iceGatheringState,
- signalingState: conn.signalingState,
- },
- },
- })
- if (conn.connectionState == "disconnected" || conn.connectionState == "failed") {
- conn.removeEventListener("connectionstatechange", connectionStateChange)
- sendMessageToNative({resp: {type: "ended"}})
- conn.close()
- activeCall = undefined
- resetVideoElements()
- }
- }
-}
-
+// for debugging
+// var sendMessageToNative = ({resp}: WVApiMessage) => console.log(JSON.stringify({command: resp}))
var sendMessageToNative = (msg: WVApiMessage) => console.log(JSON.stringify(msg))
-async function processCommand(body: WVAPICall): Promise<WVApiMessage> {
- const {corrId, command} = body
- const pc = activeCall?.connection
- let resp: WCallResponse
- try {
- switch (command.type) {
- case "capabilities":
- const encryption = supportsInsertableStreams()
- resp = {type: "capabilities", capabilities: {encryption}}
- break
- case "start":
- console.log("starting call")
- if (activeCall) {
- resp = {type: "error", message: "start: call already started"}
- } else if (!supportsInsertableStreams() && command.aesKey) {
- resp = {type: "error", message: "start: encryption is not supported"}
- } else {
- const encryption = supportsInsertableStreams()
- const {media, aesKey} = command
- activeCall = await initializeCall(defaultCallConfig(encryption && !!aesKey), media, encryption ? aesKey : undefined)
- const pc = activeCall.connection
- const offer = await pc.createOffer()
- await pc.setLocalDescription(offer)
- // for debugging, returning the command for callee to use
- // resp = {type: "accept", offer: JSON.stringify(offer), iceCandidates: await iceCandidates, media, aesKey}
- resp = {
- type: "offer",
- offer: JSON.stringify(offer),
- iceCandidates: await activeCall.iceCandidates,
- capabilities: {encryption},
- }
- }
- break
- case "accept":
- if (activeCall) {
- resp = {type: "error", message: "accept: call already started"}
- } else if (!supportsInsertableStreams() && command.aesKey) {
- resp = {type: "error", message: "accept: encryption is not supported"}
- } else {
- const offer = JSON.parse(command.offer)
- const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c))
- activeCall = await initializeCall(defaultCallConfig(!!command.aesKey), command.media, command.aesKey)
- const pc = activeCall.connection
- await pc.setRemoteDescription(new RTCSessionDescription(offer))
- const answer = await pc.createAnswer()
- await pc.setLocalDescription(answer)
- addIceCandidates(pc, remoteIceCandidates)
- // same as command for caller to use
- resp = {
- type: "answer",
- answer: JSON.stringify(answer),
- iceCandidates: await activeCall.iceCandidates,
- }
- }
- break
- case "answer":
- if (!pc) {
- resp = {type: "error", message: "answer: call not started"}
- } else if (!pc.localDescription) {
- resp = {type: "error", message: "answer: local description is not set"}
- } else if (pc.currentRemoteDescription) {
- resp = {type: "error", message: "answer: remote description already set"}
- } else {
- const answer = JSON.parse(command.answer)
- const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c))
- await pc.setRemoteDescription(new RTCSessionDescription(answer))
- addIceCandidates(pc, remoteIceCandidates)
- resp = {type: "ok"}
- }
- break
- case "ice":
- if (pc) {
- const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c))
- addIceCandidates(pc, remoteIceCandidates)
- resp = {type: "ok"}
- } else {
- resp = {type: "error", message: "ice: call not started"}
- }
- break
- case "media":
- if (!activeCall) {
- resp = {type: "error", message: "media: call not started"}
- } else if (activeCall.localMedia == CallMediaType.Audio && command.media == CallMediaType.Video) {
- resp = {type: "error", message: "media: no video"}
- } else {
- enableMedia(activeCall.localStream, command.media, command.enable)
- resp = {type: "ok"}
- }
- break
- case "end":
- if (pc) {
- pc.close()
- activeCall = undefined
- resetVideoElements()
- resp = {type: "ok"}
- } else {
- resp = {type: "error", message: "end: call not started"}
- }
- break
- default:
- resp = {type: "error", message: "unknown command"}
- break
- }
- } catch (e) {
- resp = {type: "error", message: (e as Error).message}
- }
- const apiResp = {corrId, resp, command}
- sendMessageToNative(apiResp)
- return apiResp
+// Global object with cryptographic/encoding functions
+const callCrypto = callCryptoFunction()
+
+declare var RTCRtpScriptTransform: {
+ prototype: RTCRtpScriptTransform
+ new (worker: Worker, options?: any): RTCRtpScriptTransform
}
-function addIceCandidates(conn: RTCPeerConnection, iceCandidates: RTCIceCandidateInit[]) {
- for (const c of iceCandidates) {
- conn.addIceCandidate(new RTCIceCandidate(c))
- }
+enum TransformOperation {
+ Encrypt = "encrypt",
+ Decrypt = "decrypt",
}
-async function setUpMediaStreams(
- pc: RTCPeerConnection,
- localStream: MediaStream,
- remoteStream: MediaStream,
- aesKey?: string
-): Promise<void> {
- const videos = getVideoElements()
- if (!videos) throw Error("no video elements")
+interface RTCRtpScriptTransform {}
- let key: CryptoKey | undefined
- if (aesKey) {
- const keyData = decodeBase64(encodeAscii(aesKey))
- if (keyData) key = await crypto.subtle.importKey("raw", keyData, keyAlgorithm, false, keyUsages)
+;(function () {
+ interface WVAPICall {
+ corrId?: number
+ command: WCallCommand
}
- for (const track of localStream.getTracks()) {
- pc.addTrack(track, localStream)
- }
- if (key) {
- console.log("set up encryption for sending")
- for (const sender of pc.getSenders() as RTCRtpSenderWithEncryption[]) {
- setupPeerTransform(sender, encodeFunction(key))
- }
- }
- // Pull tracks from remote stream as they arrive add them to remoteStream video
- pc.ontrack = (event) => {
- if (key) {
- console.log("set up decryption for receiving")
- setupPeerTransform(event.receiver as RTCRtpReceiverWithEncryption, decodeFunction(key))
- }
- for (const track of event.streams[0].getTracks()) {
- remoteStream.addTrack(track)
- }
- }
- // We assume VP8 encoding in the decode/encode stages to get the initial
- // bytes to pass as plaintext so we enforce that here.
- // VP8 is supported by all supports of webrtc.
- // Use of VP8 by default may also reduce depacketisation issues.
- // We do not encrypt the first couple of bytes of the payload so that the
- // video elements can work by determining video keyframes and the opus mode
- // being used. This appears to be necessary for any video feed at all.
- // For VP8 this is the content described in
- // https://tools.ietf.org/html/rfc6386#section-9.1
- // which is 10 bytes for key frames and 3 bytes for delta frames.
- // For opus (where encodedFrame.type is not set) this is the TOC byte from
- // https://tools.ietf.org/html/rfc6716#section-3.1
- const capabilities = RTCRtpSender.getCapabilities("video")
- if (capabilities) {
- const {codecs} = capabilities
- const selectedCodecIndex = codecs.findIndex((c) => c.mimeType === "video/VP8")
- const selectedCodec = codecs[selectedCodecIndex]
- codecs.splice(selectedCodecIndex, 1)
- codecs.unshift(selectedCodec)
- for (const t of pc.getTransceivers()) {
- if (t.sender.track?.kind === "video") {
- t.setCodecPreferences(codecs)
+ type RTCRtpSenderWithEncryption = RTCRtpSender & {
+ createEncodedStreams: () => TransformStream
+ transform: RTCRtpScriptTransform
+ }
+
+ type RTCRtpReceiverWithEncryption = RTCRtpReceiver & {
+ createEncodedStreams: () => TransformStream
+ transform: RTCRtpScriptTransform
+ }
+
+ type RTCConfigurationWithEncryption = RTCConfiguration & {
+ encodedInsertableStreams: boolean
+ }
+
+ interface Call {
+ connection: RTCPeerConnection
+ iceCandidates: Promise<string> // JSON strings for RTCIceCandidate
+ localMedia: CallMediaType
+ localStream: MediaStream
+ }
+
+ interface CallConfig {
+ peerConnectionConfig: RTCConfigurationWithEncryption
+ iceCandidates: {
+ delay: number
+ extrasInterval: number
+ extrasTimeout: number
+ }
+ }
+
+ let activeCall: Call | undefined
+
+ function defaultCallConfig(encodedInsertableStreams: boolean): CallConfig {
+ return {
+ peerConnectionConfig: {
+ iceServers: [
+ {urls: "stun:stun.simplex.chat:5349"},
+ // {urls: "turn:turn.simplex.chat:5349", username: "private", credential: "yleob6AVkiNI87hpR94Z"},
+ ],
+ iceCandidatePoolSize: 10,
+ encodedInsertableStreams,
+ // iceTransportPolicy: "relay",
+ },
+ iceCandidates: {
+ delay: 2000,
+ extrasInterval: 2000,
+ extrasTimeout: 8000,
+ },
+ }
+ }
+
+ async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesKey?: string, useWorker?: boolean): Promise<Call> {
+ const conn = new RTCPeerConnection(config.peerConnectionConfig)
+ const remoteStream = new MediaStream()
+ const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType))
+ await setUpMediaStreams(conn, localStream, remoteStream, aesKey, useWorker)
+ conn.addEventListener("connectionstatechange", connectionStateChange)
+ const iceCandidates = new Promise<string>((resolve, _) => {
+ let candidates: RTCIceCandidate[] = []
+ let resolved = false
+ let extrasInterval: number | undefined
+ let extrasTimeout: number | undefined
+ const delay = setTimeout(() => {
+ if (!resolved) {
+ resolveIceCandidates()
+ extrasInterval = setInterval(() => {
+ sendIceCandidates()
+ }, config.iceCandidates.extrasInterval)
+ extrasTimeout = setTimeout(() => {
+ clearInterval(extrasInterval)
+ sendIceCandidates()
+ }, config.iceCandidates.extrasTimeout)
+ }
+ }, config.iceCandidates.delay)
+
+ conn.onicecandidate = ({candidate: c}) => c && candidates.push(c)
+ conn.onicegatheringstatechange = () => {
+ if (conn.iceGatheringState == "complete") {
+ if (resolved) {
+ if (extrasInterval) clearInterval(extrasInterval)
+ if (extrasTimeout) clearTimeout(extrasTimeout)
+ sendIceCandidates()
+ } else {
+ resolveIceCandidates()
+ }
+ }
}
- }
- }
- // setupVideoElement(videos.local)
- // setupVideoElement(videos.remote)
- videos.local.srcObject = localStream
- videos.remote.srcObject = remoteStream
-}
-function callMediaConstraints(mediaType: CallMediaType): MediaStreamConstraints {
- switch (mediaType) {
- case CallMediaType.Audio:
- return {audio: true, video: false}
- case CallMediaType.Video:
- return {
- audio: true,
- video: {
- frameRate: 24,
- width: {
- min: 480,
- ideal: 720,
- max: 1280,
+ function resolveIceCandidates() {
+ if (delay) clearTimeout(delay)
+ resolved = true
+ const iceCandidates = serialize(candidates)
+ candidates = []
+ resolve(iceCandidates)
+ }
+
+ function sendIceCandidates() {
+ if (candidates.length === 0) return
+ const iceCandidates = serialize(candidates)
+ candidates = []
+ sendMessageToNative({resp: {type: "ice", iceCandidates}})
+ }
+ })
+
+ return {connection: conn, iceCandidates, localMedia: mediaType, localStream}
+
+ function connectionStateChange() {
+ sendMessageToNative({
+ resp: {
+ type: "connection",
+ state: {
+ connectionState: conn.connectionState,
+ iceConnectionState: conn.iceConnectionState,
+ iceGatheringState: conn.iceGatheringState,
+ signalingState: conn.signalingState,
},
- aspectRatio: 1.33,
},
+ })
+ if (conn.connectionState == "disconnected" || conn.connectionState == "failed") {
+ conn.removeEventListener("connectionstatechange", connectionStateChange)
+ sendMessageToNative({resp: {type: "ended"}})
+ conn.close()
+ activeCall = undefined
+ resetVideoElements()
}
- }
-}
-
-function supportsInsertableStreams(): boolean {
- return "createEncodedStreams" in RTCRtpSender.prototype && "createEncodedStreams" in RTCRtpReceiver.prototype
-}
-
-interface VideoElements {
- local: HTMLMediaElement
- remote: HTMLMediaElement
-}
-
-function resetVideoElements() {
- const videos = getVideoElements()
- if (!videos) return
- videos.local.srcObject = null
- videos.remote.srcObject = null
-}
-
-function getVideoElements(): VideoElements | undefined {
- const local = document.getElementById("local-video-stream")
- const remote = document.getElementById("remote-video-stream")
- if (!(local && remote && local instanceof HTMLMediaElement && remote instanceof HTMLMediaElement)) return
- return {local, remote}
-}
-
-// function setupVideoElement(video: HTMLElement) {
-// // TODO use display: none
-// video.style.opacity = "0"
-// video.onplaying = () => {
-// video.style.opacity = "1"
-// }
-// }
-
-function enableMedia(s: MediaStream, media: CallMediaType, enable: boolean) {
- const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
- for (const t of tracks) t.enabled = enable
-}
-
-/* Stream Transforms */
-function setupPeerTransform(
- peer: RTCRtpSenderWithEncryption | RTCRtpReceiverWithEncryption,
- transform: (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => void
-) {
- const streams = peer.createEncodedStreams()
- streams.readable.pipeThrough(new TransformStream({transform})).pipeTo(streams.writable)
-}
-
-/* Cryptography */
-function encodeFunction(key: CryptoKey): (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => void {
- return async (frame, controller) => {
- const data = new Uint8Array(frame.data)
- const n = frame instanceof RTCEncodedVideoFrame ? initialPlainTextRequired[frame.type] : 0
- const iv = randomIV()
- const initial = data.subarray(0, n)
- const plaintext = data.subarray(n, data.byteLength)
- try {
- const ciphertext = await crypto.subtle.encrypt({name: "AES-GCM", iv: iv.buffer}, key, plaintext)
- frame.data = concatN(initial, new Uint8Array(ciphertext), iv).buffer
- controller.enqueue(frame)
- } catch (e) {
- console.log(`encryption error ${e}`)
- throw e
}
}
-}
-function decodeFunction(key: CryptoKey): (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void> {
- return async (frame, controller) => {
- const data = new Uint8Array(frame.data)
- const n = frame instanceof RTCEncodedVideoFrame ? initialPlainTextRequired[frame.type] : 0
- const initial = data.subarray(0, n)
- const ciphertext = data.subarray(n, data.byteLength - IV_LENGTH)
- const iv = data.subarray(data.byteLength - IV_LENGTH, data.byteLength)
+ function serialize<T>(x: T): string {
+ return LZString.compressToBase64(JSON.stringify(x))
+ }
+
+ function parse<T>(s: string): T {
+ return JSON.parse(LZString.decompressFromBase64(s)!)
+ }
+
+ Object.defineProperty(window, "processCommand", {value: processCommand})
+
+ async function processCommand(body: WVAPICall): Promise<WVApiMessage> {
+ const {corrId, command} = body
+ const pc = activeCall?.connection
+ let resp: WCallResponse
try {
- const plaintext = await crypto.subtle.decrypt({name: "AES-GCM", iv}, key, ciphertext)
- frame.data = concatN(initial, new Uint8Array(plaintext)).buffer
- controller.enqueue(frame)
+ switch (command.type) {
+ case "capabilities":
+ const encryption = supportsInsertableStreams(command.useWorker)
+ resp = {type: "capabilities", capabilities: {encryption}}
+ break
+ case "start":
+ console.log("starting call")
+ if (activeCall) {
+ resp = {type: "error", message: "start: call already started"}
+ } else if (!supportsInsertableStreams(command.useWorker) && command.aesKey) {
+ resp = {type: "error", message: "start: encryption is not supported"}
+ } else {
+ const {media, useWorker} = command
+ const encryption = supportsInsertableStreams(useWorker)
+ const aesKey = encryption ? command.aesKey : undefined
+ activeCall = await initializeCall(defaultCallConfig(encryption && !!aesKey), media, aesKey, useWorker)
+ const pc = activeCall.connection
+ const offer = await pc.createOffer()
+ await pc.setLocalDescription(offer)
+ // for debugging, returning the command for callee to use
+ // resp = {
+ // type: "offer",
+ // offer: serialize(offer),
+ // iceCandidates: await activeCall.iceCandidates,
+ // media,
+ // aesKey,
+ // }
+ resp = {
+ type: "offer",
+ offer: serialize(offer),
+ iceCandidates: await activeCall.iceCandidates,
+ capabilities: {encryption},
+ }
+ }
+ break
+ case "offer":
+ if (activeCall) {
+ resp = {type: "error", message: "accept: call already started"}
+ } else if (!supportsInsertableStreams(command.useWorker) && command.aesKey) {
+ resp = {type: "error", message: "accept: encryption is not supported"}
+ } else {
+ const offer: RTCSessionDescriptionInit = parse(command.offer)
+ const remoteIceCandidates: RTCIceCandidateInit[] = parse(command.iceCandidates)
+ const {media, aesKey, useWorker} = command
+ activeCall = await initializeCall(defaultCallConfig(!!aesKey), media, aesKey, useWorker)
+ const pc = activeCall.connection
+ await pc.setRemoteDescription(new RTCSessionDescription(offer))
+ const answer = await pc.createAnswer()
+ await pc.setLocalDescription(answer)
+ addIceCandidates(pc, remoteIceCandidates)
+ // same as command for caller to use
+ resp = {
+ type: "answer",
+ answer: serialize(answer),
+ iceCandidates: await activeCall.iceCandidates,
+ }
+ }
+ break
+ case "answer":
+ if (!pc) {
+ resp = {type: "error", message: "answer: call not started"}
+ } else if (!pc.localDescription) {
+ resp = {type: "error", message: "answer: local description is not set"}
+ } else if (pc.currentRemoteDescription) {
+ resp = {type: "error", message: "answer: remote description already set"}
+ } else {
+ const answer: RTCSessionDescriptionInit = parse(command.answer)
+ const remoteIceCandidates: RTCIceCandidateInit[] = parse(command.iceCandidates)
+ await pc.setRemoteDescription(new RTCSessionDescription(answer))
+ addIceCandidates(pc, remoteIceCandidates)
+ resp = {type: "ok"}
+ }
+ break
+ case "ice":
+ if (pc) {
+ const remoteIceCandidates: RTCIceCandidateInit[] = parse(command.iceCandidates)
+ addIceCandidates(pc, remoteIceCandidates)
+ resp = {type: "ok"}
+ } else {
+ resp = {type: "error", message: "ice: call not started"}
+ }
+ break
+ case "media":
+ if (!activeCall) {
+ resp = {type: "error", message: "media: call not started"}
+ } else if (activeCall.localMedia == CallMediaType.Audio && command.media == CallMediaType.Video) {
+ resp = {type: "error", message: "media: no video"}
+ } else {
+ enableMedia(activeCall.localStream, command.media, command.enable)
+ resp = {type: "ok"}
+ }
+ break
+ case "end":
+ if (pc) {
+ pc.close()
+ activeCall = undefined
+ resetVideoElements()
+ resp = {type: "ok"}
+ } else {
+ resp = {type: "error", message: "end: call not started"}
+ }
+ break
+ default:
+ resp = {type: "error", message: "unknown command"}
+ break
+ }
} catch (e) {
- console.log(`decryption error ${e}`)
- throw e
+ resp = {type: "error", message: (e as Error).message}
+ }
+ const apiResp = {corrId, resp, command}
+ sendMessageToNative(apiResp)
+ return apiResp
+ }
+
+ function addIceCandidates(conn: RTCPeerConnection, iceCandidates: RTCIceCandidateInit[]) {
+ for (const c of iceCandidates) {
+ conn.addIceCandidate(new RTCIceCandidate(c))
}
}
-}
-class RTCEncodedVideoFrame {
- constructor(public type: "key" | "delta", public data: ArrayBuffer) {}
-}
+ async function setUpMediaStreams(
+ pc: RTCPeerConnection,
+ localStream: MediaStream,
+ remoteStream: MediaStream,
+ aesKey?: string,
+ useWorker?: boolean
+ ): Promise<void> {
+ const videos = getVideoElements()
+ if (!videos) throw Error("no video elements")
-function randomIV() {
- return crypto.getRandomValues(new Uint8Array(IV_LENGTH))
-}
+ let key: CryptoKey | undefined
+ let worker: Worker | undefined
+ if (aesKey) {
+ key = await callCrypto.decodeAesKey(aesKey)
+ if (useWorker) {
+ const workerCode = `const callCrypto = (${callCryptoFunction.toString()})(); (${workerFunction.toString()})()`
+ worker = new Worker(URL.createObjectURL(new Blob([workerCode], {type: "text/javascript"})))
+ }
+ }
-const char_equal = "=".charCodeAt(0)
+ for (const track of localStream.getTracks()) {
+ pc.addTrack(track, localStream)
+ }
-function concatN(...bs: Uint8Array[]): Uint8Array {
- const a = new Uint8Array(bs.reduce((size, b) => size + b.byteLength, 0))
- bs.reduce((offset, b: Uint8Array) => {
- a.set(b, offset)
- return offset + b.byteLength
- }, 0)
- return a
-}
+ if (aesKey && key) {
+ console.log("set up encryption for sending")
+ for (const sender of pc.getSenders() as RTCRtpSenderWithEncryption[]) {
+ setupPeerTransform(TransformOperation.Encrypt, sender, worker, aesKey, key)
+ }
+ }
-function encodeAscii(s: string): Uint8Array {
- const a = new Uint8Array(s.length)
- let i = s.length
- while (i--) a[i] = s.charCodeAt(i)
- return a
-}
+ // Pull tracks from the remote stream as they arrive and add them to the remoteStream video
+ pc.ontrack = (event) => {
+ if (aesKey && key) {
+ console.log("set up decryption for receiving")
+ setupPeerTransform(TransformOperation.Decrypt, event.receiver as RTCRtpReceiverWithEncryption, worker, aesKey, key)
+ }
+ remoteStream.addTrack(event.track)
+ }
+ // We assume VP8 encoding in the decode/encode stages to get the initial
+ // bytes to pass as plaintext so we enforce that here.
+ // VP8 is supported by all browsers that support webrtc.
+ // Use of VP8 by default may also reduce depacketisation issues.
+ // We do not encrypt the first couple of bytes of the payload so that the
+ // video elements can work by determining video keyframes and the opus mode
+ // being used. This appears to be necessary for any video feed at all.
+ // For VP8 this is the content described in
+ // https://tools.ietf.org/html/rfc6386#section-9.1
+ // which is 10 bytes for key frames and 3 bytes for delta frames.
+ // For opus (where encodedFrame.type is not set) this is the TOC byte from
+ // https://tools.ietf.org/html/rfc6716#section-3.1
-function decodeAscii(a: Uint8Array): string {
- let s = ""
- for (let i = 0; i < a.length; i++) s += String.fromCharCode(a[i])
- return s
-}
-
-const base64chars = new Uint8Array("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("").map((c) => c.charCodeAt(0)))
-
-const base64lookup = new Array(256) as (number | undefined)[]
-base64chars.forEach((c, i) => (base64lookup[c] = i))
-
-function encodeBase64(a: Uint8Array): Uint8Array {
- const len = a.length
- const b64len = Math.ceil(len / 3) * 4
- const b64 = new Uint8Array(b64len)
-
- let j = 0
- for (let i = 0; i < len; i += 3) {
- b64[j++] = base64chars[a[i] >> 2]
- b64[j++] = base64chars[((a[i] & 3) << 4) | (a[i + 1] >> 4)]
- b64[j++] = base64chars[((a[i + 1] & 15) << 2) | (a[i + 2] >> 6)]
- b64[j++] = base64chars[a[i + 2] & 63]
+ const capabilities = RTCRtpSender.getCapabilities("video")
+ if (capabilities) {
+ const {codecs} = capabilities
+ const selectedCodecIndex = codecs.findIndex((c) => c.mimeType === "video/VP8")
+ // findIndex returns -1 when VP8 is absent; guard so splice/unshift do not corrupt the codec list
+ const selectedCodec = selectedCodecIndex >= 0 ? codecs.splice(selectedCodecIndex, 1)[0] : undefined
+ if (selectedCodec) codecs.unshift(selectedCodec)
+ for (const t of pc.getTransceivers()) {
+ if (t.sender.track?.kind === "video") {
+ t.setCodecPreferences(codecs)
+ }
+ }
+ }
+ // setupVideoElement(videos.local)
+ // setupVideoElement(videos.remote)
+ videos.local.srcObject = localStream
+ videos.remote.srcObject = remoteStream
}
- if (len % 3) b64[b64len - 1] = char_equal
- if (len % 3 === 1) b64[b64len - 2] = char_equal
+ function setupPeerTransform(
+ operation: TransformOperation,
+ peer: RTCRtpReceiverWithEncryption | RTCRtpSenderWithEncryption,
+ worker: Worker | undefined,
+ aesKey: string,
+ key: CryptoKey
+ ) {
+ if (worker && "RTCRtpScriptTransform" in window) {
+ console.log(`${operation} with worker & RTCRtpScriptTransform`)
+ peer.transform = new RTCRtpScriptTransform(worker, {operation, aesKey})
+ } else if ("createEncodedStreams" in peer) {
+ const {readable, writable} = peer.createEncodedStreams()
+ if (worker) {
+ console.log(`${operation} with worker`)
+ worker.postMessage({operation, readable, writable, aesKey}, [readable, writable] as unknown as Transferable[])
+ } else {
+ console.log(`${operation} without worker`)
+ const transform = callCrypto.transformFrame[operation](key)
+ readable.pipeThrough(new TransformStream({transform})).pipeTo(writable)
+ }
+ } else {
+ console.log(`no ${operation}`)
+ }
+ }
- return b64
+ function callMediaConstraints(mediaType: CallMediaType): MediaStreamConstraints {
+ switch (mediaType) {
+ case CallMediaType.Audio:
+ return {audio: true, video: false}
+ case CallMediaType.Video:
+ return {
+ audio: true,
+ video: {
+ frameRate: 24,
+ width: {
+ min: 480,
+ ideal: 720,
+ max: 1280,
+ },
+ aspectRatio: 1.33,
+ },
+ }
+ }
+ }
+
+ function supportsInsertableStreams(useWorker: boolean | undefined): boolean {
+ return (
+ ("createEncodedStreams" in RTCRtpSender.prototype && "createEncodedStreams" in RTCRtpReceiver.prototype) ||
+ (!!useWorker && "RTCRtpScriptTransform" in window)
+ )
+ }
+
+ interface VideoElements {
+ local: HTMLMediaElement
+ remote: HTMLMediaElement
+ }
+
+ function resetVideoElements() {
+ const videos = getVideoElements()
+ if (!videos) return
+ videos.local.srcObject = null
+ videos.remote.srcObject = null
+ }
+
+ function getVideoElements(): VideoElements | undefined {
+ const local = document.getElementById("local-video-stream")
+ const remote = document.getElementById("remote-video-stream")
+ if (!(local && remote && local instanceof HTMLMediaElement && remote instanceof HTMLMediaElement)) return
+ return {local, remote}
+ }
+
+ // function setupVideoElement(video: HTMLElement) {
+ // // TODO use display: none
+ // video.style.opacity = "0"
+ // video.onplaying = () => {
+ // video.style.opacity = "1"
+ // }
+ // }
+
+ function enableMedia(s: MediaStream, media: CallMediaType, enable: boolean) {
+ const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
+ for (const t of tracks) t.enabled = enable
+ }
+})()
+
+type TransformFrameFunc = (key: CryptoKey) => (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void>
+
+interface CallCrypto {
+ transformFrame: {[x in TransformOperation]: TransformFrameFunc}
+ decodeAesKey: (aesKey: string) => Promise<CryptoKey>
+ encodeAscii: (s: string) => Uint8Array
+ decodeAscii: (a: Uint8Array) => string
+ encodeBase64: (a: Uint8Array) => Uint8Array
+ decodeBase64: (b64: Uint8Array) => Uint8Array | undefined
}
-function decodeBase64(b64: Uint8Array): Uint8Array | undefined {
- let len = b64.length
- if (len % 4) return
- let bLen = (len * 3) / 4
+interface RTCEncodedVideoFrame {
+ type: "key" | "delta"
+ data: ArrayBuffer
+}
+
+// Cryptography function - it is loaded both in the main window and in worker context (if the worker is used)
+function callCryptoFunction(): CallCrypto {
+ const initialPlainTextRequired = {
+ key: 10,
+ delta: 3,
+ }
+
+ const IV_LENGTH = 12
+
+ function encryptFrame(key: CryptoKey): (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void> {
+ return async (frame, controller) => {
+ const data = new Uint8Array(frame.data)
+ const n = initialPlainTextRequired[frame.type] || 1
+ const iv = randomIV()
+ const initial = data.subarray(0, n)
+ const plaintext = data.subarray(n, data.byteLength)
+ try {
+ const ciphertext = await crypto.subtle.encrypt({name: "AES-GCM", iv: iv.buffer}, key, plaintext)
+ frame.data = concatN(initial, new Uint8Array(ciphertext), iv).buffer
+ controller.enqueue(frame)
+ } catch (e) {
+ console.log(`encryption error ${e}`)
+ throw e
+ }
+ }
+ }
+
+ function decryptFrame(key: CryptoKey): (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void> {
+ return async (frame, controller) => {
+ const data = new Uint8Array(frame.data)
+ const n = initialPlainTextRequired[frame.type] || 1
+ const initial = data.subarray(0, n)
+ const ciphertext = data.subarray(n, data.byteLength - IV_LENGTH)
+ const iv = data.subarray(data.byteLength - IV_LENGTH, data.byteLength)
+ try {
+ const plaintext = await crypto.subtle.decrypt({name: "AES-GCM", iv}, key, ciphertext)
+ frame.data = concatN(initial, new Uint8Array(plaintext)).buffer
+ controller.enqueue(frame)
+ } catch (e) {
+ console.log(`decryption error ${e}`)
+ throw e
+ }
+ }
+ }
+
+ function decodeAesKey(aesKey: string): Promise<CryptoKey> {
+ const keyData = callCrypto.decodeBase64(callCrypto.encodeAscii(aesKey))
+ return crypto.subtle.importKey("raw", keyData!, {name: "AES-GCM", length: 256}, false, ["encrypt", "decrypt"])
+ }
+
+ function concatN(...bs: Uint8Array[]): Uint8Array {
+ const a = new Uint8Array(bs.reduce((size, b) => size + b.byteLength, 0))
+ bs.reduce((offset, b: Uint8Array) => {
+ a.set(b, offset)
+ return offset + b.byteLength
+ }, 0)
+ return a
+ }
+
+ function randomIV() {
+ return crypto.getRandomValues(new Uint8Array(IV_LENGTH))
+ }
+
+ const base64chars = new Uint8Array(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("").map((c) => c.charCodeAt(0))
+ )
+
+ const base64lookup = new Array(256) as (number | undefined)[]
+ base64chars.forEach((c, i) => (base64lookup[c] = i))
+
+ const char_equal = "=".charCodeAt(0)
+
+ function encodeAscii(s: string): Uint8Array {
+ const a = new Uint8Array(s.length)
+ let i = s.length
+ while (i--) a[i] = s.charCodeAt(i)
+ return a
+ }
+
+ function decodeAscii(a: Uint8Array): string {
+ let s = ""
+ for (let i = 0; i < a.length; i++) s += String.fromCharCode(a[i])
+ return s
+ }
+
+ function encodeBase64(a: Uint8Array): Uint8Array {
+ const len = a.length
+ const b64len = Math.ceil(len / 3) * 4
+ const b64 = new Uint8Array(b64len)
+
+ let j = 0
+ for (let i = 0; i < len; i += 3) {
+ b64[j++] = base64chars[a[i] >> 2]
+ b64[j++] = base64chars[((a[i] & 3) << 4) | (a[i + 1] >> 4)]
+ b64[j++] = base64chars[((a[i + 1] & 15) << 2) | (a[i + 2] >> 6)]
+ b64[j++] = base64chars[a[i + 2] & 63]
+ }
+
+ if (len % 3) b64[b64len - 1] = char_equal
+ if (len % 3 === 1) b64[b64len - 2] = char_equal
+
+ return b64
+ }
+
+ function decodeBase64(b64: Uint8Array): Uint8Array | undefined {
+ let len = b64.length
+ if (len % 4) return
+ let bLen = (len * 3) / 4
- if (b64[len - 1] === char_equal) {
- len--
- bLen--
if (b64[len - 1] === char_equal) {
len--
bLen--
+ if (b64[len - 1] === char_equal) {
+ len--
+ bLen--
+ }
}
+
+ const bytes = new Uint8Array(bLen)
+
+ let i = 0
+ let pos = 0
+ while (i < len) {
+ const enc1 = base64lookup[b64[i++]]
+ const enc2 = i < len ? base64lookup[b64[i++]] : 0
+ const enc3 = i < len ? base64lookup[b64[i++]] : 0
+ const enc4 = i < len ? base64lookup[b64[i++]] : 0
+ if (enc1 === undefined || enc2 === undefined || enc3 === undefined || enc4 === undefined) return
+ bytes[pos++] = (enc1 << 2) | (enc2 >> 4)
+ bytes[pos++] = ((enc2 & 15) << 4) | (enc3 >> 2)
+ bytes[pos++] = ((enc3 & 3) << 6) | (enc4 & 63)
+ }
+
+ return bytes
}
- const bytes = new Uint8Array(bLen)
-
- let i = 0
- let pos = 0
- while (i < len) {
- const enc1 = base64lookup[b64[i++]]
- const enc2 = i < len ? base64lookup[b64[i++]] : 0
- const enc3 = i < len ? base64lookup[b64[i++]] : 0
- const enc4 = i < len ? base64lookup[b64[i++]] : 0
- if (enc1 === undefined || enc2 === undefined || enc3 === undefined || enc4 === undefined) return
- bytes[pos++] = (enc1 << 2) | (enc2 >> 4)
- bytes[pos++] = ((enc2 & 15) << 4) | (enc3 >> 2)
- bytes[pos++] = ((enc3 & 3) << 6) | (enc4 & 63)
+ return {
+ transformFrame: {encrypt: encryptFrame, decrypt: decryptFrame},
+ decodeAesKey,
+ encodeAscii,
+ decodeAscii,
+ encodeBase64,
+ decodeBase64,
+ }
+}
+
+// If the worker is used for decryption, this function code (as string) is used to load the worker via Blob
+// We have to make the worker optional, as it crashes in Android web view regardless of how it is loaded
+function workerFunction() {
+ interface WorkerMessage {
+ data: Transform
+ }
+
+ interface Transform {
+ operation: TransformOperation
+ readable: ReadableStream
+ writable: WritableStream
+ aesKey: string
+ }
+
+ // encryption with createEncodedStreams support
+ self.addEventListener("message", async ({data}: WorkerMessage) => {
+ await setupTransform(data)
+ })
+
+ // encryption using RTCRtpScriptTransform.
+ if ("RTCTransformEvent" in self) {
+ self.addEventListener("rtctransform", async ({transformer}: any) => {
+ const {operation, aesKey} = transformer.options
+ const {readable, writable} = transformer
+ await setupTransform({operation, aesKey, readable, writable})
+ })
+ }
+
+ async function setupTransform({operation, aesKey, readable, writable}: Transform): Promise<void> {
+ const key = await callCrypto.decodeAesKey(aesKey)
+ const transform = callCrypto.transformFrame[operation](key)
+ readable.pipeThrough(new TransformStream({transform})).pipeTo(writable)
}
-
- return bytes
}
diff --git a/packages/simplex-chat-webrtc/src/ui.js b/packages/simplex-chat-webrtc/src/ui.js
new file mode 100644
index 0000000000..09b413763c
--- /dev/null
+++ b/packages/simplex-chat-webrtc/src/ui.js
@@ -0,0 +1,115 @@
+;(async function run() {
+ const START_E2EE_CALL_BTN = "start-e2ee-call"
+ const START_CALL_BTN = "start-call"
+ const URL_FOR_PEER = "url-for-peer"
+ const COPY_URL_FOR_PEER_BTN = "copy-url-for-peer"
+ const DATA_FOR_PEER = "data-for-peer"
+ const COPY_DATA_FOR_PEER_BTN = "copy-data-for-peer"
+ const PASS_DATA_TO_PEER_TEXT = "pass-data-to-peer"
+ const CHAT_COMMAND_FOR_PEER = "chat-command-for-peer"
+ const COMMAND_TO_PROCESS = "command-to-process"
+ const PROCESS_COMMAND_BTN = "process-command"
+ const urlForPeer = document.getElementById(URL_FOR_PEER)
+ const dataForPeer = document.getElementById(DATA_FOR_PEER)
+ const passDataToPeerText = document.getElementById(PASS_DATA_TO_PEER_TEXT)
+ const chatCommandForPeer = document.getElementById(CHAT_COMMAND_FOR_PEER)
+ const commandToProcess = document.getElementById(COMMAND_TO_PROCESS)
+ const processCommandButton = document.getElementById(PROCESS_COMMAND_BTN)
+ const startE2EECallButton = document.getElementById(START_E2EE_CALL_BTN)
+ const {resp} = await processCommand({command: {type: "capabilities", useWorker: true}})
+ if (resp?.capabilities?.encryption) {
+ startE2EECallButton.onclick = startCall(true)
+ } else {
+ startE2EECallButton.style.display = "none"
+ }
+ const startCallButton = document.getElementById(START_CALL_BTN)
+ startCallButton.onclick = startCall()
+ const copyUrlButton = document.getElementById(COPY_URL_FOR_PEER_BTN)
+ copyUrlButton.onclick = () => {
+ navigator.clipboard.writeText(urlForPeer.innerText)
+ commandToProcess.style.display = ""
+ processCommandButton.style.display = ""
+ }
+ const copyDataButton = document.getElementById(COPY_DATA_FOR_PEER_BTN)
+ copyDataButton.onclick = () => {
+ navigator.clipboard.writeText(dataForPeer.innerText)
+ commandToProcess.style.display = ""
+ processCommandButton.style.display = ""
+ }
+ processCommandButton.onclick = () => {
+ sendCommand(JSON.parse(commandToProcess.value))
+ commandToProcess.value = ""
+ }
+ const parsed = new URLSearchParams(document.location.hash.substring(1))
+ let apiCallStr = parsed.get("command")
+ if (apiCallStr) {
+ startE2EECallButton.style.display = "none"
+ startCallButton.style.display = "none"
+ await sendCommand(JSON.parse(decodeURIComponent(apiCallStr)))
+ }
+
+ function startCall(encryption) {
+ return async () => {
+ let aesKey
+ if (encryption) {
+ const key = await crypto.subtle.generateKey({name: "AES-GCM", length: 256}, true, ["encrypt", "decrypt"])
+ const keyBytes = await crypto.subtle.exportKey("raw", key)
+ aesKey = callCrypto.decodeAscii(callCrypto.encodeBase64(new Uint8Array(keyBytes)))
+ }
+ sendCommand({command: {type: "start", media: "video", aesKey, useWorker: true}})
+ startE2EECallButton.style.display = "none"
+ startCallButton.style.display = "none"
+ }
+ }
+
+ async function sendCommand(apiCall) {
+ try {
+ console.log(apiCall)
+ const {command} = apiCall
+ const {resp} = await processCommand(apiCall)
+ console.log(resp)
+ switch (resp.type) {
+ case "offer": {
+ const {media, aesKey} = command
+ const {offer, iceCandidates, capabilities} = resp
+ const peerWCommand = {
+ command: {type: "offer", offer, iceCandidates, media, aesKey: capabilities.encryption ? aesKey : undefined, useWorker: true},
+ }
+ const url = new URL(document.location)
+ parsed.set("command", encodeURIComponent(JSON.stringify(peerWCommand)))
+ url.hash = parsed.toString()
+ urlForPeer.innerText = url.toString()
+ dataForPeer.innerText = JSON.stringify(peerWCommand)
+ copyUrlButton.style.display = ""
+ copyDataButton.style.display = ""
+
+ // const webRTCCallOffer = {callType: {media, capabilities}, rtcSession: {rtcSession: offer, rtcIceCandidates: iceCandidates}}
+ // const peerChatCommand = `/_call @${parsed.contact} offer ${JSON.stringify(webRTCCallOffer)}`
+ // chatCommandForPeer.innerText = peerChatCommand
+ return
+ }
+ case "answer": {
+ const {answer, iceCandidates} = resp
+ const peerWCommand = {command: {type: "answer", answer, iceCandidates}}
+ dataForPeer.innerText = JSON.stringify(peerWCommand)
+ copyUrlButton.style.display = "none"
+ copyDataButton.style.display = ""
+
+ // const webRTCSession = {rtcSession: answer, rtcIceCandidates: iceCandidates}
+ // const peerChatCommand = `/_call @${parsed.contact} answer ${JSON.stringify(webRTCSession)}`
+ // chatCommandForPeer.innerText = peerChatCommand
+ return
+ }
+ case "ok":
+ if (command.type === "answer") {
+ console.log("connecting")
+ commandToProcess.style.display = "none"
+ processCommandButton.style.display = "none"
+ }
+ return
+ }
+ } catch (e) {
+ console.log("error: ", e)
+ }
+ }
+})()
diff --git a/packages/simplex-chat-webrtc/src/webcall.html b/packages/simplex-chat-webrtc/src/webcall.html
new file mode 100644
index 0000000000..e648df8537
--- /dev/null
+++ b/packages/simplex-chat-webrtc/src/webcall.html
@@ -0,0 +1,51 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Send copied data back to your contact
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/packages/simplex-chat-webrtc/tsconfig.json b/packages/simplex-chat-webrtc/tsconfig.json
index d4d0acb3d0..061f01e8c6 100644
--- a/packages/simplex-chat-webrtc/tsconfig.json
+++ b/packages/simplex-chat-webrtc/tsconfig.json
@@ -16,6 +16,7 @@
"sourceMap": true,
"strict": true,
"strictNullChecks": true,
- "target": "ES2018"
+ "target": "ES2018",
+ "types": ["lz-string"]
}
}
diff --git a/src/Simplex/Chat/Call.hs b/src/Simplex/Chat/Call.hs
index a0f3707dd4..c4577c6b1c 100644
--- a/src/Simplex/Chat/Call.hs
+++ b/src/Simplex/Chat/Call.hs
@@ -177,8 +177,8 @@ instance ToJSON CallExtraInfo where
toEncoding = J.genericToEncoding J.defaultOptions
data WebRTCSession = WebRTCSession
- { rtcSession :: Text,
- rtcIceCandidates :: [Text]
+ { rtcSession :: Text, -- LZW compressed JSON encoding of offer or answer
+ rtcIceCandidates :: Text -- LZW compressed JSON encoding of array of ICE candidates
}
deriving (Eq, Show, Generic, FromJSON)
@@ -187,7 +187,7 @@ instance ToJSON WebRTCSession where
toEncoding = J.genericToEncoding J.defaultOptions
data WebRTCExtraInfo = WebRTCExtraInfo
- { rtcIceCandidates :: [Text]
+ { rtcIceCandidates :: Text -- LZW compressed JSON encoding of array of ICE candidates
}
deriving (Eq, Show, Generic, FromJSON)
diff --git a/tests/ChatTests.hs b/tests/ChatTests.hs
index f3ad49831c..254da1eb0a 100644
--- a/tests/ChatTests.hs
+++ b/tests/ChatTests.hs
@@ -1870,7 +1870,7 @@ testWebRTCSession :: WebRTCSession
testWebRTCSession =
WebRTCSession
{ rtcSession = "{}",
- rtcIceCandidates = [""]
+ rtcIceCandidates = "[]"
}
testWebRTCCallOffer :: WebRTCCallOffer