From 4ca07338ea6e5d0fae787d9a546e7a435391be41 Mon Sep 17 00:00:00 2001
From: Avently <7953703+avently@users.noreply.github.com>
Date: Fri, 30 Aug 2024 23:32:28 +0900
Subject: [PATCH] android, desktop: calls switching from audio to video and
back
---
.../common/views/call/CallView.android.kt | 79 +++--
.../views/chatlist/ChatListView.android.kt | 2 +-
.../chat/simplex/common/model/SimpleXAPI.kt | 2 +-
.../chat/simplex/common/views/call/WebRTC.kt | 20 +-
.../commonMain/resources/assets/www/call.js | 222 ++++++++++++--
.../resources/assets/www/desktop/ui.js | 52 +++-
.../common/views/call/CallView.desktop.kt | 8 +
.../views/chatlist/ChatListView.desktop.kt | 2 +-
.../desktop/hs_err_pid626666.log | 110 +++++++
packages/simplex-chat-webrtc/src/call.ts | 289 ++++++++++++++++--
.../simplex-chat-webrtc/src/desktop/ui.ts | 55 +++-
11 files changed, 726 insertions(+), 115 deletions(-)
create mode 100644 apps/multiplatform/desktop/hs_err_pid626666.log
diff --git a/apps/multiplatform/common/src/androidMain/kotlin/chat/simplex/common/views/call/CallView.android.kt b/apps/multiplatform/common/src/androidMain/kotlin/chat/simplex/common/views/call/CallView.android.kt
index 22f0c8d70b..8f364ec070 100644
--- a/apps/multiplatform/common/src/androidMain/kotlin/chat/simplex/common/views/call/CallView.android.kt
+++ b/apps/multiplatform/common/src/androidMain/kotlin/chat/simplex/common/views/call/CallView.android.kt
@@ -162,6 +162,16 @@ actual fun ActiveCallView() {
is WCallResponse.Connected -> {
updateActiveCall(call) { it.copy(callState = CallState.Connected, connectionInfo = r.connectionInfo) }
}
+ is WCallResponse.PeerMedia -> {
+ updateActiveCall(call) {
+ val sources = it.peerMediaSources
+ when (r.source) {
+ CallMediaSource.Mic -> it.copy(peerMediaSources = sources.copy(mic = r.enabled))
+ CallMediaSource.Camera -> it.copy(peerMediaSources = sources.copy(camera = r.enabled))
+ CallMediaSource.Screen -> it.copy(peerMediaSources = sources.copy(screen = r.enabled))
+ }
+ }
+ }
is WCallResponse.End -> {
withBGApi { chatModel.callManager.endCall(call) }
}
@@ -175,7 +185,7 @@ actual fun ActiveCallView() {
is WCallCommand.Media -> {
updateActiveCall(call) {
when (cmd.media) {
- CallMediaType.Video -> it.copy(videoEnabled = cmd.enable)
+ CallMediaType.Video -> it.copy(videoEnabled = cmd.enable, localMedia = if (cmd.enable) CallMediaType.Video else CallMediaType.Audio)
CallMediaType.Audio -> it.copy(audioEnabled = cmd.enable)
}
}
@@ -293,9 +303,9 @@ private fun ActiveCallOverlayLayout(
flipCamera: () -> Unit
) {
Column {
- val media = call.peerMedia ?: call.localMedia
+ val supportsVideo = call.supportsVideo()
CloseSheetBar({ chatModel.activeCallViewIsCollapsed.value = true }, true, tintColor = Color(0xFFFFFFD8)) {
- if (media == CallMediaType.Video) {
+ if (supportsVideo) {
Text(call.contact.chatViewName, Modifier.fillMaxWidth().padding(end = DEFAULT_PADDING), color = Color(0xFFFFFFD8), style = MaterialTheme.typography.h2, overflow = TextOverflow.Ellipsis, maxLines = 1)
}
}
@@ -327,29 +337,12 @@ private fun ActiveCallOverlayLayout(
}
}
- when (media) {
- CallMediaType.Video -> {
+ when (supportsVideo) {
+ true -> {
VideoCallInfoView(call)
- Box(Modifier.fillMaxWidth().fillMaxHeight().weight(1f), contentAlignment = Alignment.BottomCenter) {
- DisabledBackgroundCallsButton()
- }
- Row(Modifier.fillMaxWidth().padding(horizontal = 6.dp), horizontalArrangement = Arrangement.SpaceBetween, verticalAlignment = Alignment.CenterVertically) {
- ToggleAudioButton(call, enabled, toggleAudio)
- SelectSoundDevice()
- IconButton(onClick = dismiss, enabled = enabled) {
- Icon(painterResource(MR.images.ic_call_end_filled), stringResource(MR.strings.icon_descr_hang_up), tint = if (enabled) Color.Red else MaterialTheme.colors.secondary, modifier = Modifier.size(64.dp))
- }
- if (call.videoEnabled) {
- ControlButton(call, painterResource(MR.images.ic_flip_camera_android_filled), MR.strings.icon_descr_flip_camera, enabled, flipCamera)
- ControlButton(call, painterResource(MR.images.ic_videocam_filled), MR.strings.icon_descr_video_off, enabled, toggleVideo)
- } else {
- Spacer(Modifier.size(48.dp))
- ControlButton(call, painterResource(MR.images.ic_videocam_off), MR.strings.icon_descr_video_on, enabled, toggleVideo)
- }
- }
}
- CallMediaType.Audio -> {
+ false -> {
Spacer(Modifier.fillMaxHeight().weight(1f))
Column(
Modifier.fillMaxWidth(),
@@ -359,24 +352,24 @@ private fun ActiveCallOverlayLayout(
ProfileImage(size = 192.dp, image = call.contact.profile.image)
AudioCallInfoView(call)
}
- Box(Modifier.fillMaxWidth().fillMaxHeight().weight(1f), contentAlignment = Alignment.BottomCenter) {
- DisabledBackgroundCallsButton()
- }
- Box(Modifier.fillMaxWidth().padding(bottom = DEFAULT_BOTTOM_PADDING), contentAlignment = Alignment.CenterStart) {
- Box(Modifier.fillMaxWidth(), contentAlignment = Alignment.Center) {
- IconButton(onClick = dismiss, enabled = enabled) {
- Icon(painterResource(MR.images.ic_call_end_filled), stringResource(MR.strings.icon_descr_hang_up), tint = if (enabled) Color.Red else MaterialTheme.colors.secondary, modifier = Modifier.size(64.dp))
- }
- }
- Box(Modifier.padding(start = 32.dp)) {
- ToggleAudioButton(call, enabled, toggleAudio)
- }
- Box(Modifier.fillMaxWidth(), contentAlignment = Alignment.CenterEnd) {
- Box(Modifier.padding(end = 32.dp)) {
- SelectSoundDevice()
- }
- }
- }
+ }
+ }
+ Box(Modifier.fillMaxWidth().fillMaxHeight().weight(1f), contentAlignment = Alignment.BottomCenter) {
+ DisabledBackgroundCallsButton()
+ }
+
+ Row(Modifier.fillMaxWidth().padding(horizontal = 6.dp), horizontalArrangement = Arrangement.SpaceBetween, verticalAlignment = Alignment.CenterVertically) {
+ ToggleAudioButton(call, enabled, toggleAudio)
+ SelectSoundDevice()
+ IconButton(onClick = dismiss, enabled = enabled) {
+ Icon(painterResource(MR.images.ic_call_end_filled), stringResource(MR.strings.icon_descr_hang_up), tint = if (enabled) Color.Red else MaterialTheme.colors.secondary, modifier = Modifier.size(64.dp))
+ }
+ if (call.videoEnabled) {
+ ControlButton(call, painterResource(MR.images.ic_flip_camera_android_filled), MR.strings.icon_descr_flip_camera, enabled, flipCamera)
+ ControlButton(call, painterResource(MR.images.ic_videocam_filled), MR.strings.icon_descr_video_off, enabled, toggleVideo)
+ } else {
+ Spacer(Modifier.size(48.dp))
+ ControlButton(call, painterResource(MR.images.ic_videocam_off), MR.strings.icon_descr_video_on, enabled, toggleVideo)
}
}
}
@@ -769,7 +762,7 @@ fun PreviewActiveCallOverlayVideo() {
contact = Contact.sampleData,
callState = CallState.Negotiated,
localMedia = CallMediaType.Video,
- peerMedia = CallMediaType.Video,
+ peerMediaSources = CallMediaSources(),
callUUID = "",
connectionInfo = ConnectionInfo(
RTCIceCandidate(RTCIceCandidateType.Host, "tcp"),
@@ -799,7 +792,7 @@ fun PreviewActiveCallOverlayAudio() {
contact = Contact.sampleData,
callState = CallState.Negotiated,
localMedia = CallMediaType.Audio,
- peerMedia = CallMediaType.Audio,
+ peerMediaSources = CallMediaSources(),
callUUID = "",
connectionInfo = ConnectionInfo(
RTCIceCandidate(RTCIceCandidateType.Host, "udp"),
diff --git a/apps/multiplatform/common/src/androidMain/kotlin/chat/simplex/common/views/chatlist/ChatListView.android.kt b/apps/multiplatform/common/src/androidMain/kotlin/chat/simplex/common/views/chatlist/ChatListView.android.kt
index 3283593e09..cb72e5dde3 100644
--- a/apps/multiplatform/common/src/androidMain/kotlin/chat/simplex/common/views/chatlist/ChatListView.android.kt
+++ b/apps/multiplatform/common/src/androidMain/kotlin/chat/simplex/common/views/chatlist/ChatListView.android.kt
@@ -54,7 +54,7 @@ actual fun ActiveCallInteractiveArea(call: Call) {
.align(Alignment.BottomCenter),
contentAlignment = Alignment.Center
) {
- val media = call.peerMedia ?: call.localMedia
+    val media = if (call.supportsVideo()) CallMediaType.Video else CallMediaType.Audio
if (media == CallMediaType.Video) {
Icon(painterResource(MR.images.ic_videocam_filled), null, Modifier.size(27.dp).offset(x = 2.5.dp, y = 2.dp), tint = Color.White)
} else {
diff --git a/apps/multiplatform/common/src/commonMain/kotlin/chat/simplex/common/model/SimpleXAPI.kt b/apps/multiplatform/common/src/commonMain/kotlin/chat/simplex/common/model/SimpleXAPI.kt
index c621b9eacf..e7a79bef0e 100644
--- a/apps/multiplatform/common/src/commonMain/kotlin/chat/simplex/common/model/SimpleXAPI.kt
+++ b/apps/multiplatform/common/src/commonMain/kotlin/chat/simplex/common/model/SimpleXAPI.kt
@@ -2409,7 +2409,7 @@ object ChatController {
// TODO askConfirmation?
// TODO check encryption is compatible
withCall(r, r.contact) { call ->
- chatModel.activeCall.value = call.copy(callState = CallState.OfferReceived, peerMedia = r.callType.media, sharedKey = r.sharedKey)
+ chatModel.activeCall.value = call.copy(callState = CallState.OfferReceived, sharedKey = r.sharedKey)
val useRelay = appPrefs.webrtcPolicyRelay.get()
val iceServers = getIceServers()
Log.d(TAG, ".callOffer iceServers $iceServers")
diff --git a/apps/multiplatform/common/src/commonMain/kotlin/chat/simplex/common/views/call/WebRTC.kt b/apps/multiplatform/common/src/commonMain/kotlin/chat/simplex/common/views/call/WebRTC.kt
index 5332bc650e..becedba727 100644
--- a/apps/multiplatform/common/src/commonMain/kotlin/chat/simplex/common/views/call/WebRTC.kt
+++ b/apps/multiplatform/common/src/commonMain/kotlin/chat/simplex/common/views/call/WebRTC.kt
@@ -17,7 +17,7 @@ data class Call(
val callState: CallState,
val localMedia: CallMediaType,
val localCapabilities: CallCapabilities? = null,
- val peerMedia: CallMediaType? = null,
+ val peerMediaSources: CallMediaSources = CallMediaSources(),
val sharedKey: String? = null,
val audioEnabled: Boolean = true,
val videoEnabled: Boolean = localMedia == CallMediaType.Video,
@@ -37,7 +37,7 @@ data class Call(
val hasMedia: Boolean get() = callState == CallState.OfferSent || callState == CallState.Negotiated || callState == CallState.Connected
- fun supportsVideo(): Boolean = peerMedia == CallMediaType.Video || localMedia == CallMediaType.Video
+ fun supportsVideo(): Boolean = peerMediaSources.hasVideo() || localMedia == CallMediaType.Video
}
@@ -68,6 +68,14 @@ enum class CallState {
@Serializable data class WVAPICall(val corrId: Int? = null, val command: WCallCommand)
@Serializable data class WVAPIMessage(val corrId: Int? = null, val resp: WCallResponse, val command: WCallCommand? = null)
+@Serializable data class CallMediaSources(
+ val mic: Boolean = false,
+ val camera: Boolean = false,
+ val screen: Boolean = false
+) {
+ fun hasVideo() = camera || screen
+}
+
@Serializable
sealed class WCallCommand {
@Serializable @SerialName("capabilities") data class Capabilities(val media: CallMediaType): WCallCommand()
@@ -90,6 +98,7 @@ sealed class WCallResponse {
@Serializable @SerialName("ice") data class Ice(val iceCandidates: String): WCallResponse()
@Serializable @SerialName("connection") data class Connection(val state: ConnectionState): WCallResponse()
@Serializable @SerialName("connected") data class Connected(val connectionInfo: ConnectionInfo): WCallResponse()
+ @Serializable @SerialName("peerMedia") data class PeerMedia(val media: CallMediaType, val source: CallMediaSource, val enabled: Boolean): WCallResponse()
@Serializable @SerialName("end") object End: WCallResponse()
@Serializable @SerialName("ended") object Ended: WCallResponse()
@Serializable @SerialName("ok") object Ok: WCallResponse()
@@ -165,6 +174,13 @@ enum class CallMediaType {
@SerialName("audio") Audio
}
+@Serializable
+enum class CallMediaSource {
+ @SerialName("mic") Mic,
+ @SerialName("camera") Camera,
+ @SerialName("screen") Screen
+}
+
@Serializable
enum class VideoCamera {
@SerialName("user") User,
diff --git a/apps/multiplatform/common/src/commonMain/resources/assets/www/call.js b/apps/multiplatform/common/src/commonMain/resources/assets/www/call.js
index 571c494c7c..8fe29a734e 100644
--- a/apps/multiplatform/common/src/commonMain/resources/assets/www/call.js
+++ b/apps/multiplatform/common/src/commonMain/resources/assets/www/call.js
@@ -6,6 +6,13 @@ var CallMediaType;
CallMediaType["Audio"] = "audio";
CallMediaType["Video"] = "video";
})(CallMediaType || (CallMediaType = {}));
+var CallMediaSource;
+(function (CallMediaSource) {
+ CallMediaSource["Mic"] = "mic";
+ CallMediaSource["Camera"] = "camera";
+ CallMediaSource["Screen"] = "screen";
+ CallMediaSource["Unknown"] = "unknown";
+})(CallMediaSource || (CallMediaSource = {}));
var VideoCamera;
(function (VideoCamera) {
VideoCamera["User"] = "user";
@@ -131,6 +138,7 @@ const processCommand = (function () {
.filter((elem) => elem.kind == "video")
.forEach((elem) => (elem.enabled = false));
}
+        // Will become video when any video track is added
const iceCandidates = getIceCandidates(pc, config);
const call = {
connection: pc,
@@ -139,9 +147,14 @@ const processCommand = (function () {
localCamera,
localStream,
remoteStream,
+ peerMediaSources: {
+ mic: false,
+ camera: false,
+ screen: false,
+ },
aesKey,
screenShareEnabled: false,
- cameraEnabled: true,
+ cameraEnabled: !isDesktop,
};
await setupMediaStreams(call);
let connectionTimeout = setTimeout(connectionHandler, answerTimeout);
@@ -232,6 +245,13 @@ const processCommand = (function () {
const aesKey = encryption ? command.aesKey : undefined;
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey);
const pc = activeCall.connection;
+ if (media == CallMediaType.Audio) {
+ console.log("LALAL ADDING TRANSCEIVER for video");
+ // For camera. So the first video in the list is for camera
+ pc.addTransceiver("video", { streams: [activeCall.localStream] });
+ }
+ // For screenshare. So the second video in the list is for screenshare
+ pc.addTransceiver("video", { streams: [activeCall.localStream] });
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
// for debugging, returning the command for callee to use
@@ -269,7 +289,11 @@ const processCommand = (function () {
const pc = activeCall.connection;
// console.log("offer remoteIceCandidates", JSON.stringify(remoteIceCandidates))
await pc.setRemoteDescription(new RTCSessionDescription(offer));
- const answer = await pc.createAnswer();
+ pc.getTransceivers().forEach((elem) => (elem.direction = "sendrecv"));
+ console.log("LALAL TRANSCE", pc.getTransceivers());
+ let answer = await pc.createAnswer();
+ console.log("LALAL SDP", answer, answer.sdp);
+ // answer!.sdp = answer.sdp?.replace("a=recvonly", "a=sendrecv")
await pc.setLocalDescription(answer);
addIceCandidates(pc, remoteIceCandidates);
// same as command for caller to use
@@ -295,6 +319,7 @@ const processCommand = (function () {
const answer = parse(command.answer);
const remoteIceCandidates = parse(command.iceCandidates);
// console.log("answer remoteIceCandidates", JSON.stringify(remoteIceCandidates))
+ console.log("LALAL SDP2", answer, answer.sdp);
await pc.setRemoteDescription(new RTCSessionDescription(answer));
addIceCandidates(pc, remoteIceCandidates);
resp = { type: "ok" };
@@ -314,8 +339,9 @@ const processCommand = (function () {
if (!activeCall) {
resp = { type: "error", message: "media: call not started" };
}
- else if (activeCall.localMedia == CallMediaType.Audio && command.media == CallMediaType.Video) {
- resp = { type: "error", message: "media: no video" };
+ else if (activeCall.localMedia == CallMediaType.Audio && command.media == CallMediaType.Video && command.enable) {
+ await startSendingVideo(activeCall, activeCall.localCamera);
+ resp = { type: "ok" };
}
else {
enableMedia(activeCall.localStream, command.media, command.enable);
@@ -399,6 +425,12 @@ const processCommand = (function () {
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], { type: "text/javascript" })));
call.worker.onerror = ({ error, filename, lineno, message }) => console.log({ error, filename, lineno, message });
// call.worker.onmessage = ({data}) => console.log(JSON.stringify({message: data}))
+ call.worker.onmessage = ({ data }) => {
+ console.log(JSON.stringify({ message: data }));
+ const transceiverMid = data.transceiverMid;
+ const mute = data.mute;
+ onMediaMuteUnmute(transceiverMid, mute);
+ };
}
}
}
@@ -413,8 +445,9 @@ const processCommand = (function () {
}
if (call.aesKey && call.key) {
console.log("set up encryption for sending");
- for (const sender of pc.getSenders()) {
- setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key);
+ for (const transceiver of pc.getTransceivers()) {
+ const sender = transceiver.sender;
+            setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key, sender.track && sender.track.kind == "video" ? CallMediaType.Video : CallMediaType.Audio, transceiver.mid);
}
}
}
@@ -422,16 +455,49 @@ const processCommand = (function () {
// Pull tracks from remote stream as they arrive add them to remoteStream video
const pc = call.connection;
pc.ontrack = (event) => {
+ console.log("LALAL ON TRACK ", event);
try {
if (call.aesKey && call.key) {
console.log("set up decryption for receiving");
- setupPeerTransform(TransformOperation.Decrypt, event.receiver, call.worker, call.aesKey, call.key);
+ setupPeerTransform(TransformOperation.Decrypt, event.receiver, call.worker, call.aesKey, call.key, event.receiver.track.kind == "video" ? CallMediaType.Video : CallMediaType.Audio, event.transceiver.mid);
}
- for (const stream of event.streams) {
- for (const track of stream.getTracks()) {
- call.remoteStream.addTrack(track);
+ // const source = mediaSourceFromTransceiverMid(event.transceiver.mid)
+ // const sources = call.peerMediaSources
+ // if (source == CallMediaSource.Mic) {
+ // sources.mic = true
+ // } else if (source == CallMediaSource.Camera) {
+ // sources.camera = true
+ // } else if (source == CallMediaSource.Screen) {
+ // sources.screen = true
+ // }
+ // call.peerMediaSources = sources
+ if (event.streams.length > 0) {
+ for (const stream of event.streams) {
+ for (const track of stream.getTracks()) {
+ call.remoteStream.addTrack(track);
+ // const resp: WRPeerMedia = {
+ // type: "peerMedia",
+ // media: track.kind == "audio" ? CallMediaType.Audio : CallMediaType.Video,
+ // source: source,
+ // enabled: track.enabled,
+ // }
+ // console.log("LALAL ADDED REMOTE", track, track.kind)
+ // sendMessageToNative({resp: resp})
+ }
}
}
+ else {
+ const track = event.track;
+ call.remoteStream.addTrack(track);
+ // const resp: WRPeerMedia = {
+ // type: "peerMedia",
+ // media: track.kind == "audio" ? CallMediaType.Audio : CallMediaType.Video,
+ // source: source,
+ // enabled: track.enabled,
+ // }
+ // console.log("LALAL ADDED REMOTE", track, track.kind)
+ // sendMessageToNative({resp: resp})
+ }
console.log(`ontrack success`);
}
catch (e) {
@@ -479,6 +545,42 @@ const processCommand = (function () {
}
}
}
+ async function startSendingVideo(call, camera) {
+ console.log("LALAL STARTING SENDING VIDEO");
+ const videos = getVideoElements();
+ if (!videos)
+ throw Error("no video elements");
+ const pc = call.connection;
+ // Taking the first video transceiver and use it for sending video from camera. Following tracks are for other purposes
+ const tc = pc.getTransceivers().find((tc) => tc.receiver.track.kind == "video" && tc.direction == "sendrecv");
+ console.log(pc.getTransceivers().map((elem) => { var _a, _b; return "" + ((_a = elem.sender.track) === null || _a === void 0 ? void 0 : _a.kind) + " " + ((_b = elem.receiver.track) === null || _b === void 0 ? void 0 : _b.kind) + " " + elem.direction; }));
+ let localStream;
+ try {
+ localStream = await getLocalMediaStream(CallMediaType.Video, camera);
+ for (const t of localStream.getVideoTracks()) {
+ console.log("LALAL TC", tc, pc.getTransceivers());
+ call.localStream.addTrack(t);
+ tc === null || tc === void 0 ? void 0 : tc.sender.replaceTrack(t);
+ localStream.removeTrack(t);
+ // when adding track a `sender` will be created on that track automatically
+ //pc.addTrack(t, call.localStream)
+ console.log("LALAL ADDED VIDEO TRACK " + t);
+ }
+ call.localMedia = CallMediaType.Video;
+ call.cameraEnabled = true;
+ }
+ catch (e) {
+ return;
+ }
+ const sender = tc === null || tc === void 0 ? void 0 : tc.sender;
+ console.log("LALAL SENDER " + sender + " " + (sender === null || sender === void 0 ? void 0 : sender.getParameters()));
+ if (call.aesKey && call.key && sender) {
+ setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key, CallMediaType.Video, tc.mid);
+ }
+ // Without doing it manually Firefox shows black screen but video can be played in Picture-in-Picture
+ videos.local.play();
+ console.log("LALAL SENDING VIDEO");
+ }
async function replaceMedia(call, camera) {
const videos = getVideoElements();
if (!videos)
@@ -535,20 +637,39 @@ const processCommand = (function () {
for (const t of tracks)
sender.replaceTrack(t);
}
- function setupPeerTransform(operation, peer, worker, aesKey, key) {
+ function mediaSourceFromTransceiverMid(mid) {
+ switch (mid) {
+ case "0":
+ return CallMediaSource.Mic;
+ case "1":
+ return CallMediaSource.Camera;
+ case "2":
+ return CallMediaSource.Screen;
+ default:
+ return CallMediaSource.Unknown;
+ }
+ }
+ function setupPeerTransform(operation, peer, worker, aesKey, key, media, transceiverMid) {
+ console.log("LALAL MEDIA " + media + " " + transceiverMid);
if (worker && "RTCRtpScriptTransform" in window) {
console.log(`${operation} with worker & RTCRtpScriptTransform`);
- peer.transform = new RTCRtpScriptTransform(worker, { operation, aesKey });
+ peer.transform = new RTCRtpScriptTransform(worker, { operation, aesKey, media, transceiverMid });
}
else if ("createEncodedStreams" in peer) {
const { readable, writable } = peer.createEncodedStreams();
if (worker) {
console.log(`${operation} with worker`);
- worker.postMessage({ operation, readable, writable, aesKey }, [readable, writable]);
+ worker.postMessage({ operation, readable, writable, aesKey, media, transceiverMid }, [
+ readable,
+ writable,
+ ]);
}
else {
console.log(`${operation} without worker`);
- const transform = callCrypto.transformFrame[operation](key);
+ const onMediaMuteUnmuteConst = (mute) => {
+ onMediaMuteUnmute(transceiverMid, mute);
+ };
+ const transform = callCrypto.transformFrame[operation](key, onMediaMuteUnmuteConst);
readable.pipeThrough(new TransformStream({ transform })).pipeTo(writable);
}
}
@@ -556,6 +677,46 @@ const processCommand = (function () {
console.log(`no ${operation}`);
}
}
+ function onMediaMuteUnmute(transceiverMid, mute) {
+ if (activeCall) {
+ const source = mediaSourceFromTransceiverMid(transceiverMid);
+ console.log("LALAL ON MUTE/UNMUTE", mute, source, transceiverMid);
+ const sources = activeCall.peerMediaSources;
+ if (source == CallMediaSource.Mic && activeCall.peerMediaSources.mic == mute) {
+ const resp = {
+ type: "peerMedia",
+ media: CallMediaType.Audio,
+ source: source,
+ enabled: !mute,
+ };
+ sources.mic = !mute;
+ activeCall.peerMediaSources = sources;
+ sendMessageToNative({ resp: resp });
+ }
+ else if (source == CallMediaSource.Camera && activeCall.peerMediaSources.camera == mute) {
+ const resp = {
+ type: "peerMedia",
+ media: CallMediaType.Video,
+ source: source,
+ enabled: !mute,
+ };
+ sources.camera = !mute;
+ activeCall.peerMediaSources = sources;
+ sendMessageToNative({ resp: resp });
+ }
+ else if (source == CallMediaSource.Screen && activeCall.peerMediaSources.screen == mute) {
+ const resp = {
+ type: "peerMedia",
+ media: CallMediaType.Video,
+ source: source,
+ enabled: !mute,
+ };
+ sources.screen = !mute;
+ activeCall.peerMediaSources = sources;
+ sendMessageToNative({ resp: resp });
+ }
+ }
+ }
function getLocalMediaStream(mediaType, facingMode) {
const constraints = callMediaConstraints(mediaType, facingMode);
return navigator.mediaDevices.getUserMedia(constraints);
@@ -703,6 +864,7 @@ function callCryptoFunction() {
: new Uint8Array(0);
frame.data = concatN(initial, ciphertext, iv).buffer;
controller.enqueue(frame);
+ // console.log("LALAL ENCRYPT", frame.data.byteLength)
}
catch (e) {
console.log(`encryption error ${e}`);
@@ -710,7 +872,9 @@ function callCryptoFunction() {
}
};
}
- function decryptFrame(key) {
+ function decryptFrame(key, onMediaMuteUnmute) {
+ let wasMuted = true;
+ let lastBytes = [];
return async (frame, controller) => {
const data = new Uint8Array(frame.data);
const n = initialPlainTextRequired[frame.type] || 1;
@@ -723,6 +887,23 @@ function callCryptoFunction() {
: new Uint8Array(0);
frame.data = concatN(initial, plaintext).buffer;
controller.enqueue(frame);
+ lastBytes.push(frame.data.byteLength);
+ const sliced = lastBytes.slice(-20, lastBytes.length);
+ const average = sliced.reduce((prev, value) => value + prev, 0) / Math.max(1, sliced.length);
+ if (lastBytes.length > 20) {
+ console.log("LALAL REPLACED", lastBytes.length, sliced.length);
+ lastBytes = sliced;
+ }
+ console.log("LALAL DECRYPT", frame.type, frame.data.byteLength, average);
+ // frame.type is undefined for audio stream, but defined for video
+ if (frame.type && wasMuted && average > 200) {
+ wasMuted = false;
+ onMediaMuteUnmute(false);
+ }
+ else if (frame.type && !wasMuted && average < 200) {
+ wasMuted = true;
+ onMediaMuteUnmute(true);
+ }
}
catch (e) {
console.log(`decryption error ${e}`);
@@ -828,9 +1009,9 @@ function workerFunction() {
if ("RTCTransformEvent" in self) {
self.addEventListener("rtctransform", async ({ transformer }) => {
try {
- const { operation, aesKey } = transformer.options;
+ const { operation, aesKey, transceiverMid } = transformer.options;
const { readable, writable } = transformer;
- await setupTransform({ operation, aesKey, readable, writable });
+ await setupTransform({ operation, aesKey, transceiverMid, readable, writable });
self.postMessage({ result: "setupTransform success" });
}
catch (e) {
@@ -838,9 +1019,12 @@ function workerFunction() {
}
});
}
- async function setupTransform({ operation, aesKey, readable, writable }) {
+ async function setupTransform({ operation, aesKey, transceiverMid, readable, writable }) {
const key = await callCrypto.decodeAesKey(aesKey);
- const transform = callCrypto.transformFrame[operation](key);
+ const onMediaMuteUnmute = (mute) => {
+ self.postMessage({ transceiverMid: transceiverMid, mute: mute });
+ };
+ const transform = callCrypto.transformFrame[operation](key, onMediaMuteUnmute);
readable.pipeThrough(new TransformStream({ transform })).pipeTo(writable);
}
}
diff --git a/apps/multiplatform/common/src/commonMain/resources/assets/www/desktop/ui.js b/apps/multiplatform/common/src/commonMain/resources/assets/www/desktop/ui.js
index 6cf711b790..f448d99373 100644
--- a/apps/multiplatform/common/src/commonMain/resources/assets/www/desktop/ui.js
+++ b/apps/multiplatform/common/src/commonMain/resources/assets/www/desktop/ui.js
@@ -9,6 +9,7 @@ socket.addEventListener("open", (_event) => {
sendMessageToNative = (msg) => {
console.log("Message to server");
socket.send(JSON.stringify(msg));
+ reactOnMessageToServer(msg);
};
});
socket.addEventListener("message", (event) => {
@@ -43,17 +44,19 @@ function toggleSpeakerManually() {
}
function toggleVideoManually() {
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMedia) {
- let res;
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.screenShareEnabled) {
activeCall.cameraEnabled = !activeCall.cameraEnabled;
- res = activeCall.cameraEnabled;
+ enableVideoIcon(activeCall.cameraEnabled);
+ // } else if (activeCall.localMedia == CallMediaType.Video) {
+ // enableVideoIcon(toggleMedia(activeCall.localStream, CallMediaType.Video))
}
else {
- res = toggleMedia(activeCall.localStream, CallMediaType.Video);
+ const apiCall = { command: { type: "media", media: CallMediaType.Video, enable: activeCall.cameraEnabled != true } };
+ reactOnMessageFromServer(apiCall);
+ processCommand(apiCall).then(() => {
+ enableVideoIcon((activeCall === null || activeCall === void 0 ? void 0 : activeCall.cameraEnabled) == true);
+ });
}
- document.getElementById("toggle-video").innerHTML = res
- ? '
'
- : '
';
}
}
async function toggleScreenManually() {
@@ -65,6 +68,11 @@ async function toggleScreenManually() {
: '
';
}
}
+function enableVideoIcon(enabled) {
+ document.getElementById("toggle-video").innerHTML = enabled
+ ? '
'
+ : '
';
+}
function reactOnMessageFromServer(msg) {
var _a;
switch ((_a = msg.command) === null || _a === void 0 ? void 0 : _a.type) {
@@ -75,23 +83,41 @@ function reactOnMessageFromServer(msg) {
case "start":
document.getElementById("toggle-audio").style.display = "inline-block";
document.getElementById("toggle-speaker").style.display = "inline-block";
- if (msg.command.media == CallMediaType.Video) {
- document.getElementById("toggle-video").style.display = "inline-block";
- document.getElementById("toggle-screen").style.display = "inline-block";
- }
+ document.getElementById("toggle-video").style.display = "inline-block";
+ document.getElementById("toggle-screen").style.display = "inline-block";
document.getElementById("info-block").className = msg.command.media;
break;
+ case "media":
+ const className = (msg.command.media == CallMediaType.Video && msg.command.enable) ||
+ (activeCall === null || activeCall === void 0 ? void 0 : activeCall.peerMediaSources.camera) ||
+ (activeCall === null || activeCall === void 0 ? void 0 : activeCall.peerMediaSources.screen)
+ ? "video"
+ : "audio";
+ document.getElementById("info-block").className = className;
+ document.getElementById("audio-call-icon").style.display = className == CallMediaType.Audio ? "block" : "none";
+ break;
case "description":
updateCallInfoView(msg.command.state, msg.command.description);
if ((activeCall === null || activeCall === void 0 ? void 0 : activeCall.connection.connectionState) == "connected") {
document.getElementById("progress").style.display = "none";
- if (document.getElementById("info-block").className == CallMediaType.Audio) {
- document.getElementById("audio-call-icon").style.display = "block";
- }
+ document.getElementById("audio-call-icon").style.display =
+ document.getElementById("info-block").className == CallMediaType.Audio ? "block" : "none";
}
break;
}
}
+function reactOnMessageToServer(msg) {
+ var _a;
+ switch ((_a = msg.resp) === null || _a === void 0 ? void 0 : _a.type) {
+ case "peerMedia":
+ const className = (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMedia) == CallMediaType.Video || (activeCall === null || activeCall === void 0 ? void 0 : activeCall.peerMediaSources.camera) || (activeCall === null || activeCall === void 0 ? void 0 : activeCall.peerMediaSources.screen)
+ ? "video"
+ : "audio";
+ document.getElementById("info-block").className = className;
+ document.getElementById("audio-call-icon").style.display = className == CallMediaType.Audio ? "block" : "none";
+ break;
+ }
+}
function updateCallInfoView(state, description) {
document.getElementById("state").innerText = state;
document.getElementById("description").innerText = description;
diff --git a/apps/multiplatform/common/src/desktopMain/kotlin/chat/simplex/common/views/call/CallView.desktop.kt b/apps/multiplatform/common/src/desktopMain/kotlin/chat/simplex/common/views/call/CallView.desktop.kt
index d6331616cc..2acfcf9ef8 100644
--- a/apps/multiplatform/common/src/desktopMain/kotlin/chat/simplex/common/views/call/CallView.desktop.kt
+++ b/apps/multiplatform/common/src/desktopMain/kotlin/chat/simplex/common/views/call/CallView.desktop.kt
@@ -65,6 +65,14 @@ actual fun ActiveCallView() {
is WCallResponse.Connected -> {
chatModel.activeCall.value = call.copy(callState = CallState.Connected, connectionInfo = r.connectionInfo)
}
+ is WCallResponse.PeerMedia -> {
+ val sources = call.peerMediaSources
+ chatModel.activeCall.value = when (r.source) {
+ CallMediaSource.Mic -> call.copy(peerMediaSources = sources.copy(mic = r.enabled))
+ CallMediaSource.Camera -> call.copy(peerMediaSources = sources.copy(camera = r.enabled))
+ CallMediaSource.Screen -> call.copy(peerMediaSources = sources.copy(screen = r.enabled))
+ }
+ }
is WCallResponse.End -> {
withBGApi { chatModel.callManager.endCall(call) }
}
diff --git a/apps/multiplatform/common/src/desktopMain/kotlin/chat/simplex/common/views/chatlist/ChatListView.desktop.kt b/apps/multiplatform/common/src/desktopMain/kotlin/chat/simplex/common/views/chatlist/ChatListView.desktop.kt
index 9e4eeb0c96..443344dd4b 100644
--- a/apps/multiplatform/common/src/desktopMain/kotlin/chat/simplex/common/views/chatlist/ChatListView.desktop.kt
+++ b/apps/multiplatform/common/src/desktopMain/kotlin/chat/simplex/common/views/chatlist/ChatListView.desktop.kt
@@ -24,7 +24,7 @@ import kotlinx.coroutines.flow.MutableStateFlow
@Composable
actual fun ActiveCallInteractiveArea(call: Call) {
val showMenu = remember { mutableStateOf(false) }
- val media = call.peerMedia ?: call.localMedia
+ val media = call.peerMediaSources ?: call.localMedia
CompositionLocalProvider(
LocalIndication provides NoIndication
) {
diff --git a/apps/multiplatform/desktop/hs_err_pid626666.log b/apps/multiplatform/desktop/hs_err_pid626666.log
new file mode 100644
index 0000000000..1108c1a3c4
--- /dev/null
+++ b/apps/multiplatform/desktop/hs_err_pid626666.log
@@ -0,0 +1,110 @@
+#
+# A fatal error has been detected by the Java Runtime Environment:
+#
+# SIGSEGV (0xb) at pc=0x00007bf0edd03035, pid=626666, tid=626706
+#
+# JRE version: OpenJDK Runtime Environment (17.0.12+7) (build 17.0.12+7)
+# Java VM: OpenJDK 64-Bit Server VM (17.0.12+7, mixed mode, sharing, tiered, compressed oops, compressed class ptrs, g1 gc, linux-amd64)
+# Problematic frame:
+# C [libc.so.6+0x9a035] __pthread_rwlock_rdlock+0x15
+#
+# Core dump will be written. Default location: Core dumps may be processed with "/usr/lib/systemd/systemd-coredump %P %u %g %s %t %c %h" (or dumping to /mnt/Dev/apps/simplex/apps/multiplatform/desktop/core.626666)
+#
+# If you would like to submit a bug report, please visit:
+# https://bugreport.java.com/bugreport/crash.jsp
+#
+
+--------------- S U M M A R Y ------------
+
+Command Line: -Dcompose.application.configure.swing.globals=true -Dcompose.application.resources.dir=/mnt/Dev/apps/simplex/apps/multiplatform/desktop/build/compose/tmp/prepareAppResources -Dfile.encoding=UTF-8 -Duser.country=US -Duser.language=en -Duser.variant chat.simplex.desktop.MainKt
+
+Host: 12th Gen Intel(R) Core(TM) i7-12700H, 20 cores, 38G, Manjaro Linux
+Time: Fri Aug 30 22:33:15 2024 KST elapsed time: 110.587221 seconds (0d 0h 1m 50s)
+
+--------------- T H R E A D ---------------
+
+Current thread is native thread
+
+Stack: [0x00007bf042401000,0x00007bf042c01000], sp=0x00007bf042bfb820, free space=8170k
+Native frames: (J=compiled Java code, j=interpreted, Vv=VM code, C=native code)
+C [libc.so.6+0x9a035] __pthread_rwlock_rdlock+0x15
+C [libcrypto.so.3+0x18645e] CRYPTO_THREAD_read_lock+0xe
+C [libcrypto.so.3+0x1b36fa] RAND_get_rand_method+0x3a
+C [libcrypto.so.3+0x1b4c97] RAND_bytes_ex+0x27
+
+
+siginfo: si_signo: 11 (SIGSEGV), si_code: 1 (SEGV_MAPERR), si_addr: 0x0000000000000018
+
+Registers:
+RAX=0x0000000000000001, RBX=0x0000000000000000, RCX=0x0000000000000000, RDX=0x0000000000000050
+RSP=0x00007bf042bfb820, RBP=0x00007bf042bfb840, RSI=0x00007bf04c3b0ab0, RDI=0x0000000000000000
+R8 =0x0000000000000fa0, R9 =0x0000000000000000, R10=0x0000000000000001, R11=0x00007bf042bfb880
+R12=0x0000000000000050, R13=0x00007befe40c8898, R14=0x0000000000000000, R15=0x0000000000000000
+RIP=0x00007bf0edd03035, EFLAGS=0x0000000000010202, CSGSFS=0x002b000000000033, ERR=0x0000000000000004
+ TRAPNO=0x000000000000000e
+
+
+Top of Stack: (sp=0x00007bf042bfb820)
+0x00007bf042bfb820: 0000000000000fa0 00007befe40c8898
+0x00007bf042bfb830: 0000000000000050 00007befe40c8898
+0x00007bf042bfb840: 00007bf042bfb850 00007bf04c38645e
+0x00007bf042bfb850: 00007bf042bfb870 00007bf04c3b36fa
+0x00007bf042bfb860: 00007befe40c8898 0000000000000050
+0x00007bf042bfb870: 00007bf042bfb8b0 00007bf04c3b4c97
+0x00007bf042bfb880: 00007befe40c8898 00007befe40c8898
+0x00007bf042bfb890: 0000000000000fb0 00007befe40a5728
+0x00007bf042bfb8a0: 0000000000000fa0 00007befe40c78f8
+0x00007bf042bfb8b0: 0000000000000050 00007bf055d655cd
+0x00007bf042bfb8c0: 0000000000000fb0 0000000000000001
+0x00007bf042bfb8d0: 0000000000000ff0 00007bf055d645de
+0x00007bf042bfb8e0: 00007befe40c8898 00007befe40dd000
+0x00007bf042bfb8f0: 0000000000000fa0 00007befe40c8998
+0x00007bf042bfb900: 0000000100001000 00007befe40c88a8
+0x00007bf042bfb910: 0000000000000000 00007befe40a5728
+0x00007bf042bfb920: 00007befe40c78f8 0000000000000ff0
+0x00007bf042bfb930: 0000000000000001 00007befe40c78e8
+0x00007bf042bfb940: 0000000000000000 00007bf055d650f1
+0x00007bf042bfb950: 00007befe40c78f8 00007bf055d651a6
+0x00007bf042bfb960: 0000000000000000 00007befe40dd000
+0x00007bf042bfb970: 0000000000000010 0000000100000010
+0x00007bf042bfb980: 00007befe40c78a8 3f5d6a2928082800
+0x00007bf042bfb990: 0000000000000000 00007befe40de028
+0x00007bf042bfb9a0: 00007befe40b99d8 0000000000000001
+0x00007bf042bfb9b0: 0000000000000000 00007befe40dcff0
+0x00007bf042bfb9c0: 0000000000000000 00007bf055d892d4
+0x00007bf042bfb9d0: 05000000d763a120 3f5d6a2928082800
+0x00007bf042bfb9e0: 00007befe40b99d8 0000000000000000
+0x00007bf042bfb9f0: 0000000000000000 00007befe40b9cec
+0x00007bf042bfba00: 00007befe409a808 00007bf055d9042f
+0x00007bf042bfba10: 00007bf042bfba20 3f5d6a2928082800
+
+Instructions: (pc=0x00007bf0edd03035)
+0x00007bf0edd02f35: f7 0f 84 31 fd ff ff 48 8d 3d ed ba 11 00 e8 68
+0x00007bf0edd02f45: f5 fe ff 0f 1f 84 00 00 00 00 00 83 c0 16 83 e0
+0x00007bf0edd02f55: f7 75 e4 e9 3f ff ff ff 45 85 ff 0f 84 07 fd ff
+0x00007bf0edd02f65: ff 8b 43 04 48 8d 53 04 3d 01 00 00 80 74 30 8d
+0x00007bf0edd02f75: 48 ff f0 0f b1 0a 75 f0 3d 01 00 00 80 0f 85 e5
+0x00007bf0edd02f85: fc ff ff 8b 13 b8 03 00 00 00 83 ca 02 e9 9b fc
+0x00007bf0edd02f95: ff ff 83 c0 16 83 e0 f7 75 9d e9 cd fe ff ff 31
+0x00007bf0edd02fa5: c9 eb cf 85 c0 0f 85 40 ff ff ff 89 4b 0c e9 6d
+0x00007bf0edd02fb5: fe ff ff 0f 1f 84 00 00 00 00 00 f3 0f 1e fa 90
+0x00007bf0edd02fc5: 31 c0 c3 0f 1f 84 00 00 00 00 00 f3 0f 1e fa 48
+0x00007bf0edd02fd5: 85 f6 48 8d 05 aa 06 12 00 66 0f ef c0 48 c7 47
+0x00007bf0edd02fe5: 30 00 00 00 00 48 0f 44 f0 0f 11 47 10 0f 11 07
+0x00007bf0edd02ff5: 0f 11 47 20 8b 06 8b 56 04 89 47 30 31 c0 85 d2
+0x00007bf0edd03005: 0f 95 c0 89 47 1c 31 c0 c3 66 2e 0f 1f 84 00 00
+0x00007bf0edd03015: 00 00 00 0f 1f 84 00 00 00 00 00 f3 0f 1e fa 55
+0x00007bf0edd03025: 48 89 e5 41 55 41 54 53 48 89 fb 48 83 ec 08 90
+0x00007bf0edd03035: 8b 57 18 64 8b 04 25 d0 02 00 00 39 c2 0f 84 08
+0x00007bf0edd03045: 01 00 00 83 7f 30 02 74 32 b8 08 00 00 00 f0 0f
+0x00007bf0edd03055: c1 03 83 c0 08 85 c0 0f 88 fe 00 00 00 a8 01 75
+0x00007bf0edd03065: 7a 31 d2 90 48 83 c4 08 89 d0 5b 41 5c 41 5d 5d
+0x00007bf0edd03075: c3 66 2e 0f 1f 84 00 00 00 00 00 8b 37 89 f0 83
+0x00007bf0edd03085: e0 03 83 f8 02 75 c2 89 f0 c1 e8 03 74 bb 89 f2
+0x00007bf0edd03095: 89 f0 83 ca 04 f0 0f b1 13 89 c6 74 0b eb de 0f
+0x00007bf0edd030a5: 1f 40 00 83 f8 4b 74 bb 8b 33 40 f6 c6 04 74 cd
+0x00007bf0edd030b5: 44 8b 4b 1c 45 31 c0 48 89 df 45 85 c9 41 0f 95
+0x00007bf0edd030c5: c0 31 d2 31 c9 41 c1 e0 07 e8 ad 69 ff ff 89 c2
+0x00007bf0edd030d5: 83 f8 6e 75 ce eb 8c 0f 1f 40 00 89 c2 83 e2 03
+0x00007bf0edd030e5: 83 fa 01 0f 85 89 00 00 00 89 c2 83 f2 01 f0 0f
+0x00007bf0edd030f5: b1 13 75
\ No newline at end of file
diff --git a/packages/simplex-chat-webrtc/src/call.ts b/packages/simplex-chat-webrtc/src/call.ts
index 19682249e9..ad6bfbdd2e 100644
--- a/packages/simplex-chat-webrtc/src/call.ts
+++ b/packages/simplex-chat-webrtc/src/call.ts
@@ -26,6 +26,7 @@ type WCallResponse =
| WCallIceCandidates
| WRConnection
| WRCallConnected
+ | WRPeerMedia
| WRCallEnd
| WRCallEnded
| WROk
@@ -34,13 +35,31 @@ type WCallResponse =
type WCallCommandTag = "capabilities" | "start" | "offer" | "answer" | "ice" | "media" | "camera" | "description" | "layout" | "end"
-type WCallResponseTag = "capabilities" | "offer" | "answer" | "ice" | "connection" | "connected" | "end" | "ended" | "ok" | "error"
+type WCallResponseTag =
+ | "capabilities"
+ | "offer"
+ | "answer"
+ | "ice"
+ | "connection"
+ | "connected"
+ | "peerMedia"
+ | "end"
+ | "ended"
+ | "ok"
+ | "error"
enum CallMediaType {
Audio = "audio",
Video = "video",
}
+enum CallMediaSource {
+ Mic = "mic",
+ Camera = "camera",
+ Screen = "screen",
+ Unknown = "unknown",
+}
+
enum VideoCamera {
User = "user",
Environment = "environment",
@@ -52,6 +71,12 @@ enum LayoutType {
RemoteVideo = "remoteVideo",
}
+interface CallMediaSources {
+ mic: boolean
+ camera: boolean
+ screen: boolean
+}
+
interface IWCallCommand {
type: WCallCommandTag
}
@@ -151,6 +176,13 @@ interface WRCallConnected extends IWCallResponse {
connectionInfo: ConnectionInfo
}
+interface WRPeerMedia extends IWCallResponse {
+ type: "peerMedia"
+ media: CallMediaType
+ source: CallMediaSource
+ enabled: boolean
+}
+
interface WRCallEnd extends IWCallResponse {
type: "end"
}
@@ -206,6 +238,7 @@ interface Call {
localCamera: VideoCamera
localStream: MediaStream
remoteStream: MediaStream
+ peerMediaSources: CallMediaSources
screenShareEnabled: boolean
cameraEnabled: boolean
aesKey?: string
@@ -341,17 +374,23 @@ const processCommand = (function () {
.filter((elem) => elem.kind == "video")
.forEach((elem) => (elem.enabled = false))
}
+ // Will become video when any video tracks will be added
const iceCandidates = getIceCandidates(pc, config)
- const call = {
+ const call: Call = {
connection: pc,
iceCandidates,
localMedia: mediaType,
localCamera,
localStream,
remoteStream,
+ peerMediaSources: {
+ mic: false,
+ camera: false,
+ screen: false,
+ },
aesKey,
screenShareEnabled: false,
- cameraEnabled: true,
+ cameraEnabled: !isDesktop,
}
await setupMediaStreams(call)
let connectionTimeout: number | undefined = setTimeout(connectionHandler, answerTimeout)
@@ -443,6 +482,14 @@ const processCommand = (function () {
const aesKey = encryption ? command.aesKey : undefined
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey)
const pc = activeCall.connection
+ if (media == CallMediaType.Audio) {
+ console.log("LALAL ADDING TRANSCEIVER for video")
+ // For camera. So the first video in the list is for camera
+ pc.addTransceiver("video", {streams: [activeCall.localStream]})
+ }
+ // For screenshare. So the second video in the list is for screenshare
+ pc.addTransceiver("video", {streams: [activeCall.localStream]})
+
const offer = await pc.createOffer()
await pc.setLocalDescription(offer)
// for debugging, returning the command for callee to use
@@ -478,7 +525,11 @@ const processCommand = (function () {
const pc = activeCall.connection
// console.log("offer remoteIceCandidates", JSON.stringify(remoteIceCandidates))
await pc.setRemoteDescription(new RTCSessionDescription(offer))
- const answer = await pc.createAnswer()
+ pc.getTransceivers().forEach((elem) => (elem.direction = "sendrecv"))
+ console.log("LALAL TRANSCE", pc.getTransceivers())
+ let answer = await pc.createAnswer()
+ console.log("LALAL SDP", answer, answer.sdp)
+ // answer!.sdp = answer.sdp?.replace("a=recvonly", "a=sendrecv")
await pc.setLocalDescription(answer)
addIceCandidates(pc, remoteIceCandidates)
// same as command for caller to use
@@ -501,6 +552,8 @@ const processCommand = (function () {
const answer: RTCSessionDescriptionInit = parse(command.answer)
const remoteIceCandidates: RTCIceCandidateInit[] = parse(command.iceCandidates)
// console.log("answer remoteIceCandidates", JSON.stringify(remoteIceCandidates))
+ console.log("LALAL SDP2", answer, answer.sdp)
+
await pc.setRemoteDescription(new RTCSessionDescription(answer))
addIceCandidates(pc, remoteIceCandidates)
resp = {type: "ok"}
@@ -518,8 +571,9 @@ const processCommand = (function () {
case "media":
if (!activeCall) {
resp = {type: "error", message: "media: call not started"}
- } else if (activeCall.localMedia == CallMediaType.Audio && command.media == CallMediaType.Video) {
- resp = {type: "error", message: "media: no video"}
+ } else if (activeCall.localMedia == CallMediaType.Audio && command.media == CallMediaType.Video && command.enable) {
+ await startSendingVideo(activeCall, activeCall.localCamera)
+ resp = {type: "ok"}
} else {
enableMedia(activeCall.localStream, command.media, command.enable)
resp = {type: "ok"}
@@ -600,6 +654,12 @@ const processCommand = (function () {
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], {type: "text/javascript"})))
call.worker.onerror = ({error, filename, lineno, message}: ErrorEvent) => console.log({error, filename, lineno, message})
// call.worker.onmessage = ({data}) => console.log(JSON.stringify({message: data}))
+ call.worker.onmessage = ({data}) => {
+ console.log(JSON.stringify({message: data}))
+ const transceiverMid: string = data.transceiverMid
+ const mute: boolean = data.mute
+ onMediaMuteUnmute(transceiverMid, mute)
+ }
}
}
}
@@ -616,8 +676,17 @@ const processCommand = (function () {
if (call.aesKey && call.key) {
console.log("set up encryption for sending")
- for (const sender of pc.getSenders() as RTCRtpSenderWithEncryption[]) {
- setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key)
+ for (const transceiver of pc.getTransceivers()) {
+ const sender = transceiver.sender as RTCRtpSenderWithEncryption
+ setupPeerTransform(
+ TransformOperation.Encrypt,
+ sender,
+ call.worker,
+ call.aesKey,
+ call.key,
+ transceiver.sender.track!.kind == "video" ? CallMediaType.Video : CallMediaType.Audio,
+ transceiver.mid
+ )
}
}
}
@@ -626,15 +695,56 @@ const processCommand = (function () {
// Pull tracks from remote stream as they arrive add them to remoteStream video
const pc = call.connection
pc.ontrack = (event) => {
+ console.log("LALAL ON TRACK ", event)
try {
if (call.aesKey && call.key) {
console.log("set up decryption for receiving")
- setupPeerTransform(TransformOperation.Decrypt, event.receiver as RTCRtpReceiverWithEncryption, call.worker, call.aesKey, call.key)
+ setupPeerTransform(
+ TransformOperation.Decrypt,
+ event.receiver as RTCRtpReceiverWithEncryption,
+ call.worker,
+ call.aesKey,
+ call.key,
+ event.receiver.track.kind == "video" ? CallMediaType.Video : CallMediaType.Audio,
+ event.transceiver.mid
+ )
}
- for (const stream of event.streams) {
- for (const track of stream.getTracks()) {
- call.remoteStream.addTrack(track)
+ // const source = mediaSourceFromTransceiverMid(event.transceiver.mid)
+ // const sources = call.peerMediaSources
+ // if (source == CallMediaSource.Mic) {
+ // sources.mic = true
+ // } else if (source == CallMediaSource.Camera) {
+ // sources.camera = true
+ // } else if (source == CallMediaSource.Screen) {
+ // sources.screen = true
+ // }
+ // call.peerMediaSources = sources
+
+ if (event.streams.length > 0) {
+ for (const stream of event.streams) {
+ for (const track of stream.getTracks()) {
+ call.remoteStream.addTrack(track)
+ // const resp: WRPeerMedia = {
+ // type: "peerMedia",
+ // media: track.kind == "audio" ? CallMediaType.Audio : CallMediaType.Video,
+ // source: source,
+ // enabled: track.enabled,
+ // }
+ // console.log("LALAL ADDED REMOTE", track, track.kind)
+ // sendMessageToNative({resp: resp})
+ }
}
+ } else {
+ const track = event.track
+ call.remoteStream.addTrack(track)
+ // const resp: WRPeerMedia = {
+ // type: "peerMedia",
+ // media: track.kind == "audio" ? CallMediaType.Audio : CallMediaType.Video,
+ // source: source,
+ // enabled: track.enabled,
+ // }
+ // console.log("LALAL ADDED REMOTE", track, track.kind)
+ // sendMessageToNative({resp: resp})
}
console.log(`ontrack success`)
} catch (e) {
@@ -683,6 +793,51 @@ const processCommand = (function () {
}
}
+ async function startSendingVideo(call: Call, camera: VideoCamera): Promise<void> {
+ console.log("LALAL STARTING SENDING VIDEO")
+ const videos = getVideoElements()
+ if (!videos) throw Error("no video elements")
+ const pc = call.connection
+ // Taking the first video transceiver and use it for sending video from camera. Following tracks are for other purposes
+ const tc = pc.getTransceivers().find((tc) => tc.receiver.track.kind == "video" && tc.direction == "sendrecv")
+ console.log(pc.getTransceivers().map((elem) => "" + elem.sender.track?.kind + " " + elem.receiver.track?.kind + " " + elem.direction))
+ let localStream: MediaStream
+ try {
+ localStream = await getLocalMediaStream(CallMediaType.Video, camera)
+ for (const t of localStream.getVideoTracks()) {
+ console.log("LALAL TC", tc, pc.getTransceivers())
+ call.localStream.addTrack(t)
+ tc?.sender.replaceTrack(t)
+ localStream.removeTrack(t)
+ // when adding track a `sender` will be created on that track automatically
+ //pc.addTrack(t, call.localStream)
+ console.log("LALAL ADDED VIDEO TRACK " + t)
+ }
+ call.localMedia = CallMediaType.Video
+ call.cameraEnabled = true
+ } catch (e: any) {
+ return
+ }
+
+ const sender = tc?.sender
+ console.log("LALAL SENDER " + sender + " " + sender?.getParameters())
+ if (call.aesKey && call.key && sender) {
+ setupPeerTransform(
+ TransformOperation.Encrypt,
+ sender as RTCRtpSenderWithEncryption,
+ call.worker,
+ call.aesKey,
+ call.key,
+ CallMediaType.Video,
+ tc.mid
+ )
+ }
+
+ // Without doing it manually Firefox shows black screen but video can be played in Picture-in-Picture
+ videos.local.play()
+ console.log("LALAL SENDING VIDEO")
+ }
+
async function replaceMedia(call: Call, camera: VideoCamera): Promise<void> {
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
@@ -734,24 +889,46 @@ const processCommand = (function () {
if (sender) for (const t of tracks) sender.replaceTrack(t)
}
+ function mediaSourceFromTransceiverMid(mid: string | null) {
+ switch (mid) {
+ case "0":
+ return CallMediaSource.Mic
+ case "1":
+ return CallMediaSource.Camera
+ case "2":
+ return CallMediaSource.Screen
+ default:
+ return CallMediaSource.Unknown
+ }
+ }
+
function setupPeerTransform(
operation: TransformOperation,
peer: RTCRtpReceiverWithEncryption | RTCRtpSenderWithEncryption,
worker: Worker | undefined,
aesKey: string,
- key: CryptoKey
+ key: CryptoKey,
+ media: CallMediaType,
+ transceiverMid: string | null
) {
+ console.log("LALAL MEDIA " + media + " " + transceiverMid)
if (worker && "RTCRtpScriptTransform" in window) {
console.log(`${operation} with worker & RTCRtpScriptTransform`)
- peer.transform = new RTCRtpScriptTransform(worker, {operation, aesKey})
+ peer.transform = new RTCRtpScriptTransform(worker, {operation, aesKey, media, transceiverMid})
} else if ("createEncodedStreams" in peer) {
const {readable, writable} = peer.createEncodedStreams()
if (worker) {
console.log(`${operation} with worker`)
- worker.postMessage({operation, readable, writable, aesKey}, [readable, writable] as unknown as Transferable[])
+ worker.postMessage({operation, readable, writable, aesKey, media, transceiverMid}, [
+ readable,
+ writable,
+ ] as unknown as Transferable[])
} else {
console.log(`${operation} without worker`)
- const transform = callCrypto.transformFrame[operation](key)
+ const onMediaMuteUnmuteConst = (mute: boolean) => {
+ onMediaMuteUnmute(transceiverMid, mute)
+ }
+ const transform = callCrypto.transformFrame[operation](key, onMediaMuteUnmuteConst)
readable.pipeThrough(new TransformStream({transform})).pipeTo(writable)
}
} else {
@@ -759,6 +936,45 @@ const processCommand = (function () {
}
}
+ function onMediaMuteUnmute(transceiverMid: string | null, mute: boolean) {
+ if (activeCall) {
+ const source = mediaSourceFromTransceiverMid(transceiverMid)
+ console.log("LALAL ON MUTE/UNMUTE", mute, source, transceiverMid)
+ const sources = activeCall.peerMediaSources
+ if (source == CallMediaSource.Mic && activeCall.peerMediaSources.mic == mute) {
+ const resp: WRPeerMedia = {
+ type: "peerMedia",
+ media: CallMediaType.Audio,
+ source: source,
+ enabled: !mute,
+ }
+ sources.mic = !mute
+ activeCall.peerMediaSources = sources
+ sendMessageToNative({resp: resp})
+ } else if (source == CallMediaSource.Camera && activeCall.peerMediaSources.camera == mute) {
+ const resp: WRPeerMedia = {
+ type: "peerMedia",
+ media: CallMediaType.Video,
+ source: source,
+ enabled: !mute,
+ }
+ sources.camera = !mute
+ activeCall.peerMediaSources = sources
+ sendMessageToNative({resp: resp})
+ } else if (source == CallMediaSource.Screen && activeCall.peerMediaSources.screen == mute) {
+ const resp: WRPeerMedia = {
+ type: "peerMedia",
+ media: CallMediaType.Video,
+ source: source,
+ enabled: !mute,
+ }
+ sources.screen = !mute
+ activeCall.peerMediaSources = sources
+ sendMessageToNative({resp: resp})
+ }
+ }
+ }
+
function getLocalMediaStream(mediaType: CallMediaType, facingMode: VideoCamera): Promise<MediaStream> {
const constraints = callMediaConstraints(mediaType, facingMode)
return navigator.mediaDevices.getUserMedia(constraints)
@@ -902,7 +1118,10 @@ function changeLayout(layout: LayoutType) {
}
}
-type TransformFrameFunc = (key: CryptoKey) => (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void>
+type TransformFrameFunc = (
+ key: CryptoKey,
+ onMediaMuteUnmute: (mute: boolean) => void
+) => (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void>
interface CallCrypto {
transformFrame: {[x in TransformOperation]: TransformFrameFunc}
@@ -936,6 +1155,7 @@ function callCryptoFunction(): CallCrypto {
: new Uint8Array(0)
frame.data = concatN(initial, ciphertext, iv).buffer
controller.enqueue(frame)
+ // console.log("LALAL ENCRYPT", frame.data.byteLength)
} catch (e) {
console.log(`encryption error ${e}`)
throw e
@@ -943,7 +1163,12 @@ function callCryptoFunction(): CallCrypto {
}
}
- function decryptFrame(key: CryptoKey): (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void> {
+ function decryptFrame(
+ key: CryptoKey,
+ onMediaMuteUnmute: (mute: boolean) => void
+ ): (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void> {
+ let wasMuted = true
+ let lastBytes: number[] = []
return async (frame, controller) => {
const data = new Uint8Array(frame.data)
const n = initialPlainTextRequired[frame.type] || 1
@@ -956,6 +1181,22 @@ function callCryptoFunction(): CallCrypto {
: new Uint8Array(0)
frame.data = concatN(initial, plaintext).buffer
controller.enqueue(frame)
+ lastBytes.push(frame.data.byteLength)
+ const sliced = lastBytes.slice(-20, lastBytes.length)
+ const average = sliced.reduce((prev, value) => value + prev, 0) / Math.max(1, sliced.length)
+ if (lastBytes.length > 20) {
+ console.log("LALAL REPLACED", lastBytes.length, sliced.length)
+ lastBytes = sliced
+ }
+ console.log("LALAL DECRYPT", frame.type, frame.data.byteLength, average)
+ // frame.type is undefined for audio stream, but defined for video
+ if (frame.type && wasMuted && average > 200) {
+ wasMuted = false
+ onMediaMuteUnmute(false)
+ } else if (frame.type && !wasMuted && average < 200) {
+ wasMuted = true
+ onMediaMuteUnmute(true)
+ }
} catch (e) {
console.log(`decryption error ${e}`)
throw e
@@ -1076,6 +1317,7 @@ function workerFunction() {
readable: ReadableStream
writable: WritableStream
aesKey: string
+ transceiverMid: string | null
}
// encryption with createEncodedStreams support
@@ -1087,9 +1329,9 @@ function workerFunction() {
if ("RTCTransformEvent" in self) {
self.addEventListener("rtctransform", async ({transformer}: any) => {
try {
- const {operation, aesKey} = transformer.options
+ const {operation, aesKey, transceiverMid} = transformer.options
const {readable, writable} = transformer
- await setupTransform({operation, aesKey, readable, writable})
+ await setupTransform({operation, aesKey, transceiverMid, readable, writable})
self.postMessage({result: "setupTransform success"})
} catch (e) {
self.postMessage({message: `setupTransform error: ${(e as Error).message}`})
@@ -1097,9 +1339,12 @@ function workerFunction() {
})
}
- async function setupTransform({operation, aesKey, readable, writable}: Transform): Promise<void> {
+ async function setupTransform({operation, aesKey, transceiverMid, readable, writable}: Transform): Promise<void> {
const key = await callCrypto.decodeAesKey(aesKey)
- const transform = callCrypto.transformFrame[operation](key)
+ const onMediaMuteUnmute = (mute: boolean) => {
+ self.postMessage({transceiverMid: transceiverMid, mute: mute})
+ }
+ const transform = callCrypto.transformFrame[operation](key, onMediaMuteUnmute)
readable.pipeThrough(new TransformStream({transform})).pipeTo(writable)
}
}
diff --git a/packages/simplex-chat-webrtc/src/desktop/ui.ts b/packages/simplex-chat-webrtc/src/desktop/ui.ts
index 4f336a17b1..deb50ff808 100644
--- a/packages/simplex-chat-webrtc/src/desktop/ui.ts
+++ b/packages/simplex-chat-webrtc/src/desktop/ui.ts
@@ -10,6 +10,7 @@ socket.addEventListener("open", (_event) => {
sendMessageToNative = (msg: WVApiMessage) => {
console.log("Message to server")
socket.send(JSON.stringify(msg))
+ reactOnMessageToServer(msg)
}
})
@@ -50,16 +51,18 @@ function toggleSpeakerManually() {
function toggleVideoManually() {
if (activeCall?.localMedia) {
- let res: boolean
if (activeCall?.screenShareEnabled) {
activeCall.cameraEnabled = !activeCall.cameraEnabled
- res = activeCall.cameraEnabled
+ enableVideoIcon(activeCall.cameraEnabled)
+ // } else if (activeCall.localMedia == CallMediaType.Video) {
+ // enableVideoIcon(toggleMedia(activeCall.localStream, CallMediaType.Video))
} else {
- res = toggleMedia(activeCall.localStream, CallMediaType.Video)
+ const apiCall: WVAPICall = {command: {type: "media", media: CallMediaType.Video, enable: activeCall.cameraEnabled != true}}
+ reactOnMessageFromServer(apiCall as any)
+ processCommand(apiCall).then(() => {
+ enableVideoIcon(activeCall?.cameraEnabled == true)
+ })
}
- document.getElementById("toggle-video")!!.innerHTML = res
- ? '<img src="/desktop/images/ic_videocam_filled.svg" />'
- : '<img src="/desktop/images/ic_videocam_off.svg" />'
}
}
@@ -73,6 +76,12 @@ async function toggleScreenManually() {
}
}
+function enableVideoIcon(enabled: boolean) {
+ document.getElementById("toggle-video")!!.innerHTML = enabled
+ ? '<img src="/desktop/images/ic_videocam_filled.svg" />'
+ : '<img src="/desktop/images/ic_videocam_off.svg" />'
+}
+
function reactOnMessageFromServer(msg: WVApiMessage) {
switch (msg.command?.type) {
case "capabilities":
@@ -82,24 +91,44 @@ function reactOnMessageFromServer(msg: WVApiMessage) {
case "start":
document.getElementById("toggle-audio")!!.style.display = "inline-block"
document.getElementById("toggle-speaker")!!.style.display = "inline-block"
- if (msg.command.media == CallMediaType.Video) {
- document.getElementById("toggle-video")!!.style.display = "inline-block"
- document.getElementById("toggle-screen")!!.style.display = "inline-block"
- }
+ document.getElementById("toggle-video")!!.style.display = "inline-block"
+ document.getElementById("toggle-screen")!!.style.display = "inline-block"
document.getElementById("info-block")!!.className = msg.command.media
break
+ case "media":
+ const className =
+ (msg.command.media == CallMediaType.Video && msg.command.enable) ||
+ activeCall?.peerMediaSources.camera ||
+ activeCall?.peerMediaSources.screen
+ ? "video"
+ : "audio"
+ document.getElementById("info-block")!!.className = className
+ document.getElementById("audio-call-icon")!.style.display = className == CallMediaType.Audio ? "block" : "none"
+ break
case "description":
updateCallInfoView(msg.command.state, msg.command.description)
if (activeCall?.connection.connectionState == "connected") {
document.getElementById("progress")!.style.display = "none"
- if (document.getElementById("info-block")!!.className == CallMediaType.Audio) {
- document.getElementById("audio-call-icon")!.style.display = "block"
- }
+ document.getElementById("audio-call-icon")!.style.display =
+ document.getElementById("info-block")!!.className == CallMediaType.Audio ? "block" : "none"
}
break
}
}
+function reactOnMessageToServer(msg: WVApiMessage) {
+ switch (msg.resp?.type) {
+ case "peerMedia":
+ const className =
+ activeCall?.localMedia == CallMediaType.Video || activeCall?.peerMediaSources.camera || activeCall?.peerMediaSources.screen
+ ? "video"
+ : "audio"
+ document.getElementById("info-block")!!.className = className
+ document.getElementById("audio-call-icon")!.style.display = className == CallMediaType.Audio ? "block" : "none"
+ break
+ }
+}
+
function updateCallInfoView(state: string, description: string) {
document.getElementById("state")!!.innerText = state
document.getElementById("description")!!.innerText = description