working all 4 streams with mute handling differently

This commit is contained in:
Avently
2024-09-03 00:26:35 +09:00
parent 579bbf7abe
commit 6f69da321c
15 changed files with 624 additions and 217 deletions
@@ -168,7 +168,8 @@ actual fun ActiveCallView() {
when (r.source) {
CallMediaSource.Mic -> it.copy(peerMediaSources = sources.copy(mic = r.enabled))
CallMediaSource.Camera -> it.copy(peerMediaSources = sources.copy(camera = r.enabled))
CallMediaSource.Screen -> it.copy(peerMediaSources = sources.copy(screen = r.enabled))
CallMediaSource.ScreenAudio -> it.copy(peerMediaSources = sources.copy(screenAudio = r.enabled))
CallMediaSource.ScreenVideo -> it.copy(peerMediaSources = sources.copy(screenVideo = r.enabled))
}
}
}
@@ -71,9 +71,10 @@ enum class CallState {
@Serializable data class CallMediaSources(
val mic: Boolean = false,
val camera: Boolean = false,
val screen: Boolean = false
val screenAudio: Boolean = false,
val screenVideo: Boolean = false
) {
fun hasVideo() = camera || screen
fun hasVideo() = camera || screenVideo
}
@Serializable
@@ -178,7 +179,8 @@ enum class CallMediaType {
enum class CallMediaSource {
@SerialName("mic") Mic,
@SerialName("camera") Camera,
@SerialName("screen") Screen
@SerialName("screenAudio") ScreenAudio,
@SerialName("screenVideo") ScreenVideo
}
@Serializable
@@ -14,6 +14,16 @@
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="remote-screen-video-stream"
class="inline"
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
style="display: none"
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="local-video-stream"
class="inline"
@@ -22,6 +32,15 @@
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<video
id="local-screen-video-stream"
class="inline"
muted
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
style="display: none"
></video>
</body>
<footer>
<script src="../call.js"></script>
@@ -12,6 +12,13 @@ body {
object-fit: cover;
}
#remote-screen-video-stream.inline {
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
}
#local-video-stream.inline {
position: absolute;
width: 30%;
@@ -23,6 +30,17 @@ body {
right: 0;
}
#local-screen-video-stream.inline {
position: absolute;
width: 30%;
max-width: 30%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
top: 30%;
right: 0;
}
#remote-video-stream.fullscreen {
position: absolute;
height: 100%;
@@ -30,6 +48,13 @@ body {
object-fit: cover;
}
#remote-screen-video-stream.fullscreen {
position: absolute;
height: 100%;
width: 100%;
object-fit: cover;
}
#local-video-stream.fullscreen {
position: absolute;
height: 100%;
@@ -37,6 +62,13 @@ body {
object-fit: cover;
}
#local-screen-video-stream.fullscreen {
position: absolute;
height: 100%;
width: 100%;
object-fit: cover;
}
*::-webkit-media-controls {
display: none !important;
-webkit-appearance: none !important;
@@ -10,7 +10,8 @@ var CallMediaSource;
(function (CallMediaSource) {
CallMediaSource["Mic"] = "mic";
CallMediaSource["Camera"] = "camera";
CallMediaSource["Screen"] = "screen";
CallMediaSource["ScreenAudio"] = "screenAudio";
CallMediaSource["ScreenVideo"] = "screenVideo";
CallMediaSource["Unknown"] = "unknown";
})(CallMediaSource || (CallMediaSource = {}));
var VideoCamera;
@@ -36,7 +37,7 @@ var TransformOperation;
TransformOperation["Decrypt"] = "decrypt";
})(TransformOperation || (TransformOperation = {}));
function localMedia(call) {
return call.localMediaSources.camera || call.localMediaSources.screen ? CallMediaType.Video : CallMediaType.Audio;
return call.localMediaSources.camera || call.localMediaSources.screenVideo ? CallMediaType.Video : CallMediaType.Audio;
}
let activeCall;
let answerTimeout = 30000;
@@ -133,8 +134,10 @@ const processCommand = (function () {
pc = new RTCPeerConnection(config.peerConnectionConfig);
}
const remoteStream = new MediaStream();
const remoteScreenStream = new MediaStream();
const localCamera = VideoCamera.User;
const localStream = await getLocalMediaStream(mediaType, localCamera);
const localScreenStream = new MediaStream();
if (isDesktop) {
localStream
.getTracks()
@@ -149,17 +152,23 @@ const processCommand = (function () {
localMediaSources: {
mic: true,
camera: mediaType == CallMediaType.Video && !isDesktop,
screen: false,
screenAudio: false,
screenVideo: false,
},
localCamera,
localStream,
localScreenStream,
remoteStream,
remoteScreenStream,
peerMediaSources: {
mic: false,
camera: false,
screen: false,
screenAudio: false,
screenVideo: false,
},
aesKey,
cameraTrackWasSetBefore: mediaType == CallMediaType.Video,
screenShareWasSetupBefore: false,
};
await setupMediaStreams(call);
let connectionTimeout = setTimeout(connectionHandler, answerTimeout);
@@ -252,11 +261,12 @@ const processCommand = (function () {
const pc = activeCall.connection;
if (media == CallMediaType.Audio) {
console.log("LALAL ADDING TRANSCEIVER for video");
// For camera. So the first video in the list is for camera
// For camera. The first video in the list is for camera
pc.addTransceiver("video", { streams: [activeCall.localStream] });
}
// For screenshare. So the second video in the list is for screenshare
pc.addTransceiver("video", { streams: [activeCall.localStream] });
// For screenshare. So the second audio and video in the list is for screenshare
pc.addTransceiver("audio", { streams: [activeCall.localScreenStream] });
pc.addTransceiver("video", { streams: [activeCall.localScreenStream] });
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
// for debugging, returning the command for callee to use
@@ -294,8 +304,9 @@ const processCommand = (function () {
const pc = activeCall.connection;
// console.log("offer remoteIceCandidates", JSON.stringify(remoteIceCandidates))
await pc.setRemoteDescription(new RTCSessionDescription(offer));
// Enable using the same transceivers for sending media too, so total number of transceivers will be: audio, camera, screen audio, screen video
pc.getTransceivers().forEach((elem) => (elem.direction = "sendrecv"));
console.log("LALAL TRANSCE", pc.getTransceivers());
console.log("LALAL TRANSCE", pc.getTransceivers(), pc.getTransceivers().map((elem) => { var _a, _b; return "" + elem.mid + " " + ((_a = elem.sender.track) === null || _a === void 0 ? void 0 : _a.kind) + " " + ((_b = elem.sender.track) === null || _b === void 0 ? void 0 : _b.label); }));
let answer = await pc.createAnswer();
console.log("LALAL SDP", answer, answer.sdp);
// answer!.sdp = answer.sdp?.replace("a=recvonly", "a=sendrecv")
@@ -344,8 +355,8 @@ const processCommand = (function () {
if (!activeCall) {
resp = { type: "error", message: "media: call not started" };
}
else if (localMedia(activeCall) == CallMediaType.Audio && command.media == CallMediaType.Video && command.enable) {
await startSendingVideo(activeCall, activeCall.localCamera);
else if (!activeCall.cameraTrackWasSetBefore && command.media == CallMediaType.Video && command.enable) {
await startSendingCamera(activeCall, activeCall.localCamera);
resp = { type: "ok" };
}
else {
@@ -416,10 +427,14 @@ const processCommand = (function () {
// setupVideoElement(videos.local)
// setupVideoElement(videos.remote)
videos.local.srcObject = call.localStream;
videos.localScreen.srcObject = call.localScreenStream;
videos.remote.srcObject = call.remoteStream;
videos.remoteScreen.srcObject = call.remoteScreenStream;
// Without doing it manually Firefox shows black screen but video can be played in Picture-in-Picture
videos.local.play();
// videos.localScreen.play()
videos.remote.play();
videos.remoteScreen.play();
}
async function setupEncryptionWorker(call) {
if (call.aesKey) {
@@ -466,42 +481,28 @@ const processCommand = (function () {
console.log("set up decryption for receiving");
setupPeerTransform(TransformOperation.Decrypt, event.receiver, call.worker, call.aesKey, call.key, event.receiver.track.kind == "video" ? CallMediaType.Video : CallMediaType.Audio, event.transceiver.mid);
}
// const source = mediaSourceFromTransceiverMid(event.transceiver.mid)
// const sources = call.peerMediaSources
// if (source == CallMediaSource.Mic) {
// sources.mic = true
// } else if (source == CallMediaSource.Camera) {
// sources.camera = true
// } else if (source == CallMediaSource.Screen) {
// sources.screen = true
// }
// call.peerMediaSources = sources
if (event.streams.length > 0) {
for (const stream of event.streams) {
for (const track of stream.getTracks()) {
call.remoteStream.addTrack(track);
// const resp: WRPeerMedia = {
// type: "peerMedia",
// media: track.kind == "audio" ? CallMediaType.Audio : CallMediaType.Video,
// source: source,
// enabled: track.enabled,
// }
// console.log("LALAL ADDED REMOTE", track, track.kind)
// sendMessageToNative({resp: resp})
const mediaSource = mediaSourceFromTransceiverMid(event.transceiver.mid);
if (mediaSource == CallMediaSource.ScreenAudio || mediaSource == CallMediaSource.ScreenVideo) {
call.remoteScreenStream.addTrack(track);
}
else {
call.remoteStream.addTrack(track);
}
}
}
}
else {
const track = event.track;
call.remoteStream.addTrack(track);
// const resp: WRPeerMedia = {
// type: "peerMedia",
// media: track.kind == "audio" ? CallMediaType.Audio : CallMediaType.Video,
// source: source,
// enabled: track.enabled,
// }
// console.log("LALAL ADDED REMOTE", track, track.kind)
// sendMessageToNative({resp: resp})
const mediaSource = mediaSourceFromTransceiverMid(event.transceiver.mid);
if (mediaSource == CallMediaSource.ScreenAudio || mediaSource == CallMediaSource.ScreenVideo) {
call.remoteScreenStream.addTrack(track);
}
else {
call.remoteStream.addTrack(track);
}
}
console.log(`ontrack success`);
}
@@ -550,7 +551,7 @@ const processCommand = (function () {
}
}
}
async function startSendingVideo(call, camera) {
async function startSendingCamera(call, camera) {
console.log("LALAL STARTING SENDING VIDEO");
const videos = getVideoElements();
if (!videos)
@@ -572,6 +573,7 @@ const processCommand = (function () {
console.log("LALAL ADDED VIDEO TRACK " + t);
}
call.localMediaSources.camera = true;
call.cameraTrackWasSetBefore = true;
}
catch (e) {
return;
@@ -585,6 +587,64 @@ const processCommand = (function () {
videos.local.play();
console.log("LALAL SENDING VIDEO");
}
async function enableDisableScreenShare(call) {
const videos = getVideoElements();
if (!videos)
throw Error("no video elements");
const pc = call.connection;
if (call.localMediaSources.screenVideo) {
let localScreenStream;
try {
localScreenStream = await getLocalScreenCaptureStream();
}
catch (e) {
call.localMediaSources.screenAudio = false;
call.localMediaSources.screenVideo = false;
return;
}
for (const t of localScreenStream.getTracks())
call.localScreenStream.addTrack(t);
for (const t of localScreenStream.getTracks())
localScreenStream.removeTrack(t);
pc.getTransceivers().forEach((elem) => {
const source = mediaSourceFromTransceiverMid(elem.mid);
const screenAudioTrack = call.localScreenStream.getTracks().find((elem) => elem.kind == "audio");
const screenVideoTrack = call.localScreenStream.getTracks().find((elem) => elem.kind == "video");
if (source == CallMediaSource.ScreenAudio && screenAudioTrack) {
elem.sender.replaceTrack(screenAudioTrack);
console.log("LALAL REPLACED AUDIO SCREEN TRACK");
}
else if (source == CallMediaSource.ScreenVideo && screenVideoTrack) {
elem.sender.replaceTrack(screenVideoTrack);
console.log("LALAL REPLACED VIDEO SCREEN TRACK");
}
if (!call.screenShareWasSetupBefore &&
call.aesKey &&
call.key &&
(source == CallMediaSource.ScreenAudio || source == CallMediaSource.ScreenVideo)) {
setupPeerTransform(TransformOperation.Encrypt, elem.sender, call.worker, call.aesKey, call.key, source == CallMediaSource.ScreenVideo ? CallMediaType.Video : CallMediaType.Audio, elem.mid);
}
});
call.screenShareWasSetupBefore = true;
// videos.localScreen.pause()
// videos.localScreen.srcObject = call.localScreenStream
videos.localScreen.play();
videos.localScreen.style.display = "block";
}
else {
pc.getTransceivers().forEach((elem) => {
const source = mediaSourceFromTransceiverMid(elem.mid);
if (source == CallMediaSource.ScreenAudio || source == CallMediaSource.ScreenVideo) {
elem.sender.replaceTrack(null);
}
});
for (const t of call.localScreenStream.getTracks())
t.stop();
for (const t of call.localScreenStream.getTracks())
call.localScreenStream.removeTrack(t);
videos.localScreen.style.display = "none";
}
}
async function replaceMedia(call, camera) {
const videos = getVideoElements();
if (!videos)
@@ -594,41 +654,19 @@ const processCommand = (function () {
const audioWasEnabled = oldAudioTracks.some((elem) => elem.enabled);
let localStream;
try {
localStream = call.localMediaSources.screen
? await getLocalScreenCaptureStream()
: await getLocalMediaStream(localMedia(call), camera);
localStream = await getLocalMediaStream(localMedia(call), camera);
}
catch (e) {
if (call.localMediaSources.screen) {
call.localMediaSources.screen = false;
}
return;
}
if (!call.localMediaSources.screen) {
for (const t of call.localStream.getTracks())
t.stop();
}
else {
// Don't stop audio track if switching to screenshare
for (const t of call.localStream.getVideoTracks())
t.stop();
// Replace new track from screenshare with old track from recording device
for (const t of localStream.getAudioTracks()) {
t.stop();
localStream.removeTrack(t);
}
for (const t of call.localStream.getAudioTracks())
localStream.addTrack(t);
}
for (const t of call.localStream.getTracks())
t.stop();
call.localCamera = camera;
const audioTracks = localStream.getAudioTracks();
const videoTracks = localStream.getVideoTracks();
if (!audioWasEnabled && oldAudioTracks.length > 0) {
audioTracks.forEach((elem) => (elem.enabled = false));
}
if (!call.localMediaSources.camera && !call.localMediaSources.screen) {
videoTracks.forEach((elem) => (elem.enabled = false));
}
replaceTracks(pc, audioTracks);
replaceTracks(pc, videoTracks);
call.localStream = localStream;
@@ -650,7 +688,9 @@ const processCommand = (function () {
case "1":
return CallMediaSource.Camera;
case "2":
return CallMediaSource.Screen;
return CallMediaSource.ScreenAudio;
case "3":
return CallMediaSource.ScreenVideo;
default:
return CallMediaSource.Unknown;
}
@@ -684,6 +724,9 @@ const processCommand = (function () {
}
}
function onMediaMuteUnmute(transceiverMid, mute) {
const videos = getVideoElements();
if (!videos)
throw Error("no video elements");
if (activeCall) {
const source = mediaSourceFromTransceiverMid(transceiverMid);
console.log("LALAL ON MUTE/UNMUTE", mute, source, transceiverMid);
@@ -708,17 +751,30 @@ const processCommand = (function () {
};
sources.camera = !mute;
activeCall.peerMediaSources = sources;
videos.remote.style.display = !mute ? "block" : "none";
sendMessageToNative({ resp: resp });
}
else if (source == CallMediaSource.Screen && activeCall.peerMediaSources.screen == mute) {
else if (source == CallMediaSource.ScreenAudio && activeCall.peerMediaSources.screenAudio == mute) {
const resp = {
type: "peerMedia",
media: CallMediaType.Audio,
source: source,
enabled: !mute,
};
sources.screenAudio = !mute;
activeCall.peerMediaSources = sources;
sendMessageToNative({ resp: resp });
}
else if (source == CallMediaSource.ScreenVideo && activeCall.peerMediaSources.screenVideo == mute) {
const resp = {
type: "peerMedia",
media: CallMediaType.Video,
source: source,
enabled: !mute,
};
sources.screen = !mute;
sources.screenVideo = !mute;
activeCall.peerMediaSources = sources;
videos.remoteScreen.style.display = !mute ? "block" : "none";
sendMessageToNative({ resp: resp });
}
}
@@ -778,14 +834,25 @@ const processCommand = (function () {
if (!videos)
return;
videos.local.srcObject = null;
videos.localScreen.srcObject = null;
videos.remote.srcObject = null;
videos.remoteScreen.srcObject = null;
}
function getVideoElements() {
const local = document.getElementById("local-video-stream");
const localScreen = document.getElementById("local-screen-video-stream");
const remote = document.getElementById("remote-video-stream");
if (!(local && remote && local instanceof HTMLMediaElement && remote instanceof HTMLMediaElement))
const remoteScreen = document.getElementById("remote-screen-video-stream");
if (!(local &&
localScreen &&
remote &&
remoteScreen &&
local instanceof HTMLMediaElement &&
localScreen instanceof HTMLMediaElement &&
remote instanceof HTMLMediaElement &&
remoteScreen instanceof HTMLMediaElement))
return;
return { local, remote };
return { local, localScreen, remote, remoteScreen };
}
// function setupVideoElement(video: HTMLElement) {
// // TODO use display: none
@@ -797,7 +864,16 @@ const processCommand = (function () {
function enableMedia(s, media, enable) {
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks();
for (const t of tracks)
t.enabled = enable;
activeCall === null || activeCall === void 0 ? void 0 : activeCall.connection.getTransceivers().forEach((elem) => {
if (enable) {
t.enabled = true;
elem.sender.replaceTrack(t);
}
else {
t.enabled = false;
elem.sender.replaceTrack(null);
}
});
if (media == CallMediaType.Video && activeCall) {
activeCall.localMediaSources.camera = enable;
}
@@ -806,8 +882,9 @@ const processCommand = (function () {
const call = activeCall;
if (!call)
return;
call.localMediaSources.screen = !call.localMediaSources.screen;
await replaceMedia(call, call.localCamera);
call.localMediaSources.screenAudio = !call.localMediaSources.screenAudio;
call.localMediaSources.screenVideo = !call.localMediaSources.screenVideo;
await enableDisableScreenShare(call);
};
return processCommand;
})();
@@ -880,7 +957,21 @@ function callCryptoFunction() {
}
function decryptFrame(key, onMediaMuteUnmute) {
let wasMuted = true;
let lastBytes = [];
let timeout = 0;
const resetTimeout = () => {
if (wasMuted) {
wasMuted = false;
onMediaMuteUnmute(wasMuted);
}
clearTimeout(timeout);
timeout = setTimeout(() => {
if (!wasMuted) {
wasMuted = true;
onMediaMuteUnmute(wasMuted);
}
}, 3000);
};
// let lastBytes: number[] = []
return async (frame, controller) => {
const data = new Uint8Array(frame.data);
const n = initialPlainTextRequired[frame.type] || 1;
@@ -893,23 +984,25 @@ function callCryptoFunction() {
: new Uint8Array(0);
frame.data = concatN(initial, plaintext).buffer;
controller.enqueue(frame);
lastBytes.push(frame.data.byteLength);
const sliced = lastBytes.slice(-20, lastBytes.length);
const average = sliced.reduce((prev, value) => value + prev, 0) / Math.max(1, sliced.length);
if (lastBytes.length > 20) {
console.log("LALAL REPLACED", lastBytes.length, sliced.length);
lastBytes = sliced;
}
console.log("LALAL DECRYPT", frame.type, frame.data.byteLength, average);
// frame.type is undefined for audio stream, but defined for video
if (frame.type && wasMuted && average > 200) {
wasMuted = false;
onMediaMuteUnmute(false);
}
else if (frame.type && !wasMuted && average < 200) {
wasMuted = true;
onMediaMuteUnmute(true);
}
resetTimeout();
// Check by bytes if track was disabled (not set to null)
// lastBytes.push(frame.data.byteLength)
// const sliced = lastBytes.slice(-20, lastBytes.length)
// const average = sliced.reduce((prev, value) => value + prev, 0) / Math.max(1, sliced.length)
// if (lastBytes.length > 20) {
// lastBytes = sliced
// }
// if (frame.type) {
// console.log("LALAL DECRYPT", frame.type, frame.data.byteLength, average)
// }
// // frame.type is undefined for audio stream, but defined for video
// if (frame.type && wasMuted && average > 200) {
// wasMuted = false
// onMediaMuteUnmute(false)
// } else if (frame.type && !wasMuted && average < 200) {
// wasMuted = true
// onMediaMuteUnmute(true)
// }
}
catch (e) {
console.log(`decryption error ${e}`);
@@ -14,6 +14,16 @@
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="remote-screen-video-stream"
class="inline"
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
style="display: none"
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="local-video-stream"
class="inline"
@@ -21,6 +31,16 @@
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<video
id="local-screen-video-stream"
class="inline"
muted
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
style="display: none"
></video>
<div id="progress"></div>
<div id="info-block">
<p id="state"></p>
@@ -12,6 +12,13 @@ body {
object-fit: cover;
}
#remote-screen-video-stream.inline {
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
}
#local-video-stream.inline {
position: absolute;
width: 20%;
@@ -23,6 +30,17 @@ body {
right: 0;
}
#local-screen-video-stream.inline {
position: absolute;
width: 20%;
max-width: 20%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
top: 33%;
right: 0;
}
#remote-video-stream.fullscreen {
position: absolute;
height: 100%;
@@ -30,6 +48,13 @@ body {
object-fit: cover;
}
#remote-screen-video-stream.fullscreen {
position: absolute;
height: 100%;
width: 100%;
object-fit: cover;
}
#local-video-stream.fullscreen {
position: absolute;
height: 100%;
@@ -37,6 +62,13 @@ body {
object-fit: cover;
}
#local-screen-video-stream.fullscreen {
position: absolute;
height: 100%;
width: 100%;
object-fit: cover;
}
*::-webkit-media-controls {
display: none !important;
-webkit-appearance: none !important;
@@ -44,28 +44,20 @@ function toggleSpeakerManually() {
}
function toggleVideoManually() {
if (activeCall) {
if (activeCall.localMediaSources.screen) {
activeCall.localMediaSources.camera = !activeCall.localMediaSources.camera;
enableVideoIcon(activeCall.localMediaSources.camera);
// } else if (activeCall.localMedia == CallMediaType.Video) {
// enableVideoIcon(toggleMedia(activeCall.localStream, CallMediaType.Video))
}
else {
const apiCall = { command: { type: "media", media: CallMediaType.Video, enable: activeCall.localMediaSources.camera != true } };
reactOnMessageFromServer(apiCall);
processCommand(apiCall).then(() => {
var _a;
enableVideoIcon(((_a = activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources) === null || _a === void 0 ? void 0 : _a.camera) == true);
});
}
const apiCall = { command: { type: "media", media: CallMediaType.Video, enable: activeCall.localMediaSources.camera != true } };
reactOnMessageFromServer(apiCall);
processCommand(apiCall).then(() => {
var _a;
enableVideoIcon(((_a = activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources) === null || _a === void 0 ? void 0 : _a.camera) == true);
});
}
}
async function toggleScreenManually() {
var _a;
const was = activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources.screen;
const was = activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources.screenVideo;
await toggleScreenShare();
if (was != (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources.screen)) {
document.getElementById("toggle-screen").innerHTML = ((_a = activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources) === null || _a === void 0 ? void 0 : _a.screen)
if (was != (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources.screenVideo)) {
document.getElementById("toggle-screen").innerHTML = ((_a = activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources) === null || _a === void 0 ? void 0 : _a.screenVideo)
? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
: '<img src="/desktop/images/ic_screen_share.svg" />';
}
@@ -92,7 +84,7 @@ function reactOnMessageFromServer(msg) {
case "media":
const className = (msg.command.media == CallMediaType.Video && msg.command.enable) ||
(activeCall === null || activeCall === void 0 ? void 0 : activeCall.peerMediaSources.camera) ||
(activeCall === null || activeCall === void 0 ? void 0 : activeCall.peerMediaSources.screen)
(activeCall === null || activeCall === void 0 ? void 0 : activeCall.peerMediaSources.screenVideo)
? "video"
: "audio";
document.getElementById("info-block").className = className;
@@ -114,7 +106,7 @@ function reactOnMessageToServer(msg) {
return;
switch ((_a = msg.resp) === null || _a === void 0 ? void 0 : _a.type) {
case "peerMedia":
const className = localMedia(activeCall) == CallMediaType.Video || activeCall.peerMediaSources.camera || activeCall.peerMediaSources.screen
const className = localMedia(activeCall) == CallMediaType.Video || activeCall.peerMediaSources.camera || activeCall.peerMediaSources.screenVideo
? "video"
: "audio";
document.getElementById("info-block").className = className;
@@ -70,7 +70,8 @@ actual fun ActiveCallView() {
chatModel.activeCall.value = when (r.source) {
CallMediaSource.Mic -> call.copy(peerMediaSources = sources.copy(mic = r.enabled))
CallMediaSource.Camera -> call.copy(peerMediaSources = sources.copy(camera = r.enabled))
CallMediaSource.Screen -> call.copy(peerMediaSources = sources.copy(screen = r.enabled))
CallMediaSource.ScreenAudio -> call.copy(peerMediaSources = sources.copy(screenAudio = r.enabled))
CallMediaSource.ScreenVideo -> call.copy(peerMediaSources = sources.copy(screenVideo = r.enabled))
}
}
is WCallResponse.End -> {
@@ -14,6 +14,16 @@
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="remote-screen-video-stream"
class="inline"
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
style="display: none"
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="local-video-stream"
class="inline"
@@ -22,6 +32,15 @@
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<video
id="local-screen-video-stream"
class="inline"
muted
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
style="display: none"
></video>
</body>
<footer>
<script src="../call.js"></script>
@@ -12,6 +12,13 @@ body {
object-fit: cover;
}
#remote-screen-video-stream.inline {
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
}
#local-video-stream.inline {
position: absolute;
width: 30%;
@@ -23,6 +30,17 @@ body {
right: 0;
}
#local-screen-video-stream.inline {
position: absolute;
width: 30%;
max-width: 30%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
top: 30%;
right: 0;
}
#remote-video-stream.fullscreen {
position: absolute;
height: 100%;
@@ -30,6 +48,13 @@ body {
object-fit: cover;
}
#remote-screen-video-stream.fullscreen {
position: absolute;
height: 100%;
width: 100%;
object-fit: cover;
}
#local-video-stream.fullscreen {
position: absolute;
height: 100%;
@@ -37,6 +62,13 @@ body {
object-fit: cover;
}
#local-screen-video-stream.fullscreen {
position: absolute;
height: 100%;
width: 100%;
object-fit: cover;
}
*::-webkit-media-controls {
display: none !important;
-webkit-appearance: none !important;
+205 -86
View File
@@ -56,7 +56,8 @@ enum CallMediaType {
enum CallMediaSource {
Mic = "mic",
Camera = "camera",
Screen = "screen",
ScreenAudio = "screenAudio",
ScreenVideo = "screenVideo",
Unknown = "unknown",
}
@@ -74,7 +75,8 @@ enum LayoutType {
interface CallMediaSources {
mic: boolean
camera: boolean
screen: boolean
screenAudio: boolean
screenVideo: boolean
}
interface IWCallCommand {
@@ -237,15 +239,20 @@ interface Call {
localMediaSources: CallMediaSources
localCamera: VideoCamera
localStream: MediaStream
localScreenStream: MediaStream
remoteStream: MediaStream
remoteScreenStream: MediaStream
peerMediaSources: CallMediaSources
aesKey?: string
worker?: Worker
key?: CryptoKey
// controls whether transceiver's track was replaced already or set initially (in video call)
cameraTrackWasSetBefore: boolean
screenShareWasSetupBefore: boolean
}
function localMedia(call: Call): CallMediaType {
return call.localMediaSources.camera || call.localMediaSources.screen ? CallMediaType.Video : CallMediaType.Audio
return call.localMediaSources.camera || call.localMediaSources.screenVideo ? CallMediaType.Video : CallMediaType.Audio
}
let activeCall: Call | undefined
@@ -368,8 +375,10 @@ const processCommand = (function () {
pc = new RTCPeerConnection(config.peerConnectionConfig)
}
const remoteStream = new MediaStream()
const remoteScreenStream = new MediaStream()
const localCamera = VideoCamera.User
const localStream = await getLocalMediaStream(mediaType, localCamera)
const localScreenStream = new MediaStream()
if (isDesktop) {
localStream
.getTracks()
@@ -384,17 +393,23 @@ const processCommand = (function () {
localMediaSources: {
mic: true,
camera: mediaType == CallMediaType.Video && !isDesktop,
screen: false,
screenAudio: false,
screenVideo: false,
},
localCamera,
localStream,
localScreenStream,
remoteStream,
remoteScreenStream,
peerMediaSources: {
mic: false,
camera: false,
screen: false,
screenAudio: false,
screenVideo: false,
},
aesKey,
cameraTrackWasSetBefore: mediaType == CallMediaType.Video,
screenShareWasSetupBefore: false,
}
await setupMediaStreams(call)
let connectionTimeout: number | undefined = setTimeout(connectionHandler, answerTimeout)
@@ -488,11 +503,12 @@ const processCommand = (function () {
const pc = activeCall.connection
if (media == CallMediaType.Audio) {
console.log("LALAL ADDING TRANSCEIVER for video")
// For camera. So the first video in the list is for camera
// For camera. The first video in the list is for camera
pc.addTransceiver("video", {streams: [activeCall.localStream]})
}
// For screenshare. So the second video in the list is for screenshare
pc.addTransceiver("video", {streams: [activeCall.localStream]})
// For screenshare. So the second audio and video in the list is for screenshare
pc.addTransceiver("audio", {streams: [activeCall.localScreenStream]})
pc.addTransceiver("video", {streams: [activeCall.localScreenStream]})
const offer = await pc.createOffer()
await pc.setLocalDescription(offer)
@@ -529,8 +545,13 @@ const processCommand = (function () {
const pc = activeCall.connection
// console.log("offer remoteIceCandidates", JSON.stringify(remoteIceCandidates))
await pc.setRemoteDescription(new RTCSessionDescription(offer))
// Enable using the same transceivers for sending media too, so total number of transceivers will be: audio, camera, screen audio, screen video
pc.getTransceivers().forEach((elem) => (elem.direction = "sendrecv"))
console.log("LALAL TRANSCE", pc.getTransceivers())
console.log(
"LALAL TRANSCE",
pc.getTransceivers(),
pc.getTransceivers().map((elem) => "" + elem.mid + " " + elem.sender.track?.kind + " " + elem.sender.track?.label)
)
let answer = await pc.createAnswer()
console.log("LALAL SDP", answer, answer.sdp)
// answer!.sdp = answer.sdp?.replace("a=recvonly", "a=sendrecv")
@@ -575,8 +596,8 @@ const processCommand = (function () {
case "media":
if (!activeCall) {
resp = {type: "error", message: "media: call not started"}
} else if (localMedia(activeCall) == CallMediaType.Audio && command.media == CallMediaType.Video && command.enable) {
await startSendingVideo(activeCall, activeCall.localCamera)
} else if (!activeCall.cameraTrackWasSetBefore && command.media == CallMediaType.Video && command.enable) {
await startSendingCamera(activeCall, activeCall.localCamera)
resp = {type: "ok"}
} else {
enableMedia(activeCall.localStream, command.media, command.enable)
@@ -644,10 +665,14 @@ const processCommand = (function () {
// setupVideoElement(videos.local)
// setupVideoElement(videos.remote)
videos.local.srcObject = call.localStream
videos.localScreen.srcObject = call.localScreenStream
videos.remote.srcObject = call.remoteStream
videos.remoteScreen.srcObject = call.remoteScreenStream
// Without doing it manually Firefox shows black screen but video can be played in Picture-in-Picture
videos.local.play()
// videos.localScreen.play()
videos.remote.play()
videos.remoteScreen.play()
}
async function setupEncryptionWorker(call: Call) {
@@ -713,42 +738,26 @@ const processCommand = (function () {
event.transceiver.mid
)
}
// const source = mediaSourceFromTransceiverMid(event.transceiver.mid)
// const sources = call.peerMediaSources
// if (source == CallMediaSource.Mic) {
// sources.mic = true
// } else if (source == CallMediaSource.Camera) {
// sources.camera = true
// } else if (source == CallMediaSource.Screen) {
// sources.screen = true
// }
// call.peerMediaSources = sources
if (event.streams.length > 0) {
for (const stream of event.streams) {
for (const track of stream.getTracks()) {
call.remoteStream.addTrack(track)
// const resp: WRPeerMedia = {
// type: "peerMedia",
// media: track.kind == "audio" ? CallMediaType.Audio : CallMediaType.Video,
// source: source,
// enabled: track.enabled,
// }
// console.log("LALAL ADDED REMOTE", track, track.kind)
// sendMessageToNative({resp: resp})
const mediaSource = mediaSourceFromTransceiverMid(event.transceiver.mid)
if (mediaSource == CallMediaSource.ScreenAudio || mediaSource == CallMediaSource.ScreenVideo) {
call.remoteScreenStream.addTrack(track)
} else {
call.remoteStream.addTrack(track)
}
}
}
} else {
const track = event.track
call.remoteStream.addTrack(track)
// const resp: WRPeerMedia = {
// type: "peerMedia",
// media: track.kind == "audio" ? CallMediaType.Audio : CallMediaType.Video,
// source: source,
// enabled: track.enabled,
// }
// console.log("LALAL ADDED REMOTE", track, track.kind)
// sendMessageToNative({resp: resp})
const mediaSource = mediaSourceFromTransceiverMid(event.transceiver.mid)
if (mediaSource == CallMediaSource.ScreenAudio || mediaSource == CallMediaSource.ScreenVideo) {
call.remoteScreenStream.addTrack(track)
} else {
call.remoteStream.addTrack(track)
}
}
console.log(`ontrack success`)
} catch (e) {
@@ -797,7 +806,7 @@ const processCommand = (function () {
}
}
async function startSendingVideo(call: Call, camera: VideoCamera): Promise<void> {
async function startSendingCamera(call: Call, camera: VideoCamera): Promise<void> {
console.log("LALAL STARTING SENDING VIDEO")
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
@@ -818,6 +827,7 @@ const processCommand = (function () {
console.log("LALAL ADDED VIDEO TRACK " + t)
}
call.localMediaSources.camera = true
call.cameraTrackWasSetBefore = true
} catch (e: any) {
return
}
@@ -841,6 +851,71 @@ const processCommand = (function () {
console.log("LALAL SENDING VIDEO")
}
/**
 * Starts or stops local screen sharing for the current call.
 * The caller (toggleScreenShare) flips call.localMediaSources.screenVideo/screenAudio
 * BEFORE calling this, so screenVideo == true means "turn sharing on".
 * Uses the pre-negotiated ScreenAudio/ScreenVideo transceivers via replaceTrack,
 * so no SDP renegotiation is needed.
 */
async function enableDisableScreenShare(call: Call): Promise<void> {
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
const pc = call.connection
if (call.localMediaSources.screenVideo) {
let localScreenStream: MediaStream
try {
localScreenStream = await getLocalScreenCaptureStream()
} catch (e: any) {
// Capture failed or was denied by the user: roll back both flags set by the caller.
call.localMediaSources.screenAudio = false
call.localMediaSources.screenVideo = false
return
}
// Move captured tracks into the call-owned stream, then detach them from the
// temporary capture stream (getTracks() returns a snapshot array, so removing
// while iterating is safe).
for (const t of localScreenStream.getTracks()) call.localScreenStream.addTrack(t)
for (const t of localScreenStream.getTracks()) localScreenStream.removeTrack(t)
pc.getTransceivers().forEach((elem) => {
// Transceiver mid encodes the media source (see mediaSourceFromTransceiverMid).
const source = mediaSourceFromTransceiverMid(elem.mid)
const screenAudioTrack = call.localScreenStream.getTracks().find((elem) => elem.kind == "audio")
const screenVideoTrack = call.localScreenStream.getTracks().find((elem) => elem.kind == "video")
if (source == CallMediaSource.ScreenAudio && screenAudioTrack) {
elem.sender.replaceTrack(screenAudioTrack)
console.log("LALAL REPLACED AUDIO SCREEN TRACK")
} else if (source == CallMediaSource.ScreenVideo && screenVideoTrack) {
elem.sender.replaceTrack(screenVideoTrack)
console.log("LALAL REPLACED VIDEO SCREEN TRACK")
}
// Install the e2e-encryption insertable-streams transform once per call for the
// screen-share senders (camera/mic transforms are set up elsewhere).
if (
!call.screenShareWasSetupBefore &&
call.aesKey &&
call.key &&
(source == CallMediaSource.ScreenAudio || source == CallMediaSource.ScreenVideo)
) {
setupPeerTransform(
TransformOperation.Encrypt,
elem.sender as RTCRtpSenderWithEncryption,
call.worker,
call.aesKey,
call.key,
source == CallMediaSource.ScreenVideo ? CallMediaType.Video : CallMediaType.Audio,
elem.mid
)
}
})
// NOTE(review): this is set unconditionally, including when aesKey/key were absent,
// so the encrypt transform can never be installed on a later toggle — confirm intended.
call.screenShareWasSetupBefore = true
// videos.localScreen.pause()
// videos.localScreen.srcObject = call.localScreenStream
// NOTE(review): play() is called but the srcObject assignment above is commented out;
// unless srcObject is set elsewhere the local preview stays blank — verify.
videos.localScreen.play()
videos.localScreen.style.display = "block"
} else {
// Turning sharing off: detach screen tracks from their senders (keep transceivers
// alive for a later re-share), then stop and drop the local tracks and hide preview.
pc.getTransceivers().forEach((elem) => {
const source = mediaSourceFromTransceiverMid(elem.mid)
if (source == CallMediaSource.ScreenAudio || source == CallMediaSource.ScreenVideo) {
elem.sender.replaceTrack(null)
}
})
for (const t of call.localScreenStream.getTracks()) t.stop()
for (const t of call.localScreenStream.getTracks()) call.localScreenStream.removeTrack(t)
videos.localScreen.style.display = "none"
}
}
async function replaceMedia(call: Call, camera: VideoCamera): Promise<void> {
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
@@ -849,27 +924,11 @@ const processCommand = (function () {
const audioWasEnabled = oldAudioTracks.some((elem) => elem.enabled)
let localStream: MediaStream
try {
localStream = call.localMediaSources.screen
? await getLocalScreenCaptureStream()
: await getLocalMediaStream(localMedia(call), camera)
localStream = await getLocalMediaStream(localMedia(call), camera)
} catch (e: any) {
if (call.localMediaSources.screen) {
call.localMediaSources.screen = false
}
return
}
if (!call.localMediaSources.screen) {
for (const t of call.localStream.getTracks()) t.stop()
} else {
// Don't stop audio track if switching to screenshare
for (const t of call.localStream.getVideoTracks()) t.stop()
// Replace new track from screenshare with old track from recording device
for (const t of localStream.getAudioTracks()) {
t.stop()
localStream.removeTrack(t)
}
for (const t of call.localStream.getAudioTracks()) localStream.addTrack(t)
}
for (const t of call.localStream.getTracks()) t.stop()
call.localCamera = camera
const audioTracks = localStream.getAudioTracks()
@@ -877,9 +936,6 @@ const processCommand = (function () {
if (!audioWasEnabled && oldAudioTracks.length > 0) {
audioTracks.forEach((elem) => (elem.enabled = false))
}
if (!call.localMediaSources.camera && !call.localMediaSources.screen) {
videoTracks.forEach((elem) => (elem.enabled = false))
}
replaceTracks(pc, audioTracks)
replaceTracks(pc, videoTracks)
@@ -901,7 +957,9 @@ const processCommand = (function () {
case "1":
return CallMediaSource.Camera
case "2":
return CallMediaSource.Screen
return CallMediaSource.ScreenAudio
case "3":
return CallMediaSource.ScreenVideo
default:
return CallMediaSource.Unknown
}
@@ -942,6 +1000,8 @@ const processCommand = (function () {
}
function onMediaMuteUnmute(transceiverMid: string | null, mute: boolean) {
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
if (activeCall) {
const source = mediaSourceFromTransceiverMid(transceiverMid)
console.log("LALAL ON MUTE/UNMUTE", mute, source, transceiverMid)
@@ -965,16 +1025,28 @@ const processCommand = (function () {
}
sources.camera = !mute
activeCall.peerMediaSources = sources
videos.remote.style.display = !mute ? "block" : "none"
sendMessageToNative({resp: resp})
} else if (source == CallMediaSource.Screen && activeCall.peerMediaSources.screen == mute) {
} else if (source == CallMediaSource.ScreenAudio && activeCall.peerMediaSources.screenAudio == mute) {
const resp: WRPeerMedia = {
type: "peerMedia",
media: CallMediaType.Audio,
source: source,
enabled: !mute,
}
sources.screenAudio = !mute
activeCall.peerMediaSources = sources
sendMessageToNative({resp: resp})
} else if (source == CallMediaSource.ScreenVideo && activeCall.peerMediaSources.screenVideo == mute) {
const resp: WRPeerMedia = {
type: "peerMedia",
media: CallMediaType.Video,
source: source,
enabled: !mute,
}
sources.screen = !mute
sources.screenVideo = !mute
activeCall.peerMediaSources = sources
videos.remoteScreen.style.display = !mute ? "block" : "none"
sendMessageToNative({resp: resp})
}
}
@@ -1033,7 +1105,9 @@ const processCommand = (function () {
interface VideoElements {
local: HTMLMediaElement
localScreen: HTMLMediaElement
remote: HTMLMediaElement
remoteScreen: HTMLMediaElement
}
function shutdownCameraAndMic() {
@@ -1046,14 +1120,30 @@ const processCommand = (function () {
const videos = getVideoElements()
if (!videos) return
videos.local.srcObject = null
videos.localScreen.srcObject = null
videos.remote.srcObject = null
videos.remoteScreen.srcObject = null
}
/**
 * Looks up the four <video> elements: local/remote camera and local/remote
 * screen share. Returns undefined when any element is missing or is not a
 * media element, so callers can bail out with a single falsy check.
 */
function getVideoElements(): VideoElements | undefined {
  const local = document.getElementById("local-video-stream")
  const localScreen = document.getElementById("local-screen-video-stream")
  const remote = document.getElementById("remote-video-stream")
  const remoteScreen = document.getElementById("remote-screen-video-stream")
  // All four must exist and be media elements; otherwise signal "not ready".
  if (
    !(
      local &&
      localScreen &&
      remote &&
      remoteScreen &&
      local instanceof HTMLMediaElement &&
      localScreen instanceof HTMLMediaElement &&
      remote instanceof HTMLMediaElement &&
      remoteScreen instanceof HTMLMediaElement
    )
  )
    return
  return {local, localScreen, remote, remoteScreen}
}
// function setupVideoElement(video: HTMLElement) {
@@ -1066,7 +1156,16 @@ const processCommand = (function () {
function enableMedia(s: MediaStream, media: CallMediaType, enable: boolean) {
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
for (const t of tracks) t.enabled = enable
for (const t of tracks)
activeCall?.connection.getTransceivers().forEach((elem) => {
if (enable) {
t.enabled = true
elem.sender.replaceTrack(t)
} else {
t.enabled = false
elem.sender.replaceTrack(null)
}
})
if (media == CallMediaType.Video && activeCall) {
activeCall.localMediaSources.camera = enable
}
@@ -1075,8 +1174,9 @@ const processCommand = (function () {
/**
 * Toggles local screen sharing: flips both screen-share source flags
 * (audio + video move together) and applies the change to the active
 * call's senders via enableDisableScreenShare. No-op without an active call.
 */
toggleScreenShare = async function () {
  const call = activeCall
  if (!call) return
  call.localMediaSources.screenAudio = !call.localMediaSources.screenAudio
  call.localMediaSources.screenVideo = !call.localMediaSources.screenVideo
  await enableDisableScreenShare(call)
}
return processCommand
@@ -1173,7 +1273,21 @@ function callCryptoFunction(): CallCrypto {
onMediaMuteUnmute: (mute: boolean) => void
): (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void> {
let wasMuted = true
let lastBytes: number[] = []
let timeout: number = 0
const resetTimeout = () => {
if (wasMuted) {
wasMuted = false
onMediaMuteUnmute(wasMuted)
}
clearTimeout(timeout)
timeout = setTimeout(() => {
if (!wasMuted) {
wasMuted = true
onMediaMuteUnmute(wasMuted)
}
}, 3000)
}
// let lastBytes: number[] = []
return async (frame, controller) => {
const data = new Uint8Array(frame.data)
const n = initialPlainTextRequired[frame.type] || 1
@@ -1186,22 +1300,27 @@ function callCryptoFunction(): CallCrypto {
: new Uint8Array(0)
frame.data = concatN(initial, plaintext).buffer
controller.enqueue(frame)
lastBytes.push(frame.data.byteLength)
const sliced = lastBytes.slice(-20, lastBytes.length)
const average = sliced.reduce((prev, value) => value + prev, 0) / Math.max(1, sliced.length)
if (lastBytes.length > 20) {
console.log("LALAL REPLACED", lastBytes.length, sliced.length)
lastBytes = sliced
}
console.log("LALAL DECRYPT", frame.type, frame.data.byteLength, average)
// frame.type is undefined for audio stream, but defined for video
if (frame.type && wasMuted && average > 200) {
wasMuted = false
onMediaMuteUnmute(false)
} else if (frame.type && !wasMuted && average < 200) {
wasMuted = true
onMediaMuteUnmute(true)
}
resetTimeout()
// Check by bytes if track was disabled (not set to null)
// lastBytes.push(frame.data.byteLength)
// const sliced = lastBytes.slice(-20, lastBytes.length)
// const average = sliced.reduce((prev, value) => value + prev, 0) / Math.max(1, sliced.length)
// if (lastBytes.length > 20) {
// lastBytes = sliced
// }
// if (frame.type) {
// console.log("LALAL DECRYPT", frame.type, frame.data.byteLength, average)
// }
// // frame.type is undefined for audio stream, but defined for video
// if (frame.type && wasMuted && average > 200) {
// wasMuted = false
// onMediaMuteUnmute(false)
// } else if (frame.type && !wasMuted && average < 200) {
// wasMuted = true
// onMediaMuteUnmute(true)
// }
} catch (e) {
console.log(`decryption error ${e}`)
throw e
@@ -14,6 +14,16 @@
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<!-- Remote peer's screen-share stream. Hidden by default; call.ts shows it
     (display: block) when the peer's screen-video track unmutes.
     NOTE(review): the "javascript:" prefix in onclick is parsed as a JS label
     and is redundant, but matches the existing remote-video element's style. -->
<video
id="remote-screen-video-stream"
class="inline"
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
style="display: none"
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="local-video-stream"
class="inline"
@@ -21,6 +31,16 @@
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<!-- Local preview of the user's own screen share; muted to avoid feedback.
     Hidden by default; call.ts shows it when screen sharing is enabled. -->
<video
id="local-screen-video-stream"
class="inline"
muted
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
style="display: none"
></video>
<div id="progress"></div>
<div id="info-block">
<p id="state"></p>
@@ -12,6 +12,13 @@ body {
object-fit: cover;
}
/* Remote screen share in inline mode: fills the viewport, cropping to cover
   (same layout as the remote camera stream). */
#remote-screen-video-stream.inline {
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
}
#local-video-stream.inline {
position: absolute;
width: 20%;
@@ -23,6 +30,17 @@ body {
right: 0;
}
/* Local screen-share preview in inline mode: small rounded thumbnail pinned
   to the right edge, placed below the local camera preview (top: 33%). */
#local-screen-video-stream.inline {
position: absolute;
width: 20%;
max-width: 20%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
top: 33%;
right: 0;
}
#remote-video-stream.fullscreen {
position: absolute;
height: 100%;
@@ -30,6 +48,13 @@ body {
object-fit: cover;
}
/* Remote screen share in fullscreen mode: covers the whole viewport. */
#remote-screen-video-stream.fullscreen {
position: absolute;
height: 100%;
width: 100%;
object-fit: cover;
}
#local-video-stream.fullscreen {
position: absolute;
height: 100%;
@@ -37,6 +62,13 @@ body {
object-fit: cover;
}
/* Local screen-share preview in fullscreen mode: covers the whole viewport. */
#local-screen-video-stream.fullscreen {
position: absolute;
height: 100%;
width: 100%;
object-fit: cover;
}
*::-webkit-media-controls {
display: none !important;
-webkit-appearance: none !important;
+10 -17
View File
@@ -51,26 +51,19 @@ function toggleSpeakerManually() {
/**
 * UI handler for the camera toggle button: sends a "media" command enabling
 * the camera when it is currently off (and vice versa), mirrors the command
 * to the server-message handler, and refreshes the button icon once the
 * command completes. No-op without an active call.
 */
function toggleVideoManually() {
  if (activeCall) {
    const apiCall: WVAPICall = {command: {type: "media", media: CallMediaType.Video, enable: activeCall.localMediaSources.camera != true}}
    reactOnMessageFromServer(apiCall as any)
    processCommand(apiCall).then(() => {
      // Re-read the flag after the command ran: it reflects whether enabling succeeded.
      enableVideoIcon(activeCall?.localMediaSources?.camera == true)
    })
  }
}
async function toggleScreenManually() {
const was = activeCall?.localMediaSources.screen
const was = activeCall?.localMediaSources.screenVideo
await toggleScreenShare()
if (was != activeCall?.localMediaSources.screen) {
document.getElementById("toggle-screen")!!.innerHTML = activeCall?.localMediaSources?.screen
if (was != activeCall?.localMediaSources.screenVideo) {
document.getElementById("toggle-screen")!!.innerHTML = activeCall?.localMediaSources?.screenVideo
? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
: '<img src="/desktop/images/ic_screen_share.svg" />'
}
@@ -99,7 +92,7 @@ function reactOnMessageFromServer(msg: WVApiMessage) {
const className =
(msg.command.media == CallMediaType.Video && msg.command.enable) ||
activeCall?.peerMediaSources.camera ||
activeCall?.peerMediaSources.screen
activeCall?.peerMediaSources.screenVideo
? "video"
: "audio"
document.getElementById("info-block")!!.className = className
@@ -122,7 +115,7 @@ function reactOnMessageToServer(msg: WVApiMessage) {
switch (msg.resp?.type) {
case "peerMedia":
const className =
localMedia(activeCall) == CallMediaType.Video || activeCall.peerMediaSources.camera || activeCall.peerMediaSources.screen
localMedia(activeCall) == CallMediaType.Video || activeCall.peerMediaSources.camera || activeCall.peerMediaSources.screenVideo
? "video"
: "audio"
document.getElementById("info-block")!!.className = className