This commit is contained in:
Avently
2024-09-04 23:52:56 +09:00
parent 5a21f9c668
commit 5bb99a4361
6 changed files with 407 additions and 157 deletions
@@ -29,6 +29,7 @@ var LayoutType;
// var sendMessageToNative = ({resp}: WVApiMessage) => console.log(JSON.stringify({command: resp}))
var sendMessageToNative = (msg) => console.log(JSON.stringify(msg));
var toggleScreenShare = async () => { };
var localOrPeerMediaSourcesChanged = (_call) => { };
// Global object with cryptographic/encoding functions
const callCrypto = callCryptoFunction();
var TransformOperation;
@@ -39,12 +40,20 @@ var TransformOperation;
function localMedia(call) {
return call.localMediaSources.camera || call.localMediaSources.screenVideo ? CallMediaType.Video : CallMediaType.Audio;
}
function peerMedia(call) {
return call.peerMediaSources.camera || call.peerMediaSources.screenVideo ? CallMediaType.Video : CallMediaType.Audio;
}
let activeCall;
let answerTimeout = 30000;
var useWorker = false;
var isDesktop = false;
var localizedState = "";
var localizedDescription = "";
// Passing true here will send audio in screen record stream
const allowSendScreenAudio = false;
// When one side of a call sends candidates too fast (until local & remote descriptions are set), those candidates
// will be stored here and then set when the call will be ready to process them
var afterCallInitializedCandidates = [];
const processCommand = (function () {
const defaultIceServers = [
{ urls: ["stuns:stun.simplex.im:443"] },
@@ -137,14 +146,22 @@ const processCommand = (function () {
const remoteScreenStream = new MediaStream();
const localCamera = VideoCamera.User;
let localStream;
// Mic can be disabled while in call if a user didn't give permission to use it, it's fine
let micEnabled = false;
try {
localStream = await getLocalMediaStream(mediaType, localCamera);
micEnabled = true;
}
catch (e) {
console.log("Error while getting local media stream", e);
if (isDesktop) {
window.alert("Permission denied. Please, allow mic and video to make the call working.");
desktopShowPermissionsAlert(mediaType);
localStream = getEmptyStream(mediaType, pc);
}
else {
// On Android all streams should be present
throw e;
}
throw e;
}
const localScreenStream = new MediaStream();
if (isDesktop) {
@@ -159,7 +176,7 @@ const processCommand = (function () {
connection: pc,
iceCandidates,
localMediaSources: {
mic: true,
mic: micEnabled,
camera: mediaType == CallMediaType.Video && !isDesktop,
screenAudio: false,
screenVideo: false,
@@ -179,6 +196,7 @@ const processCommand = (function () {
cameraTrackWasSetBefore: mediaType == CallMediaType.Video,
screenShareWasSetupBefore: false,
};
localOrPeerMediaSourcesChanged(call);
await setupMediaStreams(call);
let connectionTimeout = setTimeout(connectionHandler, answerTimeout);
pc.addEventListener("connectionstatechange", connectionStateChange);
@@ -254,8 +272,15 @@ const processCommand = (function () {
if (activeCall)
endCall();
// This request for local media stream is made to prompt for camera/mic permissions on call start
if (command.media)
await getLocalMediaStream(command.media, VideoCamera.User);
if (command.media) {
try {
await getLocalMediaStream(command.media, VideoCamera.User);
}
catch (e) {
// Will be shown on the next stage of call establishing, can work without any streams
//desktopShowPermissionsAlert(command.media)
}
}
const encryption = supportsInsertableStreams(useWorker);
resp = { type: "capabilities", capabilities: { encryption } };
break;
@@ -278,6 +303,8 @@ const processCommand = (function () {
pc.addTransceiver("video", { streams: [activeCall.localScreenStream] });
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
addIceCandidates(pc, afterCallInitializedCandidates);
afterCallInitializedCandidates = [];
// for debugging, returning the command for callee to use
// resp = {
// type: "offer",
@@ -321,6 +348,8 @@ const processCommand = (function () {
// answer!.sdp = answer.sdp?.replace("a=recvonly", "a=sendrecv")
await pc.setLocalDescription(answer);
addIceCandidates(pc, remoteIceCandidates);
addIceCandidates(pc, afterCallInitializedCandidates);
afterCallInitializedCandidates = [];
// same as command for caller to use
resp = {
type: "answer",
@@ -351,13 +380,14 @@ const processCommand = (function () {
}
break;
case "ice":
const remoteIceCandidates = parse(command.iceCandidates);
if (pc) {
const remoteIceCandidates = parse(command.iceCandidates);
addIceCandidates(pc, remoteIceCandidates);
resp = { type: "ok" };
}
else {
resp = { type: "error", message: "ice: call not started" };
afterCallInitializedCandidates = remoteIceCandidates;
resp = { type: "error", message: "ice: call not started yet, will add candidates later" };
}
break;
case "media":
@@ -368,9 +398,22 @@ const processCommand = (function () {
await startSendingCamera(activeCall, activeCall.localCamera);
resp = { type: "ok" };
}
else if ((command.source == CallMediaSource.Mic && activeCall.localStream.getAudioTracks().length > 0) ||
(command.source == CallMediaSource.Camera && activeCall.localStream.getVideoTracks().length > 0)) {
if (enableMedia(activeCall.localStream, command.source, command.enable)) {
resp = { type: "ok" };
}
else {
resp = { type: "error", message: "media: cannot enable media source" };
}
}
else {
enableMedia(activeCall.localStream, command.source, command.enable);
resp = { type: "ok" };
if (await replaceMedia(activeCall, activeCall.localCamera)) {
resp = { type: "ok" };
}
else {
resp = { type: "error", message: "media: cannot replace media source" };
}
}
break;
case "camera":
@@ -378,7 +421,12 @@ const processCommand = (function () {
resp = { type: "error", message: "camera: call not started" };
}
else {
await replaceMedia(activeCall, command.camera);
if (await replaceMedia(activeCall, command.camera)) {
resp = { type: "ok" };
}
else {
resp = { type: "error", message: "camera: cannot replace media source" };
}
resp = { type: "ok" };
}
break;
@@ -440,10 +488,11 @@ const processCommand = (function () {
videos.remote.srcObject = call.remoteStream;
videos.remoteScreen.srcObject = call.remoteScreenStream;
// Without doing it manually Firefox shows black screen but video can be played in Picture-in-Picture
videos.local.play();
videos.local.play().catch((e) => console.log(e));
// videos.localScreen.play()
videos.remote.play();
videos.remoteScreen.play();
// For example, exception can be: NotAllowedError: play() failed because the user didn't interact with the document first
videos.remote.play().catch((e) => console.log(e));
videos.remoteScreen.play().catch((e) => console.log(e));
}
async function setupEncryptionWorker(call) {
if (call.aesKey) {
@@ -476,7 +525,7 @@ const processCommand = (function () {
console.log("set up encryption for sending");
for (const transceiver of pc.getTransceivers()) {
const sender = transceiver.sender;
setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key, transceiver.sender.track.kind == "video" ? CallMediaType.Video : CallMediaType.Audio, transceiver.mid);
setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key, mediaSourceFromTransceiverMid(transceiver.mid) == CallMediaSource.Camera ? CallMediaType.Video : CallMediaType.Audio, transceiver.mid);
}
}
}
@@ -583,8 +632,11 @@ const processCommand = (function () {
}
call.localMediaSources.camera = true;
call.cameraTrackWasSetBefore = true;
localOrPeerMediaSourcesChanged(call);
}
catch (e) {
console.log("Start sending camera error", e);
desktopShowPermissionsAlert(CallMediaType.Video);
return;
}
const sender = tc === null || tc === void 0 ? void 0 : tc.sender;
@@ -593,22 +645,23 @@ const processCommand = (function () {
setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key, CallMediaType.Video, tc.mid);
}
// Without doing it manually Firefox shows black screen but video can be played in Picture-in-Picture
videos.local.play();
videos.local.play().catch((e) => console.log(e));
console.log("LALAL SENDING VIDEO");
}
async function enableDisableScreenShare(call) {
toggleScreenShare = async function () {
const call = activeCall;
if (!call)
return;
const videos = getVideoElements();
if (!videos)
throw Error("no video elements");
const pc = call.connection;
if (call.localMediaSources.screenVideo) {
if (!call.localMediaSources.screenVideo) {
let localScreenStream;
try {
localScreenStream = await getLocalScreenCaptureStream();
}
catch (e) {
call.localMediaSources.screenAudio = false;
call.localMediaSources.screenVideo = false;
return;
}
for (const t of localScreenStream.getTracks())
@@ -625,6 +678,10 @@ const processCommand = (function () {
}
else if (source == CallMediaSource.ScreenVideo && screenVideoTrack) {
elem.sender.replaceTrack(screenVideoTrack);
screenVideoTrack.onended = () => {
console.log("LALAL ENDED SCREEN TRACK");
toggleScreenShare();
};
console.log("LALAL REPLACED VIDEO SCREEN TRACK");
}
if (!call.screenShareWasSetupBefore &&
@@ -637,7 +694,7 @@ const processCommand = (function () {
call.screenShareWasSetupBefore = true;
// videos.localScreen.pause()
// videos.localScreen.srcObject = call.localScreenStream
videos.localScreen.play();
videos.localScreen.play().catch((e) => console.log(e));
videos.localScreen.style.visibility = "visible";
}
else {
@@ -653,7 +710,12 @@ const processCommand = (function () {
call.localScreenStream.removeTrack(t);
videos.localScreen.style.visibility = "hidden";
}
}
if (allowSendScreenAudio) {
call.localMediaSources.screenAudio = !call.localMediaSources.screenAudio;
}
call.localMediaSources.screenVideo = !call.localMediaSources.screenVideo;
localOrPeerMediaSourcesChanged(call);
};
async function replaceMedia(call, camera) {
const videos = getVideoElements();
if (!videos)
@@ -666,7 +728,9 @@ const processCommand = (function () {
localStream = await getLocalMediaStream(localMedia(call), camera);
}
catch (e) {
return;
console.log("Replace media error", e);
desktopShowPermissionsAlert(CallMediaType.Video);
return false;
}
for (const t of call.localStream.getTracks())
t.stop();
@@ -680,7 +744,11 @@ const processCommand = (function () {
replaceTracks(pc, videoTracks);
call.localStream = localStream;
videos.local.srcObject = localStream;
videos.local.play();
videos.local.play().catch((e) => console.log(e));
call.localMediaSources.mic = call.localStream.getAudioTracks().length > 0;
call.localMediaSources.camera = call.localStream.getVideoTracks().length > 0;
localOrPeerMediaSourcesChanged(call);
return true;
}
function replaceTracks(pc, tracks) {
if (!tracks.length)
@@ -751,6 +819,8 @@ const processCommand = (function () {
sources.mic = !mute;
activeCall.peerMediaSources = sources;
sendMessageToNative({ resp: resp });
if (!mute)
videos.remote.play().catch((e) => console.log(e));
}
else if (source == CallMediaSource.Camera && activeCall.peerMediaSources.camera == mute) {
const resp = {
@@ -763,6 +833,8 @@ const processCommand = (function () {
activeCall.peerMediaSources = sources;
videos.remote.style.visibility = !mute ? "visible" : "hidden";
sendMessageToNative({ resp: resp });
if (!mute)
videos.remote.play().catch((e) => console.log(e));
}
else if (source == CallMediaSource.ScreenAudio && activeCall.peerMediaSources.screenAudio == mute) {
const resp = {
@@ -774,6 +846,8 @@ const processCommand = (function () {
sources.screenAudio = !mute;
activeCall.peerMediaSources = sources;
sendMessageToNative({ resp: resp });
if (!mute)
videos.remoteScreen.play().catch((e) => console.log(e));
}
else if (source == CallMediaSource.ScreenVideo && activeCall.peerMediaSources.screenVideo == mute) {
const resp = {
@@ -786,6 +860,8 @@ const processCommand = (function () {
activeCall.peerMediaSources = sources;
videos.remoteScreen.style.visibility = !mute ? "visible" : "hidden";
sendMessageToNative({ resp: resp });
if (!mute)
videos.remoteScreen.play().catch((e) => console.log(e));
}
if (activeCall.peerMediaSources.screenVideo) {
videos.remote.className = "collapsed";
@@ -793,11 +869,20 @@ const processCommand = (function () {
else {
videos.remote.className = "inline";
}
localOrPeerMediaSourcesChanged(activeCall);
}
function getLocalMediaStream(mediaType, facingMode) {
const constraints = callMediaConstraints(mediaType, facingMode);
return navigator.mediaDevices.getUserMedia(constraints);
}
function getEmptyStream(mediaType, pc) {
const stream = new MediaStream();
pc.addTransceiver("audio", { streams: [stream] });
if (mediaType == CallMediaType.Video) {
pc.addTransceiver("video", { streams: [stream] });
}
return stream;
}
function getLocalScreenCaptureStream() {
const constraints /* DisplayMediaStreamConstraints */ = {
video: {
@@ -809,7 +894,7 @@ const processCommand = (function () {
//},
//aspectRatio: 1.33,
},
audio: false,
audio: allowSendScreenAudio,
// This works with Chrome, Edge, Opera, but not with Firefox and Safari
// systemAudio: "include"
};
@@ -877,33 +962,43 @@ const processCommand = (function () {
// }
// }
function enableMedia(s, source, enable) {
if (!activeCall)
return false;
const tracks = source == CallMediaSource.Camera ? s.getVideoTracks() : s.getAudioTracks();
for (const t of tracks)
activeCall === null || activeCall === void 0 ? void 0 : activeCall.connection.getTransceivers().forEach((elem) => {
if ((t.kind == CallMediaType.Audio && mediaSourceFromTransceiverMid(elem.mid) == CallMediaSource.Mic) ||
(t.kind == CallMediaType.Video && mediaSourceFromTransceiverMid(elem.mid) == CallMediaSource.Camera)) {
let changedSource = false;
for (const t of tracks) {
for (const transceiver of activeCall.connection.getTransceivers()) {
if ((t.kind == CallMediaType.Audio && mediaSourceFromTransceiverMid(transceiver.mid) == CallMediaSource.Mic) ||
(t.kind == CallMediaType.Video && mediaSourceFromTransceiverMid(transceiver.mid) == CallMediaSource.Camera)) {
if (enable) {
t.enabled = true;
elem.sender.replaceTrack(t);
transceiver.sender.replaceTrack(t);
}
else {
t.enabled = false;
elem.sender.replaceTrack(null);
transceiver.sender.replaceTrack(null);
}
if (source == CallMediaSource.Mic) {
activeCall.localMediaSources.mic = enable;
changedSource = true;
}
else if (source == CallMediaSource.Camera) {
activeCall.localMediaSources.camera = enable;
changedSource = true;
}
}
});
if (source == CallMediaSource.Camera && activeCall) {
activeCall.localMediaSources.camera = enable;
}
}
if (changedSource) {
localOrPeerMediaSourcesChanged(activeCall);
return true;
}
else {
console.log("Enable media error");
desktopShowPermissionsAlert(source == CallMediaSource.Mic ? CallMediaType.Audio : CallMediaType.Video);
return false;
}
}
toggleScreenShare = async function () {
const call = activeCall;
if (!call)
return;
call.localMediaSources.screenAudio = !call.localMediaSources.screenAudio;
call.localMediaSources.screenVideo = !call.localMediaSources.screenVideo;
await enableDisableScreenShare(call);
};
return processCommand;
})();
function toggleRemoteVideoFitFill() {
@@ -914,16 +1009,15 @@ function toggleRemoteScreenVideoFitFill() {
const remoteScreen = document.getElementById("remote-screen-video-stream");
remoteScreen.style.objectFit = remoteScreen.style.objectFit != "contain" ? "contain" : "cover";
}
function toggleMedia(s, media) {
function togglePeerMedia(s, media) {
if (!activeCall)
return false;
let res = false;
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks();
for (const t of tracks) {
t.enabled = !t.enabled;
res = t.enabled;
}
if (media == CallMediaType.Video && activeCall) {
activeCall.localMediaSources.camera = res;
}
return res;
}
function changeLayout(layout) {
@@ -948,6 +1042,16 @@ function changeLayout(layout) {
break;
}
}
function desktopShowPermissionsAlert(mediaType) {
if (!isDesktop)
return;
if (mediaType == CallMediaType.Audio) {
window.alert("Permissions denied. Please, allow access to mic to make the call working and hit unmute button. Don't reload the page.");
}
else {
window.alert("Permissions denied. Please, allow access to mic and camera to make the call working and hit unmute button. Don't reload the page.");
}
}
// Cryptography function - it is loaded both in the main window and in worker context (if the worker is used)
function callCryptoFunction() {
const initialPlainTextRequired = {
@@ -45,6 +45,7 @@
<div id="info-block">
<p id="state"></p>
<p id="description"></p>
<b id="media-sources" style="color: #fff"></b>
</div>
<div id="audio-call-icon">
<img src="/desktop/images/ic_phone_in_talk.svg" />
@@ -53,7 +54,7 @@
<button id="toggle-screen" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleScreenManually()">
<img src="/desktop/images/ic_screen_share.svg" />
</button>
<button id="toggle-audio" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleAudioManually()">
<button id="toggle-mic" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleMicManually()">
<img src="/desktop/images/ic_mic.svg" />
</button>
<button id="end-call" onclick="javascript:endCallManually()">
@@ -62,7 +63,7 @@
<button id="toggle-speaker" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleSpeakerManually()">
<img src="/desktop/images/ic_volume_up.svg" />
</button>
<button id="toggle-video" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleVideoManually()">
<button id="toggle-camera" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleCameraManually()">
<img src="/desktop/images/ic_videocam_off.svg" />
</button>
</p>
@@ -28,47 +28,80 @@ socket.addEventListener("close", (_event) => {
function endCallManually() {
sendMessageToNative({ resp: { type: "end" } });
}
function toggleAudioManually() {
if (activeCall && localMedia(activeCall)) {
document.getElementById("toggle-audio").innerHTML = toggleMedia(activeCall.localStream, CallMediaType.Audio)
? '<img src="/desktop/images/ic_mic.svg" />'
: '<img src="/desktop/images/ic_mic_off.svg" />';
function toggleMicManually() {
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localStream) {
const apiCall = {
command: { type: "media", source: CallMediaSource.Mic, enable: !activeCall.localMediaSources.mic },
};
processCommand(apiCall);
}
}
function toggleSpeakerManually() {
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.remoteStream) {
document.getElementById("toggle-speaker").innerHTML = toggleMedia(activeCall.remoteStream, CallMediaType.Audio)
document.getElementById("toggle-speaker").innerHTML = togglePeerMedia(activeCall.remoteStream, CallMediaType.Audio)
? '<img src="/desktop/images/ic_volume_up.svg" />'
: '<img src="/desktop/images/ic_volume_down.svg" />';
}
}
function toggleVideoManually() {
function toggleCameraManually() {
if (activeCall) {
const apiCall = {
command: { type: "media", source: CallMediaSource.Camera, enable: activeCall.localMediaSources.camera != true },
};
reactOnMessageFromServer(apiCall);
processCommand(apiCall).then(() => {
var _a;
enableVideoIcon(((_a = activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources) === null || _a === void 0 ? void 0 : _a.camera) == true);
});
processCommand(apiCall);
}
}
async function toggleScreenManually() {
var _a;
const was = activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources.screenVideo;
await toggleScreenShare();
if (was != (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources.screenVideo)) {
document.getElementById("toggle-screen").innerHTML = ((_a = activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMediaSources) === null || _a === void 0 ? void 0 : _a.screenVideo)
? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
: '<img src="/desktop/images/ic_screen_share.svg" />';
}
}
function enableVideoIcon(enabled) {
document.getElementById("toggle-video").innerHTML = enabled
// override function in call.ts to adapt UI to enabled media sources
localOrPeerMediaSourcesChanged = (call) => {
enableMicIcon(call.localMediaSources.mic);
enableCameraIcon(call.localMediaSources.camera);
enableScreenIcon(call.localMediaSources.screenVideo);
const className = localMedia(call) == CallMediaType.Video || peerMedia(call) == CallMediaType.Video ? CallMediaType.Video : CallMediaType.Audio;
document.getElementById("info-block").className = className;
if (call.connection.iceConnectionState == "connected") {
document.getElementById("audio-call-icon").style.display = className == CallMediaType.Audio ? "block" : "none";
}
document.getElementById("media-sources").innerText = mediaSourcesStatus(call);
};
function enableMicIcon(enabled) {
document.getElementById("toggle-mic").innerHTML = enabled
? '<img src="/desktop/images/ic_mic.svg" />'
: '<img src="/desktop/images/ic_mic_off.svg" />';
}
function enableCameraIcon(enabled) {
document.getElementById("toggle-camera").innerHTML = enabled
? '<img src="/desktop/images/ic_videocam_filled.svg" />'
: '<img src="/desktop/images/ic_videocam_off.svg" />';
}
function enableScreenIcon(enabled) {
document.getElementById("toggle-screen").innerHTML = enabled
? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
: '<img src="/desktop/images/ic_screen_share.svg" />';
}
function mediaSourcesStatus(call) {
let status = "local";
if (call.localMediaSources.mic)
status += " mic";
if (call.localMediaSources.camera)
status += " cam";
if (call.localMediaSources.screenAudio)
status += " scrA";
if (call.localMediaSources.screenVideo)
status += " scrV";
status += " | peer";
if (call.peerMediaSources.mic)
status += " mic";
if (call.peerMediaSources.camera)
status += " cam";
if (call.peerMediaSources.screenAudio)
status += " scrA";
if (call.peerMediaSources.screenVideo)
status += " scrV";
return status;
}
function reactOnMessageFromServer(msg) {
var _a;
switch ((_a = msg.command) === null || _a === void 0 ? void 0 : _a.type) {
@@ -77,21 +110,12 @@ function reactOnMessageFromServer(msg) {
break;
case "offer":
case "start":
document.getElementById("toggle-audio").style.display = "inline-block";
document.getElementById("toggle-mic").style.display = "inline-block";
document.getElementById("toggle-speaker").style.display = "inline-block";
document.getElementById("toggle-video").style.display = "inline-block";
document.getElementById("toggle-camera").style.display = "inline-block";
document.getElementById("toggle-screen").style.display = "inline-block";
document.getElementById("info-block").className = msg.command.media;
break;
case "media":
const className = (msg.command.source == CallMediaSource.Camera && msg.command.enable) ||
(activeCall === null || activeCall === void 0 ? void 0 : activeCall.peerMediaSources.camera) ||
(activeCall === null || activeCall === void 0 ? void 0 : activeCall.peerMediaSources.screenVideo)
? "video"
: "audio";
document.getElementById("info-block").className = className;
document.getElementById("audio-call-icon").style.display = className == CallMediaType.Audio ? "block" : "none";
break;
case "description":
updateCallInfoView(msg.command.state, msg.command.description);
if ((activeCall === null || activeCall === void 0 ? void 0 : activeCall.connection.connectionState) == "connected") {
+138 -45
View File
@@ -212,6 +212,7 @@ interface ConnectionInfo {
// var sendMessageToNative = ({resp}: WVApiMessage) => console.log(JSON.stringify({command: resp}))
var sendMessageToNative = (msg: WVApiMessage) => console.log(JSON.stringify(msg))
var toggleScreenShare = async () => {}
var localOrPeerMediaSourcesChanged = (_call: Call) => {}
// Global object with cryptographic/encoding functions
const callCrypto = callCryptoFunction()
@@ -255,12 +256,21 @@ function localMedia(call: Call): CallMediaType {
return call.localMediaSources.camera || call.localMediaSources.screenVideo ? CallMediaType.Video : CallMediaType.Audio
}
function peerMedia(call: Call): CallMediaType {
return call.peerMediaSources.camera || call.peerMediaSources.screenVideo ? CallMediaType.Video : CallMediaType.Audio
}
let activeCall: Call | undefined
let answerTimeout = 30_000
var useWorker = false
var isDesktop = false
var localizedState = ""
var localizedDescription = ""
// Passing true here will send audio in screen record stream
const allowSendScreenAudio = false
// When one side of a call sends candidates too fast (until local & remote descriptions are set), those candidates
// will be stored here and then set when the call will be ready to process them
var afterCallInitializedCandidates: RTCIceCandidateInit[] = []
const processCommand = (function () {
type RTCRtpSenderWithEncryption = RTCRtpSender & {
@@ -378,13 +388,20 @@ const processCommand = (function () {
const remoteScreenStream = new MediaStream()
const localCamera = VideoCamera.User
let localStream: MediaStream
// Mic can be disabled while in call if a user didn't give permission to use it, it's fine
let micEnabled = false
try {
localStream = await getLocalMediaStream(mediaType, localCamera)
micEnabled = true
} catch (e) {
console.log("Error while getting local media stream", e)
if (isDesktop) {
window.alert("Permissions denied. Please, allow mic and video to make the call working and repeat again.")
desktopShowPermissionsAlert(mediaType)
localStream = getEmptyStream(mediaType, pc)
} else {
// On Android all streams should be present
throw e
}
throw e
}
const localScreenStream = new MediaStream()
if (isDesktop) {
@@ -399,7 +416,7 @@ const processCommand = (function () {
connection: pc,
iceCandidates,
localMediaSources: {
mic: true,
mic: micEnabled,
camera: mediaType == CallMediaType.Video && !isDesktop,
screenAudio: false,
screenVideo: false,
@@ -419,6 +436,7 @@ const processCommand = (function () {
cameraTrackWasSetBefore: mediaType == CallMediaType.Video,
screenShareWasSetupBefore: false,
}
localOrPeerMediaSourcesChanged(call)
await setupMediaStreams(call)
let connectionTimeout: number | undefined = setTimeout(connectionHandler, answerTimeout)
pc.addEventListener("connectionstatechange", connectionStateChange)
@@ -497,7 +515,14 @@ const processCommand = (function () {
console.log("starting outgoing call - capabilities")
if (activeCall) endCall()
// This request for local media stream is made to prompt for camera/mic permissions on call start
if (command.media) await getLocalMediaStream(command.media, VideoCamera.User)
if (command.media) {
try {
await getLocalMediaStream(command.media, VideoCamera.User)
} catch (e) {
// Will be shown on the next stage of call establishing, can work without any streams
//desktopShowPermissionsAlert(command.media)
}
}
const encryption = supportsInsertableStreams(useWorker)
resp = {type: "capabilities", capabilities: {encryption}}
break
@@ -520,6 +545,8 @@ const processCommand = (function () {
const offer = await pc.createOffer()
await pc.setLocalDescription(offer)
addIceCandidates(pc, afterCallInitializedCandidates)
afterCallInitializedCandidates = []
// for debugging, returning the command for callee to use
// resp = {
// type: "offer",
@@ -565,6 +592,8 @@ const processCommand = (function () {
// answer!.sdp = answer.sdp?.replace("a=recvonly", "a=sendrecv")
await pc.setLocalDescription(answer)
addIceCandidates(pc, remoteIceCandidates)
addIceCandidates(pc, afterCallInitializedCandidates)
afterCallInitializedCandidates = []
// same as command for caller to use
resp = {
type: "answer",
@@ -593,12 +622,13 @@ const processCommand = (function () {
}
break
case "ice":
const remoteIceCandidates: RTCIceCandidateInit[] = parse(command.iceCandidates)
if (pc) {
const remoteIceCandidates: RTCIceCandidateInit[] = parse(command.iceCandidates)
addIceCandidates(pc, remoteIceCandidates)
resp = {type: "ok"}
} else {
resp = {type: "error", message: "ice: call not started"}
afterCallInitializedCandidates = remoteIceCandidates
resp = {type: "error", message: "ice: call not started yet, will add candidates later"}
}
break
case "media":
@@ -607,16 +637,32 @@ const processCommand = (function () {
} else if (!activeCall.cameraTrackWasSetBefore && command.source == CallMediaSource.Camera && command.enable) {
await startSendingCamera(activeCall, activeCall.localCamera)
resp = {type: "ok"}
} else if (
(command.source == CallMediaSource.Mic && activeCall.localStream.getAudioTracks().length > 0) ||
(command.source == CallMediaSource.Camera && activeCall.localStream.getVideoTracks().length > 0)
) {
if (enableMedia(activeCall.localStream, command.source, command.enable)) {
resp = {type: "ok"}
} else {
resp = {type: "error", message: "media: cannot enable media source"}
}
} else {
enableMedia(activeCall.localStream, command.source, command.enable)
resp = {type: "ok"}
if (await replaceMedia(activeCall, activeCall.localCamera)) {
resp = {type: "ok"}
} else {
resp = {type: "error", message: "media: cannot replace media source"}
}
}
break
case "camera":
if (!activeCall || !pc) {
resp = {type: "error", message: "camera: call not started"}
} else {
await replaceMedia(activeCall, command.camera)
if (await replaceMedia(activeCall, command.camera)) {
resp = {type: "ok"}
} else {
resp = {type: "error", message: "camera: cannot replace media source"}
}
resp = {type: "ok"}
}
break
@@ -677,10 +723,11 @@ const processCommand = (function () {
videos.remote.srcObject = call.remoteStream
videos.remoteScreen.srcObject = call.remoteScreenStream
// Without doing it manually Firefox shows black screen but video can be played in Picture-in-Picture
videos.local.play()
videos.local.play().catch((e) => console.log(e))
// videos.localScreen.play()
videos.remote.play()
videos.remoteScreen.play()
// For example, exception can be: NotAllowedError: play() failed because the user didn't interact with the document first
videos.remote.play().catch((e) => console.log(e))
videos.remoteScreen.play().catch((e) => console.log(e))
}
async function setupEncryptionWorker(call: Call) {
@@ -721,7 +768,7 @@ const processCommand = (function () {
call.worker,
call.aesKey,
call.key,
transceiver.sender.track!.kind == "video" ? CallMediaType.Video : CallMediaType.Audio,
mediaSourceFromTransceiverMid(transceiver.mid) == CallMediaSource.Camera ? CallMediaType.Video : CallMediaType.Audio,
transceiver.mid
)
}
@@ -836,7 +883,10 @@ const processCommand = (function () {
}
call.localMediaSources.camera = true
call.cameraTrackWasSetBefore = true
localOrPeerMediaSourcesChanged(call)
} catch (e: any) {
console.log("Start sending camera error", e)
desktopShowPermissionsAlert(CallMediaType.Video)
return
}
@@ -855,22 +905,23 @@ const processCommand = (function () {
}
// Without doing it manually Firefox shows black screen but video can be played in Picture-in-Picture
videos.local.play()
videos.local.play().catch((e) => console.log(e))
console.log("LALAL SENDING VIDEO")
}
async function enableDisableScreenShare(call: Call): Promise<void> {
toggleScreenShare = async function () {
const call = activeCall
if (!call) return
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
const pc = call.connection
if (call.localMediaSources.screenVideo) {
if (!call.localMediaSources.screenVideo) {
let localScreenStream: MediaStream
try {
localScreenStream = await getLocalScreenCaptureStream()
} catch (e: any) {
call.localMediaSources.screenAudio = false
call.localMediaSources.screenVideo = false
return
}
@@ -886,6 +937,10 @@ const processCommand = (function () {
console.log("LALAL REPLACED AUDIO SCREEN TRACK")
} else if (source == CallMediaSource.ScreenVideo && screenVideoTrack) {
elem.sender.replaceTrack(screenVideoTrack)
screenVideoTrack.onended = () => {
console.log("LALAL ENDED SCREEN TRACK")
toggleScreenShare()
}
console.log("LALAL REPLACED VIDEO SCREEN TRACK")
}
if (
@@ -909,7 +964,7 @@ const processCommand = (function () {
call.screenShareWasSetupBefore = true
// videos.localScreen.pause()
// videos.localScreen.srcObject = call.localScreenStream
videos.localScreen.play()
videos.localScreen.play().catch((e) => console.log(e))
videos.localScreen.style.visibility = "visible"
} else {
pc.getTransceivers().forEach((elem) => {
@@ -922,9 +977,15 @@ const processCommand = (function () {
for (const t of call.localScreenStream.getTracks()) call.localScreenStream.removeTrack(t)
videos.localScreen.style.visibility = "hidden"
}
if (allowSendScreenAudio) {
call.localMediaSources.screenAudio = !call.localMediaSources.screenAudio
}
call.localMediaSources.screenVideo = !call.localMediaSources.screenVideo
localOrPeerMediaSourcesChanged(call)
}
async function replaceMedia(call: Call, camera: VideoCamera): Promise<void> {
async function replaceMedia(call: Call, camera: VideoCamera): Promise<boolean> {
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
const pc = call.connection
@@ -934,7 +995,9 @@ const processCommand = (function () {
try {
localStream = await getLocalMediaStream(localMedia(call), camera)
} catch (e: any) {
return
console.log("Replace media error", e)
desktopShowPermissionsAlert(CallMediaType.Video)
return false
}
for (const t of call.localStream.getTracks()) t.stop()
call.localCamera = camera
@@ -949,7 +1012,13 @@ const processCommand = (function () {
replaceTracks(pc, videoTracks)
call.localStream = localStream
videos.local.srcObject = localStream
videos.local.play()
videos.local.play().catch((e) => console.log(e))
call.localMediaSources.mic = call.localStream.getAudioTracks().length > 0
call.localMediaSources.camera = call.localStream.getVideoTracks().length > 0
localOrPeerMediaSourcesChanged(call)
return true
}
function replaceTracks(pc: RTCPeerConnection, tracks: MediaStreamTrack[]) {
@@ -1011,6 +1080,7 @@ const processCommand = (function () {
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
if (!activeCall) return
const source = mediaSourceFromTransceiverMid(transceiverMid)
console.log("LALAL ON MUTE/UNMUTE", mute, source, transceiverMid)
const sources = activeCall.peerMediaSources
@@ -1024,6 +1094,7 @@ const processCommand = (function () {
sources.mic = !mute
activeCall.peerMediaSources = sources
sendMessageToNative({resp: resp})
if (!mute) videos.remote.play().catch((e) => console.log(e))
} else if (source == CallMediaSource.Camera && activeCall.peerMediaSources.camera == mute) {
const resp: WRPeerMedia = {
type: "peerMedia",
@@ -1035,6 +1106,7 @@ const processCommand = (function () {
activeCall.peerMediaSources = sources
videos.remote.style.visibility = !mute ? "visible" : "hidden"
sendMessageToNative({resp: resp})
if (!mute) videos.remote.play().catch((e) => console.log(e))
} else if (source == CallMediaSource.ScreenAudio && activeCall.peerMediaSources.screenAudio == mute) {
const resp: WRPeerMedia = {
type: "peerMedia",
@@ -1045,6 +1117,7 @@ const processCommand = (function () {
sources.screenAudio = !mute
activeCall.peerMediaSources = sources
sendMessageToNative({resp: resp})
if (!mute) videos.remoteScreen.play().catch((e) => console.log(e))
} else if (source == CallMediaSource.ScreenVideo && activeCall.peerMediaSources.screenVideo == mute) {
const resp: WRPeerMedia = {
type: "peerMedia",
@@ -1056,12 +1129,14 @@ const processCommand = (function () {
activeCall.peerMediaSources = sources
videos.remoteScreen.style.visibility = !mute ? "visible" : "hidden"
sendMessageToNative({resp: resp})
if (!mute) videos.remoteScreen.play().catch((e) => console.log(e))
}
if (activeCall.peerMediaSources.screenVideo) {
videos.remote.className = "collapsed"
} else {
videos.remote.className = "inline"
}
localOrPeerMediaSourcesChanged(activeCall)
}
function getLocalMediaStream(mediaType: CallMediaType, facingMode: VideoCamera): Promise<MediaStream> {
@@ -1089,7 +1164,7 @@ const processCommand = (function () {
//},
//aspectRatio: 1.33,
},
audio: false,
audio: allowSendScreenAudio,
// This works with Chrome, Edge, Opera, but not with Firefox and Safari
// systemAudio: "include"
}
@@ -1175,34 +1250,42 @@ const processCommand = (function () {
// }
// }
function enableMedia(s: MediaStream, source: CallMediaSource, enable: boolean) {
function enableMedia(s: MediaStream, source: CallMediaSource, enable: boolean): boolean {
if (!activeCall) return false
const tracks = source == CallMediaSource.Camera ? s.getVideoTracks() : s.getAudioTracks()
for (const t of tracks)
activeCall?.connection.getTransceivers().forEach((elem) => {
let changedSource = false
for (const t of tracks) {
for (const transceiver of activeCall.connection.getTransceivers()) {
if (
(t.kind == CallMediaType.Audio && mediaSourceFromTransceiverMid(elem.mid) == CallMediaSource.Mic) ||
(t.kind == CallMediaType.Video && mediaSourceFromTransceiverMid(elem.mid) == CallMediaSource.Camera)
(t.kind == CallMediaType.Audio && mediaSourceFromTransceiverMid(transceiver.mid) == CallMediaSource.Mic) ||
(t.kind == CallMediaType.Video && mediaSourceFromTransceiverMid(transceiver.mid) == CallMediaSource.Camera)
) {
if (enable) {
t.enabled = true
elem.sender.replaceTrack(t)
transceiver.sender.replaceTrack(t)
} else {
t.enabled = false
elem.sender.replaceTrack(null)
transceiver.sender.replaceTrack(null)
}
if (source == CallMediaSource.Mic) {
activeCall.localMediaSources.mic = enable
changedSource = true
} else if (source == CallMediaSource.Camera) {
activeCall.localMediaSources.camera = enable
changedSource = true
}
}
})
if (source == CallMediaSource.Camera && activeCall) {
activeCall.localMediaSources.camera = enable
}
}
if (changedSource) {
localOrPeerMediaSourcesChanged(activeCall)
return true
} else {
console.log("Enable media error")
desktopShowPermissionsAlert(source == CallMediaSource.Mic ? CallMediaType.Audio : CallMediaType.Video)
return false
}
}
toggleScreenShare = async function () {
const call = activeCall
if (!call) return
call.localMediaSources.screenAudio = !call.localMediaSources.screenAudio
call.localMediaSources.screenVideo = !call.localMediaSources.screenVideo
await enableDisableScreenShare(call)
}
return processCommand
@@ -1218,16 +1301,14 @@ function toggleRemoteScreenVideoFitFill() {
remoteScreen.style.objectFit = remoteScreen.style.objectFit != "contain" ? "contain" : "cover"
}
function toggleMedia(s: MediaStream, media: CallMediaType): boolean {
function togglePeerMedia(s: MediaStream, media: CallMediaType): boolean {
if (!activeCall) return false
let res = false
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
for (const t of tracks) {
t.enabled = !t.enabled
res = t.enabled
}
if (media == CallMediaType.Video && activeCall) {
activeCall.localMediaSources.camera = res
}
return res
}
@@ -1254,6 +1335,18 @@ function changeLayout(layout: LayoutType) {
}
}
function desktopShowPermissionsAlert(mediaType: CallMediaType) {
  // Mobile/webview platforms surface their own permission prompts; only desktop needs this alert.
  if (!isDesktop) return
  // Audio-only calls mention just the mic; anything else asks for mic and camera.
  const message =
    mediaType == CallMediaType.Audio
      ? "Permissions denied. Please, allow access to mic to make the call working and hit unmute button. Don't reload the page."
      : "Permissions denied. Please, allow access to mic and camera to make the call working and hit unmute button. Don't reload the page."
  window.alert(message)
}
type TransformFrameFunc = (
key: CryptoKey,
onMediaMuteUnmute: (mute: boolean) => void
@@ -45,6 +45,7 @@
<div id="info-block">
<p id="state"></p>
<p id="description"></p>
<b id="media-sources" style="color: #fff"></b>
</div>
<div id="audio-call-icon">
<img src="/desktop/images/ic_phone_in_talk.svg" />
@@ -53,7 +54,7 @@
<button id="toggle-screen" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleScreenManually()">
<img src="/desktop/images/ic_screen_share.svg" />
</button>
<button id="toggle-audio" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleAudioManually()">
<button id="toggle-mic" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleMicManually()">
<img src="/desktop/images/ic_mic.svg" />
</button>
<button id="end-call" onclick="javascript:endCallManually()">
@@ -62,7 +63,7 @@
<button id="toggle-speaker" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleSpeakerManually()">
<img src="/desktop/images/ic_volume_up.svg" />
</button>
<button id="toggle-video" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleVideoManually()">
<button id="toggle-camera" style="display: none; width: 44px; height: 44px" onclick="javascript:toggleCameraManually()">
<img src="/desktop/images/ic_videocam_off.svg" />
</button>
</p>
+58 -31
View File
@@ -33,50 +33,87 @@ function endCallManually() {
sendMessageToNative({resp: {type: "end"}})
}
function toggleAudioManually() {
if (activeCall && localMedia(activeCall)) {
document.getElementById("toggle-audio")!!.innerHTML = toggleMedia(activeCall.localStream, CallMediaType.Audio)
? '<img src="/desktop/images/ic_mic.svg" />'
: '<img src="/desktop/images/ic_mic_off.svg" />'
function toggleMicManually() {
  // No active call with a local stream -> nothing to toggle.
  if (!activeCall?.localStream) return
  // Route the toggle through processCommand so call state and UI stay in sync.
  const apiCall: WVAPICall = {
    command: {type: "media", source: CallMediaSource.Mic, enable: !activeCall.localMediaSources.mic},
  }
  processCommand(apiCall)
}
function toggleSpeakerManually() {
if (activeCall?.remoteStream) {
document.getElementById("toggle-speaker")!!.innerHTML = toggleMedia(activeCall.remoteStream, CallMediaType.Audio)
document.getElementById("toggle-speaker")!!.innerHTML = togglePeerMedia(activeCall.remoteStream, CallMediaType.Audio)
? '<img src="/desktop/images/ic_volume_up.svg" />'
: '<img src="/desktop/images/ic_volume_down.svg" />'
}
}
function toggleVideoManually() {
function toggleCameraManually() {
if (activeCall) {
const apiCall: WVAPICall = {
command: {type: "media", source: CallMediaSource.Camera, enable: activeCall.localMediaSources.camera != true},
}
reactOnMessageFromServer(apiCall as any)
processCommand(apiCall).then(() => {
enableVideoIcon(activeCall?.localMediaSources?.camera == true)
})
processCommand(apiCall)
}
}
async function toggleScreenManually() {
  // Capture the state before toggling so the button icon is only touched on a real change
  // (toggleScreenShare may bail out, e.g. when the user cancels the screen picker).
  const sharingBefore = activeCall?.localMediaSources.screenVideo
  await toggleScreenShare()
  const sharingAfter = activeCall?.localMediaSources.screenVideo
  if (sharingBefore != sharingAfter) {
    const icon = sharingAfter
      ? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
      : '<img src="/desktop/images/ic_screen_share.svg" />'
    document.getElementById("toggle-screen")!!.innerHTML = icon
  }
}
function enableVideoIcon(enabled: boolean) {
document.getElementById("toggle-video")!!.innerHTML = enabled
// override function in call.ts to adapt UI to enabled media sources
localOrPeerMediaSourcesChanged = (call: Call) => {
  // Refresh the toolbar button glyphs to match the local sources.
  enableMicIcon(call.localMediaSources.mic)
  enableCameraIcon(call.localMediaSources.camera)
  enableScreenIcon(call.localMediaSources.screenVideo)
  // The call is styled as a video call when either side is sending any video.
  const anyVideo = localMedia(call) == CallMediaType.Video || peerMedia(call) == CallMediaType.Video
  const className = anyVideo ? CallMediaType.Video : CallMediaType.Audio
  document.getElementById("info-block")!.className = className
  // Only switch the audio-call placeholder once ICE reports a live connection.
  if (call.connection.iceConnectionState == "connected") {
    const display = className == CallMediaType.Audio ? "block" : "none"
    document.getElementById("audio-call-icon")!.style.display = display
  }
  // Debug label showing which local/peer sources are active.
  document.getElementById("media-sources")!.innerText = mediaSourcesStatus(call)
}
function enableMicIcon(enabled: boolean) {
  // Swap the mic button glyph between the unmuted and muted images.
  const icon = enabled ? '<img src="/desktop/images/ic_mic.svg" />' : '<img src="/desktop/images/ic_mic_off.svg" />'
  document.getElementById("toggle-mic")!.innerHTML = icon
}
function enableCameraIcon(enabled: boolean) {
  // Swap the camera button glyph between the on and off images.
  const icon = enabled
    ? '<img src="/desktop/images/ic_videocam_filled.svg" />'
    : '<img src="/desktop/images/ic_videocam_off.svg" />'
  document.getElementById("toggle-camera")!.innerHTML = icon
}
function enableScreenIcon(enabled: boolean) {
  // Swap the screen-share button glyph between the stop-share and start-share images.
  const icon = enabled
    ? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
    : '<img src="/desktop/images/ic_screen_share.svg" />'
  document.getElementById("toggle-screen")!.innerHTML = icon
}
function mediaSourcesStatus(call: Call): string {
  // Builds a compact debug summary like "local mic cam | peer scrV" from the
  // currently enabled local and peer media sources.
  const parts: string[] = ["local"]
  if (call.localMediaSources.mic) parts.push("mic")
  if (call.localMediaSources.camera) parts.push("cam")
  if (call.localMediaSources.screenAudio) parts.push("scrA")
  if (call.localMediaSources.screenVideo) parts.push("scrV")
  parts.push("|", "peer")
  if (call.peerMediaSources.mic) parts.push("mic")
  if (call.peerMediaSources.camera) parts.push("cam")
  if (call.peerMediaSources.screenAudio) parts.push("scrA")
  if (call.peerMediaSources.screenVideo) parts.push("scrV")
  return parts.join(" ")
}
function reactOnMessageFromServer(msg: WVApiMessage) {
switch (msg.command?.type) {
case "capabilities":
@@ -84,22 +121,12 @@ function reactOnMessageFromServer(msg: WVApiMessage) {
break
case "offer":
case "start":
document.getElementById("toggle-audio")!!.style.display = "inline-block"
document.getElementById("toggle-mic")!!.style.display = "inline-block"
document.getElementById("toggle-speaker")!!.style.display = "inline-block"
document.getElementById("toggle-video")!!.style.display = "inline-block"
document.getElementById("toggle-camera")!!.style.display = "inline-block"
document.getElementById("toggle-screen")!!.style.display = "inline-block"
document.getElementById("info-block")!!.className = msg.command.media
break
case "media":
const className =
(msg.command.source == CallMediaSource.Camera && msg.command.enable) ||
activeCall?.peerMediaSources.camera ||
activeCall?.peerMediaSources.screenVideo
? "video"
: "audio"
document.getElementById("info-block")!!.className = className
document.getElementById("audio-call-icon")!.style.display = className == CallMediaType.Audio ? "block" : "none"
break
case "description":
updateCallInfoView(msg.command.state, msg.command.description)
if (activeCall?.connection.connectionState == "connected") {