android, desktop: fix Safari sound (#4947)

* android, desktop: fix Safari sound

* another approach

* test

* Revert "test"

This reverts commit f89a30a88e.

* Revert "another approach"

This reverts commit 824ab7047c.

* Revert "android, desktop: fix Safari sound"

This reverts commit 80a866d472.

* android, desktop: fix Safari sound

* dependencies
This commit is contained in:
Stanislav Dmitrenko
2024-09-28 04:04:16 +07:00
committed by GitHub
parent f048ddb922
commit fc0879ebb7
3 changed files with 43 additions and 31 deletions
@@ -186,6 +186,7 @@ const processCommand = (function () {
localStream,
localScreenStream,
remoteStream,
remoteTracks: new Map(),
remoteScreenStream,
peerMediaSources: {
mic: false,
@@ -548,14 +549,6 @@ const processCommand = (function () {
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], { type: "text/javascript" })));
call.worker.onerror = ({ error, filename, lineno, message }) => console.log({ error, filename, lineno, message });
// call.worker.onmessage = ({data}) => console.log(JSON.stringify({message: data}))
call.worker.onmessage = ({ data }) => {
console.log(JSON.stringify({ message: data }));
const transceiverMid = data.transceiverMid;
const mute = data.mute;
if (transceiverMid && mute != undefined) {
onMediaMuteUnmute(transceiverMid, mute);
}
};
}
}
}
@@ -661,12 +654,7 @@ const processCommand = (function () {
}
setupMuteUnmuteListener(event.transceiver, track);
const mediaSource = mediaSourceFromTransceiverMid(event.transceiver.mid);
if (mediaSource == CallMediaSource.ScreenAudio || mediaSource == CallMediaSource.ScreenVideo) {
call.remoteScreenStream.addTrack(track);
}
else {
call.remoteStream.addTrack(track);
}
call.remoteTracks.set(mediaSource, track);
console.log(`ontrack success`);
}
catch (e) {
@@ -1023,10 +1011,26 @@ const processCommand = (function () {
if (!mute)
videos.remoteScreen.play().catch((e) => console.log(e));
}
if (!mute)
addRemoteTracksWhenUnmuted(source, activeCall);
localOrPeerMediaSourcesChanged(activeCall);
// Make sure that remote camera and remote screen video in their places and shown/hidden based on layout type currently in use
changeLayout(activeCall.layout);
}
    /*
    When new remote tracks arrive, they are not added to the remote streams immediately. They are stored in a map, and once any of them is "unmuted",
    that track is added to its stream. Such a workaround is needed because Safari doesn't play one stream
    if another one is not playing too, e.g. no audio plays if only audio is active while a video track is present but muted.
    But it is possible for only one track to be active at a time, or even for no track to be active at all.
    */
function addRemoteTracksWhenUnmuted(source, call) {
const track = call.remoteTracks.get(source);
if (track) {
const stream = source == CallMediaSource.Mic || source == CallMediaSource.Camera ? call.remoteStream : call.remoteScreenStream;
stream.addTrack(track);
call.remoteTracks.delete(source);
}
}
async function getLocalMediaStream(mic, camera, facingMode) {
if (!mic && !camera)
return new MediaStream();
@@ -1136,7 +1140,7 @@ const processCommand = (function () {
if (peerHasOldVersion) {
console.log("The peer has an old version.", "Tracks size:", activeCall.remoteStream.getAudioTracks().length, activeCall.remoteStream.getVideoTracks().length);
onMediaMuteUnmute("0", false);
if (activeCall.remoteStream.getVideoTracks().length > 0) {
if (activeCall.remoteStream.getVideoTracks().length > 0 || activeCall.remoteTracks.get(CallMediaSource.Camera)) {
onMediaMuteUnmute("1", false);
}
if (activeCall.localMediaSources.camera && !activeCall.peerMediaSources.camera) {
+2 -2
View File
@@ -27,7 +27,7 @@
"author": "SimpleX Chat",
"license": "AGPL-3.0-or-later",
"devDependencies": {
"@types/lz-string": "^1.3.34",
"@types/lz-string": "1.3.34",
"husky": "^7.0.4",
"isomorphic-webcrypto": "^2.3.8",
"lint-staged": "^12.4.1",
@@ -38,6 +38,6 @@
"**/*": "prettier --write --ignore-unknown"
},
"dependencies": {
"lz-string": "^1.4.4"
"lz-string": "1.5.0"
}
}
+22 -14
View File
@@ -248,7 +248,10 @@ interface Call {
localCamera: VideoCamera
localStream: MediaStream
localScreenStream: MediaStream
  // has no tracks in the beginning, see addRemoteTracksWhenUnmuted
remoteStream: MediaStream
remoteTracks: Map<CallMediaSource, MediaStreamTrack>
  // has no tracks in the beginning too
remoteScreenStream: MediaStream
peerMediaSources: CallMediaSources
aesKey?: string
@@ -439,6 +442,7 @@ const processCommand = (function () {
localStream,
localScreenStream,
remoteStream,
remoteTracks: new Map(),
remoteScreenStream,
peerMediaSources: {
mic: false,
@@ -794,14 +798,6 @@ const processCommand = (function () {
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], {type: "text/javascript"})))
call.worker.onerror = ({error, filename, lineno, message}: ErrorEvent) => console.log({error, filename, lineno, message})
// call.worker.onmessage = ({data}) => console.log(JSON.stringify({message: data}))
call.worker.onmessage = ({data}) => {
console.log(JSON.stringify({message: data}))
const transceiverMid: string = data.transceiverMid
const mute: boolean = data.mute
if (transceiverMid && mute != undefined) {
onMediaMuteUnmute(transceiverMid, mute)
}
}
}
}
}
@@ -927,11 +923,7 @@ const processCommand = (function () {
setupMuteUnmuteListener(event.transceiver, track)
const mediaSource = mediaSourceFromTransceiverMid(event.transceiver.mid)
if (mediaSource == CallMediaSource.ScreenAudio || mediaSource == CallMediaSource.ScreenVideo) {
call.remoteScreenStream.addTrack(track)
} else {
call.remoteStream.addTrack(track)
}
call.remoteTracks.set(mediaSource, track)
console.log(`ontrack success`)
} catch (e) {
console.log(`ontrack error: ${(e as Error).message}`)
@@ -1296,11 +1288,27 @@ const processCommand = (function () {
sendMessageToNative({resp: resp})
if (!mute) videos.remoteScreen.play().catch((e) => console.log(e))
}
if (!mute) addRemoteTracksWhenUnmuted(source, activeCall)
localOrPeerMediaSourcesChanged(activeCall)
// Make sure that remote camera and remote screen video in their places and shown/hidden based on layout type currently in use
changeLayout(activeCall.layout)
}
  /*
  When new remote tracks arrive, they are not added to the remote streams immediately. They are stored in a map, and once any of them is "unmuted",
  that track is added to its stream. Such a workaround is needed because Safari doesn't play one stream
  if another one is not playing too, e.g. no audio plays if only audio is active while a video track is present but muted.
  But it is possible for only one track to be active at a time, or even for no track to be active at all.
  */
function addRemoteTracksWhenUnmuted(source: CallMediaSource, call: Call) {
const track = call.remoteTracks.get(source)
if (track) {
const stream = source == CallMediaSource.Mic || source == CallMediaSource.Camera ? call.remoteStream : call.remoteScreenStream
stream.addTrack(track)
call.remoteTracks.delete(source)
}
}
async function getLocalMediaStream(mic: boolean, camera: boolean, facingMode: VideoCamera): Promise<MediaStream> {
if (!mic && !camera) return new MediaStream()
const constraints = callMediaConstraints(mic, camera, facingMode)
@@ -1422,7 +1430,7 @@ const processCommand = (function () {
activeCall.remoteStream.getVideoTracks().length
)
onMediaMuteUnmute("0", false)
if (activeCall.remoteStream.getVideoTracks().length > 0) {
if (activeCall.remoteStream.getVideoTracks().length > 0 || activeCall.remoteTracks.get(CallMediaSource.Camera)) {
onMediaMuteUnmute("1", false)
}
if (activeCall.localMediaSources.camera && !activeCall.peerMediaSources.camera) {