feat(telephony): track call path metadata (hops and next-hop interface) and display it in the active-call UI; handle ringtone playback blocked by browser autoplay policies (retry on user gesture); add a config option to enable or disable telephone announces.

This commit is contained in:
Ivan
2026-04-30 15:33:25 -05:00
parent 56ab5d9e34
commit 01f5164828
9 changed files with 827 additions and 120 deletions
+63 -3
View File
@@ -6831,6 +6831,8 @@ class ReticulumMeshChat:
"rx_bytes": 0,
"tx_packets": 0,
"rx_packets": 0,
"path_hops": None,
"path_interface": None,
}
link = getattr(self.telephone_manager, "call_stats", {}).get("link")
if link:
@@ -6838,6 +6840,53 @@ class ReticulumMeshChat:
active_call["rx_bytes"] = getattr(link, "rxbytes", 0)
active_call["tx_packets"] = getattr(link, "tx", 0)
active_call["rx_packets"] = getattr(link, "rx", 0)
# Best-effort direct link metadata fallback.
if active_call["path_hops"] is None:
for hop_attr in ["hops", "hop_count", "path_hops"]:
hops_val = getattr(link, hop_attr, None)
if isinstance(hops_val, int):
active_call["path_hops"] = hops_val
break
if not active_call["path_interface"]:
for iface_attr in ["attached_interface", "interface", "ifac"]:
iface_val = getattr(link, iface_attr, None)
if isinstance(iface_val, str) and iface_val.strip():
active_call["path_interface"] = iface_val.strip()
break
iface_name = (
getattr(iface_val, "name", None) if iface_val else None
)
if isinstance(iface_name, str) and iface_name.strip():
active_call["path_interface"] = iface_name.strip()
break
# Try multiple destination hashes; depending on LXST state, the
# active call hash is not always the route-resolvable destination.
for candidate_hex in [
remote_telephony_hash,
remote_hash,
active_call["hash"],
remote_destination_hash,
]:
if not candidate_hex:
continue
try:
candidate_hash = bytes.fromhex(candidate_hex)
except Exception:
continue
try:
if not RNS.Transport.has_path(candidate_hash):
continue
active_call["path_hops"] = RNS.Transport.hops_to(candidate_hash)
if hasattr(self, "reticulum") and self.reticulum:
active_call["path_interface"] = (
self.reticulum.get_next_hop_if_name(
candidate_hash,
)
)
break
except Exception:
continue
initiation_target_hash = self.telephone_manager.initiation_target_hash
initiation_target_name = None
@@ -6887,6 +6936,9 @@ class ReticulumMeshChat:
"target_frame_time_ms",
None,
),
"diagnostics": self.web_audio_bridge.get_diagnostics()
if hasattr(self.web_audio_bridge, "get_diagnostics")
else None,
},
},
)
@@ -6921,7 +6973,7 @@ class ReticulumMeshChat:
# hangup active telephone call
@routes.get("/api/v1/telephone/hangup")
async def telephone_hangup(request):
await asyncio.to_thread(self.telephone_manager.hangup)
self.telephone_manager.request_hangup()
return web.json_response(
{
@@ -12441,8 +12493,10 @@ class ReticulumMeshChat:
if ctx.config.lxmf_local_propagation_node_enabled.get():
ctx.message_router.announce_propagation_node()
# send announce for telephone
ctx.telephone_manager.announce(display_name=ctx.config.display_name.get())
# send announce for telephone (can be disabled to reduce unsolicited
# incoming telephony link attempts from public lxst.telephony announces)
if ctx.config.telephone_announce_enabled.get():
ctx.telephone_manager.announce(display_name=ctx.config.display_name.get())
# tell websocket clients we just announced
await self.send_announced_to_websocket_clients(context=ctx)
@@ -13042,6 +13096,11 @@ class ReticulumMeshChat:
self._parse_bool(data["telephone_allow_calls_from_contacts_only"]),
)
if "telephone_announce_enabled" in data:
self.config.telephone_announce_enabled.set(
self._parse_bool(data["telephone_announce_enabled"]),
)
if "call_recording_enabled" in data:
value = self._parse_bool(data["call_recording_enabled"])
self.config.call_recording_enabled.set(value)
@@ -14209,6 +14268,7 @@ class ReticulumMeshChat:
"map_nominatim_api_url": ctx.config.map_nominatim_api_url.get(),
"do_not_disturb_enabled": ctx.config.do_not_disturb_enabled.get(),
"telephone_allow_calls_from_contacts_only": ctx.config.telephone_allow_calls_from_contacts_only.get(),
"telephone_announce_enabled": ctx.config.telephone_announce_enabled.get(),
"telephone_audio_profile_id": ctx.config.telephone_audio_profile_id.get(),
"telephone_web_audio_enabled": ctx.config.telephone_web_audio_enabled.get(),
"telephone_web_audio_allow_fallback": ctx.config.telephone_web_audio_allow_fallback.get(),
+5
View File
@@ -198,6 +198,11 @@ class ConfigManager:
"telephone_allow_calls_from_contacts_only",
False,
)
self.telephone_announce_enabled = self.BoolConfig(
self,
"telephone_announce_enabled",
True,
)
self.telephone_audio_profile_id = self.IntConfig(
self,
"telephone_audio_profile_id",
+36 -48
View File
@@ -4,11 +4,13 @@ import asyncio
import base64
import contextlib
import os
import threading
import time
import RNS
from LXST import Telephone
from meshchatx.src.backend import reticulum_pathfinding
from meshchatx.src.backend.meshchat_utils import (
hex_identifier_to_bytes,
normalize_hex_identifier,
@@ -116,14 +118,19 @@ class TelephoneManager:
self.telephone = None
def hangup(self):
    """Terminate the active call (blocking) and clear any dial-in-progress state.

    Safe to call with no active call; LXST hangup errors are logged
    instead of raised so teardown always completes.
    """
    # Clear first so the UI never sticks on "Dialing..." even if hangup fails.
    self._update_initiation_status(None, None)
    telephone = self.telephone
    if not telephone:
        return
    try:
        telephone.hangup()
    except Exception as e:
        RNS.log(f"TelephoneManager: Error during hangup: {e}", RNS.LOG_ERROR)
def request_hangup(self):
    """Request a hangup without blocking the caller.

    Clears the initiation status immediately, then runs the blocking
    ``hangup`` on a daemon thread.
    """
    # FIXME: Remove async hangup shim when LXST call() cancellation is non-blocking.
    self._update_initiation_status(None, None)
    if self.telephone:
        threading.Thread(target=self.hangup, daemon=True).start()
def register_ringing_callback(self, callback):
    """Store *callback* to be invoked when an incoming call starts ringing."""
    self.on_ringing_callback = callback
@@ -221,6 +228,11 @@ class TelephoneManager:
return not bool(self.initiation_status)
async def _await_path(self, destination_hash: bytes, timeout_seconds: float):
# Reuse shared pathfinding behavior so stale/unresponsive routes are
# refreshed before we wait, mirroring the faster outbound LXMF path prep.
with contextlib.suppress(Exception):
reticulum_pathfinding.prepare_fresh_path_request(None, destination_hash)
timeout_after = time.monotonic() + max(0.0, timeout_seconds)
next_request_at = 0.0
@@ -234,7 +246,7 @@ class TelephoneManager:
now = time.monotonic()
if now >= next_request_at:
with contextlib.suppress(Exception):
RNS.Transport.request_path(destination_hash)
reticulum_pathfinding.nudge_path_request(destination_hash)
next_request_at = now + self._path_retry_interval_s
await asyncio.sleep(self._path_poll_interval_s)
@@ -311,6 +323,10 @@ class TelephoneManager:
if destination_identity is None:
self._update_initiation_status("Discovering path/identity...")
with contextlib.suppress(Exception):
reticulum_pathfinding.prepare_fresh_path_request(
None, destination_hash
)
timeout_after = time.monotonic() + timeout_seconds
next_request_at = 0.0
@@ -322,7 +338,7 @@ class TelephoneManager:
now = time.monotonic()
if now >= next_request_at:
with contextlib.suppress(Exception):
RNS.Transport.request_path(destination_hash)
reticulum_pathfinding.nudge_path_request(destination_hash)
next_request_at = now + self._path_retry_interval_s
destination_identity = resolve_identity(destination_hash_hex)
@@ -335,10 +351,22 @@ class TelephoneManager:
msg = "Destination identity not found"
raise RuntimeError(msg)
if not RNS.Transport.has_path(destination_hash):
# FIXME: Remove telephony-destination pre-path lookup once LXST aligns
# identity-hash and telephony-destination path handling.
call_destination_hash = destination_hash
with contextlib.suppress(Exception):
call_destination_hash = RNS.Destination(
destination_identity,
RNS.Destination.OUT,
RNS.Destination.SINGLE,
"lxst",
"telephony",
).hash
if not RNS.Transport.has_path(call_destination_hash):
self._update_initiation_status("Requesting path...")
has_path = await self._await_path(
destination_hash,
call_destination_hash,
timeout_seconds=min(timeout_seconds, 10),
)
if self._is_initiation_cancelled():
@@ -390,8 +418,10 @@ class TelephoneManager:
await asyncio.sleep(self._status_poll_interval_s)
if cancel_requested:
self._update_initiation_status(None, None)
with contextlib.suppress(Exception):
self.telephone.hangup()
# FIXME: Remove async hangup dispatch when LXST exposes cooperative cancellation.
asyncio.create_task(asyncio.to_thread(self.telephone.hangup))
return None
# If the task finished but we're still ringing or connecting,
@@ -462,74 +492,32 @@ class TelephoneManager:
def mute_transmit(self):
    """Mute the outgoing (microphone) audio path and record the muted state."""
    telephone = self.telephone
    if telephone:
        # Manual override as LXST internal muting can be buggy: stop the
        # input device directly, then still invoke LXST's own mute in case
        # it does something useful.
        audio_input = getattr(telephone, "audio_input", None)
        if audio_input:
            try:
                audio_input.stop()
            except Exception as e:
                RNS.log(f"Failed to stop audio input for mute: {e}", RNS.LOG_ERROR)
        with contextlib.suppress(Exception):
            telephone.mute_transmit()
    self.transmit_muted = True
def unmute_transmit(self):
    """Re-enable the outgoing (microphone) audio path and record the state."""
    telephone = self.telephone
    if telephone:
        # Manual override as LXST internal muting can be buggy: restart the
        # input device directly, then still invoke LXST's own unmute.
        audio_input = getattr(telephone, "audio_input", None)
        if audio_input:
            try:
                audio_input.start()
            except Exception as e:
                RNS.log(
                    f"Failed to start audio input for unmute: {e}",
                    RNS.LOG_ERROR,
                )
        with contextlib.suppress(Exception):
            telephone.unmute_transmit()
    self.transmit_muted = False
def mute_receive(self):
    """Mute the incoming (speaker) audio path and record the muted state."""
    telephone = self.telephone
    if telephone:
        # Manual override as LXST internal muting can be buggy: stop the
        # output device directly, then still invoke LXST's own mute.
        audio_output = getattr(telephone, "audio_output", None)
        if audio_output:
            try:
                audio_output.stop()
            except Exception as e:
                RNS.log(f"Failed to stop audio output for mute: {e}", RNS.LOG_ERROR)
        with contextlib.suppress(Exception):
            telephone.mute_receive()
    self.receive_muted = True
def unmute_receive(self):
    """Re-enable the incoming (speaker) audio path and record the state."""
    telephone = self.telephone
    if telephone:
        # Manual override as LXST internal muting can be buggy: restart the
        # output device directly, then still invoke LXST's own unmute.
        audio_output = getattr(telephone, "audio_output", None)
        if audio_output:
            try:
                audio_output.start()
            except Exception as e:
                RNS.log(
                    f"Failed to start audio output for unmute: {e}",
                    RNS.LOG_ERROR,
                )
        with contextlib.suppress(Exception):
            telephone.unmute_receive()
    self.receive_muted = False
+27 -6
View File
@@ -703,6 +703,7 @@ export default {
isSpeakerMuting: false,
endedTimeout: null,
ringtonePlayer: null,
ringtoneAutoplayBlocked: false,
toneGenerator: new ToneGenerator(),
isFetchingRingtone: false,
initiationStatus: null,
@@ -806,6 +807,8 @@ export default {
this.stopRingtone();
this.toneGenerator.stop();
window.removeEventListener("meshchatx-intent-uri", this.onAndroidIntentUri);
window.removeEventListener("pointerdown", this.onRingtoneUnlockGesture, true);
window.removeEventListener("keydown", this.onRingtoneUnlockGesture, true);
},
mounted() {
try {
@@ -828,8 +831,19 @@ export default {
});
}
window.addEventListener("meshchatx-intent-uri", this.onAndroidIntentUri);
window.addEventListener("pointerdown", this.onRingtoneUnlockGesture, true);
window.addEventListener("keydown", this.onRingtoneUnlockGesture, true);
},
methods: {
onRingtoneUnlockGesture() {
if (!this.ringtoneAutoplayBlocked) {
return;
}
this.ringtoneAutoplayBlocked = false;
if (this.activeCall?.status === 4 && this.activeCall?.is_incoming) {
this.playRingtone();
}
},
startShellAuthWatch() {
if (typeof this._shellAuthWatchStop === "function") {
this._shellAuthWatchStop();
@@ -1574,12 +1588,19 @@ export default {
}
},
playRingtone() {
if (this.ringtonePlayer) {
if (this.ringtonePlayer.paused) {
this.ringtonePlayer.play().catch((e) => {
console.log("Failed to play custom ringtone:", e);
});
}
if (!this.ringtonePlayer || this.ringtoneAutoplayBlocked) {
return;
}
if (this.ringtonePlayer.paused) {
this.ringtonePlayer.play().catch((e) => {
if (e?.name === "NotAllowedError") {
// Browser autoplay policy blocked playback until user gesture.
// Stop retry spam; we retry once user interacts again.
this.ringtoneAutoplayBlocked = true;
return;
}
console.warn("Failed to play custom ringtone:", e);
});
}
},
stopRingtone() {
@@ -80,7 +80,7 @@
</div>
<!-- Phone Tab -->
<div v-if="activeTab === 'phone'" class="flex-1 flex flex-col">
<div v-if="activeTab === 'phone'" class="flex-1 flex flex-col pt-2">
<div
v-if="activeCall || isCallEnded || initiationStatus"
class="flex-1 flex flex-col items-center justify-center py-12 px-4"
@@ -88,12 +88,6 @@
<div
class="w-full max-w-md border-b border-gray-200 dark:border-zinc-800 p-8! flex flex-col items-center text-center relative overflow-hidden"
>
<!-- Status pulse background -->
<div
v-if="activeCall && activeCall.status === 6"
class="absolute inset-0 bg-green-500/5 animate-pulse"
></div>
<!-- Recording indicator -->
<div
v-if="activeCall && activeCall.is_recording"
@@ -159,6 +153,25 @@
)
}}
</div>
<div
v-if="activeCall"
class="mt-1 flex items-center justify-center gap-2 text-[11px] text-gray-500 dark:text-zinc-400"
>
<span
v-if="activeCall.path_hops != null"
class="inline-flex items-center gap-1 rounded-full bg-gray-100 dark:bg-zinc-800 px-2 py-0.5"
>
<MaterialDesignIcon icon-name="sitemap-outline" class="size-4" />
{{ activeCall.path_hops }} hops
</span>
<span
v-if="activeCall.path_interface"
class="inline-flex items-center gap-1 rounded-full bg-gray-100 dark:bg-zinc-800 px-2 py-0.5 max-w-[16rem]"
>
<MaterialDesignIcon icon-name="access-point-network" class="size-4" />
<span class="truncate">{{ activeCall.path_interface }}</span>
</span>
</div>
<div
v-if="(activeCall || lastCall)?.is_contact || !!initiationTargetName"
class="inline-flex items-center gap-1 px-2 py-0.5 bg-blue-50 dark:bg-blue-900/30 text-blue-600 dark:text-blue-400 text-[10px] font-bold rounded-full uppercase tracking-wider"
@@ -240,6 +253,51 @@
>
{{ elapsedTime }}
</div>
<div
v-if="activeCall && activeCall.status === 6"
class="mt-3 grid grid-cols-2 gap-2 text-xs w-full max-w-xs"
>
<div
class="rounded-xl bg-gray-50 dark:bg-zinc-800/70 border border-gray-100 dark:border-zinc-700/70 px-2 py-1.5 text-left"
>
<div class="text-[10px] text-gray-500 dark:text-zinc-400">
TX Pkts
</div>
<div class="font-semibold text-gray-800 dark:text-zinc-100">
{{ formatNumber(activeCall.tx_packets) }}
</div>
</div>
<div
class="rounded-xl bg-gray-50 dark:bg-zinc-800/70 border border-gray-100 dark:border-zinc-700/70 px-2 py-1.5 text-left"
>
<div class="text-[10px] text-gray-500 dark:text-zinc-400">
RX Pkts
</div>
<div class="font-semibold text-gray-800 dark:text-zinc-100">
{{ formatNumber(activeCall.rx_packets) }}
</div>
</div>
<div
class="rounded-xl bg-gray-50 dark:bg-zinc-800/70 border border-gray-100 dark:border-zinc-700/70 px-2 py-1.5 text-left"
>
<div class="text-[10px] text-gray-500 dark:text-zinc-400">
TX Data Out
</div>
<div class="font-semibold text-gray-800 dark:text-zinc-100">
{{ formatBytes(activeCall.tx_bytes) }}
</div>
</div>
<div
class="rounded-xl bg-gray-50 dark:bg-zinc-800/70 border border-gray-100 dark:border-zinc-700/70 px-2 py-1.5 text-left"
>
<div class="text-[10px] text-gray-500 dark:text-zinc-400">
RX Data In
</div>
<div class="font-semibold text-gray-800 dark:text-zinc-100">
{{ formatBytes(activeCall.rx_bytes) }}
</div>
</div>
</div>
</div>
</template>
<template v-else-if="initiationStatus">
@@ -258,6 +316,7 @@
>
Duration: {{ callDuration }}
</div>
<!-- Play Voicemail Button -->
<div v-if="isCallEnded && wasVoicemail" class="mt-6 animate-fade-in">
<button
@@ -325,20 +384,6 @@
class="size-6"
/>
</button>
<!-- toggle stats -->
<button
type="button"
:class="[
isShowingStats
? 'bg-blue-500 text-white shadow-blue-500/20'
: 'bg-gray-100 dark:bg-zinc-800 text-gray-700 dark:text-zinc-200 hover:bg-gray-200 dark:hover:bg-zinc-700 shadow-gray-200/20 dark:shadow-black/20',
]"
class="p-4 rounded-full shadow-lg transition-all duration-200"
@click="isShowingStats = !isShowingStats"
>
<MaterialDesignIcon icon-name="chart-bar" class="size-6" />
</button>
</div>
</div>
</div>
@@ -383,31 +428,6 @@
}}</span>
</button>
</div>
<!-- stats -->
<div
v-if="isShowingStats && activeCall"
class="w-full mt-6 p-4 text-left bg-gray-50 dark:bg-zinc-800/50 rounded-2xl text-[10px] text-gray-500 dark:text-zinc-400 font-mono border border-gray-100 dark:border-zinc-800 relative z-10"
>
<div class="grid grid-cols-2 gap-4">
<div class="flex flex-col gap-1">
<div class="flex justify-between">
<span>TX Pkts</span><span>{{ activeCall.tx_packets }}</span>
</div>
<div class="flex justify-between">
<span>TX Data</span><span>{{ formatBytes(activeCall.tx_bytes) }}</span>
</div>
</div>
<div class="flex flex-col gap-1">
<div class="flex justify-between">
<span>RX Pkts</span><span>{{ activeCall.rx_packets }}</span>
</div>
<div class="flex justify-between">
<span>RX Data</span><span>{{ formatBytes(activeCall.rx_bytes) }}</span>
</div>
</div>
</div>
</div>
</div>
</div>
@@ -520,6 +540,12 @@
:label="$t('call.allow_calls_from_contacts_only')"
@update:model-value="toggleAllowCallsFromContactsOnly"
/>
<Toggle
id="telephone-announce-toggle"
:model-value="config?.telephone_announce_enabled"
label="Announce Telephone Presence (LXST)"
@update:model-value="toggleTelephoneAnnounceEnabled"
/>
<div class="flex flex-col gap-1">
<Toggle
id="web-audio-toggle"
@@ -830,7 +856,7 @@
</div>
<!-- Phonebook Tab -->
<div v-if="activeTab === 'phonebook'" class="flex-1 flex flex-col max-w-3xl mx-auto w-full">
<div v-if="activeTab === 'phonebook'" class="flex-1 flex flex-col max-w-3xl mx-auto w-full pt-2">
<div class="mb-4">
<div class="relative">
<input
@@ -946,7 +972,7 @@
</div>
<!-- Voicemail Tab -->
<div v-if="activeTab === 'voicemail'" class="flex-1 flex flex-col max-w-3xl mx-auto w-full">
<div v-if="activeTab === 'voicemail'" class="flex-1 flex flex-col max-w-3xl mx-auto w-full pt-2">
<div class="mb-4">
<div class="relative">
<input
@@ -1366,7 +1392,7 @@
</div>
<!-- Contacts Tab -->
<div v-if="activeTab === 'contacts'" class="flex-1 flex flex-col max-w-3xl mx-auto w-full">
<div v-if="activeTab === 'contacts'" class="flex-1 flex flex-col max-w-3xl mx-auto w-full pt-2">
<div class="mb-4 flex gap-2">
<div class="relative flex-1">
<input
@@ -2128,7 +2154,9 @@ import Toggle from "../forms/Toggle.vue";
import ToastUtils from "../../js/ToastUtils";
import RingtoneEditor from "./RingtoneEditor.vue";
import AudioWaveformPlayer from "../messages/AudioWaveformPlayer.vue";
import telephonePcmCaptureWorkletUrl from "../../js/telephone-pcm-capture.worklet.js?url";
// Keep this as a same-origin static asset URL so strict CSP can load it.
const telephonePcmCaptureWorkletUrl = "/assets/js/telephone-pcm-capture.worklet.js";
export default {
name: "CallPage",
@@ -2146,7 +2174,6 @@ export default {
audioProfiles: [],
selectedAudioProfileId: null,
destinationHash: "",
isShowingStats: false,
callHistory: [],
callHistorySearch: "",
callHistoryLimit: 10,
@@ -2217,6 +2244,8 @@ export default {
audioCtx: null,
audioStream: null,
audioSourceNode: null,
audioNoiseHighpass: null,
audioNoiseCompressor: null,
audioProcessor: null,
audioWorkletNode: null,
audioSilentGain: null,
@@ -2228,6 +2257,15 @@ export default {
remoteAudioEl: null,
useAndroidNativeTelephone: false,
androidNativeTelephoneListener: null,
localAudioLevel: 0,
remoteAudioLevel: 0,
localAudioTarget: 0,
remoteAudioTarget: 0,
visualizerRafId: null,
visualizerPhase: 0,
visualizerEnabled: true,
prevCallTxBytes: 0,
prevCallRxBytes: 0,
};
},
computed: {
@@ -2309,6 +2347,9 @@ export default {
this.getRecordings();
}
},
activeCall() {
this.stopAudioVisualizer();
},
},
mounted() {
this.getConfig();
@@ -2365,6 +2406,7 @@ export default {
if (this.historyInterval) clearInterval(this.historyInterval);
if (this.elapsedTimeInterval) clearInterval(this.elapsedTimeInterval);
if (this.endedTimeout) clearTimeout(this.endedTimeout);
this.stopAudioVisualizer();
if (this.audioPlayer) {
this.audioPlayer.pause();
this.audioPlayer = null;
@@ -2378,6 +2420,9 @@ export default {
formatBytes(bytes) {
return Utils.formatBytes(bytes || 0);
},
formatNumber(value) {
return Utils.formatNumber(value || 0);
},
formatDateTime(timestamp) {
return Utils.convertUnixMillisToLocalDateTimeString(timestamp);
},
@@ -2387,6 +2432,198 @@ export default {
formatDuration(seconds) {
return Utils.formatMinutesSeconds(seconds);
},
capturePeakLevel(samples) {
if (!samples || samples.length === 0) return 0;
let peak = 0;
for (let i = 0; i < samples.length; i += 1) {
const value = Math.abs(samples[i]);
if (value > peak) peak = value;
}
return peak;
},
extractInt16Samples(payload) {
if (!payload) return null;
if (payload instanceof ArrayBuffer) {
return new Int16Array(payload);
}
if (ArrayBuffer.isView(payload)) {
const byteLengthEven = Math.floor(payload.byteLength / 2) * 2;
return new Int16Array(payload.buffer, payload.byteOffset, byteLengthEven / 2);
}
return null;
},
computeSignalLevel(samples, scale = 1) {
if (!samples || samples.length === 0) return 0;
let peak = 0;
let sumSq = 0;
for (let i = 0; i < samples.length; i += 1) {
const value = Math.abs(samples[i]) / scale;
if (value > peak) peak = value;
sumSq += value * value;
}
const rms = Math.sqrt(sumSq / samples.length);
const boosted = Math.max(peak * 0.8, rms * 2.4);
return this.normalizeAudioLevel(boosted);
},
normalizeAudioLevel(level) {
if (!Number.isFinite(level)) return 0;
const normalized = level > 1 && level <= 100 ? level / 100 : level;
return Math.max(0, Math.min(1, normalized));
},
disableAudioVisualizer() {
this.visualizerEnabled = false;
this.stopAudioVisualizer();
},
updateVisualizerFromCallStats(newCall, oldCall) {
if (!newCall || newCall.status !== 6) {
this.prevCallTxBytes = 0;
this.prevCallRxBytes = 0;
this.localAudioTarget = 0;
this.remoteAudioTarget = 0;
return;
}
// Real PCM levels are preferred; this is a fallback when bridge/native
// audio telemetry is unavailable but link stats are present.
if (this.audioWs || this.useAndroidNativeTelephone) {
this.prevCallTxBytes = Number(newCall.tx_bytes || 0);
this.prevCallRxBytes = Number(newCall.rx_bytes || 0);
return;
}
const tx = Number(newCall.tx_bytes || 0);
const rx = Number(newCall.rx_bytes || 0);
const prevTx = oldCall && oldCall.hash === newCall.hash ? this.prevCallTxBytes : tx;
const prevRx = oldCall && oldCall.hash === newCall.hash ? this.prevCallRxBytes : rx;
const txDelta = Math.max(0, tx - prevTx);
const rxDelta = Math.max(0, rx - prevRx);
// Convert byte deltas to subtle activity levels with soft cap.
const txLevel = this.normalizeAudioLevel(Math.log10(1 + txDelta) / 2.8);
const rxLevel = this.normalizeAudioLevel(Math.log10(1 + rxDelta) / 2.8);
this.localAudioTarget = Math.max(this.localAudioTarget, txLevel);
this.remoteAudioTarget = Math.max(this.remoteAudioTarget, rxLevel);
this.localAudioLevel = Math.max(this.localAudioLevel, this.localAudioTarget);
this.remoteAudioLevel = Math.max(this.remoteAudioLevel, this.remoteAudioTarget);
this.prevCallTxBytes = tx;
this.prevCallRxBytes = rx;
},
resizeAudioVisualizerCanvas(canvas) {
if (!canvas) return false;
const cssWidth = Math.max(160, Math.floor(canvas.clientWidth || 256));
const cssHeight = Math.max(56, Math.floor(canvas.clientHeight || 72));
const dpr = Math.max(1, Number(window.devicePixelRatio) || 1);
const targetWidth = Math.floor(cssWidth * dpr);
const targetHeight = Math.floor(cssHeight * dpr);
if (canvas.width !== targetWidth || canvas.height !== targetHeight) {
canvas.width = targetWidth;
canvas.height = targetHeight;
}
return true;
},
startAudioVisualizer() {
// Start the requestAnimationFrame loop that draws the two call-audio
// waveforms (local = cyan, remote = violet) onto the visualizer canvas.
// No-op when already running or when the visualizer has been disabled.
if (!this.visualizerEnabled || this.visualizerRafId) {
return;
}
// Environments without rAF support cannot animate; disable permanently.
if (
typeof window.requestAnimationFrame !== "function" ||
typeof window.cancelAnimationFrame !== "function"
) {
this.disableAudioVisualizer();
return;
}
const canvas = this.$refs.callAudioVisualizer;
if (!canvas || typeof canvas.getContext !== "function") {
return;
}
if (!this.resizeAudioVisualizerCanvas(canvas)) {
this.disableAudioVisualizer();
return;
}
const ctx = canvas.getContext("2d");
if (!ctx) {
this.disableAudioVisualizer();
return;
}
const loop = () => {
// Re-resolve the canvas every frame: the ref can disappear when the
// call UI unmounts mid-animation.
const currentCanvas = this.$refs.callAudioVisualizer;
if (!currentCanvas || typeof currentCanvas.getContext !== "function") {
this.stopAudioVisualizer();
return;
}
if (!this.resizeAudioVisualizerCanvas(currentCanvas)) {
this.disableAudioVisualizer();
return;
}
const currentCtx = currentCanvas.getContext("2d");
if (!currentCtx) {
this.disableAudioVisualizer();
return;
}
const width = currentCanvas.width;
const height = currentCanvas.height;
const centerY = height / 2;
// Advance the wave phase and decay targets/levels each frame so the
// waves relax smoothly when audio activity stops. Levels never drop
// below their (decaying) targets.
this.visualizerPhase += 0.065;
this.localAudioTarget *= 0.985;
this.remoteAudioTarget *= 0.985;
this.localAudioLevel = Math.max(this.localAudioLevel * 0.965, this.localAudioTarget);
this.remoteAudioLevel = Math.max(this.remoteAudioLevel * 0.965, this.remoteAudioTarget);
// Dark background plus a faint horizontal midline as the zero axis.
currentCtx.clearRect(0, 0, width, height);
currentCtx.fillStyle = "rgba(10, 12, 18, 0.9)";
currentCtx.fillRect(0, 0, width, height);
currentCtx.strokeStyle = "rgba(156, 163, 175, 0.22)";
currentCtx.lineWidth = 1;
currentCtx.beginPath();
currentCtx.moveTo(0, centerY);
currentCtx.lineTo(width, centerY);
currentCtx.stroke();
// Draw one sine wave; `direction` (+1/-1) mirrors it about the midline
// and a sine envelope tapers the amplitude toward both edges.
const drawWave = (level, color, phaseOffset, direction) => {
const clampedLevel = this.normalizeAudioLevel(level);
// Below this threshold the wave would be invisible; skip the stroke.
if (clampedLevel < 0.003) {
return;
}
const amp = clampedLevel * (height * 0.4);
currentCtx.beginPath();
currentCtx.strokeStyle = color;
currentCtx.lineWidth = 2;
const step = 4;
for (let x = 0; x <= width; x += step) {
const t = (x / width) * Math.PI * 6 + this.visualizerPhase + phaseOffset;
const envelope = 0.5 + 0.5 * Math.sin((x / width) * Math.PI);
const y = centerY + direction * Math.sin(t) * amp * envelope;
if (x === 0) {
currentCtx.moveTo(x, y);
} else {
currentCtx.lineTo(x, y);
}
}
currentCtx.stroke();
};
drawWave(this.localAudioLevel, "rgba(34, 211, 238, 0.95)", 0, -1);
drawWave(this.remoteAudioLevel, "rgba(167, 139, 250, 0.95)", Math.PI / 2, 1);
this.visualizerRafId = window.requestAnimationFrame(loop);
};
this.visualizerRafId = window.requestAnimationFrame(loop);
},
stopAudioVisualizer() {
if (this.visualizerRafId) {
window.cancelAnimationFrame(this.visualizerRafId);
this.visualizerRafId = null;
}
this.localAudioLevel = 0;
this.remoteAudioLevel = 0;
this.localAudioTarget = 0;
this.remoteAudioTarget = 0;
},
isMeshChatXAndroid() {
return (
window.MeshChatXAndroid &&
@@ -2408,6 +2645,11 @@ export default {
return window.AudioContext || window.webkitAudioContext || null;
},
pickWebAudioMicConstraints(mediaDevices) {
const processingHints = {
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true,
};
const canEnumerate = this.hasEnumerateDevicesApi(mediaDevices);
const validIds = canEnumerate
? new Set(
@@ -2418,10 +2660,10 @@ export default {
: new Set();
const sid = this.selectedAudioInputId;
if (sid === "__meshchat_default_in__") {
return { audio: true };
return { audio: processingHints };
}
const id = sid && validIds.has(sid) ? sid : null;
return id ? { audio: { deviceId: { exact: id } } } : { audio: true };
return id ? { audio: { ...processingHints, deviceId: { exact: id } } } : { audio: processingHints };
},
async getUserMediaWithMicFallback(mediaDevices) {
const constraints = this.pickWebAudioMicConstraints(mediaDevices);
@@ -2608,6 +2850,37 @@ export default {
const source = this.audioCtx.createMediaStreamSource(stream);
this.audioSourceNode = source;
let captureInput = source;
// Lightweight mic cleanup stage before PCM capture:
// - High-pass removes low rumble/fan hum.
// - Compressor smooths sudden peaks and lifts speech intelligibility.
if (
typeof this.audioCtx.createBiquadFilter === "function" &&
typeof this.audioCtx.createDynamicsCompressor === "function"
) {
try {
const highpass = this.audioCtx.createBiquadFilter();
highpass.type = "highpass";
highpass.frequency.value = 120;
highpass.Q.value = 0.707;
const compressor = this.audioCtx.createDynamicsCompressor();
compressor.threshold.value = -45;
compressor.knee.value = 30;
compressor.ratio.value = 3;
compressor.attack.value = 0.003;
compressor.release.value = 0.25;
source.connect(highpass);
highpass.connect(compressor);
captureInput = compressor;
this.audioNoiseHighpass = highpass;
this.audioNoiseCompressor = compressor;
} catch (filterErr) {
this.logWebAudioFailure("telephone-noise-filter", filterErr);
}
}
const wsProtocol = window.location.protocol === "https:" ? "wss:" : "ws:";
const url = `${wsProtocol}//${window.location.host}/ws/telephone/audio`;
@@ -2644,9 +2917,16 @@ export default {
channelCount: 1,
});
processor.port.onmessage = (event) => {
sendMicPcmToWs(event.data);
const pcmBuffer = event.data;
sendMicPcmToWs(pcmBuffer);
const samples = this.extractInt16Samples(pcmBuffer);
if (samples && samples.length > 0) {
const level = this.computeSignalLevel(samples, 0x7fff);
this.localAudioTarget = Math.max(this.localAudioTarget, level);
this.localAudioLevel = Math.max(this.localAudioLevel, this.localAudioTarget);
}
};
source.connect(processor);
captureInput.connect(processor);
this.audioWorkletNode = processor;
micTapNode = processor;
} catch (workletErr) {
@@ -2670,9 +2950,12 @@ export default {
if (!ch0 || ch0.length === 0) {
return;
}
const level = this.computeSignalLevel(ch0, 1);
this.localAudioTarget = Math.max(this.localAudioTarget, level);
this.localAudioLevel = Math.max(this.localAudioLevel, this.localAudioTarget);
sendMicPcmToWs(floatChannelToInt16PcmBuffer(ch0));
};
source.connect(scriptNode);
captureInput.connect(scriptNode);
this.audioProcessor = scriptNode;
micTapNode = scriptNode;
} catch (scriptErr) {
@@ -2701,6 +2984,14 @@ export default {
try {
const msg = JSON.parse(event.data);
if (msg.type === "error") {
const errMsg = typeof msg.message === "string" ? msg.message : "";
if (errMsg.includes("Web audio is disabled in config")) {
if (this.config) {
this.config.telephone_web_audio_enabled = false;
}
this.stopWebAudio();
return;
}
this.logWebAudioFailure("ws-server-error", new Error(msg.message || "unknown"));
if (
this.activeCall &&
@@ -2840,8 +3131,12 @@ export default {
if (!this.audioCtx || !arrayBuffer) {
return;
}
const pcm = new Int16Array(arrayBuffer);
const pcm = this.extractInt16Samples(arrayBuffer);
if (!pcm) return;
if (pcm.length === 0) return;
const remoteLevel = this.computeSignalLevel(pcm, 0x7fff);
this.remoteAudioTarget = Math.max(this.remoteAudioTarget, remoteLevel);
this.remoteAudioLevel = Math.max(this.remoteAudioLevel, this.remoteAudioTarget);
const floatBuf = new Float32Array(pcm.length);
for (let i = 0; i < pcm.length; i += 1) {
floatBuf[i] = pcm[i] / 0x7fff;
@@ -2872,6 +3167,13 @@ export default {
this._unbindAndroidNativeTelephone();
this.androidNativeTelephoneListener = (ev) => {
const d = ev && ev.detail;
if (d && d.kind === "levels") {
this.localAudioTarget = Math.max(this.localAudioTarget, this.normalizeAudioLevel(d.tx_level));
this.remoteAudioTarget = Math.max(this.remoteAudioTarget, this.normalizeAudioLevel(d.rx_level));
this.localAudioLevel = Math.max(this.localAudioLevel, this.localAudioTarget);
this.remoteAudioLevel = Math.max(this.remoteAudioLevel, this.remoteAudioTarget);
return;
}
if (d && d.kind === "error" && d.detail) {
this.logWebAudioFailure("android-native", new Error(String(d.sub || d.detail || "error")));
}
@@ -2917,6 +3219,22 @@ export default {
}
this.audioSourceNode = null;
}
if (this.audioNoiseHighpass) {
try {
this.audioNoiseHighpass.disconnect();
} catch {
// ignore
}
this.audioNoiseHighpass = null;
}
if (this.audioNoiseCompressor) {
try {
this.audioNoiseCompressor.disconnect();
} catch {
// ignore
}
this.audioNoiseCompressor = null;
}
if (this.audioProcessor) {
try {
this.audioProcessor.disconnect();
@@ -3172,6 +3490,19 @@ export default {
ToastUtils.error(this.$t("call.failed_to_update_call_settings"));
}
},
async toggleTelephoneAnnounceEnabled(value) {
try {
await window.api.patch("/api/v1/config", {
telephone_announce_enabled: value,
});
if (this.config) {
this.config.telephone_announce_enabled = value;
}
ToastUtils.success(value ? "Telephone announces enabled" : "Telephone announces disabled");
} catch {
ToastUtils.error(this.$t("call.failed_to_update_call_settings"));
}
},
async toggleCallRecording(value) {
try {
await window.api.patch("/api/v1/config", {
@@ -3824,16 +4155,14 @@ export default {
? "/api/v1/telephone/unmute-transmit"
: "/api/v1/telephone/mute-transmit";
await window.api.get(endpoint);
// clear muting state after a short delay to allow backend to catch up
setTimeout(() => {
this.isMicMuting = false;
}, 500);
} catch {
this.isMicMuting = false;
// Revert on error
this.localMicMuted = !this.localMicMuted;
ToastUtils.error(this.$t("call.failed_to_toggle_microphone"));
this.isMicMuting = false;
}
},
async toggleSpeaker() {
@@ -3851,16 +4180,14 @@ export default {
? "/api/v1/telephone/unmute-receive"
: "/api/v1/telephone/mute-receive";
await window.api.get(endpoint);
// clear muting state after a short delay to allow backend to catch up
setTimeout(() => {
this.isSpeakerMuting = false;
}, 500);
} catch {
this.isSpeakerMuting = false;
// Revert on error
this.localSpeakerMuted = !this.localSpeakerMuted;
ToastUtils.error(this.$t("call.failed_to_toggle_speaker"));
this.isSpeakerMuting = false;
}
},
},
@@ -0,0 +1,23 @@
// SPDX-License-Identifier: 0BSD
// Captures mono float samples from the worklet input, converts them to
// signed 16-bit PCM, and transfers the buffer to the main thread.
class TelephonePcmCaptureProcessor extends AudioWorkletProcessor {
  process(inputs) {
    const firstInput = inputs[0];
    const samples = firstInput && firstInput.length > 0 ? firstInput[0] : null;
    if (!samples || samples.length === 0) {
      // Returning true keeps the processor alive while no audio flows.
      return true;
    }
    const pcm = new Int16Array(samples.length);
    for (let i = 0; i < samples.length; i += 1) {
      // Clamp to [-1, 1] before scaling so out-of-range floats cannot wrap.
      const clamped = Math.min(1, Math.max(-1, samples[i]));
      pcm[i] = clamped * 0x7fff;
    }
    // Transfer ownership of the underlying ArrayBuffer to avoid a copy.
    this.port.postMessage(pcm.buffer, [pcm.buffer]);
    return true;
  }
}
registerProcessor("telephone-pcm-capture", TelephonePcmCaptureProcessor);
@@ -434,3 +434,53 @@ async def test_rapid_dial_cancel_soak_has_bounded_memory(telephone_manager):
# Keep this lax enough for CI variance while still catching obvious leaks.
assert peak < 80 * 1024 * 1024
def test_request_hangup_clears_status_immediately_and_runs_hangup(telephone_manager):
    """request_hangup() clears initiation_status synchronously and the
    underlying telephone.hangup() runs shortly afterwards (background work)."""
    telephone_manager.initiation_status = "Establishing link..."
    telephone_manager.request_hangup()
    # Status must be cleared before the asynchronous hangup completes.
    assert telephone_manager.initiation_status is None
    # Poll briefly (up to ~250 ms total) for the background hangup to fire.
    attempts = 0
    while attempts < 50 and not telephone_manager.telephone.hangup.called:
        time.sleep(0.005)
        attempts += 1
    assert telephone_manager.telephone.hangup.called
@pytest.mark.asyncio
async def test_initiate_checks_path_for_lxst_telephony_destination(telephone_manager):
    """initiate() must probe the path for the LXST telephony destination hash
    (the hash of the RNS.Destination it builds), not only the raw input hash."""
    destination_hash = bytes.fromhex("af" * 16)
    telephony_destination_hash = bytes.fromhex("be" * 16)
    observed_hashes = []
    destination_identity = MagicMock()

    def has_path(hash_bytes):
        # Record every hash the manager probes so we can assert on it below;
        # report a path only for the telephony destination hash.
        observed_hashes.append(hash_bytes)
        return hash_bytes == telephony_destination_hash

    fake_destination = MagicMock()
    fake_destination.hash = telephony_destination_hash
    # Mark the call as immediately established so initiate() returns promptly.
    telephone_manager.telephone.call.side_effect = lambda _identity: setattr(
        telephone_manager.telephone, "call_status", 0
    )
    recall_patch = patch(
        "meshchatx.src.backend.telephone_manager.RNS.Identity.recall",
        return_value=destination_identity,
    )
    destination_patch = patch(
        "meshchatx.src.backend.telephone_manager.RNS.Destination",
        return_value=fake_destination,
    )
    has_path_patch = patch(
        "meshchatx.src.backend.telephone_manager.RNS.Transport.has_path",
        side_effect=has_path,
    )
    with recall_patch, destination_patch, has_path_patch:
        await asyncio.wait_for(
            telephone_manager.initiate(destination_hash, timeout_seconds=1),
            timeout=0.5,
        )
    assert telephony_destination_hash in observed_hashes
+87
View File
@@ -208,4 +208,91 @@ describe("App.vue Modals", () => {
expect(wrapper.vm.$refs.changelogModal.visible).toBe(true);
});
it("playRingtone marks autoplay blocked on NotAllowedError", async () => {
const wrapper = mount(App, {
global: {
plugins: [router, vuetify, i18n],
stubs: {
MaterialDesignIcon: true,
LxmfUserIcon: true,
NotificationBell: true,
LanguageSelector: true,
CallOverlay: true,
CommandPalette: true,
IntegrityWarningModal: true,
VDialog: true,
VCard: true,
VCardText: true,
VCardActions: true,
VBtn: true,
VIcon: true,
VToolbar: true,
VToolbarTitle: true,
VSpacer: true,
VProgressCircular: true,
VCheckbox: true,
VDivider: true,
},
},
});
await router.isReady();
await new Promise((resolve) => setTimeout(resolve, 50));
const err = new Error("autoplay blocked");
err.name = "NotAllowedError";
const play = vi.fn().mockRejectedValue(err);
wrapper.vm.ringtonePlayer = {
paused: true,
play,
};
wrapper.vm.playRingtone();
await Promise.resolve();
expect(wrapper.vm.ringtoneAutoplayBlocked).toBe(true);
expect(play).toHaveBeenCalledTimes(1);
});
it("onRingtoneUnlockGesture retries ringtone when incoming call still ringing", async () => {
const wrapper = mount(App, {
global: {
plugins: [router, vuetify, i18n],
stubs: {
MaterialDesignIcon: true,
LxmfUserIcon: true,
NotificationBell: true,
LanguageSelector: true,
CallOverlay: true,
CommandPalette: true,
IntegrityWarningModal: true,
VDialog: true,
VCard: true,
VCardText: true,
VCardActions: true,
VBtn: true,
VIcon: true,
VToolbar: true,
VToolbarTitle: true,
VSpacer: true,
VProgressCircular: true,
VCheckbox: true,
VDivider: true,
},
},
});
await router.isReady();
await new Promise((resolve) => setTimeout(resolve, 50));
const playRingtone = vi.spyOn(wrapper.vm, "playRingtone").mockImplementation(() => {});
wrapper.vm.ringtoneAutoplayBlocked = true;
wrapper.vm.activeCall = { status: 4, is_incoming: true };
wrapper.vm.onRingtoneUnlockGesture();
expect(wrapper.vm.ringtoneAutoplayBlocked).toBe(false);
expect(playRingtone).toHaveBeenCalledTimes(1);
});
});
+146
View File
@@ -166,6 +166,34 @@ describe("CallPage.vue", () => {
expect(wrapper.find('input[type="text"]').exists()).toBe(true);
});
it("renders call hops and interface metadata below address", async () => {
const wrapper = mountCallPage();
await wrapper.vm.$nextTick();
wrapper.vm.activeCall = {
status: 6,
remote_identity_hash: "ab".repeat(16),
remote_identity_name: "Path Test",
path_hops: 3,
path_interface: "Default Interface",
tx_packets: 303,
rx_packets: 289,
tx_bytes: 35 * 1024,
rx_bytes: 82 * 1024,
};
await wrapper.vm.$nextTick();
expect(wrapper.text()).toContain("3 hops");
expect(wrapper.text()).toContain("Default Interface");
expect(wrapper.text()).toContain("TX Pkts");
expect(wrapper.text()).toContain("303");
expect(wrapper.text()).toContain("RX Pkts");
expect(wrapper.text()).toContain("289");
expect(wrapper.text()).toContain("TX Data Out");
expect(wrapper.text()).toContain("35 KB");
expect(wrapper.text()).toContain("RX Data In");
expect(wrapper.text()).toContain("82 KB");
});
it("attempts to place a call when 'Call' button is clicked", async () => {
const wrapper = mountCallPage();
await wrapper.vm.$nextTick();
@@ -231,6 +259,16 @@ describe("CallPage.vue", () => {
});
});
it("toggleTelephoneAnnounceEnabled patches config", async () => {
const wrapper = mountCallPage();
await wrapper.vm.$nextTick();
wrapper.vm.config = { telephone_announce_enabled: true };
await wrapper.vm.toggleTelephoneAnnounceEnabled(false);
expect(axiosMock.patch).toHaveBeenCalledWith(expect.stringContaining("/api/v1/config"), {
telephone_announce_enabled: false,
});
});
it("ensureWebAudio stops when server reports web audio disabled", async () => {
const wrapper = mountCallPage();
await wrapper.vm.$nextTick();
@@ -325,6 +363,20 @@ describe("CallPage.vue", () => {
expect(stream).toBe(fakeStream);
});
it("pickWebAudioMicConstraints includes browser audio processing hints", async () => {
const wrapper = mountCallPage();
await flushPromises();
wrapper.vm.selectedAudioInputId = "mic-1";
wrapper.vm.audioInputDevices = [{ kind: "audioinput", deviceId: "mic-1" }];
const mediaDevices = { enumerateDevices: vi.fn().mockResolvedValue([]) };
const constraints = wrapper.vm.pickWebAudioMicConstraints(mediaDevices);
expect(constraints.audio.echoCancellation).toBe(true);
expect(constraints.audio.noiseSuppression).toBe(true);
expect(constraints.audio.autoGainControl).toBe(true);
expect(constraints.audio.deviceId).toEqual({ exact: "mic-1" });
});
it("startWebAudio uses MeshChatXAndroid native bridge when platform is android", async () => {
const wrapper = mountCallPage();
await flushPromises();
@@ -546,4 +598,98 @@ describe("CallPage.vue", () => {
expect(wrapper.vm.ringtoneStatus.id).toBe(3);
expect(wrapper.vm.ringtoneStatus.volume).toBe(0.8);
});
it("resizeAudioVisualizerCanvas scales dimensions for responsive density", async () => {
const wrapper = mountCallPage();
await flushPromises();
const canvas = { clientWidth: 320, clientHeight: 80, width: 0, height: 0 };
const ok = wrapper.vm.resizeAudioVisualizerCanvas(canvas);
expect(ok).toBe(true);
expect(canvas.width).toBeGreaterThanOrEqual(320);
expect(canvas.height).toBeGreaterThanOrEqual(80);
});
it("android native level events feed tx/rx visualizer levels", async () => {
const wrapper = mountCallPage();
await flushPromises();
wrapper.vm.localAudioLevel = 0;
wrapper.vm.remoteAudioLevel = 0;
wrapper.vm._bindAndroidNativeTelephone();
try {
window.dispatchEvent(
new CustomEvent("meshchatx-native-telephone-audio", {
detail: { kind: "levels", tx_level: 0.45, rx_level: 82 },
})
);
expect(wrapper.vm.localAudioLevel).toBeGreaterThan(0.4);
expect(wrapper.vm.remoteAudioLevel).toBeGreaterThan(0.8);
} finally {
wrapper.vm._unbindAndroidNativeTelephone();
}
});
it("playRemotePcm updates RX visualizer level from incoming PCM", async () => {
const wrapper = mountCallPage();
await flushPromises();
const connect = vi.fn();
const start = vi.fn();
wrapper.vm.audioCtx = {
createBuffer: vi.fn(() => ({ copyToChannel: vi.fn() })),
createBufferSource: vi.fn(() => ({ connect, start, buffer: null })),
destination: {},
};
wrapper.vm.selectedAudioOutputId = "__meshchat_default_out__";
wrapper.vm.remoteAudioLevel = 0;
const pcm = new Int16Array([0, 4000, -9000, 12000, -15000, 8000]);
wrapper.vm.playRemotePcm(pcm.buffer);
expect(wrapper.vm.remoteAudioLevel).toBeGreaterThan(0);
expect(connect).toHaveBeenCalled();
expect(start).toHaveBeenCalled();
});
it("updateVisualizerFromCallStats animates levels without web audio bridge", async () => {
const wrapper = mountCallPage();
await flushPromises();
wrapper.vm.audioWs = null;
wrapper.vm.useAndroidNativeTelephone = false;
wrapper.vm.localAudioLevel = 0;
wrapper.vm.remoteAudioLevel = 0;
wrapper.vm.prevCallTxBytes = 1000;
wrapper.vm.prevCallRxBytes = 2000;
wrapper.vm.updateVisualizerFromCallStats(
{ hash: "aa", status: 6, tx_bytes: 3000, rx_bytes: 4500 },
{ hash: "aa", status: 6, tx_bytes: 1000, rx_bytes: 2000 }
);
expect(wrapper.vm.localAudioLevel).toBeGreaterThan(0);
expect(wrapper.vm.remoteAudioLevel).toBeGreaterThan(0);
expect(wrapper.vm.prevCallTxBytes).toBe(3000);
expect(wrapper.vm.prevCallRxBytes).toBe(4500);
});
it("updateVisualizerFromCallStats does not override bridge-provided levels", async () => {
const wrapper = mountCallPage();
await flushPromises();
wrapper.vm.audioWs = { readyState: 1 };
wrapper.vm.useAndroidNativeTelephone = false;
wrapper.vm.localAudioLevel = 0.55;
wrapper.vm.remoteAudioLevel = 0.42;
wrapper.vm.prevCallTxBytes = 0;
wrapper.vm.prevCallRxBytes = 0;
wrapper.vm.updateVisualizerFromCallStats(
{ hash: "bb", status: 6, tx_bytes: 9999, rx_bytes: 8888 },
{ hash: "bb", status: 6, tx_bytes: 0, rx_bytes: 0 }
);
expect(wrapper.vm.localAudioLevel).toBe(0.55);
expect(wrapper.vm.remoteAudioLevel).toBe(0.42);
expect(wrapper.vm.prevCallTxBytes).toBe(9999);
expect(wrapper.vm.prevCallRxBytes).toBe(8888);
});
});