1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
<!DOCTYPE html>
<html class="dark">
<head>
<meta charset="utf-8" />
<title>videocall.rs</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
<link data-trunk rel="copy-dir" href="./assets" />
<!-- TODO: Uncomment this when we have a way to pass features to the build -->
<!-- <link data-trunk rel="rust" href="Cargo.toml" data-cargo-features="fake-peers" data-cargo-no-default-features /> -->
<link data-trunk rel="rust" href="Cargo.toml"/>
<link data-trunk rel="css" href="./static/leptos-style.css" />
<link data-trunk rel="css" href="./static/tailwind.css" />
<link data-trunk rel="css" href="./static/style.css" />
<link data-trunk rel="css" href="./static/global.css" />
<link data-trunk rel="copy-dir" href="./static" />
<link data-trunk rel="copy-file" href="./scripts/config.js" />
<!-- FIXME: these files are not being hashed correctly, so they are not being cache-busted -->
<link data-trunk rel="copy-file" href="./scripts/encoderWorker.min.js" />
<link data-trunk rel="copy-file" href="./scripts/decoderWorker.min.js" />
<link data-trunk rel="copy-file" href="./scripts/pcmPlayerWorker.js" />
<script src="/config.js"></script>
<!-- Compile videocall-codecs's `worker.rs` as a separate WASM module for WebCodecs -->
<link
data-trunk
rel="rust"
href="../videocall-codecs/Cargo.toml"
data-bin="worker_decoder"
data-type="worker"
data-cargo-features="wasm"
data-cargo-no-default-features
data-loader-shim
/>
<!-- Runtime link so the wasm decoder can find the built worker URL -->
<link id="codecs-worker" href="/worker_decoder_loader.js" />
<!-- NetEq worker -->
<link
data-trunk
rel="rust"
href="../neteq/Cargo.toml"
data-bin="neteq_worker"
data-type="worker"
data-cargo-features="web"
data-cargo-no-default-features
data-loader-shim
/>
<link id="neteq-worker" href="/neteq_worker_loader.js" />
<!-- MediaStreamTrackProcessor polyfill for browsers without the native API -->
<script>
// Polyfill for MediaStreamTrackProcessor (natively Chromium-only).
// Exposes `readable`: a ReadableStream of VideoFrame chunks for video tracks,
// or AudioData chunks for audio tracks, approximating the native API surface.
// NOTE(review): installed on `self`, but the video path uses `document`, so it
// can only work on the main thread — confirm it is never constructed in a worker.
if (!self.MediaStreamTrackProcessor) {
self.MediaStreamTrackProcessor = class MediaStreamTrackProcessor {
constructor({track}) {
if (track.kind == "video") {
// Video: play the track in a detached <video> element, blit each frame onto
// an OffscreenCanvas, and enqueue the canvas as a VideoFrame.
// Per the Streams spec, `this` inside start()/cancel() is the underlying
// source object itself; per-stream state (video/canvas/ctx) is stashed on it.
this.readable = new ReadableStream({
async start(controller) {
this.video = document.createElement("video");
this.video.muted = true; // Prevent potential audio feedback
this.video.srcObject = new MediaStream([track]);
// Start playback and wait for metadata in parallel; dimensions are only
// known once loadedmetadata has fired.
await Promise.all([
this.video.play(),
new Promise(r => this.video.onloadedmetadata = r) // Wait for metadata
]);
this.track = track;
// Ensure initial dimensions are set before creating canvas
if (!this.video.videoWidth || !this.video.videoHeight) {
console.warn("Video dimensions not available immediately after metadata load.");
// Potential fallback or wait mechanism might be needed if this happens often
}
this.canvas = new OffscreenCanvas(this.video.videoWidth || 640, this.video.videoHeight || 480); // Use default dimensions as fallback
this.ctx = this.canvas.getContext('2d', {desynchronized: true});
// --- Performance Improvement: Use requestVideoFrameCallback if available ---
if (this.video.requestVideoFrameCallback) {
console.log("Using requestVideoFrameCallback for MediaStreamTrackProcessor polyfill");
const processFrame = (now, metadata) => {
// Check if stream is closed before processing.
// desiredSize is null after the stream closes/errors and <= 0 when the
// queue is full; either way this loop ends permanently (no resume on drain).
if (!controller.desiredSize) return;
try {
if (this.video.videoWidth && this.video.videoHeight) {
// Resize canvas if video dimensions changed
if (this.canvas.width !== this.video.videoWidth || this.canvas.height !== this.video.videoHeight) {
this.canvas.width = this.video.videoWidth;
this.canvas.height = this.video.videoHeight;
// Re-get context if needed, although usually not necessary for 2d
// this.ctx = this.canvas.getContext('2d', {desynchronized: true});
}
this.ctx.drawImage(this.video, 0, 0);
// Use mediaTime for more accurate timestamp (seconds -> microseconds)
controller.enqueue(new VideoFrame(this.canvas, {timestamp: metadata.mediaTime * 1e6 }));
}
} catch (e) {
console.error("Error processing video frame (rVFC):", e);
try { controller.error(e); } catch {} // Close stream on error
} finally {
// Schedule the next frame processing only if stream is still active
if (controller.desiredSize > 0) {
try {
this.video.requestVideoFrameCallback(processFrame);
} catch (e) {
console.error("Error requesting next video frame callback:", e);
try { controller.error(e); } catch {}
}
} else {
console.log("Stopping rVFC loop as stream is closed or backed up.");
this.video?.pause(); // Pause video when stopping
}
}
};
// Start the loop
this.video.requestVideoFrameCallback(processFrame);
} else {
// --- Fallback to simplified requestAnimationFrame ---
console.warn("requestVideoFrameCallback not supported, falling back to requestAnimationFrame for MediaStreamTrackProcessor polyfill");
let lastTimestamp = -1;
const processFrameRAF = (timestamp) => {
// Check if stream is closed
if (!controller.desiredSize) {
console.log("Stopping rAF loop as stream is closed or backed up.");
this.video?.pause(); // Pause video when stopping
return;
}
// Avoid processing the same frame multiple times if RAF fires rapidly
if (timestamp === lastTimestamp) {
requestAnimationFrame(processFrameRAF);
return;
}
lastTimestamp = timestamp;
try {
if (this.video.videoWidth && this.video.videoHeight) {
// Resize canvas if video dimensions changed
if (this.canvas.width !== this.video.videoWidth || this.canvas.height !== this.video.videoHeight) {
this.canvas.width = this.video.videoWidth;
this.canvas.height = this.video.videoHeight;
}
this.ctx.drawImage(this.video, 0, 0);
// Use performance.now() for timestamp as RAF timestamp isn't media time
controller.enqueue(new VideoFrame(this.canvas, { timestamp: performance.now() * 1000 }));
}
} catch (e) {
console.error("Error processing video frame (RAF):", e);
try { controller.error(e); } catch {} // Close stream on error
} finally {
// Schedule the next frame
if (controller.desiredSize > 0) {
requestAnimationFrame(processFrameRAF);
} else {
console.log("Stopping rAF loop as stream is closed or backed up.");
this.video?.pause(); // Pause video when stopping
}
}
};
// Start the loop
requestAnimationFrame(processFrameRAF);
}
},
// Pull is no longer needed as the stream is now push-based
// pull(controller) { ... },
cancel(reason) {
console.log("Video track processor cancelled:", reason);
if (this.video) {
this.video.pause();
this.video.srcObject = null; // Release stream resources
}
// The rVFC/rAF loops will stop automatically due to the desiredSize check
}
});
} else if (track.kind == "audio") {
// Audio: route the track through an AudioWorklet ("mstp-shim") that posts raw
// planar Float32 sample blocks back to the main thread; pull() then repackages
// each block as an AudioData chunk.
this.readable = new ReadableStream({
async start(controller) {
this.ac = new AudioContext;
this.arrays = [];
// The worklet body is stringified into a data: URL below, so it must be
// fully self-contained (it cannot capture any outer variables).
function worklet() {
registerProcessor("mstp-shim", class Processor extends AudioWorkletProcessor {
process(input) { this.port.postMessage(input); return true; }
});
}
await this.ac.audioWorklet.addModule(`data:text/javascript,(${worklet.toString()})()`);
this.node = new AudioWorkletNode(this.ac, "mstp-shim");
this.ac.createMediaStreamSource(new MediaStream([track])).connect(this.node);
// Only queue blocks that actually contain a channel buffer; inputs can be
// empty before the source starts producing (data[0][0] is then undefined).
this.node.port.addEventListener("message", ({data}) => data[0][0] && this.arrays.push(data));
},
async pull(controller) {
// Wait for the next posted block; reassigning port.onmessage acts purely
// as a wake-up signal alongside the addEventListener handler above.
while (!this.arrays.length) await new Promise(r => this.node.port.onmessage = r);
const [channels] = this.arrays.shift();
// Concatenate all channel planes into one planar (non-interleaved) buffer.
const joined = new Float32Array(channels.reduce((a, b) => a + b.length, 0));
channels.reduce((offset, a) => (joined.set(a, offset), offset + a.length), 0);
controller.enqueue(new AudioData({
format: "f32-planar",
sampleRate: this.ac.sampleRate,
numberOfFrames: channels[0].length,
numberOfChannels: channels.length,
timestamp: this.ac.currentTime * 1e6 | 0, // seconds -> integer microseconds
data: joined,
transfer: [joined.buffer]
}));
}
});
}
// NOTE(review): tracks of any other kind leave `readable` undefined.
}
};
}
</script>
<!-- MediaStreamTrackGenerator polyfill -->
<script>
// Polyfill for MediaStreamTrackGenerator (natively Chromium-only).
// Produces a MediaStreamTrack augmented with a `writable` WritableStream that
// accepts VideoFrame / AudioData chunks, mirroring the native API surface.
if (!window.MediaStreamTrackGenerator) {
  window.MediaStreamTrackGenerator = class MediaStreamTrackGenerator {
    constructor({kind}) {
      if (kind === "video") {
        // Video: paint each incoming VideoFrame onto a canvas and hand back
        // the canvas's captureStream() track.
        const canvas = document.createElement("canvas");
        const ctx = canvas.getContext('2d', {desynchronized: true});
        const track = canvas.captureStream().getVideoTracks()[0];
        track.writable = new WritableStream({
          write(frame) {
            // Resizing the canvas implicitly clears it; done per frame so the
            // output follows the source's display dimensions.
            canvas.width = frame.displayWidth;
            canvas.height = frame.displayHeight;
            ctx.drawImage(frame, 0, 0, canvas.width, canvas.height);
            frame.close(); // Release the frame's backing memory promptly
          }
        });
        // Returning an object from a constructor substitutes it for `this`,
        // so callers receive the track itself (as with the native API).
        return track;
      } else if (kind === "audio") {
        // Audio: feed samples to an AudioWorklet ("mstg-shim") wired into a
        // MediaStreamDestination, whose output track is handed back.
        const ac = new AudioContext();
        const dest = ac.createMediaStreamDestination();
        const [track] = dest.stream.getAudioTracks();
        track.writable = new WritableStream({
          async start(controller) {
            // Per the Streams spec, `this` here is the underlying sink object;
            // `this.node` set below is read again in write().
            this.arrays = [];
            // The worklet body is stringified into a data: URL, so it must be
            // fully self-contained. It buffers posted Float32Arrays and plays
            // them out sequentially, padding with silence when starved.
            function worklet() {
              registerProcessor("mstg-shim", class Processor extends AudioWorkletProcessor {
                constructor() {
                  super();
                  this.arrays = [];
                  this.arrayOffset = 0;
                  this.port.onmessage = ({data}) => this.arrays.push(data);
                  this.emptyArray = new Float32Array(0);
                }
                process(inputs, [[output]]) {
                  for (let i = 0; i < output.length; i++) {
                    if (!this.array || this.arrayOffset >= this.array.length) {
                      // Current buffer exhausted: pull the next one, or fall
                      // back to the shared empty array (emits silence).
                      this.array = this.arrays.shift() || this.emptyArray;
                      this.arrayOffset = 0;
                    }
                    output[i] = this.array[this.arrayOffset++] || 0;
                  }
                  return true; // Keep the processor alive
                }
              });
            }
            await ac.audioWorklet.addModule(`data:text/javascript,(${worklet.toString()})()`);
            this.node = new AudioWorkletNode(ac, "mstg-shim");
            this.node.connect(dest);
            // (A dead `return track` leftover was removed here — the Streams
            // spec ignores start()'s return value.)
          },
          write(audioData) {
            // Copies plane 0 only — assumes plane 0 carries all the samples;
            // TODO(review): confirm behavior for multi-channel planar input.
            const array = new Float32Array(audioData.numberOfFrames * audioData.numberOfChannels);
            audioData.copyTo(array, {planeIndex: 0});
            this.node.port.postMessage(array, [array.buffer]); // transfer the buffer
            audioData.close();
          }
        });
        return track;
      }
      // Unknown kind: fall through; the instance has no usable track.
    }
  };
}
</script>
</head>
<body class="bg-background text-foreground">
</body>
</html>