1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
<!DOCTYPE html>
<html class="dark">
<head>
<meta charset="utf-8" />
<title>videocall.rs (Yew)</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
<link data-trunk rel="copy-dir" href="./assets" />
<!-- TODO: Uncomment this when we have a way to pass features to the build -->
<!-- <link data-trunk rel="rust" href="Cargo.toml" data-cargo-features="fake-peers" data-cargo-no-default-features /> -->
<link data-trunk rel="rust" href="Cargo.toml"/>
<link data-trunk rel="css" href="./static/leptos-style.css" />
<link data-trunk rel="css" href="./static/tailwind.css" />
<link data-trunk rel="css" href="./static/style.css" />
<link data-trunk rel="css" href="./static/global.css" />
<link data-trunk rel="copy-dir" href="./static" />
<link data-trunk rel="copy-file" href="./scripts/config.js" />
<!-- FIXME: these files are not being content-hashed by Trunk, so cache-busting for them does not work correctly -->
<link data-trunk rel="copy-file" href="./scripts/encoderWorker.min.js" />
<link data-trunk rel="copy-file" href="./scripts/decoderWorker.min.js" />
<script src="/config.js"></script>
<!-- Compile videocall-codecs's `worker.rs` as a separate WASM module for WebCodecs -->
<link
data-trunk
rel="rust"
href="../videocall-codecs/Cargo.toml"
data-bin="worker_decoder"
data-type="worker"
data-cargo-features="wasm"
data-cargo-no-default-features
data-loader-shim
/>
<!-- Runtime link so the wasm decoder can find the built worker URL -->
<link id="codecs-worker" href="/worker_decoder_loader.js" />
<!-- NetEq worker -->
<link
data-trunk
rel="rust"
href="../neteq/Cargo.toml"
data-bin="neteq_worker"
data-type="worker"
data-cargo-features="web"
data-cargo-no-default-features
data-loader-shim
/>
<link id="neteq-worker" href="/neteq_worker_loader.js" />
<!-- Load wasm worker scripts from the videocall-codecs crate -->
<!-- MediaStreamTrackProcessor polyfill using LiveKit's approach:
Creates VideoFrame directly from video element instead of canvas copy.
See: https://github.com/livekit/track-processors-js/pull/65 -->
<script>
// Polyfill for MediaStreamTrackProcessor (mediacapture-transform spec):
// wraps a MediaStreamTrack in a ReadableStream of VideoFrame / AudioData
// objects for browsers that lack the native API.
// NOTE: inside the ReadableStream callbacks below, `this` is the underlying
// source object itself (per the Streams spec), which doubles as a shared
// state bag (video element, canvas, worklet node, ...) — it is NOT the
// MediaStreamTrackProcessor instance.
if (!self.MediaStreamTrackProcessor) {
self.MediaStreamTrackProcessor = class MediaStreamTrackProcessor {
constructor({track}) {
if (track.kind == "video") {
this.readable = new ReadableStream({
async start(controller) {
// Play the track through a detached <video> element so frames can be
// sampled from it.
this.video = document.createElement("video");
this.video.muted = true;
this.video.srcObject = new MediaStream([track]);
// Wait for both playback to begin and metadata (dimensions) to load.
await Promise.all([
this.video.play(),
new Promise(r => this.video.onloadedmetadata = r)
]);
this.track = track;
if (!this.video.videoWidth || !this.video.videoHeight) {
console.warn("Video dimensions not available after metadata load");
}
// Try direct VideoFrame from video element (LiveKit approach).
// This may be faster than canvas copy in some browsers; probing once
// here avoids a try/catch on every frame.
const supportsDirectVideoFrame = (() => {
try {
const testFrame = new VideoFrame(this.video);
testFrame.close();
return true;
} catch (e) {
return false;
}
})();
if (supportsDirectVideoFrame) {
console.log("Using direct VideoFrame(video) - LiveKit approach");
// Track last video time to avoid duplicate frames
let lastVideoTime = -1;
if (this.video.requestVideoFrameCallback) {
console.log("Using requestVideoFrameCallback with direct VideoFrame");
const processFrame = (now, metadata) => {
// desiredSize is null once the stream is closed/errored and 0 when
// the queue is full; both stop capture here.
// NOTE(review): a merely-full queue also pauses the video and never
// reschedules, permanently stalling the stream — confirm consumers
// always drain faster than the source frame rate.
if (!controller.desiredSize) {
this.video?.pause();
return;
}
// Only process if video has new frame
const currentTime = this.video.currentTime;
if (currentTime !== lastVideoTime) {
lastVideoTime = currentTime;
try {
// Create VideoFrame directly from video element. mediaTime is
// in seconds; VideoFrame timestamps are microseconds.
const frame = new VideoFrame(this.video, {
timestamp: metadata.mediaTime * 1e6
});
controller.enqueue(frame);
} catch (e) {
console.error("Error creating VideoFrame:", e);
}
}
if (controller.desiredSize > 0) {
this.video.requestVideoFrameCallback(processFrame);
}
};
this.video.requestVideoFrameCallback(processFrame);
} else {
// No requestVideoFrameCallback: fall back to requestAnimationFrame,
// which may over- or under-sample relative to the real frame rate.
console.warn("Using requestAnimationFrame with direct VideoFrame");
let lastTimestamp = -1;
const processFrameRAF = (timestamp) => {
if (!controller.desiredSize) {
this.video?.pause();
return;
}
// Skip when neither the playback position nor the rAF timestamp
// has advanced since the last enqueued frame.
const currentTime = this.video.currentTime;
if (currentTime !== lastVideoTime && timestamp !== lastTimestamp) {
lastVideoTime = currentTime;
lastTimestamp = timestamp;
try {
const frame = new VideoFrame(this.video, {
timestamp: performance.now() * 1000
});
controller.enqueue(frame);
} catch (e) {
console.error("Error creating VideoFrame:", e);
}
}
if (controller.desiredSize > 0) {
requestAnimationFrame(processFrameRAF);
}
};
requestAnimationFrame(processFrameRAF);
}
} else {
// Fallback: Canvas-based approach (original polyfill) — draw the
// video into an OffscreenCanvas and build VideoFrames from it.
console.log("Using canvas-based polyfill (direct VideoFrame not supported)");
this.canvas = new OffscreenCanvas(
this.video.videoWidth || 640,
this.video.videoHeight || 480
);
this.ctx = this.canvas.getContext('2d', {desynchronized: true});
if (this.video.requestVideoFrameCallback) {
const processFrame = (now, metadata) => {
if (!controller.desiredSize) return;
try {
if (this.video.videoWidth && this.video.videoHeight) {
// Resize only when the source size changes: assigning
// width/height resets (clears) the canvas.
if (this.canvas.width !== this.video.videoWidth ||
this.canvas.height !== this.video.videoHeight) {
this.canvas.width = this.video.videoWidth;
this.canvas.height = this.video.videoHeight;
}
this.ctx.drawImage(this.video, 0, 0);
controller.enqueue(new VideoFrame(this.canvas, {
timestamp: metadata.mediaTime * 1e6
}));
}
} catch (e) {
console.error("Error processing video frame:", e);
}
if (controller.desiredSize > 0) {
this.video.requestVideoFrameCallback(processFrame);
} else {
this.video?.pause();
}
};
this.video.requestVideoFrameCallback(processFrame);
} else {
let lastTimestamp = -1;
const processFrameRAF = (timestamp) => {
if (!controller.desiredSize) {
this.video?.pause();
return;
}
if (timestamp !== lastTimestamp) {
lastTimestamp = timestamp;
try {
if (this.video.videoWidth && this.video.videoHeight) {
if (this.canvas.width !== this.video.videoWidth ||
this.canvas.height !== this.video.videoHeight) {
this.canvas.width = this.video.videoWidth;
this.canvas.height = this.video.videoHeight;
}
this.ctx.drawImage(this.video, 0, 0);
controller.enqueue(new VideoFrame(this.canvas, {
timestamp: performance.now() * 1000
}));
}
} catch (e) {
console.error("Error processing video frame:", e);
}
}
if (controller.desiredSize > 0) {
requestAnimationFrame(processFrameRAF);
}
};
requestAnimationFrame(processFrameRAF);
}
}
},
cancel(reason) {
// Consumer cancelled the stream: stop sampling and detach the track.
console.log("Video track processor cancelled:", reason);
if (this.video) {
this.video.pause();
this.video.srcObject = null;
}
}
});
} else if (track.kind == "audio") {
this.readable = new ReadableStream({
async start(controller) {
// Route the track through an AudioWorklet that posts each render
// quantum's raw channel data back to this thread.
this.ac = new AudioContext;
this.arrays = [];
// Serialized into a data: URL below, so it must be self-contained.
function worklet() {
registerProcessor("mstp-shim", class Processor extends AudioWorkletProcessor {
process(input) { this.port.postMessage(input); return true; }
});
}
await this.ac.audioWorklet.addModule(`data:text/javascript,(${worklet.toString()})()`);
this.node = new AudioWorkletNode(this.ac, "mstp-shim");
this.ac.createMediaStreamSource(new MediaStream([track])).connect(this.node);
// Queue only non-empty quanta (data[0][0] is the first input's first
// channel). addEventListener alone does not start the port; delivery
// begins once pull() assigns onmessage below.
this.node.port.addEventListener("message", ({data}) => data[0][0] && this.arrays.push(data));
},
async pull(controller) {
// Assigning onmessage (re)starts the port and doubles as a wakeup
// signal while waiting for the next quantum to arrive.
while (!this.arrays.length) await new Promise(r => this.node.port.onmessage = r);
const [channels] = this.arrays.shift();
// Concatenate the per-channel Float32Arrays into one planar buffer.
const joined = new Float32Array(channels.reduce((a, b) => a + b.length, 0));
channels.reduce((offset, a) => (joined.set(a, offset), offset + a.length), 0);
controller.enqueue(new AudioData({
format: "f32-planar",
sampleRate: this.ac.sampleRate,
numberOfFrames: channels[0].length,
numberOfChannels: channels.length,
timestamp: this.ac.currentTime * 1e6 | 0, // microseconds, truncated to int
data: joined,
transfer: [joined.buffer]
}));
}
});
}
}
};
}
</script>
<!-- MediaStreamTrackGenerator polyfill -->
<script>
if (!window.MediaStreamTrackGenerator) {
window.MediaStreamTrackGenerator = class MediaStreamTrackGenerator {
constructor({kind}) {
if (kind == "video") {
const canvas = document.createElement("canvas");
const ctx = canvas.getContext('2d', {desynchronized: true});
const track = canvas.captureStream().getVideoTracks()[0];
track.writable = new WritableStream({
write(frame) {
canvas.width = frame.displayWidth;
canvas.height = frame.displayHeight;
ctx.drawImage(frame, 0, 0, canvas.width, canvas.height);
frame.close();
}
});
return track;
} else if (kind == "audio") {
const ac = new AudioContext;
const dest = ac.createMediaStreamDestination();
const [track] = dest.stream.getAudioTracks();
track.writable = new WritableStream({
async start(controller) {
this.arrays = [];
function worklet() {
registerProcessor("mstg-shim", class Processor extends AudioWorkletProcessor {
constructor() {
super();
this.arrays = [];
this.arrayOffset = 0;
this.port.onmessage = ({data}) => this.arrays.push(data);
this.emptyArray = new Float32Array(0);
}
process(inputs, [[output]]) {
for (let i = 0; i < output.length; i++) {
if (!this.array || this.arrayOffset >= this.array.length) {
this.array = this.arrays.shift() || this.emptyArray;
this.arrayOffset = 0;
}
output[i] = this.array[this.arrayOffset++] || 0;
}
return true;
}
});
}
await ac.audioWorklet.addModule(`data:text/javascript,(${worklet.toString()})()`);
this.node = new AudioWorkletNode(ac, "mstg-shim");
this.node.connect(dest);
return track;
},
write(audioData) {
const array = new Float32Array(audioData.numberOfFrames * audioData.numberOfChannels);
audioData.copyTo(array, {planeIndex: 0});
this.node.port.postMessage(array, [array.buffer]);
audioData.close();
}
});
return track;
}
}
};
}
</script>
</head>
<body class="bg-background text-foreground">
</body>
</html>