Live video on the AT Protocol
1import {
2 IngestMediaSource,
3 PlayerProtocol,
4 PlayerStatus,
5 usePlayerStore,
6 useStreamplaceStore,
7} from "@streamplace/components";
8import streamKey from "components/live-dashboard/stream-key";
9import Hls from "hls.js";
10import useStreamplaceNode from "hooks/useStreamplaceNode";
11import {
12 ForwardedRef,
13 forwardRef,
14 useCallback,
15 useEffect,
16 useRef,
17 useState,
18} from "react";
19import { Text, View } from "tamagui";
20import { srcToUrl } from "./shared";
21import useWebRTC, { useWebRTCIngest } from "./use-webrtc";
22import {
23 logWebRTCDiagnostics,
24 useWebRTCDiagnostics,
25} from "./webrtc-diagnostics";
26import { checkWebRTCSupport } from "./webrtc-primitives";
27
// Props shared by every protocol-specific player: the resolved playback URL.
type VideoProps = { url: string };
29
/**
 * Top-level web video player. Resolves the player-store `src` into a concrete
 * URL + protocol via `srcToUrl` and dispatches to the matching
 * protocol-specific player. While an ingest connection exists, the webcam
 * preview always takes priority over playback.
 *
 * @throws Error when `srcToUrl` resolves to a protocol with no player.
 */
export default function WebVideo() {
  const inProto = usePlayerStore((x) => x.protocol);
  const isIngesting = usePlayerStore((x) => x.ingestConnectionState !== null);
  const selectedRendition = usePlayerStore((x) => x.selectedRendition);
  const src = usePlayerStore((x) => x.src);
  const { url, protocol } = srcToUrl({ src: src, selectedRendition }, inProto);
  if (isIngesting) {
    return <WebcamIngestPlayer url={url} />;
  }
  if (protocol === PlayerProtocol.PROGRESSIVE_MP4) {
    return <ProgressiveMP4Player url={url} />;
  } else if (protocol === PlayerProtocol.PROGRESSIVE_WEBM) {
    return <ProgressiveWebMPlayer url={url} />;
  } else if (protocol === PlayerProtocol.HLS) {
    return <HLSPlayer url={url} />;
  } else if (protocol === PlayerProtocol.WEBRTC) {
    return <WebRTCPlayer url={url} />;
  } else {
    // Report the resolved protocol (the value actually dispatched on).
    // Previously this interpolated `inProto`, the *requested* protocol,
    // which can differ from what srcToUrl resolved and made the error
    // misleading when debugging.
    throw new Error(`unknown playback protocol ${protocol}`);
  }
}
51
// Media events that also drive the player-store status (see the
// `updateEvents[evType]` check in VideoElement's event handler); all other
// events are only forwarded to playerEvent without touching status.
// NOTE(review): "mute" is not a standard HTMLMediaElement event name
// (volume/mute changes fire "volumechange") — confirm whether this key is
// ever actually hit.
const updateEvents = {
  playing: true,
  waiting: true,
  stalled: true,
  pause: true,
  suspend: true,
  mute: true,
};
60
/**
 * Shared <video> element used by every protocol player. Responsibilities:
 * reporting media events to the player store and remote event endpoint,
 * muted-autoplay fallback, volume sync from the store, registering a
 * Picture-in-Picture action, and forwarding the element ref to the caller.
 */
const VideoElement = forwardRef(
  (props: VideoProps, refCallback: ForwardedRef<HTMLVideoElement | null>) => {
    // NOTE(review): selecting the entire store re-renders this component on
    // every store change; only setStatus/setPipAction are read from `x`.
    const x = usePlayerStore((x) => x);
    const url = useStreamplaceStore((x) => x.url);
    const playerEvent = usePlayerStore((x) => x.playerEvent);
    const setMuted = usePlayerStore((x) => x.setMuted);
    const setMuteWasForced = usePlayerStore((x) => x.setMuteWasForced);
    const muted = usePlayerStore((x) => x.muted);
    const ingest = usePlayerStore((x) => x.ingestConnectionState !== null);
    const volume = usePlayerStore((x) => x.volume);
    const setStatus = usePlayerStore((x) => x.setStatus);
    const setUserInteraction = usePlayerStore((x) => x.setUserInteraction);

    // Builds a handler for a named media event: events listed in
    // updateEvents also update the player status; every event is forwarded
    // to playerEvent with an ISO timestamp.
    const event = (evType) => (e) => {
      console.log(evType);
      const now = new Date();
      if (updateEvents[evType]) {
        x.setStatus(evType);
      }
      console.log("Sending", evType, "status to", url);
      playerEvent(url, now.toISOString(), evType, {});
    };
    // True until the first canplaythrough-triggered play() attempt below.
    const [firstAttempt, setFirstAttempt] = useState(true);

    const localVideoRef = useRef<HTMLVideoElement | null>(null);

    // setPipAction comes from Zustand store
    useEffect(() => {
      if (typeof x.setPipAction === "function") {
        const fn = () => {
          if (localVideoRef.current) {
            try {
              // Optional call: requestPictureInPicture is not available in
              // every browser.
              localVideoRef.current.requestPictureInPicture?.();
            } catch (err) {
              console.error("Error requesting Picture-in-Picture:", err);
            }
          } else {
            console.log("No video ref available for PiP");
          }
        };
        x.setPipAction(fn);
      }
      // Cleanup on unmount
      return () => {
        if (typeof x.setPipAction === "function") {
          x.setPipAction(undefined);
        }
      };
    }, []);

    // Memoized callback ref for video element
    const handleVideoRef = useCallback(
      (videoElement: HTMLVideoElement | null) => {
        localVideoRef.current = videoElement;
        // Support both callback refs and ref objects from forwardRef.
        if (typeof refCallback === "function") {
          refCallback(videoElement);
        } else if (refCallback && "current" in refCallback) {
          refCallback.current = videoElement;
        }
      },
      [refCallback],
    );

    // attempts to autoplay the video. if that fails, it attempts
    // to play the video muted; some browsers will only let you
    // autoplay if you're muted
    const canPlayThrough = (e) => {
      event("canplaythrough")(e);
      if (firstAttempt && localVideoRef.current) {
        setFirstAttempt(false);
        localVideoRef.current.play().catch((err) => {
          // NotAllowedError: autoplay with sound was blocked — retry muted
          // and record that the mute was browser-forced, not user-chosen.
          if (err.name === "NotAllowedError") {
            if (localVideoRef.current) {
              setMuted(true);
              localVideoRef.current.muted = true;
              localVideoRef.current
                .play()
                .then(() => {
                  console.warn("Browser forced video to start muted");
                  setMuteWasForced(true);
                })
                .catch((err) => {
                  console.error("error playing video", err);
                });
            }
          }
        });
      }
    };

    // Reset the player status to START when this element unmounts.
    useEffect(() => {
      return () => {
        setStatus(PlayerStatus.START);
      };
    }, [setStatus]);

    // Keep the element's volume in sync with the store value.
    useEffect(() => {
      if (localVideoRef.current) {
        localVideoRef.current.volume = volume;
        console.log("Setting volume to", volume);
      }
    }, [volume]);

    return (
      <View
        backgroundColor="#111"
        alignItems="stretch"
        f={1}
        onPointerMove={setUserInteraction}
      >
        {/* While ingesting, src is left unset (the ingest player attaches a
            srcObject instead) and the preview is mirrored via scaleX(-1). */}
        <video
          autoPlay={true}
          playsInline={true}
          ref={handleVideoRef}
          controls={false}
          src={ingest ? undefined : props.url}
          muted={muted}
          crossOrigin="anonymous"
          onMouseMove={setUserInteraction}
          onClick={setUserInteraction}
          onAbort={event("abort")}
          onCanPlay={event("canplay")}
          onCanPlayThrough={canPlayThrough}
          // onDurationChange={event("durationchange")}
          onEmptied={event("emptied")}
          onEncrypted={event("encrypted")}
          onEnded={event("ended")}
          onError={event("error")}
          onLoadedData={event("loadeddata")}
          onLoadedMetadata={event("loadedmetadata")}
          onLoadStart={event("loadstart")}
          onPause={event("pause")}
          onPlay={event("play")}
          onPlaying={event("playing")}
          // onProgress={event("progress")}
          // onTimeUpdate={event("timeupdate")}
          onRateChange={event("ratechange")}
          onSeeked={event("seeked")}
          onSeeking={event("seeking")}
          onStalled={event("stalled")}
          onSuspend={event("suspend")}
          onVolumeChange={event("volumechange")}
          onWaiting={event("waiting")}
          style={{
            objectFit: "contain",
            backgroundColor: "transparent",
            width: "100%",
            height: "100%",
            transform: ingest ? "scaleX(-1)" : undefined,
          }}
        />
      </View>
    );
  },
);
216
217export function ProgressiveMP4Player(props: VideoProps) {
218 return <VideoElement {...props} />;
219}
220
221export function ProgressiveWebMPlayer(props: VideoProps) {
222 return <VideoElement {...props} />;
223}
224
/**
 * HLS playback. Uses hls.js where Media Source Extensions are available,
 * otherwise falls back to native HLS (Safari) by assigning the URL directly
 * to the <video> element.
 */
export function HLSPlayer(props: VideoProps) {
  const localRef = useRef<HTMLVideoElement | null>(null);

  const videoRef = usePlayerStore((x) => x.videoRef);
  const setVideoRef = usePlayerStore((x) => x.setVideoRef);

  // Forward the element to both our local ref and the store's ref.
  // Previously this closed over stale videoRef/setVideoRef (empty dependency
  // array) and assigned localRef.current twice.
  const refCallback = useCallback(
    (node: HTMLVideoElement | null) => {
      localRef.current = node;
      if (typeof videoRef === "function") {
        videoRef(node);
      } else if (videoRef) {
        setVideoRef(localRef);
      }
    },
    [videoRef, setVideoRef],
  );

  useEffect(() => {
    const video = localRef.current;
    if (!video) {
      return;
    }
    if (Hls.isSupported()) {
      // workaround for not having quite the right number of audio frames :(
      const hls = new Hls({ maxAudioFramesDrift: 20 });
      hls.loadSource(props.url);
      try {
        hls.attachMedia(video);
      } catch (e) {
        // Include the error itself — the old log dropped it.
        console.error("error on attachMedia", e);
        hls.destroy();
        return;
      }
      hls.on(Hls.Events.MANIFEST_PARSED, () => {
        if (!localRef.current) {
          return;
        }
        localRef.current.play();
      });
      return () => {
        // destroy() (not just stopLoad()) detaches the media element and
        // frees buffers/workers; stopLoad alone leaked the Hls instance on
        // every url change or unmount.
        hls.destroy();
      };
    } else if (video.canPlayType("application/vnd.apple.mpegurl")) {
      // Native HLS path (Safari). Remove the listener on cleanup so repeated
      // url changes don't stack "canplay" handlers on the element.
      video.src = props.url;
      const onCanPlay = () => {
        localRef.current?.play();
      };
      video.addEventListener("canplay", onCanPlay);
      return () => {
        video.removeEventListener("canplay", onCanPlay);
      };
    }
  }, [props.url]);
  return <VideoElement {...props} ref={refCallback} />;
}
276
/**
 * WebRTC playback wrapper: verifies browser support before mounting the real
 * player. On any compatibility error it shows a diagnostic panel and flips
 * the store protocol to HLS so playback can continue.
 */
export function WebRTCPlayer(props: VideoProps) {
  const [webrtcError, setWebrtcError] = useState<string | null>(null);
  const setStatus = usePlayerStore((x) => x.setStatus);
  const setProtocol = usePlayerStore((x) => x.setProtocol);
  const diagnostics = useWebRTCDiagnostics();

  // Check WebRTC compatibility on component mount
  useEffect(() => {
    try {
      checkWebRTCSupport();
      console.log("WebRTC Player - Browser compatibility check passed");
      logWebRTCDiagnostics();
    } catch (error) {
      // catch variables are `unknown` under strict TS; narrow before use.
      const message = error instanceof Error ? error.message : String(error);
      console.error("WebRTC Player - Compatibility error:", message);
      setWebrtcError(message);
      setStatus(PlayerStatus.START);
    }
  }, []);

  // Monitor diagnostics for errors
  useEffect(() => {
    if (!diagnostics.browserSupport && diagnostics.errors.length > 0) {
      setWebrtcError(diagnostics.errors.join(", "));
    }
  }, [diagnostics]);

  // Fall back to HLS once an error is known. This store write used to happen
  // directly in the render path, which React forbids (state updates during
  // render can loop and trigger "cannot update a component while rendering"
  // warnings) — it belongs in an effect.
  useEffect(() => {
    if (webrtcError) {
      setProtocol(PlayerProtocol.HLS);
    }
  }, [webrtcError, setProtocol]);

  // Wait for diagnostics before rendering anything.
  if (!diagnostics.done) return <></>;

  if (webrtcError) {
    return (
      <View
        backgroundColor="#111"
        alignItems="center"
        justifyContent="center"
        f={1}
        padding="$4"
      >
        <View
          backgroundColor="$red10"
          padding="$3"
          borderRadius="$4"
          maxWidth={400}
        >
          <View marginBottom="$2">
            <Text fontSize="$8" fontWeight="bold" color="white">
              WebRTC Not Supported
            </Text>
          </View>
          <Text fontSize="$4" color="white" lineHeight="$1" marginBottom="$3">
            {webrtcError}
          </Text>
          {diagnostics.errors.length > 0 && (
            <View>
              <Text
                fontSize="$4"
                fontWeight="bold"
                color="white"
                marginBottom="$2"
              >
                Technical Details:
              </Text>
              {diagnostics.errors.map((error, index) => (
                <Text key={index} fontSize="$3" color="white" marginBottom="$1">
                  • {error}
                </Text>
              ))}
            </View>
          )}
          <Text fontSize="$3">
            • To use WebRTC, you may need to disable any blocking extensions or
            update your browser.
          </Text>
          <Text mt="$2">Switching to HLS...</Text>
        </View>
      </View>
    );
  }
  return <WebRTCPlayerInner url={props.url} />;
}
357
358export function WebRTCPlayerInner({ url }: { url: string }) {
359 const [videoElement, setVideoElement] = useState<HTMLVideoElement | null>(
360 null,
361 );
362 const [connectionStatus, setConnectionStatus] =
363 useState<string>("initializing");
364
365 const localVideoRef = useRef<HTMLVideoElement | null>(null);
366
367 const videoRef = usePlayerStore((x) => x.videoRef);
368 const setVideoRef = usePlayerStore((x) => x.setVideoRef);
369
370 const status = usePlayerStore((x) => x.status);
371 const setStatus = usePlayerStore((x) => x.setStatus);
372
373 const playerEvent = usePlayerStore((x) => x.playerEvent);
374 const spurl = useStreamplaceStore((x) => x.url);
375
376 const handleRef = useCallback((node: HTMLVideoElement | null) => {
377 if (node) {
378 setVideoElement(node);
379 }
380 if (typeof videoRef === "function") {
381 videoRef(node);
382 } else if (setVideoRef) {
383 setVideoRef(localVideoRef);
384 }
385 }, []);
386
387 const [mediaStream, stuck] = useWebRTC(url);
388
389 // Debug logging for WebRTC connection state
390 useEffect(() => {
391 // Update connection status based on state
392 if (stuck) {
393 setConnectionStatus("connection-failed");
394 } else if (mediaStream) {
395 setConnectionStatus("connected");
396 } else {
397 setConnectionStatus("connecting");
398 }
399 }, [url, mediaStream, stuck, status]);
400
401 useEffect(() => {
402 if (stuck && status === PlayerStatus.PLAYING) {
403 setStatus(PlayerStatus.STALLED);
404 }
405 if (!stuck && mediaStream) {
406 setStatus(PlayerStatus.PLAYING);
407 }
408 }, [stuck, status, mediaStream]);
409
410 useEffect(() => {
411 if (!mediaStream) {
412 return;
413 }
414 const evt = (evType) => (e) => {
415 console.log("webrtc event", evType);
416 playerEvent(spurl, new Date().toISOString(), evType, {});
417 };
418 const active = evt("active");
419 const inactive = evt("inactive");
420 const ended = evt("ended");
421 const mute = evt("mute");
422 const unmute = evt("playing"); // playing has resumed yay
423
424 mediaStream.addEventListener("active", active);
425 mediaStream.addEventListener("inactive", inactive);
426 mediaStream.addEventListener("ended", ended);
427 for (const track of mediaStream.getTracks()) {
428 track.addEventListener("ended", ended);
429 track.addEventListener("mute", mute);
430 track.addEventListener("unmute", unmute);
431 }
432 return () => {
433 for (const track of mediaStream.getTracks()) {
434 track.removeEventListener("ended", ended);
435 track.removeEventListener("mute", mute);
436 track.removeEventListener("unmute", unmute);
437 }
438 mediaStream.removeEventListener("active", active);
439 mediaStream.removeEventListener("inactive", inactive);
440 mediaStream.removeEventListener("ended", ended);
441 };
442 }, [mediaStream]);
443
444 // Test not working right now
445 // useEffect(() => {
446 // if (!props.avSyncTest) {
447 // return;
448 // }
449 // if (!mediaStream) {
450 // return;
451 // }
452 // quietReceiver(mediaStream, playerEvent);
453 // }, [mediaStream, props.avSyncTest, playerEvent]);
454
455 useEffect(() => {
456 if (!videoElement) {
457 return;
458 }
459 videoElement.srcObject = mediaStream;
460 }, [videoElement, mediaStream]);
461
462 // Show loading/connection status when no media stream is available
463 if (!mediaStream) {
464 return (
465 <View
466 backgroundColor="#111"
467 alignItems="center"
468 justifyContent="center"
469 f={1}
470 padding="$4"
471 >
472 <View
473 backgroundColor="$blue10"
474 padding="$3"
475 borderRadius="$4"
476 maxWidth={400}
477 >
478 <View marginBottom="$2">
479 <Text fontSize="$6" fontWeight="bold" color="white">
480 Connecting...
481 </Text>
482 </View>
483 <Text fontSize="$4" color="white" lineHeight="$1">
484 Establishing WebRTC connection ({connectionStatus})
485 </Text>
486 </View>
487 </View>
488 );
489 }
490 return <VideoElement url={url} ref={handleRef} />;
491}
492
/**
 * Webcam/screen-capture ingest preview. Acquires a local MediaStream for the
 * selected source, previews it in the shared <video> element, and publishes
 * it via the WebRTC ingest hook once ingest is starting (or auto-start is on).
 */
export function WebcamIngestPlayer(props: VideoProps) {
  const ingestStarting = usePlayerStore((x) => x.ingestStarting);
  const ingestMediaSource = usePlayerStore((x) => x.ingestMediaSource);
  const ingestAutoStart = usePlayerStore((x) => x.ingestAutoStart);

  const [videoElement, setVideoElement] = useState<HTMLVideoElement | null>(
    null,
  );
  const handleRef = useCallback((node: HTMLVideoElement | null) => {
    if (node) {
      setVideoElement(node);
    }
  }, []);

  const { url } = useStreamplaceNode();
  const [localMediaStream, setLocalMediaStream] = useState<MediaStream | null>(
    null,
  );
  // we assign a stream key in the webrtcingest hook
  const [remoteMediaStream, setRemoteMediaStream] = useWebRTCIngest({
    endpoint: `${url}/api/ingest/webrtc`,
  });

  // Acquire the capture source. Tracks are stopped on cleanup (source switch
  // or unmount) — previously they were never stopped, so the camera/screen
  // capture stayed live after leaving the page. The cancelled flag also stops
  // streams whose permission prompt resolves after cleanup already ran.
  useEffect(() => {
    let cancelled = false;
    let acquired: MediaStream | null = null;
    const onStream = (stream: MediaStream) => {
      if (cancelled) {
        stream.getTracks().forEach((track) => track.stop());
        return;
      }
      acquired = stream;
      setLocalMediaStream(stream);
    };
    if (ingestMediaSource === IngestMediaSource.DISPLAY) {
      navigator.mediaDevices
        .getDisplayMedia({
          audio: true,
          video: true,
        })
        .then(onStream)
        .catch((e) => {
          console.error("error getting display media", e);
        });
    } else {
      navigator.mediaDevices
        .getUserMedia({
          audio: true,
          video: {
            width: { min: 200, ideal: 1920, max: 3840 },
            height: { min: 200, ideal: 1080, max: 2160 },
          },
        })
        .then(onStream)
        .catch((e) => {
          console.error("error getting user media", e);
        });
    }
    return () => {
      cancelled = true;
      if (acquired) {
        acquired.getTracks().forEach((track) => track.stop());
      }
    };
  }, [ingestMediaSource]);

  // Publish the local stream when ingest should be live; clear it otherwise.
  useEffect(() => {
    if (!ingestStarting && !ingestAutoStart) {
      setRemoteMediaStream(null);
      return;
    }
    if (!localMediaStream) {
      return;
    }
    // NOTE(review): `streamKey` is a module import, so this guard is
    // effectively constant — confirm whether a store value was intended.
    if (!streamKey) {
      return;
    }
    setRemoteMediaStream(localMediaStream);
  }, [localMediaStream, ingestStarting, streamKey, ingestAutoStart]);

  // Preview the local stream in the <video> element.
  useEffect(() => {
    if (!videoElement) {
      return;
    }
    if (!localMediaStream) {
      return;
    }
    videoElement.srcObject = localMediaStream;
  }, [videoElement, localMediaStream]);

  return <VideoElement {...props} ref={handleRef} />;
}