enable screen sharing
frontend/src/lib/components/MediaListSwitcher.svelte
CHANGED
@@ -1,5 +1,6 @@
 <script lang="ts">
 	import { mediaDevices, mediaStreamActions } from '$lib/mediaStream';
+	import Screen from '$lib/icons/screen.svelte';
 	import { onMount } from 'svelte';
 
 	let deviceId: string = '';
@@ -14,13 +15,20 @@
 	});
 </script>
 
-<div class="text-xs">
+<div class="flex items-center justify-center text-xs">
+	<button
+		title="Share your screen"
+		class="border-1 my-1 block cursor-pointer rounded-md border-gray-500 border-opacity-50 bg-slate-100 bg-opacity-30 p-[2px] font-medium text-white"
+		on:click={() => mediaStreamActions.startScreenCapture()}
+	>
+		<Screen classList={'w-100'} />
+	</button>
 	{#if $mediaDevices}
 		<select
 			bind:value={deviceId}
 			on:change={() => mediaStreamActions.switchCamera(deviceId)}
 			id="devices-list"
-			class="border-1 cursor-pointer rounded-md border-gray-
+			class="border-1 block cursor-pointer rounded-md border-gray-800 border-opacity-50 bg-slate-100 bg-opacity-30 p-[2px] font-medium text-white"
 		>
 			{#each $mediaDevices as device, i}
 				<option value={device.deviceId}>{device.label}</option>
frontend/src/lib/components/VideoInput.svelte
CHANGED
@@ -1,6 +1,7 @@
 <script lang="ts">
 	import 'rvfc-polyfill';
-
+
+	import { onDestroy, onMount } from 'svelte';
 	import {
 		mediaStreamStatus,
 		MediaStreamStatusEnum,
@@ -11,10 +12,20 @@
 	import MediaListSwitcher from './MediaListSwitcher.svelte';
 
 	let videoEl: HTMLVideoElement;
+	let canvasEl: HTMLCanvasElement;
+	let ctx: CanvasRenderingContext2D;
 	let videoFrameCallbackId: number;
-	const WIDTH = 
-	const HEIGHT = 
+	const WIDTH = 768;
+	const HEIGHT = 768;
+	// ajust the throttle time to your needs
+	const THROTTLE_TIME = 1000 / 15;
 	let selectedDevice: string = '';
+
+	onMount(() => {
+		ctx = canvasEl.getContext('2d') as CanvasRenderingContext2D;
+		canvasEl.width = WIDTH;
+		canvasEl.height = HEIGHT;
+	});
 	$: {
 		console.log(selectedDevice);
 	}
@@ -25,8 +36,22 @@
 	$: if (videoEl) {
 		videoEl.srcObject = $mediaStream;
 	}
+	let lastMillis = 0;
 	async function onFrameChange(now: DOMHighResTimeStamp, metadata: VideoFrameCallbackMetadata) {
-
+		if (now - lastMillis < THROTTLE_TIME) {
+			videoFrameCallbackId = videoEl.requestVideoFrameCallback(onFrameChange);
+			return;
+		}
+		const videoWidth = videoEl.videoWidth;
+		const videoHeight = videoEl.videoHeight;
+		const blob = await grapCropBlobImg(
+			videoEl,
+			videoWidth / 2 - WIDTH / 2,
+			videoHeight / 2 - HEIGHT / 2,
+			WIDTH,
+			HEIGHT
+		);
+
 		onFrameChangeStore.set({ blob });
 		videoFrameCallbackId = videoEl.requestVideoFrameCallback(onFrameChange);
 	}
@@ -34,24 +59,17 @@
 	$: if ($mediaStreamStatus == MediaStreamStatusEnum.CONNECTED) {
 		videoFrameCallbackId = videoEl.requestVideoFrameCallback(onFrameChange);
 	}
-	async function 
-
-
-
-
+	async function grapCropBlobImg(
+		video: HTMLVideoElement,
+		x: number,
+		y: number,
+		width: number,
+		height: number
+	) {
+		const canvas = new OffscreenCanvas(width, height);
 
 		const ctx = canvas.getContext('2d') as OffscreenCanvasRenderingContext2D;
-		ctx.drawImage(
-			videoEl,
-			videoW / 2 - (videoH * aspectRatio) / 2,
-			0,
-			videoH * aspectRatio,
-			videoH,
-			0,
-			0,
-			WIDTH,
-			HEIGHT
-		);
+		ctx.drawImage(video, x, y, width, height, 0, 0, width, height);
 		const blob = await canvas.convertToBlob({ type: 'image/jpeg', quality: 1 });
 		return blob;
 	}
@@ -60,7 +78,7 @@
 <div class="relative mx-auto max-w-lg overflow-hidden rounded-lg border border-slate-300">
 	<div class="relative z-10 aspect-square w-full object-cover">
 		{#if $mediaDevices.length > 0}
-			<div class="absolute bottom-0 right-0">
+			<div class="absolute bottom-0 right-0 z-10">
 				<MediaListSwitcher />
 			</div>
 		{/if}
@@ -72,6 +90,8 @@
 			muted
 			loop
 		></video>
+		<canvas bind:this={canvasEl} class="absolute left-0 top-0 aspect-square w-full object-cover"
+		></canvas>
 	</div>
 	<div class="absolute left-0 top-0 flex aspect-square w-full items-center justify-center">
 		<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 448" class="w-40 p-5 opacity-20">
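For reference, here is a stripped-down, framework-free sketch of the throttled centre-crop grab that the new onFrameChange / grapCropBlobImg pair performs; the names THROTTLE_MS, SIZE and grabCenterCrop are illustrative only and are not part of this PR.

// Sketch only: throttle frame grabs from a playing <video> element and crop a
// centred square of it into a JPEG blob, mirroring the approach in the diff above.
const THROTTLE_MS = 1000 / 15; // grab at most ~15 frames per second
const SIZE = 768; // crop width/height, analogous to WIDTH/HEIGHT above

let lastGrab = 0;

async function grabCenterCrop(video: HTMLVideoElement): Promise<Blob> {
	// Source rectangle: a SIZE x SIZE square centred on the raw video frame.
	const sx = video.videoWidth / 2 - SIZE / 2;
	const sy = video.videoHeight / 2 - SIZE / 2;
	const canvas = new OffscreenCanvas(SIZE, SIZE);
	const ctx = canvas.getContext('2d') as OffscreenCanvasRenderingContext2D;
	// drawImage(source, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight)
	ctx.drawImage(video, sx, sy, SIZE, SIZE, 0, 0, SIZE, SIZE);
	return canvas.convertToBlob({ type: 'image/jpeg', quality: 1 });
}

function onFrame(video: HTMLVideoElement, now: DOMHighResTimeStamp) {
	if (now - lastGrab >= THROTTLE_MS) {
		lastGrab = now;
		void grabCenterCrop(video).then((blob) => {
			// Hand the blob to whatever consumes frames (a Svelte store, a socket, ...).
			console.log('frame blob', blob.size);
		});
	}
	// Re-register for the next decoded frame either way.
	video.requestVideoFrameCallback((t) => onFrame(video, t));
}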
frontend/src/lib/icons/screen.svelte
ADDED
@@ -0,0 +1,10 @@
+<script lang="ts">
+	export let classList: string = '';
+</script>
+
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 -32 576 576" height="16px" class={classList}>
+	<path
+		fill="currentColor"
+		d="M64 0A64 64 0 0 0 0 64v288a64 64 0 0 0 64 64h176l-10.7 32H160a32 32 0 1 0 0 64h256a32 32 0 1 0 0-64h-69.3L336 416h176a64 64 0 0 0 64-64V64a64 64 0 0 0-64-64H64zm448 64v288H64V64h448z"
+	/>
+</svg>
frontend/src/lib/mediaStream.ts
CHANGED
@@ -43,6 +43,33 @@ export const mediaStreamActions = {
 			mediaStream.set(null);
 		});
 	},
+	async startScreenCapture() {
+		const displayMediaOptions = {
+			video: {
+				displaySurface: "window",
+			},
+			audio: false,
+			surfaceSwitching: "include"
+		};
+
+
+		let captureStream = null;
+
+		try {
+			captureStream = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
+			const videoTrack = captureStream.getVideoTracks()[0];
+
+			console.log("Track settings:");
+			console.log(JSON.stringify(videoTrack.getSettings(), null, 2));
+			console.log("Track constraints:");
+			console.log(JSON.stringify(videoTrack.getConstraints(), null, 2));
+			mediaStreamStatus.set(MediaStreamStatusEnum.CONNECTED);
+			mediaStream.set(captureStream)
+		} catch (err) {
+			console.error(err);
+		}
+
+	},
 	async switchCamera(mediaDevicedID: string) {
 		if (get(mediaStreamStatus) !== MediaStreamStatusEnum.CONNECTED) {
 			return;
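For context, a minimal standalone sketch of the getDisplayMedia flow that the new startScreenCapture action wraps; the 'ended' listener is an illustrative addition showing how the end of a share could be detected, and is not something this PR adds.

// Sketch only: prompt the user to pick a window/screen and return the capture stream.
async function captureScreen(): Promise<MediaStream | null> {
	try {
		const stream = await navigator.mediaDevices.getDisplayMedia({
			video: { displaySurface: 'window' },
			audio: false
		});
		const [track] = stream.getVideoTracks();
		console.log('Track settings:', track.getSettings());

		// Fires when the user clicks the browser's own "Stop sharing" control.
		track.addEventListener('ended', () => {
			console.log('screen capture ended by user');
		});
		return stream;
	} catch (err) {
		// The user dismissed the picker, or the browser/permission policy refused the request.
		console.error(err);
		return null;
	}
}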