Commit 6bfe941 (1 parent: 0537a4b)
Committed by Justin Haaheim and Mark Duppenthaler

Squashed commit of the following:


commit 151299bcb4803525f9a5ed243fd567bf539a54b8
Author: Justin Haaheim <justinhaaheim@users.noreply.github.com>
Date: Thu Nov 30 23:00:42 2023 +0000

Prevent the server from being unlocked if we're locking it completely

commit 2a6cf503c9729a410b57f1b7e164c7b5ff01dc46
Author: Justin Haaheim <justinhaaheim@users.noreply.github.com>
Date: Thu Nov 30 22:57:47 2023 +0000

Fix member object creation and add logging if lock completely is set

commit ea97d8b4145ede78fca9c249439e10be97ba1097
Author: Justin Haaheim <justinhaaheim@users.noreply.github.com>
Date: Thu Nov 30 22:51:06 2023 +0000

Enable fully locking server using env var

commit c40b4cb84fbbb74d3b70ff09f473b9612e1bd4ac
Author: Justin Haaheim <justinhaaheim@users.noreply.github.com>
Date: Thu Nov 30 13:16:38 2023 -0800

[Streaming] Add headphones notice and echo cancellation warning (#176)

commit 94a338e9127b1a3936ea1d1c6763e6a90ee8ee9d
Author: Justin Haaheim <justinhaaheim@users.noreply.github.com>
Date: Thu Nov 30 09:31:56 2023 -0800

[Streaming] Show full language name in dropdown (#173)

Co-authored-by: Mark Duppenthaler <mduppes@gmail.com>

seamless_server/app_pubsub.py CHANGED
@@ -124,7 +124,22 @@ class ServerLock(TypedDict):
     member_object: Member
 
 
-server_lock: Optional[ServerLock] = None
+if os.environ.get("LOCK_SERVER_COMPLETELY"):
+    logger.info("LOCK_SERVER_COMPLETELY is set. Server will be locked on startup.")
+    dummy_server_lock_member_object = Member(
+        client_id="seamless_user", session_id="dummy", name="Seamless User"
+    )
+    # Normally this would be an actual transcoder, but it's fine putting True here since currently we only check for the presence of the transcoder
+    dummy_server_lock_member_object.transcoder = True
+server_lock: Optional[ServerLock] = (
+    {
+        "name": "Seamless User",
+        "client_id": "seamless_user",
+        "member_object": dummy_server_lock_member_object,
+    }
+    if os.environ.get("LOCK_SERVER_COMPLETELY")
+    else None
+)
 
 server_id = str(uuid4())
 
@@ -497,16 +512,17 @@ async def join_room(sid, client_id, room_id_from_client, config_dict):
     ):
         # If something goes wrong and the server gets stuck in a locked state the client can
         # force the server to remove the lock by passing the special name ESCAPE_HATCH_SERVER_LOCK_RELEASE_NAME
-        # TEMP: remove escape hatch for demo
-        # if (
-        #     server_lock is not None
-        #     and config_dict.get("lockServerName")
-        #     == ESCAPE_HATCH_SERVER_LOCK_RELEASE_NAME
-        # ):
-        #     server_lock = None
-        #     logger.info(
-        #         f"🔓 Server lock has been reset by {client_id} using the escape hatch name {ESCAPE_HATCH_SERVER_LOCK_RELEASE_NAME}"
-        #     )
+        if (
+            server_lock is not None
+            and config_dict.get("lockServerName")
+            == ESCAPE_HATCH_SERVER_LOCK_RELEASE_NAME
+            # If we are locking the server completely we don't want someone to be able to unlock it
+            and not os.environ.get("LOCK_SERVER_COMPLETELY")
+        ):
+            server_lock = None
+            logger.info(
+                f"🔓 Server lock has been reset by {client_id} using the escape hatch name {ESCAPE_HATCH_SERVER_LOCK_RELEASE_NAME}"
+            )
 
         # If the server is not locked, set a lock. If it's already locked to this client, update the lock object
         if server_lock is None or server_lock.get("client_id") == client_id:
streaming-react-app/package.json CHANGED
@@ -1,11 +1,11 @@
 {
   "name": "streaming-react-app",
   "private": true,
-  "version": "0.0.13",
+  "version": "0.0.14",
   "type": "module",
   "scripts": {
     "dev": "vite --host --strictPort",
-    "build": "tsc && vite build",
+    "build": "vite build",
     "preview": "vite preview",
     "clean:node-modules": "rm -rf node_modules/",
     "ts-check": "tsc --noEmit",
@@ -24,7 +24,6 @@
     "amazon-cognito-identity-js": "^6.3.6",
     "audiobuffer-to-wav": "^1.0.0",
     "aws-sdk": "^2.1472.0",
-    "iso-639-1": "^3.1.0",
     "js-cookie": "^3.0.5",
     "lodash": "4.17.21",
     "react": "^18.2.0",
streaming-react-app/src/StreamingInterface.tsx CHANGED
@@ -57,12 +57,12 @@ import {CURSOR_BLINK_INTERVAL_MS} from './cursorBlinkInterval';
 import {getURLParams} from './URLParams';
 import debug from './debug';
 import DebugSection from './DebugSection';
-import {Grid} from '@mui/material';
+import Switch from '@mui/material/Switch';
+import Grid from '@mui/material/Grid';
 import {getLanguageFromThreeLetterCode} from './languageLookup';
+import HeadphonesIcon from '@mui/icons-material/Headphones';
 
-const AUDIO_STREAM_DEFAULTS: {
-  [key in SupportedInputSource]: BrowserAudioStreamConfig;
-} = {
+const AUDIO_STREAM_DEFAULTS = {
   userMedia: {
     echoCancellation: false,
     noiseSuppression: true,
@@ -71,13 +71,10 @@ const AUDIO_STREAM_DEFAULTS: {
     echoCancellation: false,
     noiseSuppression: false,
   },
-};
+} as const;
 
 async function requestUserMediaAudioStream(
-  config: BrowserAudioStreamConfig = {
-    echoCancellation: false,
-    noiseSuppression: true,
-  },
+  config: BrowserAudioStreamConfig = AUDIO_STREAM_DEFAULTS['userMedia'],
 ) {
   const stream = await navigator.mediaDevices.getUserMedia({
     audio: {...config, channelCount: 1},
@@ -90,10 +87,7 @@ async function requestUserMediaAudioStream(
 }
 
 async function requestDisplayMediaAudioStream(
-  config: BrowserAudioStreamConfig = {
-    echoCancellation: false,
-    noiseSuppression: false,
-  },
+  config: BrowserAudioStreamConfig = AUDIO_STREAM_DEFAULTS['displayMedia'],
 ) {
   const stream = await navigator.mediaDevices.getDisplayMedia({
     audio: {...config, channelCount: 1},
@@ -962,8 +956,9 @@ export default function StreamingInterface() {
              </RadioGroup>
            </FormControl>
          </Box>
-         <Box sx={{flex: 1}}>
-           <FormControl disabled={streamFixedConfigOptionsDisabled}>
+
+         <Box sx={{flex: 1, flexGrow: 2}}>
+           <FormControl disabled={streamFixedConfigOptionsDisabled}>
             <FormLabel>Options</FormLabel>
             <FormControlLabel
               control={
@@ -980,9 +975,9 @@
                  }
                />
              }
-             label="Noise Suppression (Browser)"
+             label="Noise Suppression"
            />
-           <FormControlLabel
+           <FormControlLabel
              control={
                <Checkbox
                  checked={
@@ -997,7 +992,7 @@
                  }
                />
              }
-             label="Echo Cancellation (Browser)"
+             label="Echo Cancellation (not recommended)"
            />
            <FormControlLabel
              control={
@@ -1008,12 +1003,34 @@
                ) => setServerDebugFlag(event.target.checked)}
              />
            }
-           label="Server Debug Flag"
+           label="Enable Server Debugging"
          />
        </FormControl>
      </Box>
    </Stack>
 
+   {isSpeaker &&
+     isListener &&
+     inputSource === 'userMedia' &&
+     !enableEchoCancellation &&
+     gain !== 0 && (
+       <div>
+         <Alert severity="warning" icon={<HeadphonesIcon />}>
+           Headphones required to prevent feedback.
+         </Alert>
+       </div>
+     )}
+
+   {isSpeaker && enableEchoCancellation && (
+     <div>
+       <Alert severity="warning">
+         We don't recommend using echo cancellation as it may
+         distort the input audio. If possible, use headphones and
+         disable echo cancellation instead.
+       </Alert>
+     </div>
+   )}
+
    <Stack direction="row" spacing={2}>
      {streamingStatus === 'stopped' ? (
        <Button
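Aside on the AUDIO_STREAM_DEFAULTS change above: once the defaults object is declared `as const` and referenced as a default parameter, callers can omit the config entirely and still get the per-source defaults. A minimal standalone sketch of that pattern follows; the BrowserAudioStreamConfig shape is taken from StreamingTypes.ts, everything else here is illustrative rather than the exact component code.

interface BrowserAudioStreamConfig {
  echoCancellation: boolean;
  noiseSuppression: boolean;
}

// `as const` keeps the literal defaults readonly while still satisfying the interface.
const AUDIO_STREAM_DEFAULTS = {
  userMedia: {echoCancellation: false, noiseSuppression: true},
  displayMedia: {echoCancellation: false, noiseSuppression: false},
} as const;

async function requestUserMediaAudioStream(
  config: BrowserAudioStreamConfig = AUDIO_STREAM_DEFAULTS['userMedia'],
): Promise<MediaStream> {
  // Mono capture, mirroring the component's channelCount: 1
  return navigator.mediaDevices.getUserMedia({
    audio: {...config, channelCount: 1},
  });
}

// Callers that previously spelled out the config inline can now simply do:
// const stream = await requestUserMediaAudioStream();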
streaming-react-app/src/languageLookup.ts CHANGED
@@ -1,108 +1,110 @@
-const LANG3_FULL = {
-  eng: 'english',
-  arb: 'arabic',
-  asm: 'assamese',
-  bel: 'belarusian',
-  bul: 'bulgarian',
-  ben: 'bengali',
-  cat: 'catalan',
-  ces: 'czech',
-  cym: 'welsh',
-  dan: 'danish',
-  deu: 'german',
-  ell: 'greek',
-  spa: 'spanish',
-  est: 'estonian',
-  pes: 'persian',
-  fin: 'finnish',
-  fra: 'french',
-  hin: 'hindi',
-  hun: 'hungarian',
-  ind: 'indonesian',
-  ita: 'italian',
-  jpn: 'japanese',
-  kat: 'georgian',
-  lit: 'lithuanian',
-  lvs: 'latvian',
-  khk: 'mongolian',
-  mar: 'marathi',
-  mlt: 'maltese',
-  nld: 'dutch',
-  pan: 'punjabi',
-  pol: 'polish',
-  por: 'portuguese',
-  ron: 'romanian',
-  rus: 'russian',
-  slk: 'slovak',
-  slv: 'slovenian',
-  swe: 'swedish',
-  swh: 'swahili',
-  tam: 'tamil',
-  tha: 'thai',
-  tur: 'turkish',
-  ukr: 'ukrainian',
-  urd: 'urdu',
-  uzn: 'uzbek',
-  vie: 'vietnamese',
-  cmn: 'chinese',
-  afr: 'afrikaans',
-  isl: 'icelandic',
-  ltz: 'luxembourgish',
-  nob: 'norwegian',
-  glg: 'galician',
-  bos: 'bosnian',
-  hrv: 'croatian',
-  mkd: 'macedonian',
-  srp: 'serbian',
-  hye: 'armenian',
-  azj: 'azerbaijani',
-  kaz: 'kazakh',
-  kor: 'korean',
-  guj: 'gujarati',
-  kan: 'kannada',
-  npi: 'nepali',
-  snd: 'sindhi',
-  tel: 'telugu',
-  jav: 'javanese',
-  zlm: 'malay',
-  mal: 'malayalam',
-  tgl: 'tagalog',
-  mya: 'myanmar',
-  khm: 'khmer',
-  lao: 'lao',
-  heb: 'hebrew',
-  pbt: 'pashto',
-  tgk: 'tajik',
-  amh: 'amharic',
-  lin: 'lingala',
-  som: 'somali',
-  yor: 'yoruba',
-  sna: 'shona',
-  mri: 'maori',
-  hau: 'hausa',
-  oci: 'occitan',
-  bak: 'bashkir',
-  bre: 'breton',
-  yid: 'yiddish',
-  hat: 'haitian creole',
-  mlg: 'malagasy',
-  sin: 'sinhala',
-  sqi: 'albanian',
-  sun: 'sundanese',
-  eus: 'basque',
-  nno: 'nynorsk',
-  tat: 'tatar',
-  bod: 'tibetan',
-  fao: 'faroese',
-  haw: 'hawaiian',
-  lat: 'latin',
-  san: 'sanskrit',
-  tuk: 'turkmen'
+const LANG3_TO_NAME = {
+  afr: 'afrikaans',
+  amh: 'amharic',
+  arb: 'arabic',
+  asm: 'assamese',
+  azj: 'azerbaijani',
+  bak: 'bashkir',
+  bel: 'belarusian',
+  ben: 'bengali',
+  bod: 'tibetan',
+  bos: 'bosnian',
+  bre: 'breton',
+  bul: 'bulgarian',
+  cat: 'catalan',
+  ces: 'czech',
+  cmn: 'chinese',
+  cym: 'welsh',
+  dan: 'danish',
+  deu: 'german',
+  ell: 'greek',
+  eng: 'english',
+  est: 'estonian',
+  eus: 'basque',
+  fao: 'faroese',
+  fin: 'finnish',
+  fra: 'french',
+  glg: 'galician',
+  guj: 'gujarati',
+  hat: 'haitian creole',
+  hau: 'hausa',
+  haw: 'hawaiian',
+  heb: 'hebrew',
+  hin: 'hindi',
+  hrv: 'croatian',
+  hun: 'hungarian',
+  hye: 'armenian',
+  ind: 'indonesian',
+  isl: 'icelandic',
+  ita: 'italian',
+  jav: 'javanese',
+  jpn: 'japanese',
+  kan: 'kannada',
+  kat: 'georgian',
+  kaz: 'kazakh',
+  khk: 'mongolian',
+  khm: 'khmer',
+  kor: 'korean',
+  lao: 'lao',
+  lat: 'latin',
+  lin: 'lingala',
+  lit: 'lithuanian',
+  ltz: 'luxembourgish',
+  lvs: 'latvian',
+  mal: 'malayalam',
+  mar: 'marathi',
+  mkd: 'macedonian',
+  mlg: 'malagasy',
+  mlt: 'maltese',
+  mri: 'maori',
+  mya: 'myanmar',
+  nld: 'dutch',
+  nno: 'nynorsk',
+  nob: 'norwegian',
+  npi: 'nepali',
+  oci: 'occitan',
+  pan: 'punjabi',
+  pbt: 'pashto',
+  pes: 'persian',
+  pol: 'polish',
+  por: 'portuguese',
+  ron: 'romanian',
+  rus: 'russian',
+  san: 'sanskrit',
+  sin: 'sinhala',
+  slk: 'slovak',
+  slv: 'slovenian',
+  sna: 'shona',
+  snd: 'sindhi',
+  som: 'somali',
+  spa: 'spanish',
+  sqi: 'albanian',
+  srp: 'serbian',
+  sun: 'sundanese',
+  swe: 'swedish',
+  swh: 'swahili',
+  tam: 'tamil',
+  tat: 'tatar',
+  tel: 'telugu',
+  tgk: 'tajik',
+  tgl: 'tagalog',
+  tha: 'thai',
+  tuk: 'turkmen',
+  tur: 'turkish',
+  ukr: 'ukrainian',
+  urd: 'urdu',
+  uzn: 'uzbek',
+  vie: 'vietnamese',
+  yid: 'yiddish',
+  yor: 'yoruba',
+  zlm: 'malay',
 };
 
-export function getLanguageFromThreeLetterCode(code: string): string | null {
+export function getLanguageFromThreeLetterCode(
+  lang3Code: string,
+): string | null {
   try {
-    const name = LANG3_FULL[code] ?? null;
+    const name = LANG3_TO_NAME[lang3Code] ?? null;
     if (name == null) {
       return null;
     }
@@ -111,7 +113,7 @@ export function getLanguageFromThreeLetterCode(code: string): string | null {
       .map((word: string) => word[0].toUpperCase() + word.slice(1));
     return capitalizedWords.join(' ');
   } catch (e) {
-    console.warn(`Unable to get language name for code ${code}: ${e}`);
+    console.warn(`Unable to get language name for code ${lang3Code}: ${e}`);
   }
   return null;
 }
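Usage is unchanged by the rename; a quick hedged example of what the lookup returns, with values taken from the map above and capitalization coming from the per-word uppercasing in the function (the sample codes are just illustrations):

import {getLanguageFromThreeLetterCode} from './languageLookup';

// Single-word names are capitalized: 'french' -> 'French'
getLanguageFromThreeLetterCode('fra'); // 'French'

// Multi-word names are capitalized per word: 'haitian creole' -> 'Haitian Creole'
getLanguageFromThreeLetterCode('hat'); // 'Haitian Creole'

// Unknown codes fall through to null
getLanguageFromThreeLetterCode('xxx'); // null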
streaming-react-app/src/react-xr/TextBlocks.tsx CHANGED
@@ -1,9 +1,8 @@
-import {JSX, useEffect, useRef, useState} from 'react';
+import {useEffect, useRef, useState} from 'react';
 import robotoFontFamilyJson from '../assets/RobotoMono-Regular-msdf.json?url';
 import robotoFontTexture from '../assets/RobotoMono-Regular.png';
 import ThreeMeshUIText, {ThreeMeshUITextType} from './ThreeMeshUIText';
-import {getURLParams} from '../URLParams';
-import {CURSOR_BLINK_INTERVAL_MS} from '../cursorBlinkInterval';
+import supportedCharSet from './supportedCharSet';
 
 const NUM_LINES = 3;
 
@@ -22,44 +21,80 @@ const SCROLL_Y_DELTA = 0.001;
 const OFFSET = 0.01;
 const OFFSET_WIDTH = OFFSET * 3;
 
-type Props = {
+const CHARS_PER_SECOND = 10;
+
+// The tick interval
+const RENDER_INTERVAL = 300;
+
+const CURSOR_BLINK_INTERVAL_MS = 1000;
+
+type TextBlockProps = {
   content: string;
   // The actual position or end position when animating
   y: number;
   // The start position when animating
   startY: number;
-  width: number;
-  height: number;
   textOpacity: number;
   backgroundOpacity: number;
-  // Use this to keep track of sentence + line position for animation
-  index: string;
-  enableAnimation: boolean;
+  index: number;
+  isBottomLine: boolean;
+  // key: number;
+};
+
+type TranscriptState = {
+  textBlocksProps: TextBlockProps[];
+  lastTranslationStringIndex: number;
+  lastTranslationLineStartIndex: number;
+  transcriptLines: string[];
+  lastRenderTime: number;
 };
 
 function TextBlock({
   content,
   y,
   startY,
-  width,
-  height,
   textOpacity,
   backgroundOpacity,
   index,
-  enableAnimation,
-}: Props) {
+  isBottomLine,
+}: TextBlockProps) {
   const [scrollY, setScrollY] = useState<number>(y);
-
   // We are reusing text blocks so this keeps track of when we changed rows so we can restart animation
-  const lastIndex = useRef<string>(index);
+  const lastIndex = useRef<number>(index);
   useEffect(() => {
     if (index != lastIndex.current) {
       lastIndex.current = index;
-      enableAnimation && setScrollY(startY);
+      !isBottomLine && setScrollY(startY);
     } else if (scrollY < y) {
       setScrollY((prev) => prev + SCROLL_Y_DELTA);
     }
-  }, [enableAnimation, index, scrollY, setScrollY, startY, y]);
+  }, [isBottomLine, index, scrollY, setScrollY, startY, y]);
+
+  const [cursorBlinkOn, setCursorBlinkOn] = useState(false);
+  useEffect(() => {
+    if (isBottomLine) {
+      const interval = setInterval(() => {
+        setCursorBlinkOn((prev) => !prev);
+      }, CURSOR_BLINK_INTERVAL_MS);
+
+      return () => clearInterval(interval);
+    } else {
+      setCursorBlinkOn(false);
+    }
+  }, [isBottomLine]);
+
+  const numChars = content.length;
+
+  if (cursorBlinkOn) {
+    content = content + '|';
+  }
+
+  // Accounting for potential cursor for block width (the +1)
+  const width =
+    (numChars + (isBottomLine ? 1.1 : 0) + (numChars < 10 ? 1 : 0)) *
+    CHAR_WIDTH;
+
+  const height = LINE_HEIGHT;
 
   // This is needed to update text content (doesn't work if we just update the content prop)
   const textRef = useRef<ThreeMeshUITextType>();
@@ -111,125 +146,162 @@ function TextBlock({
   );
 }
 
-// Background behind the text so it covers any missing spaces
-function TranscriptionPanel() {
-  const panelHeight = LINE_HEIGHT * NUM_LINES + 2 * BLOCK_SPACING + 2 * OFFSET;
-  const xPosition = OFFSET_WIDTH;
-  return (
-    <block
-      args={[
-        {
-          backgroundOpacity: 1,
-          width:
-            MAX_WIDTH * ((CHARS_PER_LINE + 2) / CHARS_PER_LINE) +
-            2 * OFFSET_WIDTH,
-          height: panelHeight,
-          borderRadius: 0,
-        },
-      ]}
-      position={[
-        -OFFSET + xPosition,
-        Y_COORD_START + panelHeight / 2 - 2 * OFFSET,
-        Z_COORD,
-      ]}></block>
-  );
+function initialTextBlockProps(count: number): TextBlockProps[] {
+  return Array.from({length: count}).map(() => {
+    // Push in non display blocks because mesh UI crashes if elements are add / removed from screen.
+    return {
+      y: Y_COORD_START,
+      startY: 0,
+      index: 0,
+      textOpacity: 0,
+      backgroundOpacity: 0,
+      width: MAX_WIDTH,
+      height: LINE_HEIGHT,
+      content: '',
+      isBottomLine: true,
+    };
+  });
 }
 
 export default function TextBlocks({
-  sentences,
-  blinkCursor,
+  translationText,
 }: {
-  sentences: string[][];
-  blinkCursor: boolean;
+  translationText: string;
 }) {
-  const showTranscriptionPanel =
-    getURLParams().ARTranscriptionType === 'lines_with_background';
-  const textBlocks: JSX.Element[] = [];
+  const transcriptStateRef = useRef<TranscriptState>({
+    textBlocksProps: initialTextBlockProps(NUM_LINES),
+    lastTranslationStringIndex: 0,
+    lastTranslationLineStartIndex: 0,
+    transcriptLines: [],
+    lastRenderTime: new Date().getTime(),
+  });
+
+  const transcriptState = transcriptStateRef.current;
+  const {textBlocksProps, lastTranslationStringIndex, lastRenderTime} =
+    transcriptState;
+
+  const [charsToRender, setCharsToRender] = useState<number>(0);
 
-  const [cursorBlinkOn, setCursorBlinkOn] = useState(false);
   useEffect(() => {
-    if (blinkCursor) {
-      const interval = setInterval(() => {
-        setCursorBlinkOn((prev) => !prev);
-      }, CURSOR_BLINK_INTERVAL_MS);
+    const interval = setInterval(() => {
+      const currentTime = new Date().getTime();
+      const charsToRender = Math.round(
+        ((currentTime - lastRenderTime) * CHARS_PER_SECOND) / 1000,
+      );
+      setCharsToRender(charsToRender);
+    }, RENDER_INTERVAL);
 
-      return () => clearInterval(interval);
-    } else {
-      setCursorBlinkOn(false);
-    }
-  }, [blinkCursor]);
+    return () => clearInterval(interval);
+  }, [lastRenderTime]);
 
-  // Start from bottom and populate most recent sentences by line until we fill max lines.
-  let currentY = Y_COORD_START;
-  for (let i = sentences.length - 1; i >= 0; i--) {
-    const sentenceLines = sentences[i];
-    for (let j = sentenceLines.length - 1; j >= 0; j--) {
-      if (textBlocks.length == NUM_LINES) {
-        if (showTranscriptionPanel) {
-          textBlocks.push(<TranscriptionPanel key={textBlocks.length} />);
-        }
-        return textBlocks;
-      }
+  const currentTime = new Date().getTime();
+  if (charsToRender < 1) {
+    return textBlocksProps.map((props, idx) => (
+      <TextBlock {...props} key={idx} />
+    ));
+  }
+
+  const nextTranslationStringIndex = Math.min(
+    lastTranslationStringIndex + charsToRender,
+    translationText.length,
+  );
+  const newString = translationText.substring(
+    lastTranslationStringIndex,
+    nextTranslationStringIndex,
+  );
+  if (nextTranslationStringIndex === lastTranslationStringIndex) {
+    transcriptState.lastRenderTime = currentTime;
+    return textBlocksProps.map((props, idx) => (
+      <TextBlock {...props} key={idx} />
+    ));
+  }
+
+  // Wait until more characters are accumulated if its just blankspace
+  if (/^\s*$/.test(newString)) {
+    transcriptState.lastRenderTime = currentTime;
+    return textBlocksProps.map((props, idx) => (
+      <TextBlock {...props} key={idx} />
+    ));
+  }
+
+  // Ideally we continue where we left off but this is complicated when we have mid-words. Recalculating for now
+  const runAll = true;
+  const newSentences = runAll
+    ? translationText.substring(0, nextTranslationStringIndex).split('\n')
+    : newString.split('\n');
+  const transcriptLines = runAll ? [''] : transcriptState.transcriptLines;
+  newSentences.forEach((newSentence, sentenceIdx) => {
+    const words = newSentence.split(/\s+/);
+    words.forEach((word) => {
+      const filteredWord = [...word]
+        .filter((c) => {
+          if (supportedCharSet().has(c)) {
+            return true;
+          }
+          console.error(
+            `Unsupported char ${c} - make sure this is supported in the font family msdf file`,
+          );
+          return false;
+        })
+        .join('');
 
-      const isBottomSentence = i === sentences.length - 1;
-      const isBottomLine = isBottomSentence && textBlocks.length === 0;
-      const y = currentY + LINE_HEIGHT / 2;
-      let textBlockLine = sentenceLines[j];
-      const numChars = textBlockLine.length;
+      const lastLineSoFar = transcriptLines[0];
+      const charCount = lastLineSoFar.length + filteredWord.length + 1;
 
-      if (cursorBlinkOn && isBottomLine) {
-        textBlockLine = textBlockLine + '|';
+      if (charCount <= CHARS_PER_LINE) {
+        transcriptLines[0] = lastLineSoFar + ' ' + filteredWord;
+      } else {
+        transcriptLines.unshift(filteredWord);
      }
+    });
 
-      // Accounting for potential cursor for block width (the +1)
-      const blockWidth =
-        (numChars + (isBottomLine ? 1.1 : 0) + (numChars < 10 ? 1 : 0)) *
-        CHAR_WIDTH;
-      const textOpacity = 1 - 0.1 * textBlocks.length;
-      textBlocks.push(
-        <TextBlock
-          key={textBlocks.length}
-          y={y}
-          startY={currentY}
-          index={`${sentences.length - i},${j}`}
-          textOpacity={textOpacity}
-          backgroundOpacity={0.98}
-          height={LINE_HEIGHT}
-          width={blockWidth}
-          // content={"BLOCK " + textBlocks.length + ": " + content}
-          content={textBlockLine}
-          enableAnimation={!isBottomLine}
-        />,
-      );
+    if (sentenceIdx < newSentences.length - 1) {
+      transcriptLines.unshift('\n');
+      transcriptLines.unshift('');
+    }
+  });
+
+  transcriptState.transcriptLines = transcriptLines;
+  transcriptState.lastTranslationStringIndex = nextTranslationStringIndex;
 
-      currentY = y + LINE_HEIGHT / 2;
+  const newTextBlocksProps: TextBlockProps[] = [];
+  let currentY = Y_COORD_START;
+
+  transcriptLines.forEach((line, i) => {
+    if (newTextBlocksProps.length == NUM_LINES) {
+      return;
    }
-    currentY += showTranscriptionPanel ? BLOCK_SPACING / 3 : BLOCK_SPACING;
-  }
 
-  const numRemainingBlocks = textBlocks.length - NUM_LINES;
-  if (numRemainingBlocks > 0) {
-    Array.from({length: numRemainingBlocks}).forEach(() => {
-      // Push in non display blocks because mesh UI crashes if elements are add / removed from screen.
-      textBlocks.push(
-        <TextBlock
-          key={textBlocks.length}
-          y={Y_COORD_START}
-          startY={0}
-          index="0,0"
-          textOpacity={0}
-          backgroundOpacity={0}
-          enableAnimation={false}
-          width={MAX_WIDTH}
-          height={LINE_HEIGHT}
-          content=""
-        />,
-      );
+    // const line = transcriptLines[i];
+    if (line === '\n') {
+      currentY += BLOCK_SPACING;
+      return;
+    }
+    const y = currentY + LINE_HEIGHT / 2;
+    const isBottomLine = newTextBlocksProps.length === 0;
+
+    const textOpacity = 1 - 0.1 * newTextBlocksProps.length;
+    newTextBlocksProps.push({
+      y,
+      startY: currentY,
+      index: i,
+      textOpacity,
+      backgroundOpacity: 0.98,
+      content: line,
+      isBottomLine,
    });
-  }
 
-  if (showTranscriptionPanel) {
-    textBlocks.push(<TranscriptionPanel key={textBlocks.length} />);
+    currentY = y + LINE_HEIGHT / 2;
+  });
+
+  const numRemainingBlocks = NUM_LINES - newTextBlocksProps.length;
+  if (numRemainingBlocks > 0) {
+    newTextBlocksProps.push(...initialTextBlockProps(numRemainingBlocks));
  }
-  return textBlocks;
+
+  transcriptState.textBlocksProps = newTextBlocksProps;
+  transcriptState.lastRenderTime = currentTime;
+  return newTextBlocksProps.map((props, idx) => (
+    <TextBlock {...props} key={idx} />
+  ));
 }
streaming-react-app/src/react-xr/XRConfig.tsx CHANGED
@@ -25,29 +25,15 @@ import {BLACK, WHITE} from './Colors';
 import robotoFontFamilyJson from '../assets/RobotoMono-Regular-msdf.json?url';
 import robotoFontTexture from '../assets/RobotoMono-Regular.png';
 import {getURLParams} from '../URLParams';
-import TextBlocks, {CHARS_PER_LINE} from './TextBlocks';
+import TextBlocks from './TextBlocks';
 import {BufferedSpeechPlayer} from '../createBufferedSpeechPlayer';
 import {CURSOR_BLINK_INTERVAL_MS} from '../cursorBlinkInterval';
+import supportedCharSet from './supportedCharSet';
 
 // Adds on react JSX for add-on libraries to react-three-fiber
 extend(ThreeMeshUI);
 extend({TextGeometry});
 
-async function fetchSupportedCharSet(): Promise<Set<string>> {
-  try {
-    const response = await fetch(robotoFontFamilyJson);
-    const fontFamily = await response.json();
-
-    return new Set(fontFamily.info.charset);
-  } catch (e) {
-    console.error('Failed to fetch supported XR charset', e);
-    return new Set();
-  }
-}
-
-let supportedCharSet = new Set();
-fetchSupportedCharSet().then((result) => (supportedCharSet = result));
-
 // This component wraps any children so it is positioned relative to the camera, rather than from the origin
 function CameraLinkedObject({children}) {
   const camera = useThree((state) => state.camera);
@@ -76,10 +62,7 @@ function ThreeMeshUIComponents({
           translationSentences={translationSentences}
         />
       ) : (
-        <TranscriptPanelBlocks
-          animateTextDisplay={animateTextDisplay}
-          translationSentences={translationSentences}
-        />
+        <TranscriptPanelBlocks translationSentences={translationSentences} />
       )}
       {skipARIntro ? null : (
         <IntroPanel started={started} setStarted={setStarted} />
@@ -153,7 +136,7 @@ function TranscriptPanelSingleBlock({
      (wordChunks, currentWord) => {
        const filteredWord = [...currentWord]
          .filter((c) => {
-            if (supportedCharSet.has(c)) {
+            if (supportedCharSet().has(c)) {
              return true;
            }
            console.error(
@@ -223,59 +206,14 @@
 // Splits up the lines into separate blocks to treat each one separately.
 // This allows changing of opacity, animating per line, changing height / width per line etc
 function TranscriptPanelBlocks({
-  animateTextDisplay,
   translationSentences,
 }: {
-  animateTextDisplay: boolean;
   translationSentences: TranslationSentences;
 }) {
-  const [didReceiveTranslationSentences, setDidReceiveTranslationSentences] =
-    // Currently causing issues with displaying dummy text, skip over
-    useState(false);
-
-  // Normally we don't setState in render, but here we need to for computed state, and this if statement assures it won't loop infinitely
-  if (!didReceiveTranslationSentences && translationSentences.length > 0) {
-    setDidReceiveTranslationSentences(true);
-  }
-
-  const initialPrompt = 'Listening...';
-  const transcriptSentences: string[] = didReceiveTranslationSentences
-    ? translationSentences
-    : [initialPrompt];
-
-  // The transcript is an array of sentences. For each sentence we break this down into an array of words per line.
-  // This is needed so we can "scroll" through without changing the order of words in the transcript
-  const sentenceLines = transcriptSentences.map((sentence) => {
-    const words = sentence.split(/\s+/);
-    // Here we break each sentence up with newlines so all words per line fit within the panel
-    return words.reduce(
-      (wordChunks, currentWord) => {
-        const filteredWord = [...currentWord]
-          .filter((c) => {
-            if (supportedCharSet.has(c)) {
-              return true;
-            }
-            console.error(
-              `Unsupported char ${c} - make sure this is supported in the font family msdf file`,
-            );
-            return false;
-          })
-          .join('');
-        const lastLineSoFar = wordChunks[wordChunks.length - 1];
-        const charCount = lastLineSoFar.length + filteredWord.length + 1;
-        if (charCount <= CHARS_PER_LINE) {
-          wordChunks[wordChunks.length - 1] =
-            lastLineSoFar + ' ' + filteredWord;
-        } else {
-          wordChunks.push(filteredWord);
-        }
-        return wordChunks;
-      },
-      [''],
-    );
-  });
   return (
-    <TextBlocks sentences={sentenceLines} blinkCursor={animateTextDisplay} />
+    <TextBlocks
+      translationText={'Listening...\n' + translationSentences.join('\n')}
+    />
   );
 }
 
@@ -361,6 +299,8 @@ export type XRConfigProps = {
   startStreaming: () => Promise<void>;
   stopStreaming: () => Promise<void>;
   debugParam: boolean | null;
+  onARVisible?: () => void;
+  onARHidden?: () => void;
 };
 
 export default function XRConfig(props: XRConfigProps) {
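The TranscriptPanelBlocks hunk above shows the new contract for TextBlocks: instead of pre-split sentence/line arrays, it now receives a single newline-separated string and handles its own character-rate reveal (CHARS_PER_SECOND) and line wrapping (CHARS_PER_LINE). A small hedged sketch of that call pattern; the sample sentences are made up:

// Sentences arrive as an array (TranslationSentences) and are flattened to one string;
// TextBlocks then reveals it incrementally and wraps lines itself.
const translationSentences = ['Hello everyone.', 'How is the audio today?'];
const translationText = 'Listening...\n' + translationSentences.join('\n');

// Rendered inside TranscriptPanelBlocks as:
// <TextBlocks translationText={translationText} />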
streaming-react-app/src/react-xr/XRDialog.tsx CHANGED
@@ -8,27 +8,12 @@ import {
   Typography,
 } from '@mui/material';
 import CloseIcon from '@mui/icons-material/Close';
+import {XRConfigProps} from './XRConfig';
 import {useEffect, useRef, useState} from 'react';
 import './XRDialog.css';
 import {getRenderer, init, updatetranslationText} from './XRRendering';
 import ARButton from './ARButton';
 import {getURLParams} from '../URLParams';
-import { BufferedSpeechPlayer } from '../createBufferedSpeechPlayer';
-import { TranslationSentences } from '../types/StreamingTypes';
-import { RoomState } from '../types/RoomState';
-
-type XRConfigProps = {
-  animateTextDisplay: boolean;
-  bufferedSpeechPlayer: BufferedSpeechPlayer;
-  translationSentences: TranslationSentences;
-  roomState: RoomState | null;
-  roomID: string | null;
-  startStreaming: () => Promise<void>;
-  stopStreaming: () => Promise<void>;
-  debugParam: boolean | null;
-  onARVisible?: () => void;
-  onARHidden?: () => void;
-};
 
 function XRContent(props: XRConfigProps) {
   const debugParam = getURLParams().debug;
streaming-react-app/src/types/StreamingTypes.ts CHANGED
@@ -55,9 +55,9 @@ export const SUPPORTED_INPUT_SOURCES: Array<{
   value: SupportedInputSource;
   label: string;
 }> = [
-  { value: 'userMedia', label: 'Microphone' },
-  { value: 'displayMedia', label: 'Browser Tab' },
-];
+  {value: 'userMedia', label: 'Microphone'},
+  {value: 'displayMedia', label: 'Browser Tab (Chrome only)'},
+];
 
 export type StartStreamEventConfig = {
   event: 'config';
@@ -70,6 +70,7 @@ export type StartStreamEventConfig = {
 };
 
 export interface BrowserAudioStreamConfig {
+  echoCancellation: boolean;
   noiseSuppression: boolean;
   echoCancellation: boolean;
 }
streaming-react-app/vite.config.ts CHANGED
@@ -1,10 +1,5 @@
 import { defineConfig } from 'vite';
 import react from '@vitejs/plugin-react';
-// import {resolve} from 'path';
-
-// const rootDir = resolve(__dirname, 'src');
-// const assetsDir = resolve(rootDir, 'assets');
-// const typesDir = resolve(__dirname, 'types');
 
 // https://vitejs.dev/config/
 export default defineConfig(({ command }) => {
streaming-react-app/yarn.lock CHANGED
@@ -1853,11 +1853,6 @@ isexe@^2.0.0:
   resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz"
   integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
 
-iso-639-1@^3.1.0:
-  version "3.1.0"
-  resolved "https://registry.npmjs.org/iso-639-1/-/iso-639-1-3.1.0.tgz"
-  integrity sha512-rWcHp9dcNbxa5C8jA/cxFlWNFNwy5Vup0KcFvgA8sPQs9ZeJHj/Eq0Y8Yz2eL8XlWYpxw4iwh9FfTeVxyqdRMw==
-
 isomorphic-unfetch@^3.0.0:
   version "3.1.0"
   resolved "https://registry.npmjs.org/isomorphic-unfetch/-/isomorphic-unfetch-3.1.0.tgz"