import * as THREE from 'three';
import {
  Button,
  Dialog,
  DialogContent,
  DialogTitle,
  IconButton,
  Typography,
} from '@mui/material';
import CloseIcon from '@mui/icons-material/Close';
import {useEffect, useRef, useState} from 'react';
import './XRDialog.css';
import {getRenderer, init, updatetranslationText} from './XRRendering';
import ARButton from './ARButton';
import {getURLParams} from '../URLParams';
import {BufferedSpeechPlayer} from '../createBufferedSpeechPlayer';
import {TranslationSentences} from '../types/StreamingTypes';
import {RoomState} from '../types/RoomState';

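// Props shared by the AR dialog and its content: streaming controls, the
// buffered speech player, and the latest translation state for display.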
type XRConfigProps = {
  animateTextDisplay: boolean;
  bufferedSpeechPlayer: BufferedSpeechPlayer;
  translationSentences: TranslationSentences;
  roomState: RoomState | null;
  roomID: string | null;
  startStreaming: () => Promise<void>;
  stopStreaming: () => Promise<void>;
  debugParam: boolean | null;
  onARVisible?: () => void;
  onARHidden?: () => void;
};

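/**
 * Inner dialog content: keeps the XR caption text in sync with incoming
 * translations, lazily creates (or reuses) the Three.js renderer, and
 * renders the AR entry button.
 */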
function XRContent(props: XRConfigProps) {
  const debugParam = getURLParams().debug;
  const {translationSentences} = props;
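  // Push the latest translation sentences into the XR scene whenever they change.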
  useEffect(() => {
    updatetranslationText(translationSentences);
  }, [translationSentences]);

  const [renderer, setRenderer] = useState<THREE.WebGLRenderer | null>(null);
  const canvasRef = useRef<HTMLDivElement | null>(null);
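  // Reuse the shared renderer if one already exists; otherwise initialize a new
  // one, attaching it to the debug canvas only when the debug URL param is set.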
  useEffect(() => {
    if (canvasRef.current != null || debugParam === false) {
      const existingRenderer = getRenderer();
      if (existingRenderer) {
        setRenderer(existingRenderer);
      } else {
        const newRenderer = init(
          400,
          300,
          debugParam ? canvasRef.current : null,
        );
        setRenderer(newRenderer);
      }
    }
  }, [canvasRef.current]);

  return (
    <DialogContent
      dividers
      className="xr-dialog-container xr-dialog-text-center">
      <Typography gutterBottom>
        Welcome to the Seamless team streaming demo! Here you will experience
        AI-powered text and audio translation in real time.
      </Typography>
      <div ref={canvasRef} className="xr-dialog-canvas-container" />
      <ARButton
        bufferedSpeechPlayer={props.bufferedSpeechPlayer}
        renderer={renderer}
        onARHidden={props.onARHidden}
        onARVisible={props.onARVisible}
      />
    </DialogContent>
  );
}

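/**
 * Top-level entry point: renders the "Enter AR Experience" button and, when
 * clicked, opens the dialog hosting the XR content.
 */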
export default function XRDialog(props: XRConfigProps) {
  const [isDialogOpen, setIsDialogOpen] = useState<boolean>(false);

  return (
    <>
      <Button variant="contained" onClick={() => setIsDialogOpen(true)}>
        Enter AR Experience
      </Button>
      {isDialogOpen && (
        <Dialog onClose={() => setIsDialogOpen(false)} open={true}>
          <DialogTitle sx={{m: 0, p: 2}} className="xr-dialog-text-center">
            FAIR Seamless Streaming Demo
          </DialogTitle>
          <IconButton
            aria-label="close"
            onClick={() => setIsDialogOpen(false)}
            sx={{
              position: 'absolute',
              right: 8,
              top: 8,
              color: (theme) => theme.palette.grey[500],
            }}>
            <CloseIcon />
          </IconButton>
          <XRContent {...props} />
        </Dialog>
      )}
    </>
  );
}