<!DOCTYPE html>
<html lang="zh">

<head>
    <meta charset="utf-8">
    <!-- Responsive viewport; zoom deliberately left enabled. -->
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>WebRTC</title>
</head>

<body>
    <div class="container">
        <!-- <h1>Capture local video</h1> -->
        <hr>
        <div class="video_container">
            <button type="button" onclick="stop()">关闭</button>
            <button type="button" onclick="getSettings()">返回约束属性</button>
            <button type="button" onclick="getSupportedConstraints()">返回可修改约束属性</button>
            <button type="button" onclick="getConstraints()">返回约束</button>
            <button type="button" onclick="applyConstraints_width()">修改约束width</button>
            <button type="button" onclick="applyConstraints_audio()">修改约束channelCount</button>
            <button type="button" onclick="iofo()">iofo</button>
            <audio controls id="myAudio"></audio>
            <img id="images" width="100" alt="">
        </div>
        <button type="button" onclick="updateDeviceList()">updateDeviceList</button>
        <div id="audioList"></div>
        <div id="videoList"></div>
        <hr>
        <!-- muted so autoplay is permitted by browser autoplay policy -->
        <video id="local_video" width="640" height="480" autoplay muted></video>
        <br>
        <button type="button" onclick="getUserMedia()">获取本地视频</button>
        <button type="button" onclick="stopUserMediaAudio()">关闭user音频</button>
        <button type="button" onclick="stopUserMediaVideo()">关闭user视频</button>
        <hr>
        <video id="local_video2" width="320" height="240" autoplay muted></video>
        <br>
        <button type="button" onclick="getDisplayMedia()">获取本地屏幕</button>
        <button type="button" onclick="stopDisplayMediaAudio()">关闭Display音频</button>
        <button type="button" onclick="stopDisplayMediaVideo()">关闭Display视频</button>
        <hr>
        user音频
        <button type="button" onclick="startRecorder()">startRecorder</button>
        <button type="button" onclick="stopRecorder()">stopRecorder</button>
        <!-- href/download are filled in by the recorder's ondataavailable handler -->
        <a class="button" id="downloadButton">
            Download
        </a>
        <hr>
        <video id="local_video3" width="320" height="240" autoplay muted></video>
        <br>
        <button type="button" onclick="canvasVideo()">canvasVideo</button>
        <button type="button" onclick="addvideoStream()">addvideoStream</button>
        <!-- <button type="button" onclick="addvideoStream2()">addvideoStream2</button> -->
        <button type="button" onclick="videoStreamstop()">stop</button>
        <canvas id="canvas" width="450" height="300"></canvas>
    </div>
    <!-- <script src="https://webrtc.github.io/adapter/adapter-latest.js"></script> -->
    <script src="./media.js"></script>
    <script src="./myMediaRecorder.js"></script>
    <script src="./mycanvas.js"></script>
</body>
<script>
// Cached DOM element references.
// NOTE(review): "loacl" is a typo of "local"; kept as-is because these
// names are used throughout the rest of this script.
const loaclVideo = document.getElementById('local_video');
const loaclVideo2 = document.getElementById('local_video2');
const loaclVideo3 = document.getElementById('local_video3');
const myAudio = document.getElementById('myAudio');
const images = document.getElementById('images');
const canvas = document.getElementById('canvas');
const downloadButton = document.getElementById('downloadButton');
// Media helper wrapper — the `media` class is defined in ./media.js.
let Media = new media()
// Shared capture state: the active getUserMedia stream and its first
// audio/video tracks (populated by getUserMedia below, read by the
// constraint helpers and startRecorder).
let stream, audioTracks, videoTracks
// Start camera + microphone capture, remember the stream and its first
// audio/video tracks, and show it in the main <video> element.
// Assigned without a declaration so it becomes a window global
// reachable from the inline onclick attributes in the markup.
getUserMedia = async () => {
    stream = await Media.getUserMedia({ video: true, audio: true })
    loaclVideo.srcObject = stream
    audioTracks = stream.getAudioTracks()[0];
    videoTracks = stream.getVideoTracks()[0];
}
// Start screen capture and show it in the second <video> element.
getDisplayMedia = async () => {
    const displayStream = await Media.getDisplayMedia()
    loaclVideo2.srcObject = displayStream
}
// Refresh the #audioList/#videoList device listings (see ./media.js).
updateDeviceList = () => { Media.updateDeviceList() }
// Stop whichever capture streams are currently attached to the first
// two <video> elements. Shadows window.stop() for the onclick handler.
stop = () => {
    for (const el of [loaclVideo, loaclVideo2]) {
        if (el.srcObject) {
            Media.stop(el.srcObject)
        }
    }
}
// Log the constrainable properties this browser supports.
getSupportedConstraints = () => { console.log(Media.getSupportedConstraints()) }
// Log the current settings of the captured audio and video tracks.
getSettings = () => {
    for (const track of [audioTracks, videoTracks]) {
        console.log(Media.getSettings(track))
    }
}
// Log the constraints currently applied to the audio and video tracks.
getConstraints = () => {
    for (const track of [audioTracks, videoTracks]) {
        console.log(Media.getConstraints(track))
    }
}
// Ask the video track to re-negotiate to a 320px-wide capture.
applyConstraints_width = () => {
    const widthConstraint = { width: 320 }
    Media.applyConstraints(videoTracks, widthConstraint)
}
// Ask the audio track to apply a latency constraint of 2 seconds.
applyConstraints_audio = () => {
    const latencyConstraint = { latency: 2 }
    Media.applyConstraints(audioTracks, latencyConstraint)
}
// Thin wrappers exposing the Media helpers to the inline onclick
// handlers in the markup.
stopUserMediaAudio = () => { Media.stopUserMediaAudio() }
stopUserMediaVideo = () => { Media.stopUserMediaVideo() }
stopDisplayMediaAudio = () => { Media.stopDisplayMediaAudio() }
stopDisplayMediaVideo = () => { Media.stopDisplayMediaVideo() }
// "iofo" mirrors the method name in ./media.js (presumably a typo of
// "info" — renaming would have to happen there first).
iofo = () => { Media.iofo() }
// ----------------------------------------------------------------
// Recording: wraps MediaRecorder via myMediaRecorder (./myMediaRecorder.js).
let mymediaRecorder
// Start recording the current user-media stream. Each recorded chunk
// is wrapped in a webm Blob, played back in the <audio> element and
// exposed through the Download link.
startRecorder = () => {
    mymediaRecorder = new myMediaRecorder(stream)
    mymediaRecorder.start()
    mymediaRecorder.onstop = e => {
        console.log('onstop', e)
    }
    mymediaRecorder.ondataavailable = e => {
        console.log(e)
        // Fixed MIME string: the original 'video/webm\;codecs=vp8' used a
        // no-op escape — the resulting string is identical to this one.
        const blob = new Blob([e.data], { 'type': 'video/webm;codecs=vp8' });
        // Revoke the previous blob URL (if any) before minting a new one,
        // so repeated recordings don't leak object URLs.
        if (myAudio.src) {
            URL.revokeObjectURL(myAudio.src)
        }
        myAudio.src = URL.createObjectURL(blob);
        myAudio.load()
        downloadButton.href = myAudio.src;
        downloadButton.download = "video_webm_codecs_vp8.webm";
    }
}
// Stop the active recording. Guarded so clicking "stopRecorder" before
// "startRecorder" no longer throws on the undefined recorder.
stopRecorder = () => {
    if (mymediaRecorder) {
        mymediaRecorder.stop()
    }
}
// +++++++++++++++++++++++++++++++++++++++++++++++++++
// Canvas compositing helpers — the `mycanvas` class lives in ./mycanvas.js.
let Mycanvas = new mycanvas()

// Snapshot the main video onto the canvas and show the result in the
// <img> element. Uses the cached loaclVideo reference for consistency
// with the rest of this script instead of the implicit id-based global
// `local_video` (both resolve to the same element).
canvasVideo = () => {
    images.src = Mycanvas.addvideo(loaclVideo, 0, 0, loaclVideo.width, loaclVideo.height)
}
// Composite both capture videos onto the canvas and pipe the resulting
// canvas stream into the third <video> element. Uses the cached element
// references instead of the implicit id-based globals.
addvideoStream = () => {
    loaclVideo3.srcObject = Mycanvas.addvideoStream(
        loaclVideo, 0, 0, loaclVideo.width, loaclVideo.height,
        loaclVideo2, 320, 0, loaclVideo2.width, loaclVideo2.height
    )
}
// Draw only the display-capture video onto the canvas.
// NOTE(review): the button wired to this handler is commented out in
// the markup; kept for parity with addvideoStream.
addvideoStream2 = () => {
    Mycanvas.addvideoStream(loaclVideo2, 320, 0, loaclVideo2.width, loaclVideo2.height)
}
// Stop the canvas capture stream started by addvideoStream.
videoStreamstop = () => { Mycanvas.videoStreamstop() }
// NOTE(review): this `draw` is empty and never invoked from this file —
// looks like dead code; confirm nothing external (e.g. ./mycanvas.js)
// references the global before removing it.
draw=()=>{

}
// Kick off the canvas render loop immediately on page load.
    Mycanvas.draw(canvas)
</script>

</html>