<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>PeerConnection Demo 1</title>
<style>
video {
  border:5px solid black;
  width:480px;
  height:360px;
}
button {
  font: 18px sans-serif;
  padding: 8px;
}
textarea {
  font-family: monospace;
  margin: 2px;
  width:480px;
  height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<script>

var RTCPeerConnection = null;
var getUserMedia = null;
var attachMediaStream = null;
var reattachMediaStream = null;
var webrtcDetectedBrowser = null;
// Adapter shim: detect the browser's prefixed WebRTC API and expose it
// through the unprefixed names above (Chrome-only for now).

// Firefox is disabled for now because its implementation does not work
// reliably yet; its shim is kept commented out for later re-enabling.
/*if (navigator.mozGetUserMedia) {
  console.log("This appears to be Firefox");

  webrtcDetectedBrowser = "firefox";

  // The RTCPeerConnection object.
  RTCPeerConnection = mozRTCPeerConnection;

  // The RTCSessionDescription object.
  RTCSessionDescription = mozRTCSessionDescription;

  // The RTCIceCandidate object.
  RTCIceCandidate = mozRTCIceCandidate;

  // Get UserMedia (only difference is the prefix).
  // Code from Adam Barth.
  getUserMedia = navigator.mozGetUserMedia.bind(navigator);

  // Attach a media stream to an element.
  attachMediaStream = function(element, stream) {
    console.log("Attaching media stream");
    element.mozSrcObject = stream;
    element.play();
  };

  reattachMediaStream = function(to, from) {
    console.log("Reattaching media stream");
    to.mozSrcObject = from.mozSrcObject;
    to.play();
  };

  // Fake get{Video,Audio}Tracks
  MediaStream.prototype.getVideoTracks = function() {
    return [];
  };

  MediaStream.prototype.getAudioTracks = function() {
    return [];
  };
} else*/ if (navigator.webkitGetUserMedia) {
  console.log("This appears to be Chrome");

  webrtcDetectedBrowser = "chrome";

  // The RTCPeerConnection object.
  RTCPeerConnection = webkitRTCPeerConnection;

  // Get UserMedia (only difference is the prefix).
  // Code from Adam Barth.
  getUserMedia = navigator.webkitGetUserMedia.bind(navigator);

  // Attach a media stream to an element.
  attachMediaStream = function(element, stream) {
    element.src = webkitURL.createObjectURL(stream);
  };

  reattachMediaStream = function(to, from) {
    to.src = from.src;
  };

  // The representation of tracks in a stream is changed in M26.
  // Unify them for earlier Chrome versions in the coexisting period.
  if (!webkitMediaStream.prototype.getVideoTracks) {
    webkitMediaStream.prototype.getVideoTracks = function() {
      return this.videoTracks;
    };
  }

  if (!webkitMediaStream.prototype.getAudioTracks) {
    webkitMediaStream.prototype.getAudioTracks = function() {
      return this.audioTracks;
    };
  }
} else {
  console.log("Browser does not appear to be WebRTC-capable");
}

// getUserMedia is still null when no supported prefix was found above;
// surface that to the user as well as to the console.
if (getUserMedia == null)
    alert("Browser does not appear to be WebRTC-capable")

// End of browser detector

var pc1;          // The single local RTCPeerConnection for this demo.
var localstream;  // The MediaStream obtained from getUserMedia.

function trace(text) {
  // Log `text` prefixed with a seconds-precision page timestamp.
  // A single trailing newline is stripped so log lines stay compact.
  if (text.charAt(text.length - 1) === '\n') {
    text = text.slice(0, -1);
  }
  var stamp = (performance.now() / 1000).toFixed(3);
  console.log(stamp + ": " + text);
}

function gotStream(stream) {
  // getUserMedia success callback: show the captured stream locally
  // (via the polyfill wrapper) and immediately start the call.
  trace("Received local stream");
  localstream = stream;
  attachMediaStream(vid1, stream);
  call();
}

function start() {
  // Request a constrained local video stream (no audio); errors are
  // intentionally ignored by the empty failure callback.
  trace("Requesting local stream");
  var constraints = {
    video: {mandatory: {maxFrameRate: 15, maxWidth: 320}}
  };
  getUserMedia(constraints, gotStream, function() {});
}

function call() {
  // Create the peer connection, apply the server-built remote offer and
  // generate a local answer for it.
  // Fix: videoTracks, audioTracks and sdp were implicit globals; they are
  // now declared with `var` (sdp stays visible to the error callback
  // through the closure).
  trace("Starting call");
  var videoTracks = localstream.getVideoTracks();
  var audioTracks = localstream.getAudioTracks();
  if (videoTracks.length > 0)
    trace('Using Video device: ' + videoTracks[0].label);
  if (audioTracks.length > 0)
    trace('Using Audio device: ' + audioTracks[0].label);

  // Legacy "url" key is what the prefixed Chrome builds this demo targets
  // expect (the modern API uses "urls").
  var servers = { "iceServers": [{ "url": "stun:77.72.174.167:3478" }] };
  pc1 = new RTCPeerConnection(servers);
  trace("Created local peer connection object pc1");
  pc1.onicecandidate = iceCallback1;
  pc1.onaddstream = gotRemoteStream;

  pc1.addStream(localstream);

  // The offer SDP is assembled from the template fragments below plus the
  // ice_user / ice_pwd / key values the server appends to this file.
  var sdp = sdp1 + ice_user + sdp2 + ice_pwd + sdp3 + key + sdp4;
  var remoteDesc = new RTCSessionDescription({type: "offer", sdp: sdp});
  pc1.setRemoteDescription(remoteDesc, function () { // success callback
    trace("setRemoteDescription ok");
  }, function (s_error) { // error callback
    trace("setRemoteDescription no ok: " + s_error);
    trace("sdp:\n" + sdp);
  });

  pc1.createAnswer(gotDescription1);
}

function gotDescription1(desc) {
  // createAnswer success callback: install the answer locally, then feed
  // the server-supplied remote candidates (appended to this file as
  // remoteCandidates) into the connection.
  // Fix: `candidate` and `sdpline` were implicit globals; declared with
  // `var`, and the missing statement semicolon added.
  pc1.setLocalDescription(desc);
  trace("Generated answer from pc1 \n" + desc.sdp);
  for (var candidate in remoteCandidates) {
    var sdpline = remoteCandidates[candidate];
    trace(">" + sdpline + "<");
    pc1.addIceCandidate(new RTCIceCandidate({sdpMLineIndex: 1, sdpMid: "video", candidate: sdpline}));
  }
}

function hangup() {
  // Tear the call down: close the peer connection and drop our reference.
  trace("Ending call");
  pc1.close();
  pc1 = null;
}

function gotRemoteStream(e) {
  // onaddstream handler for the remote stream.
  // Fix: use the file's attachMediaStream polyfill instead of calling
  // webkitURL.createObjectURL directly, consistent with how the local
  // stream is attached in gotStream().
  attachMediaStream(vid2, e.stream);
  trace("Received remote stream");
}

function iceCallback1(event) {
  // Each gathered candidate is just logged; a null candidate signals the
  // end of gathering, at which point the complete local SDP (with all
  // candidates embedded) is sent back to the server.
  if (event.candidate == null) {
    post_ice_sdp("/", pc1.localDescription.sdp);
  } else {
    trace("Local ICE candidate: \n" + event.candidate.candidate);
  }
}

var reply_sent = false;

function post_ice_sdp(path, sdp) {
  // Send the local SDP (answer plus gathered candidates) back to the
  // server. NOTE(review): despite the name, this issues a GET with the
  // SDP URL-encoded in the query string, not a POST.
  // Sent at most once per page load (reply_sent guard).
  // Fix: "occured" -> "occurred" in the error log message; missing
  // semicolons added.
  if (reply_sent == true) {
    trace("Already sent");
    return;
  }

  reply_sent = true;

  var http = new XMLHttpRequest();

  var params = "sdp=" + encodeURIComponent(sdp);
  trace("Params: " + params);

  http.onreadystatechange = function() {
    if (http.readyState == 4) {
      if (http.status == 200) {
        trace("Request ok");
      } else {
        trace("An error has occurred making the request: " + http.status);
      }
    }
  };

  http.open("GET", path + "?" + params, true);

  http.send(null);
}

window.onload = function() {
  // Entry point: dump the server-injected remote candidates for
  // debugging, then start local capture (which in turn starts the call).
  // Fix: the for-in loop variable was an implicit global; declared with
  // `var`, and the missing terminating semicolon added.
  trace("On load");
  for (var candidate in remoteCandidates) {
    trace(">" + remoteCandidates[candidate] + "<");
  }
  start();
};

// SDP offer template fragments. call() assembles the remote offer as:
//   sdp1 + ice_user + sdp2 + ice_pwd + sdp3 + key + sdp4
// describing a single BUNDLEd, rtcp-muxed VP8 video m-line with SDES
// (a=crypto) keying.
sdp1 = "v=0\r\no=- 2750483185 2 IN IP4 127.0.0.1\r\ns=\r\nt=0 0\r\na=group:BUNDLE video\r\nm=video 1 RTP/SAVPF 100\r\nc=IN IP4 0.0.0.0\r\na=rtcp:1 IN IP4 0.0.0.0\r\na=ice-ufrag:";
sdp2 = "\r\na=ice-pwd:";
sdp3 = "\r\na=sendrecv\r\na=mid:video\r\na=rtcp-mux\r\na=crypto:0 AES_CM_128_HMAC_SHA1_80 inline:";
sdp4 = "\r\na=rtpmap:100 VP8/90000\r\n";

// The server appends the missing pieces (ice_user, ice_pwd, key,
// remoteCandidates) to this file at execution time.
