Merge pull request #98 from jiayliu/master

Add a WebSocket signaling server implementation.
diff --git a/.gitignore b/.gitignore
index fd20fdd..a671fd5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,5 @@
-
+node_modules
+.DS_Store
 *.pyc
+validation-report.json
+validation-status.json
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..20fd86b
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,3 @@
+language: node_js
+node_js:
+  - 0.10
diff --git a/Gruntfile.js b/Gruntfile.js
new file mode 100644
index 0000000..8253728
--- /dev/null
+++ b/Gruntfile.js
@@ -0,0 +1,85 @@
+'use strict';
+
+/* globals module */
+
+module.exports = function(grunt) {
+
+  // configure project
+  grunt.initConfig({
+    // make node configurations available
+    pkg: grunt.file.readJSON('package.json'),
+
+    csslint: {
+      options: {
+        csslintrc: 'samples/web/.csslintrc'
+      },
+      strict: {
+        options: {
+          import: 2
+        },
+        src: ['samples/web/content/**/*.css']
+      },
+      lax: {
+        options: {
+          import: false
+        },
+        src: ['samples/web/content/**/*.css']
+      }
+    },
+
+    htmlhint: {
+      html1: {
+        src: [
+          'samples/web/content/apprtc/index.html',
+          'samples/web/content/datachannel/index.html',
+          'samples/web/content/getusermedia/**/index.html',
+          'samples/web/content/peerconnection/**/index.html'
+        ]
+      }
+    },
+
+    jscs: {
+      src: 'samples/web/content/**/*.js',
+      options: {
+        config: 'google', // per the Google style guide; could use '.jscsrc' instead
+        'excludeFiles': [
+          'samples/web/content/manual-test/**/*',
+          'samples/web/content/apprtc/js/vr.js',
+          'samples/web/content/apprtc/js/stereoscopic.js',
+          'samples/web/content/getusermedia/desktopcapture/extension/content-script.js'
+        ],
+        requireCurlyBraces: ['if']
+      }
+    },
+
+    jshint: {
+      options: {
+        ignores: [
+          'samples/web/content/manual-test/**/*',
+          'samples/web/content/getusermedia/desktopcapture/**',
+          'samples/web/content/apprtc/js/stereoscopic.js',
+          'samples/web/content/apprtc/js/ga.js',
+          'samples/web/content/apprtc/js/vr.js'
+        ],
+        // use default .jshintrc files
+        jshintrc: true
+      },
+      // files to validate
+      // can choose more than one name + array of paths
+      // usage with this name: grunt jshint:files
+      files: ['samples/web/content/**/*.js']
+    },
+
+  });
+
+  // enable plugins
+  grunt.loadNpmTasks('grunt-contrib-csslint');
+  grunt.loadNpmTasks('grunt-htmlhint');
+  grunt.loadNpmTasks('grunt-jscs');
+  grunt.loadNpmTasks('grunt-contrib-jshint');
+
+  // set default tasks to run when grunt is called without parameters
+  grunt.registerTask('default', ['csslint', 'htmlhint', 'jscs', 'jshint']);
+  // also possible to call JavaScript directly in registerTask()
+  // or to call external tasks with grunt.loadTasks()
+};
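
As the comments above say, registerTask() can also run plain JavaScript before queuing other tasks. A minimal sketch (the 'lint-js' task name is hypothetical, not part of this change):

    grunt.registerTask('lint-js', 'Run only the JavaScript checks.', function() {
      // Arbitrary JavaScript can execute here before other tasks are queued.
      grunt.log.writeln('Linting JavaScript only...');
      grunt.task.run(['jscs', 'jshint']);
    });
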
diff --git a/README.md b/README.md
index f2c1f37..652eb82 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,5 @@
+![Travis](https://travis-ci.org/samdutton/webrtc.svg?branch=demo-updates)
+
 # WebRTC code samples #
 
 This is a repository for client-side HTML/CSS/JavaScript WebRTC code samples.
@@ -14,44 +16,51 @@
 
 ## The demos ##
 
-[getUserMedia()](http://googlechrome.github.io/webrtc/samples/web/content/getusermedia)
+### getUserMedia ###
 
-[getUserMedia() + Canvas](http://googlechrome.github.io/webrtc/samples/web/content/getusermedia-canvas)
+[Basic getUserMedia demo](https://googlechrome.github.io/webrtc/samples/web/content/getusermedia/gum)
 
-[getUserMedia() + Canvas + CSS Filters](http://googlechrome.github.io/webrtc/samples/web/content/getusermedia-filter)
+[getUserMedia + canvas](https://googlechrome.github.io/webrtc/samples/web/content/getusermedia/canvas)
 
-[getUserMedia() with resolution constraints](http://googlechrome.github.io/webrtc/samples/web/content/getusermedia-resolution)
+[getUserMedia + canvas + CSS Filters](https://googlechrome.github.io/webrtc/samples/web/content/getusermedia/filter)
 
-[getUserMedia() with camera/mic selection](http://googlechrome.github.io/webrtc/samples/web/content/getusermedia-source)
+[getUserMedia with resolution constraints](https://googlechrome.github.io/webrtc/samples/web/content/getusermedia/resolution)
 
-[Audio-only getUserMedia() output to local audio element](http://googlechrome.github.io/webrtc/samples/web/content/getusermedia-audio)
+[getUserMedia with camera/mic selection](https://googlechrome.github.io/webrtc/samples/web/content/getusermedia/source)
 
-[Audio-only getUserMedia() displaying volume](http://googlechrome.github.io/webrtc/samples/web/content/getusermedia-volume)
+[Audio-only getUserMedia output to local audio element](https://googlechrome.github.io/webrtc/samples/web/content/getusermedia/audio)
 
-[Data channels](http://googlechrome.github.io/webrtc/samples/web/content/datachannel)
+[Audio-only getUserMedia displaying volume](https://googlechrome.github.io/webrtc/samples/web/content/getusermedia/volume)
 
-[Peer connection](http://googlechrome.github.io/webrtc/samples/web/content/peerconnection)
+[Face tracking](https://googlechrome.github.io/webrtc/samples/web/content/getusermedia/face)
 
-[Audio-only peer connection](http://googlechrome.github.io/webrtc/samples/web/content/peerconnection-audio)
+### RTCPeerConnection ###
 
-[Multiple peer connections](http://googlechrome.github.io/webrtc/samples/web/content/multiple)
+[Basic peer connection](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/pc1)
 
-[Multiple relay](http://googlechrome.github.io/webrtc/samples/web/content/multiple-relay)
+[Audio-only peer connection](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/audio)
 
-[Munge SDP](http://googlechrome.github.io/webrtc/samples/web/content/munge-sdp)
+[Multiple peer connections at once](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/multiple)
 
-[ICE candidate gathering](http://googlechrome.github.io/webrtc/samples/web/content/trickle-ice)
+[Forward output of one peer connection into another](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/multiple-relay)
 
-[Accept incoming peer connection](http://googlechrome.github.io/webrtc/samples/web/content/pr-answer)
+[Munge SDP parameters](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/munge-sdp)
 
-[Peer connection states](http://googlechrome.github.io/webrtc/samples/web/content/peerconnection-states)
+[Use pranswer when setting up a peer connection](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/pr-answer)
 
-[Web Audio output as input to peer connection](http://googlechrome.github.io/webrtc/samples/web/content/webaudio-input)
+[Adjust constraints, view stats](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/constraints)
 
-[Adjust constraints, view stats](http://googlechrome.github.io/webrtc/samples/web/content/constraints)
+[Display createOffer output](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/create-offer)
 
-[Display createOffer output](http://googlechrome.github.io/webrtc/samples/web/content/create-offer)
+[Use RTCDTMFSender](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/dtmf)
 
-[DTMF](http://googlechrome.github.io/webrtc/samples/web/content/dtmf)
+[Display peer connection states](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/states)
 
-[Face tracking](http://googlechrome.github.io/webrtc/samples/web/content/face)
+[ICE candidate gathering from STUN/TURN servers](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/trickle-ice)
+
+[Web Audio output as input to peer connection](https://googlechrome.github.io/webrtc/samples/web/content/peerconnection/webaudio-input)
+
+### RTCDataChannel ###
+
+[Data channels](https://googlechrome.github.io/webrtc/samples/web/content/datachannel)
+
diff --git a/index.html b/index.html
index 8882b34..882747a 100644
--- a/index.html
+++ b/index.html
@@ -55,47 +55,50 @@
 
 <!--     <p><a href="//apprtc.appspot.com" title="WebRTC video chat application">Video chat</a></p>
  -->
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/getusermedia">getUserMedia()</a></p>
+    <h3 id="getusermedia">getUserMedia</h3>
+    <p><a href="samples/web/content/getusermedia/gum">Basic getUserMedia demo</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/getusermedia-canvas">getUserMedia() + Canvas</a></p>
+    <p><a href="samples/web/content/getusermedia/canvas">Use getUserMedia with canvas</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/getusermedia-filter">getUserMedia() + Canvas + CSS filters</a></p>
+    <p><a href="samples/web/content/getusermedia/filter">Use getUserMedia with canvas and CSS filters</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/getusermedia-resolution">Choose camera resolution</a></p>
+    <p><a href="samples/web/content/getusermedia/resolution">Choose camera resolution</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/getusermedia-source">Choose camera and microphone</a></p>
+    <p><a href="samples/web/content/getusermedia/source">Choose camera and microphone</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/getusermedia-audio">Audio-only getUserMedia() output to local audio element</a></p>
+    <p><a href="samples/web/content/getusermedia/audio">Audio-only getUserMedia() output to local audio element</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/getusermedia-volume">Audio-only getUserMedia() displaying volume</a></p>
+    <p><a href="samples/web/content/getusermedia/volume">Audio-only getUserMedia() displaying volume</a></p>
+
+    <p><a href="samples/web/content/getusermedia/face">Face tracking, using getUserMedia and canvas</a></p>
+
+    <h3 id="peerconnection">RTCPeerConnection</h3>
+    <p><a href="samples/web/content/peerconnection/pc1">Basic peer connection demo</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/peerconnection">Peer connection</a></p>
+    <p><a href="samples/web/content/peerconnection/audio">Audio-only peer connection demo</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/peerconnection-audio">Audio-only peer connection</a></p>
+    <p><a href="samples/web/content/peerconnection/multiple">Multiple peer connections at once</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/multiple">Multiple peer connections</a></p>
+    <p><a href="samples/web/content/peerconnection/multiple-relay">Forward the output of one PC into another</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/multiple-relay">Multiple relay</a></p>
+    <p><a href="samples/web/content/peerconnection/munge-sdp">Munge SDP parameters</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/munge-sdp">Munge SDP</a></p>
+    <p><a href="samples/web/content/peerconnection/pr-answer">Use pranswer when setting up a peer connection</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/pr-answer">Accept incoming peer connection</a></p>
+    <p><a href="samples/web/content/peerconnection/constraints">Constraints and stats</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/peerconnection-states">Peer connection states</a></p>
+    <p><a href="samples/web/content/peerconnection/create-offer">Display createOffer output for various scenarios</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/webaudio-input">Web Audio output as input to peer connection</a></p>
+    <p><a href="samples/web/content/peerconnection/dtmf">Use RTCDTMFSender</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/datachannel">Data channels</a></p>
+    <p><a href="samples/web/content/peerconnection/states">Display peer connection states</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/constraints">Constraints and stats</a></p>
+    <p><a href="samples/web/content/peerconnection/trickle-ice">ICE candidate gathering from STUN/TURN servers</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/create-offer">Display createOffer output</a></p>
+    <p><a href="samples/web/content/peerconnection/webaudio-input">Web Audio output as input to peer connection</a></p>
 
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/dtmf">DTMF</a></p>
-
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/face">Face tracking</a></p>
-
-    <p><a href="//googlechrome.github.io/webrtc/samples/web/content/trickle-ice">ICE candidate gathering</a></p>
+    <h3 id="datachannel">RTCDataChannel</h3>
+    <p><a href="samples/web/content/datachannel">Basic data channel demo</a></p>
 
   </section>
 
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..28f0dbd
--- /dev/null
+++ b/package.json
@@ -0,0 +1,30 @@
+{
+  "name": "webrtc",
+  "version": "1.0.0",
+  "description": "Project checking for WebRTC GitHub sample repo",
+  "main": "Gruntfile.js",
+  "scripts": {
+    "test": "grunt --verbose"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/samdutton/webrtc.git"
+  },
+  "keywords": [
+    "webrtc"
+  ],
+  "author": "samdutton",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/samdutton/webrtc/issues"
+  },
+  "homepage": "https://github.com/samdutton/webrtc",
+  "devDependencies": {
+    "grunt": ">=0.4.5",
+    "grunt-cli": ">=0.1.9",
+    "grunt-contrib-csslint": ">=0.3.1",
+    "grunt-contrib-jshint": "^0.10.0",
+    "grunt-htmlhint": ">=0.4.1",
+    "grunt-jscs": ">=0.8.1"
+  }
+}
diff --git a/samples/web/.csslintrc b/samples/web/.csslintrc
new file mode 100644
index 0000000..5681687
--- /dev/null
+++ b/samples/web/.csslintrc
@@ -0,0 +1,6 @@
+{
+  "box-model": false,
+  "ids": false,
+  "overqualified-elements": false,
+  "unique-headings": false
+}
diff --git a/samples/web/.jshintrc b/samples/web/.jshintrc
new file mode 100644
index 0000000..6735580
--- /dev/null
+++ b/samples/web/.jshintrc
@@ -0,0 +1,41 @@
+{
+  "browser": true,
+  "camelcase": true,
+  "curly": true,
+  "devel": true,
+  "eqeqeq": true,
+  "forin": false,
+  "globalstrict": true,
+  "quotmark": "single",
+  "undef": true,
+  "unused": "strict",
+  "globals": {
+    "addTest": true,
+    "attachMediaStream": true,
+    "attachMediaStream": true,
+    "audioContext": true,
+    "AudioContext": true,
+    "Call": true,
+    "createIceServers": true,
+    "createIceServer": true,
+    "doGetUserMedia": true,
+    "expectEquals": true,
+    "getUserMedia": true,
+    "getUserMedia": true,
+    "MediaStreamTrack": true,
+    "reattachMediaStream": true,
+    "reportBug": true,
+    "reportError": true,
+    "reportFatal": true,
+    "reportInfo": true,
+    "reportSuccess": true,
+    "RTCIceCandidate": true,
+    "RTCPeerConnection": true,
+    "RTCSessionDescription": true,
+    "setTestProgress": true,
+    "Ssim": true,
+    "StatisticsAggregate": true,
+    "testFinished": true,
+    "trace": true,
+    "webrtcDetectedBrowser": true,
+    "webrtcDetectedVersion": true
+  }
+}
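
With "undef": true and "unused": "strict", any global a sample touches must be declared, either in the "globals" map above or inline in the file. A minimal sketch of the inline form (the sample code itself is assumed):

    /* globals trace, webrtcDetectedBrowser */
    'use strict';
    // Both names also resolve via the shared .jshintrc globals map above.
    trace('Detected browser: ' + webrtcDetectedBrowser);
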
diff --git a/samples/web/content/apprtc/apprtc.py b/samples/web/content/apprtc/apprtc.py
index 1be1153..e669f3d 100755
--- a/samples/web/content/apprtc/apprtc.py
+++ b/samples/web/content/apprtc/apprtc.py
@@ -52,9 +52,6 @@
 def get_preferred_audio_send_codec(user_agent):
   # Empty string means no preference.
   preferred_audio_send_codec = ''
-  # Prefer to send ISAC on Chrome for Android.
-  if is_chrome_for_android(user_agent):
-    preferred_audio_send_codec = 'ISAC/16000'
   return preferred_audio_send_codec
 
 # HD is on by default for desktop Chrome, but not Android or Firefox (yet)
@@ -164,10 +161,12 @@
 
   return track_constraints
 
-def make_media_stream_constraints(audio, video):
+def make_media_stream_constraints(audio, video, firefox_fake_device):
   stream_constraints = (
       {'audio': make_media_track_constraints(audio),
        'video': make_media_track_constraints(video)})
+  if firefox_fake_device:
+    stream_constraints['fake'] = True
   logging.info('Applying media constraints: ' + str(stream_constraints))
   return stream_constraints
 
@@ -396,6 +395,10 @@
     audio = self.request.get('audio')
     video = self.request.get('video')
 
+    # Set firefox_fake_device=1 to add fake: true to the media constraints,
+    # which will make Firefox use its built-in fake device.
+    firefox_fake_device = self.request.get('firefox_fake_device')
+
     # The hd parameter is a shorthand to determine whether to open the
     # camera at 720p. If no value is provided, use a platform-specific default.
     # When defaulting to HD, use optional constraints, in case the camera
@@ -428,7 +431,10 @@
     stereo = self.request.get('stereo', default_value = 'false')
 
-    # Set opusfec to false by default.
-    opusfec = self.request.get('opusfec', default_value = 'false')
+    # Set opusfec to true by default.
+    opusfec = self.request.get('opusfec', default_value = 'true')
+
+    # Read url param for opusmaxpbr
+    opusmaxpbr = self.request.get('opusmaxpbr', default_value = '')
 
     # Read url params audio send bitrate (asbr) & audio receive bitrate (arbr)
     asbr = self.request.get('asbr', default_value = '')
@@ -533,7 +539,8 @@
     pc_config = make_pc_config(stun_server, turn_server, ts_pwd, ice_transports)
     pc_constraints = make_pc_constraints(dtls, dscp, ipv6)
     offer_constraints = make_offer_constraints()
-    media_constraints = make_media_stream_constraints(audio, video)
+    media_constraints = make_media_stream_constraints(audio, video,
+                                                      firefox_fake_device)
 
     params = {
       'error_messages': error_messages,
@@ -549,6 +556,7 @@
       'turn_url': turn_url,
       'stereo': stereo,
       'opusfec': opusfec,
+      'opusmaxpbr': opusmaxpbr,
       'arbr': arbr,
       'asbr': asbr,
       'vrbr': vrbr,
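
With firefox_fake_device=1 in the room URL, make_media_stream_constraints() adds fake: true, so the client ends up calling getUserMedia with constraints along these lines (a sketch; the audio/video values are assumed):

    // Constraints as serialized into the page by the handler above:
    var mediaConstraints = {audio: true, video: true, fake: true};
    // Firefox interprets fake: true as a request for its built-in fake device,
    // which is useful for automated tests with no real camera or microphone.
    getUserMedia(mediaConstraints, function(stream) {
      trace('Got media stream: ' + stream.id);
    }, function(error) {
      trace('getUserMedia error: ' + error);
    });
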
diff --git a/samples/web/content/apprtc/css/main.css b/samples/web/content/apprtc/css/main.css
index 6bcb254..7a584e0 100644
--- a/samples/web/content/apprtc/css/main.css
+++ b/samples/web/content/apprtc/css/main.css
@@ -32,24 +32,33 @@
   position: absolute;
   height: 100%;
   width: 100%;
-  margin: 0px auto;
+  margin: 0 auto;
   -webkit-perspective: 1000;
 }
 #card {
+  -moz-transition-duration: 0.8s;
+  -o-transition-duration: 0.8s;
   -webkit-transition-duration: 0.8s;
   -webkit-transform-style: preserve-3d;
 }
 #local {
   position: absolute;
   width: 100%;
-  transform: scale(-1, 1);
+  -moz-transform: scale(-1, 1);
+  -ms-transform: scale(-1, 1);
+  -o-transform: scale(-1, 1);
   -webkit-transform: scale(-1, 1);
+  transform: scale(-1, 1);
   -webkit-backface-visibility: hidden;
 }
 #remote {
   position: absolute;
   width: 100%;
+  -moz-transform: rotateY(180deg);
+  -ms-transform: rotateY(180deg);
+  -o-transform: rotateY(180deg);
   -webkit-transform: rotateY(180deg);
+  transform: rotateY(180deg);
   -webkit-backface-visibility: hidden;
 }
 #mini {
@@ -59,29 +68,56 @@
   bottom: 32px;
   right: 4px;
   opacity: 1.0;
-  transform: scale(-1, 1);
+  -moz-transform: scale(-1, 1);
+  -ms-transform: scale(-1, 1);
+  -o-transform: scale(-1, 1);
   -webkit-transform: scale(-1, 1);
+  transform: scale(-1, 1);
 }
 #localVideo {
   width: 100%;
   height: 100%;
   opacity: 0;
+  -moz-transition-property: opacity;
+  -ms-transition-property: opacity;
+  -o-transition-property: opacity;
   -webkit-transition-property: opacity;
+  transition-property: opacity;
+  -moz-transition-duration: 1s;
+  -ms-transition-duration: 1s;
+  -o-transition-duration: 1s;
   -webkit-transition-duration: 1s;
+  transition-duration: 1s;
 }
 #remoteVideo {
   width: 100%;
   height: 100%;
   opacity: 0;
+  -moz-transition-property: opacity;
+  -ms-transition-property: opacity;
+  -o-transition-property: opacity;
   -webkit-transition-property: opacity;
+  transition-property: opacity;
+  -moz-transition-duration: 1s;
+  -ms-transition-duration: 1s;
+  -o-transition-duration: 1s;
   -webkit-transition-duration: 1s;
+  transition-duration: 1s;
 }
 #miniVideo {
   width: 100%;
   height: 100%;
   opacity: 0;
+  -moz-transition-property: opacity;
+  -ms-transition-property: opacity;
+  -o-transition-property: opacity;
   -webkit-transition-property: opacity;
+  transition-property: opacity;
+  -moz-transition-duration: 1s;
+  -ms-transition-duration: 1s;
+  -o-transition-duration: 1s;
   -webkit-transition-duration: 1s;
+  transition-duration: 1s;
 }
 #hangup {
  font-size: 13px; font-weight: bold;
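
The prefixed and unprefixed transforms above pair with a rotation applied from JavaScript: flipping #card by 180 degrees swaps which video face is visible, and the transition-duration rules animate the flip. A sketch of the triggering code (the #card id comes from the CSS; the rest is assumed):

    // Flip the card to reveal the remote video; the CSS transition animates it.
    var card = document.getElementById('card');
    card.style.webkitTransform = 'rotateY(180deg)';
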
diff --git a/samples/web/content/apprtc/full.html b/samples/web/content/apprtc/full.html
index 9d0aef2..98334e2 100644
--- a/samples/web/content/apprtc/full.html
+++ b/samples/web/content/apprtc/full.html
@@ -1,3 +1,4 @@
+<!DOCTYPE html>
 <!--
  *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
  *
@@ -5,9 +6,9 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
 -->
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
 <html>
 <head>
+<title>Room full</title>
 <script src="/_ah/channel/jsapi"></script>
 <style type="text/css">
   a:link { color: #ffffff; }
diff --git a/samples/web/content/apprtc/index.html b/samples/web/content/apprtc/index.html
index 06f7628..08ed33f 100644
--- a/samples/web/content/apprtc/index.html
+++ b/samples/web/content/apprtc/index.html
@@ -7,14 +7,12 @@
  *  tree.
 -->
 <html>
-
 <head>
 
   <title>WebRTC Reference App</title>
 
   <meta charset="utf-8">
   <meta name="description" content="WebRTC reference app">
-  <meta http-equiv="X-UA-Compatible" content="chrome=1">
   {{ meta_viewport }}
 
   <link rel="canonical" href="{{ room_link }}">
@@ -50,6 +48,7 @@
 {{ include_vr_js }}
 <!-- Load the polyfill to switch-hit between Chrome and Firefox -->
 <script src="/js/adapter.js"></script>
+<script src="/js/ga.js"></script>
 
 <script type="text/javascript">
   var errorMessages = {{ error_messages }};
@@ -65,6 +64,7 @@
   var turnUrl = '{{ turn_url }}';
   var stereo = {{ stereo }};
   var opusfec = {{ opusfec }};
+  var opusMaxPbr = '{{ opusmaxpbr }}';
   var audioSendBitrate = '{{ asbr }}';
   var audioRecvBitrate = '{{ arbr }}';
   var videoSendBitrate = '{{ vsbr }}';
diff --git a/samples/web/content/apprtc/js/ga.js b/samples/web/content/apprtc/js/ga.js
new file mode 120000
index 0000000..ac4b64a
--- /dev/null
+++ b/samples/web/content/apprtc/js/ga.js
@@ -0,0 +1 @@
+../../../js/lib/ga.js
\ No newline at end of file
diff --git a/samples/web/content/apprtc/js/main.js b/samples/web/content/apprtc/js/main.js
index 6553304..e1d8b97 100644
--- a/samples/web/content/apprtc/js/main.js
+++ b/samples/web/content/apprtc/js/main.js
@@ -6,10 +6,11 @@
  *  tree.
  */
 
-/* More information about these options at jshint.com/docs/options */
-/* jshint browser: true, camelcase: true, curly: true, devel: true, eqeqeq: true, forin: false, globalstrict: true, quotmark: single, undef: true, unused: strict */
-/* global attachMediaStream, audioRecvBitrate, audioRecvCodec, audioSendBitrate, audioSendCodec, channelToken, createIceServers, errorMessages, getUserMedia, goog, initiator:true, me, mediaConstraints, offerConstraints, pcConfig, pcConstraints, reattachMediaStream, roomKey, roomLink, RTCIceCandidate, RTCPeerConnection, RTCSessionDescription, setupStereoscopic, stereo, stereoscopic, trace, turnUrl, videoRecvBitrate, videoSendBitrate, videoSendInitialBitrate:true */
-/* exported enterFullScreen, initialize, onHangup */
+// Directives for JSHint checking (see jshint.com/docs/options).
+// globals: variables defined in apprtc/index.html
+/* globals audioRecvBitrate, audioRecvCodec, audioSendBitrate, audioSendCodec, channelToken, errorMessages, goog, initiator:true, me, mediaConstraints, offerConstraints, opusfec, opusMaxPbr, pcConfig, pcConstraints, roomKey, roomLink, setupStereoscopic, stereo, stereoscopic, turnUrl, videoRecvBitrate, videoSendBitrate, videoSendInitialBitrate:true */
+// exported: functions used in apprtc/index.html
+/* exported enterFullScreen, initialize, onHangup, doGetUserMedia */
 
 'use strict';
 
@@ -271,6 +272,11 @@
   if (opusfec) {
     message.sdp = addCodecParam(message.sdp, 'opus/48000', 'useinbandfec=1');
   }
+  // Set Opus maxplaybackrate, if requested.
+  if (opusMaxPbr) {
+    message.sdp = addCodecParam(message.sdp, 'opus/48000', 'maxplaybackrate=' +
+        opusMaxPbr);
+  }
   message.sdp = maybePreferAudioSendCodec(message.sdp);
   message.sdp = maybeSetAudioSendBitRate(message.sdp);
   message.sdp = maybeSetVideoSendBitRate(message.sdp);
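
addCodecParam() is the app's existing SDP helper; a simplified sketch of what such a helper does (not the app's actual implementation) is to find the codec's payload type from its a=rtpmap line and extend the matching a=fmtp line:

    // Simplified sketch: append a 'key=value' parameter to the fmtp line of
    // the codec matching `codec` (e.g. 'opus/48000') in an SDP blob.
    function addCodecParamSketch(sdp, codec, param) {
      var sdpLines = sdp.split('\r\n');
      for (var i = 0; i < sdpLines.length; i++) {
        // e.g. 'a=rtpmap:111 opus/48000/2' gives payload type '111'.
        if (sdpLines[i].indexOf('a=rtpmap:') === 0 &&
            sdpLines[i].indexOf(codec) !== -1) {
          var payload = sdpLines[i].split(' ')[0].split(':')[1];
          for (var j = 0; j < sdpLines.length; j++) {
            if (sdpLines[j].indexOf('a=fmtp:' + payload) === 0) {
              sdpLines[j] += '; ' + param;
            }
          }
        }
      }
      // A real helper would also create the a=fmtp line when it is missing.
      return sdpLines.join('\r\n');
    }
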
diff --git a/samples/web/content/constraints/index.html b/samples/web/content/constraints/index.html
deleted file mode 100644
index a4eeab3..0000000
--- a/samples/web/content/constraints/index.html
+++ /dev/null
@@ -1,114 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-
-<base target="_blank">
-
-<title>Constraints and statistics</title>
-
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link href='//fonts.googleapis.com/css?family=Inconsolata' rel='stylesheet' type='text/css'>
-
-<link rel="stylesheet" href="../../css/main.css" />
-<link rel="stylesheet" href="css/main.css" />
-
-</head>
-
-<body>
-
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Constraints &amp; statistics</span></h1>
-
-  <section id="blurb">
-    <p>This demo shows ways to use constraints and statistics in WebRTC applications.</p>
-    <p>Set camera constraints, and click <strong>Get media</strong> to (re)open the camera with these included. Click <strong>Connect</strong> to create a (local) peer connection. The RTCPeerConnection objects <code>localPeerConnection</code> and <code>remotePeerConnection</code> can be inspected from the console.</p>
-    <p>Setting a value to zero will remove that constraint. </p>
-    <p>The lefthand video shows the output of <code>getUserMedia()</code>; on the right is the video after being passed through the peer connection. The transmission bitrate is displayed below the righthand video.</p>
-  </section>
-
-  <button id="getMedia">Get media</button>
-  <button id="connect" disabled>Connect</button>
-
-
-  <section id="constraints">
-      <div id="getUserMedia">
-        <div class="input">
-          <h2>Camera constraints</h2>
-          <div id="minWidth">
-            <label>Min width <span>300</span>px:</label>
-            <input type="range" min="0" max="1280" value="300">
-          </div>
-          <div id="maxWidth">
-            <label>Max width <span>640</span>px:</label>
-            <input type="range" min="0" max="1280" value="640">
-          </div>
-          <div id="minHeight">
-            <label>Min height <span>200</span>px:</label>
-            <input type="range" min="0" max="1280" value="200">
-          </div>
-          <div id="maxHeight">
-            <label>Max height <span>480</span>px:</label>
-            <input type="range" min="0" max="1280" value="480">
-          </div>
-          <div id="framerate">
-            <label>Frame rate <span>0</span>fps:</label>
-            <input type="range" min="0" max="60" value="0">
-          </div>
-        </div>
-        <div id="getUserMediaConstraints" class="output"></div>
-     </div>
-
-      <div id="addStream">
-        <div class="input">
-          <h2>Stream constraints</h2>
-          <div id="maxBitrate">
-            <label>Max bitrate <span>1000</span>kb/s:</label>
-            <input type="range"  min="0" max="2000" value="1000">
-          </div>
-        </div>
-        <div id="addStreamConstraints" class="output"></div>
-      </div>
-
-  </section>
-
-  <section id="video">
-    <div id="localVideo">
-      <video autoplay muted></video>
-      <div></div>
-    </div>
-    <div id="remoteVideo">
-      <video autoplay muted></video>
-      <div></div>
-      <div id="bitrate"></div>
-    </div>
-  </section>
-
-  <section id="statistics">
-    <div id="senderStats"></div>
-    <div id="receiverStats"></div>
-  </section>
-
-  <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/constraints" title="View source for this page on Github" id="viewSource">View source on Github</a>
-
-</div>
-
-<script src="../../js/adapter.js"></script>
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/create-offer/index.html b/samples/web/content/create-offer/index.html
deleted file mode 100644
index 95ac6e7..0000000
--- a/samples/web/content/create-offer/index.html
+++ /dev/null
@@ -1,92 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<!-- This sample demonstrates calling createOffer to get a SDP blob that
-     indicates the capabilities of the PeerConnection. -->
-<title>Show createOffer Output Demo</title>
-<script src="../../js/adapter.js"></script>
-<style>
-button {
-  font: 18px sans-serif;
-  padding: 8px;
-}
-textarea {
-  font-family: monospace;
-  margin: 2px;
-  width:480px;
-  height:640px;
-}
-</style>
-</head>
-<body>
-<h1>WebRTC createOffer Test Page</h1>
-<p>This page tests the createOffer method for a WebRTC implementation. It
-  creates a PeerConnection, and then prints out the SDP generated by
-  createOffer, with the number of desired audio MediaStreamTracks and the
-  checked createOffer constraints. Currently, only audio tracks can be added,
-  as there is no programmatic way to generate video tracks. (Web Audio is
-  used to generate the audio tracks.)</p>
-<h3>Tracks</h3>
-<p>Number of Audio Tracks<input id="num-audio-tracks" value="0"></input></p>
-<h3>Constraints:</h3>
-<input id="audio" type="checkbox">Offer To Receive Audio</input><br>
-<input id="video" type="checkbox">Offer To Receive Video</input><br>
-<input id="vad" type="checkbox">Voice Activity Detection</input><br>
-<input id="restart" type="checkbox">Ice Restart</input><br>
-<button id="start" onclick="createOffer()">Create Offer</button><br>
-<br>
-<textarea id="output"></textarea>
-<script>
-var numAudioTracks = document.getElementById('num-audio-tracks');
-var audio = document.getElementById('audio');
-var video = document.getElementById('video');
-var vad = document.getElementById('vad');
-var restart = document.getElementById('restart');
-var output = document.getElementById('output');
-var pc = new RTCPeerConnection(null);
-var wacx = new webkitAudioContext();
-
-function createOffer() {
-  var numRequestedAudioTracks = numAudioTracks.value;
-  while (numRequestedAudioTracks < pc.getLocalStreams().length) {
-    pc.removeStream(pc.getLocalStreams()[pc.getLocalStreams().length - 1]);
-  }
-  while (numRequestedAudioTracks > pc.getLocalStreams().length) {
-    // Create some dummy audio streams using Web Audio.
-    // Note that this fails if you try to do more than one track in Chrome
-    // right now.
-    var dst = wacx.createMediaStreamDestination();
-    pc.addStream(dst.stream);
-  }
-  var offerConstraints = {
-    "optional": [
-      { "OfferToReceiveAudio": audio.checked },
-      { "OfferToReceiveVideo": video.checked },
-    ]
-  };
-  // These constraints confuse Firefox, even if declared as optional.
-  if (webrtcDetectedBrowser != "Firefox") {
-    offerConstraints.optional.push(
-        { "VoiceActivityDetection": vad.checked });
-    offerConstraints.optional.push(
-        { "IceRestart": restart.checked });
-  }
-  pc.createOffer(gotDescription, null, offerConstraints);
-}
-
-function gotDescription(desc) {
-  pc.setLocalDescription(desc);
-  output.value = desc.sdp;
-}
-</script>
-</body>
-</html>
-
-
diff --git a/samples/web/content/datachannel/index.html b/samples/web/content/datachannel/index.html
index 51cc2e3..2191290 100644
--- a/samples/web/content/datachannel/index.html
+++ b/samples/web/content/datachannel/index.html
@@ -1,3 +1,4 @@
+<!DOCTYPE html>
 <!--
  *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
  *
@@ -5,62 +6,68 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
 -->
-<!DOCTYPE html>
 <html>
 <head>
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-<base target="_blank">
-<title>RTCDataChannel</title>
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="../../css/main.css" />
-<link rel="stylesheet" href="css/main.css" />
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>RTCDataChannel</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../css/main.css">
+  <link rel="stylesheet" href="css/main.css" />
+
 </head>
+
 <body>
-<div id="container">
 
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>RTCDataChannel</span></h1>
+  <div id="container">
 
-  <div id="buttons">
-    <button id="startButton">Start</button>
-    <button id="sendButton" disabled>Send</button>
-    <button id="closeButton" disabled>Stop</button>
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>RTCDataChannel</span></h1>
+
+    <div id="buttons">
+      <button id="startButton">Start</button>
+      <button id="sendButton" disabled>Send</button>
+      <button id="closeButton" disabled>Stop</button>
+    </div>
+
+    <form>
+      <span>Choose protocol for transmitting data:</span>
+      <input type="radio" id="useSctp" name="transportbtn" checked />
+      <label for="useSctp">SCTP</label>
+      <input type="radio" id="useRtp" name="transportbtn" />
+      <label for="useRtp">RTP</label>
+    </form>
+
+    <div id="sendReceive">
+      <div id="send">
+        <h2>Send</h2>
+        <textarea id="dataChannelSend" disabled placeholder="Press Start, enter some text, then press Send."></textarea>
+      </div>
+      <div id="receive">
+        <h2>Receive</h2>
+        <textarea id="dataChannelReceive" disabled></textarea>
+      </div>
+    </div>
+
+    <p>View the console to see logging.</p>
+
+    <p>The <code>RTCPeerConnection</code> objects <code>localConnection</code> and <code>remoteConnection</code> are in global scope, so you can inspect them in the console as well.</p>
+
+    <p>For more information about RTCDataChannel, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/#toc-rtcdatachannel" title="RTCDataChannel section of HTML5 Rocks article about WebRTC">Getting Started With WebRTC</a>.</p>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/datachannel" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
   </div>
 
-  <form>
-    <span>Choose protocol for transmitting data:</span>
-    <input type="radio" id="useSctp" name="transportbtn" checked />
-    <label for="useSctp">SCTP</label>
-    <input type="radio" id="useRtp" name="transportbtn" />
-    <label for="useRtp">RTP</label>
-  </form>
+  <script src="../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
 
-  <div id="sendReceive">
-    <div id="send">
-      <h2>Send</h2>
-      <textarea id="dataChannelSend" disabled placeholder="Press Start, enter some text, then press Send."></textarea>
-    </div>
-    <div id="receive">
-      <h2>Receive</h2>
-      <textarea id="dataChannelReceive" disabled></textarea>
-    </div>
-  </div>
+  <script src="../../js/lib/ga.js"></script>
 
-  <p>View the console to see logging.</p>
-
-  <p>The <code>RTCPeerConnection</code> objects <code>localConnection</code> and <code>remotePeerConnection</code> are in global scope, so you can inspect them in the console as well.</p>
-
-  <p>For more information about RTCDataChannel, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/#toc-rtcdatachannel" title="RTCDataChannel section of HTML5 Rocks article about WebRTC">Getting Started With WebRTC</a>.</p>
-
-<a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/datachannel" title="View source for this page on Github" id="viewSource">View source on Github</a>
-</div>
-
-<script src="../../js/adapter.js"></script>
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
 </body>
 </html>
diff --git a/samples/web/content/datachannel/js/main.js b/samples/web/content/datachannel/js/main.js
index 6820725..222b425 100644
--- a/samples/web/content/datachannel/js/main.js
+++ b/samples/web/content/datachannel/js/main.js
@@ -5,7 +5,11 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
-var localConnection, remotePeerConnection, sendChannel, receiveChannel, pcConstraint, dataConstraint;
+
+'use strict';
+
+var localConnection, remoteConnection, sendChannel, receiveChannel,
+  pcConstraint, dataConstraint;
 var dataChannelSend = document.querySelector('textarea#dataChannelSend');
 var dataChannelReceive = document.querySelector('textarea#dataChannelReceive');
 var sctpSelect = document.querySelector('input#useSctp');
@@ -41,45 +45,51 @@
   pcConstraint = null;
   dataConstraint = null;
   if (sctpSelect.checked &&
-     (webrtcDetectedBrowser === 'chrome' && webrtcDetectedVersion >= 31) ||
-      webrtcDetectedBrowser === 'firefox'){
-    // SCTP is supported from Chrome M31 and is supported in FF.
-    // No need to pass DTLS constraint as it is on by default in Chrome M31.
+      (webrtcDetectedBrowser === 'chrome' && webrtcDetectedVersion >= 31) ||
+      webrtcDetectedBrowser === 'firefox') {
+    // SCTP is supported from Chrome 31 and is supported in FF.
+    // No need to pass DTLS constraint as it is on by default in Chrome 31.
     // For SCTP, reliable and ordered is true by default.
-    trace('Using SCTP based Data Channels');
+    trace('Using SCTP based data channels');
   } else {
-    pcConstraint = {optional: [{RtpDataChannels: true}]};
+    pcConstraint = {
+      optional: [{
+        RtpDataChannels: true
+      }]
+    };
     if (!rtpSelect.checked) {
-      // Use rtp data channels for chrome versions older than M31.
-      trace('Using RTP based Data Channels,' +
-            'as you are on an older version than M31.');
+      // Use RTP data channels for Chrome versions older than 31.
+      trace('Using RTP based data channels, ' +
+        'as you are on a Chrome version older than 31.');
-      alert('Reverting to RTP based data channels,' +
-            'as you are on an older version than M31.');
+      alert('Reverting to RTP based data channels, ' +
+        'as you are on a Chrome version older than 31.');
       rtpSelect.checked = true;
     }
   }
-  localConnection = new RTCPeerConnection(servers, pcConstraint);
+  // Add localConnection to global scope to make it visible from the browser console.
+  window.localConnection = localConnection = new RTCPeerConnection(servers, pcConstraint);
   trace('Created local peer connection object localConnection');
 
   try {
-    // Data Channel api supported from Chrome M25.
-    // You might need to start chrome with  --enable-data-channels flag.
+    // Data channel API supported from Chrome 25.
+    // You might need to start Chrome with the --enable-data-channels flag.
     sendChannel = localConnection.createDataChannel('sendDataChannel', dataConstraint);
     trace('Created send data channel');
   } catch (e) {
     alert('Failed to create data channel. ' +
-          'You need Chrome M25 or later with --enable-data-channels flag');
-    trace('Create Data channel failed with exception: ' + e.message);
+        'You need Chrome 25 or later with the --enable-data-channels flag.');
+    trace('Create data channel failed with exception: ' + e.message);
   }
   localConnection.onicecandidate = iceCallback1;
   sendChannel.onopen = onSendChannelStateChange;
   sendChannel.onclose = onSendChannelStateChange;
 
-  remotePeerConnection = new RTCPeerConnection(servers, pcConstraint);
-  trace('Created remote peer connection object remotePeerConnection');
+  // Add remoteConnection to global scope to make it visible from the browser console.
+  window.remoteConnection = remoteConnection = new RTCPeerConnection(servers, pcConstraint);
+  trace('Created remote peer connection object remoteConnection');
 
-  remotePeerConnection.onicecandidate = iceCallback2;
-  remotePeerConnection.ondatachannel = receiveChannelCallback;
+  remoteConnection.onicecandidate = iceCallback2;
+  remoteConnection.ondatachannel = receiveChannelCallback;
 
   localConnection.createOffer(gotDescription1, onCreateSessionDescriptionError);
   startButton.disabled = true;
@@ -97,15 +107,15 @@
 }
 
 function closeDataChannels() {
-  trace('Closing data Channels');
+  trace('Closing data channels');
   sendChannel.close();
   trace('Closed data channel with label: ' + sendChannel.label);
   receiveChannel.close();
   trace('Closed data channel with label: ' + receiveChannel.label);
   localConnection.close();
-  remotePeerConnection.close();
+  remoteConnection.close();
   localConnection = null;
-  remotePeerConnection = null;
+  remoteConnection = null;
   trace('Closed peer connections');
   startButton.disabled = false;
   sendButton.disabled = true;
@@ -118,21 +128,21 @@
 function gotDescription1(desc) {
   localConnection.setLocalDescription(desc);
   trace('Offer from localConnection \n' + desc.sdp);
-  remotePeerConnection.setRemoteDescription(desc);
-  remotePeerConnection.createAnswer(gotDescription2, onCreateSessionDescriptionError);
+  remoteConnection.setRemoteDescription(desc);
+  remoteConnection.createAnswer(gotDescription2, onCreateSessionDescriptionError);
 }
 
 function gotDescription2(desc) {
-  remotePeerConnection.setLocalDescription(desc);
-  trace('Answer from remotePeerConnection \n' + desc.sdp);
+  remoteConnection.setLocalDescription(desc);
+  trace('Answer from remoteConnection \n' + desc.sdp);
   localConnection.setRemoteDescription(desc);
 }
 
 function iceCallback1(event) {
   trace('local ice callback');
   if (event.candidate) {
-    remotePeerConnection.addIceCandidate(event.candidate,
-                        onAddIceCandidateSuccess, onAddIceCandidateError);
+    remoteConnection.addIceCandidate(event.candidate,
+        onAddIceCandidateSuccess, onAddIceCandidateError);
     trace('Local ICE candidate: \n' + event.candidate.candidate);
   }
 }
@@ -141,7 +151,7 @@
   trace('remote ice callback');
   if (event.candidate) {
     localConnection.addIceCandidate(event.candidate,
-                        onAddIceCandidateSuccess, onAddIceCandidateError);
+        onAddIceCandidateSuccess, onAddIceCandidateError);
     trace('Remote ICE candidate: \n ' + event.candidate.candidate);
   }
 }
@@ -170,7 +180,7 @@
 function onSendChannelStateChange() {
   var readyState = sendChannel.readyState;
   trace('Send channel state is: ' + readyState);
-  if (readyState == 'open') {
+  if (readyState === 'open') {
     dataChannelSend.disabled = false;
     dataChannelSend.focus();
     sendButton.disabled = false;
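
Since both connections are now attached to window, they can be inspected from the DevTools console once Start has been pressed, for example:

    // In the browser console, after clicking Start:
    console.log(window.localConnection.signalingState);      // e.g. 'stable'
    console.log(window.remoteConnection.iceConnectionState); // e.g. 'connected'
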
diff --git a/samples/web/content/dtmf/index.html b/samples/web/content/dtmf/index.html
deleted file mode 100644
index 27a58b5..0000000
--- a/samples/web/content/dtmf/index.html
+++ /dev/null
@@ -1,88 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-
-<base target="_blank">
-
-<title>DTMF</title>
-
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link href='//fonts.googleapis.com/css?family=Inconsolata' rel='stylesheet' type='text/css'>
-
-<link rel="stylesheet" href="../../css/main.css" />
-<link rel="stylesheet" href="css/main.css" />
-
-</head>
-
-<body>
-
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Send DTMF tones</span></h1>
-
-  <div id="dialPad">
-    <div>
-      <button>1</button><button>2</button><button>3</button><button>4</button>
-    </div>
-    <div>
-      <button>5</button><button>6</button><button>7</button><button>8</button>
-    </div>
-    <div>
-      <button>9</button><button>0</button><button>*</button><button>#</button>
-    </div>
-    <div>
-      <button>A</button><button>B</button><button>C</button><button>D</button>
-    </div>
-  </div>
-
-  <div id="dtmf">
-    <h2>Sent tones</h2>
-    <div id="dtmfStatus"></div>
-    <div id="sentTones"></div>
-    <audio autoplay="autoplay"></audio>
-  </div>
-
-  <div id="parameters">
-    <div>
-      <label for="duration">Duration:</label>
-      <input id="duration" type="text" value="500" />
-    </div>
-    <div>
-      <label for="gap">Gap:</label>
-      <input id="gap" type="text" value="50" />
-    </div>
-    <div>
-       <label for="tones">Tones:</label>
-       <input id="tones" type="text" value="1199##9,6633221,9966332,9966332,1199##9,6633221" />
-    </div>
-  </div>
-
-  <div id="buttons">
-    <button id="callButton">Call</button>
-    <button id="sendTonesButton">Send tones</button>
-    <button id="hangupButton">Hang up</button>
-  </div>
-
-  <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/dtmf" title="View source for this page on Github" id="viewSource">View source on Github</a>
-
-</div>
-
-<script src="../../js/adapter.js"></script>
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/getusermedia-audio/index.html b/samples/web/content/getusermedia-audio/index.html
deleted file mode 100644
index 9b6a672..0000000
--- a/samples/web/content/getusermedia-audio/index.html
+++ /dev/null
@@ -1,87 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta charset="utf-8">
-<title>Local Audio Rendering Demo</title>
-<script type="text/javascript" src="../../js/adapter.js"></script>
-<script>
-  var audioElement;
-  var buttonStart;
-  var buttonStop;
-  var localStream;
-
-  $ = function(id) {
-    return document.getElementById(id);
-  };
-
-  function start() {
-    var constraints = {audio:true, video:false};
-    getUserMedia(constraints, gotStream, gotStreamFailed);
-    buttonStart.disabled = true;
-    buttonStop.disabled = false;
-  }
-
-  function stop() {
-    buttonStart.enabled = true;
-    buttonStop.enabled = false;
-    localStream.stop();
-  }
-
-  function gotStream(stream) {
-    videoTracks = stream.getVideoTracks();
-    audioTracks = stream.getAudioTracks();
-    if (audioTracks.length == 1 && videoTracks.length == 0) {
-      console.log('gotStream({audio:true, video:false})');
-      console.log('Using audio device: ' + audioTracks[0].label);
-      attachMediaStream(audioElement, stream);
-      stream.onended = function() {
-        console.log('stream.onended');
-        buttonStart.disabled = false;
-        buttonStop.disabled = true;
-      };
-
-      localStream = stream;
-    } else {
-      alert('The media stream contains an invalid amount of audio tracks.');
-      stream.stop();
-    }
-  }
-
-  function gotStreamFailed(error) {
-    buttonStart.disabled = false;
-    buttonStop.disabled = true;
-    alert('Failed to get access to local media. Error code: ' + error.code);
-  }
-
-  function onload() {
-    audioElement = $('audio');
-    buttonStart = $('start');
-    buttonStop = $('stop');
-    buttonStart.enabled = true;
-    buttonStop.disabled = true;
-  }
-</script>
-</head>
-
-<body onload="onload()">
-  <h2>Rendering of a local media stream using &lt;audio&gt;</h2>
-  <p>Demonstrates usage of a local media stream connected to an HTML5 audio tag.<br>
-     Press Start, select a microphone and listen to your own voice in loopback.</p>
-  <style>
-    button {
-      font: 14px sans-serif;
-      padding: 8px;
-    }
-  </style>
-  <audio id="audio" autoplay="autoplay" controls="controls"></audio><br><br>
-  <button id="start" onclick="start()">Start</button>
-  <button id="stop" onclick="stop()">Stop</button>
-</body>
-</html>
diff --git a/samples/web/content/getusermedia-canvas/index.html b/samples/web/content/getusermedia-canvas/index.html
deleted file mode 100644
index e984fe3..0000000
--- a/samples/web/content/getusermedia-canvas/index.html
+++ /dev/null
@@ -1,41 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-<base target="_blank">
-<title>getUserMedia to canvas</title>
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="../../css/main.css" />
-</head>
-<body>
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia ⇒ canvas</span></h1>
-
-  <video autoplay></video>
-  <button>Take snapshot</button>
-  <canvas></canvas>
-
-  <p>Draw a frame from the video onto the canvas element using the <code>drawImage()</code> method.</p>
-
-  <p>The variables <code>canvas</code>, <code>video</code> and <code>stream</code> are in global scope, so you can inspect them from the console.</p>
-
-  <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/canvas" title="View source for this page on Github" id="viewSource">View source on Github</a>
-  </div>
-
-  <script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/getusermedia-canvas/js/main.js b/samples/web/content/getusermedia-canvas/js/main.js
deleted file mode 100644
index 7328b0e..0000000
--- a/samples/web/content/getusermedia-canvas/js/main.js
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
- */
-// variables in global scope so available to console
-button = document.querySelector("button");
-video = document.querySelector("video");
-canvas = document.querySelector("canvas");
-
-canvas.width = 480;
-canvas.height = 360;
-
-button.onclick = function(){
-  canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
-}
-
-navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
-
-var constraints = {audio: false, video: true};
-var video = document.querySelector("video");
-
-function successCallback(stream){
-  window.stream = stream; // stream available to console
-  if (window.URL) {
-    video.src = window.URL.createObjectURL(stream);
-  } else {
-    video.src = stream;
-  }
-}
-
-function errorCallback(error){
-  console.log("navigator.getUserMedia error: ", error);
-}
-
-navigator.getUserMedia(constraints, successCallback, errorCallback);
-
diff --git a/samples/web/content/getusermedia-filter/index.html b/samples/web/content/getusermedia-filter/index.html
deleted file mode 100644
index 4348a2e..0000000
--- a/samples/web/content/getusermedia-filter/index.html
+++ /dev/null
@@ -1,62 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-<base target="_blank">
-<title>getUserMedia + CSS filters</title>
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="../../css/main.css" />
-  <style>
-  .blur {
-    -webkit-filter: blur(3px);
-  }
-  .grayscale {
-    -webkit-filter: grayscale(1);
-  }
-  .invert {
-    -webkit-filter: invert(1);
-  }
-  .sepia {
-    -webkit-filter: sepia(1);
-  }
-  button {
-    width: 8.1em;
-  }
-  button#snapshot {
-    margin: 0 1em 1em 0;
-  }
-  </style>
-</head>
-<body>
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia + CSS filters</span></h1>
-
-  <video autoplay></video>
-  <button id="snapshot">Take snapshot</button>
-  <button id="filter">Apply a filter</button>
-  <canvas></canvas>
-
-  <p>Draw a frame from the getUserMedia video stream onto the canvas element, then apply CSS filters.</p>
-
-  <p>The variables <code>canvas</code>, <code>video</code> and <code>stream</code> are in global scope, so you can inspect them from the console.</p>
-
-<a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/filter" title="View source for this page on Github" id="viewSource">View source on Github</a>
-</div>
-
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/getusermedia-filter/js/main.js b/samples/web/content/getusermedia-filter/js/main.js
deleted file mode 100644
index ee0d5db..0000000
--- a/samples/web/content/getusermedia-filter/js/main.js
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
- */
-// variables in global scope so available to console
-snapshotButton = document.querySelector("button#snapshot");
-filterButton = document.querySelector("button#filter");
-video = document.querySelector("video");
-canvas = document.querySelector("canvas");
-
-canvas.width = 480;
-canvas.height = 360;
-
-var filters = ['blur', 'grayscale', 'invert', 'sepia'];
-
-snapshotButton.onclick = function snap(){
-  canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
-}
-
-filterButton.onclick = function(){
-  var newIndex = (filters.indexOf(canvas.className) + 1) % filters.length;
-  canvas.className = filters[newIndex];
-}
-
-
-navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
-
-var constraints = {audio: false, video: true};
-var video = document.querySelector("video");
-
-function successCallback(stream){
-  window.stream = stream; // stream available to console
-  if (window.URL) {
-    video.src = window.URL.createObjectURL(stream);
-  } else {
-    video.src = stream;
-  }
-}
-
-function errorCallback(error){
-  console.log("navigator.getUserMedia error: ", error);
-}
-
-navigator.getUserMedia(constraints, successCallback, errorCallback);
-
diff --git a/samples/web/content/getusermedia-resolution/index.html b/samples/web/content/getusermedia-resolution/index.html
deleted file mode 100644
index 52f826b..0000000
--- a/samples/web/content/getusermedia-resolution/index.html
+++ /dev/null
@@ -1,83 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-<base target="_blank">
-<title>getUserMedia: select resolution</title>
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="../../css/main.css" />
-
-  <style>
-  body, html {
-    height: 100%;
-  }
-
-  button {
-    margin: 0 1em 0 0;
-    width: 100px;
-  }
-
-  div#buttons {
-    margin: 0 0 1em 0;
-  }
-
-  div#container {
-    max-width: 100%;
-  }
-
-  p#dimensions {
-    height: 1em;
-    margin: 0 0 1.5em 0;
-  }
-
-  video {
-    background: none;
-    height: auto;
-    width: auto;
-  }
-  </style>
-
-</head>
-<body>
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia: select resolution</span></h1>
-  <p></p>
-
-  <p>This example uses <a href="http://tools.ietf.org/html/draft-alvestrand-constraints-resolution-00#page-4" title="IETF constraints draft proposal">constraints</a>.</p>
-
-  <p>Click a button to call <code>getUserMedia()</code> with appropriate resolution.</p>
-
-  <div id="buttons">
-    <button id="qvga">QVGA</button>
-    <button id="vga">VGA</button>
-    <button id="hd">HD</button>
-  </div>
-
-  <p id="dimensions"></p>
-
-  <video autoplay></video>
-
-  <script src="js/main.js"></script>
-
-  <p>For more information, see <a href="http://www.html5rocks.com/en/tutorials/getusermedia/intro/" title="Media capture article by Eric Bidelman on HTML5 Rocks">Capturing Audio &amp; Video in HTML5</a> on HTML5 Rocks.</p>
-
-<a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/resolution" title="View source for this page on Github" id="viewSource">View source on Github</a>
-</div>
-
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/getusermedia-resolution/js/main.js b/samples/web/content/getusermedia-resolution/js/main.js
deleted file mode 100644
index eb18042..0000000
--- a/samples/web/content/getusermedia-resolution/js/main.js
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
- */
-var vgaButton = document.querySelector("button#vga");
-var qvgaButton = document.querySelector("button#qvga");
-var hdButton = document.querySelector("button#hd");
-var dimensions = document.querySelector("p#dimensions");
-var video = document.querySelector("video");
-var stream;
-
-navigator.getUserMedia = navigator.getUserMedia ||
-  navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
-
-function successCallback(stream) {
-  window.stream = stream; // stream available to console
-  video.src = window.URL.createObjectURL(stream);
-}
-
-function errorCallback(error){
-  console.log("navigator.getUserMedia error: ", error);
-}
-
-function displayVideoDimensions() {
-  dimensions.innerHTML = "Actual video dimensions: " + video.videoWidth +
-    "x" + video.videoHeight + 'px.';
-}
-
-video.addEventListener('play', function(){
-  setTimeout(function(){
-    displayVideoDimensions();
-  }, 500);
-});
-
-var qvgaConstraints  = {
-  video: {
-    mandatory: {
-      maxWidth: 320,
-      maxHeight: 180
-    }
-  }
-};
-
-var vgaConstraints  = {
-  video: {
-    mandatory: {
-      maxWidth: 640,
-      maxHeight: 360
-    }
-  }
-};
-
-var hdConstraints  = {
-  video: {
-    mandatory: {
-      minWidth: 1280,
-      minHeight: 720
-    }
-  }
-};
-
-qvgaButton.onclick = function(){getMedia(qvgaConstraints)};
-vgaButton.onclick = function(){getMedia(vgaConstraints)};
-hdButton.onclick = function(){getMedia(hdConstraints)};
-
-function getMedia(constraints){
-  if (!!stream) {
-    video.src = null;
-    stream.stop();
-  }
-  navigator.getUserMedia(constraints, successCallback, errorCallback);
-}
-
diff --git a/samples/web/content/getusermedia-source/index.html b/samples/web/content/getusermedia-source/index.html
deleted file mode 100644
index b418857..0000000
--- a/samples/web/content/getusermedia-source/index.html
+++ /dev/null
@@ -1,57 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-<base target="_blank">
-<title>Select audio and video sources</title>
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="../../css/main.css" />
-<link rel='stylesheet' href='../../css/main.css' />
-
-  <style>
-  div.select {
-    display: inline-block;
-    margin: 0 0 1em 0;
-  }
-  select {
-    width: 110px;
-  }
-  </style>
-
-</head>
-<body>
-<div id='container'>
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Select audio &amp; video sources</span></h1>
-
-  <p>Get available audio and video sources from <code>MediaStream.getSources()</code> then set the source for <code>getUserMedia()</code> using a <code>sourceId</code> constraint.</p>
-
-  <div class='select'>
-    <label for='audioSource'>Audio source: </label><select id='audioSource'></select>
-  </div>
-
-  <div class='select'>
-    <label for='videoSource'>Video source: </label><select id='videoSource'></select>
-  </div>
-
-  <video muted autoplay></video>
-
- <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/sources" title="View source for this page on Github" id="viewSource">View source on Github</a>
-</div>
-
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/getusermedia-volume/index.html b/samples/web/content/getusermedia-volume/index.html
deleted file mode 100644
index 81f8d20..0000000
--- a/samples/web/content/getusermedia-volume/index.html
+++ /dev/null
@@ -1,179 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta charset="utf-8">
-<title>Local Audio Rendering Demo</title>
-<script type="text/javascript" src="../../js/adapter.js"></script>
-<script>
-  var buttonStart;
-  var buttonStop;
-  var localStream;
-  var reporter;
-  var audioContext;
-
-  // Meter class that generates a number correlated to audio volume.
-  // The meter class itself displays nothing, but it makes the
-  // instantaneous and time-decaying volumes available for inspection.
-  // It also reports on the fraction of samples that were at or near
-  // the top of the measurement range.
-  function SoundMeter(context) {
-    this.context = context
-    this.volume = 0.0;
-    this.slow_volume = 0.0;
-    this.clip = 0.0;
-    this.script = context.createScriptProcessor(2048, 1, 1);
-    that = this;
-    this.script.onaudioprocess = function(event) {
-      var input = event.inputBuffer.getChannelData(0);
-      var i;
-      var sum = 0.0;
-      var clipcount = 0;
-      for (i = 0; i < input.length; ++i) {
-        sum += input[i] * input[i];
-        if (Math.abs(input[i]) > 0.99) {
-          clipcount += 1
-        }
-      }
-      that.volume = Math.sqrt(sum / input.length);
-      that.slow_volume = 0.95 * that.slow_volume + 0.05 * that.volume;
-      that.clip = clipcount / input.length;
-    }
-  }
-
-  SoundMeter.prototype.connectToSource = function(stream) {
-    console.log('SoundMeter connecting');
-    this.mic = this.context.createMediaStreamSource(stream);
-    this.mic.connect(this.script);
-    // Necessary to make sample run, but should not be.
-    this.script.connect(this.context.destination);
-  }
-
-  SoundMeter.prototype.stop = function() {
-    this.mic.disconnect();
-    this.script.disconnect();
-  }
-
-  // End of SoundMeter class.
-
-  $ = function(id) {
-    return document.getElementById(id);
-  };
-
-  function start() {
-    var constraints = {audio:true, video:false};
-    getUserMedia(constraints, gotStream, gotStreamFailed);
-    buttonStart.disabled = true;
-    buttonStop.disabled = false;
-  }
-
-  function stop() {
-    buttonStart.enabled = true;
-    buttonStop.enabled = false;
-    localStream.stop();
-    clearInterval(reporter);
-    soundMeter.stop();
-  }
-
-  function gotStream(stream) {
-    var videoTracks = stream.getVideoTracks();
-    var audioTracks = stream.getAudioTracks();
-    if (audioTracks.length == 1 && videoTracks.length == 0) {
-      console.log('gotStream({audio:true, video:false})');
-      console.log('Using audio device: ' + audioTracks[0].label);
-      stream.onended = function() {
-        console.log('stream.onended');
-        buttonStart.disabled = false;
-        buttonStop.disabled = true;
-      };
-
-      localStream = stream;
-      var soundMeter = new SoundMeter(audioContext);
-      soundMeter.connectToSource(stream);
-
-      // Set up reporting of the volume every 0.2 seconds.
-      var meter = $('volume');
-      var decaying_meter = $('decaying_volume');
-      var meter_canvas = $('graphic_volume').getContext('2d');
-      var meter_slow = $('graphic_slow').getContext('2d');
-      var meter_clip = $('graphic_clip').getContext('2d');
-      reporter = setInterval(function() {
-         meter.textContent = soundMeter.volume.toFixed(2);
-         decaying_meter.textContent = soundMeter.slow_volume.toFixed(2);
-         paintMeter(meter_canvas, soundMeter.volume);
-         paintMeter(meter_slow, soundMeter.slow_volume);
-         paintMeter(meter_clip, soundMeter.clip);
-      }, 200);
-    } else {
-      alert('The media stream contains an invalid number of tracks:'
-         + audioTracks.length + ' audio ' + videoTracks.length + ' video');
-      stream.stop();
-    }
-  }
-
-  function gotStreamFailed(error) {
-    buttonStart.disabled = false;
-    buttonStop.disabled = true;
-    alert('Failed to get access to local media. Error code: ' + error.code);
-  }
-
-  function onload() {
-    try {
-      window.AudioContext = window.AudioContext || window.webkitAudioContext;
-      audioContext = new AudioContext();
-    } catch(e) {
-      alert('Web Audio API not found');
-    }
-    buttonStart = $('start');
-    buttonStop = $('stop');
-    buttonStart.enabled = true;
-    buttonStop.disabled = true;
-  }
-
-  function paintMeter(context, number) {
-     context.clearRect(0, 0, 400, 20);
-     context.fillStyle = 'red';
-     context.fillRect(0, 0, number * 400, 20);
-  }
-
-</script>
-<style>
-  button {
-    font: 14px sans-serif;
-    padding: 8px;
-  }
-  canvas {
-    border:1px solid #000000;
-  }
-</style>
-</head>
-
-<body onload="onload()">
-  <h2>Measuring the volume of an audio stream using WebAudio</h2>
-  <p>Demonstrates measuring the volume of a local media stream
-     using WebAudio.<br>
-     Press Start, select a microphone, listen to your own voice in loopback,
-     and see the numbers change as you speak.</p>
-     The "instant" volume changes approximately every 50 ms; the "slow"
-     volume approximates the average volume over about a second.
-  <br>
-     Note that you will NOT hear your own voice; use the
-     <a href="local-audio-rendering.html">local audio rendering demo</a>
-     for that.
-  <p>
-  <button id="start" onclick="start()">Start</button>
-  <button id="stop" onclick="stop()">Stop</button><br><br>
-  Volume (instant): <span id="volume">Not set</span><br>
-  Volume (slow): <span id="decaying_volume">Not set</span><br>
-  <canvas id="graphic_volume" width="400" height="20"></canvas> Volume<br>
-  <canvas id="graphic_slow" width="400" height="20"></canvas> Slow<br>
-  <canvas id="graphic_clip" width="400" height="20"></canvas> Clipping
-
-</body>
-</html>
diff --git a/samples/web/content/getusermedia/audio/index.html b/samples/web/content/getusermedia/audio/index.html
new file mode 100644
index 0000000..1240bc0
--- /dev/null
+++ b/samples/web/content/getusermedia/audio/index.html
@@ -0,0 +1,47 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>gUM audio</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css">
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia, audio only</span></h1>
+
+    <audio controls autoplay></audio>
+
+    <p>Render the audio stream from an audio-only <code>getUserMedia()</code> call with an audio element.</p>
+
+    <p>The <code>MediaStream</code> object <code><em>stream</em></code> passed to the <code>getUserMedia()</code> callback is in global scope, so you can inspect it from the console.</p>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/getusermedia/audio" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/getusermedia/audio/js/main.js b/samples/web/content/getusermedia/audio/js/main.js
new file mode 100644
index 0000000..c6b2d74
--- /dev/null
+++ b/samples/web/content/getusermedia/audio/js/main.js
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+// Put variables in global scope to make them available to the browser console
+var audio = window.audio = document.querySelector('audio');
+var constraints = window.constraints = {
+  audio: true,
+  video: false
+};
+navigator.getUserMedia = navigator.getUserMedia ||
+  navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
+
+function successCallback(stream) {
+  var videoTracks = stream.getVideoTracks();
+  var audioTracks = stream.getAudioTracks();
+  if (audioTracks.length === 1 && videoTracks.length === 0) {
+    console.log('Got stream with constraints:', constraints);
+    console.log('Using audio device: ' + audioTracks[0].label);
+    stream.onended = function() {
+      console.log('Stream ended');
+    };
+  }
+  window.stream = stream; // make variable available to browser console
+  if (window.URL) {
+    audio.src = window.URL.createObjectURL(stream);
+  } else {
+    audio.src = stream;
+  }
+}
+
+function errorCallback(error) {
+  console.log('navigator.getUserMedia error: ', error);
+}
+
+navigator.getUserMedia(constraints, successCallback, errorCallback);
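Since the stream is exposed on window, the captured track can be checked straight from the browser console; a minimal sketch, assuming the sample above is running and mic permission was granted:

// Inspect the captured audio track from the console.
window.stream.getAudioTracks().forEach(function(track) {
  console.log('Using track:', track.label, '- enabled:', track.enabled);
});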
diff --git a/samples/web/content/getusermedia/canvas/index.html b/samples/web/content/getusermedia/canvas/index.html
new file mode 100644
index 0000000..b2cabd6
--- /dev/null
+++ b/samples/web/content/getusermedia/canvas/index.html
@@ -0,0 +1,48 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>getUserMedia to canvas</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css">
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia ⇒ canvas</span></h1>
+
+    <video autoplay></video>
+    <button>Take snapshot</button>
+    <canvas></canvas>
+
+    <p>Draw a frame from the video onto the canvas element using the <code>drawImage()</code> method.</p>
+
+    <p>The variables <code>canvas</code>, <code>video</code> and <code>stream</code> are in global scope, so you can inspect them from the console.</p>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/getusermedia/canvas" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="js/main.js"></script>
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/getusermedia/canvas/js/main.js b/samples/web/content/getusermedia/canvas/js/main.js
new file mode 100644
index 0000000..9abc8a8
--- /dev/null
+++ b/samples/web/content/getusermedia/canvas/js/main.js
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+// put variables in global scope to make them available to the browser console
+var canvas = window.canvas = document.querySelector('canvas');
+canvas.width = 480;
+canvas.height = 360;
+
+var button = document.querySelector('button');
+button.onclick = function() {
+  canvas.getContext('2d')
+    .drawImage(video, 0, 0, canvas.width, canvas.height);
+};
+
+var video = document.querySelector('video');
+
+navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
+  navigator.mozGetUserMedia;
+
+var constraints = {
+  audio: false,
+  video: true
+};
+
+function successCallback(stream) {
+  window.stream = stream; // make stream available to browser console
+  if (window.URL) {
+    video.src = window.URL.createObjectURL(stream);
+  } else {
+    video.src = stream;
+  }
+}
+
+function errorCallback(error) {
+  console.log('navigator.getUserMedia error: ', error);
+}
+
+navigator.getUserMedia(constraints, successCallback, errorCallback);
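Once a frame has been drawn, the canvas contents can be exported like any other canvas; a small hypothetical helper, not part of this diff:

// Hypothetical helper: export the latest snapshot as a PNG data URL.
function snapshotAsDataUrl() {
  return canvas.toDataURL('image/png');
}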
diff --git a/samples/web/content/getusermedia/desktopcapture/app.js b/samples/web/content/getusermedia/desktopcapture/app.js
new file mode 100644
index 0000000..6a3db64
--- /dev/null
+++ b/samples/web/content/getusermedia/desktopcapture/app.js
@@ -0,0 +1,63 @@
+'use strict';
+
+var extensionInstalled = false;
+
+document.getElementById('start').addEventListener('click', function() {
+  // send screen-sharer request to content-script
+  if (!extensionInstalled) {
+    var message = 'Please install the extension:\n' +
+                  '1. Go to chrome://extensions\n' +
+                  '2. Check: "Enable Developer mode"\n' +
+                  '3. Click: "Load the unpacked extension..."\n' +
+                  '4. Choose "extension" folder from the repository\n' +
+                  '5. Reload this page';
+    alert(message);
+  }
+  window.postMessage({ type: 'SS_UI_REQUEST', text: 'start' }, '*');
+});
+
+// listen for messages from the content-script
+window.addEventListener('message', function (event) {
+  if (event.origin !== window.location.origin) {
+    return;
+  }
+
+  // content-script will send a 'SS_PING' msg if extension is installed
+  if (event.data.type && (event.data.type === 'SS_PING')) {
+    extensionInstalled = true;
+  }
+
+  // user chose a stream
+  if (event.data.type && (event.data.type === 'SS_DIALOG_SUCCESS')) {
+    startScreenStreamFrom(event.data.streamId);
+  }
+
+  // user clicked on 'cancel' in choose media dialog
+  if (event.data.type && (event.data.type === 'SS_DIALOG_CANCEL')) {
+    console.log('User cancelled!');
+  }
+});
+
+function startScreenStreamFrom(streamId) {
+  navigator.webkitGetUserMedia({
+    audio: false,
+    video: {
+      mandatory: {
+        chromeMediaSource: 'desktop',
+        chromeMediaSourceId: streamId,
+        maxWidth: window.screen.width,
+        maxHeight: window.screen.height
+      }
+    }
+  },
+  // successCallback
+  function(screenStream) {
+    var videoElement = document.getElementById('video');
+    videoElement.src = URL.createObjectURL(screenStream);
+    videoElement.play();
+  },
+  // errorCallback
+  function(err) {
+    console.log('getUserMedia failed!: ' + err);
+  });
+}
diff --git a/samples/web/content/getusermedia/desktopcapture/extension/background.js b/samples/web/content/getusermedia/desktopcapture/extension/background.js
new file mode 100644
index 0000000..b050e86
--- /dev/null
+++ b/samples/web/content/getusermedia/desktopcapture/extension/background.js
@@ -0,0 +1,37 @@
+var data_sources = ['screen', 'window'];
+var desktopMediaRequestId = '';
+
+chrome.runtime.onConnect.addListener(function(port) {
+  port.onMessage.addListener(function (msg) {
+    if (msg.type === 'SS_UI_REQUEST') {
+      requestScreenSharing(port, msg);
+    }
+
+    if (msg.type === 'SS_UI_CANCEL') {
+      cancelScreenSharing(msg);
+    }
+  });
+});
+
+function requestScreenSharing(port, msg) {
+  // https://developer.chrome.com/extensions/desktopCapture
+  // params:
+  //  - 'data_sources' Set of sources that should be shown to the user.
+  //  - 'targetTab' Tab for which the stream is created.
+  //  - 'streamId' String that can be passed to getUserMedia() API
+  desktopMediaRequestId = chrome.desktopCapture.chooseDesktopMedia(data_sources, port.sender.tab, function(streamId) {
+    if (streamId) {
+      msg.type = 'SS_DIALOG_SUCCESS';
+      msg.streamId = streamId;
+    } else {
+      msg.type = 'SS_DIALOG_CANCEL';
+    }
+    port.postMessage(msg);
+  });
+}
+
+function cancelScreenSharing(msg) {
+  if (desktopMediaRequestId) {
+    chrome.desktopCapture.cancelChooseDesktopMedia(desktopMediaRequestId);
+  }
+}
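The SS_UI_CANCEL branch above is reachable from the page as well; a minimal sketch of a hypothetical page-side helper (app.js does not currently send this message):

// Hypothetical: ask the extension to dismiss a pending picker dialog.
// content-script.js relays this to background.js, which calls
// chrome.desktopCapture.cancelChooseDesktopMedia() with the stored request id.
function cancelScreenShareRequest() {
  window.postMessage({ type: 'SS_UI_CANCEL', text: 'cancel' }, '*');
}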
diff --git a/samples/web/content/getusermedia/desktopcapture/extension/content-script.js b/samples/web/content/getusermedia/desktopcapture/extension/content-script.js
new file mode 100644
index 0000000..d44eb56
--- /dev/null
+++ b/samples/web/content/getusermedia/desktopcapture/extension/content-script.js
@@ -0,0 +1,27 @@
+// https://chromeextensionsdocs.appspot.com/apps/content_scripts#host-page-communication
+//   - 'content_script' and execution env are isolated from each other
+//   - In order to communicate we use the DOM (window.postMessage)
+//
+// app.js            |        |content-script.js |      |background.js
+// window.postMessage|------->|port.postMessage  |----->| port.onMessage
+//                   | window |                  | port |
+// webkitGetUserMedia|<------ |window.postMessage|<-----| port.postMessage
+//
+
+var port = chrome.runtime.connect(chrome.runtime.id);
+
+port.onMessage.addListener(function(msg) {
+  window.postMessage(msg, '*');
+});
+
+window.addEventListener('message', function(event) {
+  // We only accept messages from ourselves
+  if (event.source !== window) {
+    return;
+  }
+  if (event.data.type && ((event.data.type === 'SS_UI_REQUEST') ||
+      (event.data.type === 'SS_UI_CANCEL'))) {
+    port.postMessage(event.data);
+  }
+}, false);
+
+window.postMessage({ type: 'SS_PING', text: 'start' }, '*');
diff --git a/samples/web/content/getusermedia/desktopcapture/extension/icon.png b/samples/web/content/getusermedia/desktopcapture/extension/icon.png
new file mode 100644
index 0000000..755653f
--- /dev/null
+++ b/samples/web/content/getusermedia/desktopcapture/extension/icon.png
Binary files differ
diff --git a/samples/web/content/getusermedia/desktopcapture/extension/manifest.json b/samples/web/content/getusermedia/desktopcapture/extension/manifest.json
new file mode 100644
index 0000000..13cf955
--- /dev/null
+++ b/samples/web/content/getusermedia/desktopcapture/extension/manifest.json
@@ -0,0 +1,22 @@
+{
+  "name": "Screensharing Extension",
+  "description": "Screensharing Extension for my app",
+  "version": "1.0.0",
+  "manifest_version": 2,
+  "icons": {
+    "128": "icon.png"
+  },
+  "background": {
+    "scripts": ["background.js"]
+  },
+  "content_scripts": [
+    {
+      "matches": ["https://github.com/GoogleChrome/webrtc/*"],
+      "js": ["content-script.js"]
+    }
+  ],
+  "permissions": [
+    "desktopCapture",
+    "https://github.com/GoogleChrome/webrtc/*"
+  ]
+}
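Note that the content script is only injected on pages matching https://github.com/GoogleChrome/webrtc/*; to run the demo from another origin (for example a local dev server), that origin would likely need to be added to both "matches" and "permissions".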
diff --git a/samples/web/content/getusermedia/desktopcapture/index.html b/samples/web/content/getusermedia/desktopcapture/index.html
new file mode 100644
index 0000000..48f5c67
--- /dev/null
+++ b/samples/web/content/getusermedia/desktopcapture/index.html
@@ -0,0 +1,30 @@
+<!DOCTYPE html>
+<html>
+<head>
+
+<title>Desktop capture</title>
+
+<style>
+  body {
+    background: white;
+    display: -webkit-flex;
+    -webkit-justify-content: center;
+    -webkit-align-items: center;
+    -webkit-flex-direction: column;
+  }
+  video {
+    width: 640px;
+    height: 480px;
+    border: 1px solid #e2e2e2;
+  }
+</style>
+</head>
+<body>
+  <video id="video" autoplay></video>
+  <p>
+    <button id="start">Share Screen</button>
+  </p>
+  <script src="app.js"></script>
+</body>
+</html>
diff --git a/samples/web/content/face/index.html b/samples/web/content/getusermedia/face/index.html
similarity index 93%
rename from samples/web/content/face/index.html
rename to samples/web/content/getusermedia/face/index.html
index 43b27d0..391c510 100644
--- a/samples/web/content/face/index.html
+++ b/samples/web/content/getusermedia/face/index.html
@@ -1,3 +1,4 @@
+<!DOCTYPE html>
 <!--
  *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
  *
@@ -5,13 +6,12 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
 -->
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
 <html>
 <head>
-<script type="text/javascript" src="../../js/lib/ccv.js"></script>
-<script type="text/javascript" src="../../js/lib/face.js"></script>
+<script type="text/javascript" src="../../../js/lib/ccv.js"></script>
+<script type="text/javascript" src="../../../js/lib/face.js"></script>
 <!-- Load the polyfill to switch-hit between Chrome and Firefox -->
-<script src="../../js/adapter.js"></script>
+<script src="../../../js/adapter.js"></script>
   <style type="text/css">
 * { margin:0; padding:0; } /* to remove the top and left whitespace */
 html, body { width:100%; height:100%; } /* just to be sure these are full screen*/
@@ -147,7 +147,7 @@
 setTimeout(initialize, 1);
 </script>

-  <video id="localVideo" autoplay="autoplay" muted="true"></video>
+  <video id="localVideo" autoplay muted></video>
   <canvas width="1000" height="1000" id="localCanvas"></canvas>
   <a href="http://liuliu.me/eyes/javascript-face-detection-explained"><div id="credit">JS Face Detect by Liu Liu</div></a>
 </body>
diff --git a/samples/web/content/getusermedia/filter/index.html b/samples/web/content/getusermedia/filter/index.html
new file mode 100644
index 0000000..fddda17
--- /dev/null
+++ b/samples/web/content/getusermedia/filter/index.html
@@ -0,0 +1,68 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>getUserMedia + CSS filters</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css">
+
+  <style>
+    .blur {
+      -webkit-filter: blur(3px);
+    }
+    .grayscale {
+      -webkit-filter: grayscale(1);
+    }
+    .invert {
+      -webkit-filter: invert(1);
+    }
+    .sepia {
+      -webkit-filter: sepia(1);
+    }
+    button {
+      width: 110px;
+    }
+    button#snapshot {
+      margin: 0 10px 25px 0;
+    }
+  </style>
+
+</head>
+
+<body>
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia + CSS filters</span></h1>
+
+    <video autoplay></video>
+    <button id="snapshot">Take snapshot</button>
+    <button id="filter">Apply a filter</button>
+    <canvas></canvas>
+
+    <p>Draw a frame from the getUserMedia video stream onto the canvas element, then apply CSS filters.</p>
+
+    <p>The variables <code>canvas</code>, <code>video</code> and <code>stream</code> are in global scope, so you can inspect them from the console.</p>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/getusermedia/filter" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+  </div>
+
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+</body>
+</html>
diff --git a/samples/web/content/getusermedia/filter/js/main.js b/samples/web/content/getusermedia/filter/js/main.js
new file mode 100644
index 0000000..dfb5455
--- /dev/null
+++ b/samples/web/content/getusermedia/filter/js/main.js
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+var snapshotButton = document.querySelector('button#snapshot');
+var filterButton = document.querySelector('button#filter');
+
+// put variables in global scope to make them available to the browser console
+var video = window.video = document.querySelector('video');
+var canvas = window.canvas = document.querySelector('canvas');
+canvas.width = 480;
+canvas.height = 360;
+
+var filters = ['blur', 'grayscale', 'invert', 'sepia'];
+
+snapshotButton.onclick = function snap() {
+  canvas.getContext('2d').drawImage(video, 0, 0, canvas.width,
+    canvas.height);
+};
+
+filterButton.onclick = function() {
+  var newIndex = (filters.indexOf(canvas.className) + 1) % filters.length;
+  video.className = filters[newIndex];
+  canvas.className = filters[newIndex];
+};
+
+navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
+  navigator.mozGetUserMedia;
+
+var constraints = {
+  audio: false,
+  video: true
+};
+
+function successCallback(stream) {
+  window.stream = stream; // make stream available to browser console
+  if (window.URL) {
+    video.src = window.URL.createObjectURL(stream);
+  } else {
+    video.src = stream;
+  }
+}
+
+function errorCallback(error) {
+  console.log('navigator.getUserMedia error: ', error);
+}
+
+navigator.getUserMedia(constraints, successCallback, errorCallback);
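Because the filters are plain CSS classes, they can also be applied by hand from the console (the classes are defined in index.html above):

// For example, apply a specific filter instead of cycling:
video.className = 'grayscale';
canvas.className = 'grayscale';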
diff --git a/samples/web/content/getusermedia/gum/index.html b/samples/web/content/getusermedia/gum/index.html
new file mode 100644
index 0000000..c241685
--- /dev/null
+++ b/samples/web/content/getusermedia/gum/index.html
@@ -0,0 +1,46 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>getUserMedia</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css">
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia</span></h1>
+
+    <video autoplay></video>
+
+    <p>Display the video stream from <code>getUserMedia()</code> in a video element.</p>
+
+    <p>The <code>MediaStream</code> object <code>stream</code> passed to the <code>getUserMedia()</code> callback is in global scope, so you can inspect it from the console.</p>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/getusermedia/gum" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+  </div>
+
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/getusermedia/js/main.js b/samples/web/content/getusermedia/gum/js/main.js
similarity index 67%
rename from samples/web/content/getusermedia/js/main.js
rename to samples/web/content/getusermedia/gum/js/main.js
index b55083e..395ec92 100644
--- a/samples/web/content/getusermedia/js/main.js
+++ b/samples/web/content/getusermedia/gum/js/main.js
@@ -5,12 +5,20 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
-// variables in global scope so available to console
-video = document.querySelector("video");
-constraints = {audio: false, video: true};
-navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
 
-function successCallback(stream){
+'use strict';
+
+// put variables in global scope to make them available to the browser console
+var video = document.querySelector('video');
+var constraints = {
+  audio: false,
+  video: true
+};
+
+navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
+  navigator.mozGetUserMedia;
+
+function successCallback(stream) {
   window.stream = stream; // stream available to console
   if (window.URL) {
     video.src = window.URL.createObjectURL(stream);
@@ -19,8 +27,8 @@
   }
 }
 
-function errorCallback(error){
-  console.log("navigator.getUserMedia error: ", error);
+function errorCallback(error) {
+  console.log('navigator.getUserMedia error: ', error);
 }
 
 navigator.getUserMedia(constraints, successCallback, errorCallback);
diff --git a/samples/web/content/getusermedia/index.html b/samples/web/content/getusermedia/index.html
deleted file mode 100644
index 48e71a9..0000000
--- a/samples/web/content/getusermedia/index.html
+++ /dev/null
@@ -1,39 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-<base target="_blank">
-<title>getUserMedia</title>
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="../../css/main.css" />
-</head>
-<body>
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia</span></h1>
-
-  <video autoplay></video>
-
-  <p>Display the video stream from <code>getUserMedia()</code> in the video element.</p>
-
-  <p>The <code>MediaStream</code> object <code>stream</code> passed to the <code>getUserMedia()</code> callback is in global scope, so you can inspect it from the console.</p>
-
-<a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/getusermedia" title="View source for this page on Github" id="viewSource">View source on Github</a>
-</div>
-
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/getusermedia/resolution/index.html b/samples/web/content/getusermedia/resolution/index.html
new file mode 100644
index 0000000..465606c
--- /dev/null
+++ b/samples/web/content/getusermedia/resolution/index.html
@@ -0,0 +1,89 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>getUserMedia: select resolution</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css">
+
+  <style>
+    body, html {
+      height: 100%;
+    }
+
+    button {
+      margin: 0 10px 20px 0;
+      width: 90px;
+    }
+
+    div#buttons {
+      margin: 0 0 1em 0;
+    }
+
+    div#container {
+      max-width: 100%;
+    }
+
+    p#dimensions {
+      height: 1em;
+      margin: 0 0 1.5em 0;
+    }
+
+    video {
+      background: none;
+      height: auto;
+      width: auto;
+    }
+  </style>
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia: select resolution</span></h1>
+    <p></p>
+
+    <p>This example uses <a href="http://tools.ietf.org/html/draft-alvestrand-constraints-resolution-00#page-4" title="IETF constraints draft proposal">constraints</a>.</p>
+
+    <p>Click a button to call <code>getUserMedia()</code> with appropriate resolution.</p>
+
+    <div id="buttons">
+      <button id="qvga">QVGA</button>
+      <button id="vga">VGA</button>
+      <button id="hd">HD</button>
+    </div>
+
+    <p id="dimensions"></p>
+
+    <video autoplay></video>
+
+    <script src="js/main.js"></script>
+
+    <p>For more information, see <a href="http://www.html5rocks.com/en/tutorials/getusermedia/intro/" title="Media capture article by Eric Bidelman on HTML5 Rocks">Capturing Audio &amp; Video in HTML5</a> on HTML5 Rocks.</p>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/getusermedia/resolution" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+  </div>
+
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/getusermedia/resolution/js/main.js b/samples/web/content/getusermedia/resolution/js/main.js
new file mode 100644
index 0000000..61354c6
--- /dev/null
+++ b/samples/web/content/getusermedia/resolution/js/main.js
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+var dimensions = document.querySelector('p#dimensions');
+var video = document.querySelector('video');
+var stream;
+
+var vgaButton = document.querySelector('button#vga');
+var qvgaButton = document.querySelector('button#qvga');
+var hdButton = document.querySelector('button#hd');
+
+vgaButton.onclick = function() {
+  getMedia(vgaConstraints);
+};
+
+qvgaButton.onclick = function() {
+  getMedia(qvgaConstraints);
+};
+
+hdButton.onclick = function() {
+  getMedia(hdConstraints);
+};
+
+var qvgaConstraints = {
+  video: {
+    mandatory: {
+      maxWidth: 320,
+      maxHeight: 180
+    }
+  }
+};
+
+var vgaConstraints = {
+  video: {
+    mandatory: {
+      maxWidth: 640,
+      maxHeight: 360
+    }
+  }
+};
+
+var hdConstraints = {
+  video: {
+    mandatory: {
+      minWidth: 1280,
+      minHeight: 720
+    }
+  }
+};
+
+navigator.getUserMedia = navigator.getUserMedia ||
+  navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
+
+function successCallback(stream) {
+  window.stream = stream; // stream available to console
+  video.src = window.URL.createObjectURL(stream);
+}
+
+function errorCallback(error) {
+  console.log('navigator.getUserMedia error: ', error);
+}
+
+function displayVideoDimensions() {
+  dimensions.innerHTML = 'Actual video dimensions: ' + video.videoWidth +
+    'x' + video.videoHeight + 'px.';
+}
+
+video.onplay = function() {
+  setTimeout(function() {
+    displayVideoDimensions();
+  }, 500);
+};
+
+function getMedia(constraints) {
+  if (!!stream) {
+    video.src = null;
+    stream.stop();
+  }
+  navigator.getUserMedia(constraints, successCallback, errorCallback);
+}
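The same mandatory min/max pattern extends to other presets; a sketch of a hypothetical full-HD preset (button and wiring not part of this diff) that would reuse getMedia() unchanged:

var fullHdConstraints = {
  video: {
    mandatory: {
      minWidth: 1920,
      minHeight: 1080
    }
  }
};
// fullHdButton.onclick = function() { getMedia(fullHdConstraints); };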
diff --git a/samples/web/content/getusermedia/source/index.html b/samples/web/content/getusermedia/source/index.html
new file mode 100644
index 0000000..90deba6
--- /dev/null
+++ b/samples/web/content/getusermedia/source/index.html
@@ -0,0 +1,61 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Select audio and video sources</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+
+  <style>
+    div.select {
+      display: inline-block;
+      margin: 0 0 1em 0;
+    }
+    select {
+      width: 110px;
+    }
+  </style>
+
+</head>
+
+<body>
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Select audio &amp; video sources</span></h1>
+
+    <p>Get available audio and video sources from <code>MediaStream.getSources()</code> then set the source for <code>getUserMedia()</code> using a <code>sourceId</code> constraint.</p>
+
+    <div class="select">
+      <label for="audioSource">Audio source: </label><select id="audioSource"></select>
+    </div>
+
+    <div class="select">
+      <label for="videoSource">Video source: </label><select id="videoSource"></select>
+    </div>
+
+    <video muted autoplay></video>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/getusermedia/source" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+  </div>
+
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+</body>
+</html>
diff --git a/samples/web/content/getusermedia-source/js/main.js b/samples/web/content/getusermedia/source/js/main.js
similarity index 71%
rename from samples/web/content/getusermedia-source/js/main.js
rename to samples/web/content/getusermedia/source/js/main.js
index d9efd02..7bac0ff 100644
--- a/samples/web/content/getusermedia-source/js/main.js
+++ b/samples/web/content/getusermedia/source/js/main.js
@@ -5,18 +5,20 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
-var videoElement = document.querySelector("video");
-var audioSelect = document.querySelector("select#audioSource");
-var videoSelect = document.querySelector("select#videoSource");
-var startButton = document.querySelector("button#start");
+
+'use strict';
+
+var videoElement = document.querySelector('video');
+var audioSelect = document.querySelector('select#audioSource');
+var videoSelect = document.querySelector('select#videoSource');
 
 navigator.getUserMedia = navigator.getUserMedia ||
   navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
 
 function gotSources(sourceInfos) {
-  for (var i = 0; i != sourceInfos.length; ++i) {
+  for (var i = 0; i !== sourceInfos.length; ++i) {
     var sourceInfo = sourceInfos[i];
-    var option = document.createElement("option");
+    var option = document.createElement('option');
     option.value = sourceInfo.id;
     if (sourceInfo.kind === 'audio') {
       option.text = sourceInfo.label || 'microphone ' + (audioSelect.length + 1);
@@ -30,7 +32,7 @@
   }
 }
 
-if (typeof MediaStreamTrack === 'undefined'){
+if (typeof MediaStreamTrack === 'undefined') {
   alert('This browser does not support MediaStreamTrack.\n\nTry Chrome Canary.');
 } else {
   MediaStreamTrack.getSources(gotSources);
@@ -42,12 +44,12 @@
   videoElement.src = window.URL.createObjectURL(stream);
 }
 
-function errorCallback(error){
-  console.log("navigator.getUserMedia error: ", error);
+function errorCallback(error) {
+  console.log('navigator.getUserMedia error: ', error);
 }
 
-function start(){
-  if (!!window.stream) {
+function start() {
+  if (window.stream) {
     videoElement.src = null;
     window.stream.stop();
   }
@@ -55,10 +57,14 @@
   var videoSource = videoSelect.value;
   var constraints = {
     audio: {
-      optional: [{sourceId: audioSource}]
+      optional: [{
+        sourceId: audioSource
+      }]
     },
     video: {
-      optional: [{sourceId: videoSource}]
+      optional: [{
+        sourceId: videoSource
+      }]
     }
   };
   navigator.getUserMedia(constraints, successCallback, errorCallback);
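For reference, the constraints object start() assembles ends up shaped like this (the sourceId values are illustrative, taken from the MediaStreamTrack.getSources() results):

var exampleConstraints = {
  audio: { optional: [{ sourceId: 'a1b2c3' }] },
  video: { optional: [{ sourceId: 'd4e5f6' }] }
};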
diff --git a/samples/web/content/getusermedia/volume/css/main.css b/samples/web/content/getusermedia/volume/css/main.css
new file mode 100644
index 0000000..785d912
--- /dev/null
+++ b/samples/web/content/getusermedia/volume/css/main.css
@@ -0,0 +1,30 @@
+div#meters > div {
+  margin: 0 0 1em 0;
+}
+
+div#meters div.label {
+  display: inline-block;
+  font-weight: 400;
+  margin: 0 0.5em 0 0;
+  width: 3.5em;
+}
+
+div#meters div.value {
+  display: inline-block;
+}
+
+meter {
+  width: 50%;
+}
+
+meter#clip {
+  color: #db4437;
+}
+
+meter#slow {
+  color: #f4b400;
+}
+
+meter#instant {
+  color: #0f9d58;
+}
diff --git a/samples/web/content/getusermedia/volume/index.html b/samples/web/content/getusermedia/volume/index.html
new file mode 100644
index 0000000..0a2e4aa
--- /dev/null
+++ b/samples/web/content/getusermedia/volume/index.html
@@ -0,0 +1,68 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript" />
+  <meta name="description" content="Client-side WebRTC code samples." />
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Audio stream volume</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Audio stream volume</span></h1>
+
+    <p>Measure the volume of a local media stream using WebAudio.</p>
+
+    <div id="meters">
+      <div id="instant">
+        <div class="label">Instant: </div>
+        <meter high="0.25" max="1" value="0"></meter>
+        <div class="value"></div>
+      </div>
+      <div id="slow">
+        <div class="label">Slow: </div>
+        <meter high="0.25" max="1" value="0"></meter>
+        <div class="value"></div>
+      </div>
+      <div id="clip">
+        <div class="label">Clip: </div>
+        <meter max="1" value="0"></meter>
+        <div class="value"></div>
+      </div>
+    </div>
+
+    <p>The 'instant' volume changes approximately every 50ms; the 'slow' volume approximates the average volume over about a second.</p>
+    <p>Note that you will not hear your own voice; use the <a href="../audio">local audio rendering demo</a> for that.</p>
+    <p>The <code>audioContext</code>, <code>stream</code> and <code>soundMeter</code> variables are in global scope, so you can inspect them from the console.</p>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/getusermedia/volume" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+
+  <script src="js/soundmeter.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/getusermedia/volume/js/main.js b/samples/web/content/getusermedia/volume/js/main.js
new file mode 100644
index 0000000..42a6223
--- /dev/null
+++ b/samples/web/content/getusermedia/volume/js/main.js
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+/* global AudioContext, SoundMeter */
+
+'use strict';
+
+var instantMeter = document.querySelector('#instant meter');
+var slowMeter = document.querySelector('#slow meter');
+var clipMeter = document.querySelector('#clip meter');
+
+var instantValueDisplay = document.querySelector('#instant .value');
+var slowValueDisplay = document.querySelector('#slow .value');
+var clipValueDisplay = document.querySelector('#clip .value');
+
+try {
+  window.AudioContext = window.AudioContext || window.webkitAudioContext;
+  window.audioContext = new AudioContext();
+} catch (e) {
+  alert('Web Audio API not supported.');
+}
+
+// put variables in global scope to make them available to the browser console
+var constraints = window.constraints = {
+  audio: true,
+  video: false
+};
+
+navigator.getUserMedia = navigator.getUserMedia ||
+  navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
+
+function successCallback(stream) {
+  // put variables in global scope to make them available to the browser console
+  window.stream = stream;
+  var soundMeter = window.soundMeter = new SoundMeter(window.audioContext);
+  soundMeter.connectToSource(stream);
+
+  setInterval(function() {
+    instantMeter.value = instantValueDisplay.innerText =
+      soundMeter.instant.toFixed(2);
+    slowMeter.value = slowValueDisplay.innerText =
+      soundMeter.slow.toFixed(2);
+    clipMeter.value = clipValueDisplay.innerText =
+      soundMeter.clip;
+  }, 200);
+}
+
+function errorCallback(error) {
+  console.log('navigator.getUserMedia error: ', error);
+}
+
+navigator.getUserMedia(constraints, successCallback, errorCallback);
diff --git a/samples/web/content/getusermedia/volume/js/soundmeter.js b/samples/web/content/getusermedia/volume/js/soundmeter.js
new file mode 100644
index 0000000..1fe3d60
--- /dev/null
+++ b/samples/web/content/getusermedia/volume/js/soundmeter.js
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+// Meter class that generates a number correlated to audio volume.
+// The meter class itself displays nothing, but it makes the
+// instantaneous and time-decaying volumes available for inspection.
+// It also reports on the fraction of samples that were at or near
+// the top of the measurement range.
+function SoundMeter(context) {
+  this.context = context;
+  this.instant = 0.0;
+  this.slow = 0.0;
+  this.clip = 0.0;
+  this.script = context.createScriptProcessor(2048, 1, 1);
+  var that = this;
+  this.script.onaudioprocess = function(event) {
+    var input = event.inputBuffer.getChannelData(0);
+    var i;
+    var sum = 0.0;
+    var clipcount = 0;
+    for (i = 0; i < input.length; ++i) {
+      sum += input[i] * input[i];
+      if (Math.abs(input[i]) > 0.99) {
+        clipcount += 1;
+      }
+    }
+    that.instant = Math.sqrt(sum / input.length);
+    that.slow = 0.95 * that.slow + 0.05 * that.instant;
+    that.clip = clipcount / input.length;
+  };
+}
+
+SoundMeter.prototype.connectToSource = function(stream) {
+  console.log('SoundMeter connecting');
+  this.mic = this.context.createMediaStreamSource(stream);
+  this.mic.connect(this.script);
+  // necessary to make sample run, but should not be.
+  this.script.connect(this.context.destination);
+};
+
+SoundMeter.prototype.stop = function() {
+  this.mic.disconnect();
+  this.script.disconnect();
+};
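A minimal usage sketch, mirroring what volume/js/main.js above does: create a context, connect a SoundMeter to a microphone stream, and poll its fields:

navigator.getUserMedia = navigator.getUserMedia ||
  navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
var meterContext = new (window.AudioContext || window.webkitAudioContext)();
navigator.getUserMedia({ audio: true, video: false }, function(stream) {
  var meter = new SoundMeter(meterContext);
  meter.connectToSource(stream);
  setInterval(function() {
    console.log('instant:', meter.instant.toFixed(2),
      'slow:', meter.slow.toFixed(2), 'clip:', meter.clip);
  }, 200);
}, function(error) {
  console.log('navigator.getUserMedia error: ', error);
});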
diff --git a/samples/web/content/manual-test/constraints/index.html b/samples/web/content/manual-test/constraints/index.html
new file mode 100644
index 0000000..e2b8159
--- /dev/null
+++ b/samples/web/content/manual-test/constraints/index.html
@@ -0,0 +1,71 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+  <title>WebRTC GetUserMedia Constraints Manual Test</title>
+  <!-- Load the polyfill to switch-hit between Chrome and Firefox -->
+  <script type="text/javascript" src="../../../js/adapter.js"></script>
+  <script type="text/javascript" src="js/main.js"></script>
+  <link rel="stylesheet" type="text/css" href="../css/main.css">
+  <meta charset="utf-8">
+</head>
+<body>
+
+<div id="wrapper">
+  <div id="left">
+    <p>This page can create GetUserMedia <a href="http://goo.gl/qZQjV">
+    MediaStreamConstraints</a> that can be used on the<br/>
+    <a href="../peer2peer/index.html">PeerConnection Manual Test page.</a></p>
+    MediaStreamConstraints:<br/>
+    <textarea id="getusermedia-constraints" rows="10" cols="60"
+        readonly="readonly"></textarea>
+    <br/>
+
+    Audio<input type="checkbox" id="audio" checked />
+    Video<input type="checkbox" id="video" checked />
+
+    <h2>Video constraints</h2>
+    Only applicable if the video checkbox is checked above.
+    <h3>Mandatory</h3>
+    Min
+    <input type="text" id="mandatory-min-width" size="4"/>x
+    <input type="text" id="mandatory-min-height" size="4"/>
+    FPS: <input type="text" id="mandatory-min-fps" size="2"/>
+    Aspect ratio: <input type="text" id="mandatory-min-ar" size="2"/><br/>
+    Max
+    <input type="text" id="mandatory-max-width" size="4"/>x
+    <input type="text" id="mandatory-max-height" size="4"/>
+    FPS: <input type="text" id="mandatory-max-fps" size="2"/>
+    Aspect ratio: <input type="text" id="mandatory-max-ar" size="2"/>
+
+    <h3>Optional</h3>
+    Min
+    <input type="text" id="optional-min-width" size="4"/>x
+    <input type="text" id="optional-min-height" size="4"/>
+    FPS: <input type="text" id="optional-min-fps" size="2"/>
+    Aspect ratio: <input type="text" id="optional-min-ar" size="2"/><br/>
+    Max
+    <input type="text" id="optional-max-width" size="4"/>x
+    <input type="text" id="optional-max-height" size="4"/>
+    FPS: <input type="text" id="optional-max-fps" size="2"/>
+    Aspect ratio: <input type="text" id="optional-max-ar" size="2"/><br/>
+    <button id="re-request" onclick="doGetUserMedia();">
+      Request GetUserMedia
+    </button>
+    <h2>Messages</h2>
+    <pre id="messages"></pre>
+  </div>
+  <div>
+    <h2>Local Preview</h2>
+    <video width="320" height="240" id="local-view" autoplay="autoplay">
+        </video>
+  </div>
+</div>
+</body>
+</html>
diff --git a/samples/web/content/manual-test/constraints/js/main.js b/samples/web/content/manual-test/constraints/js/main.js
new file mode 100644
index 0000000..d4fc586
--- /dev/null
+++ b/samples/web/content/manual-test/constraints/js/main.js
@@ -0,0 +1,152 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+/**
+ * See http://dev.w3.org/2011/webrtc/editor/getusermedia.html for more
+ * information on getUserMedia.
+ */
+
+// Assigned by the getUserMedia callbacks below.
+var gLocalStream = null;
+var gRequestWebcamAndMicrophoneResult = null;
+
+/**
+ * Asks permission to use the webcam and mic from the browser.
+ */
+function doGetUserMedia() {
+  // Call into getUserMedia via the polyfill (adapter.js).
+  var constraints = getConstraints_();
+  var constraintsString = JSON.stringify(constraints, null, ' ');
+  $('getusermedia-constraints').innerHTML = constraintsString;
+  if (!getUserMedia) {
+    log_('Browser does not support WebRTC.');
+    return;
+  }
+  log_('Requesting getUserMedia with constraints: ' + constraintsString);
+  getUserMedia(constraints, getUserMediaOkCallback_,
+               getUserMediaFailedCallback_);
+}
+
+// Internals
+
+/**
+ * Builds a JavaScript constraints dictionary out of the selected options in the
+ * HTML controls on the page.
+ * @private
+ * @return {Object} A dictionary of constraints.
+ */
+function getConstraints_() {
+  var c = {};
+  c.audio = $('audio').checked;
+  if (!$('video').checked) {
+    c.video = false;
+  } else {
+    c.video = { mandatory: {}, optional: [] };
+    // Mandatory - min
+    if ($('mandatory-min-width').value != '') {
+      c.video.mandatory.minWidth = $('mandatory-min-width').value;
+    }
+    if ($('mandatory-min-height').value != '') {
+      c.video.mandatory.minHeight = $('mandatory-min-height').value;
+    }
+    if ($('mandatory-min-fps').value != '') {
+      c.video.mandatory.minFrameRate = $('mandatory-min-fps').value;
+    }
+    if ($('mandatory-min-ar').value != '') {
+      c.video.mandatory.minAspectRatio = $('mandatory-min-ar').value;
+    }
+    // Mandatory - max
+    if ($('mandatory-max-width').value != '') {
+      c.video.mandatory.maxWidth = $('mandatory-max-width').value;
+    }
+    if ($('mandatory-max-height').value != '') {
+      c.video.mandatory.maxHeight = $('mandatory-max-height').value;
+    }
+    if ($('mandatory-max-fps').value != '') {
+      c.video.mandatory.maxFrameRate = $('mandatory-max-fps').value;
+    }
+    if ($('mandatory-max-ar').value != '') {
+      c.video.mandatory.maxAspectRatio = $('mandatory-max-ar').value;
+    }
+    // Optional - min
+    if ($('optional-min-width').value != '') {
+      c.video.optional.push({ minWidth: $('optional-min-width').value });
+    }
+    if ($('optional-min-height').value != '') {
+      c.video.optional.push({ minHeight: $('optional-min-height').value });
+    }
+    if ($('optional-min-fps').value != '') {
+      c.video.optional.push({ minFrameRate: $('optional-min-fps').value });
+    }
+    if ($('optional-min-ar').value != '') {
+      c.video.optional.push({ minAspectRatio: $('optional-min-ar').value });
+    }
+    // Optional - max
+    if ($('optional-max-width').value != '') {
+      c.video.optional.push({ maxWidth: $('optional-max-width').value });
+    }
+    if ($('optional-max-height').value != '') {
+      c.video.optional.push({ maxHeight: $('optional-max-height').value });
+    }
+    if ($('optional-max-fps').value != '') {
+      c.video.optional.push({ maxFrameRate: $('optional-max-fps').value });
+    }
+    if ($('optional-max-ar').value != '') {
+      c.video.optional.push({ maxAspectRatio: $('optional-max-ar').value });
+    }
+  }
+  return c;
+}
+
+/**
+ * @private
+ * @param {MediaStream} stream Media stream.
+ */
+function getUserMediaOkCallback_(stream) {
+  gLocalStream = stream;
+  var videoTag = $('local-view');
+  attachMediaStream(videoTag, stream);
+
+  // Due to crbug.com/110938 the size is 0 when onloadedmetadata fires.
+  // videoTag.onloadedmetadata = updateVideoTagSize_(videoTag);
+  // Use setTimeout as a workaround for now.
+  setTimeout(function() { updateVideoTagSize_(videoTag); }, 500);
+  gRequestWebcamAndMicrophoneResult = 'ok-got-stream';
+}
+
+/**
+ * @private
+ * @param {Object} videoTag The video tag to update.
+ */
+function updateVideoTagSize_(videoTag) {
+  // Don't update if sizes are 0 (happens for Chrome M23).
+  if (videoTag.videoWidth > 0 && videoTag.videoHeight > 0) {
+    log_('Set video tag width and height: ' + videoTag.videoWidth + 'x' +
+      videoTag.videoHeight);
+    videoTag.width = videoTag.videoWidth;
+    videoTag.height = videoTag.videoHeight;
+  }
+}
+
+/**
+ * @private
+ * @param {NavigatorUserMediaError} error Error containing details.
+ */
+function getUserMediaFailedCallback_(error) {
+  log_('Failed with error: ' + error);
+}
+
+var $ = function(id) {
+  return document.getElementById(id);
+};
+
+/**
+ * Simple logging function.
+ * @private
+ * @param {string} message Message to print.
+ */
+function log_(message) {
+  console.log(message);
+  $('messages').innerHTML += message + '<br>';
+}
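
As an example of what getConstraints_() produces: a mandatory minimum of
640x480 at 30 fps plus an optional maximum width of 1280, with both
checkboxes ticked, yields a dictionary like this (the values stay strings,
since they come straight from the text inputs):

    {
      "audio": true,
      "video": {
        "mandatory": {
          "minWidth": "640",
          "minHeight": "480",
          "minFrameRate": "30"
        },
        "optional": [
          {"maxWidth": "1280"}
        ]
      }
    }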
diff --git a/samples/web/content/manual-test/css/main.css b/samples/web/content/manual-test/css/main.css
new file mode 100644
index 0000000..9b037b5
--- /dev/null
+++ b/samples/web/content/manual-test/css/main.css
@@ -0,0 +1,247 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+.drop-down {
+  width: 65%;
+  font-size: 10px;
+  white-space: nowrap;
+}
+
+.constraints {
+  width: 75%;
+  height: auto;
+  position: absolute;
+  overflow: scroll;
+  display: none;
+}
+
+.float-left {
+  float: left;
+  width: 100%;
+}
+
+.float-clear-left {
+  float: left;
+  clear: left;
+  width: 100%;
+}
+
+.top-border {
+  border-top: 4px solid grey;
+}
+
+.bottom-border {
+  border-bottom: 4px solid grey;
+}
+
+#messages {
+  word-wrap: break-word;
+  white-space: pre-wrap;
+  font-size: 0.7em;
+}
+
+#audio-source {
+  float: left;
+  width: 50%;
+}
+
+#video-source {
+  margin-left: 50%;
+  width: 50%;
+}
+
+#video-res {
+  width: 30%;
+}
+
+#signal-server {
+  float: left;
+  width: 60%;
+}
+
+#pc-server {
+  width: 98%;
+  margin-left: 0.1em;
+  margin-top: 0.1em;
+}
+
+#peer-id-container {
+  margin-left: 60%;
+  height: 1.5em;
+}
+
+#peer-id {
+  margin-top: 0.1em;
+  width: 7em;
+}
+
+#pc-server-label {
+  width: 15%;
+}
+
+#pc-server-container {
+  position: absolute;
+  margin: 0 0 0 12%;
+  width: 40%;
+  overflow: hidden;
+  height: 1.5em;
+}
+
+#pc-constraints-left {
+  margin: 0.7em 0 0 0;
+  width: 60%;
+}
+
+#call {
+  float: left;
+  margin: 0.7em 0 0 0;
+}
+
+.small-input {
+  width: 3em;
+}
+
+.medium-input {
+  width: 6em;
+}
+
+a {
+  color: lightBlue;
+  font-weight: 300;
+  text-decoration: none;
+}
+
+a:hover {
+  color: blue;
+  text-decoration: underline;
+}
+
+body {
+  font-family: 'Roboto', sans-serif;
+  margin: 0;
+  padding: 1em;
+  word-wrap: break-word;
+}
+
+button {
+  background-color: grey;
+  border: none;
+  border-radius: 1px;
+  color: white;
+  font-family: 'Roboto', sans-serif;
+  font-size: 0.8em;
+  margin: 0 0 1em 0;
+  padding: 0.2em;
+}
+
+button:hover {
+  background-color: darkGrey;
+}
+
+button.green {
+  background: darkGreen;
+  color: white;
+}
+
+button.green:hover {
+  background: forestGreen;
+  color: white;
+}
+
+button.red {
+  background: darkRed;
+  color: white;
+}
+
+button.red:hover {
+  background: fireBrick;
+}
+
+button.pressed {
+  background-color: black;
+}
+
+div#container {
+  margin: 0 auto 0 auto;
+  max-width: 40em;
+  padding: 0 1.5em 1.3em 1.5em;
+  z-index: 2;
+  position: relative;
+}
+
+h2 {
+  color: black;
+  font-size: 1em;
+  font-weight: 700;
+  line-height: 1.2em;
+  margin: 0 0 0.8em 0;
+}
+
+div {
+  background: white;
+}
+
+html {
+  /* avoid annoying page width change
+  when moving from the home page.*/
+  overflow-y: scroll;
+}
+
+select {
+  margin: 0 1em 1em 0;
+  position: relative;
+  top: -1px;
+}
+
+video {
+  background: black;
+  width: 100%;
+}
+
+#log {
+  float: left;
+  position: fixed;
+  overflow: auto;
+  top: 0;
+  left: 0;
+  width: 20%;
+  padding: 16px;
+  word-wrap: break-word;
+  z-index: 1;
+}
+
+@media screen and (max-width: 1200px) {
+  div#log {
+    float: none;
+    width: 100%;
+    position: inherit;
+    padding: 0;
+  }
+  button {
+    padding: 0.7em;
+  }
+  button:active {
+    background: black;
+  }
+  #pc-server-label {
+    margin-top: 5px;
+  }
+  #pc-server-container {
+    margin-top: 5px;
+  }
+}
diff --git a/samples/web/content/manual-test/multiple-peerconnections/index.html b/samples/web/content/manual-test/multiple-peerconnections/index.html
new file mode 100644
index 0000000..fc2e59f
--- /dev/null
+++ b/samples/web/content/manual-test/multiple-peerconnections/index.html
@@ -0,0 +1,280 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+  <title>WebRTC Multi-PeerConnection Test</title>
+  <script type="text/javascript">
+  // This file can create an arbitrary number of peer connection calls, each
+  // with an arbitrary number of auto-echoing data channels. It can run with
+  // two separate cameras.
+
+  // Our two local video / audio streams.
+  var gLocalStream1 = null;
+  var gLocalStream2 = null;
+
+  // The number of remote view windows (2x number of calls).
+  var gNumRemoteViews = 0;
+
+  // Maps connection id -> { connection1, connection2 }.
+  var gAllConnections = [];
+  var gNumConnections = 0;
+
+  // Maps data channel id -> sending channel.
+  // Note: there can be many data channels per connection id.
+  var gSendingDataChannels = [];
+  var gTotalNumSendChannels = 0;
+
+  function startTest() {
+    navigator.webkitGetUserMedia(
+        {video: true, audio: true},
+        function(localStream) {
+          gLocalStream1 = localStream;
+          play(localStream, 'local-view-1');
+        },
+        getUserMediaFailedCallback);
+    navigator.webkitGetUserMedia(
+        {video: true, audio: true},
+        function(localStream) {
+          gLocalStream2 = localStream;
+          play(localStream, 'local-view-2');
+        },
+        getUserMediaFailedCallback);
+  }
+
+  function playStreamInNewRemoteView(stream, peerNumber) {
+    console.log('Remote stream to connection ' + peerNumber +
+        ': ' + stream.label);
+    gNumRemoteViews++;
+    var viewName = 'remote-view-' + gNumRemoteViews;
+    addRemoteView(viewName, peerNumber);
+    play(stream, viewName);
+  }
+
+  function addRemoteView(elementName, peerNumber) {
+    var remoteViews = $('remote-views-' + peerNumber);
+    remoteViews.innerHTML +=
+      '<tr><td><video width="320" height="240" id="' + elementName + '" ' +
+      'autoplay="autoplay"></video></td></tr>';
+  }
+
+  function play(stream, videoElement) {
+    var streamUrl = URL.createObjectURL(stream);
+    $(videoElement).src = streamUrl;
+  }
+
+  function getUserMediaFailedCallback(error) {
+    console.log('getUserMedia request failed with code ' + error.code);
+  }
+
+  function call() {
+    var connection1 = new webkitRTCPeerConnection(null,
+        {optional:[{RtpDataChannels: true}]});
+    connection1.addStream(gLocalStream1);
+
+    var connection2 = new webkitRTCPeerConnection(
+        null, {optional:[{RtpDataChannels: true}]});
+    connection2.addStream(gLocalStream2);
+    connection2.onicecandidate = function(event) {
+      if (event.candidate) {
+        var candidate = new RTCIceCandidate(event.candidate);
+        connection1.addIceCandidate(candidate);
+      }
+    };
+    connection1.onicecandidate = function(event) {
+      if (event.candidate) {
+        console.log('Ice candidate: ' + event.candidate);
+        var candidate = new RTCIceCandidate(event.candidate);
+        connection2.addIceCandidate(candidate);
+      }
+    };
+    connection1.onaddstream = function(event) {
+      playStreamInNewRemoteView(event.stream, 1);
+      //addDataChannelAnchor(connection1, connection2);
+    };
+    connection2.onaddstream = function(event) {
+      playStreamInNewRemoteView(event.stream, 2);
+    };
+    // TODO(phoglund): hack to work around
+    // https://code.google.com/p/webrtc/issues/detail?id=1203. When it is fixed,
+    // uncomment the negotiate call, remove the addDataChannelAnchor call
+    // below and uncomment the line in connection1.onaddstream. Also remove
+    // the notice at the top of the HTML!
+    // negotiate(connection1, connection2);
+    addDataChannelAnchor(connection1, connection2);
+  }
+
+  function negotiate(connection1, connection2) {
+    connection1.createOffer(function(offer) {
+      connection1.setLocalDescription(offer);
+      connection2.setRemoteDescription(offer);
+      connection2.createAnswer(function(answer) {
+        console.log('Created answer ' + answer);
+        connection2.setLocalDescription(answer);
+        connection1.setRemoteDescription(answer);
+      });
+    });
+  }
+
+  function addDataChannelAnchor(connection1, connection2) {
+    var connectionId = gNumConnections++;
+    gAllConnections[connectionId] = { connection1: connection1,
+                                      connection2: connection2 };
+    addOneAnchor(1, connectionId);
+    addOneAnchor(2, connectionId);
+  }
+
+  function makeDataChannelAnchorName(peerId, connectionId) {
+    return 'data-channels-peer' + peerId + '-' + connectionId;
+  }
+
+  // This adds a target table we'll add our input fields to later.
+  function addOneAnchor(peerId, connectionId) {
+    var newButtonId = 'add-data-channel-' + connectionId;
+    var remoteViewContainer = 'remote-views-' + peerId;
+    $(remoteViewContainer).innerHTML +=
+        '<tr><td><button id="' + newButtonId + '" ' +
+        'onclick="addDataChannel(' + connectionId + ')">' +
+        '    Add Echoing Data Channel</button></td></tr>';
+
+    var anchorName = makeDataChannelAnchorName(peerId, connectionId);
+    $(remoteViewContainer).innerHTML +=
+        '<tr><td><table id="' + anchorName + '"></table></td></tr>';
+  }
+
+  // Called by clicking Add Echoing Data Channel.
+  function addDataChannel(connectionId) {
+    var dataChannelId = gTotalNumSendChannels++;
+
+    var peer1SinkId = addDataChannelSink(1, connectionId, dataChannelId);
+    var peer2SinkId = addDataChannelSink(2, connectionId, dataChannelId);
+    var connections = gAllConnections[connectionId];
+
+    configureChannels(connections.connection1, connections.connection2,
+                      peer1SinkId, peer2SinkId, dataChannelId);
+
+    // Add the field the user types in, and a
+    // dummy field so everything lines up nicely.
+    addDataChannelSource(1, connectionId, dataChannelId);
+    addDisabledInputField(2, connectionId, '(the above is echoed)');
+
+    negotiate(connections.connection1, connections.connection2);
+  }
+
+  function configureChannels(connection1, connection2, targetFor1, targetFor2,
+                             dataChannelId) {
+    // Label the channel so we know where to send the data later in dispatch.
+    var sendChannel = connection1.createDataChannel(
+        targetFor2, { reliable: false });
+    sendChannel.onmessage = function(messageEvent) {
+      $(targetFor1).value = messageEvent.data;
+    };
+
+    gSendingDataChannels[dataChannelId] = sendChannel;
+
+    connection2.ondatachannel = function(event) {
+      // The channel got created by a message from a sending channel: hook this
+      // new receiver channel up to dispatch and then echo any messages.
+      event.channel.onmessage = dispatchAndEchoDataMessage;
+    };
+  }
+
+  function addDataChannelSink(peerNumber, connectionId, dataChannelId) {
+    var sinkId = 'data-sink-peer' + peerNumber + '-' + dataChannelId;
+    var anchor = $(makeDataChannelAnchorName(peerNumber, connectionId));
+    anchor.innerHTML +=
+      '<tr><td><input type="text" id="' + sinkId + '" disabled/></td></tr>';
+    return sinkId;
+  }
+
+  function addDataChannelSource(peerNumber, connectionId, dataChannelId) {
+    var sourceId = 'data-source-peer' + peerNumber + '-' + dataChannelId;
+    var anchor = $(makeDataChannelAnchorName(peerNumber, connectionId));
+    anchor.innerHTML +=
+      '<tr><td><input type="text" id="' + sourceId + '"' +
+          ' onchange="userWroteSomethingIn(\'' + sourceId + '\', ' +
+          dataChannelId + ');"/></td></tr>';
+  }
+
+  function userWroteSomethingIn(sourceId, dataChannelId) {
+    var source = $(sourceId);
+    var dataChannel = gSendingDataChannels[dataChannelId];
+    dataChannel.send(source.value);
+  }
+
+  function addDisabledInputField(peerNumber, connectionId, text) {
+    var anchor = $(makeDataChannelAnchorName(peerNumber, connectionId));
+    anchor.innerHTML +=
+      '<tr><td><input type="text" value="' + text + '" disabled/></td></tr>';
+  }
+
+  function dispatchAndEchoDataMessage(messageEvent) {
+    // Since we labeled the channel earlier, we know to which input element
+    // we should send the data.
+    var dataChannel = messageEvent.currentTarget;
+    var targetInput = $(dataChannel.label);
+    targetInput.value = messageEvent.data;
+    dataChannel.send('echo: ' + messageEvent.data);
+  }
+
+  window.onload = function() {
+    startTest();
+  };
+
+  $ = function(id) {
+    return document.getElementById(id);
+  };
+  </script>
+</head>
+<body>
+  <table border="0">
+    <tr>
+      <td colspan="2">
+        Notes:
+        <ul>
+          <li>Due to https://code.google.com/p/webrtc/issues/detail?id=1203,
+          you must create a data channel to actually get a call negotiated. Add
+          one call at a time and click "add echoing data channel" for each and
+          you'll be fine.</li>
+          <li>For unknown reasons, adding a new data channel will clear the
+          input field contents for all other channels on the same call. This is
+          not the data channel's fault though.</li>
+        </ul>
+      </td>
+    </tr>
+    <tr>
+      <td>Local Preview for Peer 1</td>
+      <td>Local Preview for Peer 2</td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="local-view-1"
+          autoplay="autoplay"></video></td>
+      <td><video width="320" height="240" id="local-view-2"
+          autoplay="autoplay"></video></td>
+    </tr>
+    <tr>
+      <td><button id="add-call" onclick="call();">Add Call</button></td>
+    </tr>
+    <tr>
+      <td>
+        <table id="remote-views-1">
+          <tr>
+            <td>Remote (Incoming to Peer 1)</td>
+          </tr>
+        </table>
+      </td>
+      <td>
+        <table id="remote-views-2">
+          <tr>
+            <td>Remote (Incoming to Peer 2)</td>
+          </tr>
+        </table>
+      </td>
+    </tr>
+  </table>
+</body>
+</html>
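
The negotiate() function above uses the success-only form of the legacy
callback API; a slightly more defensive sketch of the same offer/answer
dance, with hypothetical error handlers added, would look like this:

    function negotiateWithErrorHandling(connection1, connection2) {
      connection1.createOffer(function(offer) {
        connection1.setLocalDescription(offer);
        connection2.setRemoteDescription(offer);
        connection2.createAnswer(function(answer) {
          connection2.setLocalDescription(answer);
          connection1.setRemoteDescription(answer);
        }, function(error) {
          console.log('createAnswer failed: ' + error);
        });
      }, function(error) {
        console.log('createOffer failed: ' + error);
      });
    }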
diff --git a/samples/web/content/manual-test/peer2peer-iframe/index.html b/samples/web/content/manual-test/peer2peer-iframe/index.html
new file mode 100644
index 0000000..8b5ab7a
--- /dev/null
+++ b/samples/web/content/manual-test/peer2peer-iframe/index.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+  <title>WebRTC IFRAME peer2peer test page</title>
+  <meta charset="utf-8">
+</head>
+<body>
+  <iframe height="1200px" width="100%" src="../peer2peer/index.html"></iframe>
+</body>
+</html>
diff --git a/samples/web/content/manual-test/peer2peer/help.html b/samples/web/content/manual-test/peer2peer/help.html
new file mode 100644
index 0000000..ddddf54
--- /dev/null
+++ b/samples/web/content/manual-test/peer2peer/help.html
@@ -0,0 +1,112 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+  <title>WebRTC PeerConnection Manual Test Help Page</title>
+  <link rel="stylesheet" type="text/css" href="../css/stylesheet.css">
+  <meta charset="utf-8">
+</head>
+<body>
+
+<h1>WebRTC PeerConnection Manual Test Help Page</h1>
+<p>
+  The test page is intended for testing WebRTC calls.
+
+  This is how you set up a normal call:
+</p>
+<ol>
+  <li>Open this page in two tabs.</li>
+  <li>Start the peerconnection server. Click on the question mark next
+    to the 'server' field for instruction on how to do that. The easiest
+    thing is to start it on localhost, but you can start it on any
+    machine you like and connect to hostname:8888.</li>
+  <li>Click the Connect button in both tabs.</li>
+  <li>Click the Call:Negotiate button in one of the tabs. You should see a bunch
+    of printouts when this happens. Note that no streams are sent to
+    begin with (although you could run steps 5-6 before this step to get streams
+    even in the initial call).</li>
+  <li>Grant media access using the checkboxes and Request button.</li>
+  <li>Add the local stream by clicking the "Add" button, in both tabs.</li>
+  <li>Now you must re-negotiate the call by clicking on Negotiate again.</li>
+  <li>You should now have a call up and both sides should be receiving
+    media data (depending on what access you granted on the respective
+    pages).</li>
+  <li>You can now choose to stop, re-request, re-send or disable streams
+    in any way you like, or hang up and re-start the call. You don't
+    need to disconnect: that's done automatically when you close the
+    page. Hanging up is NOT done automatically though.</li>
+</ol>
+
+<p>
+  To create a data channel:
+</p>
+<ol>
+  <li>Make sure Chrome is started with the --enable-data-channels flag.</li>
+  <li>Follow the instructions above to connect two tabs to a
+   peerconnection_server.</li>
+  <li>Click the Data channel: Create button in one tab. Notice the status
+  changes to "connecting".</li>
+  <li>Click the Call:Negotiate button. You should see the status change to
+  "open" in both tabs. </li>
+  <li>Enter text in the textbox next to the Send data button and then click Send
+   data. Notice the text is received in the remote tab in the Received on data
+  channel text box. Data can be sent in both directions.</li>
+  <li>To close the channel press the Close button followed by Negotiate. Notice
+  the status changes to "closed".</li>
+</ol>
+
+<p>Detailed descriptions:</p>
+<ul>
+  <li>Connect - once a connection is established, you generally won't
+    need to click this button again. Connecting really isn't something
+    related to WebRTC as such, it's just the signalling solution.</li>
+  <li>Note that if more than two users/machines have established a
+    connection to the same PC server, you will get an error when
+    pressing this button. The test is hard-coded to only allow 2 peers
+    on the server at the same time.</li>
+  <li>Pressing the Add button for local streams will in effect add
+    the current local stream, such as it is, to the current
+    peerconnection.</li>
+  <li>If you request user media again, it will overwrite the current
+    local stream with the new one. This means that pressing Add will
+    add the stream you just got from the request. The code will not
+    attempt to stop or remove the previous stream from the
+    peerconnection, so depending on peerconnection's semantics the old
+    stream will remain with the peerconnection (perhaps the streams will
+    be sent simultaneously?)</li>
+  <li>Hang Up will clear away peer connections on both sides, and a new
+    call can be started if desired. The peers remain connected to the
+    peerconnection server.</li>
+  <li>The Toggle buttons will set the .enabled properties on the first
+    video and audio track for the local or remote stream, respectively.
+    This is effectively a temporary "mute" for the streams.</li>
+  <li>Stop terminates a stream, which means it will no longer send any
+    more data.</li>
+  <li>Remove will remove the current local stream from the current
+    peerconnection. For instance, you should be able to send a stream,
+    remove it, re-request a new stream and send that within the same
+    call. Note that re-requesting user media overwrites the current
+    media stream, so the reverse is not possible.</li>
+  <li>The PeerConnection constraints field can pass in constraints for the
+    peerconnection to be established. The code will attempt to eval what you
+    write in this field and pass the result wherever constraints are needed.
+    [experimental]</li>
+  <li>The Force Opus checkbox will remove all codecs except OPUS for all
+    outgoing messages sent by this page. Note that this ONLY means that
+    we are guaranteed to send Opus to the other side; it does NOT mean
+    that the other side will necessarily send Opus to us. To do that,
+    you need to check the box on the other side too. You can either
+    check the box before the call, or check the box and then re-send the
+    local stream.</li>
+</ul>
+
+</body>
+</html>
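
The Force Opus option described above works by rewriting the outgoing SDP
before it is sent. As a rough, hypothetical illustration of that kind of
transform (not this page's actual implementation), one could keep only the
Opus payload type in the audio m-line:

    function forceOpusSketch(sdp) {
      var lines = sdp.split('\r\n');
      var opusPayloadType = null;
      // Find the Opus payload type, e.g. 'a=rtpmap:111 opus/48000/2'.
      lines.forEach(function(line) {
        var match = line.match(/^a=rtpmap:(\d+) opus\/48000/i);
        if (match) {
          opusPayloadType = match[1];
        }
      });
      if (!opusPayloadType) {
        return sdp;  // No Opus offered; leave the SDP untouched.
      }
      return lines.map(function(line) {
        if (line.indexOf('m=audio') === 0) {
          // 'm=audio <port> <proto> <pt> <pt> ...': keep only the Opus PT.
          var parts = line.split(' ');
          return parts.slice(0, 3).concat(opusPayloadType).join(' ');
        }
        return line;
      }).join('\r\n');
    }

A real transform would also drop the rtpmap and fmtp lines of the removed
codecs; this sketch only trims the m-line.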
diff --git a/samples/web/content/manual-test/peer2peer/index.html b/samples/web/content/manual-test/peer2peer/index.html
new file mode 100644
index 0000000..7ab1ba0
--- /dev/null
+++ b/samples/web/content/manual-test/peer2peer/index.html
@@ -0,0 +1,203 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <title>WebRTC peer2peer test page</title>
+
+  <!-- Load the polyfill to switch-hit between Chrome and Firefox -->
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+  <link rel="stylesheet" href="../css/main.css">
+
+  <meta charset="utf-8">
+
+</head>
+<body>
+
+<div id="wrapper">
+
+  <div id="container">
+
+    <div class="video-area">
+      <div>
+        <h2>Remote Video</h2>
+        <video width="640" height="360" id="remote-view" autoplay="autoplay">
+            </video>
+        <div>
+        <button id="remote-view-size"
+            onclick="updateVideoElementSize('remote-view')">Stream size</button>
+        <button onclick="updateVideoElementSize('remote-view', 640, 360);">
+            640x360</button>
+        </div>
+
+        <h2>Local Preview</h2>
+        <video width="320" height="180" id="local-view" autoplay="autoplay"
+            muted></video>
+        <div>
+          <button id="local-view-size"
+              onclick="updateVideoElementSize('local-view')">Stream size
+                </button>
+          <button onclick="updateVideoElementSize('local-view', 320, 180);">
+              320x180</button>
+        </div>
+      </div>
+    </div>
+
+    <div id="middle">
+
+      <div class="top-border">
+        <h2>GetUserMedia</h2>
+        <div class="float-left">
+          <textarea class="constraints" id="getusermedia-constraints">
+            </textarea>
+          <label>Audio<input type="checkbox" id="audio" checked
+              onclick="updateGetUserMediaConstraints();"/></label>
+          <label>Video<input type="checkbox" id="video" checked
+              onclick="updateGetUserMediaConstraints();"/></label>
+          <label>Screen capture<input type="checkbox" id="screencapture"
+              onclick="updateGetUserMediaConstraints();"/></label>
+          <button onclick="editConstraints('getusermedia-constraints');">
+              Edit constraints</button>
+          <button class="green" id="re-request"
+              onclick="getUserMediaFromHere();">Request GetUserMedia</button>
+        </div>
+        <div id="audio-source">
+          <label>Audio source <select class="drop-down" id="audiosrc"
+              onchange="updateGetUserMediaConstraints();"></select></label>
+          <label>Auto<input type="checkbox" id="get-devices-onload"></label>
+          <button id="get-devices" onclick="getDevices();">
+              Get devices</button>
+        </div>
+        <div id="video-source">
+          <label>Video source <select class="drop-down" id="videosrc"
+              onchange="updateGetUserMediaConstraints();"></select></label>
+          <div id="res-buttons">
+            <button id="video-hd">HD</button>
+            <button id="video-vga">VGA</button>
+            <button id="video-qvga">QVGA</button>
+            <span id="local-res"></span>
+          </div>
+        </div>
+      </div>
+
+      <div class="top-border float-left">
+        <h2>PeerConnection</h2>
+        <div>
+          <div id="pc-server-label" class="float-left">
+            <label>Server [<a href="" onclick="showServerHelp();">?</a>]:
+                </label>
+          </div>
+          <div id="pc-server-container">
+            <input type="text" id="pc-server" >
+          </div>
+        </div>
+        <div id="peer-id-container">
+          <label>Peer ID:</label>
+          <input type="text" id="peer-id" disabled />
+          <button class="green" id="connect" onclick="connectFromHere();">
+              Connect</button>
+        </div>
+      </div>
+      <div id="pc-constraints-container">
+        <div id="pc-constraints-left" class="float-left">
+          <label>Constraints:</label>
+          <textarea class="constraints" id="pc-constraints"></textarea>
+          <button onclick="editConstraints('pc-constraints');">Connection</button>
+          <textarea class="constraints" id="createoffer-constraints">{}</textarea>
+          <button onclick="editConstraints('createoffer-constraints');">
+              createOffer</button>
+          <textarea class="constraints" id="createanswer-constraints">{}
+              </textarea>
+          <button onclick="editConstraints('createanswer-constraints');">
+              createAnswer</button>
+        </div>
+        <div id="call">
+          <label>Call:</label>
+          <button class="green" onclick="negotiateCallFromHere();">Negotiate
+              </button>
+          <button class="red" onclick="hangUpFromHere();">Hang up</button>
+        </div>
+      </div>
+
+      <div class="top-border float-left">
+        <h2>Media streams</h2>
+        <div class="float-left">
+          <label>Local Stream:</label>
+          <button class="green" onclick="addLocalStreamFromHere();">Add
+              </button>
+          <button class="red" onclick="removeLocalStreamFromHere();">Remove
+              </button>
+          <button class="red" onclick="stopLocalFromHere();">Stop</button>
+          <button onclick="toggleLocalVideoFromHere();">Toggle Video</button>
+          <button onclick="toggleLocalAudioFromHere();">Toggle Audio</button>
+        </div>
+        <div class="float-clear-left">
+          <label>Remote Stream:</label>
+          <button onclick="toggleRemoteVideoFromHere();">Toggle Video</button>
+          <button onclick="toggleRemoteAudioFromHere();">Toggle Audio</button>
+        </div>
+        <div class="float-clear-left">
+          <label>Data Channel:</label>
+          <button onclick="createDataChannelFromHere();">Create</button>
+          <button onclick="closeDataChannelFromHere();">Close</button>
+          <label>RTP</label>
+          <input type="checkbox" id="data-channel-type-rtp"
+              onclick="setPeerConnectionConstraints();">
+          <label>status:</label>
+          <input type="text" id="data-channel-status" size="10"
+              value="not created" disabled="true"/>
+          <label>ID:</label>
+          <input type="text" id="data-channel-id" class="small-input"
+              disabled="true"/>
+          <div>
+            <label>Send on data channel:</label>
+            <input type="text" id="data-channel-send" class="medium-input"
+                size="10"/>
+            <button onclick="sendDataFromHere();">Send data</button>
+            <label>Received data:</label>
+            <input type="text" id="data-channel-receive"
+                size="10" disabled="true"/>
+          </div>
+        </div>
+      </div>
+
+      <div class="top-border float-clear-left">
+        <h2>DTMF Sender</h2>
+        <button onclick="createDtmfSenderFromHere();">Create</button>
+        <label>tones:</label>
+        <input type="text" id="dtmf-tones" class="medium-input" value="123,abc"/>
+        <label>dur(ms):</label>
+        <input type="text" id="dtmf-tones-duration" class="small-input"
+            value="100" />
+        <label>gap(ms):</label>
+        <input type="text" id="dtmf-tones-gap" class="small-input" value="50"/>
+        <button onclick="insertDtmfFromHere();">Send</button>
+      </div>
+
+      <div class="top-border bottom-border float-clear-left">
+        <h2>Options</h2>
+        <label>Force iSAC</label>
+        <input type="checkbox" id="force-isac" onclick="forceIsacChanged();"/>
+        <label>CPU overuse</label>
+        <input type="checkbox" id="cpuoveruse-detection"
+            onclick="setPeerConnectionConstraints();" checked="true"/>
+      </div>
+    </div>
+
+    <div id="log">
+      <h2>Log</h2>
+      <button onclick="clearLog()">Clear logs</button>
+      <p id="messages" class="small-font"></p>
+    </div>
+
+  </div>
+</div>
+</body>
+</html>
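
The createOffer and createAnswer constraints textareas default to {}. A
typical legacy-API value to paste in, assuming you want to receive audio and
video even before adding a local stream, would be:

    {
      "mandatory": {
        "OfferToReceiveAudio": true,
        "OfferToReceiveVideo": true
      }
    }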
diff --git a/samples/web/content/manual-test/peer2peer/js/main.js b/samples/web/content/manual-test/peer2peer/js/main.js
new file mode 100644
index 0000000..4d32218
--- /dev/null
+++ b/samples/web/content/manual-test/peer2peer/js/main.js
@@ -0,0 +1,1283 @@
+//  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+//  Use of this source code is governed by a BSD-style license
+//  that can be found in the LICENSE file in the root of the source
+//  tree.
+
+// See http://dev.w3.org/2011/webrtc/editor/getusermedia.html for more
+// information on getUserMedia. See
+// http://dev.w3.org/2011/webrtc/editor/webrtc.html for more information on
+// peerconnection and webrtc in general.
+
+'use strict';
+
+// TODO(jansson) rewrite to classes.
+// Global namespace object.
+var global = {};
+global.transformOutgoingSdp = function(sdp) { return sdp; };
+// Default getUserMedia video resolution.
+global.videoWidth = 1280;
+global.videoHeight = 720;
+
+// We need a STUN server for some API calls.
+var STUN_SERVER = 'stun.l.google.com:19302';
+
+// Used as a shortcut for finding DOM elements by ID.
+// @param {string} id is a case-sensitive string representing the unique ID of
+// the element being sought.
+// @return {object} The element with the given id, or null if none exists.
+var $ = function(id) {
+  return document.getElementById(id);
+};
+
+// Prepopulates the constraints from JS into the UI, enumerates the devices
+// available to getUserMedia and registers elements to be backed by local
+// storage.
+window.onload = function() {
+  hookupDataChannelCallbacks_();
+  hookupDtmfSenderCallback_();
+  updateGetUserMediaConstraints();
+  setupLocalStorageFieldValues();
+  acceptIncomingCalls();
+  setPeerConnectionConstraints();
+  if ($('get-devices-onload').checked === true) {
+    getDevices();
+  }
+  // Register click handlers for the resolution buttons.
+  registerResButtonsEvents();
+};
+
+// Disconnect before the tab is closed.
+window.onbeforeunload = function() {
+  disconnect_();
+};
+
+// Handles the resolution button events.
+function registerResButtonsEvents() {
+  var lastResButtonPressed;
+  var elementIdAndResolutions = [
+    ['video-qvga', 320, 180],
+    ['video-vga', 640, 360],
+    ['video-hd', 1280, 720]
+  ];
+
+  function setResolution(elementAndRes) {
+    $(elementAndRes[0]).addEventListener('click', function() {
+      global.videoWidth = elementAndRes[1];
+      global.videoHeight = elementAndRes[2];
+      $(elementAndRes[0]).className = 'pressed';
+      if (typeof lastResButtonPressed !== 'undefined') {
+        lastResButtonPressed.className = '';
+      }
+      lastResButtonPressed = $(elementAndRes[0]);
+      updateGetUserMediaConstraints();
+    }, false );
+  }
+
+  for (var i in elementIdAndResolutions) {
+    setResolution(elementIdAndResolutions[i]);
+  }
+}
+
+// TODO (jansson) Set up events using addEventListener; applies in general.
+// A list of element id's to be registered for local storage.
+function setupLocalStorageFieldValues() {
+  registerLocalStorage_('pc-server');
+  registerLocalStorage_('get-devices-onload');
+}
+
+// Public HTML functions
+
+// The *Here functions are called from peer2peer.html and will make calls
+// into our underlying JavaScript library with the values from the page
+// (have to be named differently to avoid name clashes with existing functions).
+/* exported getUserMediaFromHere */
+function getUserMediaFromHere() {
+  var constraints = $('getusermedia-constraints').value;
+  try {
+    doGetUserMedia_(constraints);
+  } catch (exception) {
+    print_('getUserMedia says: ' + exception);
+  }
+}
+/* exported editConstraints */
+function editConstraints(elementId) {
+  $(elementId).style.display = 'inline';
+  $(elementId).style.height = '400px';
+  $(elementId).style.zIndex = '9';
+  $(elementId).focus();
+  $(elementId).onblur = function() {
+      $(elementId).style.display = 'none';
+  };
+}
+
+/* exported connectFromHere */
+function connectFromHere() {
+  var server = $('pc-server').value;
+  if ($('peer-id').value === '') {
+    // Generate a random name to distinguish us from other tabs:
+    $('peer-id').value = 'peer_' + Math.floor(Math.random()*10000);
+    print_('Our name from now on will be ' + $('peer-id').value);
+  }
+  connect(server, $('peer-id').value);
+}
+
+/* exported negotiateCallFromHere */
+function negotiateCallFromHere() {
+  // Set the global variables with values from our UI.
+  setCreateOfferConstraints(getEvaluatedJavaScript_(
+      $('createoffer-constraints').value));
+  setCreateAnswerConstraints(getEvaluatedJavaScript_(
+      $('createanswer-constraints').value));
+
+  ensureHasPeerConnection_();
+  negotiateCall_();
+}
+
+/* exported addLocalStreamFromHere */
+function addLocalStreamFromHere() {
+  ensureHasPeerConnection_();
+  addLocalStream();
+}
+
+/* exported removeLocalStreamFromHere */
+function removeLocalStreamFromHere() {
+  removeLocalStream();
+}
+
+/* exported hangUpFromHere */
+function hangUpFromHere() {
+  hangUp();
+  acceptIncomingCalls();
+}
+
+/* exported toggleRemoteVideoFromHere */
+function toggleRemoteVideoFromHere() {
+  toggleRemoteStream(function(remoteStream) {
+    return remoteStream.getVideoTracks()[0];
+  }, 'video');
+}
+
+/* exported toggleRemoteAudioFromHere */
+function toggleRemoteAudioFromHere() {
+  toggleRemoteStream(function(remoteStream) {
+    return remoteStream.getAudioTracks()[0];
+  }, 'audio');
+}
+/* exported toggleLocalVideoFromHere */
+function toggleLocalVideoFromHere() {
+  toggleLocalStream(function(localStream) {
+    return localStream.getVideoTracks()[0];
+  }, 'video');
+}
+
+/* exported toggleLocalAudioFromHere */
+function toggleLocalAudioFromHere() {
+  toggleLocalStream(function(localStream) {
+    return localStream.getAudioTracks()[0];
+  }, 'audio');
+}
+
+/* exported stopLocalFromHere */
+function stopLocalFromHere() {
+  stopLocalStream();
+}
+
+/* exported createDataChannelFromHere */
+function createDataChannelFromHere() {
+  ensureHasPeerConnection_();
+  createDataChannelOnPeerConnection();
+}
+
+/* exported closeDataChannelFromHere */
+function closeDataChannelFromHere() {
+  ensureHasPeerConnection_();
+  closeDataChannelOnPeerConnection();
+}
+
+/* exported sendDataFromHere */
+function sendDataFromHere() {
+  var data = $('data-channel-send').value;
+  sendDataOnChannel(data);
+}
+
+/* exported createDtmfSenderFromHere */
+function createDtmfSenderFromHere() {
+  ensureHasPeerConnection_();
+  createDtmfSenderOnPeerConnection();
+}
+
+/* exported insertDtmfFromHere */
+function insertDtmfFromHere() {
+  var tones = $('dtmf-tones').value;
+  var duration = $('dtmf-tones-duration').value;
+  var gap = $('dtmf-tones-gap').value;
+  insertDtmfOnSender(tones, duration, gap);
+}
+
+/* exported forceIsacChanged */
+function forceIsacChanged() {
+  var forceIsac = $('force-isac').checked;
+  if (forceIsac) {
+    forceIsac_();
+  } else {
+    dontTouchSdp_();
+  }
+}
+
+// Updates the constraints in the getusermedia-constraints text box with a
+// MediaStreamConstraints string. This string is created based on the state
+// of the 'audiosrc' and 'videosrc' checkboxes.
+// If device enumeration is supported and device source id's are not undefined
+// they will be added to the constraints string.
+function updateGetUserMediaConstraints() {
+  var selectedAudioDevice = $('audiosrc');
+  var selectedVideoDevice = $('videosrc');
+  global.constraints = {audio: $('audio').checked,
+                        video: $('video').checked
+  };
+
+  if ($('video').checked) {
+    // Default optional constraints placed here.
+    global.constraints.video = {optional: [{minWidth: global.videoWidth},
+                                           {minHeight: global.videoHeight},
+                                           {googLeakyBucket: true}]};
+  }
+
+  if (!selectedAudioDevice.disabled && !selectedVideoDevice.disabled) {
+    var devices = getSourcesFromField_(selectedAudioDevice,
+                                       selectedVideoDevice);
+
+    if ($('audio').checked) {
+      if (typeof devices.audioId !== 'undefined') {
+        global.constraints.audio = {optional: [{sourceId: devices.audioId}]};
+      }
+    }
+
+    if ($('video').checked) {
+      if (typeof devices.videoId !== 'undefined') {
+        global.constraints.video.optional.push({sourceId: devices.videoId});
+      }
+    }
+  }
+
+  if ($('screencapture').checked) {
+    global.constraints = {
+      audio: $('audio').checked,
+      video: {mandatory: {chromeMediaSource: 'screen',
+                          maxWidth: screen.width,
+                          maxHeight: screen.height}}
+    };
+    if ($('audio').checked) {
+      warning_('Audio for screencapture is not implemented yet; please ' +
+               'set audio = false prior to requesting screencapture.');
+    }
+  }
+
+  $('getusermedia-constraints').value = JSON.stringify(global.constraints,
+      null, ' ');
+  $('getusermedia-constraints').addEventListener('change', function() {
+    global.constraints = JSON.parse($('getusermedia-constraints').value);
+  }, false);
+  $('local-res').innerHTML = global.videoWidth + 'x' + global.videoHeight;
+}
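+
+// For reference: with both checkboxes ticked, no devices selected and the
+// default HD resolution, the code above generates a legacy-style constraints
+// object along these lines (illustrative; values depend on the UI state):
+//
+//   {
+//     "audio": true,
+//     "video": {
+//       "optional": [
+//         {"minWidth": 1280},
+//         {"minHeight": 720},
+//         {"googLeakyBucket": true}
+//       ]
+//     }
+//   }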
+
+/* exported showServerHelp */
+function showServerHelp() {
+  alert('You need to build and run a peerconnection_server on some ' +
+        'suitable machine. To build it in Chrome, just run make/ninja ' +
+        'peerconnection_server. Otherwise, read in https://code.google' +
+        '.com/searchframe#xSWYf0NTG_Q/trunk/peerconnection/README&q=REA' +
+        'DME%20package:webrtc%5C.googlecode%5C.com.');
+}
+
+/* exported clearLog */
+function clearLog() {
+  $('messages').innerHTML = '';
+}
+
+// Stops the local stream.
+function stopLocalStream() {
+  if (typeof global.localStream === 'undefined') {
+    error_('Tried to stop local stream, ' +
+           'but media access is not granted.');
+  }
+
+  global.localStream.stop();
+}
+
+// Adds the current local media stream to a peer connection.
+// @param {RTCPeerConnection} peerConnection
+function addLocalStreamToPeerConnection(peerConnection) {
+  if (typeof global.localStream === 'undefined') {
+    error_('Tried to add local stream to peer connection, but there is no ' +
+           'stream yet.');
+  }
+  try {
+    peerConnection.addStream(global.localStream, global.addStreamConstraints);
+  } catch (exception) {
+    error_('Failed to add stream with constraints ' +
+           global.addStreamConstraints + ': ' + exception);
+  }
+  print_('Added local stream.');
+}
+
+// Removes the local stream from the peer connection.
+// @param {rtcpeerconnection} peerConnection
+function removeLocalStreamFromPeerConnection(peerConnection) {
+  if (typeof global.localStream === 'undefined') {
+    error_('Tried to remove local stream from peer connection, but there is ' +
+           'no stream yet.');
+  }
+  try {
+    peerConnection.removeStream(global.localStream);
+  } catch (exception) {
+    error_('Could not remove stream: ' + exception);
+  }
+  print_('Removed local stream.');
+}
+
+// Enumerates the audio and video devices available in Chrome and adds the
+// devices to the HTML elements with Id 'audiosrc' and 'videosrc'.
+// Checks if device enumeration is supported and if the 'audiosrc' and
+// 'videosrc' elements exist; if not, a debug printout will be displayed.
+// If a device label is empty, the device id will be used to populate the
+// name. Also makes sure the children have been loaded in order to update
+// the constraints.
+function getDevices() {
+  var selectedAudioDevice = $('audiosrc');
+  var selectedVideoDevice = $('videosrc');
+  selectedAudioDevice.innerHTML = '';
+  selectedVideoDevice.innerHTML = '';
+
+  if (typeof(MediaStreamTrack.getSources) === 'undefined') {
+    selectedAudioDevice.disabled = true;
+    selectedVideoDevice.disabled = true;
+    $('get-devices').disabled = true;
+    $('get-devices-onload').disabled = true;
+    updateGetUserMediaConstraints();
+    error_('getSources not found, device enumeration not supported');
+  }
+
+  MediaStreamTrack.getSources(function(devices) {
+    for (var i = 0; i < devices.length; i++) {
+      var option = document.createElement('option');
+      option.value = devices[i].id;
+      option.text = devices[i].label;
+
+      if (devices[i].kind === 'audio') {
+        if (option.text === '') {
+          option.text = devices[i].id;
+        }
+        selectedAudioDevice.appendChild(option);
+      } else if (devices[i].kind === 'video') {
+        if (option.text === '') {
+          option.text = devices[i].id;
+        }
+        selectedVideoDevice.appendChild(option);
+      } else {
+        error_('Device type ' + devices[i].kind + ' not recognized, ' +
+                'cannot enumerate device. Currently only device types ' +
+                '\'audio\' and \'video\' are supported');
+        updateGetUserMediaConstraints();
+        return;
+      }
+    }
+  });
+
+  checkIfDeviceDropdownsArePopulated_();
+}
+
+// Sets the transform to apply just before setting the local description and
+// sending to the peer.
+// @param {function} transformFunction A function which takes one SDP string as
+// argument and returns the modified SDP string.
+function setOutgoingSdpTransform(transformFunction) {
+  global.transformOutgoingSdp = transformFunction;
+}
+
+// Sets the MediaConstraints to be used for PeerConnection createAnswer() calls.
+// @param {string} mediaConstraints The constraints, as defined in the
+// PeerConnection JS API spec.
+function setCreateAnswerConstraints(mediaConstraints) {
+  global.createAnswerConstraints = mediaConstraints;
+}
+
+// Sets the MediaConstraints to be used for PeerConnection createOffer() calls.
+// @param {string} mediaConstraints The constraints, as defined in the
+// PeerConnection JS API spec.
+function setCreateOfferConstraints(mediaConstraints) {
+  global.createOfferConstraints = mediaConstraints;
+}
+
+// Sets the callback functions that will receive DataChannel readyState updates
+// and received data.
+// @param {function} statusCallback The function that will receive a string
+// with the current DataChannel readyState.
+// @param {function} dataCallback The function that will receive a string
+// with data received from the remote peer.
+function setDataCallbacks(statusCallback, dataCallback) {
+  global.dataStatusCallback = statusCallback;
+  global.dataCallback = dataCallback;
+}
+
+// Sends data on an active DataChannel.
+// @param {string} data The string that will be sent to the remote peer.
+function sendDataOnChannel(data) {
+  if (typeof global.dataChannel === 'undefined') {
+    error_('Trying to send data, but there is no DataChannel.');
+  }
+  global.dataChannel.send(data);
+}
+
+// Sets the callback function that will receive DTMF sender ontonechange events.
+// @param {function} ontonechange The function that will receive a string with
+// the tone that has just begun playout.
+function setOnToneChange(ontonechange) {
+  global.dtmfOnToneChange = ontonechange;
+}
+
+// Inserts DTMF tones on an active DTMF sender.
+// @param {string} tones The tones to be sent.
+// @param {string} duration Duration of each tone to be sent.
+// @param {string} interToneGap Gap between the tones to be sent.
+function insertDtmf(tones, duration, interToneGap) {
+  if (typeof global.dtmfSender === 'undefined') {
+    error_('Trying to send DTMF, but there is no DTMF sender.');
+  }
+  global.dtmfSender.insertDTMF(tones, duration, interToneGap);
+}
+
+function handleMessage(peerConnection, message) {
+  var parsedMsg = JSON.parse(message);
+  if (parsedMsg.type) {
+    var sessionDescription = new RTCSessionDescription(parsedMsg);
+    peerConnection.setRemoteDescription(
+        sessionDescription,
+        function() { success_('setRemoteDescription'); },
+        function(error) { error_('setRemoteDescription', error); });
+    if (sessionDescription.type === 'offer') {
+      print_('createAnswer with constraints: ' +
+            JSON.stringify(global.createAnswerConstraints, null, ' '));
+      peerConnection.createAnswer(
+          setLocalAndSendMessage_,
+          function(error) { error_('createAnswer', error); },
+          global.createAnswerConstraints);
+    }
+    return;
+  } else if (parsedMsg.candidate) {
+    var candidate = new RTCIceCandidate(parsedMsg);
+    peerConnection.addIceCandidate(candidate,
+        function() { success_('addIceCandidate'); },
+        function(error) { error_('addIceCandidate', error); }
+    );
+    return;
+  }
+  error_('unknown message received');
+}
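+
+// For reference, the two message shapes handleMessage distinguishes look
+// roughly like this on the wire (abbreviated, illustrative):
+//
+//   {"type": "offer", "sdp": "v=0\r\no=- ..."}
+//   {"candidate": "candidate:...", "sdpMid": "audio", "sdpMLineIndex": 0}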
+
+// Sets the peerConnection constraints based on checkboxes.
+// TODO (jansson) Make it possible to use the text field for constraints like
+//     for getUserMedia.
+function setPeerConnectionConstraints() {
+  // Only added optional for now.
+  global.pcConstraints = {
+    optional: []
+  };
+
+  global.pcConstraints.optional.push(
+      {googCpuOveruseDetection: $('cpuoveruse-detection').checked});
+
+  global.pcConstraints.optional.push(
+      {RtpDataChannels: $('data-channel-type-rtp').checked});
+
+  $('pc-constraints').value = JSON.stringify(global.pcConstraints, null, ' ');
+}
+
+function createPeerConnection(stunServer) {
+  var servers = {iceServers: [{url: 'stun:' + stunServer}]};
+  var peerConnection;
+  try {
+    peerConnection = new RTCPeerConnection(servers, global.pcConstraints);
+  } catch (exception) {
+    error_('Failed to create peer connection: ' + exception);
+  }
+  peerConnection.onaddstream = addStreamCallback_;
+  peerConnection.onremovestream = removeStreamCallback_;
+  peerConnection.onicecandidate = iceCallback_;
+  peerConnection.ondatachannel = onCreateDataChannelCallback_;
+  return peerConnection;
+}
+
+function setupCall(peerConnection) {
+  print_('createOffer with constraints: ' +
+        JSON.stringify(global.createOfferConstraints, null, ' '));
+  peerConnection.createOffer(
+      setLocalAndSendMessage_,
+      function(error) { error_('createOffer', error); },
+      global.createOfferConstraints);
+}
+
+function answerCall(peerConnection, message) {
+  handleMessage(peerConnection, message);
+}
+
+function createDataChannel(peerConnection, label) {
+  if (typeof global.dataChannel !== 'undefined' &&
+      global.dataChannel.readyState !== 'closed') {
+    error_('Creating DataChannel, but we already have one.');
+  }
+
+  global.dataChannel = peerConnection.createDataChannel(label,
+      { reliable: false });
+  print_('DataChannel with label ' + global.dataChannel.label + ' initiated ' +
+         'locally.');
+  hookupDataChannelEvents();
+}
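+
+// Illustrative wiring of the plumbing above (hypothetical; mirrors what the
+// page's buttons and hookupDataChannelCallbacks_ are expected to do):
+//
+//   setDataCallbacks(
+//       function(status) { $('data-channel-status').value = status; },
+//       function(data) { $('data-channel-receive').value = data; });
+//   sendDataOnChannel('hello');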
+
+function closeDataChannel() {
+  if (typeof global.dataChannel === 'undefined') {
+    error_('Closing DataChannel, but none exists.');
+  }
+  print_('DataChannel with label ' + global.dataChannel.label +
+         ' is being closed.');
+  global.dataChannel.close();
+}
+
+function createDtmfSender(peerConnection) {
+  if (typeof global.dtmfSender !== 'undefined') {
+    error_('Creating DTMF sender, but we already have one.');
+  }
+  if (typeof global.localStream === 'undefined') {
+    error_('Creating DTMF sender but local stream is undefined.');
+  }
+  var localAudioTrack = global.localStream.getAudioTracks()[0];
+  global.dtmfSender = peerConnection.createDTMFSender(localAudioTrack);
+  global.dtmfSender.ontonechange = global.dtmfOnToneChange;
+}
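+
+// Illustrative DTMF sequence once a call is up (values match the page's
+// defaults; assumes a local stream with an audio track has been added):
+//
+//   createDtmfSender(global.peerConnection);
+//   insertDtmf('123,abc', '100', '50');  // tones, tone duration ms, gap ms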
+
+// Connects to the provided peerconnection_server.
+// @param {string} serverUrl The server URL in string form without an ending
+// slash, something like http://localhost:8888.
+// @param {string} clientName The name to use when connecting to the server.
+function connect(serverUrl, clientName) {
+  if (typeof global.ourPeerId !== 'undefined') {
+    error_('connecting, but we are already connected.');
+  }
+  print_('Connecting to ' + serverUrl + ' as ' + clientName);
+  global.serverUrl = serverUrl;
+  global.ourClientName = clientName;
+
+  var request = new XMLHttpRequest();
+  request.open('GET', serverUrl + '/sign_in?' + clientName, true);
+  print_(serverUrl + '/sign_in?' + clientName);
+  request.onreadystatechange = function() {
+    connectCallback_(request);
+  };
+  request.send();
+}
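+
+// For reference, the resulting sign-in request looks like, e.g.:
+//   GET http://localhost:8888/sign_in?peer_4711
+// (illustrative host and peer name).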
+
+// Creates a peer connection. Must be called before most other public functions
+// in this file.
+function preparePeerConnection() {
+  if (typeof global.peerConnection !== 'undefined') {
+    error_('creating peer connection, but we already have one.');
+  }
+  global.peerConnection = createPeerConnection(STUN_SERVER);
+  success_('ok-peerconnection-created');
+}
+
+// Adds the local stream to the peer connection. You will have to re-negotiate
+// the call for this to take effect in the call.
+function addLocalStream() {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('adding local stream, but we have no peer connection.');
+  }
+  addLocalStreamToPeerConnection(global.peerConnection);
+  print_('ok-added');
+}
+
+// Removes the local stream from the peer connection. You will have to
+// re-negotiate the call for this to take effect in the call.
+function removeLocalStream() {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('attempting to remove local stream, but no call is up');
+  }
+  removeLocalStreamFromPeerConnection(global.peerConnection);
+  print_('ok-local-stream-removed');
+}
+
+// Toggles the enabled state of a track in the remote stream on the peer
+// connection, given that a call is active. Returns
+// ok-[typeToToggle]-toggled-to-[true/false] on success.
+// @param {function} selectAudioOrVideoTrack A function that takes a remote
+// stream as argument and returns a track (e.g. either the video or audio
+// track).
+// @param {string} typeToToggle Either "audio" or "video" depending on what
+// the selector function selects.
+function toggleRemoteStream(selectAudioOrVideoTrack, typeToToggle) {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('Tried to toggle remote stream, but have no peer connection.');
+  }
+  if (global.peerConnection.getRemoteStreams().length === 0) {
+    error_('Tried to toggle remote stream, but not receiving any stream.');
+  }
+  var track = selectAudioOrVideoTrack(
+      global.peerConnection.getRemoteStreams()[0]);
+  toggle_(track, 'remote', typeToToggle);
+}
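+
+// Example: toggle the remote audio track (the selector picks which track of
+// the remote stream to toggle):
+//
+//   toggleRemoteStream(function(stream) {
+//     return stream.getAudioTracks()[0];
+//   }, 'audio');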
+
+// See documentation on toggleRemoteStream (this function is the same except
+// we are looking at local streams).
+function toggleLocalStream(selectAudioOrVideoTrack, typeToToggle) {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('Tried to toggle local stream, but have no peer connection.');
+  }
+  if (global.peerConnection.getLocalStreams().length === 0) {
+    error_('Tried to toggle local stream, but there is no local stream in ' +
+           'the call.');
+  }
+  var track = selectAudioOrVideoTrack(
+      global.peerConnection.getLocalStreams()[0]);
+  toggle_(track, 'local', typeToToggle);
+}
+
+// Hangs up a started call. Returns ok-call-hung-up on success. This tab will
+// not accept any incoming calls after this call.
+function hangUp() {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('hanging up, but has no peer connection');
+  }
+  if (getReadyState_() !== 'active') {
+    error_('hanging up, but ready state is not active (no call up).');
+  }
+  sendToPeer(global.remotePeerId, 'BYE');
+  closeCall_();
+  global.acceptsIncomingCalls = false;
+  print_('ok-call-hung-up');
+}
+
+// Start accepting incoming calls.
+function acceptIncomingCalls() {
+  global.acceptsIncomingCalls = true;
+}
+
+// Creates a DataChannel on the current PeerConnection. Only one DataChannel can
+// be created on each PeerConnection.
+// Returns ok-datachannel-created on success.
+function createDataChannelOnPeerConnection() {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('Tried to create data channel, but have no peer connection.');
+  }
+  createDataChannel(global.peerConnection, global.ourClientName);
+  print_('ok-datachannel-created');
+}
+
+// Close the DataChannel on the current PeerConnection.
+// Returns ok-datachannel-close on success.
+function closeDataChannelOnPeerConnection() {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('Tried to close data channel, but have no peer connection.');
+  }
+  closeDataChannel(global.peerConnection);
+  print_('ok-datachannel-close');
+}
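+
+// Rough data channel usage sketch ('hello' is an example payload):
+//
+//   createDataChannelOnPeerConnection();
+//   // ...re-negotiate the call and wait for the channel to report 'open'...
+//   global.dataChannel.send('hello');
+//   closeDataChannelOnPeerConnection();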
+
+// Creates a DTMF sender on the current PeerConnection.
+// Returns ok-dtmfsender-created on success.
+function createDtmfSenderOnPeerConnection() {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('Tried to create DTMF sender, but have no peer connection.');
+  }
+  createDtmfSender(global.peerConnection);
+  print_('ok-dtmfsender-created');
+}
+
+// Send DTMF tones on the global.dtmfSender.
+// Returns ok-dtmf-sent on success.
+function insertDtmfOnSender(tones, duration, interToneGap) {
+  if (typeof global.dtmfSender === 'undefined') {
+    error_('Tried to insert DTMF tones, but have no DTMF sender.');
+  }
+  insertDtmf(tones, duration, interToneGap);
+  print_('ok-dtmf-sent');
+}
+
+// Sends a message to a peer through the peerconnection_server.
+function sendToPeer(peer, message) {
+  var messageToLog = message.sdp ? message.sdp : message;
+  print_('Sending message ' + messageToLog + ' to peer ' + peer + '.');
+
+  var request = new XMLHttpRequest();
+  var url = global.serverUrl + '/message?peer_id=' + global.ourPeerId + '&to=' +
+      peer;
+  request.open('POST', url, false);
+  request.setRequestHeader('Content-Type', 'text/plain');
+  request.send(message);
+}
+
+// @param {!string} videoElementId The ID of the video element to update.
+// @param {!number} width The new width of the video element. If width or
+// height is 0, the width is taken from videoElement.videoWidth.
+// @param {!number} height The new height of the video element. If width or
+// height is 0, the height is taken from videoElement.videoHeight.
+/* exported updateVideoElementSize */
+function updateVideoElementSize(videoElementId, width, height) {
+  var videoElement = $(videoElementId);
+  if (width > 0 || height > 0) {
+    videoElement.width = width;
+    videoElement.height = height;
+  } else {
+    if (videoElement.videoWidth > 0 || videoElement.videoHeight > 0) {
+      videoElement.width = videoElement.videoWidth;
+      videoElement.height = videoElement.videoHeight;
+      print_('Set video element "' + videoElementId + '" size to ' +
+             videoElement.width + 'x' + videoElement.height);
+    } else {
+      print_('"' + videoElementId + '" video stream size is 0, skipping ' +
+             ' resize');
+    }
+  }
+  displayVideoSize(videoElement);
+}
+
+// Disconnects from the peerconnection server. Returns ok-disconnected on
+// success.
+function disconnect_() {
+  if (typeof global.ourPeerId === 'undefined') {
+    return;
+  }
+  var request = new XMLHttpRequest();
+  request.open('GET', global.serverUrl + '/sign_out?peer_id=' +
+               global.ourPeerId, false);
+  request.send();
+  global.ourPeerId = 'undefined';
+  print_('ok-disconnected');
+}
+
+// Returns true if we are disconnected from peerconnection_server.
+function isDisconnected_() {
+  return global.ourPeerId === 'undefined';
+}
+
+// @return {!string} The current peer connection's ready state, or
+// 'no-peer-connection' if there is no peer connection up.
+// NOTE: The PeerConnection states are changing and until Chromium has
+// implemented the new states we have to use this interim solution of always
+// assuming that the PeerConnection is 'active'.
+function getReadyState_() {
+  if (typeof global.peerConnection === 'undefined') {
+    return 'no-peer-connection';
+  }
+  return 'active';
+}
+
+// This function asks permission to use the webcam and mic from the browser. It
+// will return ok-requested to the test. This does not mean the request was
+// approved though. The test will then have to click past the dialog that
+// appears in Chrome, which will run either the OK or failed callback as a
+// result. To see which callback was called, use obtainGetUserMediaResult_().
+// @param {string} constraints Defines what to be requested, with mandatory
+// and optional constraints defined. The contents of this parameter depend
+// on the WebRTC version. This should be a JSON string that we parse.
+function doGetUserMedia_(constraints) {
+  if (!getUserMedia) {
+    print_('Browser does not support WebRTC.');
+    return;
+  }
+  var evaluatedConstraints;
+  try {
+    evaluatedConstraints = JSON.parse(constraints);
+  } catch (exception) {
+    error_('Not a valid JSON string: ' + constraints);
+  }
+
+  print_('Requesting doGetUserMedia: constraints: ' + constraints);
+  getUserMedia(evaluatedConstraints, getUserMediaOkCallback_,
+               getUserMediaFailedCallback_);
+}
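+
+// Illustrative example: the constraints argument is a JSON string, e.g. as
+// produced by JSON.stringify on a constraints dictionary:
+//
+//   doGetUserMedia_(JSON.stringify({
+//     audio: true,
+//     video: {mandatory: {minWidth: 640}, optional: []}
+//   }));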
+
+// Must be called after calling doGetUserMedia_().
+// @return {string} Returns not-called-yet if we have not yet been called back
+// by WebRTC. Otherwise it returns either ok-got-stream or failed-with-error-x
+// (where x is the error code from the error callback) depending on which
+// callback got called by WebRTC.
+function obtainGetUserMediaResult_() {
+  if (typeof global.requestWebcamAndMicrophoneResult === 'undefined') {
+    global.requestWebcamAndMicrophoneResult = 'not-called-yet';
+  }
+  return global.requestWebcamAndMicrophoneResult;
+}
+
+// Negotiates a call with the other side. This will create a peer connection on
+// the other side if there isn't one.
+// To call this method we need to be aware of the other side, i.e. we must be
+// connected to peerconnection_server and we must have exactly one peer on that
+// server.
+// This method may be called any number of times. If you haven't added any
+// streams to the call, an "empty" call will result. The method will return
+// ok-negotiating immediately to the test if the negotiation was successfully
+// sent.
+function negotiateCall_() {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('Negotiating call, but we have no peer connection.');
+  } else if (typeof global.ourPeerId === 'undefined') {
+    error_('Negotiating call, but not connected.');
+  } else if (typeof global.remotePeerId === 'undefined') {
+    error_('Negotiating call, but missing remote peer.');
+  }
+  setupCall(global.peerConnection);
+  print_('ok-negotiating');
+}
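+
+// Typical call setup sequence, as a rough sketch (URL, name and constraints
+// are example values; exact ordering may vary between tests):
+//
+//   connect('http://localhost:8888', 'caller');
+//   preparePeerConnection();
+//   doGetUserMedia_('{"audio": true, "video": true}');
+//   // ...wait until obtainGetUserMediaResult_() returns 'ok-got-stream'...
+//   addLocalStream();
+//   negotiateCall_();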
+
+// Returns the selected source IDs from the drop down elements passed in. If
+// the audioSelect or videoSelect element does not have any HTMLOptions
+// children, the corresponding field in the returned source object will be
+// null.
+// @param {!object} audioSelect HTML drop down element with audio devices added
+// as HTMLOptionsCollection children.
+// @param {!object} videoSelect HTML drop down element with video devices added
+// as HTMLOptionsCollection children.
+// @return {!object} source contains audio and video source ID from the selected
+// devices in the drop down menu elements.
+function getSourcesFromField_(audioSelect, videoSelect) {
+  var source = {
+    audioId: null,
+    videoId: null
+  };
+  if (audioSelect.options.length > 0) {
+    source.audioId = audioSelect.options[audioSelect.selectedIndex].value;
+  }
+  if (videoSelect.options.length > 0) {
+    source.videoId = videoSelect.options[videoSelect.selectedIndex].value;
+  }
+  return source;
+}
+
+// @param {NavigatorUserMediaError} error Error containing details.
+function getUserMediaFailedCallback_(error) {
+  error_('GetUserMedia failed with error: ' + error.name);
+}
+
+function iceCallback_(event) {
+  if (event.candidate) {
+    sendToPeer(global.remotePeerId, JSON.stringify(event.candidate));
+  }
+}
+
+function setLocalAndSendMessage_(sessionDescription) {
+  sessionDescription.sdp =
+    global.transformOutgoingSdp(sessionDescription.sdp);
+  global.peerConnection.setLocalDescription(sessionDescription,
+      function() { success_('setLocalDescription'); },
+      function(error) { error_('setLocalDescription', error); });
+  print_('Sending SDP message:\n' + sessionDescription.sdp);
+  sendToPeer(global.remotePeerId, JSON.stringify(sessionDescription));
+}
+
+function addStreamCallback_(event) {
+  print_('Receiving remote stream...');
+  var videoElement = document.getElementById('remote-view');
+  attachMediaStream(videoElement, event.stream);
+
+  window.addEventListener('loadedmetadata',
+      function() {displayVideoSize(videoElement);}, true);
+}
+
+function removeStreamCallback_() {
+  print_('Call ended.');
+  document.getElementById('remote-view').src = '';
+}
+
+function onCreateDataChannelCallback_(event) {
+  if (typeof global.dataChannel !== 'undefined' &&
+      global.dataChannel.readyState !== 'closed') {
+    error_('Received DataChannel, but we already have one.');
+  }
+  global.dataChannel = event.channel;
+  print_('DataChannel with label ' + global.dataChannel.label +
+         ' initiated by remote peer.');
+  hookupDataChannelEvents();
+}
+
+function hookupDataChannelEvents() {
+  global.dataChannel.onmessage = global.dataCallback;
+  global.dataChannel.onopen = onDataChannelReadyStateChange_;
+  global.dataChannel.onclose = onDataChannelReadyStateChange_;
+  // Trigger global.dataStatusCallback so an application is notified
+  // about the created data channel.
+  onDataChannelReadyStateChange_();
+}
+
+function onDataChannelReadyStateChange_() {
+  print_('DataChannel state: ' + global.dataChannel.readyState);
+  global.dataStatusCallback(global.dataChannel.readyState);
+  // Display dataChannel.id only when dataChannel is active/open.
+  if (global.dataChannel.readyState === 'open') {
+    $('data-channel-id').value = global.dataChannel.id;
+  } else if (global.dataChannel.readyState === 'closed') {
+    $('data-channel-id').value = '';
+  }
+}
+
+// @param {MediaStream} stream Media stream.
+function getUserMediaOkCallback_(stream) {
+  global.localStream = stream;
+  success_('getUserMedia');
+
+  if (stream.getVideoTracks().length > 0) {
+    // Show the video element if we did request video in the getUserMedia call.
+    var videoElement = $('local-view');
+    attachMediaStream(videoElement, stream);
+    window.addEventListener('loadedmetadata', function() {
+        displayVideoSize(videoElement);}, true);
+    // Throw an error when no video is sent from camera but gUM returns OK.
+    stream.getVideoTracks()[0].onended = function() {
+      error_(global.localStream + ' getUserMedia successful but ' +
+             'MediaStreamTrack.onended event fired, no frames from camera.');
+    };
+    // Print information when the track is muted or unmuted.
+    stream.getVideoTracks()[0].onmute = function() {
+      error_(global.localStream + ' MediaStreamTrack.onmute event has fired, ' +
+             'no frames to the track.');
+    };
+    stream.getVideoTracks()[0].onunmute = function() {
+      warning_(global.localStream + ' MediaStreamTrack.onunmute event has ' +
+               'fired.');
+    };
+  }
+}
+
+// Writes the video tag's stream size to an HTML tag whose ID is the video
+// tag's ID + '-size', if such a tag exists.
+// @param {!Object} videoTag The video element whose stream size to display.
+function displayVideoSize(videoTag) {
+  if (videoTag.videoWidth > 0 || videoTag.videoHeight > 0) {
+    $(videoTag.id + '-size').firstChild.data = videoTag.videoWidth + 'x' +
+                                               videoTag.videoHeight;
+  }
+}
+
+// Checks if the 'audiosrc' and 'videosrc' drop down menu elements have had all
+// of their children appended in order to provide device IDs to the function
+// 'updateGetUserMediaConstraints()', used in turn to populate the getUserMedia
+// constraints text box when the page has loaded.
+function checkIfDeviceDropdownsArePopulated_() {
+  if (document.addEventListener) {
+    $('audiosrc').addEventListener('DOMNodeInserted',
+         updateGetUserMediaConstraints, false);
+    $('videosrc').addEventListener('DOMNodeInserted',
+         updateGetUserMediaConstraints, false);
+  } else {
+    print_('addEventListener is not supported by your browser, cannot update ' +
+           'device source IDs automatically. Select a device from the audio ' +
+           'or video source drop down menu to update device source IDs.');
+  }
+}
+
+// Registers an input element to use local storage to remember its state
+// between sessions. Only input elements are supported.
+// @param {!string} elementId Used as the key for local storage and as the ID
+// of the element to store the state for.
+function registerLocalStorage_(elementId) {
+  var element = $(elementId);
+  if (element.tagName !== 'INPUT') {
+    error_('You can only use registerLocalStorage_ for input elements. ' +
+          'Element "' + element.tagName + '" is not an input element.');
+  }
+
+  if (localStorage.getItem(element.id) === null) {
+    storeLocalStorageField_(element);
+  } else {
+    getLocalStorageField_(element);
+  }
+
+  // Registers the appropriate events for input elements.
+  if (element.type === 'checkbox') {
+    element.onclick = function() { storeLocalStorageField_(this); };
+  } else if (element.type === 'text') {
+    element.onblur = function() { storeLocalStorageField_(this); };
+  } else {
+    error_('Unsupported input type: "' + element.type + '"');
+  }
+}
+
+// Fetches the stored value from local storage and updates the element's state.
+// @param {!Object} element Element whose ID is used as the key for local
+// storage.
+function getLocalStorageField_(element) {
+  // Makes sure the checkbox status is matching the local storage value.
+  if (element.type === 'checkbox') {
+    element.checked = (localStorage.getItem(element.id) === 'true');
+  } else if (element.type === 'text') {
+    element.value = localStorage.getItem(element.id);
+  } else {
+    error_('Unsupported input type: "' + element.type + '"');
+  }
+}
+
+// Stores the string value of the element object using local storage.
+// @param {!Object} element Element whose ID is used as the key for local
+// storage.
+function storeLocalStorageField_(element) {
+  if (element.type === 'checkbox') {
+    localStorage.setItem(element.id, element.checked);
+  } else if (element.type === 'text') {
+    localStorage.setItem(element.id, element.value);
+  }
+}
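+
+// Example: remember a checkbox and a text field between page loads
+// ('server-url' is a hypothetical element ID; 'auto-add-stream-oncall'
+// exists on this page):
+//
+//   registerLocalStorage_('auto-add-stream-oncall');
+//   registerLocalStorage_('server-url');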
+
+// Create the peer connection if none is up (this is just convenience to
+// avoid having a separate button for that).
+function ensureHasPeerConnection_() {
+  if (getReadyState_() === 'no-peer-connection') {
+    preparePeerConnection();
+  }
+}
+
+// @param {string} message Text to print.
+function print_(message) {
+  printHandler_(message, 'black');
+}
+
+// @param {string} message Text to print.
+function success_(message) {
+  printHandler_(message, 'green');
+}
+
+// @param {string} message Text to print.
+function warning_(message) {
+  printHandler_(message, 'orange');
+}
+
+// @param {string} message Text to print.
+function error_(message) {
+  printHandler_(message, 'red');
+}
+
+// @param {string} message Text to print.
+// @param {string} color Color of the text.
+function printHandler_(message, color) {
+  if (color === 'green') {
+    message += ' success';
+  }
+  $('messages').innerHTML += '<span style="color:' + color + ';">' + message +
+                            '</span><br>';
+  console.log(message);
+  if (color === 'red') {
+    throw new Error(message);
+  }
+}
+
+// @param {string} stringRepresentation The constraints as a JSON string.
+// @return {Object} The PeerConnection constraints as a JavaScript dictionary.
+function getEvaluatedJavaScript_(stringRepresentation) {
+  try {
+    return JSON.parse(stringRepresentation);
+  } catch (exception) {
+    error_('Not a valid JSON string: ' + stringRepresentation);
+  }
+}
+
+function forceIsac_() {
+  setOutgoingSdpTransform(function(sdp) {
+    // Remove all other codecs (not the video codecs though).
+    sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
+                      'm=audio $1 RTP/SAVPF 104\r\n');
+    sdp = sdp.replace('a=fmtp:111 minptime=10', 'a=fmtp:104 minptime=10');
+    sdp = sdp.replace(/a=rtpmap:(?!104)\d{1,3} (?!VP8|red|ulpfec).*\r\n/g, '');
+    return sdp;
+  });
+}
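+
+// Illustrative before/after for the transform above (SDP fragment with made
+// up values):
+//
+//   before: m=audio 49170 RTP/SAVPF 111 103 104 0 8 126
+//   after:  m=audio 49170 RTP/SAVPF 104
+//
+// All a=rtpmap lines whose payload type is not 104 (and whose codec is not
+// VP8, red or ulpfec) are then stripped, leaving iSAC as the only audio codec.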
+
+function dontTouchSdp_() {
+  setOutgoingSdpTransform(function(sdp) { return sdp; });
+}
+
+function hookupDataChannelCallbacks_() {
+  setDataCallbacks(function(status) {
+    $('data-channel-status').value = status;
+  },
+  function(dataMessage) {
+    print_('Received ' + dataMessage.data);
+    $('data-channel-receive').value =
+      dataMessage.data + '\n' + $('data-channel-receive').value;
+  });
+}
+
+function hookupDtmfSenderCallback_() {
+  setOnToneChange(function(tone) {
+    print_('Sent DTMF tone: ' + tone.tone);
+  });
+}
+
+function toggle_(track, localOrRemote, audioOrVideo) {
+  if (!track) {
+    error_('Tried to toggle ' + localOrRemote + ' ' + audioOrVideo +
+           ' stream, but have no such stream.');
+  }
+  track.enabled = !track.enabled;
+  print_('ok-' + audioOrVideo + '-toggled-to-' + track.enabled);
+}
+
+function connectCallback_(request) {
+  print_('Connect callback: ' + request.status + ', ' + request.readyState);
+  if (request.status === 0) {
+    print_('peerconnection_server doesn\'t seem to be up.');
+    error_('Failed connecting to peerconnection_server.');
+  }
+  if (request.readyState === 4 && request.status === 200) {
+    global.ourPeerId = parseOurPeerId_(request.responseText);
+    global.remotePeerId = parseRemotePeerIdIfConnected_(request.responseText);
+    startHangingGet_(global.serverUrl, global.ourPeerId);
+    print_('ok-connected');
+  }
+}
+
+function parseOurPeerId_(responseText) {
+  // According to peerconnection_server's protocol.
+  var peerList = responseText.split('\n');
+  return parseInt(peerList[0].split(',')[1], 10);
+}
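+
+// Illustrative sign_in response from peerconnection_server: one
+// 'name,id,connected' triplet per line, our own entry first, e.g.
+//
+//   caller,1,1
+//   callee,2,1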
+
+function parseRemotePeerIdIfConnected_(responseText) {
+  var peerList = responseText.split('\n');
+  if (peerList.length === 1) {
+    // No peers have connected yet - we'll get their id later in a notification.
+    return null;
+  }
+  var remotePeerId = null;
+  for (var i = 0; i < peerList.length; i++) {
+    if (peerList[i].length === 0) {
+      continue;
+    }
+    var parsed = peerList[i].split(',');
+    var name = parsed[0];
+    var id = parseInt(parsed[1], 10);
+    if (id !== global.ourPeerId) {
+      print_('Found remote peer with name ' + name + ', id ' +
+             id + ' when connecting.');
+      // There should be at most one remote peer in this test.
+      if (remotePeerId !== null) {
+        error_('Expected just one remote peer in this test: ' +
+               'found several.');
+      }
+      // Found a remote peer.
+      remotePeerId = id;
+    }
+  }
+  return remotePeerId;
+}
+
+function startHangingGet_(server, ourId) {
+  if (isDisconnected_()) {
+    return;
+  }
+  var hangingGetRequest = new XMLHttpRequest();
+  hangingGetRequest.onreadystatechange = function() {
+    hangingGetCallback_(hangingGetRequest, server, ourId);
+  };
+  hangingGetRequest.ontimeout = function() {
+    hangingGetTimeoutCallback_(hangingGetRequest, server, ourId);
+  };
+  var callUrl = server + '/wait?peer_id=' + ourId;
+  print_('Sending ' + callUrl);
+  hangingGetRequest.open('GET', callUrl, true);
+  hangingGetRequest.send();
+}
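+
+// The 'hanging GET' above long-polls the server's /wait endpoint: the server
+// holds the request open until a message or notification arrives for our peer
+// id, we handle the response in hangingGetCallback_ below, then immediately
+// re-issue the request via restartHangingGet_.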
+
+function hangingGetCallback_(hangingGetRequest, server, ourId) {
+  if (hangingGetRequest.readyState !== 4 || hangingGetRequest.status === 0) {
+    // Code 0 is not possible if the server actually responded. Ignore.
+    return;
+  }
+  if (hangingGetRequest.status !== 200) {
+    error_('Error ' + hangingGetRequest.status + ' from server: ' +
+           hangingGetRequest.statusText);
+  }
+  var targetId = readResponseHeader_(hangingGetRequest, 'Pragma');
+  if (targetId === ourId) {
+    handleServerNotification_(hangingGetRequest.responseText);
+  } else {
+    handlePeerMessage_(targetId, hangingGetRequest.responseText);
+  }
+  hangingGetRequest.abort();
+
+  restartHangingGet_(server, ourId);
+}
+
+function hangingGetTimeoutCallback_(hangingGetRequest, server, ourId) {
+  print_('Hanging GET timed out, re-issuing...');
+  hangingGetRequest.abort();
+  restartHangingGet_(server, ourId);
+}
+
+function handleServerNotification_(message) {
+  var parsed = message.split(',');
+  if (parseInt(parsed[2], 10) === 1) {
+    // Peer connected - this must be our remote peer, and it must mean we
+    // connected before them (except if we happened to connect to the server
+    // at precisely the same moment).
+    print_('Found remote peer with name ' + parsed[0] + ', id ' + parsed[1] +
+           ' when connecting.');
+    global.remotePeerId = parseInt(parsed[1], 10);
+  }
+}
+
+function closeCall_() {
+  if (typeof global.peerConnection === 'undefined') {
+    warning_('Closing call, but no call active.');
+    return;
+  }
+  global.peerConnection.close();
+  global.peerConnection = undefined;
+}
+
+function handlePeerMessage_(peerId, message) {
+  print_('Received message from peer ' + peerId + ': ' + message);
+  if (peerId !== global.remotePeerId) {
+    error_('Received notification from unknown peer ' + peerId +
+           ' (only know about ' + global.remotePeerId + ').');
+  }
+  if (message.search('BYE') === 0) {
+    print_('Received BYE from peer: closing call');
+    closeCall_();
+    return;
+  }
+  if (typeof global.peerConnection === 'undefined' &&
+      global.acceptsIncomingCalls) {
+    // The other side is calling us.
+    print_('We are being called: answer...');
+
+    global.peerConnection = createPeerConnection(STUN_SERVER);
+
+    if ($('auto-add-stream-oncall').checked &&
+        obtainGetUserMediaResult_() === 'ok-got-stream') {
+      print_('We have a local stream, so hook it up automatically.');
+      addLocalStreamToPeerConnection(global.peerConnection);
+    }
+    answerCall(global.peerConnection, message);
+    return;
+  }
+  handleMessage(global.peerConnection, message);
+}
+
+function restartHangingGet_(server, ourId) {
+  window.setTimeout(function() {
+    startHangingGet_(server, ourId);
+  }, 0);
+}
+
+function readResponseHeader_(request, key) {
+  var value = request.getResponseHeader(key);
+  if (value === null || value.length === 0) {
+    error_('Received empty value ' + value +
+           ' for response header key ' + key + '.');
+  }
+  return parseInt(value, 10);
+}
diff --git a/samples/web/content/multiple-relay/index.html b/samples/web/content/multiple-relay/index.html
deleted file mode 100644
index 8a5eeda..0000000
--- a/samples/web/content/multiple-relay/index.html
+++ /dev/null
@@ -1,107 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<title>PeerConnection Demo 1</title>
-<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
-<script src="../../js/adapter.js"></script>
-<script src="../../js/videopipe.js"></script>
-<style>
-video {
-  border:5px solid black;
-  width:480px;
-  height:360px;
-}
-button {
-  font: 18px sans-serif;
-  padding: 8px;
-}
-textarea {
-  border: none;
-  resize: none;
-  font-family: monospace;
-  margin: 2px;
-  width:480px;
-  height:64px;
-}
-</style>
-</head>
-<body>
-<video id="vid1" autoplay></video>
-<video id="vid2" autoplay></video>
-<br>
-<button id="btn1" onclick="start()">Start</button>
-<button id="btn2" onclick="call()">Call</button>
-<button id="btn3" onclick="addrelay()">Insert relay</button>
-<button id="btn4" onclick="hangup()">Hang Up</button>
-<br>
-<textarea id="ta1"></textarea><br>
-<script>
-btn1.disabled = false;
-btn2.disabled = true;
-btn3.disabled = true;
-btn4.disabled = true;
-var pipes = new Array();
-
-var localstream;
-var remotestream;
-
-function gotStream(stream){
-  trace("Received local stream");
-  attachMediaStream(vid1, stream);
-  localstream = stream;
-  btn2.disabled = false;
-}
-
-function gotRemoteStream(stream){
-  remotestream = stream;
-  attachMediaStream(vid2, stream);
-  trace("Received remote stream");
-  trace(pipes.length + ' elements in chain');
-  ta1.textContent = pipes.length + ' elements in chain';
-  btn3.disabled = false;
-}
-
-function start() {
-  trace("Requesting local stream");
-  btn1.disabled = true;
-  getUserMedia({audio:false, video:true},
-                gotStream,
-                function() {
-                  alert('getUserMedia failed');
-                });
-}
-
-function call() {
-  btn2.disabled = true;
-  btn3.disabled = false;
-  btn4.disabled = false;
-  trace("Starting call");
-  pipes.push(new VideoPipe(localstream, gotRemoteStream));
-}
-
-function addrelay() {
-  pipes.push(new VideoPipe(remotestream, gotRemoteStream));
-  btn3.disabled = true;
-}
-
-function hangup() {
-  trace("Ending call");
-  while (pipes.length > 0) {
-    var pipe = pipes.pop()
-    pipe.close();
-  }
-  btn3.disabled = true;
-  btn4.disabled = true;
-  btn2.disabled = false;
-}
-
-</script>
-</body>
-</html>
diff --git a/samples/web/content/multiple/index.html b/samples/web/content/multiple/index.html
deleted file mode 100644
index 8f3ae42..0000000
--- a/samples/web/content/multiple/index.html
+++ /dev/null
@@ -1,203 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<title>PeerConnection Demo 1</title>
-<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
-<script src="../../js/adapter.js"></script>
-<style>
-video {
-  border:5px solid black;
-  width:480px;
-  height:360px;
-}
-button {
-  font: 18px sans-serif;
-  padding: 8px;
-}
-textarea {
-  font-family: monospace;
-  margin: 2px;
-  width:480px;
-  height:640px;
-}
-</style>
-</head>
-<body>
-<video id="vid1" autoplay="true" muted="true"></video>
-<video id="vid2" autoplay></video>
-<video id="vid3" autoplay></video>
-<br>
-<button id="btn1" onclick="start()">Start</button>
-<button id="btn2" onclick="call()">Call</button>
-<button id="btn3" onclick="hangup()">Hang Up</button>
-<br>
-<script>
-//var vid1 = document.getElementById("vid1");
-//var vid2 = document.getElementById("vid2");
-btn1.disabled = false;
-btn2.disabled = true;
-btn3.disabled = true;
-var pc1_local, pc1_remote;
-var pc2_local, pc2_remote;
-var localstream;
-var sdpConstraints = {'mandatory': {
-                        'OfferToReceiveAudio':true,
-                        'OfferToReceiveVideo':true }};
-
-function gotStream(stream){
-  trace("Received local stream");
-  // Call the polyfill wrapper to attach the media stream to this element.
-  attachMediaStream(vid1, stream);
-  localstream = stream;
-  btn2.disabled = false;
-}
-
-function start() {
-  trace("Requesting local stream");
-  btn1.disabled = true;
-  // Call into getUserMedia via the polyfill (adapter.js).
-  getUserMedia({audio:true, video:true},
-                gotStream, function() {});
-}
-
-function call() {
-  btn2.disabled = true;
-  btn3.disabled = false;
-  trace("Starting calls");
-  videoTracks = localstream.getVideoTracks();
-  audioTracks = localstream.getAudioTracks();
-  if (videoTracks.length > 0)
-    trace("Using Video device: " + videoTracks[0].label);
-  if (audioTracks.length > 0)
-    trace("Using Audio device: " + audioTracks[0].label);
-
-  // Create an RTCPeerConnection via the polyfill (adapter.js).
-  var servers = null;
-  pc1_local = new RTCPeerConnection(servers);
-  pc1_remote = new RTCPeerConnection(servers);
-  pc1_remote.onaddstream = gotRemoteStream1;
-  pc1_local.onicecandidate = iceCallback1Local;
-  pc1_remote.onicecandidate = iceCallback1Remote;
-  trace("PC1: created local and remote peer connection objects");
-
-  pc2_local = new RTCPeerConnection(servers);
-  pc2_remote = new RTCPeerConnection(servers);
-  pc2_remote.onaddstream = gotRemoteStream2;
-  pc2_local.onicecandidate = iceCallback2Local;
-  pc2_remote.onicecandidate = iceCallback2Remote;
-  trace("PC2: created local and remote peer connection objects");
-
-  pc1_local.addStream(localstream);
-  trace("Adding local stream to pc1_local");
-  pc1_local.createOffer(gotDescription1Local, onCreateSessionDescriptionError);
-
-  pc2_local.addStream(localstream);
-  trace("Adding local stream to pc2_local");
-  pc2_local.createOffer(gotDescription2Local, onCreateSessionDescriptionError);
-}
-
-function onCreateSessionDescriptionError(error) {
-  trace('Failed to create session description: ' + error.toString());
-}
-
-function gotDescription1Local(desc) {
-  pc1_local.setLocalDescription(desc);
-  trace("Offer from pc1_local \n" + desc.sdp);
-  pc1_remote.setRemoteDescription(desc);
-  // Since the "remote" side has no media stream we need
-  // to pass in the right constraints in order for it to
-  // accept the incoming offer of audio and video.
-  pc1_remote.createAnswer(gotDescription1Remote,
-                          onCreateSessionDescriptionError, sdpConstraints);
-}
-
-function gotDescription1Remote(desc) {
-  pc1_remote.setLocalDescription(desc);
-  trace("Answer from pc1_remote \n" + desc.sdp);
-  pc1_local.setRemoteDescription(desc);
-}
-
-function gotDescription2Local(desc) {
-  pc2_local.setLocalDescription(desc);
-  trace("Offer from pc2_local \n" + desc.sdp);
-  pc2_remote.setRemoteDescription(desc);
-  // Since the "remote" side has no media stream we need
-  // to pass in the right constraints in order for it to
-  // accept the incoming offer of audio and video.
-  pc2_remote.createAnswer(gotDescription2Remote,
-                          onCreateSessionDescriptionError, sdpConstraints);
-}
-
-function gotDescription2Remote(desc) {
-  pc2_remote.setLocalDescription(desc);
-  trace("Answer from pc2_remote \n" + desc.sdp);
-  pc2_local.setRemoteDescription(desc);
-}
-
-function hangup() {
-  trace("Ending calls");
-  pc1_local.close();
-  pc1_remote.close();
-  pc2_local.close();
-  pc2_remote.close();
-  pc1_local = pc1_remote = null;
-  pc2_local = pc2_remote = null;
-  btn3.disabled = true;
-  btn2.disabled = false;
-}
-
-function gotRemoteStream1(e) {
-  // Call the polyfill wrapper to attach the media stream to this element.
-  attachMediaStream(vid2, e.stream);
-  trace("PC1: Received remote stream");
-}
-
-function gotRemoteStream2(e) {
-  // Call the polyfill wrapper to attach the media stream to this element.
-  attachMediaStream(vid3, e.stream);
-  trace("PC2: Received remote stream");
-}
-
-function iceCallback1Local(event) {
-  handleCandidate(event.candidate, pc1_remote, "PC1: ", "local");
-}
-
-function iceCallback1Remote(event) {
-  handleCandidate(event.candidate, pc1_local, "PC1: ", "remote");
-}
-
-function iceCallback2Local(event) {
-  handleCandidate(event.candidate, pc2_remote, "PC2: ", "local");
-}
-
-function iceCallback2Remote(event) {
-  handleCandidate(event.candidate, pc2_local, "PC2: ", "remote");
-}
-
-function handleCandidate(candidate, dest, prefix, type) {
-  if (candidate) {
-    dest.addIceCandidate(new RTCIceCandidate(candidate),
-                         onAddIceCandidateSuccess, onAddIceCandidateError);
-    trace(prefix + "New " + type + " ICE candidate: " + candidate.candidate);
-  }
-}
-
-function onAddIceCandidateSuccess() {
-  trace("AddIceCandidate success.");
-}
-
-function onAddIceCandidateError(error) {
-  trace("Failed to add Ice Candidate: " + error.toString());
-}
-</script>
-</body>
-</html>
-
-
diff --git a/samples/web/content/munge-sdp/index.html b/samples/web/content/munge-sdp/index.html
deleted file mode 100644
index b24877d..0000000
--- a/samples/web/content/munge-sdp/index.html
+++ /dev/null
@@ -1,80 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-<base target="_blank">
-<title>Munge SDP</title>
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="../../css/main.css" />
-<link rel="stylesheet" href="css/main.css" />
-</head>
-<body>
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Munge SDP</span></h1>
-
-  <div id="selectSource" class="hidden">
-    <div id="select">Select an audio &amp; video source, then click <strong>Get media</strong>:</div>
-    <div class="source">
-      <label for="audioSrc">Audio source:</label>
-      <select id="audioSrc"></select>
-    </div>
-    <div class="source">
-      <label for="videoSrc">Video source:</label>
-      <select id="videoSrc"></select>
-    </div>
-  </div>
-
-  <div id="buttons">
-    <button id="getMedia">Get media</button>
-    <button id="createPeerConnection" disabled>Create peer connection</button>
-    <button id="createOffer" disabled>Create offer</button>
-    <button id="setOffer" disabled>Set offer</button>
-    <button id="createAnswer" disabled>Create answer</button>
-    <button id="setAnswer" disabled>Set answer</button>
-    <button id="hangup" disabled>Hang up</button>
-  </div>
-
-  <div id="preview">
-    <div id="local">
-      <h2>Local</h2>
-      <video autoplay muted></video>
-      <h2>Offer SDP</h2>
-      <textarea></textarea>
-      <br>
-      <br>
-    </div>
-    <div id="remote">
-      <h2>Remote</h2>
-      <video autoplay></video>
-      <h2>Answer SDP</h2>
-      <textarea></textarea>
-    </div>
-  </div>
-
-  <p>View the console to see logging.</p>
-
-  <p>The <code>RTCPeerConnection</code> objects <code>localPeerConnection</code> and <code>remotePeerConnection</code> are in global scope, so you can inspect them in the console as well.</p>
-
-  <p>For more information about RTCPeerConnection, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/#toc-rtcpeerconnection" title="RTCPeerConnection section of HTML5 Rocks article about WebRTC">Getting Started With WebRTC</a>.</p>
-
-<a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/munge-sdp" title="View source for this page on Github" id="viewSource">View source on Github</a>
-</div>
-
-<script src="../../js/adapter.js"></script>
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/peerconnection-audio/index.html b/samples/web/content/peerconnection-audio/index.html
deleted file mode 100644
index 2d5445e..0000000
--- a/samples/web/content/peerconnection-audio/index.html
+++ /dev/null
@@ -1,59 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-
-<base target="_blank">
-
-<title>Peer connection: audio only</title>
-
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link href='//fonts.googleapis.com/css?family=Inconsolata' rel='stylesheet' type='text/css'>
-
-<link rel="stylesheet" href="../../css/main.css" />
-<link rel="stylesheet" href="css/main.css" />
-
-</head>
-
-<body>
-
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Peer connection: audio only</span></h1>
-
-  <div id="audio">
-    <div>
-        <div class="label">Local audio:</div><audio id="audio1" autoplay controls muted></audio>
-    </div>
-    <div>
-      <div class="label">Remote audio:</div><audio id="audio2" autoplay controls></audio>
-    </div>
-  </div>
-
-  <div id="buttons">
-    <button id="callButton">Call</button>
-    <button id="hangupButton">Hang Up</button>
-  </div>
-
-  <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection-audio" title="View source for this page on Github" id="viewSource">View source on Github</a>
-
-</div>
-
-<script src="../../js/adapter.js"></script>
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/peerconnection-states/index.html b/samples/web/content/peerconnection-states/index.html
deleted file mode 100644
index c007a9f..0000000
--- a/samples/web/content/peerconnection-states/index.html
+++ /dev/null
@@ -1,65 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-<base target="_blank">
-<title>Peer connection: states</title>
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="../../css/main.css" />
-<link rel="stylesheet" href="css/main.css" />
-</head>
-<body>
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Peer connection: states</span></h1>
-
-  <video id="video1" autoplay></video>
-  <video id="video2" autoplay></video>
-
-  <div id="buttons">
-    <button id="startButton">Start</button>
-    <button id="callButton">Call</button>
-    <button id="hangupButton">Hang Up</button>
-  </div>
-
-  <div id="states">
-    <div>
-      <div class="label">PC1 state:</div><div id="pc1State" class="value"></div>
-    </div>
-    <div>
-      <div class="label">PC1 ICE state:</div><div id="pc1IceState" class="value"></div>
-    </div>
-    <div>
-      <div class="label">PC2 state:</div><div id="pc2State" class="value"></div>
-    </div>
-    <div>
-      <div class="label">PC2 ICE state:</div><div id="pc2IceState" class="value"></div>
-    </div>
-  </div>
-
-  <p>View the console to see logging. The <code>MediaStream</code> object <code>localStream</code>, and the <code>RTCPeerConnection</code> objects <code>localPeerConnection</code> and <code>remotePeerConnection</code> are in global scope, so you can inspect them in the console as well.</p>
-
-  <p>For more information about RTCPeerConnection, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/" title="HTML5 Rocks article about WebRTC by Sam Dutton">Getting Started With WebRTC</a>.</p>
-
-
-<a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection" title="View source for this page on Github" id="viewSource">View source on Github</a>
-
-</div>
-
-<script src="../../js/adapter.js"></script>
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/peerconnection-audio/css/main.css b/samples/web/content/peerconnection/audio/css/main.css
similarity index 100%
rename from samples/web/content/peerconnection-audio/css/main.css
rename to samples/web/content/peerconnection/audio/css/main.css
diff --git a/samples/web/content/peerconnection/audio/index.html b/samples/web/content/peerconnection/audio/index.html
new file mode 100644
index 0000000..0c99dde
--- /dev/null
+++ b/samples/web/content/peerconnection/audio/index.html
@@ -0,0 +1,57 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Peer connection: audio only</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link href="//fonts.googleapis.com/css?family=Inconsolata" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Peer connection: audio only</span></h1>
+
+    <div id="audio">
+      <div>
+        <div class="label">Local audio:</div><audio id="audio1" autoplay controls muted></audio>
+      </div>
+      <div>
+        <div class="label">Remote audio:</div><audio id="audio2" autoplay controls></audio>
+      </div>
+    </div>
+
+    <div id="buttons">
+      <button id="callButton">Call</button>
+      <button id="hangupButton">Hang Up</button>
+    </div>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/audio" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+</body>
+</html>
diff --git a/samples/web/content/peerconnection-audio/js/main.js b/samples/web/content/peerconnection/audio/js/main.js
similarity index 94%
rename from samples/web/content/peerconnection-audio/js/main.js
rename to samples/web/content/peerconnection/audio/js/main.js
index 2667f9c..5d84910 100644
--- a/samples/web/content/peerconnection-audio/js/main.js
+++ b/samples/web/content/peerconnection/audio/js/main.js
@@ -5,6 +5,10 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
+
+'use strict';
+
+var audio2 = document.querySelector('audio#audio2');
 var callButton = document.querySelector('button#callButton');
 var hangupButton = document.querySelector('button#hangupButton');
 hangupButton.disabled = true;
@@ -25,9 +29,10 @@
   trace('Received local stream');
   // Call the polyfill wrapper to attach the media stream to this element.
   localstream = stream;
-  audioTracks = localstream.getAudioTracks();
-  if (audioTracks.length > 0)
+  var audioTracks = localstream.getAudioTracks();
+  if (audioTracks.length > 0) {
     trace('Using Audio device: ' + audioTracks[0].label);
+  }
   pc1.addStream(localstream);
   trace('Adding Local Stream to peer connection');
 
@@ -59,7 +64,7 @@
       audio: true,
       video: false
     },
-    gotStream, function(e){
+    gotStream, function(e) {
       alert('getUserMedia() error: ' + e.name);
     });
 }
diff --git a/samples/web/content/constraints/css/main.css b/samples/web/content/peerconnection/constraints/css/main.css
similarity index 100%
rename from samples/web/content/constraints/css/main.css
rename to samples/web/content/peerconnection/constraints/css/main.css
diff --git a/samples/web/content/peerconnection/constraints/index.html b/samples/web/content/peerconnection/constraints/index.html
new file mode 100644
index 0000000..70584e8
--- /dev/null
+++ b/samples/web/content/peerconnection/constraints/index.html
@@ -0,0 +1,112 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Constraints and statistics</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link href="//fonts.googleapis.com/css?family=Inconsolata" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Constraints &amp; statistics</span></h1>
+
+    <section id="blurb">
+      <p>This demo shows ways to use constraints and statistics in WebRTC applications.</p>
+      <p>Set camera constraints, and click <strong>Get media</strong> to (re)open the camera with these included. Click <strong>Connect</strong> to create a (local) peer connection. The RTCPeerConnection objects <code>localPeerConnection</code> and <code>remotePeerConnection</code> can be inspected from the console.</p>
+      <p>Setting a value to zero will remove that constraint.</p>
+      <p>The left-hand video shows the output of <code>getUserMedia()</code>; on the right is the video after being passed through the peer connection. The transmission bitrate is displayed below the right-hand video.</p>
+    </section>
+
+    <button id="getMedia">Get media</button>
+    <button id="connect" disabled>Connect</button>
+
+
+    <section id="constraints">
+      <div id="getUserMedia">
+        <div class="input">
+          <h2>Camera constraints</h2>
+          <div id="minWidth">
+            <label>Min width <span>300</span>px:</label>
+            <input type="range" min="0" max="1280" value="300">
+          </div>
+          <div id="maxWidth">
+            <label>Max width <span>640</span>px:</label>
+            <input type="range" min="0" max="1280" value="640">
+          </div>
+          <div id="minHeight">
+            <label>Min height <span>200</span>px:</label>
+            <input type="range" min="0" max="1280" value="200">
+          </div>
+          <div id="maxHeight">
+            <label>Max height <span>480</span>px:</label>
+            <input type="range" min="0" max="1280" value="480">
+          </div>
+          <div id="framerate">
+            <label>Frame rate <span>0</span>fps:</label>
+            <input type="range" min="0" max="60" value="0">
+          </div>
+        </div>
+        <div id="getUserMediaConstraints" class="output"></div>
+      </div>
+
+      <div id="addStream">
+        <div class="input">
+          <h2>Stream constraints</h2>
+          <div id="maxBitrate">
+            <label>Max bitrate <span>1000</span>kb/s:</label>
+            <input type="range"  min="0" max="2000" value="1000">
+          </div>
+        </div>
+        <div id="addStreamConstraints" class="output"></div>
+      </div>
+
+    </section>
+
+    <section id="video">
+      <div id="localVideo">
+        <video autoplay muted></video>
+        <div></div>
+      </div>
+      <div id="remoteVideo">
+        <video autoplay muted></video>
+        <div></div>
+        <div id="bitrate"></div>
+      </div>
+    </section>
+
+    <section id="statistics">
+      <div id="senderStats"></div>
+      <div id="receiverStats"></div>
+    </section>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/constraints" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/constraints/js/main.js b/samples/web/content/peerconnection/constraints/js/main.js
similarity index 91%
rename from samples/web/content/constraints/js/main.js
rename to samples/web/content/peerconnection/constraints/js/main.js
index b94216e..0d37a88 100644
--- a/samples/web/content/constraints/js/main.js
+++ b/samples/web/content/peerconnection/constraints/js/main.js
@@ -5,6 +5,10 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
+
+'use strict';
+
+
 var getMediaButton = document.querySelector('button#getMedia');
 var connectButton = document.querySelector('button#connect');
 
@@ -23,8 +27,10 @@
   framerateInput.onchange = maxBitrateInput.onchange =
   displayRangeValue;
 
-var getUserMediaConstraintsDiv = document.querySelector('div#getUserMediaConstraints');
-var addStreamConstraintsDiv = document.querySelector('div#addStreamConstraints');
+var getUserMediaConstraintsDiv =
+  document.querySelector('div#getUserMediaConstraints');
+var addStreamConstraintsDiv =
+  document.querySelector('div#addStreamConstraints');
 var bitrateDiv = document.querySelector('div#bitrate');
 var senderStatsDiv = document.querySelector('div#senderStats');
 var receiverStatsDiv = document.querySelector('div#receiverStats');
@@ -56,7 +62,7 @@
     }
   }
   getUserMedia(getUserMediaConstraints(), gotStream,
-    function (e) {
+    function(e) {
       var message = 'getUserMedia error: ' + e.name + '\n' +
         'PermissionDeniedError may mean invalid constraints.';
       alert(message);
@@ -130,36 +136,37 @@
   remotePeerConnection = new RTCPeerConnection(null);
   localPeerConnection.addStream(localStream, addStreamConstraints());
   console.log('localPeerConnection creating offer');
-  localPeerConnection.onnegotiationeeded = function () {
+  localPeerConnection.onnegotiationneeded = function() {
     console.log('Negotiation needed - localPeerConnection');
   };
-  remotePeerConnection.onnegotiationeeded = function () {
+  remotePeerConnection.onnegotiationneeded = function() {
     console.log('Negotiation needed - remotePeerConnection');
   };
-  localPeerConnection.onicecandidate = function (e) {
+  localPeerConnection.onicecandidate = function(e) {
     console.log('Candidate localPeerConnection');
     if (e.candidate) {
       remotePeerConnection.addIceCandidate(new RTCIceCandidate(e.candidate),
         onAddIceCandidateSuccess, onAddIceCandidateError);
     }
   };
-  remotePeerConnection.onicecandidate = function (e) {
+  remotePeerConnection.onicecandidate = function(e) {
     console.log('Candidate remotePeerConnection');
     if (e.candidate) {
       var newCandidate = new RTCIceCandidate(e.candidate);
-      localPeerConnection.addIceCandidate(newCandidate, onAddIceCandidateSuccess, onAddIceCandidateError);
+      localPeerConnection.addIceCandidate(newCandidate,
+        onAddIceCandidateSuccess, onAddIceCandidateError);
     }
   };
-  remotePeerConnection.onaddstream = function (e) {
+  remotePeerConnection.onaddstream = function(e) {
     console.log('remotePeerConnection got stream');
     attachMediaStream(remoteVideo, e.stream);
     console.log('Remote video is ' + remoteVideo.src);
   };
-  localPeerConnection.createOffer(function (desc) {
+  localPeerConnection.createOffer(function(desc) {
     console.log('localPeerConnection offering');
     localPeerConnection.setLocalDescription(desc);
     remotePeerConnection.setRemoteDescription(desc);
-    remotePeerConnection.createAnswer(function (desc2) {
+    remotePeerConnection.createAnswer(function(desc2) {
       console.log('remotePeerConnection answering');
       remotePeerConnection.setLocalDescription(desc2);
       localPeerConnection.setRemoteDescription(desc2);
@@ -183,7 +190,7 @@
   this.addressPairMap = [];
 }
 
-AugumentedStatsResponse.prototype.collectAddressPairs = function (componentId) {
+AugumentedStatsResponse.prototype.collectAddressPairs = function(componentId) {
   if (!this.addressPairMap[componentId]) {
     this.addressPairMap[componentId] = [];
     for (var i = 0; i < this.response.result().length; ++i) {
@@ -197,26 +204,26 @@
   return this.addressPairMap[componentId];
 };
 
-AugumentedStatsResponse.prototype.result = function () {
+AugumentedStatsResponse.prototype.result = function() {
   return this.response.result();
 };
 
 // The indexed getter isn't easy to prototype.
-AugumentedStatsResponse.prototype.get = function (key) {
+AugumentedStatsResponse.prototype.get = function(key) {
   return this.response[key];
 };
 
 
 // Display statistics
-setInterval(function () {
-  var display = function (string) {
+setInterval(function() {
+  var display = function(string) {
     bitrateDiv.innerHTML = '<strong>Bitrate:</strong> ' + string;
   };
 
   //  display('No stream');
   if (remotePeerConnection && remotePeerConnection.getRemoteStreams()[0]) {
     if (remotePeerConnection.getStats) {
-      remotePeerConnection.getStats(function (rawStats) {
+      remotePeerConnection.getStats(function(rawStats) {
         var stats = new AugumentedStatsResponse(rawStats);
         var statsString = '';
         var results = stats.result();
@@ -252,7 +259,7 @@
           '<h2>Receiver stats</h2>' + statsString;
         display(videoFlowInfo);
       });
-      localPeerConnection.getStats(function (stats) {
+      localPeerConnection.getStats(function(stats) {
         var statsString = '';
         var results = stats.result();
         for (var i = 0; i < results.length; ++i) {
diff --git a/samples/web/content/peerconnection/create-offer/css/main.css b/samples/web/content/peerconnection/create-offer/css/main.css
new file mode 100644
index 0000000..d9205e2
--- /dev/null
+++ b/samples/web/content/peerconnection/create-offer/css/main.css
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+button {
+  margin: 20px 10px 0 0;
+  width: 100px;
+}
+
+div#constraints {
+  margin: 0 0 20px 0;
+}
+
+div#numAudioTracks {
+  margin: 0 0 20px 0;
+}
+
+div#constraints div {
+  margin: 0 0 10px 0;
+}
+
+div#constraints input {
+  margin: 0 10px 0 0;
+  position: relative;
+  top: -2px;
+}
+
+div#numAudioTracks input {
+  max-width: 30%;
+  position: relative;
+  top: 2px;
+  width: 200px;
+}
+
+label {
+  font-weight: 500;
+  margin: 0 10px 0 0;
+}
+
+textarea {
+  height: 200px;
+  width: 100%;
+}
\ No newline at end of file
diff --git a/samples/web/content/peerconnection/create-offer/index.html b/samples/web/content/peerconnection/create-offer/index.html
new file mode 100644
index 0000000..66ca1c1
--- /dev/null
+++ b/samples/web/content/peerconnection/create-offer/index.html
@@ -0,0 +1,70 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript" />
+  <meta name="description" content="Client-side WebRTC code samples." />
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>createOffer() output</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>createOffer() output</span></h1>
+
+    <p>This page tests the <code>createOffer()</code> method. It creates a peer connection with the requested number of audio <code>MediaStreamTrack</code>s and the checked constraints, then displays the SDP generated by <code>createOffer()</code>. Currently only audio tracks can be added, since there is no programmatic way to generate video tracks. (Web Audio is used to generate the audio tracks.)</p>
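+
+    <!--
+      The dummy audio tracks are generated with Web Audio, roughly as
+      sketched below (see js/main.js for the actual code; webkitAudioContext
+      is the Chrome-prefixed AudioContext):
+
+        var wacx = new webkitAudioContext();
+        var dst = wacx.createMediaStreamDestination();
+        pc.addStream(dst.stream);
+    -->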
+
+    <div id="numAudioTracks">
+      <label for="numAudioTracksInput">Number of audio tracks:</label>
+      <input id="numAudioTracksInput" max="8" min="0" type="range" value="1" />
+      <span id="numAudioTracksDisplay">1</span>
+    </div>
+
+    <div id="constraints">
+      <div>
+        <input id="audio" type="checkbox" /><label for="audio">Offer to receive audio</label>
+      </div>
+      <div>
+        <input id="video" type="checkbox" /><label for="video">Offer to receive video</label>
+      </div>
+      <div>
+        <input id="vad" type="checkbox" /><label for="vad">Voice activity detection</label>
+      </div>
+      <div>
+        <input id="restart" type="checkbox" /><label for="restart">Ice restart</label>
+      </div>
+    </div>
+
+    <textarea id="output"></textarea>
+
+    <button id="createOffer">Create offer</button>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/create-offer" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/peerconnection/create-offer/js/main.js b/samples/web/content/peerconnection/create-offer/js/main.js
new file mode 100644
index 0000000..c6a4c61
--- /dev/null
+++ b/samples/web/content/peerconnection/create-offer/js/main.js
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+var audioInput = document.querySelector('input#audio');
+var restartInput = document.querySelector('input#restart');
+var vadInput = document.querySelector('input#vad');
+var videoInput = document.querySelector('input#video');
+
+var numAudioTracksInput = document.querySelector('div#numAudioTracks input');
+var numAudioTracksDisplay = document.querySelector('span#numAudioTracksDisplay');
+var outputTextarea = document.querySelector('textarea#output');
+var createOfferButton = document.querySelector('button#createOffer');
+
+createOfferButton.onclick = createOffer;
+
+numAudioTracksInput.onchange = function() {
+  numAudioTracksDisplay.textContent = this.value;
+};
+
+var pc = new RTCPeerConnection(null);
+/* global webkitAudioContext */
+var wacx = new webkitAudioContext();
+
+function createOffer() {
+  var numRequestedAudioTracks = parseInt(numAudioTracksInput.value, 10);
+  while (numRequestedAudioTracks < pc.getLocalStreams().length) {
+    pc.removeStream(pc.getLocalStreams()[pc.getLocalStreams().length - 1]);
+  }
+  while (numRequestedAudioTracks > pc.getLocalStreams().length) {
+    // Create some dummy audio streams using Web Audio.
+    // Note that this fails if you try to do more than one track in Chrome
+    // right now.
+    var dst = wacx.createMediaStreamDestination();
+    pc.addStream(dst.stream);
+  }
+  var offerConstraints = {
+    'optional': [{
+      'OfferToReceiveAudio': audioInput.checked
+    }, {
+      'OfferToReceiveVideo': videoInput.checked
+    }]
+  };
+  // These constraints confuse Firefox, even if declared as optional.
+  if (webrtcDetectedBrowser !== 'firefox') {
+    offerConstraints.optional.push({
+      'VoiceActivityDetection': vadInput.checked
+    });
+    offerConstraints.optional.push({
+      'IceRestart': restartInput.checked
+    });
+  }
+  pc.createOffer(gotDescription, null, offerConstraints);
+}
+
+function gotDescription(desc) {
+  pc.setLocalDescription(desc);
+  outputTextarea.value = desc.sdp;
+}
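+
+// A minimal sketch of one way to inspect the printed SDP; this helper is
+// illustrative only and is not called by the page. Each media section in
+// SDP starts with an 'm=' line (RFC 4566), so counting those lines shows
+// how many audio sections createOffer() produced.
+function countMediaSections(sdp) {
+  return sdp.split(/\r?\n/).filter(function(line) {
+    return line.indexOf('m=') === 0;
+  }).length;
+}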
diff --git a/samples/web/content/dtmf/css/main.css b/samples/web/content/peerconnection/dtmf/css/main.css
similarity index 100%
rename from samples/web/content/dtmf/css/main.css
rename to samples/web/content/peerconnection/dtmf/css/main.css
diff --git a/samples/web/content/peerconnection/dtmf/index.html b/samples/web/content/peerconnection/dtmf/index.html
new file mode 100644
index 0000000..3819a14
--- /dev/null
+++ b/samples/web/content/peerconnection/dtmf/index.html
@@ -0,0 +1,86 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>DTMF</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link href="//fonts.googleapis.com/css?family=Inconsolata" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Send DTMF tones</span></h1>
+
+    <div id="dialPad">
+      <div>
+        <button>1</button><button>2</button><button>3</button><button>4</button>
+      </div>
+      <div>
+        <button>5</button><button>6</button><button>7</button><button>8</button>
+      </div>
+      <div>
+        <button>9</button><button>0</button><button>*</button><button>#</button>
+      </div>
+      <div>
+        <button>A</button><button>B</button><button>C</button><button>D</button>
+      </div>
+    </div>
+
+    <div id="dtmf">
+      <h2>Sent tones</h2>
+      <div id="dtmfStatus"></div>
+      <div id="sentTones"></div>
+      <audio autoplay="autoplay"></audio>
+    </div>
+
+    <div id="parameters">
+      <div>
+        <label for="duration">Duration:</label>
+        <input id="duration" type="text" value="500" />
+      </div>
+      <div>
+        <label for="gap">Gap:</label>
+        <input id="gap" type="text" value="50" />
+      </div>
+      <div>
+        <label for="tones">Tones:</label>
+        <input id="tones" type="text" value="1199##9,6633221,9966332,9966332,1199##9,6633221" />
+      </div>
+    </div>
+
+    <div id="buttons">
+      <button id="callButton">Call</button>
+      <button id="sendTonesButton">Send tones</button>
+      <button id="hangupButton">Hang up</button>
+    </div>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/dtmf" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/dtmf/js/main.js b/samples/web/content/peerconnection/dtmf/js/main.js
similarity index 94%
rename from samples/web/content/dtmf/js/main.js
rename to samples/web/content/peerconnection/dtmf/js/main.js
index 42d6460..e4dc0b1 100644
--- a/samples/web/content/dtmf/js/main.js
+++ b/samples/web/content/peerconnection/dtmf/js/main.js
@@ -5,6 +5,9 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
+
+'use strict';
+
 var callButton = document.querySelector('button#callButton');
 var sendTonesButton = document.querySelector('button#sendTonesButton');
 var hangupButton = document.querySelector('button#hangupButton');
@@ -48,8 +51,9 @@
   // Call the polyfill wrapper to attach the media stream to this element.
   localStream = stream;
   var audioTracks = localStream.getAudioTracks();
-  if (audioTracks.length > 0)
+  if (audioTracks.length > 0) {
     trace('Using Audio device: ' + audioTracks[0].label);
+  }
   pc1.addStream(localStream);
   trace('Adding Local Stream to peer connection');
   pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
@@ -79,7 +83,7 @@
       audio: true,
       video: false
     },
-    gotStream, function (e) {
+    gotStream, function(e) {
       alert('getUserMedia() error: ' + e.name);
     });
 
@@ -130,7 +134,9 @@
   if (pc1.createDTMFSender) {
     enableDtmfSender();
   } else {
-    alert('This demo requires the RTCPeerConnection method createDTMFSender() which is not support by this browser.');
+    alert(
+      'This demo requires the RTCPeerConnection method createDTMFSender(), ' +
+      'which is not supported by this browser.'
+    );
   }
 
 }
@@ -187,19 +193,19 @@
   }
 }
 
-function handleSendTonesClick(){
+function handleSendTonesClick() {
   sendTones(tonesInput.value);
 }
 
 function addDialPadHandlers() {
   var dialPad = document.querySelector('div#dialPad');
   var buttons = dialPad.querySelectorAll('button');
-  for (var i = 0; i != buttons.length; ++i) {
+  for (var i = 0; i !== buttons.length; ++i) {
     buttons[i].onclick = sendDtmfTone;
   }
 }
 
 function sendDtmfTone() {
+  /*jshint validthis:true */
   sendTones(this.textContent);
 }
-
diff --git a/samples/web/content/peerconnection/index.html b/samples/web/content/peerconnection/index.html
deleted file mode 100644
index ed3223b..0000000
--- a/samples/web/content/peerconnection/index.html
+++ /dev/null
@@ -1,58 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-
-<base target="_blank">
-
-<title>Peer connection</title>
-
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-
-<link rel="stylesheet" href="../../css/main.css" />
-<link rel="stylesheet" href="css/main.css" />
-
-</head>
-
-<body>
-
-<div id="container">
-
-  <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Peer connection</span></h1>
-
-  <video id="localVideo" autoplay muted></video>
-  <video id="remoteVideo" autoplay></video>
-
-  <div>
-    <button id="startButton">Start</button>
-    <button id="callButton">Call</button>
-    <button id="hangupButton">Hang Up</button>
-  </div>
-
-  <p>View the console to see logging. The <code>MediaStream</code> object <code>localStream</code>, and the <code>RTCPeerConnection</code> objects <code>localPeerConnection</code> and <code>remotePeerConnection</code> are in global scope, so you can inspect them in the console as well.</p>
-
-  <p>For more information about RTCPeerConnection, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/" title="HTML5 Rocks article about WebRTC by Sam Dutton">Getting Started With WebRTC</a>.</p>
-
-
-<a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection" title="View source for this page on Github" id="viewSource">View source on Github</a>
-
-</div>
-
-<script src="../../js/adapter.js"></script>
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/peerconnection/multiple-relay/css/main.css b/samples/web/content/peerconnection/multiple-relay/css/main.css
new file mode 100644
index 0000000..78a824e
--- /dev/null
+++ b/samples/web/content/peerconnection/multiple-relay/css/main.css
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+button {
+  margin: 20px 10px 0 0;
+  width: 100px;
+}
+
+div#buttons {
+  margin: 0 0 20px 0;
+}
+
+div#status {
+  height: 1em;
+  margin: 0 0 20px 0;
+}
+
+div#videos {
+  height: 232px;
+}
+
+video {
+  display: block;
+  float: left;
+  height: 232px;
+  width: calc(50% - 10px);
+}
+
+video:first-child {
+  margin: 0 20px 0 0;
+}
\ No newline at end of file
diff --git a/samples/web/content/peerconnection/multiple-relay/index.html b/samples/web/content/peerconnection/multiple-relay/index.html
new file mode 100644
index 0000000..a3a4742
--- /dev/null
+++ b/samples/web/content/peerconnection/multiple-relay/index.html
@@ -0,0 +1,58 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript" />
+  <meta name="description" content="Client-side WebRTC code samples." />
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Peer connection relay</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Peer connection relay</span></h1>
+
+    <div id="videos">
+      <video id="video1" autoplay></video>
+      <video id="video2" autoplay></video>
+    </div>
+
+    <div id="status"></div>
+
+    <div id="buttons">
+      <button id="start">Start</button>
+      <button id="call" disabled>Call</button>
+      <button id="insertRelay" disabled>Insert relay</button>
+      <button id="hangup" disabled>Hang Up</button>
+    </div>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/multiple-relay" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="../../../js/videopipe.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/peerconnection/multiple-relay/js/main.js b/samples/web/content/peerconnection/multiple-relay/js/main.js
new file mode 100644
index 0000000..79f9a95
--- /dev/null
+++ b/samples/web/content/peerconnection/multiple-relay/js/main.js
@@ -0,0 +1,85 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+/* global VideoPipe */
+
+var video1 = document.querySelector('video#video1');
+var video2 = document.querySelector('video#video2');
+
+var statusDiv = document.querySelector('div#status');
+
+var startButton = document.querySelector('button#start');
+var callButton = document.querySelector('button#call');
+var insertRelayButton = document.querySelector('button#insertRelay');
+var hangupButton = document.querySelector('button#hangup');
+
+startButton.onclick = start;
+callButton.onclick = call;
+insertRelayButton.onclick = insertRelay;
+hangupButton.onclick = hangup;
+
+var pipes = [];
+
+var localStream;
+var remoteStream;
+
+function gotStream(stream) {
+  trace('Received local stream');
+  attachMediaStream(video1, stream);
+  localStream = stream;
+  callButton.disabled = false;
+}
+
+function gotRemoteStream(stream) {
+  remoteStream = stream;
+  attachMediaStream(video2, stream);
+  trace('Received remote stream');
+  trace(pipes.length + ' elements in chain');
+  statusDiv.textContent = pipes.length + ' elements in chain';
+  insertRelayButton.disabled = false;
+}
+
+function start() {
+  trace('Requesting local stream');
+  startButton.disabled = true;
+  getUserMedia({
+      audio: false,
+      video: true
+    },
+    gotStream,
+    function(e) {
+      alert('getUserMedia() failed');
+      trace('getUserMedia() error: ' + e.name);
+    });
+}
+
+function call() {
+  callButton.disabled = true;
+  insertRelayButton.disabled = false;
+  hangupButton.disabled = false;
+  trace('Starting call');
+  pipes.push(new VideoPipe(localStream, gotRemoteStream));
+}
+
+function insertRelay() {
+  pipes.push(new VideoPipe(remoteStream, gotRemoteStream));
+  insertRelayButton.disabled = true;
+}
+
+function hangup() {
+  trace('Ending call');
+  while (pipes.length > 0) {
+    var pipe = pipes.pop();
+    pipe.close();
+  }
+  insertRelayButton.disabled = true;
+  hangupButton.disabled = true;
+  callButton.disabled = false;
+}
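+
+// A minimal sketch of the back-to-back connection that the VideoPipe helper
+// provides. The real implementation lives in ../../../js/videopipe.js;
+// SimpleVideoPipe below is a hypothetical illustration based on how the
+// helper is called above, not the shipped code.
+function SimpleVideoPipe(stream, onRemoteStream) {
+  var pc1 = new RTCPeerConnection(null);
+  var pc2 = new RTCPeerConnection(null);
+  // Trickle ICE candidates directly between the two connections.
+  pc1.onicecandidate = function(e) {
+    if (e.candidate) {
+      pc2.addIceCandidate(new RTCIceCandidate(e.candidate));
+    }
+  };
+  pc2.onicecandidate = function(e) {
+    if (e.candidate) {
+      pc1.addIceCandidate(new RTCIceCandidate(e.candidate));
+    }
+  };
+  pc2.onaddstream = function(e) {
+    onRemoteStream(e.stream);
+  };
+  pc1.addStream(stream);
+  pc1.createOffer(function(desc) {
+    pc1.setLocalDescription(desc);
+    pc2.setRemoteDescription(desc);
+    pc2.createAnswer(function(answer) {
+      pc2.setLocalDescription(answer);
+      pc1.setRemoteDescription(answer);
+    });
+  });
+  this.close = function() {
+    pc1.close();
+    pc2.close();
+  };
+}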
diff --git a/samples/web/content/peerconnection/multiple/css/main.css b/samples/web/content/peerconnection/multiple/css/main.css
new file mode 100644
index 0000000..d723aaf
--- /dev/null
+++ b/samples/web/content/peerconnection/multiple/css/main.css
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+button {
+  margin: 0 20px 0 0;
+  width: 83px;
+}
+
+button#hangupButton {
+  margin: 0;
+}
+
+video {
+  height: 225px;
+  margin: 0 0 20px 0;
+  width: calc(50% - 12px);
+}
+
+video#video1 {
+  margin: 0 20px 20px 0;
+}
+
+@media screen and (max-width: 400px) {
+  button {
+    margin: 0 11px 10px 0;
+  }
+
+  video {
+    height: 90px;
+    margin: 0 0 10px 0;
+    width: calc(50% - 8px);
+  }
+
+  video#video1 {
+    margin: 0 10px 10px 0;
+  }
+
+}
diff --git a/samples/web/content/peerconnection/multiple/index.html b/samples/web/content/peerconnection/multiple/index.html
new file mode 100644
index 0000000..5e56086
--- /dev/null
+++ b/samples/web/content/peerconnection/multiple/index.html
@@ -0,0 +1,58 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Multiple peer connections</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Multiple peer connections</span></h1>
+
+    <video id="video1" autoplay muted></video>
+    <video id="video2" autoplay></video>
+    <video id="video3" autoplay></video>
+
+    <div>
+      <button id="startButton">Start</button>
+      <button id="callButton">Call</button>
+      <button id="hangupButton">Hang Up</button>
+    </div>
+
+    <p>View the console to see logging and to inspect the <code>MediaStream</code> object <code>localStream</code>.</p>
+
+    <p>For more information about RTCPeerConnection, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/" title="HTML5 Rocks article about WebRTC by Sam Dutton">Getting Started With WebRTC</a>.</p>
+
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/multiple" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/peerconnection/multiple/js/main.js b/samples/web/content/peerconnection/multiple/js/main.js
new file mode 100644
index 0000000..7be8b59
--- /dev/null
+++ b/samples/web/content/peerconnection/multiple/js/main.js
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+var startButton = document.getElementById('startButton');
+var callButton = document.getElementById('callButton');
+var hangupButton = document.getElementById('hangupButton');
+callButton.disabled = true;
+hangupButton.disabled = true;
+startButton.onclick = start;
+callButton.onclick = call;
+hangupButton.onclick = hangup;
+
+var video1 = document.querySelector('video#video1');
+var video2 = document.querySelector('video#video2');
+var video3 = document.querySelector('video#video3');
+
+var pc1Local, pc1Remote;
+var pc2Local, pc2Remote;
+var sdpConstraints = {
+  'mandatory': {
+    'OfferToReceiveAudio': true,
+    'OfferToReceiveVideo': true
+  }
+};
+
+function gotStream(stream) {
+  trace('Received local stream');
+  // Call the polyfill (adapter.js) to attach the media stream to this element.
+  attachMediaStream(video1, stream);
+  window.localStream = stream;
+  callButton.disabled = false;
+}
+
+function start() {
+  trace('Requesting local stream');
+  startButton.disabled = true;
+  // Call getUserMedia() via the polyfill.
+  getUserMedia({
+      audio: true,
+      video: true
+    },
+    gotStream,
+    function(e) {
+      console.log('getUserMedia() error: ', e);
+    });
+}
+
+function call() {
+  callButton.disabled = true;
+  hangupButton.disabled = false;
+  trace('Starting calls');
+  var audioTracks = window.localStream.getAudioTracks();
+  var videoTracks = window.localStream.getVideoTracks();
+  if (audioTracks.length > 0) {
+    trace('Using audio device: ' + audioTracks[0].label);
+  }
+  if (videoTracks.length > 0) {
+    trace('Using video device: ' + videoTracks[0].label);
+  }
+  // Create an RTCPeerConnection via the polyfill.
+  var servers = null;
+  pc1Local = new RTCPeerConnection(servers);
+  pc1Remote = new RTCPeerConnection(servers);
+  pc1Remote.onaddstream = gotRemoteStream1;
+  pc1Local.onicecandidate = iceCallback1Local;
+  pc1Remote.onicecandidate = iceCallback1Remote;
+  trace('pc1: created local and remote peer connection objects');
+
+  pc2Local = new RTCPeerConnection(servers);
+  pc2Remote = new RTCPeerConnection(servers);
+  pc2Remote.onaddstream = gotRemoteStream2;
+  pc2Local.onicecandidate = iceCallback2Local;
+  pc2Remote.onicecandidate = iceCallback2Remote;
+  trace('pc2: created local and remote peer connection objects');
+
+  pc1Local.addStream(window.localStream);
+  trace('Adding local stream to pc1Local');
+  pc1Local.createOffer(gotDescription1Local, onCreateSessionDescriptionError);
+
+  pc2Local.addStream(window.localStream);
+  trace('Adding local stream to pc2Local');
+  pc2Local.createOffer(gotDescription2Local, onCreateSessionDescriptionError);
+}
+
+function onCreateSessionDescriptionError(error) {
+  trace('Failed to create session description: ' + error.toString());
+}
+
+function gotDescription1Local(desc) {
+  pc1Local.setLocalDescription(desc);
+  trace('Offer from pc1Local \n' + desc.sdp);
+  pc1Remote.setRemoteDescription(desc);
+  // Since the 'remote' side has no media stream we need
+  // to pass in the right constraints in order for it to
+  // accept the incoming offer of audio and video.
+  pc1Remote.createAnswer(gotDescription1Remote,
+    onCreateSessionDescriptionError, sdpConstraints);
+}
+
+function gotDescription1Remote(desc) {
+  pc1Remote.setLocalDescription(desc);
+  trace('Answer from pc1Remote \n' + desc.sdp);
+  pc1Local.setRemoteDescription(desc);
+}
+
+function gotDescription2Local(desc) {
+  pc2Local.setLocalDescription(desc);
+  trace('Offer from pc2Local \n' + desc.sdp);
+  pc2Remote.setRemoteDescription(desc);
+  // Since the 'remote' side has no media stream we need
+  // to pass in the right constraints in order for it to
+  // accept the incoming offer of audio and video.
+  pc2Remote.createAnswer(gotDescription2Remote,
+    onCreateSessionDescriptionError, sdpConstraints);
+}
+
+function gotDescription2Remote(desc) {
+  pc2Remote.setLocalDescription(desc);
+  trace('Answer from pc2Remote \n' + desc.sdp);
+  pc2Local.setRemoteDescription(desc);
+}
+
+function hangup() {
+  trace('Ending calls');
+  pc1Local.close();
+  pc1Remote.close();
+  pc2Local.close();
+  pc2Remote.close();
+  pc1Local = pc1Remote = null;
+  pc2Local = pc2Remote = null;
+  hangupButton.disabled = true;
+  callButton.disabled = false;
+}
+
+function gotRemoteStream1(e) {
+  // Call the polyfill wrapper to attach the media stream to this element.
+  attachMediaStream(video2, e.stream);
+  trace('pc1: received remote stream');
+}
+
+function gotRemoteStream2(e) {
+  // Call the polyfill wrapper to attach the media stream to this element.
+  attachMediaStream(video3, e.stream);
+  trace('pc2: received remote stream');
+}
+
+function iceCallback1Local(event) {
+  handleCandidate(event.candidate, pc1Remote, 'pc1: ', 'local');
+}
+
+function iceCallback1Remote(event) {
+  handleCandidate(event.candidate, pc1Local, 'pc1: ', 'remote');
+}
+
+function iceCallback2Local(event) {
+  handleCandidate(event.candidate, pc2Remote, 'pc2: ', 'local');
+}
+
+function iceCallback2Remote(event) {
+  handleCandidate(event.candidate, pc2Local, 'pc2: ', 'remote');
+}
+
+function handleCandidate(candidate, dest, prefix, type) {
+  if (candidate) {
+    dest.addIceCandidate(new RTCIceCandidate(candidate),
+      onAddIceCandidateSuccess, onAddIceCandidateError);
+    trace(prefix + 'New ' + type + ' ICE candidate: ' + candidate.candidate);
+  }
+}
+
+function onAddIceCandidateSuccess() {
+  trace('AddIceCandidate success.');
+}
+
+function onAddIceCandidateError(error) {
+  trace('Failed to add ICE candidate: ' + error.toString());
+}
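+
+// The pattern above generalizes to more viewers: each additional viewer
+// needs its own local/remote RTCPeerConnection pair sharing the same
+// stream. createConnectionPair is a hypothetical helper, not part of this
+// sample; it reuses handleCandidate() defined above.
+function createConnectionPair(stream, onRemoteStream, label) {
+  var local = new RTCPeerConnection(null);
+  var remote = new RTCPeerConnection(null);
+  remote.onaddstream = function(e) {
+    onRemoteStream(e.stream);
+  };
+  local.onicecandidate = function(e) {
+    handleCandidate(e.candidate, remote, label, 'local');
+  };
+  remote.onicecandidate = function(e) {
+    handleCandidate(e.candidate, local, label, 'remote');
+  };
+  local.addStream(stream);
+  return {
+    local: local,
+    remote: remote
+  };
+}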
diff --git a/samples/web/content/munge-sdp/css/main.css b/samples/web/content/peerconnection/munge-sdp/css/main.css
similarity index 90%
rename from samples/web/content/munge-sdp/css/main.css
rename to samples/web/content/peerconnection/munge-sdp/css/main.css
index 01b2c25..aa1d5dc 100644
--- a/samples/web/content/munge-sdp/css/main.css
+++ b/samples/web/content/peerconnection/munge-sdp/css/main.css
@@ -76,7 +76,7 @@
   }
 }
 
-@media screen and (max-height: 350px) {
+@media screen and (max-width: 800px) {
   button {
     margin: 0 15px 15px 0;
     width: 155px;
@@ -90,4 +90,13 @@
   select {
     margin: 0 1.5em 0 0;
   }
+  textarea {
+    font-size: 0.7em;
+  }
+}
+
+@media screen and (max-width: 500px) {
+  textarea {
+    font-size: 0.5em;
+  }
 }
diff --git a/samples/web/content/peerconnection/munge-sdp/index.html b/samples/web/content/peerconnection/munge-sdp/index.html
new file mode 100644
index 0000000..2415a0a
--- /dev/null
+++ b/samples/web/content/peerconnection/munge-sdp/index.html
@@ -0,0 +1,87 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Munge SDP</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css">
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Munge SDP</span></h1>
+
+    <div id="selectSource" class="hidden">
+      <div id="select">Select an audio &amp; video source, then click <strong>Get media</strong>:</div>
+      <div class="source">
+        <label for="audioSrc">Audio source:</label>
+        <select id="audioSrc"></select>
+      </div>
+      <div class="source">
+        <label for="videoSrc">Video source:</label>
+        <select id="videoSrc"></select>
+      </div>
+    </div>
+
+    <div id="buttons">
+      <button id="getMedia">Get media</button>
+      <button id="createPeerConnection" disabled>Create peer connection</button>
+      <button id="createOffer" disabled>Create offer</button>
+      <button id="setOffer" disabled>Set offer</button>
+      <button id="createAnswer" disabled>Create answer</button>
+      <button id="setAnswer" disabled>Set answer</button>
+      <button id="hangup" disabled>Hang up</button>
+    </div>
+
+    <div id="preview">
+      <div id="local">
+        <h2>Local</h2>
+        <video autoplay muted></video>
+        <h2>Offer SDP</h2>
+        <textarea></textarea>
+        <br>
+        <br>
+      </div>
+      <div id="remote">
+        <h2>Remote</h2>
+        <video autoplay muted></video>
+        <h2>Answer SDP</h2>
+        <textarea></textarea>
+      </div>
+    </div>
+
+    <p>View the console to see logging.</p>
+
+    <p>The <code>RTCPeerConnection</code> objects <code>localPeerConnection</code> and <code>remotePeerConnection</code> are in global scope, so you can inspect them in the console as well.</p>
+
+    <p>For more information about RTCPeerConnection, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/#toc-rtcpeerconnection" title="RTCPeerConnection section of HTML5 Rocks article about WebRTC">Getting Started With WebRTC</a>.</p>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/munge-sdp" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/munge-sdp/js/main.js b/samples/web/content/peerconnection/munge-sdp/js/main.js
similarity index 92%
rename from samples/web/content/munge-sdp/js/main.js
rename to samples/web/content/peerconnection/munge-sdp/js/main.js
index ca4f3a1..1297715 100644
--- a/samples/web/content/munge-sdp/js/main.js
+++ b/samples/web/content/peerconnection/munge-sdp/js/main.js
@@ -5,6 +5,9 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
+
+'use strict';
+
 var getMediaButton = document.querySelector('button#getMedia');
 var createPeerConnectionButton = document.querySelector('button#createPeerConnection');
 var createOfferButton = document.querySelector('button#createOffer');
@@ -47,8 +50,9 @@
 getSources();
 
 function getSources() {
-  if (typeof MediaStreamTrack === 'undefined'){
-    alert('This browser does not support MediaStreamTrack.\n\nTry Chrome Canary.');
+  if (typeof MediaStreamTrack === 'undefined') {
+    alert(
+      'This browser does not support MediaStreamTrack.\n\nTry Chrome Canary.');
   } else {
     MediaStreamTrack.getSources(gotSources);
     selectSourceDiv.classList.remove('hidden');
@@ -82,7 +86,7 @@
   getMediaButton.disabled = true;
   createPeerConnectionButton.disabled = false;
 
-  if (!!localStream) {
+  if (localStream) {
     localVideo.src = null;
     localStream.stop();
   }
@@ -93,15 +97,19 @@
 
   var constraints = {
     audio: {
-      optional: [{sourceId: audioSource}]
+      optional: [{
+        sourceId: audioSource
+      }]
     },
     video: {
-      optional: [{sourceId: videoSource}]
+      optional: [{
+        sourceId: videoSource
+      }]
     }
   };
   trace('Requested local stream');
-  getUserMedia(constraints, gotStream, function(e){
-    console.log("navigator.getUserMedia error: ", e);
+  getUserMedia(constraints, gotStream, function(e) {
+    console.log('navigator.getUserMedia error: ', e);
   });
 }
 
@@ -159,7 +167,8 @@
 }
 
 function createOffer() {
-  localPeerConnection.createOffer(gotDescription1, onCreateSessionDescriptionError);
+  localPeerConnection.createOffer(gotDescription1,
+    onCreateSessionDescriptionError);
 }
 
 function onCreateSessionDescriptionError(error) {
@@ -191,7 +200,8 @@
   // Since the 'remote' side has no media stream we need
   // to pass in the right constraints in order for it to
   // accept the incoming offer of audio and video.
-  remotePeerConnection.createAnswer(gotDescription2, onCreateSessionDescriptionError,
+  remotePeerConnection.createAnswer(gotDescription2,
+    onCreateSessionDescriptionError,
     sdpConstraints);
 }
 
@@ -219,7 +229,7 @@
 function hangup() {
   remoteVideo.src = '';
   trace('Ending call');
-//  localStream.stop();
+  //  localStream.stop();
   localPeerConnection.close();
   remotePeerConnection.close();
   localPeerConnection = null;
diff --git a/samples/web/content/peerconnection/css/main.css b/samples/web/content/peerconnection/pc1/css/main.css
similarity index 100%
rename from samples/web/content/peerconnection/css/main.css
rename to samples/web/content/peerconnection/pc1/css/main.css
diff --git a/samples/web/content/peerconnection/pc1/index.html b/samples/web/content/peerconnection/pc1/index.html
new file mode 100644
index 0000000..a60c26e
--- /dev/null
+++ b/samples/web/content/peerconnection/pc1/index.html
@@ -0,0 +1,56 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Peer connection</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Peer connection</span></h1>
+
+    <video id="localVideo" autoplay muted></video>
+    <video id="remoteVideo" autoplay></video>
+
+    <div>
+      <button id="startButton">Start</button>
+      <button id="callButton">Call</button>
+      <button id="hangupButton">Hang Up</button>
+    </div>
+
+    <p>View the console to see logging. The <code>MediaStream</code> object <code>localStream</code> and the <code>RTCPeerConnection</code> objects <code>pc1</code> and <code>pc2</code> are in global scope, so you can inspect them in the console as well.</p>
+
+    <p>For more information about RTCPeerConnection, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/" title="HTML5 Rocks article about WebRTC by Sam Dutton">Getting Started With WebRTC</a>.</p>
+
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/pc1" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+</body>
+</html>
diff --git a/samples/web/content/peerconnection/js/main.js b/samples/web/content/peerconnection/pc1/js/main.js
similarity index 77%
rename from samples/web/content/peerconnection/js/main.js
rename to samples/web/content/peerconnection/pc1/js/main.js
index 2569a9f..7e1a542 100644
--- a/samples/web/content/peerconnection/js/main.js
+++ b/samples/web/content/peerconnection/pc1/js/main.js
@@ -5,6 +5,9 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
+
+'use strict';
+
 var startButton = document.getElementById('startButton');
 var callButton = document.getElementById('callButton');
 var hangupButton = document.getElementById('hangupButton');
@@ -18,13 +21,13 @@
 var localVideo = document.getElementById('localVideo');
 var remoteVideo = document.getElementById('remoteVideo');
 
-localVideo.addEventListener('loadedmetadata', function () {
+localVideo.addEventListener('loadedmetadata', function() {
   trace('Local video currentSrc: ' + this.currentSrc +
     ', videoWidth: ' + this.videoWidth +
     'px,  videoHeight: ' + this.videoHeight + 'px');
 });
 
-remoteVideo.addEventListener('loadedmetadata', function () {
+remoteVideo.addEventListener('loadedmetadata', function() {
   trace('Remote video currentSrc: ' + this.currentSrc +
     ', videoWidth: ' + this.videoWidth +
     'px,  videoHeight: ' + this.videoHeight + 'px');
@@ -32,15 +35,15 @@
 
 remoteVideo.onresize = function() {
   trace('Remote video size changed to ' +
-        remoteVideo.videoWidth  + 'x' + remoteVideo.videoHeight);
+    remoteVideo.videoWidth + 'x' + remoteVideo.videoHeight);
   // We'll use the first onsize callback as an indication that video has started
   // playing out.
   if (startTime) {
-    var elapsedTime = performance.now() - startTime;
+    var elapsedTime = window.performance.now() - startTime;
     trace('Setup time: ' + elapsedTime.toFixed(3) + 'ms');
     startTime = null;
   }
-}
+};
 
 var localStream, pc1, pc2;
 var sdpConstraints = {
@@ -51,10 +54,11 @@
 };
 
 function getName(pc) {
-  return (pc == pc1) ? 'pc1' : 'pc2';
+  return (pc === pc1) ? 'pc1' : 'pc2';
 }
+
 function getOtherPc(pc) {
-  return (pc == pc1) ? pc2 : pc1;
+  return (pc === pc1) ? pc2 : pc1;
 }
 
 function gotStream(stream) {
@@ -73,7 +77,7 @@
       audio: true,
       video: true
     }, gotStream,
-    function (e) {
+    function(e) {
       alert('getUserMedia() error: ' + e.name);
     });
 }
@@ -82,22 +86,32 @@
   callButton.disabled = true;
   hangupButton.disabled = false;
   trace('Starting call');
-  startTime = performance.now();
+  startTime = window.performance.now();
   var videoTracks = localStream.getVideoTracks();
   var audioTracks = localStream.getAudioTracks();
-  if (videoTracks.length > 0)
+  if (videoTracks.length > 0) {
     trace('Using video device: ' + videoTracks[0].label);
-  if (audioTracks.length > 0)
+  }
+  if (audioTracks.length > 0) {
     trace('Using audio device: ' + audioTracks[0].label);
+  }
   var servers = null;
   pc1 = new RTCPeerConnection(servers);
   trace('Created local peer connection object pc1');
-  pc1.onicecandidate = function(e) { onIceCandidate(pc1, e) };
+  pc1.onicecandidate = function(e) {
+    onIceCandidate(pc1, e);
+  };
   pc2 = new RTCPeerConnection(servers);
   trace('Created remote peer connection object pc2');
-  pc2.onicecandidate = function(e) { onIceCandidate(pc2, e) };
-  pc1.oniceconnectionstatechange = function(e) { onIceStateChange(pc1, e) };
-  pc2.oniceconnectionstatechange = function(e) { onIceStateChange(pc2, e) };
+  pc2.onicecandidate = function(e) {
+    onIceCandidate(pc2, e);
+  };
+  pc1.oniceconnectionstatechange = function(e) {
+    onIceStateChange(pc1, e);
+  };
+  pc2.oniceconnectionstatechange = function(e) {
+    onIceStateChange(pc2, e);
+  };
   pc2.onaddstream = gotRemoteStream;
 
   pc1.addStream(localStream);
@@ -114,15 +128,19 @@
 function onCreateOfferSuccess(desc) {
   trace('Offer from pc1\n' + desc.sdp);
   trace('pc1 setLocalDescription start');
-  pc1.setLocalDescription(desc, function() { onSetLocalSuccess(pc1); });
+  pc1.setLocalDescription(desc, function() {
+    onSetLocalSuccess(pc1);
+  });
   trace('pc2 setRemoteDescription start');
-  pc2.setRemoteDescription(desc, function() { onSetRemoteSuccess(pc2); });
+  pc2.setRemoteDescription(desc, function() {
+    onSetRemoteSuccess(pc2);
+  });
   trace('pc2 createAnswer start');
   // Since the 'remote' side has no media stream we need
   // to pass in the right constraints in order for it to
   // accept the incoming offer of audio and video.
   pc2.createAnswer(onCreateAnswerSuccess, onCreateSessionDescriptionError,
-                   sdpConstraints);
+    sdpConstraints);
 }
 
 function onSetLocalSuccess(pc) {
@@ -142,17 +160,25 @@
 function onCreateAnswerSuccess(desc) {
   trace('Answer from pc2:\n' + desc.sdp);
   trace('pc2 setLocalDescription start');
-  pc2.setLocalDescription(desc, function() { onSetLocalSuccess(pc2); });
+  pc2.setLocalDescription(desc, function() {
+    onSetLocalSuccess(pc2);
+  });
   trace('pc1 setRemoteDescription start');
-  pc1.setRemoteDescription(desc, function() { onSetRemoteSuccess(pc1); });
+  pc1.setRemoteDescription(desc, function() {
+    onSetRemoteSuccess(pc1);
+  });
 }
 
 
 function onIceCandidate(pc, event) {
   if (event.candidate) {
     getOtherPc(pc).addIceCandidate(new RTCIceCandidate(event.candidate),
-        function() { onAddIceCandidateSuccess(pc) },
-        function(err) { onAddIceCandidateError(pc, err); });
+      function() {
+        onAddIceCandidateSuccess(pc);
+      },
+      function(err) {
+        onAddIceCandidateError(pc, err);
+      });
     trace(getName(pc) + ' ICE candidate: \n' + event.candidate.candidate);
   }
 }
@@ -168,6 +194,7 @@
 function onIceStateChange(pc, event) {
   if (pc) {
     trace(getName(pc) + ' ICE state: ' + pc.iceConnectionState);
+    console.log('ICE state change event: ', event);
   }
 }
 
diff --git a/samples/web/content/pr-answer/index.html b/samples/web/content/peerconnection/pr-answer/index.html
similarity index 98%
rename from samples/web/content/pr-answer/index.html
rename to samples/web/content/peerconnection/pr-answer/index.html
index 807be9a..7408c82 100644
--- a/samples/web/content/pr-answer/index.html
+++ b/samples/web/content/peerconnection/pr-answer/index.html
@@ -1,3 +1,4 @@
+<!DOCTYPE html>
 <!--
  *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
  *
@@ -5,12 +6,11 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
 -->
-<!DOCTYPE html>
 <html>
 <head>
 <title>PeerConnection PRANSWER Demo</title>
 <!-- Load the polyfill to switch-hit between Chrome and Firefox -->
-<script src="../../js/adapter.js"></script>
+<script src="../../../js/adapter.js"></script>
 <style>
 video {
   border:5px solid black;
diff --git a/samples/web/content/peerconnection-states/css/main.css b/samples/web/content/peerconnection/states/css/main.css
similarity index 100%
rename from samples/web/content/peerconnection-states/css/main.css
rename to samples/web/content/peerconnection/states/css/main.css
diff --git a/samples/web/content/peerconnection/states/index.html b/samples/web/content/peerconnection/states/index.html
new file mode 100644
index 0000000..51cdee4
--- /dev/null
+++ b/samples/web/content/peerconnection/states/index.html
@@ -0,0 +1,72 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Peer connection: states</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css">
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Peer connection: states</span></h1>
+
+    <video id="video1" autoplay></video>
+    <video id="video2" autoplay></video>
+
+    <div id="buttons">
+      <button id="startButton">Start</button>
+      <button id="callButton">Call</button>
+      <button id="hangupButton">Hang Up</button>
+    </div>
+
+    <div id="states">
+      <div>
+        <div class="label">PC1 state:</div><div id="pc1State" class="value"></div>
+      </div>
+      <div>
+        <div class="label">PC1 ICE state:</div><div id="pc1IceState" class="value"></div>
+      </div>
+      <div>
+        <div class="label">PC2 state:</div><div id="pc2State" class="value"></div>
+      </div>
+      <div>
+        <div class="label">PC2 ICE state:</div><div id="pc2IceState" class="value"></div>
+      </div>
+    </div>
+
+    <p>View the console to see logging. The <code>MediaStream</code> object <code>localstream</code> and the <code>RTCPeerConnection</code> objects <code>pc1</code> and <code>pc2</code> are in global scope, so you can inspect them in the console as well.</p>
+
+    <p>For more information about RTCPeerConnection, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/" title="HTML5 Rocks article about WebRTC by Sam Dutton">Getting Started With WebRTC</a>.</p>
+
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/states" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/peerconnection-states/js/main.js b/samples/web/content/peerconnection/states/js/main.js
similarity index 84%
rename from samples/web/content/peerconnection-states/js/main.js
rename to samples/web/content/peerconnection/states/js/main.js
index 71df0df..373cb06 100644
--- a/samples/web/content/peerconnection-states/js/main.js
+++ b/samples/web/content/peerconnection/states/js/main.js
@@ -5,6 +5,9 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
+
+'use strict';
+
 var video1 = document.querySelector('video#video1');
 var video2 = document.querySelector('video#video2');
 
@@ -25,15 +28,14 @@
 
 var localstream, pc1, pc2;
 
-var sdpConstraints =
-  {
-    mandatory: {
-      OfferToReceiveAudio: true,
-      OfferToReceiveVideo: true
-    }
-  };
+var sdpConstraints = {
+  mandatory: {
+    OfferToReceiveAudio: true,
+    OfferToReceiveVideo: true
+  }
+};
 
-function gotStream(stream){
+function gotStream(stream) {
   trace('Received local stream');
   // Call the polyfill wrapper to attach the media stream to this element.
   attachMediaStream(video1, stream);
@@ -45,8 +47,11 @@
   trace('Requesting local stream');
   startButton.disabled = true;
   // Call into getUserMedia via the polyfill (adapter.js).
-  getUserMedia({audio: true, video: true}, gotStream,
-    function(e){
+  getUserMedia({
+      audio: true,
+      video: true
+    }, gotStream,
+    function(e) {
      alert('getUserMedia() error: ' + e.name);
     });
 }
@@ -57,12 +62,16 @@
   trace('Starting call');
   var videoTracks = localstream.getVideoTracks();
   var audioTracks = localstream.getAudioTracks();
-  if (videoTracks.length > 0)
+  if (videoTracks.length > 0) {
     trace('Using Video device: ' + videoTracks[0].label);
-  if (audioTracks.length > 0)
+  }
+  if (audioTracks.length > 0) {
     trace('Using Audio device: ' + audioTracks[0].label);
+  }
   var servers = null;
-  var pcConstraints = {'optional': []};
+  var pcConstraints = {
+    'optional': []
+  };
 
   pc1 = new RTCPeerConnection(servers, pcConstraints);
   trace('Created local peer connection object pc1');
@@ -123,17 +132,17 @@
   trace('Ending call');
   pc1.close();
   pc2.close();
-  pc1StateDiv.textContent += ' ⇒ ' + pc1.signalingState || pc1.readyState;
-  pc2StateDiv.textContent += ' ⇒ ' + pc2.signalingState || pc2.readyState;
-  pc1IceStateDiv.textContent += ' ⇒ ' + pc1.iceConnectionState;
-  pc2IceStateDiv.textContent += ' ⇒ ' + pc2.iceConnectionState;
+  pc1StateDiv.textContent += ' => ' + (pc1.signalingState || pc1.readyState);
+  pc2StateDiv.textContent += ' => ' + (pc2.signalingState || pc2.readyState);
+  pc1IceStateDiv.textContent += ' => ' + pc1.iceConnectionState;
+  pc2IceStateDiv.textContent += ' => ' + pc2.iceConnectionState;
   pc1 = null;
   pc2 = null;
   hangupButton.disabled = true;
   callButton.disabled = false;
 }
 
-function gotRemoteStream(e){
+function gotRemoteStream(e) {
   attachMediaStream(video2, e.stream);
   trace('Got remote stream');
 }
@@ -143,7 +152,7 @@
   if (pc1) {
     state = pc1.signalingState || pc1.readyState;
     trace('pc1 state change callback, state: ' + state);
-    pc1StateDiv.textContent += ' ⇒ ' + state;
+    pc1StateDiv.textContent += ' => ' + state;
   }
 }
 
@@ -152,7 +161,7 @@
   if (pc2) {
     state = pc2.signalingState || pc2.readyState;
     trace('pc2 state change callback, state: ' + state);
-    pc2StateDiv.textContent += ' ⇒ ' + state;
+    pc2StateDiv.textContent += ' => ' + state;
   }
 }
 
@@ -161,7 +170,7 @@
   if (pc1) {
     iceState = pc1.iceConnectionState;
     trace('pc1 ICE connection state change callback, state: ' + iceState);
-    pc1IceStateDiv.textContent += ' ⇒ ' + iceState;
+    pc1IceStateDiv.textContent += ' => ' + iceState;
   }
 }
 
@@ -170,11 +179,11 @@
   if (pc2) {
     iceState = pc2.iceConnectionState;
     trace('pc2 ICE connection state change callback, state: ' + iceState);
-    pc2IceStateDiv.textContent += ' ⇒ ' + iceState;
+    pc2IceStateDiv.textContent += ' => ' + iceState;
   }
 }
 
-function iceCallback1(event){
+function iceCallback1(event) {
   if (event.candidate) {
     pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
       onAddIceCandidateSuccess, onAddIceCandidateError);
@@ -184,7 +193,7 @@
   }
 }
 
-function iceCallback2(event){
+function iceCallback2(event) {
   if (event.candidate) {
     pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
       onAddIceCandidateSuccess, onAddIceCandidateError);
@@ -201,8 +210,3 @@
 function onAddIceCandidateError(error) {
  trace('Failed to add ICE candidate: ' + error.toString());
 }
-
-function trace(text) {
-  console.log((window.performance.now() / 1000).toFixed(3) + ': ' + text);
-}
-
diff --git a/samples/web/content/trickle-ice/css/main.css b/samples/web/content/peerconnection/trickle-ice/css/main.css
similarity index 100%
rename from samples/web/content/trickle-ice/css/main.css
rename to samples/web/content/peerconnection/trickle-ice/css/main.css
diff --git a/samples/web/content/peerconnection/trickle-ice/index.html b/samples/web/content/peerconnection/trickle-ice/index.html
new file mode 100644
index 0000000..e73edcf
--- /dev/null
+++ b/samples/web/content/peerconnection/trickle-ice/index.html
@@ -0,0 +1,120 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Trickle ICE</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css">
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Trickle ICE</span></h1>
+
+    <section>
+
+      <p>This page tests the trickle ICE functionality in a WebRTC implementation. It creates a PeerConnection with the specified ICE servers, then starts candidate gathering for a session with a single audio stream. As candidates are gathered, they are displayed in the table below, along with an indication when candidate gathering is complete.</p>
+
+      <p>Individual STUN and TURN servers can be added using the Add Server / Remove Server controls below; in addition, the type of candidates released to the application can be controlled via the IceTransports constraint.</p>
+
+    </section>
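+
+    <!--
+      A minimal sketch of the gathering flow this page drives (assumed
+      shape; see js/main.js for the actual implementation):
+
+        pc.onicecandidate = function(e) {
+          if (e.candidate) {
+            // One table row per gathered candidate.
+          } else {
+            // A null candidate means gathering is complete.
+          }
+        };
+    -->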
+
+    <section id="iceServers">
+
+      <h2>ICE servers</h2>
+
+      <select id="servers" size="4">
+        <option value="{&quot;url&quot;:&quot;stun:stun.l.google.com:19302&quot;}">stun:stun.l.google.com:19302</option>
+      </select>
+
+      <div>
+        <label for="url">STUN or TURN URI:</label>
+        <input id="url">
+      </div>
+
+      <div>
+        <label for="username">TURN username:</label>
+        <input id="username">
+      </div>
+
+      <div>
+        <label for="password">TURN password:</label>
+        <input id="password">
+      </div>
+
+      <div>
+        <button id="add">Add Server</button>
+        <button id="remove">Remove Server</button>
+      </div>
+
+    </section>
+
+    <section id="iceOptions">
+
+      <h2>ICE options</h2>
+
+      <div id="iceTransports">
+        <span>IceTransports value:</span>
+        <input type="radio" name="transports" value="all" id="all" checked><label for="all">all</label>
+        <input type="radio" name="transports" value="relay" id="relay">
+        <label for="relay">relay</label>
+        <input type="radio" name="transports" value="none" id="none">
+        <label for="none">none</label>
+      </div>
+      <div>
+        <label for="ipv6">Gather IPv6 candidates:</label>
+        <input id="ipv6" type="checkbox" checked>
+      </div>
+      <div>
+        <label for="unbundle">Gather unbundled RTCP candidates:</label>
+        <input id="unbundle" type="checkbox" checked>
+      </div>
+
+    </section>
+
+    <section>
+
+      <table id="candidates">
+        <thead id="candidatesHead"><tr>
+          <th>Time</th><th>Component</th><th>Type</th><th>Foundation</th>
+          <th>Protocol</th><th>Address</th><th>Port</th><th>Priority</th>
+        </tr></thead>
+        <tbody id="candidatesBody"></tbody>
+      </table>
+      <button id="gather">Gather candidates</button>
+
+    </section>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/trickle-ice" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/main.js"></script>
+
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/trickle-ice/js/main.js b/samples/web/content/peerconnection/trickle-ice/js/main.js
similarity index 81%
rename from samples/web/content/trickle-ice/js/main.js
rename to samples/web/content/peerconnection/trickle-ice/js/main.js
index dac28c5..d1c5855 100644
--- a/samples/web/content/trickle-ice/js/main.js
+++ b/samples/web/content/peerconnection/trickle-ice/js/main.js
@@ -5,6 +5,9 @@
  *  that can be found in the LICENSE file in the root of the source
  *  tree.
  */
+
+'use strict';
+
 var addButton = document.querySelector('button#add');
 var candidateTBody = document.querySelector('tbody#candidatesBody');
 var gatherButton = document.querySelector('button#gather');
@@ -57,7 +60,7 @@
     candidateTBody.removeChild(candidateTBody.firstChild);
   }
 
-  
+
   // Read the values from the input boxes.
   var iceServers = [];
   for (var i = 0; i < servers.length; ++i) {
@@ -75,16 +78,16 @@
   // Create a PeerConnection with no streams, but force a m=audio line.
   // This will gather candidates for either 1 or 2 ICE components, depending
   // on whether the unbundle RTCP checkbox is checked.
-  var config = {"iceServers": iceServers };
-  var pcConstraints = {"mandatory": {"IceTransports": iceTransports}};
-  var offerConstraints = {"mandatory": {"OfferToReceiveAudio": true}};
+  var config = {'iceServers': iceServers };
+  var pcConstraints = {'mandatory': {'IceTransports': iceTransports}};
+  var offerConstraints = {'mandatory': {'OfferToReceiveAudio': true}};
   // Whether we gather IPv6 candidates.
-  pcConstraints.optional = [{"googIPv6": ipv6Check.checked}];
+  pcConstraints.optional = [{'googIPv6': ipv6Check.checked}];
   // Whether we only gather a single set of candidates for RTP and RTCP.
-  offerConstraints.optional = [{"googUseRtpMUX": !unbundleCheck.checked}];
+  offerConstraints.optional = [{'googUseRtpMUX': !unbundleCheck.checked}];
 
-  trace("Creating new PeerConnection with config=" + JSON.stringify(config) +
-        ", constraints=" + JSON.stringify(pcConstraints));
+  trace('Creating new PeerConnection with config=' + JSON.stringify(config) +
+        ', constraints=' + JSON.stringify(pcConstraints));
   pc = new RTCPeerConnection(config, pcConstraints);
   pc.onicecandidate = iceCallback;
   pc.createOffer(gotDescription, noDescription, offerConstraints);
@@ -96,22 +99,22 @@
 }
 
 function noDescription(error) {
-  console.log("Error creating offer");
+  console.log('Error creating offer: ', error);
 }
 
 // Parse a candidate:foo string into an object, for easier use by other methods.
 function parseCandidate(text) {
-  var candidateStr = "candidate:";
+  var candidateStr = 'candidate:';
   var pos = text.indexOf(candidateStr) + candidateStr.length;
-  var fields = text.substr(pos).split(" ");
+  var fields = text.substr(pos).split(' ');
   return {
-    "component": fields[1],
-    "type": fields[7],
-    "foundation": fields[0],
-    "protocol": fields[2],
-    "address": fields[4],
-    "port": fields[5],
-    "priority": fields[3]
+    'component': fields[1],
+    'type': fields[7],
+    'foundation': fields[0],
+    'protocol': fields[2],
+    'address': fields[4],
+    'port': fields[5],
+    'priority': fields[3]
   };
 }
 
@@ -119,27 +122,27 @@
 // type preference, local preference, and (256 - component ID).
 // ex: 126 | 32252 | 255 (126 is host preference, 255 is component ID 1)
 function formatPriority(priority) {
-  var s = "";
+  var s = '';
   s += (priority >> 24);
-  s += " | ";
+  s += ' | ';
   s += (priority >> 8) & 0xFFFF;
-  s += " | ";
+  s += ' | ';
   s += priority & 0xFF;
-  return s;  
+  return s;
 }
 
 function appendCell(row, val, span) {
-  var cell = document.createElement("td");
+  var cell = document.createElement('td');
   cell.textContent = val;
   if (span) {
-    cell.setAttribute("colspan", span);
+    cell.setAttribute('colspan', span);
   }
   row.appendChild(cell);
 }
 
 function iceCallback(event) {
   var elapsed = ((window.performance.now() - begin) / 1000).toFixed(3);
-  var row = document.createElement("tr");
+  var row = document.createElement('tr');
   appendCell(row, elapsed);
   if (event.candidate) {
     var c = parseCandidate(event.candidate.candidate);
@@ -151,7 +154,7 @@
     appendCell(row, c.port);
     appendCell(row, formatPriority(c.priority));
   } else {
-    appendCell(row, "Done", 7);
+    appendCell(row, 'Done', 7);
     pc.close();
     pc = null;
   }
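For reference, this is roughly what the two helpers above produce for a made-up host candidate (all values illustrative, not from a real session):

    parseCandidate('candidate:1234567890 1 udp 2122260223 192.168.1.2 54321 typ host');
    // => {component: '1', type: 'host', foundation: '1234567890',
    //     protocol: 'udp', address: '192.168.1.2', port: '54321',
    //     priority: '2122260223'}
    formatPriority(2122260223);  // => '126 | 32542 | 255'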
diff --git a/samples/web/content/webaudio-input/audio/Shamisen-C4.wav b/samples/web/content/peerconnection/webaudio-input/audio/Shamisen-C4.wav
similarity index 100%
rename from samples/web/content/webaudio-input/audio/Shamisen-C4.wav
rename to samples/web/content/peerconnection/webaudio-input/audio/Shamisen-C4.wav
Binary files differ
diff --git a/samples/web/content/peerconnection/webaudio-input/css/main.css b/samples/web/content/peerconnection/webaudio-input/css/main.css
new file mode 100644
index 0000000..7d937d5
--- /dev/null
+++ b/samples/web/content/peerconnection/webaudio-input/css/main.css
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+audio {
+  margin: 0 0 20px 0;
+  width: 50%;
+}
+
+button {
+  margin: 0 20px 20px 0;
+  width: 89px;
+}
+
+div#options {
+  margin: 0 0 20px 0;
+}
+
+div#status {
+  background-color: #eee;
+  min-height: 140px;
+  overflow-y: scroll;
+  margin: 0 0 20px 0;
+  padding: 0 0 0 10px;
+  width: 50%;
+}
+
+input[type='checkbox'] {
+  margin: 0 10px 0 0;
+  position: relative;
+  top: -2px;
+}
+
+label {
+  font-weight: 400;
+}
+
+li {
+  margin: 0 0 10px 0;
+}
+
+ul {
+  list-style-type: square;
+  padding: 0 0 0 18px;
+}
\ No newline at end of file
diff --git a/samples/web/content/peerconnection/webaudio-input/index.html b/samples/web/content/peerconnection/webaudio-input/index.html
new file mode 100644
index 0000000..494ae7d
--- /dev/null
+++ b/samples/web/content/peerconnection/webaudio-input/index.html
@@ -0,0 +1,81 @@
+<!DOCTYPE html>
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<html>
+<head>
+
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript">
+  <meta name="description" content="Client-side WebRTC code samples.">
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+  <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1">
+
+  <base target="_blank">
+
+  <title>Web Audio input</title>
+
+  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
+  <link href="//fonts.googleapis.com/css?family=Inconsolata" rel="stylesheet" type="text/css">
+  <link rel="stylesheet" href="../../../css/main.css" />
+  <link rel="stylesheet" href="css/main.css" />
+
+</head>
+
+<body>
+
+  <div id="container">
+
+    <h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Web Audio input</span></h1>
+
+    <audio autoplay controls></audio>
+
+    <div id="options">
+      <input type="checkbox" id="renderLocally" disabled><label for="renderLocally">Add local audio to output</label>
+    </div>
+
+    <div id="buttons">
+      <button id="start">Start</button>
+      <button id="stop" disabled>Stop</button>
+    </div>
+
+    <div id="status"></div>
+
+    <p>Capture microphone input and stream it to a peer with processing applied to the audio.</p>
+
+    <p>The audio stream is:</p>
+
+    <ul>
+      <li>Recorded using <a href="http://www.html5audio.org/2012/09/live-audio-input-comes-to-googles-chrome-canary.html" title="Live audio input comes to Google's Chrome Canary">live audio input</a>.</li>
+      <li>Filtered using an HP filter with fc=1500 Hz.</li>
+      <li>Encoded using <a href="http://www.opus-codec.org/" title="Opus Codec">Opus</a>.</li>
+      <li>Transmitted (in loopback) to a remote peer using <a href="http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcpeerconnection-interface" title="RTCPeerConnection Interface">RTCPeerConnection</a> where it is decoded.</li>
+      <li>Finally, the received remote stream is used as the source for an &lt;audio&gt; element and played out locally.</li>
+    </ul>
+
+    <p>Press any key to add an effect to the transmitted audio while talking.</p>
+
+    <p>Please note that:</p>
+    <ul>
+      <li>Linux is currently not supported.</li>
+      <li>Sample rate and channel configuration must be the same for input and
+        output sides on Windows.</li>
+      <li>Only the default microphone device can be used for capturing.</li>
+    </ul>
+
+    <p>For more information, see <a href="https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/webrtc-integration.html" title="Example 3: Capture microphone input and stream it out to a peer with a processing effect applied to the audio">WebRTC integration with the Web Audio API</a>.</p>
+
+    <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/peerconnection/webaudio-input" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+  </div>
+
+  <script src="../../../js/adapter.js"></script>
+  <script src="js/webaudioextended.js"></script>
+  <script src="js/main.js"></script>
+  <script src="../../../js/lib/ga.js"></script>
+
+</body>
+</html>
diff --git a/samples/web/content/peerconnection/webaudio-input/js/main.js b/samples/web/content/peerconnection/webaudio-input/js/main.js
new file mode 100644
index 0000000..425f4f4
--- /dev/null
+++ b/samples/web/content/peerconnection/webaudio-input/js/main.js
@@ -0,0 +1,163 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+/* global WebAudioExtended, webkitRTCPeerConnection */
+
+var audioElement = document.querySelector('audio');
+var statusDiv = document.querySelector('div#status');
+
+var startButton = document.querySelector('button#start');
+var stopButton = document.querySelector('button#stop');
+startButton.onclick = start;
+stopButton.onclick = stop;
+
+var renderLocallyCheckbox = document.querySelector('input#renderLocally');
+renderLocallyCheckbox.onclick = toggleRenderLocally;
+
+document.addEventListener('keydown', handleKeyDown, false);
+
+
+var localStream;
+var pc1, pc2;
+
+var webAudio = new WebAudioExtended();
+webAudio.loadSound('audio/Shamisen-C4.wav');
+
+
+function trace(txt) {
+  statusDiv.innerHTML += '<p>' + txt + '</p>';
+}
+
+function start() {
+  webAudio.start();
+  var constraints = {
+    audio: true,
+    video: false
+  };
+  getUserMedia(constraints, gotStream, gotStreamFailed);
+  startButton.disabled = true;
+  stopButton.disabled = false;
+}
+
+function stop() {
+  webAudio.stop();
+  pc1.close();
+  pc2.close();
+  pc1 = null;
+  pc2 = null;
+  startButton.disabled = false;
+  stopButton.disabled = true;
+  renderLocallyCheckbox.disabled = true;
+  localStream.stop();
+}
+
+function gotStream(stream) {
+  renderLocallyCheckbox.disabled = false;
+  var audioTracks = stream.getAudioTracks();
+  if (audioTracks.length === 1) {
+    console.log('gotStream({audio:true, video:false})');
+
+    var filteredStream = webAudio.applyFilter(stream);
+
+    var servers = null;
+
+    pc1 = new webkitRTCPeerConnection(servers);
+    console.log('Created local peer connection object pc1');
+    pc1.onicecandidate = iceCallback1;
+    pc2 = new webkitRTCPeerConnection(servers);
+    console.log('Created remote peer connection object pc2');
+    pc2.onicecandidate = iceCallback2;
+    pc2.onaddstream = gotRemoteStream;
+
+    pc1.addStream(filteredStream);
+    pc1.createOffer(gotDescription1);
+
+    stream.onended = function() {
+      console.log('stream.onended');
+      startButton.disabled = false;
+      stopButton.disabled = true;
+    };
+
+    localStream = stream;
+  } else {
+    alert('The media stream contains an invalid number of audio tracks.');
+    stream.stop();
+  }
+}
+
+function gotStreamFailed(error) {
+  startButton.disabled = false;
+  stopButton.disabled = true;
+  alert('Failed to get access to local media. Error code: ' +
+    error.code);
+}
+
+function forceOpus(sdp) {
+  // Remove all other codecs (not the video codecs though).
+  sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
+    'm=audio $1 RTP/SAVPF 111\r\n');
+  sdp = sdp.replace(/a=rtpmap:(?!111)\d{1,3} (?!VP8|red|ulpfec).*\r\n/g, '');
+  return sdp;
+}
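+// As an illustration (made-up SDP, not from a real session), forceOpus turns
+//   m=audio 9 RTP/SAVPF 111 103 104 0 8 126
+// into
+//   m=audio 9 RTP/SAVPF 111
+// and strips the a=rtpmap lines of every removed payload type, keeping only
+// Opus (111) plus the VP8/red/ulpfec video entries.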
+
+function gotDescription1(desc) {
+  console.log('Offer from pc1 \n' + desc.sdp);
+  var modifiedOffer = new RTCSessionDescription({
+    type: 'offer',
+    sdp: forceOpus(desc.sdp)
+  });
+  pc1.setLocalDescription(modifiedOffer);
+  console.log('Modified offer (Opus forced) from pc1 \n' + modifiedOffer.sdp);
+  pc2.setRemoteDescription(modifiedOffer);
+  pc2.createAnswer(gotDescription2);
+}
+
+function gotDescription2(desc) {
+  pc2.setLocalDescription(desc);
+  console.log('Answer from pc2 \n' + desc.sdp);
+  pc1.setRemoteDescription(desc);
+}
+
+function gotRemoteStream(e) {
+  attachMediaStream(audioElement, e.stream);
+}
+
+function iceCallback1(event) {
+  if (event.candidate) {
+    pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
+      onAddIceCandidateSuccess, onAddIceCandidateError);
+    console.log('Local ICE candidate: \n' + event.candidate.candidate);
+  }
+}
+
+function iceCallback2(event) {
+  if (event.candidate) {
+    pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
+      onAddIceCandidateSuccess, onAddIceCandidateError);
+    console.log('Remote ICE candidate: \n ' + event.candidate.candidate);
+  }
+}
+
+function onAddIceCandidateSuccess() {
+  trace('AddIceCandidate success.');
+}
+
+function onAddIceCandidateError(error) {
+  trace('Failed to add Ice Candidate: ' + error.toString());
+}
+
+function handleKeyDown() {
+  webAudio.addEffect();
+}
+
+function toggleRenderLocally() {
+  console.log('Render locally: ', renderLocallyCheckbox.checked);
+  webAudio.renderLocally(renderLocallyCheckbox.checked);
+}
diff --git a/samples/web/content/peerconnection/webaudio-input/js/webaudioextended.js b/samples/web/content/peerconnection/webaudio-input/js/webaudioextended.js
new file mode 100644
index 0000000..804439a
--- /dev/null
+++ b/samples/web/content/peerconnection/webaudio-input/js/webaudioextended.js
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+// WebAudioExtended helper class which takes care of the WebAudio related parts.
+
+function WebAudioExtended() {
+  window.AudioContext = window.AudioContext || window.webkitAudioContext;
+  /* global AudioContext */
+  this.context = new AudioContext();
+  this.soundBuffer = null;
+}
+
+WebAudioExtended.prototype.start = function() {
+  this.filter = this.context.createBiquadFilter();
+  this.filter.type = 'highpass';
+  this.filter.frequency.value = 1500;
+};
+
+WebAudioExtended.prototype.applyFilter = function(stream) {
+  this.mic = this.context.createMediaStreamSource(stream);
+  this.mic.connect(this.filter);
+  this.peer = this.context.createMediaStreamDestination();
+  this.filter.connect(this.peer);
+  return this.peer.stream;
+};
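+// The resulting WebAudio graph is:
+//   mic (MediaStreamSource) -> highpass BiquadFilter -> MediaStreamDestination,
+// and it is the destination's |stream| that gets handed to the
+// PeerConnection instead of the raw microphone stream.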
+
+WebAudioExtended.prototype.renderLocally = function(enabled) {
+  if (enabled) {
+    this.mic.connect(this.context.destination);
+  } else {
+    this.mic.disconnect(0);
+    this.mic.connect(this.filter);
+  }
+};
+
+WebAudioExtended.prototype.stop = function() {
+  this.mic.disconnect(0);
+  this.filter.disconnect(0);
+  this.mic = null;
+  this.peer = null;
+};
+
+WebAudioExtended.prototype.addEffect = function() {
+  var effect = this.context.createBufferSource();
+  effect.buffer = this.soundBuffer;
+  if (this.peer) {
+    effect.connect(this.peer);
+    effect.start(0);
+  }
+};
+
+WebAudioExtended.prototype.loadCompleted = function() {
+  this.context.decodeAudioData(this.request.response, function(buffer) {
+    this.soundBuffer = buffer;
+  }.bind(this));
+};
+
+WebAudioExtended.prototype.loadSound = function(url) {
+  this.request = new XMLHttpRequest();
+  this.request.open('GET', url, true);
+  this.request.responseType = 'arraybuffer';
+  this.request.onload = this.loadCompleted.bind(this);
+  this.request.send();
+};
+
diff --git a/samples/web/content/testrtc/app.yaml b/samples/web/content/testrtc/app.yaml
new file mode 100644
index 0000000..1f863b5
--- /dev/null
+++ b/samples/web/content/testrtc/app.yaml
@@ -0,0 +1,26 @@
+application: test-rtc
+version: 1
+runtime: python27
+threadsafe: true
+api_version: 1
+
+handlers:
+- url: /html
+  static_dir: html
+
+- url: /images
+  static_dir: images
+
+- url: /js
+  static_dir: js
+
+- url: /css
+  static_dir: css
+
+- url: /.*
+  script: testrtc.app
+  secure: always
+
+libraries:
+- name: jinja2
+  version: latest
diff --git a/samples/web/content/testrtc/css/main.css b/samples/web/content/testrtc/css/main.css
new file mode 100644
index 0000000..f54af88
--- /dev/null
+++ b/samples/web/content/testrtc/css/main.css
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+body {
+  font-family: 'Roboto', sans-serif;
+  font-weight: 300;
+  margin: 0;
+  padding: 1em;
+}
+
+.title {
+  height: 60px;
+  line-height: 60px;
+  font-size: 18px;
+  font-weight: 500;
+  background-color: #4F7DC9;
+  color: #FFF;
+  transition: height .2s;
+  font-family: 'Roboto', sans-serif;
+  max-width: 40em;
+  margin: 0 auto 1em auto;
+}
+
+paper-button {
+  background-color: #d84a38;
+}
+
+div#content {
+  margin: 0 auto 0 auto;
+  max-width: 40em;
+  padding: 1em 1.5em 1.3em 1.5em;
+}
+
+/* Test suite style */
+core-toolbar.test-suite {
+  margin-top: 1.5em;
+  cursor: pointer;
+}
+core-toolbar.test-suite[state='pending'] {
+  background-color: #e2e2e2;
+}
+core-toolbar.test-suite[state='success'] {
+  background-color: #90ec90;
+}
+core-toolbar.test-suite[state='failure'] {
+  background-color: #ec9090;
+}
+
+core-toolbar.test-suite + core-collapse {
+  background-color: #f4f4f4;
+  padding: 0 0 1em 0;
+}
+
+core-toolbar.test-suite core-icon[icon="check"],
+core-toolbar.test core-icon[icon="check"] {
+  color: green;
+}
+
+core-toolbar.test-suite core-icon[icon="close"],
+core-toolbar.test core-icon[icon="close"] {
+  color: red;
+}
+
+/* Test style */
+core-toolbar.test {
+  cursor: pointer;
+  background-color: transparent;
+  height: 2em;
+}
+
+core-toolbar.test span {
+  min-width: 80%;
+}
+
+.test-output {
+  margin-left: 2em;
+}
+
+.test-progress {
+  display: block;
+  min-width: 2em;
+}
diff --git a/samples/web/content/testrtc/index.html b/samples/web/content/testrtc/index.html
new file mode 100644
index 0000000..43bca8e
--- /dev/null
+++ b/samples/web/content/testrtc/index.html
@@ -0,0 +1,67 @@
+<!--
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+-->
+<!DOCTYPE html>
+<html>
+<head>
+  <meta name="keywords" content="WebRTC, HTML5, JavaScript" />
+  <meta name="description" content="Client-side WebRTC code samples." />
+  <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+  <meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
+  <meta http-equiv="X-UA-Compatible" content="chrome=1" />
+  <base target="_blank">
+  <title>WebRTC Troubleshooter</title>
+
+  <link rel="stylesheet" href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700">
+  <link rel="stylesheet" href="css/main.css" />
+
+  <script src="//www.polymer-project.org/components/platform/platform.js"></script>
+  <link rel="import" href="//www.polymer-project.org/components/core-toolbar/core-toolbar.html">
+  <link rel="import" href="//www.polymer-project.org/components/core-icon-button/core-icon-button.html">
+  <link rel="import" href="//www.polymer-project.org/components/core-icon/core-icon.html">
+  <link rel="import" href="//www.polymer-project.org/components/core-collapse/core-collapse.html">
+  <link rel="import" href="//www.polymer-project.org/components/paper-dialog/paper-dialog.html">
+  <link rel="import" href="//www.polymer-project.org/components/paper-dialog/paper-dialog-transition.html">
+  <link rel="import" href="//www.polymer-project.org/components/paper-progress/paper-progress.html">
+  <link rel="import" href="//www.polymer-project.org/components/paper-button/paper-button.html">
+</head>
+<body unresolved>
+  <!-- Title toolbar -->
+  <core-toolbar class="title">
+    <span flex>WebRTC Troubleshooter</span>
+    <core-icon-button icon="menu" onclick="settingsDialog.toggle()"></core-icon-button>
+    <core-icon-button icon="bug-report" id="bug-button" onclick="reportBug()"></core-icon-button>
+    <paper-button id="start-button" onclick="start()" raised>Start</paper-button>
+  </core-toolbar>
+
+  <!-- Placeholder for dynamically generated test suites. -->
+  <div id="content"></div>
+
+  <!-- Settings dialog -->
+  <paper-dialog id="settings-dialog" heading="Settings" transition="paper-dialog-transition-center">
+    <br>
+    <div class="select"><label>Audio source: <select id="audioSource"></select></label></div>
+    <div class="select"><label>Video source: <select id="videoSource"></select></label></div>
+  </paper-dialog>
+
+  <!-- Add common files after this line. -->
+  <script src="js/adapter.js"></script>
+  <script src="js/bugreport.js"></script>
+  <script src="js/main.js"></script>
+  <script src="js/call.js"></script>
+  <script src="js/stats.js"></script>
+  <script src="js/ssim.js"></script>
+  <!-- TODO(juberti): Add analytics script src="js/lib/ga.js" -->
+
+  <!-- Add tests after this line. -->
+  <script src="js/mictest.js"></script>
+  <script src="js/camresolutionstest.js"></script>
+  <script src="js/camtest.js"></script>
+  <script src="js/conntest.js"></script>
+  <script src="js/bandwidth_test.js"></script>
+</body>
+</html>
diff --git a/samples/web/content/testrtc/js/adapter.js b/samples/web/content/testrtc/js/adapter.js
new file mode 120000
index 0000000..9513fbb
--- /dev/null
+++ b/samples/web/content/testrtc/js/adapter.js
@@ -0,0 +1 @@
+../../../js/adapter.js
\ No newline at end of file
diff --git a/samples/web/content/testrtc/js/bandwidth_test.js b/samples/web/content/testrtc/js/bandwidth_test.js
new file mode 100644
index 0000000..00b9b0b
--- /dev/null
+++ b/samples/web/content/testrtc/js/bandwidth_test.js
@@ -0,0 +1,164 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+'use strict';
+
+// Creates a loopback via relay candidates and tries to send as many packets
+// with 1024 chars as possible while keeping dataChannel bufferedAmount above
+// zero.
+addTest('Connectivity', 'Data throughput',
+  Call.asyncCreateTurnConfig.bind(null, testDataChannelThroughput, reportFatal));
+
+function testDataChannelThroughput(config) {
+  var call = new Call(config);
+  call.setIceCandidateFilter(Call.isRelay);
+  var testDurationSeconds = 5.0;
+  var startTime = null;
+  var sentPayloadBytes = 0;
+  var receivedPayloadBytes = 0;
+  var stopSending = false;
+  var samplePacket = '';
+
+  for (var i = 0; i !== 1024; ++i) {
+    samplePacket += 'h';
+  }
+
+  var maxNumberOfPacketsToSend = 100;
+  var bytesToKeepBuffered = 1024 * maxNumberOfPacketsToSend;
+
+  var lastBitrateMeasureTime;
+  var lastReceivedPayloadBytes = 0;
+
+  var receiveChannel = null;
+  var senderChannel = call.pc1.createDataChannel(null);
+  senderChannel.addEventListener('open', sendingStep);
+
+  call.pc2.addEventListener('datachannel', onReceiverChannel);
+  call.establishConnection();
+
+  function onReceiverChannel(event) {
+    receiveChannel = event.channel;
+    receiveChannel.addEventListener('message', onMessageReceived);
+  }
+
+  function sendingStep() {
+    var now = new Date();
+    if (!startTime) {
+      startTime = now;
+      lastBitrateMeasureTime = now;
+    }
+
+    for (var i = 0; i !== maxNumberOfPacketsToSend; ++i) {
+      if (senderChannel.bufferedAmount >= bytesToKeepBuffered) {
+        break;
+      }
+      sentPayloadBytes += samplePacket.length;
+      senderChannel.send(samplePacket);
+    }
+
+    if (now - startTime >= 1000 * testDurationSeconds) {
+      setTestProgress(100);
+      stopSending = true;
+    } else {
+      setTestProgress((now - startTime) / (10 * testDurationSeconds));
+      setTimeout(sendingStep, 1);
+    }
+  }
+
+  function onMessageReceived(event) {
+    receivedPayloadBytes += event.data.length;
+    var now = new Date();
+    if (now - lastBitrateMeasureTime >= 1000) {
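+      // Bytes received per elapsed millisecond equals kilobytes per second;
+      // multiplying by 8 yields kbps, and round(x * 1000 * 8) / 1000 keeps
+      // three decimal places.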
+      var bitrate = (receivedPayloadBytes - lastReceivedPayloadBytes) /
+                    (now - lastBitrateMeasureTime);
+      bitrate = Math.round(bitrate * 1000 * 8) / 1000;
+      reportSuccess('Transmitting at ' + bitrate + ' kbps.');
+      lastReceivedPayloadBytes = receivedPayloadBytes;
+      lastBitrateMeasureTime = now;
+    }
+    if (stopSending && sentPayloadBytes === receivedPayloadBytes) {
+      call.close();
+
+      var elapsedTime = Math.round((now - startTime) * 10) / 10000.0;
+      var receivedKBits = receivedPayloadBytes * 8 / 1000;
+      reportSuccess('Total transmitted: ' + receivedKBits + ' kilobits in ' +
+                    elapsedTime + ' seconds.');
+      testFinished();
+    }
+  }
+}
+
+// Measures video bandwidth estimation performance by doing a loopback call via
+// relay candidates for 40 seconds. Computes RTT and bandwidth estimation
+// average and maximum, as well as time to ramp up (defined as reaching 75% of
+// the max bitrate); reports an infinite ramp-up time if that is never reached.
+addTest('Connectivity', 'Video bandwidth',
+  Call.asyncCreateTurnConfig.bind(null, testVideoBandwidth, reportFatal));
+
+function testVideoBandwidth(config) {
+  var maxVideoBitrateKbps = 2000;
+  var durationMs = 40000;
+  var statStepMs = 100;
+  var bweStats = new StatisticsAggregate(0.75 * maxVideoBitrateKbps * 1000);
+  var rttStats = new StatisticsAggregate();
+  var startTime;
+
+  var call = new Call(config);
+  call.setIceCandidateFilter(Call.isRelay);
+  call.constrainVideoBitrate(maxVideoBitrateKbps);
+
+  // FEC makes it hard to study bandwidth estimation since there seems to be
+  // a spike when it is enabled and disabled. Disable it for now. FEC issue
+  // tracked on: https://code.google.com/p/webrtc/issues/detail?id=3050
+  call.disableVideoFec();
+
+  doGetUserMedia({audio: false, video: true}, gotStream, reportFatal);
+
+  function gotStream(stream) {
+    call.pc1.addStream(stream);
+    call.establishConnection();
+    startTime = new Date();
+    setTimeout(gatherStats, statStepMs);
+  }
+
+  function gatherStats() {
+    var now = new Date();
+    if (now - startTime > durationMs) {
+      setTestProgress(100);
+      completed();
+    } else {
+      setTestProgress((now - startTime) * 100 / durationMs);
+      call.pc1.getStats(gotStats);
+    }
+  }
+
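+  // Note: this relies on Chrome's legacy callback-based getStats() API
+  // (response.result(), report.stat()) and on Chrome-specific goog-prefixed
+  // metrics; it will not work as-is in other browsers.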
+  function gotStats(response) {
+    for (var index in response.result()) {
+      var report = response.result()[index];
+      if (report.id === 'bweforvideo') {
+        bweStats.add(Date.parse(report.timestamp),
+          parseInt(report.stat('googAvailableSendBandwidth')));
+      } else if (report.type === 'ssrc') {
+        rttStats.add(Date.parse(report.timestamp),
+          parseInt(report.stat('googRtt')));
+      }
+    }
+    setTimeout(gatherStats, statStepMs);
+  }
+
+  function completed() {
+    call.pc1.getLocalStreams()[0].getVideoTracks()[0].stop();
+    call.close();
+    reportSuccess('RTT average: ' + rttStats.getAverage() + ' ms');
+    reportSuccess('RTT max: ' + rttStats.getMax() + ' ms');
+    reportSuccess('Send bandwidth estimate average: ' + bweStats.getAverage() + ' bps');
+    reportSuccess('Send bandwidth estimate max: ' + bweStats.getMax() + ' bps');
+    reportSuccess('Send bandwidth ramp-up time: ' + bweStats.getRampUpTime() + ' ms');
+    reportSuccess('Test finished');
+    testFinished();
+  }
+}
diff --git a/samples/web/content/testrtc/js/bugreport.js b/samples/web/content/testrtc/js/bugreport.js
new file mode 100644
index 0000000..77894ed
--- /dev/null
+++ b/samples/web/content/testrtc/js/bugreport.js
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* More information about these options at jshint.com/docs/options */
+/* exported reportBug */
+'use strict';
+
+function reportBug() {
+  // Detect browser and version.
+  var res = getBrowserNameAndVersion();
+  var browserName = res.name;
+  var browserVersion = res.version;
+  console.log('Detected browser: ' + browserName + ' ' + browserVersion);
+
+  var description = 'Browser: ' + browserName + ' ' + browserVersion +
+      ' (' + navigator.platform + ')\n\n' +
+      'Output from the troubleshooting page at http://test.webrtc.org:\n\n' +
+      'Please replace this text with the copy+pasted output from test page!';
+
+  // Labels for the bug to be filed.
+  var osLabel = 'OS-';
+  if (navigator.platform.indexOf('Win') !== -1) { osLabel += 'Windows'; }
+  if (navigator.platform.indexOf('Mac') !== -1) { osLabel += 'Mac'; }
+  if (navigator.platform.match('iPhone|iPad|iPod|iOS')) { osLabel += 'iOS'; }
+  if (navigator.platform.indexOf('Linux') !== -1) { osLabel += 'Linux'; }
+  if (navigator.platform.indexOf('Android') !== -1) { osLabel += 'Android'; }
+
+  var labels = 'webrtc-troubleshooter,Cr-Blink-WebRTC,' + osLabel;
+  var url = 'https://code.google.com/p/chromium/issues/entry?' +
+      'comment=' + encodeURIComponent(description) +
+      '&labels=' + encodeURIComponent(labels);
+  console.log('Navigating to: ' + url);
+  window.open(url);
+}
+
+/*
+ * Detects the running browser name and version.
+ *
+ * @return {!Object.<string, string>} Object containing the browser name and
+ *     version (mapped to the keys "name" and "version").
+ */
+function getBrowserNameAndVersion() {
+  // Code inspired by http://goo.gl/9dZZqE with
+  // added support of modern Internet Explorer versions (Trident).
+  var agent = navigator.userAgent;
+  var browserName = navigator.appName;
+  var version = '' + parseFloat(navigator.appVersion);
+  var offsetName, offsetVersion, ix;
+
+  if ((offsetVersion = agent.indexOf('Chrome')) !== -1) {
+    browserName = 'Chrome';
+    version = agent.substring(offsetVersion + 7);
+  } else if ((offsetVersion = agent.indexOf('MSIE')) !== -1) {
+    browserName = 'Microsoft Internet Explorer'; // Older IE versions.
+    version = agent.substring(offsetVersion + 5);
+  } else if ((offsetVersion = agent.indexOf('Trident')) !== -1) {
+    browserName = 'Microsoft Internet Explorer'; // Newer IE versions.
+    version = agent.substring(offsetVersion + 8);
+  } else if ((offsetVersion = agent.indexOf('Firefox')) !== -1) {
+    browserName = 'Firefox';
+  } else if ((offsetVersion = agent.indexOf('Safari')) !== -1) {
+    browserName = 'Safari';
+    version = agent.substring(offsetVersion + 7);
+    if ((offsetVersion = agent.indexOf('Version')) !== -1) {
+      version = agent.substring(offsetVersion + 8);
+    }
+  } else if ((offsetName = agent.lastIndexOf(' ') + 1) <
+             (offsetVersion = agent.lastIndexOf('/'))) {
+    // For other browsers 'name/version' is at the end of userAgent
+    browserName = agent.substring(offsetName, offsetVersion);
+    version = agent.substring(offsetVersion + 1);
+    if (browserName.toLowerCase() === browserName.toUpperCase()) {
+      browserName = navigator.appName;
+    }
+  }
+  // Trim the version string at semicolon/space if present.
+  if ((ix = version.indexOf(';')) !== -1) {
+    version = version.substring(0, ix);
+  }
+  if ((ix = version.indexOf(' ')) !== -1) {
+    version = version.substring(0, ix);
+  }
+
+  return { 'name': browserName, 'version': version };
+}
diff --git a/samples/web/content/testrtc/js/call.js b/samples/web/content/testrtc/js/call.js
new file mode 100644
index 0000000..55b04f5
--- /dev/null
+++ b/samples/web/content/testrtc/js/call.js
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+'use strict';
+
+function Call(config) {
+  this.pc1 = new RTCPeerConnection(config);
+  this.pc2 = new RTCPeerConnection(config);
+
+  this.pc1.addEventListener('icecandidate', this.onIceCandidate_.bind(this, this.pc2));
+  this.pc2.addEventListener('icecandidate', this.onIceCandidate_.bind(this, this.pc1));
+
+  this.iceCandidateFilter_ = Call.noFilter;
+}
+
+Call.prototype = {
+  establishConnection: function () {
+    this.pc1.createOffer(this.gotOffer_.bind(this));
+  },
+
+  close: function () {
+    this.pc1.close();
+    this.pc2.close();
+  },
+
+  setIceCandidateFilter: function (filter) {
+    this.iceCandidateFilter_ = filter;
+  },
+
+  // Constrain the maximum video bitrate by modifying the SDP in the answer.
+  constrainVideoBitrate: function (maxVideoBitrateKbps) {
+    this.constrainVideoBitrateKbps_ = maxVideoBitrateKbps;
+  },
+
+  // Remove video FEC if available on the offer.
+  disableVideoFec: function () {
+    this.constrainOfferToRemoveVideoFec_ = true;
+  },
+
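+  // The FEC stripping in gotOffer_ below rewrites the offer SDP directly:
+  // payload types 116 (red) and 117 (ulpfec) are removed from the m=video
+  // line along with their a=rtpmap entries. These payload numbers are what
+  // Chrome assigns at the time of writing, not values guaranteed by a spec.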
+  gotOffer_: function (offer) {
+    if (this.constrainOfferToRemoveVideoFec_) {
+      offer.sdp = offer.sdp.replace(/(m=video 1 [^\r]+)(116 117)(\r\n)/g,
+                                    '$1\r\n');
+      offer.sdp = offer.sdp.replace(/a=rtpmap:116 red\/90000\r\n/g, '');
+      offer.sdp = offer.sdp.replace(/a=rtpmap:117 ulpfec\/90000\r\n/g, '');
+    }
+    this.pc1.setLocalDescription(offer);
+    this.pc2.setRemoteDescription(offer);
+    this.pc2.createAnswer(this.gotAnswer_.bind(this));
+  },
+
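+  // gotAnswer_ below caps the bitrate by inserting a 'b=AS:<kbps>'
+  // (application-specific maximum, in kilobits per second) bandwidth line
+  // right after the video section's a=mid attribute.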
+  gotAnswer_: function (answer) {
+    if (this.constrainVideoBitrateKbps_) {
+      answer.sdp = answer.sdp.replace(
+          /a=mid:video\r\n/g,
+          'a=mid:video\r\nb=AS:' + this.constrainVideoBitrateKbps_ + '\r\n');
+    }
+    this.pc2.setLocalDescription(answer);
+    this.pc1.setRemoteDescription(answer);
+  },
+
+  // The event must be received as a parameter: the listener is bound with
+  // the other peer as its first argument, and relying on the global
+  // window.event only works in Chrome.
+  onIceCandidate_: function (otherPeer, event) {
+    if (event.candidate) {
+      var parsed = Call.parseCandidate(event.candidate.candidate);
+      if (this.iceCandidateFilter_(parsed)) {
+        otherPeer.addIceCandidate(event.candidate);
+      }
+    }
+  }
+};
+
+Call.noFilter = function () {
+  return true;
+};
+
+Call.isRelay = function (candidate) {
+  return candidate.type === 'relay';
+};
+
+Call.isIpv6 = function (candidate) {
+  return candidate.address.indexOf(':') !== -1;
+};
+
+// Parse a 'candidate:' line into a JSON object.
+Call.parseCandidate = function (text) {
+  var candidateStr = 'candidate:';
+  var pos = text.indexOf(candidateStr) + candidateStr.length;
+  var fields = text.substr(pos).split(' ');
+  return {
+    'type': fields[7],
+    'protocol': fields[2],
+    'address': fields[4],
+  };
+};
+
+// Ask computeengineondemand to give us TURN server credentials and URIs.
+Call.CEOD_URL = 'https://computeengineondemand.appspot.com/turn?username=1234&key=5678';
+Call.asyncCreateTurnConfig = function (onSuccess, onError) {
+  var xhr = new XMLHttpRequest();
+  function onResult() {
+    if (xhr.readyState !== 4) {
+      return;
+    }
+
+    if (xhr.status !== 200) {
+      onError('TURN request failed');
+      return;
+    }
+
+    var response = JSON.parse(xhr.responseText);
+    var iceServer = {
+      'username': response.username,
+      'credential': response.password,
+      'urls': response.uris
+    };
+    onSuccess({ 'iceServers': [ iceServer ] });
+  }
+
+  xhr.onreadystatechange = onResult;
+  xhr.open('GET', Call.CEOD_URL, true);
+  xhr.send();
+};
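A minimal usage sketch for the helpers above, mirroring how the connectivity and bandwidth tests drive them (reportFatal comes from the test harness):

    Call.asyncCreateTurnConfig(function(config) {
      var call = new Call(config);
      call.setIceCandidateFilter(Call.isRelay);
      call.establishConnection();
      // ... exercise the connection, then:
      call.close();
    }, reportFatal);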
diff --git a/samples/web/content/testrtc/js/camresolutionstest.js b/samples/web/content/testrtc/js/camresolutionstest.js
new file mode 100644
index 0000000..776bfde
--- /dev/null
+++ b/samples/web/content/testrtc/js/camresolutionstest.js
@@ -0,0 +1,112 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+'use strict';
+
+/* This test tries calling getUserMedia() with each resolution from the list
+ * below. Each gUM() call triggers a success or a fail callback; we report
+ * ok/nok and schedule another gUM() with the next resolution until the list
+ * is exhausted. Some resolutions are mandatory and make the test fail if not
+ * supported.
+ *
+ * With generic cameras using the Chrome rescaler, all resolutions up to a
+ * given maximum should be supported, and none beyond it. Special cameras,
+ * such as digitizers, might support only one resolution.
+ */
+
+addTest('Camera', 'Supported resolutions', function() {
+  var test = new CamResolutionsTest();
+  test.run();
+});
+
+function CamResolutionsTest() {
+  // Each resolution has width, height and 'mandatory' fields.
+  this.resolutions = [ [ 160, 120, false],
+                       [ 320, 180, false],
+                       [ 320, 240,  true],  // QVGA
+                       [ 640, 360, false],
+                       [ 640, 480,  true],  // VGA
+                       [ 768, 576, false],  // PAL
+                       [1024, 576, false],
+                       [1280, 720,  true],  // HD
+                       [1280, 768, false],
+                       [1280, 800, false],
+                       [1920, 1080, false],  // Full HD
+                       [1920, 1200, false],
+                       [3840, 2160, false],  // 4K
+                       [4096, 2160, false] ];
+  this.mandatoryUnsupportedResolutions = 0;
+  this.numResolutions = this.resolutions.length;
+  this.counter = 0;
+  this.supportedResolutions = 0;
+  this.unsupportedResolutions = 0;
+}
+
+CamResolutionsTest.prototype = {
+  run: function() {
+    this.triggerGetUserMedia(this.resolutions[0]);
+  },
+
+  triggerGetUserMedia: function(resolution) {
+    var constraints = {
+      audio: false,
+      video: {
+        mandatory: {
+          minWidth:  resolution[0],
+          minHeight: resolution[1],
+          maxWidth:  resolution[0],
+          maxHeight: resolution[1]
+        }
+      }
+    };
+    try {
+      doGetUserMedia(constraints, this.successFunc.bind(this),
+          this.failFunc.bind(this));
+    } catch (e) {
+      reportFatal('GetUserMedia failed.');
+    }
+  },
+
+  successFunc: function(stream) {
+    this.supportedResolutions++;
+    var theResolution = this.resolutions[this.counter++];
+    reportInfo('Supported ' + theResolution[0] + 'x' + theResolution[1]);
+    stream.stop();
+    this.finishTestOrRetrigger();
+  },
+
+  failFunc: function() {
+    this.unsupportedResolutions++;
+    var theResolution = this.resolutions[this.counter++];
+    if (theResolution[2]) {
+      this.mandatoryUnsupportedResolutions++;
+      reportError('Camera does not support a mandatory resolution: ' +
+                  theResolution[0] + 'x' + theResolution[1]);
+    } else {
+      reportInfo('NOT supported ' + theResolution[0] + 'x' +
+                 theResolution[1]);
+    }
+    this.finishTestOrRetrigger();
+  },
+
+  finishTestOrRetrigger: function() {
+    if (this.counter === this.numResolutions) {
+      if (this.mandatoryUnsupportedResolutions === 0) {
+        if (this.supportedResolutions) {
+          reportSuccess(this.supportedResolutions + '/' + this.numResolutions +
+                        ' resolutions supported.');
+        } else {
+          reportError('No camera resolutions supported, most likely the ' +
+                      'camera is not accessible or dead.');
+        }
+      }
+      testFinished();
+    } else {
+      this.triggerGetUserMedia(this.resolutions[this.counter]);
+    }
+  }
+};
diff --git a/samples/web/content/testrtc/js/camtest.js b/samples/web/content/testrtc/js/camtest.js
new file mode 100644
index 0000000..a9f1fe5
--- /dev/null
+++ b/samples/web/content/testrtc/js/camtest.js
@@ -0,0 +1,200 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+'use strict';
+
+// Test spec
+// 1. TODO: Enumerate cameras.
+// 2. Try opening the (a) camera in VGA.
+// 3. TODO: Try camera in other resolutions, particularly HD.
+// 4. TODO: Translate gum failures to user-friendly messages, using
+//   MediaStreamError.name in { NotSupportedError, PermissionDeniedError,
+//   ConstrainNotSatisfiedError, OverconstrainedError, NotFoundError,
+//   AbortError, SourceUnavailableError }.
+// 5. Check that the MediaStreamTrack associated with the camera looks good.
+// 5.a Capture for a couple of seconds and monitor the events on the
+//   MediaStreamTrack: onEnded(), onMute(), onUnmute().
+// 5.b If onEnded() fires, reportFatal() is called (e.g. camera is unplugged).
+// 5.c We keep a local |isMuted| state during the capture period (5.a) to check
+//   it at the end. (TODO: local isMuted can be deprecated once the
+//   mediaStreamTrack.muted property is wired up in Chrome.)
+// 5.d After the wait period we check that the video tag where the |stream| is
+//   plugged in has the appropriate width and height.
+// 5.e We also check that all frames were non-near-black.
+// 6. Tear down the |stream|. TODO: this should be done in the test harness.
+
+addTest('Camera', 'Test video feed', function() {
+  var test = new CamCaptureTest();
+  test.run();
+});
+
+function CamCaptureTest() {
+  this.isMuted = false;
+  this.stream = null;
+  this.testActive = false;
+  this.numFrames = 0;
+  // Variables associated with near-black frame detection.
+  this.numBlackFrames = 0;
+  this.nonBlackPixelLumaThreshold = 20;
+  // Variables associated with nearly-frozen frames detection.
+  this.numFrozenFrames = 0;
+  this.previousFrame = [];
+  this.identicalFrameSsimThreshold = 0.985;
+  this.frameComparator = new Ssim();
+
+  this.constraints = {
+    video: { mandatory: { minWidth: 640, minHeight: 480} }
+  };
+  this.video = document.createElement('video');
+  this.video.width = this.constraints.video.mandatory.minWidth;
+  this.video.height = this.constraints.video.mandatory.minHeight;
+  this.video.setAttribute('autoplay', '');
+  this.video.setAttribute('muted', '');
+}
+
+CamCaptureTest.prototype = {
+  run: function() {
+    doGetUserMedia(this.constraints, this.gotStream.bind(this));
+  },
+
+  gotStream: function(stream) {
+    this.stream = stream;
+    if (!this.checkVideoTracks(this.stream)) {
+      testFinished();
+      return;
+    }
+    this.setupVideoExpectations(this.stream);
+    attachMediaStream(this.video, this.stream);
+    this.setupCanvas();
+    reportInfo('Checking if your camera is delivering frames for five ' +
+               'seconds...');
+    this.setTimeoutWithProgressBar(this.checkVideoFinish.bind(this, this.video), 5000);
+  },
+
+  setTimeoutWithProgressBar: function (timeoutCallback, timeoutMs) {
+    var start = new Date();
+    var updateProgressBar = setInterval(function () {
+      var now = new Date();
+      setTestProgress((now - start) * 100 / timeoutMs);
+    }, 100);
+
+    setTimeout(function () {
+      clearInterval(updateProgressBar);
+      setTestProgress(100);
+      timeoutCallback();
+    }, timeoutMs);
+  },
+
+  checkVideoTracks: function(stream) {
+    reportSuccess('getUserMedia succeeded.');
+    var tracks = stream.getVideoTracks();
+    if (tracks.length < 1) {
+      reportFatal('No video track in returned stream.');
+      return false;
+    }
+    var videoTrack = tracks[0];
+    reportSuccess('Video track exists with label = ' + videoTrack.label);
+    this.testActive = true;
+    return true;
+  },
+
+  setupVideoExpectations: function(stream) {
+    var videoTrack = stream.getVideoTracks()[0];
+    videoTrack.onended = function() {
+      reportError('Video track ended, camera stopped working');
+    };
+    videoTrack.onmute = function() {
+      reportError('Your camera reported itself as muted.');
+      // MediaStreamTrack.muted property is not wired up in Chrome yet, so we
+      // track a local isMuted state; bind |this| so the handler updates the
+      // test object rather than the track.
+      this.isMuted = true;
+    }.bind(this);
+    videoTrack.onunmute = function() {
+      this.isMuted = false;
+    }.bind(this);
+  },
+
+  checkVideoFinish: function(video) {
+    expectEquals(this.constraints.video.mandatory.minWidth,
+        video.videoWidth, 'Incorrect width', 'Width OK');
+    expectEquals(this.constraints.video.mandatory.minHeight,
+        video.videoHeight, 'Incorrect height', 'Height OK');
+    if (this.stream.getVideoTracks()[0].readyState !== 'ended') {
+      expectEquals(false, this.isMuted, 'Your camera reported ' +
+                   'itself as muted! It is probably not delivering frames. ' +
+                   'Please try another webcam.', 'Camera is delivering frames');
+    }
+    // Check: amount of near-black frames should be 0.
+    expectEquals(0, this.numBlackFrames, 'Your camera seems to be ' +
+                 'delivering near-black frames. This might be all right or ' +
+                 'it could be a symptom of a camera in a bad state; if it\'s ' +
+                 'a USB WebCam, try plugging it out and in again. (FYI: It ' +
+                 'has produced ' + this.numBlackFrames + '/' + this.numFrames +
+                 ' near-black frames in total).', 'Camera is sending ' +
+                 'non-black frames.');
+    // Check: amount of frozen frames should be 0.
+    expectEquals(0, this.numFrozenFrames, 'Your camera seems to be ' +
+                 'delivering frozen frames. This might be a symptom of the ' +
+                 'camera in a bad state; if it\'s a USB WebCam, try plugging ' +
+                 'it out and in again. (FYI: It has produced ' +
+                 this.numFrozenFrames + '/' + (this.numFrames - 1) +
+                 ' analyzed frame-pairs in total).', 'Camera is sending ' +
+                 'non-frozen frames.');
+    this.stream.getVideoTracks()[0].onended = null;
+    this.testActive = false;
+    this.stream.getVideoTracks()[0].stop();
+    testFinished();
+  },
+
+  setupCanvas: function() {
+    this.canvas = document.createElement('canvas');
+    this.canvas.width = this.video.width;
+    this.canvas.height = this.video.height;
+    this.context = this.canvas.getContext('2d');
+    this.video.addEventListener('play', this.testFrame.bind(this), false);
+  },
+
+  testFrame: function() {
+    if (!this.testActive || this.video.ended) {
+      return false;
+    }
+    this.context.drawImage(this.video, 0, 0, this.canvas.width,
+        this.canvas.height);
+    var imageData = this.context.getImageData(0, 0, this.canvas.width,
+        this.canvas.height);
+
+    if (this.isBlackFrame(imageData.data, imageData.data.length)) {
+      this.numBlackFrames++;
+    }
+
+    if (this.frameComparator.calculate(this.previousFrame, imageData.data) >
+        this.identicalFrameSsimThreshold) {
+      this.numFrozenFrames++;
+    }
+    this.previousFrame = imageData.data;
+
+    this.numFrames++;
+    if (this.testActive) {
+      setTimeout(this.testFrame.bind(this), 20);
+    }
+  },
+
+  isBlackFrame: function(data, length) {
+    // TODO: Use a statistical, histogram-based detection.
+    var thresh = this.nonBlackPixelLumaThreshold;
+    var accuLuma = 0;
+    for (var i = 4; i < length; i += 4) {
+      // Use Luma as in Rec. 709: Y′709 = 0.21R + 0.72G + 0.07B;
+      accuLuma += 0.21 * data[i] + 0.72 * data[i + 1] + 0.07 * data[i + 2];
+      // Early termination if the average Luma so far is bright enough.
+      if (accuLuma > (thresh * i / 4)) {
+        return false;
+      }
+    }
+    return true;
+  }
+};
diff --git a/samples/web/content/testrtc/js/conntest.js b/samples/web/content/testrtc/js/conntest.js
new file mode 100644
index 0000000..f3d347a
--- /dev/null
+++ b/samples/web/content/testrtc/js/conntest.js
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+'use strict';
+
+addTest('Connectivity', 'Udp connectivity', testUdpConnectivity);
+addTest('Connectivity', 'Tcp connectivity', testTcpConnectivity);
+addTest('Connectivity', 'Ipv6 enabled', testHasIpv6Candidates);
+
+// Test whether it can connect via UDP to a TURN server
+// Get a TURN config, and try to get a relay candidate using UDP.
+function testUdpConnectivity() {
+  Call.asyncCreateTurnConfig(
+      function(config) { 
+        filterConfig(config, 'udp');
+        gatherCandidates(config, null, Call.isRelay);
+      },
+      reportFatal);
+}
+
+// Test whether it can connect via TCP to a TURN server
+// Get a TURN config, and try to get a relay candidate using TCP.
+function testTcpConnectivity() {
+  Call.asyncCreateTurnConfig(
+      function(config) { 
+        filterConfig(config, 'tcp');
+        gatherCandidates(config, null, Call.isRelay);
+      },
+      reportFatal);
+}
+
+// Test whether it is IPv6 enabled (TODO: test IPv6 to a destination).
+// Turn on IPv6, and try to get an IPv6 host candidate.
+function testHasIpv6Candidates() {
+  var params = { optional: [ { googIPv6: true } ] };
+  gatherCandidates(null, params, Call.isIpv6);
+}
+
+// Filter the RTCConfiguration |config| to only contain URLs with the
+// specified transport protocol |protocol|.
+function filterConfig(config, protocol) {
+  var transport = 'transport=' + protocol;
+  for (var i = 0; i < config.iceServers.length; ++i) {
+    var iceServer = config.iceServers[i];
+    var newUrls = [];
+    for (var j = 0; j < iceServer.urls.length; ++j) {
+      if (iceServer.urls[j].indexOf(transport) !== -1) {
+        newUrls.push(iceServer.urls[j]);
+      }
+    }
+    iceServer.urls = newUrls;
+  }
+}
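+// For example (hypothetical server): filtering for 'udp' reduces a urls list
+// ['turn:203.0.113.5:3478?transport=udp', 'turn:203.0.113.5:443?transport=tcp']
+// to ['turn:203.0.113.5:3478?transport=udp'].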
+
+// Create a PeerConnection, and gather candidates using RTCConfig |config|
+// and ctor params |params|. Succeed if any candidates pass the |isGood| 
+// check, fail if we complete gathering without any passing.
+function gatherCandidates(config, params, isGood) {
+  var pc = new RTCPeerConnection(config, params);
+
+  // In our candidate callback, stop if we get a candidate that passes |isGood|.
+  pc.onicecandidate = function(e) {
+    // Once we've decided, ignore future callbacks.
+    if (pc.signalingState === 'closed') {
+      return;
+    }
+
+    if (e.candidate) {
+      var parsed = Call.parseCandidate(e.candidate.candidate);
+      if (isGood(parsed)) {
+        reportSuccess('Gathered candidate with type: ' + parsed.type +
+                      ' address: ' + parsed.address);
+        pc.close();
+        testFinished();
+      }
+    } else {
+      pc.close();
+      reportFatal('Failed to gather specified candidates');
+    }
+  };
+
+  // Create an audio-only, recvonly offer, and setLD with it.
+  // This will trigger candidate gathering.
+  var createOfferParams = { mandatory: { OfferToReceiveAudio: true } };
+  pc.createOffer(function(offer) { pc.setLocalDescription(offer, noop, noop); },
+                 noop, createOfferParams);
+}
+
+function noop() {
+}
diff --git a/samples/web/content/testrtc/js/main.js b/samples/web/content/testrtc/js/main.js
new file mode 100644
index 0000000..9edb56c
--- /dev/null
+++ b/samples/web/content/testrtc/js/main.js
@@ -0,0 +1,373 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+/* More information about these options at jshint.com/docs/options */
+/* exported addTest, doGetUserMedia, reportInfo, expectEquals, testFinished, start, setTestProgress, audioContext, reportSuccess, reportError, settingsDialog */
+'use strict';
+
+// Global WebAudio context that can be shared by all tests.
+// There is a very finite number of WebAudio contexts.
+var audioContext = new AudioContext();
+var contentDiv = document.getElementById('content');
+var startButton = document.getElementById('start-button');
+var audioSelect = document.querySelector('select#audioSource');
+var videoSelect = document.querySelector('select#videoSource');
+var settingsDialog = document.getElementById('settings-dialog');
+var PREFIX_INFO    = '[   INFO ]';
+var PREFIX_OK      = '[     OK ]';
+var PREFIX_FAILED  = '[ FAILED ]';
+var testSuites = [];
+var testFilters = [];
+var currentTest;
+
+// A test suite is a composition of many tests.
+function TestSuite(name, output) {
+  this.name = name;
+  this.tests = [];
+
+  // UI elements.
+  this.toolbar_ = document.createElement('core-toolbar');
+  this.toolbar_.setAttribute('class', 'test-suite');
+  this.toolbar_.setAttribute('state', 'pending');
+  this.toolbar_.addEventListener('click', this.onClickToolbar_.bind(this));
+
+  var title = document.createElement('span');
+  title.setAttribute('flex', null);
+  title.textContent = name;
+  this.toolbar_.appendChild(title);
+
+  this.statusIcon_ = document.createElement('core-icon');
+  this.statusIcon_.setAttribute('icon', '');
+  this.toolbar_.appendChild(this.statusIcon_);
+
+  this.content_ = document.createElement('core-collapse');
+  this.content_.opened = false;
+
+  output.appendChild(this.toolbar_);
+  output.appendChild(this.content_);
+}
+
+TestSuite.prototype = {
+  addTest: function(testName, testFunction) {
+    this.tests.push(new Test(this, testName, testFunction));
+  },
+
+  run: function(doneCallback) {
+    this.content_.opened = true;
+    this.toolbar_.setAttribute('state', 'pending');
+    this.statusIcon_.setAttribute('icon', 'more-horiz');
+    runAllSequentially(this.tests, this.allTestFinished.bind(this, doneCallback));
+  },
+
+  allTestFinished: function(doneCallback) {
+    var errors = 0;
+    var successes = 0;
+    for (var i = 0; i !== this.tests.length; ++i) {
+      successes += this.tests[i].successCount;
+      errors += this.tests[i].errorCount;
+    }
+
+    if (errors === 0 && successes > 0) {
+      this.toolbar_.setAttribute('state', 'success');
+      this.statusIcon_.setAttribute('icon', 'check');
+      this.content_.opened = false;
+    } else {
+      this.toolbar_.setAttribute('state', 'failure');
+      this.statusIcon_.setAttribute('icon', 'close');
+      this.content_.opened = true;
+    }
+
+    doneCallback();
+  },
+
+  onClickToolbar_: function() {
+    this.content_.toggle();
+  }
+};
+
+function Test(suite, name, func) {
+  this.suite = suite;
+  this.name = name;
+  this.func = func;
+
+  var progressBar = document.createElement('paper-progress');
+  progressBar.setAttribute('class', 'test-progress');
+  progressBar.setAttribute('flex', null);
+  progressBar.style.display = 'none';
+
+  var toolbar = document.createElement('core-toolbar');
+  toolbar.setAttribute('class', 'test');
+  var title = document.createElement('span');
+  title.textContent = name;
+  title.setAttribute('flex', null);
+  var statusIcon = document.createElement('core-icon');
+  statusIcon.setAttribute('icon', '');
+  toolbar.addEventListener('click', this.onClickToolbar_.bind(this));
+  toolbar.appendChild(title);
+  toolbar.appendChild(progressBar);
+  toolbar.appendChild(statusIcon);
+
+  var collapse = document.createElement('core-collapse');
+  collapse.setAttribute('class', 'test-output');
+  collapse.opened = false;
+  suite.content_.appendChild(toolbar);
+  suite.content_.appendChild(collapse);
+
+  this.statusIcon_ = statusIcon;
+  this.progressBar_ = progressBar;
+  this.output_ = collapse;
+
+  this.successCount = 0;
+  this.errorCount = 0;
+  this.doneCallback_ = null;
+
+  this.isDisabled = testIsDisabled(name);
+  this.reportInfo('Test not run yet.');
+}
+
+Test.prototype = {
+  run: function(doneCallback) {
+    this.successCount = 0;
+    this.errorCount = 0;
+    this.doneCallback_ = doneCallback;
+    this.clearMessages_();
+    this.statusIcon_.setAttribute('icon', 'more-horiz');
+    this.setProgress(null);
+
+    currentTest = this;
+    if (!this.isDisabled) {
+      this.func();
+    } else {
+      this.reportInfo('Test is disabled.');
+      this.done();
+    }
+  },
+
+  done: function() {
+    this.setProgress(null);
+    if (this.errorCount === 0 && this.successCount > 0) {
+      this.statusIcon_.setAttribute('icon', 'check');
+      // On success, always close the details.
+      this.output_.opened = false;
+    } else {
+      this.statusIcon_.setAttribute('icon', 'close');
+      // Only close the details if there is only one expectation, in which
+      // case the test name alone provides enough information.
+      if (this.errorCount + this.successCount === 1) {
+        this.output_.opened = false;
+      }
+    }
+    this.doneCallback_();
+  },
+
+  setProgress: function(value) {
+    var bar = this.progressBar_;
+    var statusIcon = this.statusIcon_;
+    if (value !== null) {
+      bar.style.display = 'block';
+      bar.setAttribute('value', value);
+      statusIcon.style.display = 'none';
+    } else {
+      bar.style.display = 'none';
+      statusIcon.style.display = 'block';
+    }
+  },
+
+  expectEquals: function(expected, actual, failMsg, okMsg) {
+    if (expected !== actual) {
+      this.reportError('Failed expectation: ' + expected + ' !== ' + actual +
+                       ': ' + failMsg);
+    } else if (okMsg) {
+      this.reportSuccess(okMsg);
+    }
+  },
+
+  reportSuccess: function(str) {
+    this.reportMessage_(PREFIX_OK, str);
+    this.successCount++;
+  },
+
+  reportError: function(str) {
+    this.output_.opened = true;
+    this.reportMessage_(PREFIX_FAILED, str);
+    this.errorCount++;
+  },
+
+  reportInfo: function(str) {
+    this.reportMessage_(PREFIX_INFO, str);
+  },
+
+  reportFatal: function(str) {
+    this.reportError(str);
+    this.done();
+  },
+
+  reportMessage_: function(prefix, str) {
+    var message = document.createElement('div');
+    message.textContent = prefix + ' ' + str;
+    this.output_.appendChild(message);
+  },
+
+  clearMessages_: function() {
+    while (this.output_.lastChild !== null) {
+      this.output_.removeChild(this.output_.lastChild);
+    }
+  },
+
+  onClickToolbar_: function() {
+    this.output_.toggle();
+  }
+};
+
+// TODO(andresp): Pass Test object to test instead of using global methods.
+function reportSuccess(str) { currentTest.reportSuccess(str); }
+function reportError(str) { currentTest.reportError(str); }
+function reportFatal(str) { currentTest.reportFatal(str); }
+function reportInfo(str) { currentTest.reportInfo(str); }
+function setTestProgress(value) { currentTest.setProgress(value); }
+function testFinished() { currentTest.done(); }
+function expectEquals() { currentTest.expectEquals.apply(currentTest, arguments); }
+
+function addTest(suiteName, testName, func) {
+  for (var i = 0; i !== testSuites.length; ++i) {
+    if (testSuites[i].name === suiteName) {
+      testSuites[i].addTest(testName, func);
+      return;
+    }
+  }
+  // Suite does not exist yet, so create it.
+  var testSuite = new TestSuite(suiteName, contentDiv);
+  testSuite.addTest(testName, func);
+  testSuites.push(testSuite);
+}
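+
+// Illustrative usage (names are examples only): tests register themselves at
+// load time and must signal completion via testFinished().
+//
+//   addTest('Example suite', 'Always passes', function() {
+//     reportSuccess('Everything is fine.');
+//     testFinished();
+//   });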
+
+// Helper to run a list of tasks sequentially:
+//   tasks - Array of { run: function(doneCallback) {} }.
+//   doneCallback - called once all tasks have run sequentially.
+function runAllSequentially(tasks, doneCallback) {
+  var current = -1;
+  var runNextAsync = setTimeout.bind(null, runNext);
+
+  runNextAsync();
+
+  function runNext() {
+    current++;
+    if (current === tasks.length) {
+      doneCallback();
+      return;
+    }
+    tasks[current].run(runNextAsync);
+  }
+}
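+
+// Illustrative usage: each task's run() receives a callback that schedules
+// the next task via setTimeout.
+//
+//   runAllSequentially([
+//     {run: function(done) { console.log('first'); done(); }},
+//     {run: function(done) { console.log('second'); done(); }}
+//   ], function() { console.log('all tasks finished'); });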
+
+function start() {
+  startButton.setAttribute('disabled', null);
+  runAllSequentially(testSuites, onComplete);
+
+  function onComplete() {
+    startButton.removeAttribute('disabled');
+  }
+}
+
+function doGetUserMedia(constraints, onSuccess, onFail) {
+  // Call into getUserMedia via the polyfill (adapter.js).
+  var successFunc = function(stream) {
+    trace('User has granted access to local media.');
+    onSuccess(stream);
+  };
+  var failFunc = onFail || function(error) {
+    // If no onFail callback was provided, treat the failure as fatal;
+    // otherwise the error is propagated to the caller's onFail.
+    var errorMessage = 'Failed to get access to local media. Error name was ' +
+        error.name;
+    return reportFatal(errorMessage);
+  };
+  try {
+    // Augment the constraints with the sourceId of the selected device.
+    appendSourceId(audioSelect.value, 'audio', constraints);
+    appendSourceId(videoSelect.value, 'video', constraints);
+
+    getUserMedia(constraints, successFunc, failFunc);
+    trace('Requested access to local media with constraints:\n' +
+        '  \'' + JSON.stringify(constraints) + '\'');
+  } catch (e) {
+    return reportFatal('getUserMedia failed with exception: ' + e.message);
+  }
+}
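+
+// Illustrative usage inside a test: request audio-only capture; the
+// selected audio/video sourceIds are appended automatically.
+//
+//   doGetUserMedia({audio: true}, function(stream) {
+//     reportSuccess('Got ' + stream.getAudioTracks().length +
+//                   ' audio track(s).');
+//     testFinished();
+//   });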
+
+function appendSourceId(id, type, constraints) {
+  if (constraints[type] === true) {
+    constraints[type] = {optional: [{sourceId: id}]};
+  } else if (typeof constraints[type] === 'object') {
+    if (typeof constraints[type].optional === 'undefined') {
+      constraints[type].optional = [];
+    }
+    constraints[type].optional.push({sourceId: id});
+  }
+}
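+
+// For example, appendSourceId('abc', 'audio', {audio: true}) rewrites the
+// constraints to {audio: {optional: [{sourceId: 'abc'}]}}, while
+// appendSourceId('abc', 'audio', {audio: {optional: []}}) pushes
+// {sourceId: 'abc'} onto the existing optional list.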
+
+function gotSources(sourceInfos) {
+  for (var i = 0; i !== sourceInfos.length; ++i) {
+    var sourceInfo = sourceInfos[i];
+    var option = document.createElement('option');
+    option.value = sourceInfo.id;
+    appendOption(sourceInfo, option);
+  }
+}
+
+function appendOption(sourceInfo, option) {
+  if (sourceInfo.kind === 'audio') {
+    option.text = sourceInfo.label || 'microphone ' + (audioSelect.length + 1);
+    audioSelect.appendChild(option);
+  } else if (sourceInfo.kind === 'video') {
+    option.text = sourceInfo.label || 'camera ' + (videoSelect.length + 1);
+    videoSelect.appendChild(option);
+  } else {
+    console.log('Unknown source kind: ' + sourceInfo.kind);
+  }
+}
+
+if (typeof MediaStreamTrack === 'undefined') {
+  // reportFatal() cannot be used here: no test is running yet, so report
+  // directly to the user instead.
+  alert('This browser does not support MediaStreamTrack.\nTry Chrome Canary.');
+} else {
+  MediaStreamTrack.getSources(gotSources);
+}
+
+function testIsDisabled(testName) {
+  if (testFilters.length === 0) {
+    return false;
+  }
+
+  for (var i = 0; i !== testFilters.length; ++i) {
+    if (testFilters[i] === testName) {
+      return false;
+    }
+  }
+  return true;
+}
+
+// Parse URL parameters and configure test filters.
+{
+  var parseUrlParameters = function() {
+    var output = {};
+    // python SimpleHTTPServer always adds a / on the end of the request.
+    // Remove it so developers can easily run testrtc on their machines.
+    // Note that an actual / is still sent in most cases as %2F.
+    var args = window.location.search.replace(/\//g, '').substr(1).split('&');
+    for (var i = 0; i !== args.length; ++i) {
+      var split = args[i].split('=');
+      output[decodeURIComponent(split[0])] = decodeURIComponent(split[1]);
+    }
+    return output;
+  };
+
+  var parameters = parseUrlParameters();
+  var filterParameterName = 'test_filter';
+  if (filterParameterName in parameters) {
+    testFilters = parameters[filterParameterName].split(',');
+  }
+}
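+
+// For example, loading index.html?test_filter=Audio%20capture enables only
+// the test named 'Audio capture' (see testIsDisabled above); all other tests
+// report 'Test is disabled.'. Multiple names are comma-separated.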
diff --git a/samples/web/content/testrtc/js/mictest.js b/samples/web/content/testrtc/js/mictest.js
new file mode 100644
index 0000000..1cccd2d
--- /dev/null
+++ b/samples/web/content/testrtc/js/mictest.js
@@ -0,0 +1,134 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+'use strict';
+
+addTest('Microphone', 'Audio capture', function() {
+  var test = new MicTest();
+  test.run();
+});
+
+function MicTest() {
+  this.inputChannels = 6;
+  this.outputChannels = 2;
+  this.lowVolumeThreshold = -60;
+  // To be able to capture any data on Windows we need a large buffer size.
+  this.bufferSize = 8192;
+  this.activeChannels = [];
+  // TODO (jansson) Currently only getting mono on two channels, need to figure
+  // out how to fix that.
+  // Turning off all audio processing constraints enables stereo input.
+  this.constraints = {
+    audio: {
+      optional: [
+        { googEchoCancellation: false },
+        { googAutoGainControl: false },
+        { googNoiseSuppression: false },
+        { googHighpassFilter: false },
+        { googAudioMirroring: true },
+        { googNoiseSuppression2: false },
+        { googEchoCancellation2: false },
+        { googAutoGainControl2: false }
+      ]
+    }
+  };
+}
+
+MicTest.prototype = {
+  run: function() {
+    doGetUserMedia(this.constraints, this.gotStream.bind(this));
+  },
+
+  gotStream: function(stream) {
+    if (!this.checkAudioTracks(stream)) {
+      return;
+    }
+    this.createAudioBuffer();
+  },
+
+  checkAudioTracks: function(stream) {
+    this.stream = stream;
+    var audioTracks = stream.getAudioTracks();
+    if (audioTracks.length < 1) {
+      reportFatal('No audio track in returned stream.');
+      return false;
+    }
+    reportSuccess('Audio track created using device=' + audioTracks[0].label);
+    return true;
+  },
+
+  createAudioBuffer: function() {
+    this.audioSource = audioContext.createMediaStreamSource(this.stream);
+    this.scriptNode = audioContext.createScriptProcessor(this.bufferSize,
+        this.inputChannels, this.outputChannels);
+    this.audioSource.connect(this.scriptNode);
+    this.scriptNode.connect(audioContext.destination);
+    this.scriptNode.onaudioprocess = this.processAudio.bind(this);
+  },
+
+  processAudio: function(event) {
+    var inputBuffer = event.inputBuffer;
+    this.stream.stop();
+    this.audioSource.disconnect(this.scriptNode);
+    this.scriptNode.disconnect(audioContext.destination);
+    // Start analyzing the audio buffer.
+    reportInfo('Audio input sample rate=' + inputBuffer.sampleRate);
+    this.testNumberOfActiveChannels(inputBuffer);
+  },
+
+  testNumberOfActiveChannels: function(buffer) {
+    var numberOfChannels = buffer.numberOfChannels;
+    for (var channel = 0; channel < numberOfChannels; channel++) {
+      var channelData = buffer.getChannelData(channel);
+      var numberOfZeroSamples = 0;
+      for (var sample = 0; sample < buffer.length; sample++) {
+        if (channelData[sample] === 0) {
+          numberOfZeroSamples++;
+        }
+      }
+      if (numberOfZeroSamples !== buffer.length) {
+        this.activeChannels[channel] = numberOfZeroSamples;
+        this.testInputVolume(buffer, channel);
+      }
+    }
+    if (this.activeChannels.length === 0) {
+      reportFatal('No active input channels detected. Microphone is most ' +
+                  'likely muted or broken, please check if muted in the ' +
+                  'sound settings or physically on the device.');
+      return;
+    }
+    reportSuccess('Audio input channels=' + this.activeChannels.length);
+    // TODO (jansson) Add logic to get stereo input to webaudio, currently
+    // always getting mono, e.g. same data on 2 channels.
+    // With two input channels, compare their zero-sample counts to determine
+    // whether the stream is actually mono.
+    if (this.activeChannels.length === 2 &&
+        this.activeChannels[0] === this.activeChannels[1]) {
+      reportInfo('Mono stream detected.');
+    }
+    testFinished();
+  },
+
+  testInputVolume: function(buffer, channel) {
+    var data = buffer.getChannelData(channel);
+    // Compute the RMS level of the channel and convert it to dB.
+    var sumOfSquares = 0;
+    for (var sample = 0; sample < buffer.length; ++sample) {
+      sumOfSquares += data[sample] * data[sample];
+    }
+    var rms = Math.sqrt(sumOfSquares / buffer.length);
+    var db = 20 * Math.log(rms) / Math.log(10);
+
+    // Check input audio level.
+    if (db < this.lowVolumeThreshold) {
+      reportError('Audio input level=' + db + ' dB. Microphone input level ' +
+                  'is low; increase input volume or move closer to the ' +
+                  'microphone.');
+    } else {
+      reportSuccess('Audio power for channel ' + channel + '=' + db + ' dB');
+    }
+  }
+};
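+
+// For reference: WebAudio samples are nominally in [-1, 1], so a full-scale
+// sine wave has an RMS of 1/sqrt(2), i.e. 20 * log10(0.707) = about -3 dB.
+// The -60 dB threshold above therefore only flags near-silent input.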
diff --git a/samples/web/content/testrtc/js/ssim.js b/samples/web/content/testrtc/js/ssim.js
new file mode 100644
index 0000000..2f0ccc8
--- /dev/null
+++ b/samples/web/content/testrtc/js/ssim.js
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+/* More information about these options at jshint.com/docs/options */
+/* jshint camelcase: false */
+'use strict';
+
+
+/* This is an implementation of the algorithm for calculating the Structural
+ * SIMilarity (SSIM) index between two images. Please refer to the article [1],
+ * the website [2] and/or the Wikipedia article [3]. This code takes the value
+ * of the constants C1 and C2 from the Matlab implementation in [4].
+ *
+ * [1] Z. Wang, A. C. Bovik, H. R. Sheikh, and E. P. Simoncelli, "Image quality
+ * assessment: From error measurement to structural similarity",
+ * IEEE Transactions on Image Processing, vol. 13, no. 1, Jan. 2004.
+ * [2] http://www.cns.nyu.edu/~lcv/ssim/
+ * [3] http://en.wikipedia.org/wiki/Structural_similarity
+ * [4] http://www.cns.nyu.edu/~lcv/ssim/ssim_index.m
+ */
+
+function Ssim() {}
+
+Ssim.prototype = {
+  // Implementation of Eq.2 (the mean of a vector) and Eq.4 (the variance,
+  // except for the square root). The variance is normalized by the vector
+  // length, like the covariance below, so that calculate() returns exactly
+  // 1 for identical inputs.
+  statistics: function(a) {
+    var accu = 0;
+    var i;
+    for (i = 0; i < a.length; ++i) {
+      accu += a[i];
+    }
+    var mean_a = accu / a.length;
+    var variance = 0;
+    for (i = 0; i < a.length; ++i) {
+      var diff = a[i] - mean_a;
+      variance += diff * diff;
+    }
+    return { mean : mean_a, variance : variance / a.length };
+  },
+
+  // Implementation of Eq.11., cov(Y, Z) = E[(Y - uY)(Z - uZ)].
+  covariance: function(a, b, mean_a, mean_b) {
+    var accu = 0;
+    for (var i = 0; i < a.length; i += 1) {
+      accu += (a[i] - mean_a) * (b[i] - mean_b);
+    }
+    return accu / a.length;
+  },
+
+  calculate: function(x, y) {
+    if (x.length !== y.length) {
+      return 0;
+    }
+
+    // Values of the constants come from the Matlab code referred before.
+    var K1 = 0.01;
+    var K2 = 0.03;
+    var L = 255;
+    var C1 = (K1 * L) * (K1 * L);
+    var C2 = (K2 * L) * (K2 * L);
+    var C3 = C2 / 2;
+
+    var stats_x = this.statistics(x);
+    var mu_x = stats_x.mean;
+    var sigma_x2 = stats_x.variance;
+    var sigma_x = Math.sqrt(sigma_x2);
+    var stats_y = this.statistics(y);
+    var mu_y = stats_y.mean;
+    var sigma_y2 = stats_y.variance;
+    var sigma_y = Math.sqrt(sigma_y2);
+    var sigma_xy = this.covariance(x, y, mu_x, mu_y);
+
+    // Implementation of Eq.6.
+    var luminance = (2 * mu_x * mu_y + C1) /
+        ((mu_x * mu_x) + (mu_y * mu_y) + C1);
+    // Implementation of Eq.10.
+    var structure = (sigma_xy + C3) / (sigma_x * sigma_y + C3);
+    // Implementation of Eq.9.
+    var contrast = (2 * sigma_x * sigma_y + C2) / (sigma_x2 + sigma_y2 + C2);
+
+    // Implementation of Eq.12.
+    return luminance * contrast * structure;
+  }
+};
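+
+// Illustrative usage: identical inputs score 1 (up to floating point),
+// dissimilar inputs score closer to 0.
+//
+//   var ssim = new Ssim();
+//   ssim.calculate([10, 20, 30, 40], [10, 20, 30, 40]);  // 1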
diff --git a/samples/web/content/testrtc/js/stats.js b/samples/web/content/testrtc/js/stats.js
new file mode 100644
index 0000000..e27679b
--- /dev/null
+++ b/samples/web/content/testrtc/js/stats.js
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+'use strict';
+
+function StatisticsAggregate(rampUpThreshold) {
+  this.startTime_ = 0;
+  this.sum_ = 0;
+  this.count_ = 0;
+  this.max_ = 0;
+  this.rampUpThreshold_ = rampUpThreshold;
+  this.rampUpTime_ = Infinity;
+}
+
+StatisticsAggregate.prototype = {
+  add: function(time, datapoint) {
+    if (this.startTime_ === 0) {
+      this.startTime_ = time;
+    }
+    this.sum_ += datapoint;
+    this.max_ = Math.max(this.max_, datapoint);
+    if (this.rampUpTime_ === Infinity &&
+        datapoint > this.rampUpThreshold_) {
+      this.rampUpTime_ = time;
+    }
+    this.count_++;
+  },
+
+  getAverage: function() {
+    if (this.count_ === 0) {
+      return 0;
+    }
+    return Math.round(this.sum_ / this.count_);
+  },
+
+  getMax: function() {
+    return this.max_;
+  },
+
+  getRampUpTime: function() {
+    return this.rampUpTime_ - this.startTime_;
+  }
+};
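+
+// Illustrative usage (assumed units: time in milliseconds, datapoint in
+// kbps):
+//
+//   var stats = new StatisticsAggregate(1000);
+//   stats.add(0, 300);
+//   stats.add(100, 1200);
+//   stats.getAverage();     // 750
+//   stats.getRampUpTime();  // 100: first datapoint above the 1000 threshold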
diff --git a/samples/web/content/testrtc/testrtc.py b/samples/web/content/testrtc/testrtc.py
new file mode 100755
index 0000000..a520f05
--- /dev/null
+++ b/samples/web/content/testrtc/testrtc.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+#
+# Copyright 2014 Google Inc. All Rights Reserved.
+
+"""WebRTC Test
+
+This module serves the WebRTC Test Page.
+"""
+
+import os
+import random
+
+import jinja2
+import webapp2
+
+jinja_environment = jinja2.Environment(
+    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
+
+# Generate 10 kilobytes of random data and create a 10MB buffer from it.
+random_file = bytearray([random.randint(0,127) for i in xrange(0,10000)] * 1000)
+
+class MainPage(webapp2.RequestHandler):
+  """The main UI page, renders the 'index.html' template."""
+  def get(self):
+    template = jinja_environment.get_template('index.html')
+    content = template.render({})
+    self.response.out.write(content)
+
+class TestDownloadFile(webapp2.RequestHandler):
+  def get(self, size_kbytes):
+    self.response.headers.add_header("Access-Control-Allow-Origin", "*")
+    self.response.headers['Content-Type'] = 'application/octet-stream'
+    self.response.out.write(random_file[0: int(size_kbytes) * 1000])
+
+app = webapp2.WSGIApplication([
+    ('/', MainPage),
+    (r'/test-download-file/(\d?\d00)KB\.data', TestDownloadFile),
+  ], debug=True)
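+
+# For example, GET /test-download-file/500KB.data matches the route above and
+# returns the first 500 * 1000 bytes of random_file.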
diff --git a/samples/web/content/trickle-ice/index.html b/samples/web/content/trickle-ice/index.html
deleted file mode 100644
index 7d2a58f..0000000
--- a/samples/web/content/trickle-ice/index.html
+++ /dev/null
@@ -1,113 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta name="keywords" content="WebRTC, HTML5, JavaScript" />
-<meta name="description" content="Client-side WebRTC code samples." />
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
-<meta http-equiv="X-UA-Compatible" content="chrome=1" />
-<base target="_blank">
-<title>Trickle ICE</title>
-<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="../../css/main.css" />
-<link rel='stylesheet' href="css/main.css" />
-</head>
-<body>
-<div id='container'>
-
-<h1><a href="https://googlechrome.github.io/webrtc/" title="WebRTC samples homepage">WebRTC samples</a> <span>Trickle ICE</span></h1>
-
-<section>
-
-  <p>This page tests the trickle ICE functionality in a WebRTC implementation. It creates a PeerConnection with the specified ICEServers, and then starts candidate gathering for a session with a single audio stream. As candidates are gathered, they are displayed in the text box below, along with an indication when candidate gathering is complete.</p>
-
-  <p>Individual STUN and TURN servers can be added using the Add server / Remove server controls below; in addition, the type of candidates released to the application can be controlled via the IceTransports constraint.</p>
-
-</section>
-
-<section id="iceServers">
-
-  <h2>ICE servers</h2>
-
-  <select id="servers" size="4">
-    <option value="{&quot;url&quot;:&quot;stun:stun.l.google.com:19302&quot;}">stun:stun.l.google.com:19302</option>
-  </select>
-
-  <div>
-    <label for="url">STUN or TURN URI:</label>
-    <input id="url">
-  </div>
-
-  <div>
-    <label for="username">TURN username:</label>
-    <input id="username">
-  </div>
-
-  <div>
-    <label for="password">TURN password:</label>
-    <input id="password">
-  </div>
-
-  <div>
-    <button id="add">Add Server</button>
-    <button id="remove">Remove Server</button>
-  </div>
-
-</section>
-
-<section id="iceOptions">
-
-  <h2>ICE options</h2>
-
-  <div id="iceTransports">
-    <span>IceTransports value:</span>
-    <input type="radio" name="transports" value="all" id="all" checked><label for="all">all</label>
-    <input type="radio" name="transports" value="relay" id="relay">
-    <label for="relay">relay</label>
-    <input type="radio" name="transports" value="none" id="none">
-    <label for="none">none</label>
-  </div>
-  <div>
-    <label for="ipv6">Gather IPv6 candidates:</label>
-    <input id="ipv6" type="checkbox" checked="true">
-  </div>
-  <div>
-    <label for="unbundle">Gather unbundled RTCP candidates:</label>
-    <input id="unbundle" type="checkbox" checked="true">
-  </div>
-
-</section>
-
-<section>
-
-  <table id="candidates">
-    <thead id="candidatesHead"><tr>
-      <th>Time</th><th>Component</th><th>Type</th><th>Foundation</th>
-      <th>Protocol</th><th>Address</th><th>Port</th><th>Priority</th>
-    </tr></thead>
-    <tbody id="candidatesBody"></tbody>
-  </table>
-  <button id="gather">Gather candidates</button>
-
-</section>
-
-
-
-
-
- <a href="https://github.com/GoogleChrome/webrtc/tree/gh-pages/samples/web/content/trickleice" title="View source for this page on Github" id="viewSource">View source on Github</a>
-</div>
-
-<script src="../../js/adapter.js"></script>
-<script src="js/main.js"></script>
-
-<script src="../../js/lib/ga.js"></script>
-</body>
-</html>
diff --git a/samples/web/content/webaudio-input/css/main.css b/samples/web/content/webaudio-input/css/main.css
deleted file mode 100644
index eaf9130..0000000
--- a/samples/web/content/webaudio-input/css/main.css
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
- */
-button {
-  margin: 0 20px 20px 0;
-  width: 96px;
-}
-
-div#dialPad button {
-  background-color: #ddd;
-  border: 1px solid #ccc;
-  color: black;
-  font-size: 1em;
-  font-weight: 400;
-  height: 40px;
-  margin: 0 10px 10px 0;
-  width: 40px;
-}
-
-div#dialPad button:hover {
-  background-color: #aaa;
-}
-
-div#dialPad button:active {
-  background-color: #888;
-}
-
-div#dialPad {
-  display: inline-block;
-  margin: 0 20px 20px 0;
-  vertical-align: top;
-}
-
-div#parameters {
-  margin: 0 0 25px 0;
-}
-
-div#parameters > div {
-  height: 28px;
-  margin: 0 0 10px 0;
-}
-
-div#dtmf {
-  background-color: #eee;
-  display: inline-block;
-  height: 180px;
-  margin: 0 0 20px 0;
-  padding: 5px 5px 5px 10px;
-  width: calc(100% - 239px);
-}
-
-div#dtmf div {
-  font-family: 'Inconsolata', 'Courier New', monospace;
-}
-
-div#sentTones {
-  display: inline-block;
-  line-height: 1.2em;
-}
-
-div#dtmfStatus {
-  margin: 0 0 10px 0;
-}
-
-div#parameters input {
-  font-size: 1em;
-  width: 62px;
-}
-
-div#parameters input#tones {
-  width: calc(100% - 78px);
-}
-
-div#parameters label {
-  display: inline-block;
-  font-weight: 400;
-  height: 28px;
-  position: relative;
-  top: 4px;
-  vertical-align: top;
-  width: 68px;
-}
-
diff --git a/samples/web/content/webaudio-input/index.html b/samples/web/content/webaudio-input/index.html
deleted file mode 100644
index 70fe2da..0000000
--- a/samples/web/content/webaudio-input/index.html
+++ /dev/null
@@ -1,273 +0,0 @@
-<!--
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
--->
-<!DOCTYPE html>
-<html>
-<head>
-<meta charset="utf-8">
-<title>Audio effects with WebAudio in WebRTC</title>
-<script type="text/javascript" src="../../js/adapter.js"></script>
-<script>
-  var audioElement;
-  var buttonStart;
-  var buttonStop;
-  var localStream;
-  var pc1, pc2;
-  var display;
-
-  var webAudio;
-
-  // WebAudio helper class which takes care of the WebAudio related parts.
-
-  function WebAudio() {
-    window.AudioContext = window.AudioContext || window.webkitAudioContext;
-    this.context = new AudioContext();
-    this.soundBuffer = null;
-  }
-
-  WebAudio.prototype.start = function() {
-    this.filter = this.context.createBiquadFilter();
-    this.filter.type = "highpass";
-    this.filter.frequency.value = 1500;
-  }
-
-  WebAudio.prototype.applyFilter = function(stream) {
-    this.mic = this.context.createMediaStreamSource(stream);
-    this.mic.connect(this.filter);
-    this.peer = this.context.createMediaStreamDestination();
-    this.filter.connect(this.peer);
-    return this.peer.stream;
-  }
-
-  WebAudio.prototype.renderLocally = function(enabled) {
-    if (enabled) {
-      this.mic.connect(this.context.destination);
-    } else {
-      this.mic.disconnect(0);
-      this.mic.connect(this.filter);
-    }
-  }
-
-  WebAudio.prototype.stop = function() {
-    this.mic.disconnect(0);
-    this.filter.disconnect(0);
-    mic = null;
-    peer = null;
-  }
-
-  WebAudio.prototype.addEffect = function() {
-    var effect = this.context.createBufferSource();
-    effect.buffer = this.soundBuffer;
-    if (this.peer) {
-      effect.connect(this.peer);
-      effect.start(0);
-    }
-  }
-
-  WebAudio.prototype.loadCompleted = function() {
-    this.context.decodeAudioData(this.request.response, function (buffer) {this.soundBuffer = buffer;}.bind(this));
-  }
-
-  WebAudio.prototype.loadSound = function(url) {
-    this.request = new XMLHttpRequest();
-    this.request.open('GET', url, true);
-    this.request.responseType = 'arraybuffer';
-    this.request.onload = this.loadCompleted.bind(this);
-    this.request.send();
-  }
-
-  // Global methods.
-
-  function trace(txt) {
-    display.innerHTML += txt + "<br>";
-  }
-
-  function logEvent(e) {
-    console.log(e.type + ':' + e.target + ':' + e.target.id + ':muted=' +
-                e.target.muted);
-  }
-
-  $ = function(id) {
-    return document.getElementById(id);
-  };
-
-  function start() {
-    webAudio.start();
-    var constraints = {audio:true, video:false};
-    getUserMedia(constraints, gotStream, gotStreamFailed);
-    buttonStart.disabled = true;
-    buttonStop.disabled = false;
-  }
-
-  function stop() {
-    webAudio.stop();
-    pc1.close();
-    pc2.close();
-    pc1 = null;
-    pc2 = null;
-    buttonStart.enabled = true;
-    buttonStop.enabled = false;
-    localStream.stop();
-  }
-
-  function gotStream(stream) {
-    audioTracks = stream.getAudioTracks();
-    if (audioTracks.length == 1) {
-      console.log('gotStream({audio:true, video:false})');
-
-      var filteredStream = webAudio.applyFilter(stream);
-
-      var servers = null;
-
-      pc1 = new webkitRTCPeerConnection(servers);
-      console.log('Created local peer connection object pc1');
-      pc1.onicecandidate = iceCallback1;
-      pc2 = new webkitRTCPeerConnection(servers);
-      console.log('Created remote peer connection object pc2');
-      pc2.onicecandidate = iceCallback2;
-      pc2.onaddstream = gotRemoteStream;
-
-      pc1.addStream(filteredStream);
-      pc1.createOffer(gotDescription1);
-
-      stream.onended = function() {
-        console.log('stream.onended');
-        buttonStart.disabled = false;
-        buttonStop.disabled = true;
-      };
-
-      localStream = stream;
-    } else {
-      alert('The media stream contains an invalid amount of audio tracks.');
-      stream.stop();
-    }
-  }
-
-  function gotStreamFailed(error) {
-    buttonStart.disabled = false;
-    buttonStop.disabled = true;
-    alert('Failed to get access to local media. Error code: ' + error.code);
-  }
-
-  function forceOpus(sdp) {
-    // Remove all other codecs (not the video codecs though).
-    sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
-                      'm=audio $1 RTP/SAVPF 111\r\n');
-    sdp = sdp.replace(/a=rtpmap:(?!111)\d{1,3} (?!VP8|red|ulpfec).*\r\n/g, '');
-    return sdp;
-}
-
-  function gotDescription1(desc){
-    console.log('Offer from pc1 \n' + desc.sdp);
-    var modifiedOffer = new RTCSessionDescription({type: 'offer',
-                                                   sdp: forceOpus(desc.sdp)});
-    pc1.setLocalDescription(modifiedOffer);
-    console.log('Offer from pc1 \n' + modifiedOffer.sdp);
-    pc2.setRemoteDescription(modifiedOffer);
-    pc2.createAnswer(gotDescription2);
-  }
-
-  function gotDescription2(desc){
-    pc2.setLocalDescription(desc);
-    console.log('Answer from pc2 \n' + desc.sdp);
-    pc1.setRemoteDescription(desc);
-  }
-
-  function gotRemoteStream(e){
-    attachMediaStream(audioElement, e.stream);
-  }
-
-  function iceCallback1(event){
-    if (event.candidate) {
-      pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
-                          onAddIceCandidateSuccess, onAddIceCandidateError);
-      console.log('Local ICE candidate: \n' + event.candidate.candidate);
-    }
-  }
-
-  function iceCallback2(event){
-    if (event.candidate) {
-      pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
-                          onAddIceCandidateSuccess, onAddIceCandidateError);
-      console.log('Remote ICE candidate: \n ' + event.candidate.candidate);
-    }
-  }
-
-  function onAddIceCandidateSuccess() {
-    trace("AddIceCandidate success.");
-  }
-
-  function onAddIceCandidateError(error) {
-    trace("Failed to add Ice Candidate: " + error.toString());
-  }
-
-  function handleKeyDown(event) {
-    var keyCode = event.keyCode;
-    webAudio.addEffect();
-  }
-
-  function doMix(checkbox) {
-    webAudio.renderLocally(checkbox.checked);
-  }
-
-  function onload() {
-    webAudio = new WebAudio();
-    webAudio.loadSound('audio/Shamisen-C4.wav');
-
-    audioElement = $('audio');
-    buttonStart = $('start');
-    buttonStop = $('stop');
-    display = $('display');
-
-    document.addEventListener('keydown', handleKeyDown, false);
-
-    buttonStart.enabled = true;
-    buttonStop.disabled = true;
-  }
-</script>
-</head>
-
-<body onload='onload()'>
-  <h2>Capture microphone input and stream it out to a peer with a processing
-      effect applied to the audio.</h2>
-  <p>The audio stream is: <br><br>
-     o Recorded using <a href="http://www.html5audio.org/2012/09/live-audio-input-comes-to-googles-chrome-canary.html"
-       title="Live audio input comes to Google's Chrome Canary">live-audio
-       input.</a><br>
-     o Filtered using an HP filter with fc=1500 Hz.<br>
-     o Encoded using <a href="http://www.opus-codec.org/" title="Opus Codec">
-       Opus.</a><br>
-     o Transmitted (in loopback) to remote peer using
-       <a href="http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcpeerconnection-interface"
-       title="RTCPeerConnection Interface">RTCPeerConnection</a> where it is decoded.<br>
-     o Finally, the received remote stream is used as source to an &lt;audio&gt;
-       tag and played out locally.<br>
-     <br>Press any key to add an effect to the transmitted audio while talking.
-  </p>
-  <p>Please note that: <br><br>
-     o Linux is currently not supported.<br>
-     o Sample rate and channel configuration must be the same for input and
-       output sides on Windows.<br>
-     o Only the Default microphone device can be used for capturing.
-  </p>
-  <p>For more information, see <a href="https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/webrtc-integration.html"
-     title="Example 3: Capture microphone input and stream it out to a peer with a processing effect applied to the audio">
-     WebRTC integration with the Web Audio API.</a>
-  </p>
-  <style>
-    button {
-      font: 14px sans-serif;
-      padding: 8px;
-    }
-  </style>
-  <audio id="audio" autoplay controls></audio><br><br>
-  <button id="start" onclick="start()">Start</button>
-  <button id="stop" onclick="stop()">Stop</button><br><br>
-  Add local audio to output:<input id="mix" type="checkbox" onclick="doMix(this);"><br><br>
-  <pre id="display"></pre>
-</body>
-</html>
diff --git a/samples/web/content/webaudio-input/js/main.js b/samples/web/content/webaudio-input/js/main.js
deleted file mode 100644
index 5a82d76..0000000
--- a/samples/web/content/webaudio-input/js/main.js
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree.
- */
-var callButton = document.querySelector('button#callButton');
-var sendTonesButton = document.querySelector('button#sendTonesButton');
-var hangupButton = document.querySelector('button#hangupButton');
-
-sendTonesButton.disabled = true;
-hangupButton.disabled = true;
-
-callButton.onclick = call;
-sendTonesButton.onclick = handleSendTonesClick;
-hangupButton.onclick = hangup;
-
-var durationInput = document.querySelector('input#duration');
-var gapInput = document.querySelector('input#gap');
-var tonesInput = document.querySelector('input#tones');
-
-var sentTonesDiv = document.querySelector('div#sentTones');
-var dtmfStatusDiv = document.querySelector('div#dtmfStatus');
-
-var audio = document.querySelector('audio');
-
-var pc1, pc2;
-var localStream;
-var dtmfSender;
-
-var sdpConstraints = {
-  'mandatory': {
-    'OfferToReceiveAudio': true,
-    'OfferToReceiveVideo': false
-  }
-};
-
-
-main();
-
-function main() {
-  addDialPadHandlers();
-}
-
-function gotStream(stream) {
-  trace('Received local stream');
-  // Call the polyfill wrapper to attach the media stream to this element.
-  localStream = stream;
-  var audioTracks = localStream.getAudioTracks();
-  if (audioTracks.length > 0)
-    trace('Using Audio device: ' + audioTracks[0].label);
-  pc1.addStream(localStream);
-  trace('Adding Local Stream to peer connection');
-  pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
-}
-
-function onCreateSessionDescriptionError(error) {
-  trace('Failed to create session description: ' + error.toString());
-}
-
-function call() {
-  trace('Starting call');
-  var servers = null;
-  var pcConstraints = {
-    'optional': []
-  };
-  pc1 = new RTCPeerConnection(servers, pcConstraints);
-  trace('Created local peer connection object pc1');
-  pc1.onicecandidate = iceCallback1;
-  pc2 = new RTCPeerConnection(servers, pcConstraints);
-  trace('Created remote peer connection object pc2');
-  pc2.onicecandidate = iceCallback2;
-  pc2.onaddstream = gotRemoteStream;
-
-  trace('Requesting local stream');
-  // Call into getUserMedia via the polyfill (adapter.js).
-  getUserMedia({
-      audio: true,
-      video: false
-    },
-    gotStream, function (e) {
-      alert('getUserMedia() error: ' + e.name);
-    });
-
-  callButton.disabled = true;
-  hangupButton.disabled = false;
-  sendTonesButton.disabled = false;
-}
-
-function gotDescription1(desc) {
-  pc1.setLocalDescription(desc);
-  trace('Offer from pc1 \n' + desc.sdp);
-  pc2.setRemoteDescription(desc);
-  // Since the 'remote' side has no media stream we need
-  // to pass in the right constraints in order for it to
-  // accept the incoming offer of audio.
-  pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError,
-    sdpConstraints);
-}
-
-function gotDescription2(desc) {
-  // Setting PCMU as the preferred codec.
-  desc.sdp = desc.sdp.replace(/m=.*\r\n/, 'm=audio 1 RTP/SAVPF 0 126\r\n');
-  // Workaround for issue 1603.
-  desc.sdp = desc.sdp.replace(/.*fmtp.*\r\n/g, '');
-  pc2.setLocalDescription(desc);
-  trace('Answer from pc2: \n' + desc.sdp);
-  pc1.setRemoteDescription(desc);
-}
-
-function hangup() {
-  trace('Ending call');
-  pc1.close();
-  pc2.close();
-  pc1 = null;
-  pc2 = null;
-  localStream = null;
-  dtmfSender = null;
-  callButton.disabled = false;
-  hangupButton.disabled = true;
-  sendTonesButton.disabled = true;
-  dtmfStatusDiv.textContent = 'DTMF deactivated';
-}
-
-function gotRemoteStream(e) {
-  // Call the polyfill wrapper to attach the media stream to this element.
-  attachMediaStream(audio, e.stream);
-  trace('Received remote stream');
-  if (RTCPeerConnection.prototype.createDTMFSender) {
-    enableDtmfSender();
-  } else {
-    alert('This demo requires the RTCPeerConnection method createDTMFSender() which is not support by this browser.');
-  }
-
-}
-
-function iceCallback1(event) {
-  if (event.candidate) {
-    pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
-      onAddIceCandidateSuccess, onAddIceCandidateError);
-    trace('Local ICE candidate: \n' + event.candidate.candidate);
-  }
-}
-
-function iceCallback2(event) {
-  if (event.candidate) {
-    pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
-      onAddIceCandidateSuccess, onAddIceCandidateError);
-    trace('Remote ICE candidate: \n ' + event.candidate.candidate);
-  }
-}
-
-function onAddIceCandidateSuccess() {
-  trace('AddIceCandidate success');
-}
-
-function onAddIceCandidateError(error) {
-  trace('Failed to add Ice Candidate: ' + error.toString());
-}
-
-function enableDtmfSender() {
-  dtmfStatusDiv.textContent = 'DTMF activated';
-  if (localStream !== null) {
-    var localAudioTrack = localStream.getAudioTracks()[0];
-    dtmfSender = pc1.createDTMFSender(localAudioTrack);
-    trace('Created DTMFSender:\n');
-    dtmfSender.ontonechange = dtmfOnToneChange;
-  } else {
-    trace('No local stream to create DTMF Sender\n');
-  }
-}
-
-function dtmfOnToneChange(tone) {
-  if (tone) {
-    trace('Sent DTMF tone: ' + tone.tone);
-    sentTonesDiv.textContent += tone.tone + ' ';
-  }
-}
-
-function sendTones(tones) {
-  if (dtmfSender) {
-    var duration = durationInput.value;
-    var gap = gapInput.value;
-    console.log('Tones, duration, gap: ', tones, duration, gap);
-    dtmfSender.insertDTMF(tones, duration, gap);
-  }
-}
-
-function handleSendTonesClick(){
-  sendTones(tonesInput.value);
-}
-
-function addDialPadHandlers() {
-  var dialPad = document.querySelector('div#dialPad');
-  var buttons = dialPad.querySelectorAll('button');
-  for (var i = 0; i != buttons.length; ++i) {
-    buttons[i].onclick = sendDtmfTone;
-  }
-}
-
-function sendDtmfTone() {
-  sendTones(this.textContent);
-}
-
diff --git a/samples/web/css/main.css b/samples/web/css/main.css
index 22e459f..5da32dd 100644
--- a/samples/web/css/main.css
+++ b/samples/web/css/main.css
@@ -9,7 +9,7 @@
   display: none;
 }
 
-* {
+body {
   font-family: 'Roboto', sans-serif;
   font-weight: 300;
 }
@@ -201,6 +201,10 @@
   font-weight: 500;
 }
 
+textarea {
+  font-family: 'Roboto', sans-serif;
+}
+
 video {
   background: #222;
   margin: 0 0 20px 0;
diff --git a/samples/web/js/adapter.js b/samples/web/js/adapter.js
index 9943dc7..ff9344e 100644
--- a/samples/web/js/adapter.js
+++ b/samples/web/js/adapter.js
@@ -7,14 +7,8 @@
  */
 
 /* More information about these options at jshint.com/docs/options */
-
-/* jshint browser: true, camelcase: true, curly: true, devel: true,
-eqeqeq: true, forin: false, globalstrict: true, quotmark: single,
-undef: true, unused: strict */
-
 /* global mozRTCIceCandidate, mozRTCPeerConnection,
 mozRTCSessionDescription, webkitRTCPeerConnection */
-
 /* exported trace */
 
 'use strict';