App Engine Python SDK version 1.9.15

git-svn-id: http://googleappengine.googlecode.com/svn/trunk/python@490 80f5ef21-4148-0410-bacc-cfb02402ada8
diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index 3d9c59c..1d79dff 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -3,8 +3,52 @@
 
 App Engine SDK - Release Notes
 
-Version 1.9.13
+Version 1.9.15
 
+All
+==============================
+- Fixed an issue with Datastore stats not displaying entity counts.
+    https://code.google.com/p/googleappengine/issues/detail?id=9328
+
+Python
+==============================
+- search.GetResponse in the Search API now supports __len__.
+    https://code.google.com/p/googleappengine/issues/detail?id=10340
+- The get_access_token() method of the App Identity API now caches access
+  tokens within the instance.
+- Fixed an issue with Cloud Endpoints where multi-class Python APIs
+  that reused method names could result in incorrect API descriptors.
+    https://code.google.com/p/googleappengine/issues/detail?id=10595
+    https://code.google.com/p/googleappengine/issues/detail?id=11366
+    https://code.google.com/p/googleappengine/issues/detail?id=11373
+
+PHP
+==============================
+- Uploaded temporary files are now unlinked (deleted) if they are not moved,
+  and empty files are no longer written to the temporary directory.
+    http://stackoverflow.com/questions/21571418/google-app-engine-empty-form-upload-field-submission-issue
+- Fixed an issue with get_defined_constants not returning a 'Core' array.
+    https://code.google.com/p/googleappengine/issues/detail?id=11236
+
+Version 1.9.14
+
+All
+==============================
+- The URL Fetch service now always issues a GET request when it follows a
+  302 redirect response; previously it would preserve the original HTTP
+  method. This new behavior is in line with modern conventions (see
+  http://tools.ietf.org/html/rfc7231#section-6.4). You can use the final_url
+  field of the Response object (or a similar object in other APIs, see
+  https://cloud.google.com/appengine/docs/python/urlfetch/responseobjects)
+  to determine whether your fetches are being redirected. If this change
+  affects your application, please contact our support team at
+  https://support.google.com/cloud/answer/3420056?hl=en&ref_topic=3473162,
+  even if you do not have a support contract.
+
+Version 1.9.13
+==============================
 - No changes for 1.9.13
 
 
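Two of the release notes above lend themselves to short illustrations. For the
Search API change, GetResponse (as returned by Index.get_range()) now
implements __len__; a minimal sketch, with a hypothetical index name:

    from google.appengine.api import search

    response = search.Index(name='products').get_range(limit=100)
    num_docs = len(response)  # new in 1.9.15; same as len(response.results)

For the URL Fetch change, the final_url field shows whether a fetch was
redirected (the URL and payload below are made up):

    import logging
    from google.appengine.api import urlfetch

    # A 302 answer to this POST is now re-issued as a GET (RFC 7231, 6.4).
    result = urlfetch.fetch('http://example.com/submit', method=urlfetch.POST,
                            payload='a=1', follow_redirects=True)
    if result.final_url:
        # final_url is only set when the fetch was redirected.
        logging.info('request was redirected to %s', result.final_url)
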
diff --git a/VERSION b/VERSION
index bd34a5a..4abc0b6 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
-release: "1.9.13"
-timestamp: 1411710077
+release: "1.9.15"
+timestamp: 1413915397
 api_versions: ['1']
 supported_api_versions:
   python:
diff --git a/_php_runtime.py b/_php_runtime.py
index c47660d..4f1d74e 100755
--- a/_php_runtime.py
+++ b/_php_runtime.py
@@ -17,6 +17,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/_python_runtime.py b/_python_runtime.py
index c47660d..4f1d74e 100755
--- a/_python_runtime.py
+++ b/_python_runtime.py
@@ -17,6 +17,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/api_server.py b/api_server.py
index de775b6..833a338 100755
--- a/api_server.py
+++ b/api_server.py
@@ -20,6 +20,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/appcfg.py b/appcfg.py
index de775b6..833a338 100755
--- a/appcfg.py
+++ b/appcfg.py
@@ -20,6 +20,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/backends_conversion.py b/backends_conversion.py
index de775b6..833a338 100755
--- a/backends_conversion.py
+++ b/backends_conversion.py
@@ -20,6 +20,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/bulkload_client.py b/bulkload_client.py
index de775b6..833a338 100755
--- a/bulkload_client.py
+++ b/bulkload_client.py
@@ -20,6 +20,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/bulkloader.py b/bulkloader.py
index de775b6..833a338 100755
--- a/bulkloader.py
+++ b/bulkloader.py
@@ -20,6 +20,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/dev_appserver.py b/dev_appserver.py
index c47660d..4f1d74e 100755
--- a/dev_appserver.py
+++ b/dev_appserver.py
@@ -17,6 +17,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/download_appstats.py b/download_appstats.py
index de775b6..833a338 100755
--- a/download_appstats.py
+++ b/download_appstats.py
@@ -20,6 +20,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/endpointscfg.py b/endpointscfg.py
index de775b6..833a338 100755
--- a/endpointscfg.py
+++ b/endpointscfg.py
@@ -20,6 +20,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/gen_protorpc.py b/gen_protorpc.py
index de775b6..833a338 100755
--- a/gen_protorpc.py
+++ b/gen_protorpc.py
@@ -20,6 +20,7 @@
 """Convenience wrapper for starting an appengine tool."""
 
 
+
 import os
 import sys
 
diff --git a/google/appengine/api/apiproxy_rpc.py b/google/appengine/api/apiproxy_rpc.py
index a95f0c0..0ed4129 100644
--- a/google/appengine/api/apiproxy_rpc.py
+++ b/google/appengine/api/apiproxy_rpc.py
@@ -28,6 +28,7 @@
 
 
 
+
 import sys
 
 
diff --git a/google/appengine/api/apiproxy_stub.py b/google/appengine/api/apiproxy_stub.py
index f27b609..f3ef604 100644
--- a/google/appengine/api/apiproxy_stub.py
+++ b/google/appengine/api/apiproxy_stub.py
@@ -30,6 +30,7 @@
 
 
 
+
 import random
 import threading
 
diff --git a/google/appengine/api/apiproxy_stub_map.py b/google/appengine/api/apiproxy_stub_map.py
index f4f5787..f9ec3aa 100644
--- a/google/appengine/api/apiproxy_stub_map.py
+++ b/google/appengine/api/apiproxy_stub_map.py
@@ -36,6 +36,7 @@
 
 
 
+
 import inspect
 import sys
 import threading
diff --git a/google/appengine/api/app_identity/app_identity.py b/google/appengine/api/app_identity/app_identity.py
index 448aaa2..3e92c75 100644
--- a/google/appengine/api/app_identity/app_identity.py
+++ b/google/appengine/api/app_identity/app_identity.py
@@ -28,7 +28,9 @@
 
 
 
+
 import os
+import time
 
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.api import memcache
@@ -72,6 +74,26 @@
 _MEMCACHE_NAMESPACE = '_ah_'
 
 
+
+_TOKEN_EXPIRY_SAFETY_MARGIN = 300
+_MAX_TOKEN_CACHE_SIZE = 100
+
+
+_MAX_RANDOM_EXPIRY_DELTA = 60
+
+
+
+
+_access_token_cache = {}
+
+
+
+
+
+_random_cache_expiry_delta = (
+    hash(time.time()) % (_MAX_RANDOM_EXPIRY_DELTA * 1000) / 1000.0)
+
+
 class Error(Exception):
   """Base error type."""
 
@@ -537,7 +559,7 @@
   Each application has an associated Google account. This function returns an
   OAuth2 access token corresponding to the running app. Access tokens are safe
   to cache and reuse until their expiry time as returned. This method will
-  do that using memcache.
+  do that using both an in-process cache and memcache.
 
   Args:
     scopes: The requested API scope string, or a list of strings.
@@ -548,16 +570,42 @@
 
 
 
-  memcache_key = _MEMCACHE_KEY_PREFIX + str(scopes)
+  cache_key = _MEMCACHE_KEY_PREFIX + str(scopes)
   if service_account_id:
-    memcache_key += ',%s' % service_account_id
-  memcache_value = memcache.get(memcache_key, namespace=_MEMCACHE_NAMESPACE)
+    cache_key += ',%s' % service_account_id
+
+
+  cached = _access_token_cache.get(cache_key)
+  if cached is not None:
+    access_token, expires_at = cached
+    safe_expiry = (expires_at - _TOKEN_EXPIRY_SAFETY_MARGIN -
+                   _random_cache_expiry_delta)
+    if time.time() < safe_expiry:
+      return access_token, expires_at
+
+
+  memcache_value = memcache.get(cache_key, namespace=_MEMCACHE_NAMESPACE)
   if memcache_value:
     access_token, expires_at = memcache_value
   else:
     access_token, expires_at = get_access_token_uncached(
         scopes, service_account_id=service_account_id)
 
-    memcache.add(memcache_key, (access_token, expires_at), expires_at - 300,
+
+
+
+    memcache_expiry = expires_at - _TOKEN_EXPIRY_SAFETY_MARGIN
+    memcache_expiry -= _MAX_RANDOM_EXPIRY_DELTA
+    memcache_expiry -= 10
+    memcache.add(cache_key, (access_token, expires_at),
+                 memcache_expiry,
                  namespace=_MEMCACHE_NAMESPACE)
+
+
+  if len(_access_token_cache) >= _MAX_TOKEN_CACHE_SIZE:
+
+
+    _access_token_cache.clear()
+  _access_token_cache[cache_key] = (access_token, expires_at)
+
   return access_token, expires_at
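
The hunk above layers a per-instance cache in front of memcache: a token is
served from _access_token_cache while it is still more than
_TOKEN_EXPIRY_SAFETY_MARGIN (plus a per-instance random delta that staggers
refreshes across instances) away from expiry; only on a miss does the code
fall through to memcache and, failing that, to the uncached RPC. Application
code is unchanged; a usage sketch with an example scope:

    from google.appengine.api import app_identity

    SCOPE = 'https://www.googleapis.com/auth/userinfo.email'

    # First call: in-process miss; falls through to memcache or the RPC.
    token, expires_at = app_identity.get_access_token(SCOPE)

    # Second call inside the safety margin: answered from the in-process
    # cache, with no memcache round trip.
    token, expires_at = app_identity.get_access_token(SCOPE)
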
diff --git a/google/appengine/api/app_identity/app_identity_keybased_stub.py b/google/appengine/api/app_identity/app_identity_keybased_stub.py
index fab7db6..267305c 100644
--- a/google/appengine/api/app_identity/app_identity_keybased_stub.py
+++ b/google/appengine/api/app_identity/app_identity_keybased_stub.py
@@ -33,6 +33,7 @@
 
 
 
+
 import base64
 import json
 import os
diff --git a/google/appengine/api/app_identity/app_identity_stub.py b/google/appengine/api/app_identity/app_identity_stub.py
index f50776c..64ae88a 100644
--- a/google/appengine/api/app_identity/app_identity_stub.py
+++ b/google/appengine/api/app_identity/app_identity_stub.py
@@ -34,6 +34,7 @@
 
 
 
+
 import binascii
 import time
 
diff --git a/google/appengine/api/app_logging.py b/google/appengine/api/app_logging.py
index b718d0f..d591da8 100644
--- a/google/appengine/api/app_logging.py
+++ b/google/appengine/api/app_logging.py
@@ -33,6 +33,7 @@
 
 
 
+
 import logging
 
 from google.appengine import runtime
diff --git a/google/appengine/api/appinfo.py b/google/appengine/api/appinfo.py
index c4a1e05..b531c3b 100644
--- a/google/appengine/api/appinfo.py
+++ b/google/appengine/api/appinfo.py
@@ -38,6 +38,7 @@
 
 
 
+
 import logging
 import os
 import re
@@ -232,7 +233,11 @@
 BASIC_SCALING = 'basic_scaling'
 VM = 'vm'
 VM_SETTINGS = 'vm_settings'
+BETA_SETTINGS = 'beta_settings'
 VM_HEALTH_CHECK = 'vm_health_check'
+HEALTH_CHECK = 'health_check'
+RESOURCES = 'resources'
+NETWORK = 'network'
 VERSION = 'version'
 MAJOR_VERSION = 'major_version'
 MINOR_VERSION = 'minor_version'
@@ -313,6 +318,14 @@
 HOST = 'host'
 
 
+CPU = 'cpu'
+MEMORY_GB = 'memory_gb'
+
+
+FORWARDED_PORTS = 'forwarded_ports'
+INSTANCE_TAG = 'instance_tag'
+
+
 class _VersionedLibrary(object):
   """A versioned library supported by App Engine."""
 
@@ -1331,7 +1344,7 @@
 class VmSettings(validation.ValidatedDict):
   """Class for VM settings.
 
-  We don't validate these further because the feature is in flux.
+  We don't validate these further here.  They're validated in Olympus.
   """
 
   KEY_VALIDATOR = validation.Regex('[a-zA-Z_][a-zA-Z0-9_]*')
@@ -1348,6 +1361,20 @@
     return VmSettings(**result_vm_settings) if result_vm_settings else None
 
 
+class BetaSettings(VmSettings):
+  """Class for Beta (internal or unreleased) settings.
+
+  This class is meant to replace VmSettings eventually.
+
+  We don't validate these further here.  They're validated in Olympus.
+  """
+
+  @classmethod
+  def Merge(cls, beta_settings_one, beta_settings_two):
+    merged = VmSettings.Merge(beta_settings_one, beta_settings_two)
+    return BetaSettings(**merged.ToDict()) if merged else None
+
+
 class EnvironmentVariables(validation.ValidatedDict):
   """Class representing a mapping of environment variable key value pairs."""
 
@@ -1391,11 +1418,13 @@
       appyaml.vm_settings = VmSettings()
 
 
+
     if runtime == 'dart' or runtime == 'contrib-dart':
       runtime = 'dart'
       appyaml.vm_settings['has_docker_image'] = True
 
 
+
     appyaml.vm_settings['vm_runtime'] = runtime
     appyaml.runtime = 'vm'
   else:
@@ -1421,8 +1450,24 @@
   if appyaml.vm:
     if not appyaml.vm_settings:
       appyaml.vm_settings = VmSettings()
+
     if 'vm_runtime' not in appyaml.vm_settings:
       appyaml = VmSafeSetRuntime(appyaml, appyaml.runtime)
+
+
+    if hasattr(appyaml, 'beta_settings') and appyaml.beta_settings:
+
+
+
+      if 'vm_runtime' not in appyaml.beta_settings:
+
+        appyaml.beta_settings['vm_runtime'] = appyaml.vm_settings[
+            'vm_runtime']
+      if ('has_docker_image' not in appyaml.beta_settings and
+          'has_docker_image' in appyaml.vm_settings):
+        appyaml.beta_settings['has_docker_image'] = appyaml.vm_settings[
+            'has_docker_image']
+
   return appyaml
 
 
@@ -1439,6 +1484,41 @@
       HOST: validation.Optional(validation.TYPE_STR)}
 
 
+class HealthCheck(VmHealthCheck):
+  """Class representing the health check configuration.
+
+  This class is meant to replace VmHealthCheck eventually.
+  """
+  pass
+
+
+class Resources(validation.Validated):
+  """Class representing the configuration of VM resources."""
+
+
+
+
+
+  ATTRIBUTES = {
+      CPU: validation.Optional(validation.TYPE_FLOAT, default=.5),
+      MEMORY_GB: validation.Optional(validation.TYPE_FLOAT, default=1.3)
+  }
+
+
+class Network(validation.Validated):
+  """Class representing the VM network configuration."""
+
+  ATTRIBUTES = {
+
+      FORWARDED_PORTS: validation.Optional(validation.Repeated(validation.Regex(
+          '[0-9]+(:[0-9]+)?'))),
+
+
+      INSTANCE_TAG: validation.Optional(validation.Regex(
+          r'^[a-z\d]([a-z\d-]{0,61}[a-z\d])?$'))
+  }
+
+
 class AppInclude(validation.Validated):
   """Class representing the contents of an included app.yaml file.
 
@@ -1456,6 +1536,7 @@
       MANUAL_SCALING: validation.Optional(ManualScaling),
       VM: validation.Optional(bool),
       VM_SETTINGS: validation.Optional(VmSettings),
+      BETA_SETTINGS: validation.Optional(BetaSettings),
       ENV_VARIABLES: validation.Optional(EnvironmentVariables),
       SKIP_FILES: validation.RegexStr(default=SKIP_NO_FILES),
 
@@ -1516,6 +1597,11 @@
                                        two.vm_settings)
 
 
+    if hasattr(one, 'beta_settings'):
+      one.beta_settings = BetaSettings.Merge(one.beta_settings,
+                                             two.beta_settings)
+
+
 
     one.env_variables = EnvironmentVariables.Merge(one.env_variables,
                                                    two.env_variables)
@@ -1647,7 +1733,11 @@
       BASIC_SCALING: validation.Optional(BasicScaling),
       VM: validation.Optional(bool),
       VM_SETTINGS: validation.Optional(VmSettings),
+      BETA_SETTINGS: validation.Optional(BetaSettings),
       VM_HEALTH_CHECK: validation.Optional(VmHealthCheck),
+      HEALTH_CHECK: validation.Optional(HealthCheck),
+      RESOURCES: validation.Optional(Resources),
+      NETWORK: validation.Optional(Network),
       BUILTINS: validation.Optional(validation.Repeated(BuiltinHandler)),
       INCLUDES: validation.Optional(validation.Type(list)),
       HANDLERS: validation.Optional(validation.Repeated(URLMap), default=[]),
@@ -1739,8 +1829,12 @@
     if self.libraries:
       vm_runtime_python27 = (
           self.runtime == 'vm' and
-          hasattr(self, 'vm_settings') and
-          self.vm_settings['vm_runtime'] == 'python27')
+          (hasattr(self, 'vm_settings') and
+           self.vm_settings and
+           self.vm_settings['vm_runtime'] == 'python27') or
+          (hasattr(self, 'beta_settings') and
+           self.beta_settings and
+           self.beta_settings['vm_runtime'] == 'python27'))
       if not self._skip_runtime_checks and not (
           vm_runtime_python27 or self.runtime == 'python27'):
         raise appinfo_errors.RuntimeDoesNotSupportLibraries(
@@ -1873,12 +1967,15 @@
     """Returns the app's runtime, resolving VMs to the underlying vm_runtime.
 
     Returns:
-      The effective runtime: the value of vm_settings.vm_runtime if runtime is
-      "vm", or runtime otherwise.
+      The effective runtime: the value of beta/vm_settings.vm_runtime if
+      runtime is "vm", or runtime otherwise.
     """
     if (self.runtime == 'vm' and hasattr(self, 'vm_settings')
         and self.vm_settings is not None):
       return self.vm_settings.get('vm_runtime')
+    if (self.runtime == 'vm' and hasattr(self, 'beta_settings')
+        and self.beta_settings is not None):
+      return self.beta_settings.get('vm_runtime')
     return self.runtime
 
 
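The appinfo changes above promote beta_settings, health_check, resources and
network to first-class, validated app.yaml keys. A rough sketch of what the
new validators accept, constructing them directly (in practice they are
populated from app.yaml; the values here are illustrative):

    from google.appengine.api import appinfo

    # Defaults mirror ATTRIBUTES above: cpu=.5, memory_gb=1.3.
    resources = appinfo.Resources(cpu=1.0, memory_gb=2.0)

    # Each forwarded_ports entry must match '[0-9]+(:[0-9]+)?', and
    # instance_tag must be a lowercase GCE-style tag.
    network = appinfo.Network(forwarded_ports=['8080', '2222:22'],
                              instance_tag='frontend')
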
diff --git a/google/appengine/api/appinfo_errors.py b/google/appengine/api/appinfo_errors.py
index f378fb0..1ad1a42 100644
--- a/google/appengine/api/appinfo_errors.py
+++ b/google/appengine/api/appinfo_errors.py
@@ -30,6 +30,7 @@
 
 
 
+
 class Error(Exception):
   """Base datastore AppInfo type."""
 
diff --git a/google/appengine/api/appinfo_includes.py b/google/appengine/api/appinfo_includes.py
index ab80db0..da8fd8e 100644
--- a/google/appengine/api/appinfo_includes.py
+++ b/google/appengine/api/appinfo_includes.py
@@ -31,6 +31,7 @@
 
 
 
+
 import logging
 import os
 
diff --git a/google/appengine/api/backendinfo.py b/google/appengine/api/backendinfo.py
index 935e465..285bf9d 100644
--- a/google/appengine/api/backendinfo.py
+++ b/google/appengine/api/backendinfo.py
@@ -29,6 +29,7 @@
 
 
 
+
 import os
 import yaml
 from yaml import representer
diff --git a/google/appengine/api/backends/backends.py b/google/appengine/api/backends/backends.py
index a76b5a0..e3b2379 100644
--- a/google/appengine/api/backends/backends.py
+++ b/google/appengine/api/backends/backends.py
@@ -33,6 +33,7 @@
 
 
 
+
 import logging
 import os
 import re
diff --git a/google/appengine/api/background_thread/background_thread.py b/google/appengine/api/background_thread/background_thread.py
index b188775..580d736 100644
--- a/google/appengine/api/background_thread/background_thread.py
+++ b/google/appengine/api/background_thread/background_thread.py
@@ -28,6 +28,7 @@
 """
 
 
+
 __all__ = ['start_new_background_thread',
            'BackgroundThread',
            'Error',
diff --git a/google/appengine/api/blobstore/blobstore.py b/google/appengine/api/blobstore/blobstore.py
index a435122..2b6d553 100644
--- a/google/appengine/api/blobstore/blobstore.py
+++ b/google/appengine/api/blobstore/blobstore.py
@@ -31,6 +31,7 @@
 
 
 
+
 import datetime
 import time
 
diff --git a/google/appengine/api/blobstore/blobstore_stub.py b/google/appengine/api/blobstore/blobstore_stub.py
index 5c0ef1f..9a3ab55 100644
--- a/google/appengine/api/blobstore/blobstore_stub.py
+++ b/google/appengine/api/blobstore/blobstore_stub.py
@@ -33,6 +33,7 @@
 
 
 
+
 import base64
 import os
 import time
diff --git a/google/appengine/api/blobstore/dict_blob_storage.py b/google/appengine/api/blobstore/dict_blob_storage.py
index da03692..b40079f 100644
--- a/google/appengine/api/blobstore/dict_blob_storage.py
+++ b/google/appengine/api/blobstore/dict_blob_storage.py
@@ -33,6 +33,7 @@
 
 
 
+
 import StringIO
 
 from google.appengine.api import blobstore
diff --git a/google/appengine/api/blobstore/file_blob_storage.py b/google/appengine/api/blobstore/file_blob_storage.py
index 391faea..a0edd82 100644
--- a/google/appengine/api/blobstore/file_blob_storage.py
+++ b/google/appengine/api/blobstore/file_blob_storage.py
@@ -33,6 +33,7 @@
 
 
 
+
 import errno
 import os
 
diff --git a/google/appengine/api/capabilities/__init__.py b/google/appengine/api/capabilities/__init__.py
index a44eacd..1aa9b10 100644
--- a/google/appengine/api/capabilities/__init__.py
+++ b/google/appengine/api/capabilities/__init__.py
@@ -55,6 +55,7 @@
 
 
 
+
 import warnings
 
 from google.appengine.api.capabilities import capability_service_pb
diff --git a/google/appengine/api/capabilities/capability_stub.py b/google/appengine/api/capabilities/capability_stub.py
index bfd5da1..0dffe93 100644
--- a/google/appengine/api/capabilities/capability_stub.py
+++ b/google/appengine/api/capabilities/capability_stub.py
@@ -26,6 +26,7 @@
 
 
 
+
 from google.appengine.api import apiproxy_stub
 from google.appengine.api import capabilities
 
diff --git a/google/appengine/api/channel/channel.py b/google/appengine/api/channel/channel.py
index 2758216..cdd0d46 100644
--- a/google/appengine/api/channel/channel.py
+++ b/google/appengine/api/channel/channel.py
@@ -32,6 +32,7 @@
 
 
 
+
 import os
 
 from google.appengine.api import api_base_pb
diff --git a/google/appengine/api/channel/channel_service_stub.py b/google/appengine/api/channel/channel_service_stub.py
index 533064b..c9cc512 100644
--- a/google/appengine/api/channel/channel_service_stub.py
+++ b/google/appengine/api/channel/channel_service_stub.py
@@ -27,6 +27,7 @@
 
 
 
+
 import hashlib
 import logging
 import random
diff --git a/google/appengine/api/conf.py b/google/appengine/api/conf.py
index 56a755d..8921338 100644
--- a/google/appengine/api/conf.py
+++ b/google/appengine/api/conf.py
@@ -22,6 +22,7 @@
 """
 
 
+
 import logging
 import os
 import re
diff --git a/google/appengine/api/croninfo.py b/google/appengine/api/croninfo.py
index 22d228c..949a2a9 100644
--- a/google/appengine/api/croninfo.py
+++ b/google/appengine/api/croninfo.py
@@ -30,6 +30,7 @@
 
 
 
+
 import logging
 import sys
 import traceback
diff --git a/google/appengine/api/datastore.py b/google/appengine/api/datastore.py
index 0324a8f..f01e9bb 100644
--- a/google/appengine/api/datastore.py
+++ b/google/appengine/api/datastore.py
@@ -44,6 +44,7 @@
 
 
 
+
 import heapq
 import itertools
 import logging
diff --git a/google/appengine/api/datastore_admin.py b/google/appengine/api/datastore_admin.py
index c43da53..d518495 100644
--- a/google/appengine/api/datastore_admin.py
+++ b/google/appengine/api/datastore_admin.py
@@ -28,6 +28,7 @@
 
 
 
+
 from google.appengine.api import api_base_pb
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.api import datastore
diff --git a/google/appengine/api/datastore_entities.py b/google/appengine/api/datastore_entities.py
index 6e2fcb1..26fabad 100644
--- a/google/appengine/api/datastore_entities.py
+++ b/google/appengine/api/datastore_entities.py
@@ -35,6 +35,7 @@
 
 
 
+
 import types
 import urlparse
 from xml.sax import saxutils
diff --git a/google/appengine/api/datastore_errors.py b/google/appengine/api/datastore_errors.py
index 54bdffc..a285af5 100644
--- a/google/appengine/api/datastore_errors.py
+++ b/google/appengine/api/datastore_errors.py
@@ -32,6 +32,7 @@
 
 
 
+
 class Error(Exception):
   """Base datastore error type.
   """
diff --git a/google/appengine/api/datastore_file_stub.py b/google/appengine/api/datastore_file_stub.py
index 586a05e..1e326a1 100644
--- a/google/appengine/api/datastore_file_stub.py
+++ b/google/appengine/api/datastore_file_stub.py
@@ -40,6 +40,7 @@
 
 
 
+
 import collections
 import logging
 import os
diff --git a/google/appengine/api/datastore_types.py b/google/appengine/api/datastore_types.py
index a349aab..e183880 100644
--- a/google/appengine/api/datastore_types.py
+++ b/google/appengine/api/datastore_types.py
@@ -43,6 +43,7 @@
 
 
 
+
 import base64
 import calendar
 import datetime
diff --git a/google/appengine/api/dispatchinfo.py b/google/appengine/api/dispatchinfo.py
index 0336c4d..9088500 100644
--- a/google/appengine/api/dispatchinfo.py
+++ b/google/appengine/api/dispatchinfo.py
@@ -27,6 +27,7 @@
 
 
 
+
 import re
 
 from google.appengine.api import appinfo
diff --git a/google/appengine/api/dosinfo.py b/google/appengine/api/dosinfo.py
index 22637ce..c7d2f5f 100644
--- a/google/appengine/api/dosinfo.py
+++ b/google/appengine/api/dosinfo.py
@@ -29,6 +29,7 @@
 
 
 
+
 import re
 import google
 import ipaddr
diff --git a/google/appengine/api/files/__init__.py b/google/appengine/api/files/__init__.py
index acd0c9d..ebafe57 100644
--- a/google/appengine/api/files/__init__.py
+++ b/google/appengine/api/files/__init__.py
@@ -25,6 +25,7 @@
 """
 
 
+
 from file import *
 import blobstore
 import gs
diff --git a/google/appengine/api/files/blobstore.py b/google/appengine/api/files/blobstore.py
index 65d5ea1..72819b9 100644
--- a/google/appengine/api/files/blobstore.py
+++ b/google/appengine/api/files/blobstore.py
@@ -28,6 +28,7 @@
 from __future__ import with_statement
 
 
+
 __all__ = ['create', 'get_blob_key', 'get_file_name']
 
 import hashlib
diff --git a/google/appengine/api/files/crc32c.py b/google/appengine/api/files/crc32c.py
index 4426849..1e90a4b 100644
--- a/google/appengine/api/files/crc32c.py
+++ b/google/appengine/api/files/crc32c.py
@@ -30,6 +30,7 @@
 
 
 
+
 import array
 
 CRC_TABLE = (
diff --git a/google/appengine/api/files/file.py b/google/appengine/api/files/file.py
index 1646d8d..5ee7c3d 100644
--- a/google/appengine/api/files/file.py
+++ b/google/appengine/api/files/file.py
@@ -26,6 +26,7 @@
 from __future__ import with_statement
 
 
+
 __all__ = [
            'ApiTemporaryUnavailableError',
            'BLOBSTORE_FILESYSTEM',
diff --git a/google/appengine/api/files/file_service_stub.py b/google/appengine/api/files/file_service_stub.py
index 07334bb..c8cdf75 100644
--- a/google/appengine/api/files/file_service_stub.py
+++ b/google/appengine/api/files/file_service_stub.py
@@ -21,6 +21,7 @@
 """Stubs for File service."""
 
 
+
 import base64
 import datetime
 import hashlib
diff --git a/google/appengine/api/files/gs.py b/google/appengine/api/files/gs.py
index 9eb40a4..69340e2 100644
--- a/google/appengine/api/files/gs.py
+++ b/google/appengine/api/files/gs.py
@@ -33,6 +33,7 @@
 from __future__ import with_statement
 
 
+
 __all__ = ['create']
 
 import os
diff --git a/google/appengine/api/files/records.py b/google/appengine/api/files/records.py
index 0808931..d06eff0 100644
--- a/google/appengine/api/files/records.py
+++ b/google/appengine/api/files/records.py
@@ -86,6 +86,7 @@
 """
 
 
+
 import logging
 import struct
 
diff --git a/google/appengine/api/files/shuffler.py b/google/appengine/api/files/shuffler.py
index 5f9a86d..2ba0a7f 100644
--- a/google/appengine/api/files/shuffler.py
+++ b/google/appengine/api/files/shuffler.py
@@ -26,6 +26,7 @@
 Files API Shuffler interface"""
 
 
+
 import logging
 
 from google.appengine.api.files import file as files
diff --git a/google/appengine/api/files/testutil.py b/google/appengine/api/files/testutil.py
index 696999c..09a4ab6 100644
--- a/google/appengine/api/files/testutil.py
+++ b/google/appengine/api/files/testutil.py
@@ -26,6 +26,7 @@
 Testing utils for writing tests involving Files API."""
 
 
+
 __all__ = ['TestFileServiceStub']
 
 
diff --git a/google/appengine/api/images/__init__.py b/google/appengine/api/images/__init__.py
index 9320643..7bd2345 100644
--- a/google/appengine/api/images/__init__.py
+++ b/google/appengine/api/images/__init__.py
@@ -37,6 +37,7 @@
 
 
 
+
 import struct
 
 try:
diff --git a/google/appengine/api/labs/taskqueue/__init__.py b/google/appengine/api/labs/taskqueue/__init__.py
index 1e8fb50..caa2499 100644
--- a/google/appengine/api/labs/taskqueue/__init__.py
+++ b/google/appengine/api/labs/taskqueue/__init__.py
@@ -26,6 +26,7 @@
 
 
 
+
 import os
 import warnings
 
diff --git a/google/appengine/api/labs/taskqueue/taskqueue.py b/google/appengine/api/labs/taskqueue/taskqueue.py
index 1dcb061..14b7a72 100644
--- a/google/appengine/api/labs/taskqueue/taskqueue.py
+++ b/google/appengine/api/labs/taskqueue/taskqueue.py
@@ -37,6 +37,8 @@
 
 
 
+
+
 __all__ = [
 
     'BadTaskStateError', 'BadTransactionState', 'BadTransactionStateError',
diff --git a/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py b/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
index 803f373..071e250 100644
--- a/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
+++ b/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
@@ -24,6 +24,7 @@
 """
 
 
+
 from google.appengine.api.taskqueue import taskqueue_service_pb
 
 
diff --git a/google/appengine/api/labs/taskqueue/taskqueue_stub.py b/google/appengine/api/labs/taskqueue/taskqueue_stub.py
index 7fafe87..2037645 100644
--- a/google/appengine/api/labs/taskqueue/taskqueue_stub.py
+++ b/google/appengine/api/labs/taskqueue/taskqueue_stub.py
@@ -37,6 +37,7 @@
 
 
 
+
 import StringIO
 import base64
 import bisect
diff --git a/google/appengine/api/lib_config.py b/google/appengine/api/lib_config.py
index 19e4671..5972f58 100644
--- a/google/appengine/api/lib_config.py
+++ b/google/appengine/api/lib_config.py
@@ -73,6 +73,7 @@
 """
 
 
+
 __all__ = ['DEFAULT_MODNAME',
            'LibConfigRegistry',
            'ConfigHandle',
diff --git a/google/appengine/api/logservice/logservice.py b/google/appengine/api/logservice/logservice.py
index 51da582..5107b0d 100644
--- a/google/appengine/api/logservice/logservice.py
+++ b/google/appengine/api/logservice/logservice.py
@@ -29,6 +29,7 @@
 
 from __future__ import with_statement
 import base64
+import collections
 import cStringIO
 import logging
 import os
@@ -121,15 +122,11 @@
     return self.__last_end_time
 
 
-class LogsBuffer(object):
+class LogsBufferNew(object):
   """Threadsafe buffer for storing and periodically flushing app logs."""
 
-  _MAX_FLUSH_SIZE = 1000 * 1000
-  _MAX_LINE_SIZE = _MAX_FLUSH_SIZE
-  assert _MAX_LINE_SIZE <= _MAX_FLUSH_SIZE
-
   def __init__(self, stream=None, stderr=False):
-    """Initializes the buffer, which wraps the given stream or sys.stderr.
+    """Initializes the buffer, which wraps an internal buffer or sys.stderr.
 
     The state of the LogsBuffer is protected by a separate lock.  The lock is
     acquired before any variables are mutated or accessed, and released
@@ -138,30 +135,49 @@
     'unlock()' calls have been performed.
 
     Args:
-      stream: A file-like object to store logs. Defaults to a cStringIO object.
+      stream: Unused. Left there for backward compatibility.
       stderr: If specified, use sys.stderr as the underlying stream.
+
+    Raises:
+      ValueError: if stream is provided.
     """
+    if stream is not None:
+      raise ValueError('underlying streams are no longer supported')
+
+
+
+
+    self._buffer = collections.deque()
     self._stderr = stderr
-    if self._stderr:
-      assert stream is None
-    else:
-      self._stream = stream or cStringIO.StringIO()
     self._lock = threading.RLock()
     self._reset()
 
+  _MAX_FLUSH_SIZE = 1000 * 1000
+  _MAX_LINE_SIZE = _MAX_FLUSH_SIZE
+
+  @staticmethod
+  def _truncate(line, max_length=_MAX_LINE_SIZE):
+    """Truncates a potentially long log down to a specified maximum length."""
+    if len(line) > max_length:
+      original_length = len(line)
+      suffix = '...(length %d)' % original_length
+      line = line[:max_length - len(suffix)] + suffix
+    return line
+
   def stream(self):
     """Returns the underlying file-like object used to buffer logs."""
     if self._stderr:
 
 
       return sys.stderr
-    else:
-      return self._stream
+
+
+    return cStringIO.StringIO(self.contents())
 
   def lines(self):
     """Returns the number of log lines currently buffered."""
     with self._lock:
-      return self._lines
+      return len(self._buffer)
 
   def bytes(self):
     """Returns the size of the log buffer, in bytes."""
@@ -185,12 +201,7 @@
 
   def _contents(self):
     """Internal version of contents() with no locking."""
-    try:
-      return self.stream().getvalue()
-    except AttributeError:
-
-
-      return ''
+    return ''.join(self._buffer)
 
   def reset(self):
     """Resets the buffer state, without clearing the underlying stream."""
@@ -199,9 +210,7 @@
 
   def _reset(self):
     """Internal version of reset() with no locking."""
-    contents = self._contents()
-    self._bytes = len(contents)
-    self._lines = len(contents.split('\n')) - 1
+    self._bytes = sum(len(line) for line in self._buffer)
     self._flush_time = time.time()
     self._request = logsutil.RequestID()
 
@@ -212,8 +221,7 @@
 
   def _clear(self):
     """Internal version of clear() with no locking."""
-    if self._bytes > 0:
-      self.stream().truncate(0)
+    self._buffer.clear()
     self._reset()
 
   def close(self):
@@ -224,11 +232,12 @@
   def _close(self):
     """Internal version of close() with no locking."""
     self._flush()
-    self.stream().close()
 
   def parse_logs(self):
     """Parse the contents of the buffer and return an array of log lines."""
-    return logsutil.ParseLogs(self.contents())
+    without_newlines = (line[:-1] if line[-1] == '\n' else line
+                        for line in self._buffer)
+    return [logsutil.ParseLogEntry(line) for line in without_newlines if line]
 
   def write(self, line):
     """Writes a line to the logs buffer."""
@@ -240,27 +249,34 @@
     for line in seq:
       self.write(line)
 
+  def _put_line(self, line):
+    """Write the line in the internal buffer for the next flush."""
+    self._buffer.append(line)
+    self._bytes += len(line)
+
+  def _get_line(self):
+    """Get and deque the oldest log line from the internal buffer."""
+    line = self._buffer.popleft()
+    self._bytes -= len(line)
+    return line
+
+  def _rollback_line(self, line):
+    """Write back the line as the oldest in the internal buffer."""
+    self._buffer.appendleft(line)
+    self._bytes += len(line)
+
   def _write(self, line):
     """Writes a line to the logs buffer."""
     if self._request != logsutil.RequestID():
 
 
       self._reset()
-    self.stream().write(line)
-
-    self._lines += 1
-    self._bytes += len(line)
+    if self._stderr:
+      sys.stderr.write(line)
+    else:
+      self._put_line(line)
     self._autoflush()
 
-  @staticmethod
-  def _truncate(line, max_length=_MAX_LINE_SIZE):
-    """Truncates a potentially long log down to a specified maximum length."""
-    if len(line) > max_length:
-      original_length = len(line)
-      suffix = '...(length %d)' % original_length
-      line = line[:max_length - len(suffix)] + suffix
-    return line
-
   def flush(self):
     """Flushes the contents of the logs buffer.
 
@@ -275,36 +291,67 @@
 
   def _flush(self):
     """Internal version of flush() with no locking."""
-    logs = self.parse_logs()
-    self._clear()
+    if self._stderr:
+      sys.stderr.flush()
+      return
 
-    while True:
-      group = log_service_pb.UserAppLogGroup()
-      byte_size = 0
-      n = 0
-      for timestamp_usec, level, message in logs:
+    lines_to_be_flushed = []
+    try:
+      while True:
+        group = log_service_pb.UserAppLogGroup()
+        bytes_left = LogsBufferNew._MAX_FLUSH_SIZE
+        while self._buffer:
+          bare_line = self._get_line()
+
+          timestamp_usec, level, message = logsutil.ParseLogEntry(bare_line)
+
+          if message[-1] == '\n':
+            message = message[:-1]
+
+          if not message:
+            continue
 
 
-        message = self._truncate(message, LogsBuffer._MAX_LINE_SIZE)
+
+          message = LogsBufferNew._truncate(
+              message, LogsBufferNew._MAX_LINE_SIZE)
 
 
-        if byte_size + len(message) > LogsBuffer._MAX_FLUSH_SIZE:
+          if len(message) > bytes_left:
+            self._rollback_line(bare_line)
+            break
+
+          lines_to_be_flushed.append(bare_line)
+
+          line = group.add_log_line()
+          line.set_timestamp_usec(timestamp_usec)
+          line.set_level(level)
+          line.set_message(message)
+
+          bytes_left -= 1 + group.lengthString(line.ByteSize())
+
+        request = log_service_pb.FlushRequest()
+        request.set_logs(group.Encode())
+        response = api_base_pb.VoidProto()
+        apiproxy_stub_map.MakeSyncCall('logservice', 'Flush', request, response)
+        if not self._buffer:
           break
-        line = group.add_log_line()
-        line.set_timestamp_usec(timestamp_usec)
-        line.set_level(level)
-        line.set_message(message)
-        byte_size += 1 + group.lengthString(line.ByteSize())
-        n += 1
-      assert n > 0 or not logs
-      logs = logs[n:]
+    except apiproxy_errors.CancelledError:
 
-      request = log_service_pb.FlushRequest()
-      request.set_logs(group.Encode())
-      response = api_base_pb.VoidProto()
-      apiproxy_stub_map.MakeSyncCall('logservice', 'Flush', request, response)
-      if not logs:
-        break
+
+      lines_to_be_flushed.reverse()
+      self._buffer.extendleft(lines_to_be_flushed)
+    except Exception, e:
+      lines_to_be_flushed.reverse()
+      self._buffer.extendleft(lines_to_be_flushed)
+      if not self._stderr:
+        line = '-' * 80
+        msg = 'ERROR: Could not flush to log_service (%s)\n%s\n%s\n%s\n'
+        sys.stderr.write(msg % (str(e), line, '\n'.join(self._buffer), line))
+      self._clear()
+      raise
+    else:
+      self._clear()
 
   def autoflush(self):
     """Flushes the buffer if certain conditions have been met."""
@@ -326,10 +373,6 @@
     return AUTOFLUSH_ENABLED
 
 
-
-_global_buffer = LogsBuffer(stderr=True)
-
-
 def logs_buffer():
   """Returns the LogsBuffer used by the current request."""
 
@@ -1004,3 +1047,222 @@
     request.set_count(batch_size)
 
   return _LogQueryResult(request, timeout=timeout)
+
+
+
+
+
+
+
+
+
+class LogsBufferOld(object):
+  """Threadsafe buffer for storing and periodically flushing app logs."""
+
+  _MAX_FLUSH_SIZE = 1000 * 1000
+  _MAX_LINE_SIZE = _MAX_FLUSH_SIZE
+  assert _MAX_LINE_SIZE <= _MAX_FLUSH_SIZE
+
+  def __init__(self, stream=None, stderr=False):
+    """Initializes the buffer, which wraps the given stream or sys.stderr.
+
+    The state of the LogsBuffer is protected by a separate lock.  The lock is
+    acquired before any variables are mutated or accessed, and released
+    afterward.  A recursive lock is used so that a single thread can acquire the
+    lock multiple times, and release it only when an identical number of
+    'unlock()' calls have been performed.
+
+    Args:
+      stream: A file-like object to store logs. Defaults to a cStringIO object.
+      stderr: If specified, use sys.stderr as the underlying stream.
+    """
+    self._stderr = stderr
+    if self._stderr:
+      assert stream is None
+    else:
+      self._stream = stream or cStringIO.StringIO()
+    self._lock = threading.RLock()
+    self._reset()
+
+  def stream(self):
+    """Returns the underlying file-like object used to buffer logs."""
+    if self._stderr:
+
+
+      return sys.stderr
+    else:
+      return self._stream
+
+  def lines(self):
+    """Returns the number of log lines currently buffered."""
+    with self._lock:
+      return self._lines
+
+  def bytes(self):
+    """Returns the size of the log buffer, in bytes."""
+    with self._lock:
+      return self._bytes
+
+  def age(self):
+    """Returns the number of seconds since the log buffer was flushed."""
+    with self._lock:
+      return time.time() - self._flush_time
+
+  def flush_time(self):
+    """Returns last time that the log buffer was flushed."""
+    with self._lock:
+      return self._flush_time
+
+  def contents(self):
+    """Returns the contents of the logs buffer."""
+    with self._lock:
+      return self._contents()
+
+  def _contents(self):
+    """Internal version of contents() with no locking."""
+    try:
+      return self.stream().getvalue()
+    except AttributeError:
+
+
+      return ''
+
+  def reset(self):
+    """Resets the buffer state, without clearing the underlying stream."""
+    with self._lock:
+      return self._reset()
+
+  def _reset(self):
+    """Internal version of reset() with no locking."""
+    contents = self._contents()
+    self._bytes = len(contents)
+    self._lines = len(contents.split('\n')) - 1
+    self._flush_time = time.time()
+    self._request = logsutil.RequestID()
+
+  def clear(self):
+    """Clears the contents of the logs buffer, and resets autoflush state."""
+    with self._lock:
+      return self._clear()
+
+  def _clear(self):
+    """Internal version of clear() with no locking."""
+    if self._bytes > 0:
+      self.stream().truncate(0)
+    self._reset()
+
+  def close(self):
+    """Closes the underlying stream, flushing the current contents."""
+    with self._lock:
+      return self._close()
+
+  def _close(self):
+    """Internal version of close() with no locking."""
+    self._flush()
+    self.stream().close()
+
+  def parse_logs(self):
+    """Parse the contents of the buffer and return an array of log lines."""
+    return logsutil.ParseLogs(self.contents())
+
+  def write(self, line):
+    """Writes a line to the logs buffer."""
+    with self._lock:
+      return self._write(line)
+
+  def writelines(self, seq):
+    """Writes each line in the given sequence to the logs buffer."""
+    for line in seq:
+      self.write(line)
+
+  def _write(self, line):
+    """Writes a line to the logs buffer."""
+    if self._request != logsutil.RequestID():
+
+
+      self._reset()
+    self.stream().write(line)
+
+    self._lines += 1
+    self._bytes += len(line)
+    self._autoflush()
+
+  @staticmethod
+  def _truncate(line, max_length=_MAX_LINE_SIZE):
+    """Truncates a potentially long log down to a specified maximum length."""
+    if len(line) > max_length:
+      original_length = len(line)
+      suffix = '...(length %d)' % original_length
+      line = line[:max_length - len(suffix)] + suffix
+    return line
+
+  def flush(self):
+    """Flushes the contents of the logs buffer.
+
+    This method holds the buffer lock until the API call has finished to ensure
+    that flush calls are performed in the correct order, so that log messages
+    written during the flush call aren't dropped or accidentally wiped, and so
+    that the other buffer state variables (flush time, lines, bytes) are updated
+    synchronously with the flush.
+    """
+    with self._lock:
+      self._flush()
+
+  def _flush(self):
+    """Internal version of flush() with no locking."""
+    logs = self.parse_logs()
+    self._clear()
+
+    while True:
+      group = log_service_pb.UserAppLogGroup()
+      byte_size = 0
+      n = 0
+      for timestamp_usec, level, message in logs:
+
+
+        message = self._truncate(message, LogsBufferOld._MAX_LINE_SIZE)
+
+        if byte_size + len(message) > LogsBufferOld._MAX_FLUSH_SIZE:
+          break
+        line = group.add_log_line()
+        line.set_timestamp_usec(timestamp_usec)
+        line.set_level(level)
+        line.set_message(message)
+        byte_size += 1 + group.lengthString(line.ByteSize())
+        n += 1
+      assert n > 0 or not logs
+      logs = logs[n:]
+
+      request = log_service_pb.FlushRequest()
+      request.set_logs(group.Encode())
+      response = api_base_pb.VoidProto()
+      apiproxy_stub_map.MakeSyncCall('logservice', 'Flush', request, response)
+      if not logs:
+        break
+
+  def autoflush(self):
+    """Flushes the buffer if certain conditions have been met."""
+    with self._lock:
+      return self._autoflush()
+
+  def _autoflush(self):
+    """Internal version of autoflush() with no locking."""
+    if not self.autoflush_enabled():
+      return
+
+    if ((AUTOFLUSH_EVERY_SECONDS and self.age() >= AUTOFLUSH_EVERY_SECONDS) or
+        (AUTOFLUSH_EVERY_LINES and self.lines() >= AUTOFLUSH_EVERY_LINES) or
+        (AUTOFLUSH_EVERY_BYTES and self.bytes() >= AUTOFLUSH_EVERY_BYTES)):
+      self._flush()
+
+  def autoflush_enabled(self):
+    """Indicates if the buffer will periodically flush logs during a request."""
+    return AUTOFLUSH_ENABLED
+
+
+
+
+LogsBuffer = LogsBufferOld
+
+
+_global_buffer = LogsBuffer(stderr=True)
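
LogsBufferNew replaces the cStringIO stream with a collections.deque of whole
log lines, which makes the line count O(1) and, more importantly, lets a
failed Flush RPC put its lines back instead of dropping them. A toy sketch of
that pop/rollback batching pattern (simplified; not the SDK code itself):

    import collections

    MAX_FLUSH_SIZE = 1000 * 1000

    def flush_batches(buf, send):
        # buf: collections.deque of log lines; send: callable that may raise.
        while buf:
            batch = []
            bytes_left = MAX_FLUSH_SIZE
            while buf:
                line = buf.popleft()
                line = line[:MAX_FLUSH_SIZE]  # long lines are truncated first
                if len(line) > bytes_left:
                    buf.appendleft(line)      # does not fit: roll it back
                    break
                batch.append(line)
                bytes_left -= len(line)
            try:
                send(batch)
            except Exception:
                # Re-queue the unsent lines in their original order, echoing
                # the CancelledError handling in _flush() above.
                buf.extendleft(reversed(batch))
                raise
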
diff --git a/google/appengine/api/logservice/logsutil.py b/google/appengine/api/logservice/logsutil.py
index 493ef21..1d0b1f5 100644
--- a/google/appengine/api/logservice/logsutil.py
+++ b/google/appengine/api/logservice/logsutil.py
@@ -19,6 +19,7 @@
 """Utility methods for working with logs."""
 
 
+
 import os
 import time
 
diff --git a/google/appengine/api/mail.py b/google/appengine/api/mail.py
index 09ae32c..3191ee7 100644
--- a/google/appengine/api/mail.py
+++ b/google/appengine/api/mail.py
@@ -33,6 +33,7 @@
 
 
 
+
 import email
 from email import MIMEBase
 from email import MIMEMultipart
diff --git a/google/appengine/api/mail_errors.py b/google/appengine/api/mail_errors.py
index 2b488cf..5f22e16 100644
--- a/google/appengine/api/mail_errors.py
+++ b/google/appengine/api/mail_errors.py
@@ -21,6 +21,7 @@
 """Exceptions raised my mail API."""
 
 
+
 class Error(Exception):
   """Base Mail error type."""
 
diff --git a/google/appengine/api/mail_stub.py b/google/appengine/api/mail_stub.py
index 63239d2..8e6b261 100644
--- a/google/appengine/api/mail_stub.py
+++ b/google/appengine/api/mail_stub.py
@@ -29,6 +29,7 @@
 
 
 
+
 from email import encoders
 from email import MIMEBase
 from email import MIMEMultipart
diff --git a/google/appengine/api/memcache/__init__.py b/google/appengine/api/memcache/__init__.py
index e20515d..0677f04 100644
--- a/google/appengine/api/memcache/__init__.py
+++ b/google/appengine/api/memcache/__init__.py
@@ -31,6 +31,7 @@
 
 
 
+
 import cPickle
 import cStringIO
 import hashlib
diff --git a/google/appengine/api/memcache/memcache_stub.py b/google/appengine/api/memcache/memcache_stub.py
index e07cf6d..75facfe 100644
--- a/google/appengine/api/memcache/memcache_stub.py
+++ b/google/appengine/api/memcache/memcache_stub.py
@@ -26,6 +26,8 @@
 
 
 
+
+
 import logging
 import time
 
diff --git a/google/appengine/api/namespace_manager/namespace_manager.py b/google/appengine/api/namespace_manager/namespace_manager.py
index c6b46b3..d435e81 100644
--- a/google/appengine/api/namespace_manager/namespace_manager.py
+++ b/google/appengine/api/namespace_manager/namespace_manager.py
@@ -31,6 +31,7 @@
 
 
 
+
 import os
 import re
 import warnings
diff --git a/google/appengine/api/oauth/oauth_api.py b/google/appengine/api/oauth/oauth_api.py
index f0b730f..b46ff27 100644
--- a/google/appengine/api/oauth/oauth_api.py
+++ b/google/appengine/api/oauth/oauth_api.py
@@ -42,6 +42,7 @@
 
 
 
+
 import cPickle
 import os
 
diff --git a/google/appengine/api/pagespeedinfo.py b/google/appengine/api/pagespeedinfo.py
index e7cd234..73b0f82 100644
--- a/google/appengine/api/pagespeedinfo.py
+++ b/google/appengine/api/pagespeedinfo.py
@@ -30,6 +30,7 @@
 
 
 
+
 import google
 
 from google.appengine.api import validation
diff --git a/google/appengine/api/prospective_search/prospective_search.py b/google/appengine/api/prospective_search/prospective_search.py
index b336525..3eac3fe 100644
--- a/google/appengine/api/prospective_search/prospective_search.py
+++ b/google/appengine/api/prospective_search/prospective_search.py
@@ -45,6 +45,7 @@
 
 
 
+
 __all__ = ['get_document',
            'get_subscription',
            'list_subscriptions',
diff --git a/google/appengine/api/prospective_search/prospective_search_admin.py b/google/appengine/api/prospective_search/prospective_search_admin.py
index 0989536..052996d 100644
--- a/google/appengine/api/prospective_search/prospective_search_admin.py
+++ b/google/appengine/api/prospective_search/prospective_search_admin.py
@@ -32,6 +32,7 @@
 
 
 
+
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.api.prospective_search import prospective_search_pb
 
diff --git a/google/appengine/api/prospective_search/prospective_search_stub.py b/google/appengine/api/prospective_search/prospective_search_stub.py
index d887b0a..7e07da3 100644
--- a/google/appengine/api/prospective_search/prospective_search_stub.py
+++ b/google/appengine/api/prospective_search/prospective_search_stub.py
@@ -26,6 +26,7 @@
 
 
 
+
 import base64
 import bisect
 
diff --git a/google/appengine/api/queueinfo.py b/google/appengine/api/queueinfo.py
index 3be0cad..c7ba941 100644
--- a/google/appengine/api/queueinfo.py
+++ b/google/appengine/api/queueinfo.py
@@ -134,6 +134,7 @@
 
 
 
+
 from google.appengine.api import appinfo
 from google.appengine.api import validation
 from google.appengine.api import yaml_builder
diff --git a/google/appengine/api/quota.py b/google/appengine/api/quota.py
index 9fe6a86..746f36b 100644
--- a/google/appengine/api/quota.py
+++ b/google/appengine/api/quota.py
@@ -28,6 +28,7 @@
 
 
 
+
 import os
 import warnings
 
diff --git a/google/appengine/api/rdbms.py b/google/appengine/api/rdbms.py
index 5402d77..d1e13a8 100644
--- a/google/appengine/api/rdbms.py
+++ b/google/appengine/api/rdbms.py
@@ -29,6 +29,7 @@
 
 
 
+
 import logging
 
 from google.storage.speckle.python.api import rdbms_apiproxy
diff --git a/google/appengine/api/rdbms_mysqldb.py b/google/appengine/api/rdbms_mysqldb.py
index fac4a14..f1e2b58 100644
--- a/google/appengine/api/rdbms_mysqldb.py
+++ b/google/appengine/api/rdbms_mysqldb.py
@@ -29,6 +29,7 @@
 
 
 
+
 import logging
 import os
 
diff --git a/google/appengine/api/remote_socket/_remote_socket.py b/google/appengine/api/remote_socket/_remote_socket.py
index ecc5e8b..95c28bc 100644
--- a/google/appengine/api/remote_socket/_remote_socket.py
+++ b/google/appengine/api/remote_socket/_remote_socket.py
@@ -37,6 +37,7 @@
 
 
 
+
 import errno
 import os
 import re
diff --git a/google/appengine/api/remote_socket/_remote_socket_stub.py b/google/appengine/api/remote_socket/_remote_socket_stub.py
index 5a8e53b..c328847 100644
--- a/google/appengine/api/remote_socket/_remote_socket_stub.py
+++ b/google/appengine/api/remote_socket/_remote_socket_stub.py
@@ -22,6 +22,7 @@
 from __future__ import with_statement
 
 
+
 import binascii
 import errno
 import os
diff --git a/google/appengine/api/remote_socket/remote_socket_service_pb.py b/google/appengine/api/remote_socket/remote_socket_service_pb.py
index 91d8736..99d4121 100644
--- a/google/appengine/api/remote_socket/remote_socket_service_pb.py
+++ b/google/appengine/api/remote_socket/remote_socket_service_pb.py
@@ -665,6 +665,8 @@
   remote_ip_ = None
   has_app_id_ = 0
   app_id_ = ""
+  has_project_id_ = 0
+  project_id_ = 0
 
   def __init__(self, contents=None):
     self.socket_options_ = []
@@ -777,6 +779,19 @@
 
   def has_app_id(self): return self.has_app_id_
 
+  def project_id(self): return self.project_id_
+
+  def set_project_id(self, x):
+    self.has_project_id_ = 1
+    self.project_id_ = x
+
+  def clear_project_id(self):
+    if self.has_project_id_:
+      self.has_project_id_ = 0
+      self.project_id_ = 0
+
+  def has_project_id(self): return self.has_project_id_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -787,6 +802,7 @@
     if (x.has_listen_backlog()): self.set_listen_backlog(x.listen_backlog())
     if (x.has_remote_ip()): self.mutable_remote_ip().MergeFrom(x.remote_ip())
     if (x.has_app_id()): self.set_app_id(x.app_id())
+    if (x.has_project_id()): self.set_project_id(x.project_id())
 
   def Equals(self, x):
     if x is self: return 1
@@ -805,6 +821,8 @@
     if self.has_remote_ip_ and self.remote_ip_ != x.remote_ip_: return 0
     if self.has_app_id_ != x.has_app_id_: return 0
     if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+    if self.has_project_id_ != x.has_project_id_: return 0
+    if self.has_project_id_ and self.project_id_ != x.project_id_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -833,6 +851,7 @@
     if (self.has_listen_backlog_): n += 1 + self.lengthVarInt64(self.listen_backlog_)
     if (self.has_remote_ip_): n += 1 + self.lengthString(self.remote_ip_.ByteSize())
     if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
+    if (self.has_project_id_): n += 1 + self.lengthVarInt64(self.project_id_)
     return n + 2
 
   def ByteSizePartial(self):
@@ -849,6 +868,7 @@
     if (self.has_listen_backlog_): n += 1 + self.lengthVarInt64(self.listen_backlog_)
     if (self.has_remote_ip_): n += 1 + self.lengthString(self.remote_ip_.ByteSizePartial())
     if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
+    if (self.has_project_id_): n += 1 + self.lengthVarInt64(self.project_id_)
     return n
 
   def Clear(self):
@@ -859,6 +879,7 @@
     self.clear_listen_backlog()
     self.clear_remote_ip()
     self.clear_app_id()
+    self.clear_project_id()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(8)
@@ -883,6 +904,9 @@
     if (self.has_app_id_):
       out.putVarInt32(74)
       out.putPrefixedString(self.app_id_)
+    if (self.has_project_id_):
+      out.putVarInt32(80)
+      out.putVarInt64(self.project_id_)
 
   def OutputPartial(self, out):
     if (self.has_family_):
@@ -909,6 +933,9 @@
     if (self.has_app_id_):
       out.putVarInt32(74)
       out.putPrefixedString(self.app_id_)
+    if (self.has_project_id_):
+      out.putVarInt32(80)
+      out.putVarInt64(self.project_id_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -943,6 +970,9 @@
       if tt == 74:
         self.set_app_id(d.getPrefixedString())
         continue
+      if tt == 80:
+        self.set_project_id(d.getVarInt64())
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -971,6 +1001,7 @@
       res+=self.remote_ip_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
     if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+    if self.has_project_id_: res+=prefix+("project_id: %s\n" % self.DebugFormatInt64(self.project_id_))
     return res
 
 
@@ -984,6 +1015,7 @@
   klisten_backlog = 5
   kremote_ip = 6
   kapp_id = 9
+  kproject_id = 10
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
@@ -994,7 +1026,8 @@
     5: "listen_backlog",
     6: "remote_ip",
     9: "app_id",
-  }, 9)
+    10: "project_id",
+  }, 10)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
@@ -1005,7 +1038,8 @@
     5: ProtocolBuffer.Encoder.NUMERIC,
     6: ProtocolBuffer.Encoder.STRING,
     9: ProtocolBuffer.Encoder.STRING,
-  }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
+    10: ProtocolBuffer.Encoder.NUMERIC,
+  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
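
The generated accessors above thread the new project_id field (field number
10, varint-encoded) through the hand-written protocol buffer plumbing. The
magic constant 80 used in OutputUnchecked and TryMerge is just the standard
protobuf tag encoding, which is easy to verify:

    # protobuf tag = (field_number << 3) | wire_type
    FIELD_NUMBER = 10    # kproject_id
    WIRETYPE_VARINT = 0
    assert (FIELD_NUMBER << 3) | WIRETYPE_VARINT == 80
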
diff --git a/google/appengine/api/runtime/runtime.py b/google/appengine/api/runtime/runtime.py
index a3599e5..0c7d18a 100644
--- a/google/appengine/api/runtime/runtime.py
+++ b/google/appengine/api/runtime/runtime.py
@@ -33,6 +33,7 @@
 from __future__ import with_statement
 
 
+
 import threading
 
 from google.appengine.api import apiproxy_stub_map
diff --git a/google/appengine/api/search/__init__.py b/google/appengine/api/search/__init__.py
index acaa436..ac9ffc4 100644
--- a/google/appengine/api/search/__init__.py
+++ b/google/appengine/api/search/__init__.py
@@ -21,6 +21,7 @@
 """Search API module."""
 
 from search import AtomField
+from search import AtomFacet
 from search import Cursor
 from search import DateField
 from search import DeleteError
@@ -29,6 +30,13 @@
 from search import DOCUMENT_ID_FIELD_NAME
 from search import Error
 from search import ExpressionError
+from search import Facet
+from search import FacetOptions
+from search import FacetRange
+from search import FacetRefinement
+from search import FacetRequest
+from search import FacetResult
+from search import FacetResultValue
 from search import Field
 from search import FieldExpression
 from search import GeoField
@@ -44,6 +52,9 @@
 from search import MAXIMUM_DOCUMENT_ID_LENGTH
 from search import MAXIMUM_DOCUMENTS_PER_PUT_REQUEST
 from search import MAXIMUM_DOCUMENTS_RETURNED_PER_SEARCH
+from search import MAXIMUM_DEPTH_FOR_FACETED_SEARCH
+from search import MAXIMUM_FACETS_TO_RETURN
+from search import MAXIMUM_FACET_VALUES_TO_RETURN
 from search import MAXIMUM_EXPRESSION_LENGTH
 from search import MAXIMUM_FIELD_ATOM_LENGTH
 from search import MAXIMUM_FIELD_NAME_LENGTH
@@ -57,6 +68,7 @@
 from search import MAXIMUM_SEARCH_OFFSET
 from search import MAXIMUM_SORTED_DOCUMENTS
 from search import NumberField
+from search import NumberFacet
 from search import OperationResult
 from search import PutError
 from search import PutResult
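
The export list above brings the faceted-search surface (AtomFacet,
NumberFacet, FacetOptions and friends) into the public search package. A
hedged sketch of attaching facets to a document, assuming Document accepts a
facets argument as in the released Search API (names and values are made up):

    from google.appengine.api import search

    doc = search.Document(
        doc_id='product-42',
        fields=[search.TextField(name='name', value='cabernet')],
        facets=[search.AtomFacet(name='wine_type', value='red'),
                search.NumberFacet(name='vintage', value=2012)])
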
diff --git a/google/appengine/api/search/expression_parser.py b/google/appengine/api/search/expression_parser.py
index edbdc15..662db76 100644
--- a/google/appengine/api/search/expression_parser.py
+++ b/google/appengine/api/search/expression_parser.py
@@ -21,6 +21,7 @@
 """Wrapper for ExpressionParser."""
 
 
+
 from google.appengine._internal import antlr3
 from google.appengine.api.search import ExpressionLexer
 from google.appengine.api.search import ExpressionParser
diff --git a/google/appengine/api/search/search.py b/google/appengine/api/search/search.py
index 27dc8ab..dd7100b 100644
--- a/google/appengine/api/search/search.py
+++ b/google/appengine/api/search/search.py
@@ -27,12 +27,16 @@
 
 
 
+
+
+import base64
 import datetime
 import logging
 import re
 import string
 import sys
 import warnings
+from google.net.proto import ProtocolBuffer
 
 from google.appengine.datastore import document_pb
 from google.appengine.api import apiproxy_stub_map
@@ -48,6 +52,7 @@
 
 __all__ = [
     'AtomField',
+    'AtomFacet',
     'ConcurrentTransactionError',
     'Cursor',
     'DateField',
@@ -57,6 +62,13 @@
     'DOCUMENT_ID_FIELD_NAME',
     'Error',
     'ExpressionError',
+    'Facet',
+    'FacetOptions',
+    'FacetRange',
+    'FacetRefinement',
+    'FacetRequest',
+    'FacetResult',
+    'FacetResultValue',
     'Field',
     'FieldExpression',
     'HtmlField',
@@ -73,6 +85,9 @@
     'MAXIMUM_DOCUMENT_ID_LENGTH',
     'MAXIMUM_DOCUMENTS_PER_PUT_REQUEST',
     'MAXIMUM_DOCUMENTS_RETURNED_PER_SEARCH',
+    'MAXIMUM_DEPTH_FOR_FACETED_SEARCH',
+    'MAXIMUM_FACETS_TO_RETURN',
+    'MAXIMUM_FACET_VALUES_TO_RETURN',
     'MAXIMUM_EXPRESSION_LENGTH',
     'MAXIMUM_FIELD_ATOM_LENGTH',
     'MAXIMUM_FIELD_NAME_LENGTH',
@@ -90,6 +105,7 @@
     'MIN_DATE',
     'MIN_NUMBER_VALUE',
     'NumberField',
+    'NumberFacet',
     'OperationResult',
     'PutError',
     'PutResult',
@@ -118,6 +134,9 @@
 MAXIMUM_EXPRESSION_LENGTH = 5000
 MAXIMUM_QUERY_LENGTH = 2000
 MAXIMUM_DOCUMENTS_RETURNED_PER_SEARCH = 1000
+MAXIMUM_DEPTH_FOR_FACETED_SEARCH = 10000
+MAXIMUM_FACETS_TO_RETURN = 100
+MAXIMUM_FACET_VALUES_TO_RETURN = 100
 MAXIMUM_SEARCH_OFFSET = 1000
 
 MAXIMUM_SORTED_DOCUMENTS = 10000
@@ -560,6 +579,19 @@
   return _ValidateVisiblePrintableAsciiNotReserved(index_name, 'index_name')
 
 
+def _CheckFacetName(name):
+  """Checks facet name is not too long and matches facet name pattern.
+
+  Facet name pattern: "[A-Za-z][A-Za-z0-9_]*".
+
+  Args:
+    name: the name string to validate.
+  Returns:
+    the valid name.
+  """
+  return _CheckFieldName(name)
+
+
 def _CheckFieldName(name):
   """Checks field name is not too long and matches field name pattern.
 
@@ -609,6 +641,24 @@
   return []
 
 
+def _CheckType(obj, obj_type, obj_name):
+  """Check the type of an object."""
+  if not isinstance(obj, obj_type):
+    raise TypeError('%s must be a %s, got %s'
+                    % (obj_name, obj_type, obj.__class__.__name__))
+  return obj
+
+
+def _ConvertToListAndCheckType(arg, element_type, arg_name):
+  """Converts args to a list and check its element type."""
+  ret = _ConvertToList(arg)
+  for element in ret:
+    if not isinstance(element, element_type):
+      raise TypeError('%s should be a single element or a list of type %s'
+                      % (arg_name, element_type))
+  return ret
+
+
 def _ConvertToUnicodeList(arg):
   """Converts arg to a list of unicode objects."""
   return [_ConvertToUnicode(value) for value in _ConvertToList(arg)]
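
A quick sketch of how the two new validation helpers above behave
(hypothetical values; both helpers are private to search.py):

    _CheckType(5, int, 'limit')                          # returns 5
    _CheckType('5', int, 'limit')                        # raises TypeError
    _ConvertToListAndCheckType(None, int, 'values')      # returns []
    _ConvertToListAndCheckType(3, int, 'values')         # returns [3]
    _ConvertToListAndCheckType([1, 'a'], int, 'values')  # raises TypeError
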
@@ -892,6 +942,391 @@
     field_value_pb.set_string_value(self.value.encode('utf-8'))
 
 
+class Facet(object):
+  """An abstract base class which represents a facet of a document.
+
+    This class should not be directly instantiated.
+  """
+
+  def __init__(self, name, value):
+    """Initializer.
+
+    Args:
+      name: The name of the facet. Facet names must have maximum length
+        MAXIMUM_FIELD_NAME_LENGTH and match pattern "[A-Za-z][A-Za-z0-9_]*".
+      value: The value of the facet which can be a str, unicode or number.
+
+    Raises:
+      TypeError: If any of the parameters have invalid types, or an unknown
+        attribute is passed.
+      ValueError: If any of the parameters have invalid values.
+    """
+    self._name = _CheckFacetName(_ConvertToUnicode(name))
+    self._value = self._CheckValue(value)
+
+  @property
+  def name(self):
+    """Returns the name of the facet."""
+    return self._name
+
+  @property
+  def value(self):
+    """Returns the value of the facet."""
+    return self._value
+
+  def _CheckValue(self, value):
+    """Checks the value is valid for the given type.
+
+    Args:
+      value: The value to check.
+
+    Returns:
+      The checked value.
+    """
+    raise NotImplementedError('_CheckValue is an abstract method')
+
+  def _CopyStringValueToProtocolBuffer(self, facet_value_pb):
+    """Copies value to a string value in proto buf."""
+    facet_value_pb.set_string_value(self.value.encode('utf-8'))
+
+  def _CopyToProtocolBuffer(self, pb):
+    """Copies facet's contents to a document_pb.Facet proto buffer."""
+    pb.set_name(self.name)
+    if self.value is not None:
+      facet_value_pb = pb.mutable_value()
+      self._CopyValueToProtocolBuffer(facet_value_pb)
+    return pb
+
+  def _AttributeValueList(self):
+    return [self.name, self.value]
+
+  def __eq__(self, other):
+    return (isinstance(other, type(self)) and
+            self._AttributeValueList() == other._AttributeValueList())
+
+  def __ne__(self, other):
+    return not self == other
+
+  def __hash__(self):
+    return hash(self._AttributeValueList())
+
+  def __repr__(self):
+    return _Repr(self, [('name', self.name), ('value', self.value)])
+
+
+class AtomFacet(Facet):
+  """A Facet that has content to be treated as a single token for indexing.
+
+  The following example shows an atom facet named wine_type:
+    AtomFacet(name='wine_type', value='Red')
+  """
+
+  def __init__(self, name, value=None):
+    """Initializer.
+
+    Args:
+      name: The name of the facet.
+      value: A str or unicode object to be treated as an indivisible text value.
+
+    Raises:
+      TypeError: If value is not a string.
+      ValueError: If value is longer than allowed.
+    """
+    Facet.__init__(self, name, _ConvertToUnicode(value))
+
+  def _CheckValue(self, value):
+    return _CheckAtom(value)
+
+  def _CopyValueToProtocolBuffer(self, facet_value_pb):
+    facet_value_pb.set_type(document_pb.FacetValue.ATOM)
+    self._CopyStringValueToProtocolBuffer(facet_value_pb)
+
+
+class NumberFacet(Facet):
+  """A Facet that has a numeric value.
+
+  The following example shows a number facet named wine_vintage:
+    NumberFacet(name='wine_vintage', value=2000)
+  """
+
+  def __init__(self, name, value=None):
+    """Initializer.
+
+    Args:
+      name: The name of the facet.
+      value: A numeric value.
+
+    Raises:
+      TypeError: If value is not numeric.
+      ValueError: If value is out of range.
+    """
+    Facet.__init__(self, name, value)
+
+  def _CheckValue(self, value):
+    value = _CheckNumber(value, 'facet value')
+    if value < MIN_NUMBER_VALUE or value > MAX_NUMBER_VALUE:
+      raise ValueError('value %d must be between %d and %d' %
+                       (value, MIN_NUMBER_VALUE, MAX_NUMBER_VALUE))
+    return value
+
+  def _CopyValueToProtocolBuffer(self, facet_value_pb):
+    facet_value_pb.set_type(document_pb.FacetValue.NUMBER)
+    facet_value_pb.set_string_value(str(self.value))
+
+
+def _NewFacetFromPb(pb):
+  """Constructs a Facet from a document_pb.Facet protocol buffer."""
+  name = _DecodeUTF8(pb.name())
+  val_type = pb.value().type()
+  value = _DecodeValue(_GetValue(pb.value()), val_type)
+  if val_type == document_pb.FacetValue.ATOM:
+    return AtomFacet(name, value)
+  elif val_type == document_pb.FacetValue.NUMBER:
+    return NumberFacet(name, value)
+  raise InvalidRequest('Unknown facet value type %d' % val_type)
+
+
+def _NewFacetsFromPb(facet_list):
+  """Returns a list of Facet copied from a document_pb.Document proto buf."""
+  return [_NewFacetFromPb(f) for f in facet_list]
+
+
+class FacetRange(object):
+  """A facet range with a name, start and end values.
+
+    An example of a FacetRange for good rating is:
+    FacetRange('good', start='3.0', end='3.5')
+  """
+
+  @datastore_rpc._positional(2)
+  def __init__(self, name=None, start=None, end=None):
+    """Initializer.
+
+    Args:
+      name: The name of the range.
+      start: Start value for the range, inclusive.
+      end: End value for the range, exclusive.
+
+    Raises:
+      TypeError: If any of the parameters have invalid types, or an unknown
+        attribute is passed.
+      ValueError: If any of the parameters have invalid values.
+    """
+    self._name = name
+    if start is None and end is None:
+      raise ValueError(
+          'Either start or end must be provided for a facet range.')
+    none_or_numeric_type = (type(None), int, float, long)
+    self._start = _CheckType(start, none_or_numeric_type, 'start')
+    self._end = _CheckType(end, none_or_numeric_type, 'end')
+
+  @property
+  def name(self):
+    """Returns the name of the range."""
+    return self._name
+
+  @property
+  def start(self):
+    """Returns inclusive start of the range."""
+    return self._start
+
+  @property
+  def end(self):
+    """Returns exclusive end of the range."""
+    return self._end
+
+
+class FacetRequest(object):
+  """A facet to be included in search result.
+
+  An example of a request for a facet only with name:
+    FacetRequest('ExpediteShipping')
+  (in that case, results will always have this facet)
+  Or with a value constraint:
+    FacetRequest('Size', values=['XL','L','M']
+  (results will have this facet with only specified values)
+  Or ranges:
+    FacetRequest('Rating', ranges=[
+        FacetRange('Fair', 1.0, 2.0),
+        FacetRange('Good', 2.0, 3.5),
+        FacetRange('Excelent', 3.5, 4.0)]
+  (results will have this facet with specified ranges)
+  """
+
+  @datastore_rpc._positional(2)
+  def __init__(self, name, value_limit=10, ranges=None, values=None):
+    """Initializer.
+
+    Args:
+      name: The name of the facet.
+      value_limit: Number of values to return if values is not specified.
+      ranges: Range of values to return. Cannot be set with values.
+      values: Specific values to return. Cannot be set with ranges.
+
+    Raises:
+      TypeError: If any of the parameters have invalid types, or an unknown
+        attribute is passed.
+      ValueError: If any of the parameters have invalid values.
+    """
+    self._name = _CheckFacetName(_ConvertToUnicode(name))
+    self._value_limit = _CheckFacetValueLimit(value_limit)
+    if ranges is not None and values is not None:
+      raise ValueError(
+          'Cannot specify both ranges and values.')
+    self._ranges = _ConvertToListAndCheckType(
+        ranges, FacetRange, 'ranges')
+    self._values = _ConvertToListAndCheckType(
+        values, (basestring, int, float, long), 'values')
+
+  @property
+  def name(self):
+    """Returns the name of the facet."""
+    return self._name
+
+  @property
+  def value_limit(self):
+    """Returns number of values to be included in the result."""
+    return self._value_limit
+
+  @property
+  def ranges(self):
+    """Returns FacetRanges of values to be included in the result."""
+    return self._ranges
+
+  @property
+  def values(self):
+    """Returns specific values to be included in the result."""
+    return self._values
+
+  def _CopyToProtocolBuffer(self, facet_request_pb):
+    """Converts this object to a search_service_pb.FacetRequest proto buff."""
+    facet_request_pb.set_name(self.name)
+    request_param_pb = facet_request_pb.mutable_params()
+    request_param_pb.set_value_limit(self.value_limit)
+    for facet_range in self.ranges:
+      range_pb = request_param_pb.add_range()
+      range_pb.set_name(facet_range.name)
+      if facet_range.start is not None:
+        range_pb.set_start(str(facet_range.start))
+      if facet_range.end is not None:
+        range_pb.set_end(str(facet_range.end))
+    for constraint in self.values:
+      request_param_pb.add_value_constraint(constraint)
+
+
+class FacetRefinement(object):
+  """A Facet Refinement to filter out search results based on a facet value.
+
+  NOTE: The recommended way to apply a facet refinement is to use the token
+  string: each FacetResultValue returned by a search carries a refinement
+  token that can be passed to the next query instead of an instance of this
+  class. To provide a manual FacetRefinement, an instance of this class can
+  be passed to the query's facet_refinements parameter.
+  NOTE: Either value or facet_range should be set, but not both.
+  Example of a range refinement for a numeric facet:
+    FacetRefinement(name='rating', facet_range=FacetRange(start=1.0, end=2.5))
+  """
+
+  @datastore_rpc._positional(2)
+  def __init__(self, name, value=None, facet_range=None):
+    """Initializer.
+
+    Args:
+      name: The name of the facet.
+      value: Value of the facet.
+      facet_range: A FacetRange to refine the facet based on a range.
+        FacetRange.name should be None.
+
+    Raises:
+      TypeError: If any of the parameters have invalid types, or an unknown
+        attribute is passed.
+      ValueError: If any of the parameters have invalid values.
+    """
+    self._name = _ConvertToUnicode(name)
+    if (value is None) == (facet_range is None):
+      raise ValueError('Either value or facet_range should be set but not '
+                       'both.')
+    if facet_range is not None and facet_range.name is not None:
+      logging.warning('FacetRefinement.facet_range.name should be None.')
+    self._value = value
+    self._facet_range = facet_range
+
+  @property
+  def name(self):
+    """Returns name of the facet refinement."""
+    return self._name
+
+  @property
+  def value(self):
+    """Returns value of the facet refinement."""
+    return self._value
+
+  @property
+  def facet_range(self):
+    """Returns range of the facet refinement."""
+    return self._facet_range
+
+  def ToTokenString(self):
+    """Converts this refinement to a token string safe to be used in HTML.
+
+    The format of this string may change.
+
+    Returns:
+      A token string safe to be used in HTML for this facet refinement.
+    """
+    facet_refinement = search_service_pb.FacetRefinement()
+    self._CopyToProtocolBuffer(facet_refinement)
+    return base64.b64encode(facet_refinement.SerializeToString())
+
+  @staticmethod
+  def FromTokenString(token_string):
+    """Converts a token string to a FacetRefinement object.
+
+    Do not store token strings across different versions of the API, as the
+    token format could be incompatible.
+
+    Args:
+      token_string: A token string created by the ToTokenString method or
+        returned with a search result.
+    Returns:
+      A FacetRefinement object.
+    Raises:
+      ValueError: If the token_string is invalid.
+    """
+    ref_pb = search_service_pb.FacetRefinement()
+
+    try:
+      ref_pb.ParseFromString(base64.b64decode(token_string))
+    except (ProtocolBuffer.ProtocolBufferDecodeError, TypeError), e:
+
+
+      raise ValueError('Invalid refinement token %s' % token_string, e)
+
+    facet_range = None
+    if ref_pb.has_range():
+      range_pb = ref_pb.range()
+      facet_range = FacetRange(
+          name=None,
+          start=float(range_pb.start()) if range_pb.has_start() else None,
+          end=float(range_pb.end()) if range_pb.has_end() else None)
+
+    return FacetRefinement(ref_pb.name(),
+                           value=ref_pb.value() if ref_pb.has_value() else None,
+                           facet_range=facet_range)
+
+  def _CopyToProtocolBuffer(self, facet_refinement_pb):
+    """Copies This object to a search_service_pb.FacetRefinement."""
+    facet_refinement_pb.set_name(self.name)
+    if self.value is not None:
+      facet_refinement_pb.set_value(self.value)
+    if self.facet_range is not None:
+      if self.facet_range.start is not None:
+        facet_refinement_pb.mutable_range().set_start(
+            str(self.facet_range.start))
+      if self.facet_range.end is not None:
+        facet_refinement_pb.mutable_range().set_end(
+            str(self.facet_range.end))
+
+
 def _CopyFieldToProtocolBuffer(field, pb):
   """Copies field's contents to a document_pb.Field protocol buffer."""
   pb.set_name(field.name.encode('utf-8'))
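
A minimal sketch of the new facet value classes and the refinement token
round trip introduced above (hypothetical values):

    from google.appengine.api import search

    wine_type = search.AtomFacet(name='wine_type', value='Red')
    vintage = search.NumberFacet(name='wine_vintage', value=2000)

    # Build a manual refinement and serialize it to a web-safe token; the
    # token can later be decoded back into an equivalent FacetRefinement.
    refinement = search.FacetRefinement(
        name='wine_vintage',
        facet_range=search.FacetRange(start=1995.0, end=2005.0))
    token = refinement.ToTokenString()
    restored = search.FacetRefinement.FromTokenString(token)
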
@@ -1223,7 +1658,8 @@
   """
   _FIRST_JAN_2011 = datetime.datetime(2011, 1, 1)
 
-  def __init__(self, doc_id=None, fields=None, language='en', rank=None):
+  def __init__(self, doc_id=None, fields=None, language='en', rank=None,
+               facets=None):
     """Initializer.
 
     Args:
@@ -1238,6 +1674,8 @@
         If not specified, the number of seconds since 1st Jan 2011 is used.
         Documents are returned in descending order of their rank, in absence
         of sorting or scoring options.
+      facets: An iterable of Facet instances representing the facets for this
+        document.
 
     Raises:
       TypeError: If any of the parameters have invalid types, or an unknown
@@ -1249,11 +1687,15 @@
       _CheckDocumentId(doc_id)
     self._doc_id = doc_id
     self._fields = _GetList(fields)
+    self._facets = _GetList(facets)
     self._language = _CheckLanguage(_ConvertToUnicode(language))
 
 
     self._field_map = None
 
+
+    self._facet_map = None
+
     doc_rank = rank
     if doc_rank is None:
       doc_rank = self._GetDefaultRank()
@@ -1272,6 +1714,11 @@
     return self._fields
 
   @property
+  def facets(self):
+    """Returns a list of facets of the document."""
+    return self._facets
+
+  @property
   def language(self):
     """Returns the code of the language the document fields are written in."""
     return self._language
@@ -1300,6 +1747,17 @@
         'Must have exactly one field with name %s, but found %d.' %
         (field_name, len(fields)))
 
+  def facet(self, facet_name):
+    """Returns list of facets with the provided name.
+
+    Args:
+      facet_name: The name of the facet to return.
+
+    Returns:
+      A list of facets with the given name.
+    """
+    return self._BuildFacetMap().get(facet_name, [])
+
   def __getitem__(self, field_name):
     """Returns a list of all fields with the provided field name.
 
@@ -1322,11 +1780,21 @@
   def _BuildFieldMap(self):
     """Lazily build the field map."""
     if self._field_map is None:
-      self._field_map = {}
+      field_map = {}
       for field in self._fields:
-        self._field_map.setdefault(field.name, []).append(field)
+        field_map.setdefault(field.name, []).append(field)
+      self._field_map = field_map
     return self._field_map
 
+  def _BuildFacetMap(self):
+    """Lazily build the facet map."""
+    if self._facet_map is None:
+      facet_map = {}
+      for facet in self._facets:
+        facet_map.setdefault(facet.name, []).append(facet)
+      self._facet_map = facet_map
+    return self._facet_map
+
   def _CheckRank(self, rank):
     """Checks if rank is valid, then returns it."""
     return _CheckInteger(rank, 'rank', upper_bound=sys.maxint)
@@ -1339,12 +1807,13 @@
   def __repr__(self):
     return _Repr(
         self, [('doc_id', self.doc_id), ('fields', self.fields),
-               ('language', self.language), ('rank', self.rank)])
+               ('facets', self.facets), ('language', self.language),
+               ('rank', self.rank)])
 
   def __eq__(self, other):
     return (isinstance(other, type(self)) and self.doc_id == other.doc_id and
             self.rank == other.rank and self.language == other.language
-            and self.fields == other.fields)
+            and self.fields == other.fields and self.facets == other.facets)
 
   def __ne__(self, other):
     return not self == other
@@ -1369,6 +1838,9 @@
   for field in document.fields:
     field_pb = pb.add_field()
     _CopyFieldToProtocolBuffer(field, field_pb)
+  for facet in document.facets:
+    facet_pb = pb.add_facet()
+    facet._CopyToProtocolBuffer(facet_pb)
   pb.set_order_id(document.rank)
   return pb
 
@@ -1386,7 +1858,8 @@
   return Document(doc_id=_DecodeUTF8(doc_pb.id()),
                   fields=_NewFieldsFromPb(doc_pb.field_list()),
                   language=lang,
-                  rank=doc_pb.order_id())
+                  rank=doc_pb.order_id(),
+                  facets=_NewFacetsFromPb(doc_pb.facet_list()))
 
 
 def _QuoteString(argument):
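
A short sketch of attaching facets to a document via the facets parameter
added above (hypothetical index and values):

    from google.appengine.api import search

    doc = search.Document(
        doc_id='wine-42',
        fields=[search.TextField(name='description', value='A dry red')],
        facets=[search.AtomFacet(name='wine_type', value='Red'),
                search.NumberFacet(name='wine_vintage', value=2000)])
    # facet() returns every facet with the given name (possibly several).
    assert doc.facet('wine_type')[0].value == 'Red'
    search.Index(name='products').put(doc)
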
@@ -1725,11 +2198,90 @@
                ('default_value', self.default_value)])
 
 
+class FacetResultValue(object):
+  """A facet value as part of search result."""
+
+  def __init__(self, label, count, refinement):
+    """Initializer.
+
+    Args:
+      label: The label of the facet. Either the name of the facet, a user
+        provided range name, or a system generated range name.
+      count: Occurrence frequency of the label for the given facet.
+      refinement: The FacetRefinement object for this value. Passing this object
+        or its string token to the next query will refine the result based on
+        this facet value.
+    Raises:
+      TypeError: If any of the parameters have invalid types, or an unknown
+        attribute is passed.
+      ValueError: If any of the parameters have invalid values.
+    """
+    self._label = label
+    self._count = count
+    _CheckType(refinement, FacetRefinement, 'refinement')
+    self._refinement_token = refinement.ToTokenString()
+    self._refinement = refinement
+
+  @property
+  def label(self):
+    """Returns the label for this facet value."""
+    return self._label
+
+  @property
+  def count(self):
+    """Returns the count for this facet value."""
+    return self._count
+
+  @property
+  def refinement_token(self):
+    """Returns the refinement token string for this facet value."""
+    return self._refinement_token
+
+  def __repr__(self):
+    return _Repr(self, [('label', self.label),
+                        ('count', self.count),
+                        ('refinement', self._refinement)])
+
+
+class FacetResult(object):
+  """Represents a facet result returned from a search with faceted search."""
+
+  def __init__(self, name, values=None):
+    """Initializer.
+
+    Args:
+      name: The name of this facet result.
+      values: An iterable of FacetResultValue instances representing values
+        for this facet.
+    Raises:
+      TypeError: If any of the parameters have invalid types, or an unknown
+        attribute is passed.
+      ValueError: If any of the parameters have invalid values.
+    """
+    self._name = _ConvertToUnicode(name)
+    self._values = values
+
+  @property
+  def name(self):
+    """Returns the name of this facet result."""
+    return self._name
+
+  @property
+  def values(self):
+    """Returns values for this facet result."""
+    return self._values
+
+  def __repr__(self):
+    return _Repr(self, [('name', self.name),
+                        ('values', self.values)])
+
+
 class ScoredDocument(Document):
   """Represents a scored document returned from a search."""
 
   def __init__(self, doc_id=None, fields=None, language='en',
-               sort_scores=None, expressions=None, cursor=None, rank=None):
+               sort_scores=None, expressions=None, cursor=None, rank=None,
+               facets=None):
     """Initializer.
 
     Args:
@@ -1749,6 +2301,8 @@
         less than sys.maxint. If not specified, the number of seconds since
         1st Jan 2011 is used. Documents are returned in descending order of
         their rank.
+      facets: An iterable of Facet instances representing the facets for this
+        document.
 
     Raises:
       TypeError: If any of the parameters have invalid types, or an unknown
@@ -1756,7 +2310,8 @@
       ValueError: If any of the parameters have invalid values.
     """
     super(ScoredDocument, self).__init__(doc_id=doc_id, fields=fields,
-                                         language=language, rank=rank)
+                                         language=language, rank=rank,
+                                         facets=facets)
     self._sort_scores = self._CheckSortScores(_GetList(sort_scores))
     self._expressions = _GetList(expressions)
     if cursor is not None and not isinstance(cursor, Cursor):
@@ -1824,7 +2379,7 @@
 class SearchResults(object):
   """Represents the result of executing a search request."""
 
-  def __init__(self, number_found, results=None, cursor=None):
+  def __init__(self, number_found, results=None, cursor=None, facets=None):
     """Initializer.
 
     Args:
@@ -1833,6 +2388,8 @@
         search request.
       cursor: A Cursor to continue the search from the end of the
         search results.
+      facets: The list of FacetResults returned from executing a search request
+        with faceted search enabled.
 
     Raises:
       TypeError: If any of the parameters have an invalid type, or an unknown
@@ -1845,6 +2402,7 @@
       raise TypeError('cursor must be a Cursor, got %s' %
                       cursor.__class__.__name__)
     self._cursor = cursor
+    self._facets = _GetList(facets)
 
   def __iter__(self):
 
@@ -1881,10 +2439,16 @@
     """
     return self._cursor
 
+  @property
+  def facets(self):
+    """Return the list of FacetResults that found in matched documents."""
+    return self._facets
+
   def __repr__(self):
     return _Repr(self, [('results', self.results),
                         ('number_found', self.number_found),
-                        ('cursor', self.cursor)])
+                        ('cursor', self.cursor),
+                        ('facets', self.facets)])
 
 
 class GetResponse(object):
@@ -1915,6 +2479,12 @@
     for result in self.results:
       yield result
 
+  def __len__(self):
+    return len(self.results)
+
+  def __getitem__(self, index):
+    return self.results[index]
+
   @property
   def results(self):
     """Returns a list of results ordered by Id from the index."""
@@ -2027,6 +2597,36 @@
       upper_bound=MAXIMUM_DOCUMENTS_RETURNED_PER_SEARCH)
 
 
+def _CheckFacetDepth(depth):
+  """Checks the facet depth to return is an integer within range."""
+  if depth is None:
+    return None
+  else:
+    return _CheckInteger(
+        depth, 'depth', zero_ok=False,
+        upper_bound=MAXIMUM_DEPTH_FOR_FACETED_SEARCH)
+
+
+def _CheckFacetDiscoveryLimit(facet_limit):
+  """Checks the facet limit is an integer within range."""
+  if facet_limit is None:
+    return None
+  else:
+    return _CheckInteger(
+        facet_limit, 'discover_facet_limit',
+        upper_bound=MAXIMUM_FACETS_TO_RETURN)
+
+
+def _CheckFacetValueLimit(value_limit):
+  """Checks the facet value limit is an integer within range."""
+  if value_limit is None:
+    return None
+  else:
+    return _CheckInteger(
+        value_limit, 'facet_value_limit', zero_ok=False,
+        upper_bound=MAXIMUM_FACET_VALUES_TO_RETURN)
+
+
 def _CheckOffset(offset):
   """Checks the offset in document list is an integer within range."""
   return _CheckInteger(
@@ -2058,6 +2658,73 @@
         % (number_expressions, MAXIMUM_FIELDS_RETURNED_PER_SEARCH))
 
 
+class FacetOptions(object):
+  """Options for processing facet reults of a query."""
+
+  @datastore_rpc._positional(1)
+  def __init__(self, discovery_limit=10, discovery_value_limit=None,
+               depth=None):
+    """Initializer.
+
+    Options include the number of facets to discover, the number of values
+    for each facet, and the depth of the results to be considered for facet
+    computation.
+
+    If you wish to discover 5 facets with 10 values each over 6000 search
+    results, you can use a FacetOptions object like this:
+
+    facet_options = FacetOptions(discovery_limit=5,
+                                 discovery_value_limit=10,
+                                 depth=6000)
+
+    Args:
+      discovery_limit: Number of facets to discover if facet discovery is
+        turned on. If None, facet discovery will be disabled.
+      discovery_value_limit: Number of values to be discovered for each of
+        the top discovered facets.
+      depth: Number of documents in query results to evaluate to gather
+        facet information.
+    Raises:
+      TypeError: If an unknown attribute is passed.
+      ValueError: If any of the parameters have invalid values (e.g., a
+        negative depth).
+    """
+    self._discovery_limit = _CheckFacetDiscoveryLimit(discovery_limit)
+    self._discovery_value_limit = _CheckFacetValueLimit(
+        discovery_value_limit)
+    self._depth = _CheckFacetDepth(depth)
+
+  @property
+  def discovery_limit(self):
+    """Returns the number of facets to discover."""
+    return self._discovery_limit
+
+  @property
+  def discovery_value_limit(self):
+    """Returns the number of values to discover for each facet."""
+    return self._discovery_value_limit
+
+  @property
+  def depth(self):
+    """Returns the number of documents to analyze for facet discovery."""
+    return self._depth
+
+  def __repr__(self):
+    return _Repr(
+        self, [('discovery_limit', self.discovery_limit),
+               ('discovery_value_limit', self.discovery_value_limit),
+               ('depth', self._depth)])
+
+  def _CopyToProtocolBuffer(self, params):
+    """Copies a FacetOptions object to a SearchParams proto buff."""
+    if self.discovery_limit is not None:
+      params.set_auto_discover_facet_count(self.discovery_limit)
+    if self.discovery_value_limit is not None:
+      params.mutable_facet_auto_detect_param().set_value_limit(
+          self.discovery_value_limit)
+    if self.depth is not None:
+      params.set_facet_depth(self.depth)
+
+
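
A sketch of passing FacetOptions to a faceted query (hypothetical index
name; the corresponding Query changes appear below):

    from google.appengine.api import search

    results = search.Index(name='products').search(search.Query(
        query_string='movies',
        enable_facet_discovery=True,
        facet_options=search.FacetOptions(discovery_limit=5,
                                          discovery_value_limit=10,
                                          depth=6000)))
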
 class QueryOptions(object):
   """Options for post-processing results for a query.
 
@@ -2285,7 +2952,9 @@
 class Query(object):
   """Represents a request on the search service to query the index."""
 
-  def __init__(self, query_string, options=None):
+  @datastore_rpc._positional(3)
+  def __init__(self, query_string, options=None, enable_facet_discovery=False,
+               return_facets=None, facet_options=None, facet_refinements=None):
 
 
 
@@ -2308,7 +2977,8 @@
                       SortExpression(expression='subject')],
                   limit=1000),
               returned_fields=['author', 'subject', 'summary'],
-              snippeted_fields=['content'])))
+              snippeted_fields=['content']),
+          facet_refinements=[ref_key1, ref_key2]))
 
     In order to get a Cursor, you specify a Cursor in QueryOptions.cursor
     and extract the Cursor for the next request from results.cursor to
@@ -2318,6 +2988,31 @@
           Query(query_string='subject:first good',
                 options=QueryOptions(cursor=results.cursor)))
 
+    To enable faceted search in the results, you can use
+    enable_facet_discovery or return_facets, as shown below:
+
+    # discover top facets
+    results = index.search(
+        Query(query_string='movies',
+              enable_facet_discovery=True))
+
+    # include specific facets with the search results
+    results = index.search(
+        Query(query_string='movies',
+              return_facets=['rating', 'shipping_method']))
+
+    # discover only 5 facets and also return two manual facets with
+    # customized values
+    facet_options = FacetOptions(discovery_limit=5)
+    facet1 = FacetRequest('Rating', ranges=[
+        FacetRange('Fair', start=1.0, end=2.0),
+        FacetRange('Good', start=2.0, end=3.5),
+        FacetRange('Excellent', start=3.5, end=4.0)])
+    results = index.search(
+        Query(query_string='movies',
+              enable_facet_discovery=True,
+              facet_options=facet_options,
+              return_facets=[facet1, 'shipping_method']))
+
     Args:
       query_string: The query to match against documents in the index. A query
         is a boolean expression containing terms.  For example, the query
@@ -2335,12 +3030,34 @@
         https://developers.google.com/appengine/docs/python/search/overview#Expressions
         for a list of expressions that can be used in queries.
       options: A QueryOptions describing post-processing of search results.
+      enable_facet_discovery: If True, discover the top relevant facets for
+        this search query and return them with the results.
+      return_facets: An iterable of FacetRequest objects or basestring facet
+        names specifying facets to return with the results.
+      facet_options: A FacetOptions object describing processing of facets.
+      facet_refinements: An iterable of FacetRefinement objects or refinement
+        token strings used to filter out search results based on a facet value.
+        Refinements for different facets are combined by conjunction, while
+        refinements for the same facet are combined by disjunction.
     Raises:
       QueryError: If the query string is not parseable.
     """
     self._query_string = _ConvertToUnicode(query_string)
     _CheckQuery(self._query_string)
     self._options = options
+    self._facet_options = facet_options
+    self._enable_facet_discovery = enable_facet_discovery
+    self._return_facets = _ConvertToListAndCheckType(
+        return_facets, (basestring, FacetRequest), 'return_facet')
+    for index, facet in enumerate(self._return_facets):
+      if isinstance(facet, basestring):
+        self._return_facets[index] = FacetRequest(self._return_facets[index])
+    self._facet_refinements = _ConvertToListAndCheckType(
+        facet_refinements, (basestring, FacetRefinement), 'facet_refinements')
+    for index, refinement in enumerate(self._facet_refinements):
+      if isinstance(refinement, basestring):
+        self._facet_refinements[index] = FacetRefinement.FromTokenString(
+            refinement)
 
   @property
   def query_string(self):
@@ -2352,6 +3069,26 @@
     """Returns QueryOptions defining post-processing on the search results."""
     return self._options
 
+  @property
+  def facet_options(self):
+    """Returns FacetOptions defining processing of facets."""
+    return self._facet_options
+
+  @property
+  def facet_refinements(self):
+    """Returns list of facet refinements."""
+    return self._facet_refinements
+
+  @property
+  def enable_facet_discovery(self):
+    """Returns true if facet disocery is on."""
+    return self._enable_facet_discovery
+
+  @property
+  def return_facets(self):
+    """Returns the list of specific facets to be included with the result."""
+    return self._return_facets
+
 
 def _CopyQueryToProtocolBuffer(query, params):
   """Copies Query object to params protobuf."""
@@ -2360,10 +3097,19 @@
 
 def _CopyQueryObjectToProtocolBuffer(query, params):
   _CopyQueryToProtocolBuffer(query.query_string, params)
+  for refinement in query.facet_refinements:
+    refinement._CopyToProtocolBuffer(params.add_facet_refinement())
+  for return_facet in query.return_facets:
+    return_facet._CopyToProtocolBuffer(params.add_include_facet())
   options = query.options
   if query.options is None:
     options = QueryOptions()
   _CopyQueryOptionsObjectToProtocolBuffer(query.query_string, options, params)
+  facet_options = query.facet_options
+  if facet_options is None:
+    facet_options = FacetOptions(
+        discovery_limit=10 if query.enable_facet_discovery else None)
+  facet_options._CopyToProtocolBuffer(params)
 
 
 class Index(object):
@@ -2731,9 +3477,32 @@
     return ScoredDocument(
         doc_id=_DecodeUTF8(doc_pb.id()),
         fields=_NewFieldsFromPb(doc_pb.field_list()),
+        facets=_NewFacetsFromPb(doc_pb.facet_list()),
         language=lang, rank=doc_pb.order_id(), sort_scores=sort_scores,
         expressions=_NewFieldsFromPb(expressions), cursor=cursor)
 
+  def _NewFacetResultFromPb(self, facet_result_pb):
+    """Returns a FacetResult populated from search_service FacetResult pb."""
+    values = []
+    for facet_value_pb in facet_result_pb.value_list():
+      refinement_pb = facet_value_pb.refinement()
+      if refinement_pb.has_range():
+        range_pb = refinement_pb.range()
+        facet_range = FacetRange(
+            name=None,
+            start=(float(range_pb.start()) if range_pb.has_start() else None),
+            end=(float(range_pb.end()) if range_pb.has_end() else None))
+      else:
+        facet_range = None
+      refinement = FacetRefinement(
+          name=refinement_pb.name(),
+          value=refinement_pb.value() if refinement_pb.has_value() else None,
+          facet_range=facet_range)
+      values.append(FacetResultValue(label=facet_value_pb.name(),
+                                     count=facet_value_pb.count(),
+                                     refinement=refinement))
+    return FacetResult(name=facet_result_pb.name(), values=values)
+
   def _NewSearchResults(self, response, cursor):
     """Returns a SearchResults populated from a search_service response pb."""
     results = []
@@ -2754,9 +3523,12 @@
 
         results_cursor = Cursor(web_safe_string=_ToWebSafeString(
             cursor.per_result, _DecodeUTF8(response.cursor())))
+    facets = []
+    for facet_result in response.facet_result_list():
+      facets.append(self._NewFacetResultFromPb(facet_result))
     return SearchResults(
         results=results, number_found=response.matched_count(),
-        cursor=results_cursor)
+        cursor=results_cursor, facets=facets)
 
   @datastore_rpc._positional(2)
   def get(self, doc_id, deadline=None):
diff --git a/google/appengine/api/search/search_util.py b/google/appengine/api/search/search_util.py
index 5f188f8..c8edb78 100644
--- a/google/appengine/api/search/search_util.py
+++ b/google/appengine/api/search/search_util.py
@@ -19,6 +19,7 @@
 """Provides utility methods used by modules in the FTS API stub."""
 
 
+
 import datetime
 import re
 
diff --git a/google/appengine/api/search/simple_search_stub.py b/google/appengine/api/search/simple_search_stub.py
index ad0a0a6..3eab265 100644
--- a/google/appengine/api/search/simple_search_stub.py
+++ b/google/appengine/api/search/simple_search_stub.py
@@ -29,6 +29,7 @@
 
 
 
+
 import base64
 import bisect
 import copy
@@ -56,6 +57,7 @@
 from google.appengine.api.search import search_util
 from google.appengine.api.search.stub import document_matcher
 from google.appengine.api.search.stub import expression_evaluator
+from google.appengine.api.search.stub import simple_facet
 from google.appengine.api.search.stub import simple_tokenizer
 from google.appengine.api.search.stub import tokens
 from google.appengine.runtime import apiproxy_errors
@@ -780,6 +782,18 @@
         self._AddSchemaInformation(index, metadata)
       self._AddStorageInformation(index, metadata)
 
+  def _Dynamic_DeleteSchema(self, request, response):
+    """A local implementation of SearchService.DeleteSchema RPC.
+
+    Args:
+      request: A search_service_pb.DeleteSchemaRequest.
+      response: A search_service_pb.DeleteSchemaResponse.
+    """
+
+    params = request.params()
+    for index_spec in params.index_spec_list():
+      response.add_status().set_code(search_service_pb.SearchServiceError.OK)
+
   def _AddSchemaInformation(self, index, metadata_pb):
     schema = index.GetSchema()
     for name in schema:
@@ -945,7 +959,7 @@
         if (isinstance(expression, float) or
             isinstance(expression, long) or
             isinstance(expression, int)):
-          expr.mutable_value().set_string_value(str(expression))
+          expr.mutable_value().set_string_value(repr(float(expression)))
           expr.mutable_value().set_type(document_pb.FieldValue.NUMBER)
         else:
           expr.mutable_value().set_string_value(expression)
@@ -984,8 +998,10 @@
       self._InvalidRequest(response.mutable_status(), e)
       response.set_matched_count(0)
       return
-    response.set_matched_count(len(results))
 
+    facet_analyzer = simple_facet.SimpleFacet(params)
+    results = facet_analyzer.RefineResults(results)
+    response.set_matched_count(len(results))
     offset = 0
     if params.has_cursor():
       try:
@@ -1026,6 +1042,7 @@
     self._FillSearchResponse(results, result_range, params.cursor_type(),
                              _ScoreRequested(params), response, field_names,
                              params.keys_only())
+    facet_analyzer.FillFacetResponse(results, response)
 
     response.mutable_status().set_code(search_service_pb.SearchServiceError.OK)
 
diff --git a/google/appengine/api/search/stub/expression_evaluator.py b/google/appengine/api/search/stub/expression_evaluator.py
index e0b839b..3531725 100644
--- a/google/appengine/api/search/stub/expression_evaluator.py
+++ b/google/appengine/api/search/stub/expression_evaluator.py
@@ -45,6 +45,7 @@
 
 
 import logging
+import math
 
 from google.appengine.datastore import document_pb
 
@@ -94,14 +95,14 @@
     self._case_preserving_tokenizer = simple_tokenizer.SimpleTokenizer(
         preserve_case=True)
     self._function_table = {
-        ExpressionParser.ABS: self._Unsupported('abs'),
+        ExpressionParser.ABS: self._Abs,
         ExpressionParser.COUNT: self._Count,
         ExpressionParser.DISTANCE: self._Distance,
         ExpressionParser.GEOPOINT: self._Geopoint,
-        ExpressionParser.LOG: self._Unsupported('log'),
+        ExpressionParser.LOG: self._Log,
         ExpressionParser.MAX: self._Max,
         ExpressionParser.MIN: self._Min,
-        ExpressionParser.POW: self._Unsupported('pow'),
+        ExpressionParser.POW: self._Pow,
         ExpressionParser.SNIPPET: self._Snippet,
         ExpressionParser.SWITCH: self._Unsupported('switch'),
         }
@@ -152,6 +153,23 @@
     return max(self._Eval(
         node, document_pb.FieldValue.NUMBER) for node in nodes)
 
+  def _Abs(self, return_type, node):
+    if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
+      raise _ExpressionError('Abs cannot be converted to a text type')
+    return abs(self._Eval(node, document_pb.FieldValue.NUMBER))
+
+  def _Log(self, return_type, node):
+    if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
+      raise _ExpressionError('Log cannot be converted to a text type')
+    return math.log(self._Eval(node, document_pb.FieldValue.NUMBER))
+
+  def _Pow(self, return_type, *nodes):
+    if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
+      raise _ExpressionError('Pow cannot be converted to a text type')
+    lhs, rhs = nodes
+    return pow(self._Eval(lhs, document_pb.FieldValue.NUMBER),
+               self._Eval(rhs, document_pb.FieldValue.NUMBER))
+
   def _Distance(self, return_type, *nodes):
     if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
       raise _ExpressionError('Distance cannot be converted to a text type')
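
With _Abs, _Log and _Pow wired in above, the local stub can now evaluate
these numeric functions in field expressions; a sketch of a query option
that exercises them (hypothetical field name; log() is the natural
logarithm):

    from google.appengine.api import search

    options = search.QueryOptions(returned_expressions=[
        search.FieldExpression(name='log_price', expression='log(price)'),
        search.FieldExpression(name='price_squared',
                               expression='pow(price, 2)')])
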
diff --git a/google/appengine/api/search/stub/simple_facet.py b/google/appengine/api/search/stub/simple_facet.py
new file mode 100644
index 0000000..926d811
--- /dev/null
+++ b/google/appengine/api/search/stub/simple_facet.py
@@ -0,0 +1,342 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""A Simple facet analyzer."""
+
+from google.appengine.datastore import document_pb
+
+
+class SimpleFacet(object):
+  """A Simple facet analyzer."""
+
+  def __init__(self, params):
+    self._params = params
+
+  def FillFacetResponse(self, results, response):
+    """Extract facet results and add them to the response."""
+
+
+    manual_facet_map = {}
+    manual_facets = {}
+    for manual_facet in self._params.include_facet_list():
+      manual_facet_map[manual_facet.name()] = manual_facet.params()
+
+
+      if (manual_facet.params().range_list() and
+          manual_facet.params().value_constraint_list()):
+        raise ValueError('Manual facet request should either specify range '
+                         'or value constraint, not both')
+      facet_obj = _Facet(
+          manual_facet.name(),
+          (manual_facet.params().value_limit()
+           if manual_facet.params().has_value_limit()
+           else self._params.facet_auto_detect_param().value_limit()))
+      manual_facets[manual_facet.name()] = facet_obj
+
+
+      for value in manual_facet.params().value_constraint_list():
+        facet_obj.AddValue(value, 0)
+
+
+      for range_pb in manual_facet.params().range_list():
+        range_pair = (
+            float(range_pb.start()) if range_pb.has_start() else None,
+            float(range_pb.end()) if range_pb.has_end() else None)
+        facet_obj.AddValue(range_pb.name(), 0, refinement=range_pair)
+
+    if not manual_facet_map and not self._params.auto_discover_facet_count():
+      return
+    discovered_facets = {}
+
+
+
+    for result in results[:self._params.facet_depth()]:
+      for facet in result.document.facet_list():
+
+        if facet.value().type() == document_pb.FacetValue.ATOM:
+
+          if facet.name() in manual_facet_map:
+            manual_facet_req = manual_facet_map[facet.name()]
+            facet_obj = manual_facets[facet.name()]
+
+
+
+            if not manual_facet_req.range_list() and (
+                not manual_facet_req.value_constraint_list() or
+                facet.value().string_value() in
+                manual_facet_req.value_constraint_list()):
+              facet_obj.AddValue(facet.value().string_value())
+          elif self._params.auto_discover_facet_count():
+            if facet.name() in discovered_facets:
+              facet_obj = discovered_facets[facet.name()]
+            else:
+              facet_obj = discovered_facets[facet.name()] = _Facet(
+                  facet.name(),
+                  self._params.facet_auto_detect_param().value_limit())
+            facet_obj.AddValue(facet.value().string_value())
+
+        elif facet.value().type() == document_pb.FacetValue.NUMBER:
+          facet_value = float(facet.value().string_value())
+          if facet.name() in manual_facet_map:
+            manual_facet_req = manual_facet_map[facet.name()]
+            facet_obj = manual_facets[facet.name()]
+            if manual_facet_req.range_list():
+              for range_pb in manual_facet_req.range_list():
+                range_pair = (
+                    float(range_pb.start()) if range_pb.has_start() else None,
+                    float(range_pb.end()) if range_pb.has_end() else None)
+                if ((range_pair[0] is None or facet_value >= range_pair[0]) and
+                    (range_pair[1] is None or facet_value < range_pair[1])):
+                  facet_obj.AddValue(range_pb.name(), refinement=range_pair)
+            elif manual_facet_req.value_constraint_list():
+              for constraint in manual_facet_req.value_constraint_list():
+                if facet_value == float(constraint):
+                  facet_obj.AddValue(constraint)
+            else:
+              facet_obj.AddNumericValue(facet_value)
+          elif self._params.auto_discover_facet_count():
+            if facet.name() in discovered_facets:
+              facet_obj = discovered_facets[facet.name()]
+            else:
+              facet_obj = discovered_facets[facet.name()] = _Facet(
+                  facet.name(),
+                  self._params.facet_auto_detect_param().value_limit())
+            facet_obj.AddNumericValue(facet_value)
+        else:
+          raise ValueError('Facet type %d is not supported' %
+                           facet.value().type())
+
+
+
+    for facet in manual_facets.values():
+      self._FillResponseForSingleFacet(facet, response.add_facet_result())
+    for facet in _GetTopN(discovered_facets.values(),
+                           self._params.auto_discover_facet_count()):
+      self._FillResponseForSingleFacet(facet, response.add_facet_result())
+
+  def _FillResponseForSingleFacet(self, facet, facet_result_pb):
+    """Convert a single _Facet to a facet_result_pb."""
+
+
+    if facet.min is not None:
+      facet.AddValue('%s,%s' % (facet.min, facet.max), facet.min_max_count,
+                     (facet.min, facet.max))
+    facet_result_pb.set_name(facet.name)
+    for value in facet.GetTopValues(facet.value_limit):
+      value_pb = facet_result_pb.add_value()
+      ref_pb = value_pb.mutable_refinement()
+
+
+      if value.refinement is not None:
+        if value.refinement[0] is not None:
+          ref_pb.mutable_range().set_start(str(value.refinement[0]))
+        if value.refinement[1] is not None:
+          ref_pb.mutable_range().set_end(str(value.refinement[1]))
+      else:
+
+
+        ref_pb.set_value(str(value.label))
+      ref_pb.set_name(facet.name)
+      value_pb.set_name(str(value.label))
+      value_pb.set_count(value.count)
+
+  def RefineResults(self, results):
+    """Returns refined results using facet refinement parameters."""
+    if not self._params.facet_refinement_list():
+      return results
+
+
+    ref_groups = {}
+    for refinement in self._params.facet_refinement_list():
+      if refinement.name() in ref_groups:
+        ref_groups[refinement.name()].append(refinement)
+      else:
+        ref_groups[refinement.name()] = [refinement]
+
+    return [doc for doc in results
+            if self._MatchFacetRefinements(doc, ref_groups)]
+
+  def _MatchFacetRefinements(self, doc, ref_groups):
+
+
+    return all([self._MatchFacetRefinementSameName(doc, ref_same_names)
+                for ref_same_names in ref_groups.values()])
+
+  def _MatchFacetRefinementSameName(self, doc, ref_same_names):
+
+    return any([self._MatchFacetRefinement(doc, ref)
+                for ref in ref_same_names])
+
+  def _MatchFacetRefinement(self, doc, refinement):
+
+
+    doc_facets = []
+    for facet in doc.document.facet_list():
+      if facet.name() == refinement.name():
+        doc_facets.append(facet)
+    return any([self._MatchSingleFacetRefinement(doc_facet, refinement)
+                for doc_facet in doc_facets])
+
+  def _MatchSingleFacetRefinement(self, doc_facet, refinement):
+
+    if refinement.has_value():
+      if refinement.has_range():
+        raise ValueError('Refinement request for facet %s should either '
+                         'specify range or value constraint, '
+                         'not both.' % refinement.name())
+      facet_value = doc_facet.value().string_value()
+      if doc_facet.value().type() == document_pb.FacetValue.NUMBER:
+        return float(facet_value) == float(refinement.value())
+      else:
+        return facet_value == refinement.value()
+    else:
+      if not refinement.has_range():
+        raise ValueError('Refinement request for facet %s should specify '
+                         'range or value constraint.' % refinement.name())
+
+
+      if doc_facet.value().type() != document_pb.FacetValue.NUMBER:
+        return False
+      facet_value = float(doc_facet.value().string_value())
+      ref_range = refinement.range()
+      start = float(ref_range.start()) if ref_range.has_start() else None
+      end = float(ref_range.end()) if ref_range.has_end() else None
+      return ((start is None or facet_value >= start) and
+              (end is None or facet_value < end))
+
+
+class _FacetValue(object):
+  """A representation of a single facet value."""
+
+  def __init__(self, label, count=0, refinement=None):
+    """Initilizer.
+
+    Args:
+      label: The label of this value. Can be the actual value or a custom
+        label for ranges. If this is a custom label, refinement should be set.
+      count: Initial number of facets with this value. This number can be
+        increased later.
+      refinement: If this value does not need a custom refinement, this value
+        should be None. If the value needs a range refinement, this value should
+        be a pair representing start and end value for the range.
+    """
+    self._label = label
+    self._count = count
+    self._refinement = refinement
+
+  @property
+  def label(self):
+    return self._label
+
+  @property
+  def count(self):
+    return self._count
+
+  @property
+  def refinement(self):
+    return self._refinement
+
+  def IncCount(self, value):
+    self._count += value
+
+  def __repr__(self):
+    return '_FacetValue(label=%s, count=%d, refinement=%s)' % (self.label,
+                                                               self.count,
+                                                               self.refinement)
+
+
+class _Facet(object):
+  """Simple facet implementation that holds values and overall count."""
+
+  def __init__(self, name, value_limit):
+    """Initilizer.
+
+    Args:
+      name: The name of the facet.
+      value_limit: Maximum number of values for this facet.
+    """
+    self._name = name
+    self._value_limit = value_limit
+    self._values = {}
+    self._count = 0
+    self._min = self._max = None
+    self._min_max_count = 0
+
+  @property
+  def name(self):
+    return self._name
+
+  @property
+  def value_limit(self):
+    return self._value_limit
+
+  @property
+  def count(self):
+    return self._count + self._min_max_count
+
+  @property
+  def min(self):
+    return self._min
+
+  @property
+  def max(self):
+    return self._max
+
+  @property
+  def min_max_count(self):
+    return self._min_max_count
+
+  def AddNumericValue(self, value):
+    """Add value for discovered numeric facets.
+
+    For numeric facets, we only keep the minimum and maximum values, not the
+    actual values.
+
+    Args:
+      value: numeric value.
+    """
+    if self._min is None or self._min > value:
+      self._min = value
+    if self._max is None or self._max < value:
+      self._max = value
+    self._min_max_count += 1
+
+  def AddValue(self, label, count=1, refinement=None):
+    if label in self._values:
+      self._values[label].IncCount(count)
+    else:
+      self._values[label] = _FacetValue(label, count, refinement)
+    self._count += count
+
+  def GetTopValues(self, n):
+    return _GetTopN(self._values.values(), n)
+
+  def __repr__(self):
+    return '_Facet(name=%s, count=%d, values=%s)' % (
+        self.name, self.count, self._values)
+
+
+def _GetTopN(objects, n):
+  """Returns top n objects with maximum count.
+
+  Args:
+    objects: any object that has count property
+    n: number of top elements to return
+  Returns:
+    top N elements if objects size is greater than N otherwise the map elements
+    in a sorted order.
+  """
+  return sorted(objects, key=lambda o: o.count, reverse=True)[:n]
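
An illustrative sketch of how the stub's accumulator types compose (all
names are module-private to simple_facet.py):

    facet = _Facet('wine_type', value_limit=10)
    facet.AddValue('Red')        # count becomes 1
    facet.AddValue('Red')        # count becomes 2
    facet.AddValue('White')      # count becomes 1
    top = facet.GetTopValues(1)  # the 'Red' _FacetValue, count 2
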
diff --git a/google/appengine/api/search/stub/simple_tokenizer.py b/google/appengine/api/search/stub/simple_tokenizer.py
index e7f8650..2555193 100644
--- a/google/appengine/api/search/stub/simple_tokenizer.py
+++ b/google/appengine/api/search/stub/simple_tokenizer.py
@@ -21,6 +21,7 @@
 
 
 
+
 import re
 
 
diff --git a/google/appengine/api/search/stub/tokens.py b/google/appengine/api/search/stub/tokens.py
index 5069408..c7101b0 100644
--- a/google/appengine/api/search/stub/tokens.py
+++ b/google/appengine/api/search/stub/tokens.py
@@ -21,6 +21,7 @@
 
 
 
+
 from google.appengine.api.search import search_util
 
 
diff --git a/google/appengine/api/system/system_stub.py b/google/appengine/api/system/system_stub.py
index 50fc990..1b76296 100644
--- a/google/appengine/api/system/system_stub.py
+++ b/google/appengine/api/system/system_stub.py
@@ -26,6 +26,7 @@
 
 
 
+
 import random
 
 from google.appengine.api import apiproxy_stub
diff --git a/google/appengine/api/taskqueue/taskqueue.py b/google/appengine/api/taskqueue/taskqueue.py
index 4c95c11..6a60c3c 100644
--- a/google/appengine/api/taskqueue/taskqueue.py
+++ b/google/appengine/api/taskqueue/taskqueue.py
@@ -43,6 +43,8 @@
 
 
 
+
+
 __all__ = [
 
     'BadTaskStateError', 'BadTransactionState', 'BadTransactionStateError',
diff --git a/google/appengine/api/taskqueue/taskqueue_stub.py b/google/appengine/api/taskqueue/taskqueue_stub.py
index 69a9fb9..af575be 100644
--- a/google/appengine/api/taskqueue/taskqueue_stub.py
+++ b/google/appengine/api/taskqueue/taskqueue_stub.py
@@ -40,6 +40,7 @@
 
 
 
+
 __all__ = []
 
 import base64
diff --git a/google/appengine/api/urlfetch.py b/google/appengine/api/urlfetch.py
index df362a2..6432b6a 100644
--- a/google/appengine/api/urlfetch.py
+++ b/google/appengine/api/urlfetch.py
@@ -33,6 +33,7 @@
 
 
 
+
 import httplib
 import os
 import StringIO
diff --git a/google/appengine/api/urlfetch_errors.py b/google/appengine/api/urlfetch_errors.py
index ca02759..88dc80a 100644
--- a/google/appengine/api/urlfetch_errors.py
+++ b/google/appengine/api/urlfetch_errors.py
@@ -31,6 +31,7 @@
 
 
 
+
 class Error(Exception):
   """Base URL fetcher error type."""
 
diff --git a/google/appengine/api/urlfetch_stub.py b/google/appengine/api/urlfetch_stub.py
index 7468905..0160b06 100644
--- a/google/appengine/api/urlfetch_stub.py
+++ b/google/appengine/api/urlfetch_stub.py
@@ -24,6 +24,7 @@
 
 
 
+
 _successfully_imported_fancy_urllib = False
 _fancy_urllib_InvalidCertException = None
 _fancy_urllib_SSLError = None
diff --git a/google/appengine/api/user_service_stub.py b/google/appengine/api/user_service_stub.py
index 9213912..f8a3fb5 100644
--- a/google/appengine/api/user_service_stub.py
+++ b/google/appengine/api/user_service_stub.py
@@ -22,6 +22,8 @@
 """Trivial implementation of the UserService."""
 
 
+
+
 import os
 import urllib
 import urlparse
diff --git a/google/appengine/api/users.py b/google/appengine/api/users.py
index e17d32f..2401598 100644
--- a/google/appengine/api/users.py
+++ b/google/appengine/api/users.py
@@ -39,6 +39,8 @@
 
 
 
+
+
 import os
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.api import user_service_pb
diff --git a/google/appengine/api/validation.py b/google/appengine/api/validation.py
index 547a904..1a49f1e 100644
--- a/google/appengine/api/validation.py
+++ b/google/appengine/api/validation.py
@@ -45,6 +45,7 @@
 
 
 
+
 import re
 
 import google
diff --git a/google/appengine/api/xmpp/__init__.py b/google/appengine/api/xmpp/__init__.py
index c5c9e19..6240cd0 100644
--- a/google/appengine/api/xmpp/__init__.py
+++ b/google/appengine/api/xmpp/__init__.py
@@ -43,6 +43,7 @@
 
 
 
+
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.api.xmpp import xmpp_service_pb
 from google.appengine.runtime import apiproxy_errors
diff --git a/google/appengine/api/xmpp/xmpp_service_stub.py b/google/appengine/api/xmpp/xmpp_service_stub.py
index 8ebfff8..e2a4817 100644
--- a/google/appengine/api/xmpp/xmpp_service_stub.py
+++ b/google/appengine/api/xmpp/xmpp_service_stub.py
@@ -28,6 +28,7 @@
 
 
 
+
 import logging
 import os
 
diff --git a/google/appengine/api/yaml_builder.py b/google/appengine/api/yaml_builder.py
index dc5fb58..88e3754 100644
--- a/google/appengine/api/yaml_builder.py
+++ b/google/appengine/api/yaml_builder.py
@@ -32,6 +32,7 @@
 
 
 
+
 from google.appengine.api import yaml_errors
 from google.appengine.api import yaml_listener
 
diff --git a/google/appengine/api/yaml_errors.py b/google/appengine/api/yaml_errors.py
index 9afd3a9..fdd0a4a 100644
--- a/google/appengine/api/yaml_errors.py
+++ b/google/appengine/api/yaml_errors.py
@@ -23,6 +23,7 @@
 
 
 
+
 class Error(Exception):
   """Base datastore yaml error type."""
 
diff --git a/google/appengine/api/yaml_listener.py b/google/appengine/api/yaml_listener.py
index 0b0a41f..abccc73 100644
--- a/google/appengine/api/yaml_listener.py
+++ b/google/appengine/api/yaml_listener.py
@@ -25,6 +25,7 @@
 """
 
 
+
 from google.appengine.api import yaml_errors
 import yaml
 
diff --git a/google/appengine/api/yaml_object.py b/google/appengine/api/yaml_object.py
index 4e7391b..6c9ae84 100644
--- a/google/appengine/api/yaml_object.py
+++ b/google/appengine/api/yaml_object.py
@@ -32,6 +32,7 @@
 
 
 
+
 from google.appengine.api import validation
 from google.appengine.api import yaml_listener
 from google.appengine.api import yaml_builder
diff --git a/google/appengine/client/services/port_manager.py b/google/appengine/client/services/port_manager.py
index 3ed1bda..5af9d92 100644
--- a/google/appengine/client/services/port_manager.py
+++ b/google/appengine/client/services/port_manager.py
@@ -51,8 +51,8 @@
   """Construct a PortManager object with port forwarding configured.
 
   Args:
-    forwarded_ports: A dictionary containing desired mappings from VM host port
-        to docker container port.
+    forwarded_ports: A string containing desired mappings from VM host ports
+        to docker container ports.
     container_port: An integer port number for the container port.
 
   Returns:
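
The exact string format accepted by PortManager is not shown in this patch.
Assuming a comma-separated list of host:container pairs, a hypothetical parser
for such a string might look like:

    def parse_forwarded_ports(forwarded_ports):
      """Parses e.g. '80:8080,443:8443' into {80: 8080, 443: 8443}."""
      mappings = {}
      for pair in forwarded_ports.split(','):
        host, container = pair.split(':')
        mappings[int(host)] = int(container)
      return mappings

    assert parse_forwarded_ports('80:8080,443:8443') == {80: 8080, 443: 8443}
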
diff --git a/google/appengine/client/services/vme_errors.py b/google/appengine/client/services/vme_errors.py
index 9d4644b..26a28f6 100644
--- a/google/appengine/client/services/vme_errors.py
+++ b/google/appengine/client/services/vme_errors.py
@@ -186,10 +186,6 @@
   """Thrown if there was a timeout with a ReplicaPool operation."""
 
 
-class MigrateDuplicateZoneError(PermanentAppError):
-  """Thrown if attempted to migrating a replica pool to its existing zone."""
-
-
 class ReplicaPoolTransientError(TransientError):
   """Thrown if there was an 5xx HttpError from the ReplicaPool service."""
 
diff --git a/google/appengine/cron/GrocParser.py b/google/appengine/cron/GrocParser.py
index 3f5c2c3..554bc0d 100644
--- a/google/appengine/cron/GrocParser.py
+++ b/google/appengine/cron/GrocParser.py
@@ -51,6 +51,7 @@
 
 
 
+
 allOrdinals = set([1, 2, 3, 4, 5])
 numOrdinals = len(allOrdinals)
 
diff --git a/google/appengine/cron/groc.py b/google/appengine/cron/groc.py
index e907d2a..172c3df 100644
--- a/google/appengine/cron/groc.py
+++ b/google/appengine/cron/groc.py
@@ -26,6 +26,7 @@
 """A wrapper around the generated Groc parser and lexer."""
 
 
+
 import google
 
 import antlr3
diff --git a/google/appengine/cron/groctimespecification.py b/google/appengine/cron/groctimespecification.py
index b0ad14f..e4d4394 100644
--- a/google/appengine/cron/groctimespecification.py
+++ b/google/appengine/cron/groctimespecification.py
@@ -38,6 +38,7 @@
 """
 
 
+
 import calendar
 import datetime
 
diff --git a/google/appengine/datastore/datastore_index.py b/google/appengine/datastore/datastore_index.py
index 7daa43d..41f3ac8 100644
--- a/google/appengine/datastore/datastore_index.py
+++ b/google/appengine/datastore/datastore_index.py
@@ -62,6 +62,8 @@
 
 
 
+
+
 import google
 import yaml
 
diff --git a/google/appengine/datastore/datastore_pb.py b/google/appengine/datastore/datastore_pb.py
index ad1cab8..1a61380 100644
--- a/google/appengine/datastore/datastore_pb.py
+++ b/google/appengine/datastore/datastore_pb.py
@@ -27,6 +27,7 @@
 
 
 
+
 from google.appengine.datastore.action_pb import Action
 from google.appengine.datastore.entity_pb import CompositeIndex
 from google.appengine.datastore.entity_pb import EntityProto
diff --git a/google/appengine/datastore/datastore_pbs.py b/google/appengine/datastore/datastore_pbs.py
index 323897f..c3600f0 100644
--- a/google/appengine/datastore/datastore_pbs.py
+++ b/google/appengine/datastore/datastore_pbs.py
@@ -38,6 +38,7 @@
 
 
 
+
 from google.appengine.datastore import entity_pb
 
 from google.appengine.datastore import entity_v4_pb
diff --git a/google/appengine/datastore/datastore_query.py b/google/appengine/datastore/datastore_query.py
index 2698701..1f26513 100644
--- a/google/appengine/datastore/datastore_query.py
+++ b/google/appengine/datastore/datastore_query.py
@@ -35,6 +35,7 @@
 
 
 
+
 __all__ = ['Batch',
            'Batcher',
            'CompositeFilter',
@@ -897,11 +898,6 @@
   has to be of type V3 PointValue. V4 GeoPoints converts to this type.
   """
 
-
-
-
-
-
   def __init__(self, property_name, latitude, longitude, radius_meters):
     self._property_name = property_name
     self._lat_lng = geo_util.LatLng(latitude, longitude)
@@ -932,7 +928,6 @@
 
     lat_lng = geo_util.LatLng(latitude, longitude)
 
-
     return self._lat_lng - lat_lng <= self._radius_meters
 
 
@@ -944,8 +939,6 @@
   has to be of type V3 PointValue. V4 GeoPoints converts to this type.
   """
 
-
-
   def __init__(self, property_name, southwest, northeast):
     """Initializes a _BoundingBoxFilter.
 
@@ -961,7 +954,6 @@
       of the north-east coordinate.
     """
 
-
     if southwest.lat > northeast.lat:
       raise datastore_errors.BadArgumentError(
           'the south-west coordinate is on top of the north-east coordinate')
@@ -990,8 +982,6 @@
 
     _, latitude, longitude = value
 
-
-
     if not self._southwest.lat <= latitude <= self._northeast.lat:
       return False
 
@@ -1557,38 +1547,24 @@
   immediately after the last result returned by a batch.
 
   A cursor should only be used on a query with an identical signature to the
-  one that produced it.
+  one that produced it or on a query with its sort order reversed.
   """
 
   @datastore_rpc._positional(1)
-  def __init__(self, _cursor_pb=None, urlsafe=None, _cursor_bytes=None):
+  def __init__(self, urlsafe=None, _cursor_bytes=None):
     """Constructor.
 
     A Cursor constructed with no arguments points to the first result of any
     query. If such a Cursor is used as an end_cursor, no results will ever be
     returned.
     """
-
-
     super(Cursor, self).__init__()
-    if ((urlsafe is not None) + (_cursor_pb is not None)
-        + (_cursor_bytes is not None) > 1):
-      raise datastore_errors.BadArgumentError(
-          'Can only specify one of _cursor_pb, urlsafe, and _cursor_bytes')
     if urlsafe is not None:
-      _cursor_bytes = self._urlsafe_to_bytes(urlsafe)
-    if _cursor_pb is not None:
-      if not isinstance(_cursor_pb, datastore_pb.CompiledCursor):
+      if _cursor_bytes is not None:
         raise datastore_errors.BadArgumentError(
-            '_cursor_pb argument should be datastore_pb.CompiledCursor (%r)' %
-            (_cursor_pb,))
-      _cursor_bytes = _cursor_pb.Encode()
+            'Can only specify one of urlsafe and _cursor_bytes')
+      _cursor_bytes = self._urlsafe_to_bytes(urlsafe)
     if _cursor_bytes is not None:
-      if _cursor_pb is None and urlsafe is None:
-
-
-
-        Cursor._bytes_to_cursor_pb(_cursor_bytes)
       self.__cursor_bytes = _cursor_bytes
     else:
       self.__cursor_bytes = ''
@@ -1599,16 +1575,13 @@
       arg = '<%s>' % arg
     return '%s(%s)' % (self.__class__.__name__, arg)
 
-
   def reversed(self):
-    """Creates a cursor for use in a query with a reversed sort order."""
-    compiled_cursor = self._to_pb()
-    if compiled_cursor.has_position():
-      pos = compiled_cursor.position()
-      if pos.has_start_key():
-        raise datastore_errors.BadRequestError('Cursor cannot be reversed.')
-      pos.set_start_inclusive(not pos.start_inclusive())
-    return Cursor(_cursor_pb=compiled_cursor)
+    """DEPRECATED. It is no longer necessary to call reversed() on cursors.
+
+    A cursor returned by a query may also be used in a query whose sort order
+    has been reversed. This method returns a copy of the original cursor.
+    """
+    return Cursor(_cursor_bytes=self.__cursor_bytes)
 
   def to_bytes(self):
     """Serialize cursor as a byte string."""
@@ -1633,29 +1606,6 @@
     """
     return Cursor(_cursor_bytes=cursor)
 
-
-  @staticmethod
-  def _bytes_to_cursor_pb(cursor):
-
-    try:
-      cursor_pb = datastore_pb.CompiledCursor(cursor)
-    except (ValueError, TypeError), e:
-      raise datastore_errors.BadValueError(
-          'Invalid cursor (%r). Details: %s' % (cursor, e))
-    except Exception, e:
-
-
-
-
-
-
-      if e.__class__.__name__ == 'ProtocolBufferDecodeError':
-        raise datastore_errors.BadValueError(
-            'Invalid cursor %s. Details: %s' % (cursor, e))
-      else:
-        raise
-    return cursor_pb
-
   def urlsafe(self):
     """Serialize cursor as a websafe string.
 
@@ -1704,7 +1654,7 @@
   @staticmethod
   def _from_query_result(query_result):
     if query_result.has_compiled_cursor():
-      return Cursor(_cursor_pb=query_result.compiled_cursor())
+      return Cursor(_cursor_bytes=query_result.compiled_cursor().Encode())
     return None
 
   def advance(self, offset, query, conn):
@@ -1730,11 +1680,6 @@
     return query.run(conn, query_options).next_batch(
         Batcher.AT_LEAST_OFFSET).cursor(0)
 
-
-  def _to_pb(self):
-    """Returns the internal only pb representation."""
-    return Cursor._bytes_to_cursor_pb(self.__cursor_bytes)
-
   def __setstate__(self, state):
     if '_Cursor__compiled_cursor' in state:
 
@@ -2117,12 +2062,13 @@
 
 
     if query_options.start_cursor is not None:
-      pb.mutable_compiled_cursor().CopyFrom(query_options.start_cursor._to_pb())
+      pb.mutable_compiled_cursor().ParseFromString(
+          query_options.start_cursor.to_bytes())
 
 
     if query_options.end_cursor is not None:
-      pb.mutable_end_compiled_cursor().CopyFrom(
-          query_options.end_cursor._to_pb())
+      pb.mutable_end_compiled_cursor().ParseFromString(
+          query_options.end_cursor.to_bytes())
 
 
     if ((query_options.hint == QueryOptions.ORDER_FIRST and pb.order_size()) or
@@ -2618,9 +2564,9 @@
       return self.__start_cursor
     elif (index == 0 and
           self.__skipped_cursor):
-      return Cursor(_cursor_pb=self.__skipped_cursor)
+      return Cursor(_cursor_bytes=self.__skipped_cursor.Encode())
     elif index > 0 and self.__result_cursors:
-      return Cursor(_cursor_pb=self.__result_cursors[index - 1])
+      return Cursor(_cursor_bytes=self.__result_cursors[index - 1].Encode())
 
     elif index == len(self.__results):
       return self.__end_cursor
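
Taken together, the Cursor changes above reduce the class to a thin wrapper
around raw cursor bytes: the internal _cursor_pb constructor argument, the
_to_pb() helper, and _bytes_to_cursor_pb() are gone, and serialization goes
through byte strings only. A short sketch of the resulting behavior, using
only the constructor and methods shown in this hunk:

    from google.appengine.datastore.datastore_query import Cursor

    # A Cursor constructed with no arguments points to the first result.
    cursor = Cursor()
    raw = cursor.to_bytes()
    assert Cursor.from_bytes(raw).to_bytes() == raw

    # reversed() is deprecated: a cursor may now be used directly on a query
    # whose sort order has been reversed, so it simply returns a copy.
    assert cursor.reversed().to_bytes() == raw

    # Supplying both sources at once raises BadArgumentError:
    #   Cursor(urlsafe=websafe, _cursor_bytes=raw)
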
diff --git a/google/appengine/datastore/datastore_rpc.py b/google/appengine/datastore/datastore_rpc.py
index b6f4ced..4ce64dd 100644
--- a/google/appengine/datastore/datastore_rpc.py
+++ b/google/appengine/datastore/datastore_rpc.py
@@ -35,6 +35,7 @@
 
 
 
+
 __all__ = ['AbstractAdapter',
            'BaseConfiguration',
            'BaseConnection',
diff --git a/google/appengine/datastore/datastore_sqlite_stub.py b/google/appengine/datastore/datastore_sqlite_stub.py
index fc97993..50abb9a 100644
--- a/google/appengine/datastore/datastore_sqlite_stub.py
+++ b/google/appengine/datastore/datastore_sqlite_stub.py
@@ -39,6 +39,7 @@
 
 
 
+
 import array
 import itertools
 import logging
diff --git a/google/appengine/datastore/datastore_stats_generator.py b/google/appengine/datastore/datastore_stats_generator.py
index 81ed3de..a5dbdb5 100644
--- a/google/appengine/datastore/datastore_stats_generator.py
+++ b/google/appengine/datastore/datastore_stats_generator.py
@@ -28,6 +28,7 @@
 
 
 
+
 import datetime
 import logging
 
diff --git a/google/appengine/datastore/datastore_stub_index.py b/google/appengine/datastore/datastore_stub_index.py
index e328bfd..7664231 100644
--- a/google/appengine/datastore/datastore_stub_index.py
+++ b/google/appengine/datastore/datastore_stub_index.py
@@ -28,6 +28,7 @@
 
 
 
+
 __all__ = ['GenerateIndexFromHistory',
            'IndexYamlUpdater',
           ]
diff --git a/google/appengine/datastore/datastore_stub_util.py b/google/appengine/datastore/datastore_stub_util.py
index 3e459d9..ae58a12 100644
--- a/google/appengine/datastore/datastore_stub_util.py
+++ b/google/appengine/datastore/datastore_stub_util.py
@@ -32,6 +32,7 @@
 
 
 
+
 try:
   import hashlib
   _MD5_FUNC = hashlib.md5
@@ -171,6 +172,16 @@
 _SCATTER_SHIFT = 64 - _MAX_SEQUENTIAL_BIT + 1
 
 
+_SHOULD_FAIL_ON_BAD_OFFSET = False
+
+def _HandleBadOffset(expected, actual):
+  logging.warn('Encountered an offset %d to Next but expected %d given the '
+               'query offset and the number of skipped entities.' %
+               (actual, expected))
+  if _SHOULD_FAIL_ON_BAD_OFFSET:
+    raise datastore_errors.BadArgumentError(
+        'Invalid offset provided. Got %s, expected %s.' % (actual, expected))
+
 def _GetScatterProperty(entity_proto):
   """Gets the scatter property for an object.
 
@@ -1010,6 +1021,17 @@
     self.app = query.app()
     self.cursor = self._AcquireCursorID()
 
+    if query.has_count():
+      count = query.count()
+    elif query.has_limit():
+      count = query.limit()
+    else:
+      count = BaseDatastore._BATCH_SIZE
+
+    self.__use_persisted_offset = query.persist_offset()
+    self.__persisted_offset = query.offset()
+    self.__persisted_count = count
+
     self.__order_compare_entities = dsquery._order.cmp_for_filter(
         dsquery._filter_predicate)
     if self.group_by:
@@ -1131,6 +1153,34 @@
       position.set_start_inclusive(False)
       _SetBeforeAscending(position, self.__first_sort_order)
 
+  def PopulateQueryResult(self, result, count, offset,
+                          compile=False, first_result=False):
+    """Populates a QueryResult with this cursor and the given number of results.
+
+    Args:
+      result: datastore_pb.QueryResult
+      count: integer of how many results to return, or None if not specified
+      offset: integer of how many results to skip
+      compile: boolean, whether we are compiling this query
+      first_result: whether the query result is the first for this query
+
+    Offset and count may be ignored if the query requested its offset to be
+    persisted (see persist_offset on the query).
+    """
+    if count is None:
+      count = BaseDatastore._BATCH_SIZE
+    if self.__use_persisted_offset:
+      offset = self.__persisted_offset
+      count = self.__persisted_count
+    elif self.__persisted_offset != offset:
+      _HandleBadOffset(self.__persisted_offset, offset)
+    self._PopulateQueryResult(result, count, offset,
+                              compile, first_result)
+    self.__persisted_offset -= result.skipped_results()
+
+  def _PopulateQueryResult(self, result, count, offset,
+                           compile, first_result):
+    raise NotImplementedError
 
 class ListCursor(BaseCursor):
   """A query cursor over a list of entities.
@@ -1214,17 +1264,7 @@
         hi = mid
     return lo
 
-  def PopulateQueryResult(self, result, count, offset,
-                          compile=False, first_result=False):
-    """Populates a QueryResult with this cursor and the given number of results.
-
-    Args:
-      result: datastore_pb.QueryResult
-      count: integer of how many results to return
-      offset: integer of how many results to skip
-      compile: boolean, whether we are compiling this query
-      first_result: whether the query result is the first for this query
-    """
+  def _PopulateQueryResult(self, result, count, offset, compile, first_result):
     Check(offset >= 0, 'Offset must be >= 0')
 
     offset = min(offset, self.__count - self.__offset)
@@ -2949,8 +2989,6 @@
     else:
       return os.path.isdir(web_inf)
 
-
-
   def Clear(self):
     """Clears out all stored values."""
     self._query_cursors = {}
@@ -3044,13 +3082,7 @@
     cursor = self._datastore.GetQueryCursor(query, self._trusted, self._app_id,
                                             filter_predicate)
 
-    if query.has_count():
-      count = query.count()
-    elif query.has_limit():
-      count = query.limit()
-    else:
-      count = self._BATCH_SIZE
-
+    count = query.count() if query.has_count() else None
     cursor.PopulateQueryResult(query_result, count, query.offset(),
                                query.compile(), first_result=True)
     if query_result.has_cursor():
@@ -3136,12 +3168,12 @@
     Check(cursor and cursor.app == app,
           'Cursor %d not found' % next_request.cursor().cursor())
 
-    count = self._BATCH_SIZE
-    if next_request.has_count():
-      count = next_request.count()
-
-    cursor.PopulateQueryResult(query_result, count, next_request.offset(),
-                               next_request.compile(), first_result=False)
+    count = next_request.count() if next_request.has_count() else None
+    cursor.PopulateQueryResult(query_result,
+                               count,
+                               next_request.offset(),
+                               next_request.compile(),
+                               first_result=False)
 
     if not query_result.has_cursor():
       del self._query_cursors[next_request.cursor().cursor()]
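
The PopulateQueryResult refactor above is a template method: the base cursor
now normalizes count (None falls back to the batch size), applies any
persisted offset and count, warns via _HandleBadOffset on a mismatch (raising
only when _SHOULD_FAIL_ON_BAD_OFFSET is set), and delegates the actual result
filling to _PopulateQueryResult, which subclasses such as ListCursor override.
A distilled sketch of that control flow, omitting the compile and first_result
plumbing:

    class SketchCursor(object):
      _BATCH_SIZE = 20  # stand-in for BaseDatastore._BATCH_SIZE

      def __init__(self, offset, count, persist_offset):
        self._persisted_offset = offset
        self._persisted_count = count
        self._use_persisted_offset = persist_offset

      def PopulateQueryResult(self, result, count, offset):
        if count is None:
          count = self._BATCH_SIZE
        if self._use_persisted_offset:
          offset = self._persisted_offset
          count = self._persisted_count
        # (the real code calls _HandleBadOffset when offset differs from
        # the persisted value)
        self._PopulateQueryResult(result, count, offset)
        self._persisted_offset -= result.skipped_results()

      def _PopulateQueryResult(self, result, count, offset):
        raise NotImplementedError  # e.g. ListCursor fills results here
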
diff --git a/google/appengine/datastore/datastore_v4_stub.py b/google/appengine/datastore/datastore_v4_stub.py
index 52cdb64..cec1484 100644
--- a/google/appengine/datastore/datastore_v4_stub.py
+++ b/google/appengine/datastore/datastore_v4_stub.py
@@ -31,6 +31,7 @@
 
 
 
+
 from google.appengine.datastore import entity_pb
 
 from google.appengine.api import api_base_pb
diff --git a/google/appengine/datastore/datastore_v4_validator.py b/google/appengine/datastore/datastore_v4_validator.py
index 403efea..082b95f 100644
--- a/google/appengine/datastore/datastore_v4_validator.py
+++ b/google/appengine/datastore/datastore_v4_validator.py
@@ -38,6 +38,7 @@
 
 
 
+
 import re
 
 from google.appengine.datastore import datastore_pbs
diff --git a/google/appengine/datastore/entity_pb.py b/google/appengine/datastore/entity_pb.py
index 7d47dae..0b7857d 100644
--- a/google/appengine/datastore/entity_pb.py
+++ b/google/appengine/datastore/entity_pb.py
@@ -3556,13 +3556,14 @@
 class SearchIndexEntry(ProtocolBuffer.ProtocolMessage):
   has_index_id_ = 0
   index_id_ = 0
+  has_write_division_family_ = 0
+  write_division_family_ = ""
   has_fingerprint_1999_ = 0
   fingerprint_1999_ = 0
   has_fingerprint_2011_ = 0
   fingerprint_2011_ = 0
 
   def __init__(self, contents=None):
-    self.division_family_ = []
     if contents is not None: self.MergeFromString(contents)
 
   def index_id(self): return self.index_id_
@@ -3578,20 +3579,18 @@
 
   def has_index_id(self): return self.has_index_id_
 
-  def division_family_size(self): return len(self.division_family_)
-  def division_family_list(self): return self.division_family_
+  def write_division_family(self): return self.write_division_family_
 
-  def division_family(self, i):
-    return self.division_family_[i]
+  def set_write_division_family(self, x):
+    self.has_write_division_family_ = 1
+    self.write_division_family_ = x
 
-  def set_division_family(self, i, x):
-    self.division_family_[i] = x
+  def clear_write_division_family(self):
+    if self.has_write_division_family_:
+      self.has_write_division_family_ = 0
+      self.write_division_family_ = ""
 
-  def add_division_family(self, x):
-    self.division_family_.append(x)
-
-  def clear_division_family(self):
-    self.division_family_ = []
+  def has_write_division_family(self): return self.has_write_division_family_
 
   def fingerprint_1999(self): return self.fingerprint_1999_
 
@@ -3623,7 +3622,7 @@
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_index_id()): self.set_index_id(x.index_id())
-    for i in xrange(x.division_family_size()): self.add_division_family(x.division_family(i))
+    if (x.has_write_division_family()): self.set_write_division_family(x.write_division_family())
     if (x.has_fingerprint_1999()): self.set_fingerprint_1999(x.fingerprint_1999())
     if (x.has_fingerprint_2011()): self.set_fingerprint_2011(x.fingerprint_2011())
 
@@ -3631,9 +3630,8 @@
     if x is self: return 1
     if self.has_index_id_ != x.has_index_id_: return 0
     if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
-    if len(self.division_family_) != len(x.division_family_): return 0
-    for e1, e2 in zip(self.division_family_, x.division_family_):
-      if e1 != e2: return 0
+    if self.has_write_division_family_ != x.has_write_division_family_: return 0
+    if self.has_write_division_family_ and self.write_division_family_ != x.write_division_family_: return 0
     if self.has_fingerprint_1999_ != x.has_fingerprint_1999_: return 0
     if self.has_fingerprint_1999_ and self.fingerprint_1999_ != x.fingerprint_1999_: return 0
     if self.has_fingerprint_2011_ != x.has_fingerprint_2011_: return 0
@@ -3646,40 +3644,43 @@
       initialized = 0
       if debug_strs is not None:
         debug_strs.append('Required field: index_id not set.')
+    if (not self.has_write_division_family_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: write_division_family not set.')
     return initialized
 
   def ByteSize(self):
     n = 0
     n += self.lengthVarInt64(self.index_id_)
-    n += 1 * len(self.division_family_)
-    for i in xrange(len(self.division_family_)): n += self.lengthString(len(self.division_family_[i]))
+    n += self.lengthString(len(self.write_division_family_))
     if (self.has_fingerprint_1999_): n += 9
     if (self.has_fingerprint_2011_): n += 9
-    return n + 1
+    return n + 2
 
   def ByteSizePartial(self):
     n = 0
     if (self.has_index_id_):
       n += 1
       n += self.lengthVarInt64(self.index_id_)
-    n += 1 * len(self.division_family_)
-    for i in xrange(len(self.division_family_)): n += self.lengthString(len(self.division_family_[i]))
+    if (self.has_write_division_family_):
+      n += 1
+      n += self.lengthString(len(self.write_division_family_))
     if (self.has_fingerprint_1999_): n += 9
     if (self.has_fingerprint_2011_): n += 9
     return n
 
   def Clear(self):
     self.clear_index_id()
-    self.clear_division_family()
+    self.clear_write_division_family()
     self.clear_fingerprint_1999()
     self.clear_fingerprint_2011()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(8)
     out.putVarInt64(self.index_id_)
-    for i in xrange(len(self.division_family_)):
-      out.putVarInt32(18)
-      out.putPrefixedString(self.division_family_[i])
+    out.putVarInt32(18)
+    out.putPrefixedString(self.write_division_family_)
     if (self.has_fingerprint_1999_):
       out.putVarInt32(25)
       out.put64(self.fingerprint_1999_)
@@ -3691,9 +3692,9 @@
     if (self.has_index_id_):
       out.putVarInt32(8)
       out.putVarInt64(self.index_id_)
-    for i in xrange(len(self.division_family_)):
+    if (self.has_write_division_family_):
       out.putVarInt32(18)
-      out.putPrefixedString(self.division_family_[i])
+      out.putPrefixedString(self.write_division_family_)
     if (self.has_fingerprint_1999_):
       out.putVarInt32(25)
       out.put64(self.fingerprint_1999_)
@@ -3708,7 +3709,7 @@
         self.set_index_id(d.getVarInt64())
         continue
       if tt == 18:
-        self.add_division_family(d.getPrefixedString())
+        self.set_write_division_family(d.getPrefixedString())
         continue
       if tt == 25:
         self.set_fingerprint_1999(d.get64())
@@ -3725,12 +3726,7 @@
   def __str__(self, prefix="", printElemNumber=0):
     res=""
     if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
-    cnt=0
-    for e in self.division_family_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("division_family%s: %s\n" % (elm, self.DebugFormatString(e)))
-      cnt+=1
+    if self.has_write_division_family_: res+=prefix+("write_division_family: %s\n" % self.DebugFormatString(self.write_division_family_))
     if self.has_fingerprint_1999_: res+=prefix+("fingerprint_1999: %s\n" % self.DebugFormatFixed64(self.fingerprint_1999_))
     if self.has_fingerprint_2011_: res+=prefix+("fingerprint_2011: %s\n" % self.DebugFormatFixed64(self.fingerprint_2011_))
     return res
@@ -3740,14 +3736,14 @@
     return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
 
   kindex_id = 1
-  kdivision_family = 2
+  kwrite_division_family = 2
   kfingerprint_1999 = 3
   kfingerprint_2011 = 4
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
     1: "index_id",
-    2: "division_family",
+    2: "write_division_family",
     3: "fingerprint_1999",
     4: "fingerprint_2011",
   }, 4)
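
In protobuf terms, the SearchIndexEntry change above replaces the repeated
division_family string field with a single required write_division_family
string on the same tag (2), so a single element serializes identically while
IsInitialized() now demands that the field be set. Migrating callers, using
only the accessors shown in this hunk:

    entry = SearchIndexEntry()
    entry.set_index_id(42)

    # Before: entry.add_division_family('x') appended to a repeated field.
    # Now the field is a single required string:
    entry.set_write_division_family('x')
    assert entry.has_write_division_family()
    assert entry.write_division_family() == 'x'

    # Clear() resets it again via clear_write_division_family().
    entry.Clear()
    assert not entry.has_write_division_family()
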
diff --git a/google/appengine/datastore/sortable_pb_encoder.py b/google/appengine/datastore/sortable_pb_encoder.py
index 5294d45..cca52b2 100644
--- a/google/appengine/datastore/sortable_pb_encoder.py
+++ b/google/appengine/datastore/sortable_pb_encoder.py
@@ -52,6 +52,7 @@
 
 
 
+
 import array
 import struct
 
diff --git a/google/appengine/dist/_library.py b/google/appengine/dist/_library.py
index 3d10db9..4a0f80f 100644
--- a/google/appengine/dist/_library.py
+++ b/google/appengine/dist/_library.py
@@ -36,6 +36,7 @@
 """
 
 
+
 __all__ = ['use_library']
 
 
diff --git a/google/appengine/dist/py_imp.py b/google/appengine/dist/py_imp.py
index e1ca09f..019aa35 100644
--- a/google/appengine/dist/py_imp.py
+++ b/google/appengine/dist/py_imp.py
@@ -21,6 +21,7 @@
 """Stub replacement for Python's imp module."""
 
 
+
 import os
 import sys
 
diff --git a/google/appengine/dist/py_select.py b/google/appengine/dist/py_select.py
index aa5df27..5fe20bd 100644
--- a/google/appengine/dist/py_select.py
+++ b/google/appengine/dist/py_select.py
@@ -21,4 +21,5 @@
 """This module supports asynchronous I/O on multiple file descriptors."""
 
 
+
 from google.appengine.api.remote_socket._remote_socket import select, error
diff --git a/google/appengine/dist/py_zipimport.py b/google/appengine/dist/py_zipimport.py
index a6e5743..950cff9 100644
--- a/google/appengine/dist/py_zipimport.py
+++ b/google/appengine/dist/py_zipimport.py
@@ -46,6 +46,7 @@
 """
 
 
+
 __all__ = ['ZipImportError', 'zipimporter']
 
 
diff --git a/google/appengine/dist/select.py b/google/appengine/dist/select.py
index aa5df27..5fe20bd 100644
--- a/google/appengine/dist/select.py
+++ b/google/appengine/dist/select.py
@@ -21,4 +21,5 @@
 """This module supports asynchronous I/O on multiple file descriptors."""
 
 
+
 from google.appengine.api.remote_socket._remote_socket import select, error
diff --git a/google/appengine/dist27/httplib.py b/google/appengine/dist27/httplib.py
index f6e547b..774df4f 100644
--- a/google/appengine/dist27/httplib.py
+++ b/google/appengine/dist27/httplib.py
@@ -1,6 +1,7 @@
 # Copyright 2013 Google Inc. All Rights Reserved.
 
 
+
 import os
 
 if 'GAE_USE_SOCKETS_HTTPLIB' in os.environ:
diff --git a/google/appengine/ext/admin/__init__.py b/google/appengine/ext/admin/__init__.py
index eba729b..a4903e7 100644
--- a/google/appengine/ext/admin/__init__.py
+++ b/google/appengine/ext/admin/__init__.py
@@ -28,6 +28,7 @@
 
 
 
+
 import cgi
 import collections
 import csv
diff --git a/google/appengine/ext/admin_redirect/main.py b/google/appengine/ext/admin_redirect/main.py
index bab40c1..3705eb4 100644
--- a/google/appengine/ext/admin_redirect/main.py
+++ b/google/appengine/ext/admin_redirect/main.py
@@ -26,6 +26,7 @@
 """
 
 
+
 import logging
 
 from google.appengine.ext import webapp
diff --git a/google/appengine/ext/analytics/main.py b/google/appengine/ext/analytics/main.py
index 2071848..c3e0996 100644
--- a/google/appengine/ext/analytics/main.py
+++ b/google/appengine/ext/analytics/main.py
@@ -21,6 +21,7 @@
 """Main script for appstats analytics."""
 
 
+
 import email.Utils
 try:
   import json
diff --git a/google/appengine/ext/analytics/process.py b/google/appengine/ext/analytics/process.py
index 6a7a324..b9a5fa7 100644
--- a/google/appengine/ext/analytics/process.py
+++ b/google/appengine/ext/analytics/process.py
@@ -27,6 +27,7 @@
 """
 
 
+
 try:
   import json
 except ImportError:
diff --git a/google/appengine/ext/analytics/standaloneapp.py b/google/appengine/ext/analytics/standaloneapp.py
index 2e3b8dd..60b5a95 100644
--- a/google/appengine/ext/analytics/standaloneapp.py
+++ b/google/appengine/ext/analytics/standaloneapp.py
@@ -21,6 +21,7 @@
 """Script which allows analytics to run as stand alone app."""
 
 
+
 from google.appengine.ext import webapp
 from google.appengine.ext.webapp.util import run_wsgi_app
 
diff --git a/google/appengine/ext/analytics/stats.py b/google/appengine/ext/analytics/stats.py
index b2ab92c..89212d8 100644
--- a/google/appengine/ext/analytics/stats.py
+++ b/google/appengine/ext/analytics/stats.py
@@ -27,6 +27,7 @@
 """
 
 
+
 import logging
 import entity
 
diff --git a/google/appengine/ext/appstats/formatting.py b/google/appengine/ext/appstats/formatting.py
index 1fa08b7..e47d14f 100644
--- a/google/appengine/ext/appstats/formatting.py
+++ b/google/appengine/ext/appstats/formatting.py
@@ -21,6 +21,7 @@
 """A fast but lossy, totally generic object formatter."""
 
 
+
 import os
 import types
 
diff --git a/google/appengine/ext/appstats/loader.py b/google/appengine/ext/appstats/loader.py
index aaed2d0..0898e57 100644
--- a/google/appengine/ext/appstats/loader.py
+++ b/google/appengine/ext/appstats/loader.py
@@ -28,6 +28,7 @@
 """
 
 
+
 import cPickle as pickle
 import logging
 import time
diff --git a/google/appengine/ext/appstats/recording.py b/google/appengine/ext/appstats/recording.py
index c365bce..eeb2e01 100644
--- a/google/appengine/ext/appstats/recording.py
+++ b/google/appengine/ext/appstats/recording.py
@@ -23,6 +23,7 @@
 from __future__ import with_statement
 
 
+
 import datetime
 import logging
 import os
diff --git a/google/appengine/ext/appstats/sample_appengine_config.py b/google/appengine/ext/appstats/sample_appengine_config.py
index a359f3e..b9f056a 100644
--- a/google/appengine/ext/appstats/sample_appengine_config.py
+++ b/google/appengine/ext/appstats/sample_appengine_config.py
@@ -32,6 +32,7 @@
 """
 
 
+
 import logging
 import os
 import random
diff --git a/google/appengine/ext/appstats/static/appstats_js.js b/google/appengine/ext/appstats/static/appstats_js.js
index 38d7be6..5e8ff0b 100644
--- a/google/appengine/ext/appstats/static/appstats_js.js
+++ b/google/appengine/ext/appstats/static/appstats_js.js
@@ -2,88 +2,89 @@
 typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},da=function(a){return"array"==ca(a)},ea=function(a){var b=ca(a);return"array"==b||"object"==b&&"number"==typeof a.length},m=function(a){return"string"==typeof a},n=function(a){return"function"==ca(a)},fa=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},ia=function(a){return a[ga]||
 (a[ga]=++ha)},ga="closure_uid_"+(1E9*Math.random()>>>0),ha=0,ja=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);return a.apply(this,b)}},ka=function(a,b){var c=a.split("."),d=l;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},p=function(a,b){function c(){}c.prototype=b.prototype;a.f=b.prototype;a.prototype=new c;a.prototype.constructor=a;a.nc=
 function(a,c,g){return b.prototype[c].apply(a,Array.prototype.slice.call(arguments,2))}};var la=function(a){if(Error.captureStackTrace)Error.captureStackTrace(this,la);else{var b=Error().stack;b&&(this.stack=b)}a&&(this.message=String(a))};p(la,Error);la.prototype.name="CustomError";var ma;var na=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},oa=String.prototype.trim?function(a){return a.trim()}:function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},wa=function(a){if(!pa.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(qa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(ra,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(sa,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(ta,"&quot;"));-1!=a.indexOf("'")&&
-(a=a.replace(ua,"&#39;"));-1!=a.indexOf("\x00")&&(a=a.replace(va,"&#0;"));return a},qa=/&/g,ra=/</g,sa=/>/g,ta=/"/g,ua=/'/g,va=/\x00/g,pa=/[\x00&<>"']/,xa=function(a,b){return a<b?-1:a>b?1:0};var ya=function(a,b){b.unshift(a);la.call(this,na.apply(null,b));b.shift()};p(ya,la);ya.prototype.name="AssertionError";var za=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=null);throw new ya(""+d,e||[]);},q=function(a,b,c){a||za("",b,Array.prototype.slice.call(arguments,2))},Aa=function(a,b,c,d){a instanceof b||za("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var r=Array.prototype,Ba=r.indexOf?function(a,b,c){q(null!=a.length);return r.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(m(a))return m(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},s=r.forEach?function(a,b,c){q(null!=a.length);r.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},Ca=r.filter?function(a,b,c){q(null!=a.length);return r.filter.call(a,b,
-c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=m(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var I=h[k];b.call(c,I,k,a)&&(e[g++]=I)}return e},Da=r.every?function(a,b,c){q(null!=a.length);return r.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},t=function(a,b){return 0<=Ba(a,b)},Ea=function(a,b){var c=Ba(a,b),d;if(d=0<=c)q(null!=a.length),r.splice.call(a,c,1);return d},Fa=function(a){var b=a.length;if(0<b){for(var c=
-Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ha=function(a,b,c,d){q(null!=a.length);r.splice.apply(a,Ga(arguments,1))},Ga=function(a,b,c){q(null!=a.length);return 2>=arguments.length?r.slice.call(a,b):r.slice.call(a,b,c)};var Ia=function(a){a=a.className;return m(a)&&a.match(/\S+/g)||[]},Ja=function(a,b){for(var c=Ia(a),d=Ga(arguments,1),e=c,g=0;g<d.length;g++)t(e,d[g])||e.push(d[g]);a.className=c.join(" ")},La=function(a,b){var c=Ia(a),d=Ga(arguments,1),c=Ka(c,d);a.className=c.join(" ")},Ka=function(a,b){return Ca(a,function(a){return!t(b,a)})};var Ma=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Na=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Oa=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Pa=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Qa="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ra=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Qa.length;g++)c=
-Qa[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var Sa;t:{var Ta=l.navigator;if(Ta){var Ua=Ta.userAgent;if(Ua){Sa=Ua;break t}}Sa=""}var u=function(a){return-1!=Sa.indexOf(a)};var Va=u("Opera")||u("OPR"),v=u("Trident")||u("MSIE"),w=u("Gecko")&&-1==Sa.toLowerCase().indexOf("webkit")&&!(u("Trident")||u("MSIE")),x=-1!=Sa.toLowerCase().indexOf("webkit"),Wa=l.navigator||null,y=-1!=(Wa&&Wa.platform||"").indexOf("Mac"),Xa=function(){var a=l.document;return a?a.documentMode:void 0},Ya=function(){var a="",b;if(Va&&l.opera)return a=l.opera.version,n(a)?a():a;w?b=/rv\:([^\);]+)(\)|;)/:v?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:x&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(Sa))?a[1]:"");return v&&
-(b=Xa(),b>parseFloat(a))?String(b):a}(),Za={},A=function(a){var b;if(!(b=Za[a])){b=0;for(var c=oa(String(Ya)).split("."),d=oa(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",I=RegExp("(\\d*)(\\D*)","g"),z=RegExp("(\\d*)(\\D*)","g");do{var K=I.exec(h)||["","",""],P=z.exec(k)||["","",""];if(0==K[0].length&&0==P[0].length)break;b=xa(0==K[1].length?0:parseInt(K[1],10),0==P[1].length?0:parseInt(P[1],10))||xa(0==K[2].length,0==P[2].length)||xa(K[2],P[2])}while(0==
-b)}b=Za[a]=0<=b}return b},$a=l.document,ab=$a&&v?Xa()||("CSS1Compat"==$a.compatMode?parseInt(Ya,10):5):void 0;var bb=!v||v&&9<=ab;!w&&!v||v&&v&&9<=ab||w&&A("1.9.1");var cb=v&&!A("9");var fb=function(a){return a?new db(eb(a)):ma||(ma=new db)},gb=function(a,b){return m(b)?a.getElementById(b):b},hb=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=h.className,
-"function"==typeof a.split&&t(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},jb=function(a,b){Ma(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in ib?a.setAttribute(ib[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},ib={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",
-valign:"vAlign",width:"width"},lb=function(a,b,c){return kb(document,arguments)},kb=function(a,b){var c=b[0],d=b[1];if(!bb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',wa(d.name),'"');if(d.type){c.push(' type="',wa(d.type),'"');var e={};Ra(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(m(d)?c.className=d:da(d)?c.className=d.join(" "):jb(c,d));2<b.length&&mb(a,c,b);return c},mb=function(a,b,c){function d(c){c&&b.appendChild(m(c)?a.createTextNode(c):c)}for(var e=
-2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(n(g)){h="function"==typeof g.item;break t}}h=!1}s(h?Fa(g):g,d)}}},nb=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},eb=function(a){q(a,"Node cannot be null or undefined.");
-return 9==a.nodeType?a:a.ownerDocument||a.document},ob=function(a,b){q(null!=a,"goog.dom.setTextContent expects a non-null value for node");if("textContent"in a)a.textContent=b;else if(3==a.nodeType)a.data=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);c=eb(a);a.appendChild(c.createTextNode(String(b)))}},pb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},qb={IMG:" ",BR:"\n"},
-tb=function(a){return rb(a)&&sb(a)},ub=function(a,b){b?a.tabIndex=0:(a.tabIndex=-1,a.removeAttribute("tabIndex"))},rb=function(a){a=a.getAttributeNode("tabindex");return null!=a&&a.specified},sb=function(a){a=a.tabIndex;return"number"==typeof a&&0<=a&&32768>a},vb=function(a,b,c){if(!(a.nodeName in pb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in qb)b.push(qb[a.nodeName]);else for(a=a.firstChild;a;)vb(a,b,c),a=a.nextSibling},db=
-function(a){this.Q=a||l.document||document};f=db.prototype;f.mb=fb;f.a=function(a){return gb(this.Q,a)};f.o=function(a,b,c){return kb(this.Q,arguments)};f.createElement=function(a){return this.Q.createElement(a)};f.createTextNode=function(a){return this.Q.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=nb;
-f.I=function(a){var b;(b="A"==a.tagName||"INPUT"==a.tagName||"TEXTAREA"==a.tagName||"SELECT"==a.tagName||"BUTTON"==a.tagName?!a.disabled&&(!rb(a)||sb(a)):tb(a))&&v?(a=n(a.getBoundingClientRect)?a.getBoundingClientRect():{height:a.offsetHeight,width:a.offsetWidth},a=null!=a&&0<a.height&&0<a.width):a=b;return a};var wb=function(a){wb[" "](a);return a};wb[" "]=aa;var xb=!v||v&&9<=ab,yb=!v||v&&9<=ab,zb=v&&!A("9");!x||A("528");w&&A("1.9b")||v&&A("8")||Va&&A("9.5")||x&&A("528");w&&!A("8")||v&&A("9");var Ab=function(){this.eb=this.eb;this.ic=this.ic};Ab.prototype.eb=!1;var B=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.$=!1;this.wb=!0};B.prototype.stopPropagation=function(){this.$=!0};B.prototype.preventDefault=function(){this.defaultPrevented=!0;this.wb=!1};var C=function(a,b){B.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.state=null;this.kb=!1;this.O=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(w){var e;t:{try{wb(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
+(a=a.replace(ua,"&#39;"));-1!=a.indexOf("\x00")&&(a=a.replace(va,"&#0;"));return a},qa=/&/g,ra=/</g,sa=/>/g,ta=/"/g,ua=/'/g,va=/\x00/g,pa=/[\x00&<>"']/,xa=function(a,b){return a<b?-1:a>b?1:0};var ya=function(a,b){b.unshift(a);la.call(this,na.apply(null,b));b.shift()};p(ya,la);ya.prototype.name="AssertionError";
+var za=function(a,b,c,d){var e="Assertion failed";if(c)var e=e+(": "+c),g=d;else a&&(e+=": "+a,g=b);throw new ya(""+e,g||[]);},q=function(a,b,c){a||za("",null,b,Array.prototype.slice.call(arguments,2))},Aa=function(a,b,c){fa(a)&&1==a.nodeType||za("Expected Element but got %s: %s.",[ca(a),a],b,Array.prototype.slice.call(arguments,2))},Ba=function(a,b,c,d){a instanceof b||za("instanceof check failed.",null,c,Array.prototype.slice.call(arguments,3))};var r=Array.prototype,Ca=r.indexOf?function(a,b,c){q(null!=a.length);return r.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(m(a))return m(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},s=r.forEach?function(a,b,c){q(null!=a.length);r.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},Da=r.filter?function(a,b,c){q(null!=a.length);return r.filter.call(a,b,
+c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=m(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var I=h[k];b.call(c,I,k,a)&&(e[g++]=I)}return e},Ea=r.every?function(a,b,c){q(null!=a.length);return r.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},t=function(a,b){return 0<=Ca(a,b)},Fa=function(a,b){var c=Ca(a,b),d;if(d=0<=c)q(null!=a.length),r.splice.call(a,c,1);return d},Ga=function(a){var b=a.length;if(0<b){for(var c=
+Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ia=function(a,b,c,d){q(null!=a.length);r.splice.apply(a,Ha(arguments,1))},Ha=function(a,b,c){q(null!=a.length);return 2>=arguments.length?r.slice.call(a,b):r.slice.call(a,b,c)};var Ja=function(a){a=a.className;return m(a)&&a.match(/\S+/g)||[]},Ka=function(a,b){for(var c=Ja(a),d=Ha(arguments,1),e=c,g=0;g<d.length;g++)t(e,d[g])||e.push(d[g]);a.className=c.join(" ")},Ma=function(a,b){var c=Ja(a),d=Ha(arguments,1),c=La(c,d);a.className=c.join(" ")},La=function(a,b){return Da(a,function(a){return!t(b,a)})};var Na=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Oa=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Pa=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Qa=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ra="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Sa=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Ra.length;g++)c=
+Ra[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var Ta;t:{var Ua=l.navigator;if(Ua){var Va=Ua.userAgent;if(Va){Ta=Va;break t}}Ta=""}var u=function(a){return-1!=Ta.indexOf(a)};var Wa=u("Opera")||u("OPR"),v=u("Trident")||u("MSIE"),w=u("Gecko")&&-1==Ta.toLowerCase().indexOf("webkit")&&!(u("Trident")||u("MSIE")),x=-1!=Ta.toLowerCase().indexOf("webkit"),Xa=l.navigator||null,y=-1!=(Xa&&Xa.platform||"").indexOf("Mac"),Ya=function(){var a=l.document;return a?a.documentMode:void 0},Za=function(){var a="",b;if(Wa&&l.opera)return a=l.opera.version,n(a)?a():a;w?b=/rv\:([^\);]+)(\)|;)/:v?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:x&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(Ta))?a[1]:"");return v&&
+(b=Ya(),b>parseFloat(a))?String(b):a}(),$a={},A=function(a){var b;if(!(b=$a[a])){b=0;for(var c=oa(String(Za)).split("."),d=oa(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",I=RegExp("(\\d*)(\\D*)","g"),z=RegExp("(\\d*)(\\D*)","g");do{var K=I.exec(h)||["","",""],P=z.exec(k)||["","",""];if(0==K[0].length&&0==P[0].length)break;b=xa(0==K[1].length?0:parseInt(K[1],10),0==P[1].length?0:parseInt(P[1],10))||xa(0==K[2].length,0==P[2].length)||xa(K[2],P[2])}while(0==
+b)}b=$a[a]=0<=b}return b},ab=l.document,bb=ab&&v?Ya()||("CSS1Compat"==ab.compatMode?parseInt(Za,10):5):void 0;var cb=!v||v&&9<=bb;!w&&!v||v&&v&&9<=bb||w&&A("1.9.1");var db=v&&!A("9");var gb=function(a){return a?new eb(fb(a)):ma||(ma=new eb)},hb=function(a,b){return m(b)?a.getElementById(b):b},ib=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=h.className,
+"function"==typeof a.split&&t(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},kb=function(a,b){Na(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in jb?a.setAttribute(jb[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},jb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",
+valign:"vAlign",width:"width"},mb=function(a,b,c){return lb(document,arguments)},lb=function(a,b){var c=b[0],d=b[1];if(!cb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',wa(d.name),'"');if(d.type){c.push(' type="',wa(d.type),'"');var e={};Sa(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(m(d)?c.className=d:da(d)?c.className=d.join(" "):kb(c,d));2<b.length&&nb(a,c,b);return c},nb=function(a,b,c){function d(c){c&&b.appendChild(m(c)?a.createTextNode(c):c)}for(var e=
+2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(n(g)){h="function"==typeof g.item;break t}}h=!1}s(h?Ga(g):g,d)}}},ob=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},fb=function(a){q(a,"Node cannot be null or undefined.");
+return 9==a.nodeType?a:a.ownerDocument||a.document},pb=function(a,b){q(null!=a,"goog.dom.setTextContent expects a non-null value for node");if("textContent"in a)a.textContent=b;else if(3==a.nodeType)a.data=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);c=fb(a);a.appendChild(c.createTextNode(String(b)))}},qb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},rb={IMG:" ",BR:"\n"},
+ub=function(a){return sb(a)&&tb(a)},vb=function(a,b){b?a.tabIndex=0:(a.tabIndex=-1,a.removeAttribute("tabIndex"))},sb=function(a){a=a.getAttributeNode("tabindex");return null!=a&&a.specified},tb=function(a){a=a.tabIndex;return"number"==typeof a&&0<=a&&32768>a},wb=function(a,b,c){if(!(a.nodeName in qb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in rb)b.push(rb[a.nodeName]);else for(a=a.firstChild;a;)wb(a,b,c),a=a.nextSibling},eb=
+function(a){this.Q=a||l.document||document};f=eb.prototype;f.mb=gb;f.a=function(a){return hb(this.Q,a)};f.o=function(a,b,c){return lb(this.Q,arguments)};f.createElement=function(a){return this.Q.createElement(a)};f.createTextNode=function(a){return this.Q.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=ob;
+f.I=function(a){var b;(b="A"==a.tagName||"INPUT"==a.tagName||"TEXTAREA"==a.tagName||"SELECT"==a.tagName||"BUTTON"==a.tagName?!a.disabled&&(!sb(a)||tb(a)):ub(a))&&v?(a=n(a.getBoundingClientRect)?a.getBoundingClientRect():{height:a.offsetHeight,width:a.offsetWidth},a=null!=a&&0<a.height&&0<a.width):a=b;return a};var xb=function(a){xb[" "](a);return a};xb[" "]=aa;var yb=!v||v&&9<=bb,zb=!v||v&&9<=bb,Ab=v&&!A("9");!x||A("528");w&&A("1.9b")||v&&A("8")||Wa&&A("9.5")||x&&A("528");w&&!A("8")||v&&A("9");var Bb=function(){this.eb=this.eb;this.ic=this.ic};Bb.prototype.eb=!1;var B=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.$=!1;this.wb=!0};B.prototype.stopPropagation=function(){this.$=!0};B.prototype.preventDefault=function(){this.defaultPrevented=!0;this.wb=!1};var C=function(a,b){B.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.state=null;this.kb=!1;this.O=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(w){var e;t:{try{xb(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
 c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=x||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=x||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=
-a.metaKey;this.kb=y?a.metaKey:a.ctrlKey;this.state=a.state;this.O=a;a.defaultPrevented&&this.preventDefault()}};p(C,B);var Bb=[1,4,2],Cb=function(a){return xb?0==a.O.button:"click"==a.type?!0:!!(a.O.button&Bb[0])};C.prototype.stopPropagation=function(){C.f.stopPropagation.call(this);this.O.stopPropagation?this.O.stopPropagation():this.O.cancelBubble=!0};
-C.prototype.preventDefault=function(){C.f.preventDefault.call(this);var a=this.O;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,zb)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Db="closure_listenable_"+(1E6*Math.random()|0),Eb=0;var Fb=function(a,b,c,d,e){this.W=a;this.Ea=null;this.src=b;this.type=c;this.Fa=!!d;this.Ha=e;this.key=++Eb;this.ha=this.Ga=!1},Gb=function(a){a.ha=!0;a.W=null;a.Ea=null;a.src=null;a.Ha=null};var D=function(a){this.src=a;this.m={};this.ua=0};D.prototype.add=function(a,b,c,d,e){var g=a.toString();a=this.m[g];a||(a=this.m[g]=[],this.ua++);var h=Hb(a,b,d,e);-1<h?(b=a[h],c||(b.Ga=!1)):(b=new Fb(b,this.src,g,!!d,e),b.Ga=c,a.push(b));return b};D.prototype.remove=function(a,b,c,d){a=a.toString();if(!(a in this.m))return!1;var e=this.m[a];b=Hb(e,b,c,d);return-1<b?(Gb(e[b]),q(null!=e.length),r.splice.call(e,b,1),0==e.length&&(delete this.m[a],this.ua--),!0):!1};
-var Ib=function(a,b){var c=b.type;if(!(c in a.m))return!1;var d=Ea(a.m[c],b);d&&(Gb(b),0==a.m[c].length&&(delete a.m[c],a.ua--));return d};D.prototype.ab=function(a){a=a&&a.toString();var b=0,c;for(c in this.m)if(!a||c==a){for(var d=this.m[c],e=0;e<d.length;e++)++b,Gb(d[e]);delete this.m[c];this.ua--}return b};D.prototype.wa=function(a,b,c,d){a=this.m[a.toString()];var e=-1;a&&(e=Hb(a,b,c,d));return-1<e?a[e]:null};
-var Hb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.ha&&g.W==b&&g.Fa==!!c&&g.Ha==d)return e}return-1};var Jb="closure_lm_"+(1E6*Math.random()|0),Kb={},Lb=0,E=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)E(a,b[g],c,d,e);return null}c=Mb(c);if(a&&a[Db])a=a.c(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,h=Nb(a);h||(a[Jb]=h=new D(a));c=h.add(b,c,!1,d,e);c.Ea||(d=Ob(),c.Ea=d,d.src=a,d.W=c,a.addEventListener?a.addEventListener(b.toString(),d,g):a.attachEvent(Pb(b.toString()),d),Lb++);a=c}return a},Ob=function(){var a=Qb,b=yb?function(c){return a.call(b.src,b.W,c)}:function(c){c=
-a.call(b.src,b.W,c);if(!c)return c};return b},Rb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Rb(a,b[g],c,d,e);else c=Mb(c),a&&a[Db]?a.u(b,c,d,e):a&&(a=Nb(a))&&(b=a.wa(b,c,!!d,e))&&F(b)},F=function(a){if("number"==typeof a||!a||a.ha)return!1;var b=a.src;if(b&&b[Db])return Ib(b.Y,a);var c=a.type,d=a.Ea;b.removeEventListener?b.removeEventListener(c,d,a.Fa):b.detachEvent&&b.detachEvent(Pb(c),d);Lb--;(c=Nb(b))?(Ib(c,a),0==c.ua&&(c.src=null,b[Jb]=null)):Gb(a);return!0},Pb=function(a){return a in
-Kb?Kb[a]:Kb[a]="on"+a},Tb=function(a,b,c,d){var e=1;if(a=Nb(a))if(b=a.m[b.toString()])for(b=b.concat(),a=0;a<b.length;a++){var g=b[a];g&&g.Fa==c&&!g.ha&&(e&=!1!==Sb(g,d))}return Boolean(e)},Sb=function(a,b){var c=a.W,d=a.Ha||a.src;a.Ga&&F(a);return c.call(d,b)},Qb=function(a,b){if(a.ha)return!0;if(!yb){var c;if(!(c=b))t:{c=["window","event"];for(var d=l,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new C(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==
-e.keyCode)try{e.keyCode=-1;break t}catch(h){g=!0}if(g||void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,k=e.length-1;!c.$&&0<=k;k--)c.currentTarget=e[k],d&=Tb(e[k],g,!0,c);for(k=0;!c.$&&k<e.length;k++)c.currentTarget=e[k],d&=Tb(e[k],g,!1,c)}return d}return Sb(a,new C(b,this))},Nb=function(a){a=a[Jb];return a instanceof D?a:null},Ub="__closure_events_fn_"+(1E9*Math.random()>>>0),Mb=function(a){q(a,"Listener can not be null.");if(n(a))return a;
-q(a.handleEvent,"An object listener must have handleEvent method.");a[Ub]||(a[Ub]=function(b){return a.handleEvent(b)});return a[Ub]};var G=function(a){Ab.call(this);this.Eb=a;this.Ma={}};p(G,Ab);var Vb=[];G.prototype.c=function(a,b,c,d){da(b)||(b&&(Vb[0]=b.toString()),b=Vb);for(var e=0;e<b.length;e++){var g=E(a,b[e],c||this.handleEvent,d||!1,this.Eb||this);if(!g)break;this.Ma[g.key]=g}return this};
-G.prototype.u=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.u(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.Eb||this,c=Mb(c),d=!!d,b=a&&a[Db]?a.wa(b,c,d,e):a?(a=Nb(a))?a.wa(b,c,d,e):null:null,b&&(F(b),delete this.Ma[b.key]);return this};G.prototype.ab=function(){Ma(this.Ma,F);this.Ma={}};G.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var H=function(){Ab.call(this);this.Y=new D(this);this.dc=this;this.nb=null};p(H,Ab);H.prototype[Db]=!0;f=H.prototype;f.hb=function(a){this.nb=a};f.addEventListener=function(a,b,c,d){E(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Rb(this,a,b,c,d)};
-f.dispatchEvent=function(a){Wb(this);var b,c=this.nb;if(c){b=[];for(var d=1;c;c=c.nb)b.push(c),q(1E3>++d,"infinite loop")}c=this.dc;d=a.type||a;if(m(a))a=new B(a,c);else if(a instanceof B)a.target=a.target||c;else{var e=a;a=new B(d,c);Ra(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.$&&0<=h;h--)g=a.currentTarget=b[h],e=Xb(g,d,!0,a)&&e;a.$||(g=a.currentTarget=c,e=Xb(g,d,!0,a)&&e,a.$||(e=Xb(g,d,!1,a)&&e));if(b)for(h=0;!a.$&&h<b.length;h++)g=a.currentTarget=b[h],e=Xb(g,d,!1,a)&&e;return e};
-f.c=function(a,b,c,d){Wb(this);return this.Y.add(String(a),b,!1,c,d)};f.u=function(a,b,c,d){return this.Y.remove(String(a),b,c,d)};var Xb=function(a,b,c,d){b=a.Y.m[String(b)];if(!b)return!0;b=b.concat();for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.ha&&h.Fa==c){var k=h.W,I=h.Ha||h.src;h.Ga&&Ib(a.Y,h);e=!1!==k.call(I,d)&&e}}return e&&0!=d.wb};H.prototype.wa=function(a,b,c,d){return this.Y.wa(String(a),b,c,d)};var Wb=function(a){q(a.Y,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var J=function(a,b){a.style.display=b?"":"none"},Yb=w?"MozUserSelect":x?"WebkitUserSelect":null,Zb=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(Yb){if(b=b?"none":"",a.style[Yb]=b,c){a=0;for(var d;d=c[a];a++)d.style[Yb]=b}}else if(v||Va)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var $b=function(){};ba($b);$b.prototype.gc=0;var L=function(a){H.call(this);this.A=a||fb();this.Ca=ac;this.da=null;this.g=!1;this.d=null;this.pa=void 0;this.F=this.q=this.p=null};p(L,H);L.prototype.fc=$b.ia();
-var ac=null,bc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");},cc=function(a){return a.da||(a.da=":"+(a.fc.gc++).toString(36))},dc=function(a,b){if(a.p&&a.p.F){var c=a.p.F,d=a.da;d in c&&delete c[d];Oa(a.p.F,b,a)}a.da=b};
-L.prototype.a=function(){return this.d};var ec=function(a){a.pa||(a.pa=new G(a));return a.pa},gc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.p&&a.da&&fc(a.p,a.da)&&a.p!=b)throw Error("Unable to set parent component");a.p=b;L.f.hb.call(a,b)};f=L.prototype;f.getParent=function(){return this.p};f.hb=function(a){if(this.p&&this.p!=a)throw Error("Method not supported");L.f.hb.call(this,a)};f.mb=function(){return this.A};f.o=function(){this.d=this.A.createElement("div")};
-f.K=function(a){if(this.g)throw Error("Component already rendered");if(a&&this.X(a)){var b=eb(a);this.A&&this.A.Q==b||(this.A=fb(a));this.Za(a);this.D()}else throw Error("Invalid element to decorate");};f.X=function(){return!0};f.Za=function(a){this.d=a};f.D=function(){this.g=!0;hc(this,function(a){!a.g&&a.a()&&a.D()})};f.ba=function(){hc(this,function(a){a.g&&a.ba()});this.pa&&this.pa.ab();this.g=!1};f.Da=function(a,b){this.Va(a,M(this),b)};
-f.Va=function(a,b,c){q(!!a,"Provided element must not be null.");if(a.g&&(c||!this.g))throw Error("Component already rendered");if(0>b||b>M(this))throw Error("Child component index out of bounds");this.F&&this.q||(this.F={},this.q=[]);if(a.getParent()==this){var d=cc(a);this.F[d]=a;Ea(this.q,a)}else Oa(this.F,cc(a),a);gc(a,this);Ha(this.q,b,0,a);if(a.g&&this.g&&a.getParent()==this)c=this.C(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.d||this.o();c=N(this,b+1);b=this.C();c=c?c.d:null;
+a.metaKey;this.kb=y?a.metaKey:a.ctrlKey;this.state=a.state;this.O=a;a.defaultPrevented&&this.preventDefault()}};p(C,B);var Cb=[1,4,2],Db=function(a){return yb?0==a.O.button:"click"==a.type?!0:!!(a.O.button&Cb[0])};C.prototype.stopPropagation=function(){C.f.stopPropagation.call(this);this.O.stopPropagation?this.O.stopPropagation():this.O.cancelBubble=!0};
+C.prototype.preventDefault=function(){C.f.preventDefault.call(this);var a=this.O;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,Ab)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Eb="closure_listenable_"+(1E6*Math.random()|0),Fb=0;var Gb=function(a,b,c,d,e){this.W=a;this.Ea=null;this.src=b;this.type=c;this.Fa=!!d;this.Ha=e;this.key=++Fb;this.ha=this.Ga=!1},Hb=function(a){a.ha=!0;a.W=null;a.Ea=null;a.src=null;a.Ha=null};var D=function(a){this.src=a;this.m={};this.ua=0};D.prototype.add=function(a,b,c,d,e){var g=a.toString();a=this.m[g];a||(a=this.m[g]=[],this.ua++);var h=Ib(a,b,d,e);-1<h?(b=a[h],c||(b.Ga=!1)):(b=new Gb(b,this.src,g,!!d,e),b.Ga=c,a.push(b));return b};D.prototype.remove=function(a,b,c,d){a=a.toString();if(!(a in this.m))return!1;var e=this.m[a];b=Ib(e,b,c,d);return-1<b?(Hb(e[b]),q(null!=e.length),r.splice.call(e,b,1),0==e.length&&(delete this.m[a],this.ua--),!0):!1};
+var Jb=function(a,b){var c=b.type;if(!(c in a.m))return!1;var d=Fa(a.m[c],b);d&&(Hb(b),0==a.m[c].length&&(delete a.m[c],a.ua--));return d};D.prototype.ab=function(a){a=a&&a.toString();var b=0,c;for(c in this.m)if(!a||c==a){for(var d=this.m[c],e=0;e<d.length;e++)++b,Hb(d[e]);delete this.m[c];this.ua--}return b};D.prototype.wa=function(a,b,c,d){a=this.m[a.toString()];var e=-1;a&&(e=Ib(a,b,c,d));return-1<e?a[e]:null};
+var Ib=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.ha&&g.W==b&&g.Fa==!!c&&g.Ha==d)return e}return-1};var Kb="closure_lm_"+(1E6*Math.random()|0),Lb={},Mb=0,E=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)E(a,b[g],c,d,e);return null}c=Nb(c);if(a&&a[Eb])a=a.c(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,h=Ob(a);h||(a[Kb]=h=new D(a));c=h.add(b,c,!1,d,e);c.Ea||(d=Pb(),c.Ea=d,d.src=a,d.W=c,a.addEventListener?a.addEventListener(b.toString(),d,g):a.attachEvent(Qb(b.toString()),d),Mb++);a=c}return a},Pb=function(){var a=Rb,b=zb?function(c){return a.call(b.src,b.W,c)}:function(c){c=
+a.call(b.src,b.W,c);if(!c)return c};return b},Sb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Sb(a,b[g],c,d,e);else c=Nb(c),a&&a[Eb]?a.u(b,c,d,e):a&&(a=Ob(a))&&(b=a.wa(b,c,!!d,e))&&F(b)},F=function(a){if("number"==typeof a||!a||a.ha)return!1;var b=a.src;if(b&&b[Eb])return Jb(b.Y,a);var c=a.type,d=a.Ea;b.removeEventListener?b.removeEventListener(c,d,a.Fa):b.detachEvent&&b.detachEvent(Qb(c),d);Mb--;(c=Ob(b))?(Jb(c,a),0==c.ua&&(c.src=null,b[Kb]=null)):Hb(a);return!0},Qb=function(a){return a in
+Lb?Lb[a]:Lb[a]="on"+a},Ub=function(a,b,c,d){var e=1;if(a=Ob(a))if(b=a.m[b.toString()])for(b=b.concat(),a=0;a<b.length;a++){var g=b[a];g&&g.Fa==c&&!g.ha&&(e&=!1!==Tb(g,d))}return Boolean(e)},Tb=function(a,b){var c=a.W,d=a.Ha||a.src;a.Ga&&F(a);return c.call(d,b)},Rb=function(a,b){if(a.ha)return!0;if(!zb){var c;if(!(c=b))t:{c=["window","event"];for(var d=l,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new C(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==
+e.keyCode)try{e.keyCode=-1;break t}catch(h){g=!0}if(g||void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,k=e.length-1;!c.$&&0<=k;k--)c.currentTarget=e[k],d&=Ub(e[k],g,!0,c);for(k=0;!c.$&&k<e.length;k++)c.currentTarget=e[k],d&=Ub(e[k],g,!1,c)}return d}return Tb(a,new C(b,this))},Ob=function(a){a=a[Kb];return a instanceof D?a:null},Vb="__closure_events_fn_"+(1E9*Math.random()>>>0),Nb=function(a){q(a,"Listener can not be null.");if(n(a))return a;
+q(a.handleEvent,"An object listener must have handleEvent method.");a[Vb]||(a[Vb]=function(b){return a.handleEvent(b)});return a[Vb]};var G=function(a){Bb.call(this);this.Eb=a;this.Ma={}};p(G,Bb);var Wb=[];G.prototype.c=function(a,b,c,d){da(b)||(b&&(Wb[0]=b.toString()),b=Wb);for(var e=0;e<b.length;e++){var g=E(a,b[e],c||this.handleEvent,d||!1,this.Eb||this);if(!g)break;this.Ma[g.key]=g}return this};
+G.prototype.u=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.u(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.Eb||this,c=Nb(c),d=!!d,b=a&&a[Eb]?a.wa(b,c,d,e):a?(a=Ob(a))?a.wa(b,c,d,e):null:null,b&&(F(b),delete this.Ma[b.key]);return this};G.prototype.ab=function(){Na(this.Ma,F);this.Ma={}};G.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var H=function(){Bb.call(this);this.Y=new D(this);this.dc=this;this.nb=null};p(H,Bb);H.prototype[Eb]=!0;f=H.prototype;f.hb=function(a){this.nb=a};f.addEventListener=function(a,b,c,d){E(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Sb(this,a,b,c,d)};
+f.dispatchEvent=function(a){Xb(this);var b,c=this.nb;if(c){b=[];for(var d=1;c;c=c.nb)b.push(c),q(1E3>++d,"infinite loop")}c=this.dc;d=a.type||a;if(m(a))a=new B(a,c);else if(a instanceof B)a.target=a.target||c;else{var e=a;a=new B(d,c);Sa(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.$&&0<=h;h--)g=a.currentTarget=b[h],e=Yb(g,d,!0,a)&&e;a.$||(g=a.currentTarget=c,e=Yb(g,d,!0,a)&&e,a.$||(e=Yb(g,d,!1,a)&&e));if(b)for(h=0;!a.$&&h<b.length;h++)g=a.currentTarget=b[h],e=Yb(g,d,!1,a)&&e;return e};
+f.c=function(a,b,c,d){Xb(this);return this.Y.add(String(a),b,!1,c,d)};f.u=function(a,b,c,d){return this.Y.remove(String(a),b,c,d)};var Yb=function(a,b,c,d){b=a.Y.m[String(b)];if(!b)return!0;b=b.concat();for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.ha&&h.Fa==c){var k=h.W,I=h.Ha||h.src;h.Ga&&Jb(a.Y,h);e=!1!==k.call(I,d)&&e}}return e&&0!=d.wb};H.prototype.wa=function(a,b,c,d){return this.Y.wa(String(a),b,c,d)};var Xb=function(a){q(a.Y,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var J=function(a,b){a.style.display=b?"":"none"},Zb=w?"MozUserSelect":x?"WebkitUserSelect":null,$b=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(Zb){if(b=b?"none":"",a.style[Zb]=b,c){a=0;for(var d;d=c[a];a++)d.style[Zb]=b}}else if(v||Wa)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var ac=function(){};ba(ac);ac.prototype.gc=0;var L=function(a){H.call(this);this.A=a||gb();this.Ca=bc;this.da=null;this.g=!1;this.d=null;this.pa=void 0;this.F=this.q=this.p=null};p(L,H);L.prototype.fc=ac.ia();
+var bc=null,cc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");},dc=function(a){return a.da||(a.da=":"+(a.fc.gc++).toString(36))},ec=function(a,b){if(a.p&&a.p.F){var c=a.p.F,d=a.da;d in c&&delete c[d];Pa(a.p.F,b,a)}a.da=b};
+L.prototype.a=function(){return this.d};var fc=function(a){a.pa||(a.pa=new G(a));return a.pa},hc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.p&&a.da&&gc(a.p,a.da)&&a.p!=b)throw Error("Unable to set parent component");a.p=b;L.f.hb.call(a,b)};f=L.prototype;f.getParent=function(){return this.p};f.hb=function(a){if(this.p&&this.p!=a)throw Error("Method not supported");L.f.hb.call(this,a)};f.mb=function(){return this.A};f.o=function(){this.d=this.A.createElement("div")};
+f.K=function(a){if(this.g)throw Error("Component already rendered");if(a&&this.X(a)){var b=fb(a);this.A&&this.A.Q==b||(this.A=gb(a));this.Za(a);this.D()}else throw Error("Invalid element to decorate");};f.X=function(){return!0};f.Za=function(a){this.d=a};f.D=function(){this.g=!0;ic(this,function(a){!a.g&&a.a()&&a.D()})};f.ba=function(){ic(this,function(a){a.g&&a.ba()});this.pa&&this.pa.ab();this.g=!1};f.Da=function(a,b){this.Va(a,M(this),b)};
+f.Va=function(a,b,c){q(!!a,"Provided element must not be null.");if(a.g&&(c||!this.g))throw Error("Component already rendered");if(0>b||b>M(this))throw Error("Child component index out of bounds");this.F&&this.q||(this.F={},this.q=[]);if(a.getParent()==this){var d=dc(a);this.F[d]=a;Fa(this.q,a)}else Pa(this.F,dc(a),a);hc(a,this);Ia(this.q,b,0,a);if(a.g&&this.g&&a.getParent()==this)c=this.C(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.d||this.o();c=N(this,b+1);b=this.C();c=c?c.d:null;
 if(a.g)throw Error("Component already rendered");a.d||a.o();b?b.insertBefore(a.d,c||null):a.A.Q.body.appendChild(a.d);a.p&&!a.p.g||a.D()}else this.g&&!a.g&&a.d&&a.d.parentNode&&1==a.d.parentNode.nodeType&&a.D()};f.C=function(){return this.d};
-var ic=function(a){if(null==a.Ca){var b=a.g?a.d:a.A.Q.body,c;t:{c=eb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(b,null))){c=c.direction||c.getPropertyValue("direction")||"";break t}c=""}a.Ca="rtl"==(c||(b.currentStyle?b.currentStyle.direction:null)||b.style&&b.style.direction)}return a.Ca};L.prototype.ra=function(a){if(this.g)throw Error("Component already rendered");this.Ca=a};
-var M=function(a){return a.q?a.q.length:0},fc=function(a,b){var c;a.F&&b?(c=a.F,c=(b in c?c[b]:void 0)||null):c=null;return c},N=function(a,b){return a.q?a.q[b]||null:null},hc=function(a,b,c){a.q&&s(a.q,b,c)},jc=function(a,b){return a.q&&b?Ba(a.q,b):-1};
-L.prototype.removeChild=function(a,b){if(a){var c=m(a)?a:cc(a);a=fc(this,c);if(c&&a){var d=this.F;c in d&&delete d[c];Ea(this.q,a);b&&(a.ba(),a.d&&(c=a.d)&&c.parentNode&&c.parentNode.removeChild(c));gc(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var kc,lc={oc:"activedescendant",tc:"atomic",uc:"autocomplete",wc:"busy",zc:"checked",Ec:"controls",Gc:"describedby",Jc:"disabled",Lc:"dropeffect",Mc:"expanded",Nc:"flowto",Pc:"grabbed",Tc:"haspopup",Vc:"hidden",Xc:"invalid",Yc:"label",Zc:"labelledby",$c:"level",ed:"live",pd:"multiline",qd:"multiselectable",ud:"orientation",vd:"owns",wd:"posinset",yd:"pressed",Cd:"readonly",Ed:"relevant",Fd:"required",Ld:"selected",Nd:"setsize",Pd:"sort",be:"valuemax",ce:"valuemin",de:"valuenow",ee:"valuetext"};var mc={pc:"alert",qc:"alertdialog",rc:"application",sc:"article",vc:"banner",xc:"button",yc:"checkbox",Ac:"columnheader",Bc:"combobox",Cc:"complementary",Dc:"contentinfo",Fc:"definition",Hc:"dialog",Ic:"directory",Kc:"document",Oc:"form",Qc:"grid",Rc:"gridcell",Sc:"group",Uc:"heading",Wc:"img",ad:"link",bd:"list",cd:"listbox",dd:"listitem",fd:"log",gd:"main",hd:"marquee",jd:"math",kd:"menu",ld:"menubar",md:"menuitem",nd:"menuitemcheckbox",od:"menuitemradio",rd:"navigation",sd:"note",td:"option",
-xd:"presentation",zd:"progressbar",Ad:"radio",Bd:"radiogroup",Dd:"region",Gd:"row",Hd:"rowgroup",Id:"rowheader",Jd:"scrollbar",Kd:"search",Md:"separator",Od:"slider",Qd:"spinbutton",Rd:"status",Sd:"tab",Td:"tablist",Ud:"tabpanel",Vd:"textbox",Wd:"timer",Xd:"toolbar",Yd:"tooltip",Zd:"tree",$d:"treegrid",ae:"treeitem"};var nc=function(a,b){b?(q(Na(mc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},pc=function(a,b,c){da(c)&&(c=c.join(" "));var d=oc(b);""===c||void 0==c?(kc||(kc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=kc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
-c)},oc=function(a){q(a,"ARIA attribute cannot be empty.");q(Na(lc,a),"No such ARIA attribute "+a);return"aria-"+a};var sc=function(a,b,c,d,e){if(!(v||x&&A("525")))return!0;if(y&&e)return qc(a);if(e&&!d)return!1;"number"==typeof b&&(b=rc(b));if(!c&&(17==b||18==b||y&&91==b))return!1;if(x&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(v&&d&&b==a)return!1;switch(a){case 13:return!0;case 27:return!x}return qc(a)},qc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||x&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
-default:return!1}},rc=function(a){if(w)a=tc(a);else if(y&&x)t:switch(a){case 93:a=91;break t}return a},tc=function(a){switch(a){case 61:return 187;case 59:return 186;case 173:return 189;case 224:return 91;case 0:return 224;default:return a}};var O=function(a,b){H.call(this);a&&uc(this,a,b)};p(O,H);f=O.prototype;f.d=null;f.Ia=null;f.Ya=null;f.Ja=null;f.r=-1;f.N=-1;f.lb=!1;
-var vc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},wc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},xc=v||x&&A("525"),yc=y&&w;
-O.prototype.Tb=function(a){x&&(17==this.r&&!a.ctrlKey||18==this.r&&!a.altKey||y&&91==this.r&&!a.metaKey)&&(this.N=this.r=-1);-1==this.r&&(a.ctrlKey&&17!=a.keyCode?this.r=17:a.altKey&&18!=a.keyCode?this.r=18:a.metaKey&&91!=a.keyCode&&(this.r=91));xc&&!sc(a.keyCode,this.r,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.N=rc(a.keyCode),yc&&(this.lb=a.altKey))};O.prototype.Ub=function(a){this.N=this.r=-1;this.lb=a.altKey};
-O.prototype.handleEvent=function(a){var b=a.O,c,d,e=b.altKey;v&&"keypress"==a.type?(c=this.N,d=13!=c&&27!=c?b.keyCode:0):x&&"keypress"==a.type?(c=this.N,d=0<=b.charCode&&63232>b.charCode&&qc(c)?b.charCode:0):Va?(c=this.N,d=qc(c)?b.keyCode:0):(c=b.keyCode||this.N,d=b.charCode||0,yc&&(e=this.lb),y&&63==d&&224==c&&(c=191));var g=c=rc(c),h=b.keyIdentifier;c?63232<=c&&c in vc?g=vc[c]:25==c&&a.shiftKey&&(g=9):h&&h in wc&&(g=wc[h]);a=g==this.r;this.r=g;b=new zc(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
-O.prototype.a=function(){return this.d};var uc=function(a,b,c){a.Ja&&a.detach();a.d=b;a.Ia=E(a.d,"keypress",a,c);a.Ya=E(a.d,"keydown",a.Tb,c,a);a.Ja=E(a.d,"keyup",a.Ub,c,a)};O.prototype.detach=function(){this.Ia&&(F(this.Ia),F(this.Ya),F(this.Ja),this.Ja=this.Ya=this.Ia=null);this.d=null;this.N=this.r=-1};var zc=function(a,b,c,d){C.call(this,d);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};p(zc,C);var Q=function(a){if(a.classList)return a.classList;a=a.className;return m(a)&&a.match(/\S+/g)||[]},Ac=function(a,b){return a.classList?a.classList.contains(b):t(Q(a),b)},Bc=function(a,b){a.classList?a.classList.add(b):Ac(a,b)||(a.className+=0<a.className.length?" "+b:b)},Cc=function(a,b){if(a.classList)s(b,function(b){Bc(a,b)});else{var c={};s(Q(a),function(a){c[a]=!0});s(b,function(a){c[a]=!0});a.className="";for(var d in c)a.className+=0<a.className.length?" "+d:d}},Dc=function(a,b){a.classList?
-a.classList.remove(b):Ac(a,b)&&(a.className=Ca(Q(a),function(a){return a!=b}).join(" "))},Ec=function(a,b){a.classList?s(b,function(b){Dc(a,b)}):a.className=Ca(Q(a),function(a){return!t(b,a)}).join(" ")};var Gc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!n(b))throw Error("Invalid decorator function "+b);Fc[a]=b},Hc={},Fc={};var R=function(a){this.Hb=a};ba(R);R.prototype.fa=function(){return this.Hb};var Ic=function(a,b){a&&(a.tabIndex=b?0:-1)};f=R.prototype;f.o=function(a){return a.mb().o("div",this.ta(a).join(" "))};f.C=function(a){return a};f.X=function(a){return"DIV"==a.tagName};f.K=function(a,b){b.id&&dc(a,b.id);var c=this.v(),d=!1,e=Q(b);e&&s(e,function(b){b==c?d=!0:b&&this.bb(a,b,c)},this);d||Bc(b,c);Jc(a,this.C(b));return b};
-f.bb=function(a,b,c){b==c+"-disabled"?a.ca(!1):b==c+"-horizontal"?Kc(a,"horizontal"):b==c+"-vertical"&&Kc(a,"vertical")};var Jc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{var g=c;e=void 0;q(g);for(var g=Q(g),h=0,k=g.length;h<k;h++)if(e=g[h],e=e in Fc?Fc[e]():null)break t;e=null}e&&(e.d=c,a.isEnabled()||e.ca(!1),a.Da(e),e.K(c))}else c.nodeValue&&""!=oa(c.nodeValue)||b.removeChild(c);c=d}};
-R.prototype.Oa=function(a){a=a.a();q(a,"The container DOM element cannot be null.");Zb(a,!0,w);v&&(a.hideFocus=!0);var b=this.fa();b&&nc(a,b)};R.prototype.j=function(a){return a.a()};R.prototype.v=function(){return"goog-container"};R.prototype.ta=function(a){var b=this.v(),c=[b,"horizontal"==a.L?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var S=function(){},Lc;ba(S);var Mc={button:"pressed",checkbox:"checked",menuitem:"selected",menuitemcheckbox:"checked",menuitemradio:"checked",radio:"checked",tab:"selected",treeitem:"selected"};f=S.prototype;f.fa=function(){};f.o=function(a){var b=a.mb().o("div",this.ta(a).join(" "),a.Ba);Nc(a,b);return b};f.C=function(a){return a};f.sa=function(a,b,c){if(a=a.a?a.a():a){var d=[b];v&&!A("7")&&(d=Oc(Q(a),b),d.push(b));(c?Cc:Ec)(a,d)}};f.X=function(){return!0};
-f.K=function(a,b){b.id&&dc(a,b.id);var c=this.C(b);c&&c.firstChild?Pc(a,c.firstChild.nextSibling?Fa(c.childNodes):c.firstChild):a.Ba=null;var d=0,e=this.v(),g=this.v(),h=!1,k=!1,I=!1,z=Fa(Q(b));s(z,function(a){h||a!=e?k||a!=g?d|=Qc(this,a):k=!0:(h=!0,g==e&&(k=!0));1==Qc(this,a)&&tb(c)&&ub(c,!1)},this);a.h=d;h||(z.push(e),g==e&&(k=!0));k||z.push(g);var K=a.G;K&&z.push.apply(z,K);if(v&&!A("7")){var P=Oc(z);0<P.length&&(z.push.apply(z,P),I=!0)}if(!h||!k||K||I)b.className=z.join(" ");Nc(a,b);return b};
-f.Oa=function(a){ic(a)&&this.ra(a.a(),!0);a.isEnabled()&&this.oa(a,a.s())};var Rc=function(a,b,c){if(a=c||a.fa())q(b,"The element passed as a first parameter cannot be null."),c=b.getAttribute("role")||null,a!=c&&nc(b,a)},Nc=function(a,b){q(a);q(b);a.s()||pc(b,"hidden",!a.s());a.isEnabled()||Sc(b,1,!a.isEnabled());a.l&8&&Sc(b,8,!!(a.h&8));a.l&16&&Sc(b,16,!!(a.h&16));a.l&64&&Sc(b,64,!!(a.h&64))};f=S.prototype;f.za=function(a,b){Zb(a,!b,!v&&!Va)};f.ra=function(a,b){this.sa(a,this.v()+"-rtl",b)};
-f.I=function(a){var b;return a.l&32&&(b=a.j())?tb(b):!1};f.oa=function(a,b){var c;if(a.l&32&&(c=a.j())){if(!b&&a.h&32){try{c.blur()}catch(d){}a.h&32&&a.ma(null)}tb(c)!=b&&ub(c,b)}};f.ka=function(a,b){J(a,b);a&&pc(a,"hidden",!b)};f.t=function(a,b,c){var d=a.a();if(d){var e=Tc(this,b);e&&this.sa(a,e,c);Sc(d,b,c)}};
-var Sc=function(a,b,c){Lc||(Lc={1:"disabled",8:"selected",16:"checked",64:"expanded"});q(a,"The element passed as a first parameter cannot be null.");b=Lc[b];var d=a.getAttribute("role")||null;d&&(d=Mc[d]||b,b="checked"==b||"selected"==b?d:b);b&&pc(a,b,c)};S.prototype.j=function(a){return a.a()};S.prototype.v=function(){return"goog-control"};
-S.prototype.ta=function(a){var b=this.v(),c=[b],d=this.v();d!=b&&c.push(d);b=a.h;for(d=[];b;){var e=b&-b;d.push(Tc(this,e));b&=~e}c.push.apply(c,d);(a=a.G)&&c.push.apply(c,a);v&&!A("7")&&c.push.apply(c,Oc(c));return c};
-var Oc=function(a,b){var c=[];b&&(a=a.concat([b]));s([],function(d){!Da(d,ja(t,a))||b&&!t(d,b)||c.push(d.join("_"))});return c},Tc=function(a,b){a.Na||Uc(a);return a.Na[b]},Qc=function(a,b){a.Gb||(a.Na||Uc(a),a.Gb=Pa(a.Na));var c=parseInt(a.Gb[b],10);return isNaN(c)?0:c},Uc=function(a){var b=a.v(),c=b.replace(/\xa0|\s/g," ");q(-1==c.indexOf(" "),"ControlRenderer has an invalid css class: '"+b+"'");a.Na={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+
-"-open"}};var T=function(a,b,c){L.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=ia(b);if(d=Hc[d])break;b=b.f?b.f.constructor:null}b=d?n(d.ia)?d.ia():new d:null}this.b=b;this.Ba=void 0!==a?a:null};p(T,L);f=T.prototype;f.Ba=null;f.h=0;f.l=39;f.ec=255;f.T=0;f.n=!0;f.G=null;f.Z=!0;f.xa=!1;f.sb=null;f.qb=function(){return this.Z};f.Pa=function(a){this.g&&a!=this.Z&&Vc(this,a);this.Z=a};f.j=function(){return this.b.j(this)};f.ya=function(){return this.ga||(this.ga=new O)};f.Ab=function(){return this.b};
-f.sa=function(a,b){b?a&&(this.G?t(this.G,a)||this.G.push(a):this.G=[a],this.b.sa(this,a,!0)):a&&this.G&&Ea(this.G,a)&&(0==this.G.length&&(this.G=null),this.b.sa(this,a,!1))};f.o=function(){var a=this.b.o(this);this.d=a;Rc(this.b,a,this.sb);this.xa||this.b.za(a,!1);this.s()||this.b.ka(a,!1)};f.C=function(){return this.b.C(this.a())};f.X=function(a){return this.b.X(a)};f.Za=function(a){this.d=a=this.b.K(this,a);Rc(this.b,a,this.sb);this.xa||this.b.za(a,!1);this.n="none"!=a.style.display};
-f.D=function(){T.f.D.call(this);this.b.Oa(this);if(this.l&-2&&(this.qb()&&Vc(this,!0),this.l&32)){var a=this.j();if(a){var b=this.ya();uc(b,a);ec(this).c(b,"key",this.J).c(a,"focus",this.na).c(a,"blur",this.ma)}}};
-var Vc=function(a,b){var c=ec(a),d=a.a();b?(c.c(d,"mouseover",a.Sa).c(d,"mousedown",a.la).c(d,"mouseup",a.Ta).c(d,"mouseout",a.Ra),a.qa!=aa&&c.c(d,"contextmenu",a.qa),v&&c.c(d,"dblclick",a.ub)):(c.u(d,"mouseover",a.Sa).u(d,"mousedown",a.la).u(d,"mouseup",a.Ta).u(d,"mouseout",a.Ra),a.qa!=aa&&c.u(d,"contextmenu",a.qa),v&&c.u(d,"dblclick",a.ub))};T.prototype.ba=function(){T.f.ba.call(this);this.ga&&this.ga.detach();this.s()&&this.isEnabled()&&this.b.oa(this,!1)};var Pc=function(a,b){a.Ba=b};f=T.prototype;
+var jc=function(a){if(null==a.Ca){var b=a.g?a.d:a.A.Q.body,c;t:{c=fb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(b,null))){c=c.direction||c.getPropertyValue("direction")||"";break t}c=""}a.Ca="rtl"==(c||(b.currentStyle?b.currentStyle.direction:null)||b.style&&b.style.direction)}return a.Ca};L.prototype.ra=function(a){if(this.g)throw Error("Component already rendered");this.Ca=a};
+var M=function(a){return a.q?a.q.length:0},gc=function(a,b){var c;a.F&&b?(c=a.F,c=(b in c?c[b]:void 0)||null):c=null;return c},N=function(a,b){return a.q?a.q[b]||null:null},ic=function(a,b,c){a.q&&s(a.q,b,c)},kc=function(a,b){return a.q&&b?Ca(a.q,b):-1};
+L.prototype.removeChild=function(a,b){if(a){var c=m(a)?a:dc(a);a=gc(this,c);if(c&&a){var d=this.F;c in d&&delete d[c];Fa(this.q,a);b&&(a.ba(),a.d&&(c=a.d)&&c.parentNode&&c.parentNode.removeChild(c));hc(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var lc,mc={oc:"activedescendant",tc:"atomic",uc:"autocomplete",wc:"busy",zc:"checked",Ec:"controls",Gc:"describedby",Jc:"disabled",Lc:"dropeffect",Mc:"expanded",Nc:"flowto",Pc:"grabbed",Tc:"haspopup",Vc:"hidden",Xc:"invalid",Yc:"label",Zc:"labelledby",$c:"level",ed:"live",pd:"multiline",qd:"multiselectable",ud:"orientation",vd:"owns",wd:"posinset",yd:"pressed",Cd:"readonly",Ed:"relevant",Fd:"required",Ld:"selected",Nd:"setsize",Pd:"sort",be:"valuemax",ce:"valuemin",de:"valuenow",ee:"valuetext"};var nc={pc:"alert",qc:"alertdialog",rc:"application",sc:"article",vc:"banner",xc:"button",yc:"checkbox",Ac:"columnheader",Bc:"combobox",Cc:"complementary",Dc:"contentinfo",Fc:"definition",Hc:"dialog",Ic:"directory",Kc:"document",Oc:"form",Qc:"grid",Rc:"gridcell",Sc:"group",Uc:"heading",Wc:"img",ad:"link",bd:"list",cd:"listbox",dd:"listitem",fd:"log",gd:"main",hd:"marquee",jd:"math",kd:"menu",ld:"menubar",md:"menuitem",nd:"menuitemcheckbox",od:"menuitemradio",rd:"navigation",sd:"note",td:"option",
+xd:"presentation",zd:"progressbar",Ad:"radio",Bd:"radiogroup",Dd:"region",Gd:"row",Hd:"rowgroup",Id:"rowheader",Jd:"scrollbar",Kd:"search",Md:"separator",Od:"slider",Qd:"spinbutton",Rd:"status",Sd:"tab",Td:"tablist",Ud:"tabpanel",Vd:"textbox",Wd:"timer",Xd:"toolbar",Yd:"tooltip",Zd:"tree",$d:"treegrid",ae:"treeitem"};var oc=function(a,b){b?(q(Oa(nc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},qc=function(a,b,c){da(c)&&(c=c.join(" "));var d=pc(b);""===c||void 0==c?(lc||(lc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=lc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
+c)},pc=function(a){q(a,"ARIA attribute cannot be empty.");q(Oa(mc,a),"No such ARIA attribute "+a);return"aria-"+a};var tc=function(a,b,c,d,e){if(!(v||x&&A("525")))return!0;if(y&&e)return rc(a);if(e&&!d)return!1;"number"==typeof b&&(b=sc(b));if(!c&&(17==b||18==b||y&&91==b))return!1;if(x&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(v&&d&&b==a)return!1;switch(a){case 13:return!0;case 27:return!x}return rc(a)},rc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||x&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
+default:return!1}},sc=function(a){if(w)a=uc(a);else if(y&&x)t:switch(a){case 93:a=91;break t}return a},uc=function(a){switch(a){case 61:return 187;case 59:return 186;case 173:return 189;case 224:return 91;case 0:return 224;default:return a}};var O=function(a,b){H.call(this);a&&vc(this,a,b)};p(O,H);f=O.prototype;f.d=null;f.Ia=null;f.Ya=null;f.Ja=null;f.r=-1;f.N=-1;f.lb=!1;
+var wc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},xc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},yc=v||x&&A("525"),zc=y&&w;
+O.prototype.Tb=function(a){x&&(17==this.r&&!a.ctrlKey||18==this.r&&!a.altKey||y&&91==this.r&&!a.metaKey)&&(this.N=this.r=-1);-1==this.r&&(a.ctrlKey&&17!=a.keyCode?this.r=17:a.altKey&&18!=a.keyCode?this.r=18:a.metaKey&&91!=a.keyCode&&(this.r=91));yc&&!tc(a.keyCode,this.r,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.N=sc(a.keyCode),zc&&(this.lb=a.altKey))};O.prototype.Ub=function(a){this.N=this.r=-1;this.lb=a.altKey};
+O.prototype.handleEvent=function(a){var b=a.O,c,d,e=b.altKey;v&&"keypress"==a.type?(c=this.N,d=13!=c&&27!=c?b.keyCode:0):x&&"keypress"==a.type?(c=this.N,d=0<=b.charCode&&63232>b.charCode&&rc(c)?b.charCode:0):Wa?(c=this.N,d=rc(c)?b.keyCode:0):(c=b.keyCode||this.N,d=b.charCode||0,zc&&(e=this.lb),y&&63==d&&224==c&&(c=191));var g=c=sc(c),h=b.keyIdentifier;c?63232<=c&&c in wc?g=wc[c]:25==c&&a.shiftKey&&(g=9):h&&h in xc&&(g=xc[h]);a=g==this.r;this.r=g;b=new Ac(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
+O.prototype.a=function(){return this.d};var vc=function(a,b,c){a.Ja&&a.detach();a.d=b;a.Ia=E(a.d,"keypress",a,c);a.Ya=E(a.d,"keydown",a.Tb,c,a);a.Ja=E(a.d,"keyup",a.Ub,c,a)};O.prototype.detach=function(){this.Ia&&(F(this.Ia),F(this.Ya),F(this.Ja),this.Ja=this.Ya=this.Ia=null);this.d=null;this.N=this.r=-1};var Ac=function(a,b,c,d){C.call(this,d);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};p(Ac,C);var Q=function(a){if(a.classList)return a.classList;a=a.className;return m(a)&&a.match(/\S+/g)||[]},Bc=function(a,b){return a.classList?a.classList.contains(b):t(Q(a),b)},Cc=function(a,b){a.classList?a.classList.add(b):Bc(a,b)||(a.className+=0<a.className.length?" "+b:b)},Dc=function(a,b){if(a.classList)s(b,function(b){Cc(a,b)});else{var c={};s(Q(a),function(a){c[a]=!0});s(b,function(a){c[a]=!0});a.className="";for(var d in c)a.className+=0<a.className.length?" "+d:d}},Ec=function(a,b){a.classList?
+a.classList.remove(b):Bc(a,b)&&(a.className=Da(Q(a),function(a){return a!=b}).join(" "))},Fc=function(a,b){a.classList?s(b,function(b){Ec(a,b)}):a.className=Da(Q(a),function(a){return!t(b,a)}).join(" ")};var Hc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!n(b))throw Error("Invalid decorator function "+b);Gc[a]=b},Ic={},Gc={};var R=function(a){this.Hb=a};ba(R);R.prototype.fa=function(){return this.Hb};var Jc=function(a,b){a&&(a.tabIndex=b?0:-1)};f=R.prototype;f.o=function(a){return a.mb().o("div",this.ta(a).join(" "))};f.C=function(a){return a};f.X=function(a){return"DIV"==a.tagName};f.K=function(a,b){b.id&&ec(a,b.id);var c=this.v(),d=!1,e=Q(b);e&&s(e,function(b){b==c?d=!0:b&&this.bb(a,b,c)},this);d||Cc(b,c);Kc(a,this.C(b));return b};
+f.bb=function(a,b,c){b==c+"-disabled"?a.ca(!1):b==c+"-horizontal"?Lc(a,"horizontal"):b==c+"-vertical"&&Lc(a,"vertical")};var Kc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{var g=c;e=void 0;q(g);for(var g=Q(g),h=0,k=g.length;h<k;h++)if(e=g[h],e=e in Gc?Gc[e]():null)break t;e=null}e&&(e.d=c,a.isEnabled()||e.ca(!1),a.Da(e),e.K(c))}else c.nodeValue&&""!=oa(c.nodeValue)||b.removeChild(c);c=d}};
+R.prototype.Oa=function(a){a=a.a();q(a,"The container DOM element cannot be null.");$b(a,!0,w);v&&(a.hideFocus=!0);var b=this.fa();b&&oc(a,b)};R.prototype.j=function(a){return a.a()};R.prototype.v=function(){return"goog-container"};R.prototype.ta=function(a){var b=this.v(),c=[b,"horizontal"==a.L?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var S=function(){},Mc;ba(S);var Nc={button:"pressed",checkbox:"checked",menuitem:"selected",menuitemcheckbox:"checked",menuitemradio:"checked",radio:"checked",tab:"selected",treeitem:"selected"};f=S.prototype;f.fa=function(){};f.o=function(a){var b=a.mb().o("div",this.ta(a).join(" "),a.Ba);Oc(a,b);return b};f.C=function(a){return a};f.sa=function(a,b,c){if(a=a.a?a.a():a){var d=[b];v&&!A("7")&&(d=Pc(Q(a),b),d.push(b));(c?Dc:Fc)(a,d)}};f.X=function(){return!0};
+f.K=function(a,b){b.id&&ec(a,b.id);var c=this.C(b);c&&c.firstChild?Qc(a,c.firstChild.nextSibling?Ga(c.childNodes):c.firstChild):a.Ba=null;var d=0,e=this.v(),g=this.v(),h=!1,k=!1,I=!1,z=Ga(Q(b));s(z,function(a){h||a!=e?k||a!=g?d|=Rc(this,a):k=!0:(h=!0,g==e&&(k=!0));1==Rc(this,a)&&(Aa(c),ub(c)&&vb(c,!1))},this);a.h=d;h||(z.push(e),g==e&&(k=!0));k||z.push(g);var K=a.G;K&&z.push.apply(z,K);if(v&&!A("7")){var P=Pc(z);0<P.length&&(z.push.apply(z,P),I=!0)}if(!h||!k||K||I)b.className=z.join(" ");Oc(a,b);
+return b};f.Oa=function(a){jc(a)&&this.ra(a.a(),!0);a.isEnabled()&&this.oa(a,a.s())};var Sc=function(a,b,c){if(a=c||a.fa())q(b,"The element passed as a first parameter cannot be null."),c=b.getAttribute("role")||null,a!=c&&oc(b,a)},Oc=function(a,b){q(a);q(b);a.s()||qc(b,"hidden",!a.s());a.isEnabled()||Tc(b,1,!a.isEnabled());a.l&8&&Tc(b,8,!!(a.h&8));a.l&16&&Tc(b,16,!!(a.h&16));a.l&64&&Tc(b,64,!!(a.h&64))};f=S.prototype;f.za=function(a,b){$b(a,!b,!v&&!Wa)};
+f.ra=function(a,b){this.sa(a,this.v()+"-rtl",b)};f.I=function(a){var b;return a.l&32&&(b=a.j())?ub(b):!1};f.oa=function(a,b){var c;if(a.l&32&&(c=a.j())){if(!b&&a.h&32){try{c.blur()}catch(d){}a.h&32&&a.ma(null)}ub(c)!=b&&vb(c,b)}};f.ka=function(a,b){J(a,b);a&&qc(a,"hidden",!b)};f.t=function(a,b,c){var d=a.a();if(d){var e=Uc(this,b);e&&this.sa(a,e,c);Tc(d,b,c)}};
+var Tc=function(a,b,c){Mc||(Mc={1:"disabled",8:"selected",16:"checked",64:"expanded"});q(a,"The element passed as a first parameter cannot be null.");b=Mc[b];var d=a.getAttribute("role")||null;d&&(d=Nc[d]||b,b="checked"==b||"selected"==b?d:b);b&&qc(a,b,c)};S.prototype.j=function(a){return a.a()};S.prototype.v=function(){return"goog-control"};
+S.prototype.ta=function(a){var b=this.v(),c=[b],d=this.v();d!=b&&c.push(d);b=a.h;for(d=[];b;){var e=b&-b;d.push(Uc(this,e));b&=~e}c.push.apply(c,d);(a=a.G)&&c.push.apply(c,a);v&&!A("7")&&c.push.apply(c,Pc(c));return c};
+var Pc=function(a,b){var c=[];b&&(a=a.concat([b]));s([],function(d){!Ea(d,ja(t,a))||b&&!t(d,b)||c.push(d.join("_"))});return c},Uc=function(a,b){a.Na||Vc(a);return a.Na[b]},Rc=function(a,b){a.Gb||(a.Na||Vc(a),a.Gb=Qa(a.Na));var c=parseInt(a.Gb[b],10);return isNaN(c)?0:c},Vc=function(a){var b=a.v(),c=b.replace(/\xa0|\s/g," ");q(-1==c.indexOf(" "),"ControlRenderer has an invalid css class: '"+b+"'");a.Na={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+
+"-open"}};var T=function(a,b,c){L.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=ia(b);if(d=Ic[d])break;b=b.f?b.f.constructor:null}b=d?n(d.ia)?d.ia():new d:null}this.b=b;this.Ba=void 0!==a?a:null};p(T,L);f=T.prototype;f.Ba=null;f.h=0;f.l=39;f.ec=255;f.T=0;f.n=!0;f.G=null;f.Z=!0;f.xa=!1;f.sb=null;f.qb=function(){return this.Z};f.Pa=function(a){this.g&&a!=this.Z&&Wc(this,a);this.Z=a};f.j=function(){return this.b.j(this)};f.ya=function(){return this.ga||(this.ga=new O)};f.Ab=function(){return this.b};
+f.sa=function(a,b){b?a&&(this.G?t(this.G,a)||this.G.push(a):this.G=[a],this.b.sa(this,a,!0)):a&&this.G&&Fa(this.G,a)&&(0==this.G.length&&(this.G=null),this.b.sa(this,a,!1))};f.o=function(){var a=this.b.o(this);this.d=a;Sc(this.b,a,this.sb);this.xa||this.b.za(a,!1);this.s()||this.b.ka(a,!1)};f.C=function(){return this.b.C(this.a())};f.X=function(a){return this.b.X(a)};f.Za=function(a){this.d=a=this.b.K(this,a);Sc(this.b,a,this.sb);this.xa||this.b.za(a,!1);this.n="none"!=a.style.display};
+f.D=function(){T.f.D.call(this);this.b.Oa(this);if(this.l&-2&&(this.qb()&&Wc(this,!0),this.l&32)){var a=this.j();if(a){var b=this.ya();vc(b,a);fc(this).c(b,"key",this.J).c(a,"focus",this.na).c(a,"blur",this.ma)}}};
+var Wc=function(a,b){var c=fc(a),d=a.a();b?(c.c(d,"mouseover",a.Sa).c(d,"mousedown",a.la).c(d,"mouseup",a.Ta).c(d,"mouseout",a.Ra),a.qa!=aa&&c.c(d,"contextmenu",a.qa),v&&c.c(d,"dblclick",a.ub)):(c.u(d,"mouseover",a.Sa).u(d,"mousedown",a.la).u(d,"mouseup",a.Ta).u(d,"mouseout",a.Ra),a.qa!=aa&&c.u(d,"contextmenu",a.qa),v&&c.u(d,"dblclick",a.ub))};T.prototype.ba=function(){T.f.ba.call(this);this.ga&&this.ga.detach();this.s()&&this.isEnabled()&&this.b.oa(this,!1)};var Qc=function(a,b){a.Ba=b};f=T.prototype;
 f.ra=function(a){T.f.ra.call(this,a);var b=this.a();b&&this.b.ra(b,a)};f.za=function(a){this.xa=a;var b=this.a();b&&this.b.za(b,a)};f.s=function(){return this.n};f.ka=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ka(c,a);this.isEnabled()&&this.b.oa(this,a);this.n=a;return!0}return!1};f.isEnabled=function(){return!(this.h&1)};
-f.ca=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!U(this,1,!a)||(a||(this.setActive(!1),this.B(!1)),this.s()&&this.b.oa(this,a),this.t(1,!a,!0))};f.B=function(a){U(this,2,a)&&this.t(2,a)};f.setActive=function(a){U(this,4,a)&&this.t(4,a)};var Wc=function(a,b){U(a,8,b)&&a.t(8,b)},Xc=function(a,b){U(a,64,b)&&a.t(64,b)};T.prototype.t=function(a,b,c){c||1!=a?this.l&a&&b!=!!(this.h&a)&&(this.b.t(this,a,b),this.h=b?this.h|a:this.h&~a):this.ca(!b)};
-var Yc=function(a,b,c){if(a.g&&a.h&b&&!c)throw Error("Component already rendered");!c&&a.h&b&&a.t(b,!1);a.l=c?a.l|b:a.l&~b},V=function(a,b){return!!(a.ec&b)&&!!(a.l&b)},U=function(a,b,c){return!!(a.l&b)&&!!(a.h&b)!=c&&(!(a.T&b)||a.dispatchEvent(bc(b,c)))&&!a.eb};f=T.prototype;f.Sa=function(a){(!a.relatedTarget||!nb(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&V(this,2)&&this.B(!0)};
-f.Ra=function(a){a.relatedTarget&&nb(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(V(this,4)&&this.setActive(!1),V(this,2)&&this.B(!1))};f.qa=aa;f.la=function(a){this.isEnabled()&&(V(this,2)&&this.B(!0),!Cb(a)||x&&y&&a.ctrlKey||(V(this,4)&&this.setActive(!0),this.b.I(this)&&this.j().focus()));this.xa||!Cb(a)||x&&y&&a.ctrlKey||a.preventDefault()};f.Ta=function(a){this.isEnabled()&&(V(this,2)&&this.B(!0),this.h&4&&Zc(this,a)&&V(this,4)&&this.setActive(!1))};
-f.ub=function(a){this.isEnabled()&&Zc(this,a)};var Zc=function(a,b){if(V(a,16)){var c=!(a.h&16);U(a,16,c)&&a.t(16,c)}V(a,8)&&Wc(a,!0);V(a,64)&&Xc(a,!(a.h&64));c=new B("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.kb=b.kb);return a.dispatchEvent(c)};T.prototype.na=function(){V(this,32)&&U(this,32,!0)&&this.t(32,!0)};T.prototype.ma=function(){V(this,4)&&this.setActive(!1);V(this,32)&&U(this,32,!1)&&this.t(32,!1)};
-T.prototype.J=function(a){return this.s()&&this.isEnabled()&&this.ob(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};T.prototype.ob=function(a){return 13==a.keyCode&&Zc(this,a)};if(!n(T))throw Error("Invalid component class "+T);if(!n(S))throw Error("Invalid renderer class "+S);var $c=ia(T);Hc[$c]=S;Gc("goog-control",function(){return new T(null)});var W=function(a,b,c){L.call(this,c);this.b=b||R.ia();this.L=a||"vertical"};p(W,L);f=W.prototype;f.vb=null;f.ga=null;f.b=null;f.L=null;f.n=!0;f.U=!0;f.$a=!0;f.e=-1;f.i=null;f.aa=!1;f.Sb=!1;f.Rb=!0;f.M=null;f.j=function(){return this.vb||this.b.j(this)};f.ya=function(){return this.ga||(this.ga=new O(this.j()))};f.Ab=function(){return this.b};f.o=function(){this.d=this.b.o(this)};f.C=function(){return this.b.C(this.a())};f.X=function(a){return this.b.X(a)};
-f.Za=function(a){this.d=this.b.K(this,a);"none"==a.style.display&&(this.n=!1)};f.D=function(){W.f.D.call(this);hc(this,function(a){a.g&&ad(this,a)},this);var a=this.a();this.b.Oa(this);this.ka(this.n,!0);ec(this).c(this,"enter",this.Lb).c(this,"highlight",this.Mb).c(this,"unhighlight",this.Ob).c(this,"open",this.Nb).c(this,"close",this.Jb).c(a,"mousedown",this.la).c(eb(a),"mouseup",this.Kb).c(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Ib);this.I()&&bd(this,!0)};
-var bd=function(a,b){var c=ec(a),d=a.j();b?c.c(d,"focus",a.na).c(d,"blur",a.ma).c(a.ya(),"key",a.J):c.u(d,"focus",a.na).u(d,"blur",a.ma).u(a.ya(),"key",a.J)};f=W.prototype;f.ba=function(){cd(this,-1);this.i&&Xc(this.i,!1);this.aa=!1;W.f.ba.call(this)};f.Lb=function(){return!0};
-f.Mb=function(a){var b=jc(this,a.target);if(-1<b&&b!=this.e){var c=N(this,this.e);c&&c.B(!1);this.e=b;c=N(this,this.e);this.aa&&c.setActive(!0);this.Rb&&this.i&&c!=this.i&&(c.l&64?Xc(c,!0):Xc(this.i,!1))}b=this.a();q(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&pc(b,"activedescendant",a.target.a().id)};f.Ob=function(a){a.target==N(this,this.e)&&(this.e=-1);a=this.a();q(a,"The DOM element for the container cannot be null.");a.removeAttribute(oc("activedescendant"))};
-f.Nb=function(a){(a=a.target)&&a!=this.i&&a.getParent()==this&&(this.i&&Xc(this.i,!1),this.i=a)};f.Jb=function(a){a.target==this.i&&(this.i=null)};f.la=function(a){this.U&&(this.aa=!0);var b=this.j();b&&tb(b)?b.focus():a.preventDefault()};f.Kb=function(){this.aa=!1};
-f.Ib=function(a){var b;t:{b=a.target;if(this.M)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.M){b=this.M[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.la(a);break;case "mouseup":b.Ta(a);break;case "mouseover":b.Sa(a);break;case "mouseout":b.Ra(a);break;case "contextmenu":b.qa(a)}};f.na=function(){};f.ma=function(){cd(this,-1);this.aa=!1;this.i&&Xc(this.i,!1)};
+f.ca=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!U(this,1,!a)||(a||(this.setActive(!1),this.B(!1)),this.s()&&this.b.oa(this,a),this.t(1,!a,!0))};f.B=function(a){U(this,2,a)&&this.t(2,a)};f.setActive=function(a){U(this,4,a)&&this.t(4,a)};var Xc=function(a,b){U(a,8,b)&&a.t(8,b)},Yc=function(a,b){U(a,64,b)&&a.t(64,b)};T.prototype.t=function(a,b,c){c||1!=a?this.l&a&&b!=!!(this.h&a)&&(this.b.t(this,a,b),this.h=b?this.h|a:this.h&~a):this.ca(!b)};
+var Zc=function(a,b,c){if(a.g&&a.h&b&&!c)throw Error("Component already rendered");!c&&a.h&b&&a.t(b,!1);a.l=c?a.l|b:a.l&~b},V=function(a,b){return!!(a.ec&b)&&!!(a.l&b)},U=function(a,b,c){return!!(a.l&b)&&!!(a.h&b)!=c&&(!(a.T&b)||a.dispatchEvent(cc(b,c)))&&!a.eb};f=T.prototype;f.Sa=function(a){(!a.relatedTarget||!ob(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&V(this,2)&&this.B(!0)};
+f.Ra=function(a){a.relatedTarget&&ob(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(V(this,4)&&this.setActive(!1),V(this,2)&&this.B(!1))};f.qa=aa;f.la=function(a){this.isEnabled()&&(V(this,2)&&this.B(!0),!Db(a)||x&&y&&a.ctrlKey||(V(this,4)&&this.setActive(!0),this.b.I(this)&&this.j().focus()));this.xa||!Db(a)||x&&y&&a.ctrlKey||a.preventDefault()};f.Ta=function(a){this.isEnabled()&&(V(this,2)&&this.B(!0),this.h&4&&$c(this,a)&&V(this,4)&&this.setActive(!1))};
+f.ub=function(a){this.isEnabled()&&$c(this,a)};var $c=function(a,b){if(V(a,16)){var c=!(a.h&16);U(a,16,c)&&a.t(16,c)}V(a,8)&&Xc(a,!0);V(a,64)&&Yc(a,!(a.h&64));c=new B("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.kb=b.kb);return a.dispatchEvent(c)};T.prototype.na=function(){V(this,32)&&U(this,32,!0)&&this.t(32,!0)};T.prototype.ma=function(){V(this,4)&&this.setActive(!1);V(this,32)&&U(this,32,!1)&&this.t(32,!1)};
+T.prototype.J=function(a){return this.s()&&this.isEnabled()&&this.ob(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};T.prototype.ob=function(a){return 13==a.keyCode&&$c(this,a)};if(!n(T))throw Error("Invalid component class "+T);if(!n(S))throw Error("Invalid renderer class "+S);var ad=ia(T);Ic[ad]=S;Hc("goog-control",function(){return new T(null)});var W=function(a,b,c){L.call(this,c);this.b=b||R.ia();this.L=a||"vertical"};p(W,L);f=W.prototype;f.vb=null;f.ga=null;f.b=null;f.L=null;f.n=!0;f.U=!0;f.$a=!0;f.e=-1;f.i=null;f.aa=!1;f.Sb=!1;f.Rb=!0;f.M=null;f.j=function(){return this.vb||this.b.j(this)};f.ya=function(){return this.ga||(this.ga=new O(this.j()))};f.Ab=function(){return this.b};f.o=function(){this.d=this.b.o(this)};f.C=function(){return this.b.C(this.a())};f.X=function(a){return this.b.X(a)};
+f.Za=function(a){this.d=this.b.K(this,a);"none"==a.style.display&&(this.n=!1)};f.D=function(){W.f.D.call(this);ic(this,function(a){a.g&&bd(this,a)},this);var a=this.a();this.b.Oa(this);this.ka(this.n,!0);fc(this).c(this,"enter",this.Lb).c(this,"highlight",this.Mb).c(this,"unhighlight",this.Ob).c(this,"open",this.Nb).c(this,"close",this.Jb).c(a,"mousedown",this.la).c(fb(a),"mouseup",this.Kb).c(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Ib);this.I()&&cd(this,!0)};
+var cd=function(a,b){var c=fc(a),d=a.j();b?c.c(d,"focus",a.na).c(d,"blur",a.ma).c(a.ya(),"key",a.J):c.u(d,"focus",a.na).u(d,"blur",a.ma).u(a.ya(),"key",a.J)};f=W.prototype;f.ba=function(){dd(this,-1);this.i&&Yc(this.i,!1);this.aa=!1;W.f.ba.call(this)};f.Lb=function(){return!0};
+f.Mb=function(a){var b=kc(this,a.target);if(-1<b&&b!=this.e){var c=N(this,this.e);c&&c.B(!1);this.e=b;c=N(this,this.e);this.aa&&c.setActive(!0);this.Rb&&this.i&&c!=this.i&&(c.l&64?Yc(c,!0):Yc(this.i,!1))}b=this.a();q(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&qc(b,"activedescendant",a.target.a().id)};f.Ob=function(a){a.target==N(this,this.e)&&(this.e=-1);a=this.a();q(a,"The DOM element for the container cannot be null.");a.removeAttribute(pc("activedescendant"))};
+f.Nb=function(a){(a=a.target)&&a!=this.i&&a.getParent()==this&&(this.i&&Yc(this.i,!1),this.i=a)};f.Jb=function(a){a.target==this.i&&(this.i=null)};f.la=function(a){this.U&&(this.aa=!0);var b=this.j();b&&ub(b)?b.focus():a.preventDefault()};f.Kb=function(){this.aa=!1};
+f.Ib=function(a){var b;t:{b=a.target;if(this.M)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.M){b=this.M[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.la(a);break;case "mouseup":b.Ta(a);break;case "mouseover":b.Sa(a);break;case "mouseout":b.Ra(a);break;case "contextmenu":b.qa(a)}};f.na=function(){};f.ma=function(){dd(this,-1);this.aa=!1;this.i&&Yc(this.i,!1)};
 f.J=function(a){return this.isEnabled()&&this.s()&&(0!=M(this)||this.vb)&&this.ob(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
-f.ob=function(a){var b=N(this,this.e);if(b&&"function"==typeof b.J&&b.J(a)||this.i&&this.i!=b&&"function"==typeof this.i.J&&this.i.J(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.I())this.j().blur();else return!1;break;case 36:dd(this);break;case 35:ed(this);break;case 38:if("vertical"==this.L)fd(this);else return!1;break;case 37:if("horizontal"==this.L)ic(this)?gd(this):fd(this);else return!1;break;case 40:if("vertical"==this.L)gd(this);else return!1;
-break;case 39:if("horizontal"==this.L)ic(this)?fd(this):gd(this);else return!1;break;default:return!1}return!0};var ad=function(a,b){var c=b.a(),c=c.id||(c.id=cc(b));a.M||(a.M={});a.M[c]=b};W.prototype.Da=function(a,b){Aa(a,T,"The child of a container must be a control");W.f.Da.call(this,a,b)};
-W.prototype.Va=function(a,b,c){Aa(a,T);a.T|=2;a.T|=64;!this.I()&&this.Sb||Yc(a,32,!1);a.Pa(!1);var d=a.getParent()==this?jc(this,a):-1;W.f.Va.call(this,a,b,c);a.g&&this.g&&ad(this,a);a=d;-1==a&&(a=M(this));a==this.e?this.e=Math.min(M(this)-1,b):a>this.e&&b<=this.e?this.e++:a<this.e&&b>this.e&&this.e--};
-W.prototype.removeChild=function(a,b){a=m(a)?fc(this,a):a;Aa(a,T);if(a){var c=jc(this,a);-1!=c&&(c==this.e?(a.B(!1),this.e=-1):c<this.e&&this.e--);var d=a.a();d&&d.id&&this.M&&(c=this.M,d=d.id,d in c&&delete c[d])}a=W.f.removeChild.call(this,a,b);a.Pa(!0);return a};var Kc=function(a,b){if(a.a())throw Error("Component already rendered");a.L=b};f=W.prototype;f.s=function(){return this.n};
-f.ka=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){this.n=a;var c=this.a();c&&(J(c,a),this.I()&&Ic(this.j(),this.U&&this.n),b||this.dispatchEvent(this.n?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.U};f.ca=function(a){this.U!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.U=!0,hc(this,function(a){a.xb?delete a.xb:a.ca(!0)})):(hc(this,function(a){a.isEnabled()?a.ca(!1):a.xb=!0}),this.aa=this.U=!1),this.I()&&Ic(this.j(),a&&this.n))};
-f.I=function(){return this.$a};f.oa=function(a){a!=this.$a&&this.g&&bd(this,a);this.$a=a;this.U&&this.n&&Ic(this.j(),a)};var cd=function(a,b){var c=N(a,b);c?c.B(!0):-1<a.e&&N(a,a.e).B(!1)};W.prototype.B=function(a){cd(this,jc(this,a))};
-var dd=function(a){hd(a,function(a,c){return(a+1)%c},M(a)-1)},ed=function(a){hd(a,function(a,c){a--;return 0>a?c-1:a},0)},gd=function(a){hd(a,function(a,c){return(a+1)%c},a.e)},fd=function(a){hd(a,function(a,c){a--;return 0>a?c-1:a},a.e)},hd=function(a,b,c){c=0>c?jc(a,a.i):c;var d=M(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=N(a,c);if(g&&g.s()&&g.isEnabled()&&g.l&2){a.Wa(c);break}e++;c=b.call(a,c,d)}};W.prototype.Wa=function(a){cd(this,a)};var id=function(){};p(id,S);ba(id);f=id.prototype;f.v=function(){return"goog-tab"};f.fa=function(){return"tab"};f.o=function(a){var b=id.f.o.call(this,a);(a=a.Ua())&&this.Xa(b,a);return b};f.K=function(a,b){b=id.f.K.call(this,a,b);var c=this.Ua(b);c&&(a.tb=c);a.h&8&&(c=a.getParent())&&n(c.V)&&(a.t(8,!1),c.V(a));return b};f.Ua=function(a){return a.title||""};f.Xa=function(a,b){a&&(a.title=b||"")};var jd=function(a,b,c){T.call(this,a,b||id.ia(),c);Yc(this,8,!0);this.T|=9};p(jd,T);jd.prototype.Ua=function(){return this.tb};jd.prototype.Xa=function(a){this.Ab().Xa(this.a(),a);this.tb=a};Gc("goog-tab",function(){return new jd(null)});var X=function(){this.Hb="tablist"};p(X,R);ba(X);X.prototype.v=function(){return"goog-tab-bar"};X.prototype.bb=function(a,b,c){this.Bb||(this.Ka||kd(this),this.Bb=Pa(this.Ka));var d=this.Bb[b];d?(Kc(a,ld(d)),a.yb=d):X.f.bb.call(this,a,b,c)};X.prototype.ta=function(a){var b=X.f.ta.call(this,a);this.Ka||kd(this);b.push(this.Ka[a.yb]);return b};var kd=function(a){var b=a.v();a.Ka={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var Y=function(a,b,c){a=a||"top";Kc(this,ld(a));this.yb=a;W.call(this,this.L,b||X.ia(),c);md(this)};p(Y,W);f=Y.prototype;f.ac=!0;f.H=null;f.D=function(){Y.f.D.call(this);md(this)};f.removeChild=function(a,b){nd(this,a);return Y.f.removeChild.call(this,a,b)};f.Wa=function(a){Y.f.Wa.call(this,a);this.ac&&this.V(N(this,a))};f.V=function(a){a?Wc(a,!0):this.H&&Wc(this.H,!1)};
-var nd=function(a,b){if(b&&b==a.H){for(var c=jc(a,b),d=c-1;b=N(a,d);d--)if(b.s()&&b.isEnabled()){a.V(b);return}for(c+=1;b=N(a,c);c++)if(b.s()&&b.isEnabled()){a.V(b);return}a.V(null)}};f=Y.prototype;f.Zb=function(a){this.H&&this.H!=a.target&&Wc(this.H,!1);this.H=a.target};f.$b=function(a){a.target==this.H&&(this.H=null)};f.Xb=function(a){nd(this,a.target)};f.Yb=function(a){nd(this,a.target)};f.na=function(){N(this,this.e)||this.B(this.H||N(this,0))};
-var md=function(a){ec(a).c(a,"select",a.Zb).c(a,"unselect",a.$b).c(a,"disable",a.Xb).c(a,"hide",a.Yb)},ld=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Gc("goog-tab-bar",function(){return new Y});var Z=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,nc(a,h.fa()),Bc(a,"goog-zippy-header"),od(h,a),a&&h.Pb.c(a,"keydown",h.Qb))}H.call(this);this.A=e||fb();this.R=this.A.a(a)||null;this.Aa=this.A.a(d||null);this.ea=(this.Qa=n(b)?b:null)||!b?null:this.A.a(b);this.k=1==c;this.Pb=new G(this);this.rb=new G(this);var h=this;g(this.R);g(this.Aa);this.S(this.k)};p(Z,H);f=Z.prototype;f.Z=!0;f.fa=function(){return"tab"};f.C=function(){return this.ea};f.toggle=function(){this.S(!this.k)};
-f.S=function(a){this.ea?J(this.ea,a):a&&this.Qa&&(this.ea=this.Qa());this.ea&&Bc(this.ea,"goog-zippy-content");if(this.Aa)J(this.R,!a),J(this.Aa,a);else if(this.R){var b=this.R;a?Bc(b,"goog-zippy-expanded"):Dc(b,"goog-zippy-expanded");b=this.R;a?Dc(b,"goog-zippy-collapsed"):Bc(b,"goog-zippy-collapsed");pc(this.R,"expanded",a)}this.k=a;this.dispatchEvent(new pd("toggle",this))};f.qb=function(){return this.Z};f.Pa=function(a){this.Z!=a&&((this.Z=a)?(od(this,this.R),od(this,this.Aa)):this.rb.ab())};
-var od=function(a,b){b&&a.rb.c(b,"click",a.bc)};Z.prototype.Qb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new B("action",this)),a.preventDefault(),a.stopPropagation()};Z.prototype.bc=function(){this.toggle();this.dispatchEvent(new B("action",this))};var pd=function(a,b){B.call(this,a,b)};p(pd,B);var rd=function(a,b){this.pb=[];for(var c=hb("span","ae-zippy",gb(document,a)),d=0,e;e=c[d];d++){var g=e.parentNode.parentNode.parentNode;if(void 0!=g.nextElementSibling)g=g.nextElementSibling;else for(g=g.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Z(e,g,!1);this.pb.push(e)}this.hc=new qd(this.pb,gb(document,b))};rd.prototype.lc=function(){return this.hc};rd.prototype.mc=function(){return this.pb};
-var qd=function(a,b){this.va=a;if(this.va.length)for(var c=0,d;d=this.va[c];c++)E(d,"toggle",this.Wb,!1,this);this.La=0;this.k=!1;c="ae-toggle ae-plus ae-action";this.va.length||(c+=" ae-disabled");this.P=lb("span",{className:c},"Expand All");E(this.P,"click",this.Vb,!1,this);b&&b.appendChild(this.P)};qd.prototype.Vb=function(){this.va.length&&this.S(!this.k)};
-qd.prototype.Wb=function(a){a=a.currentTarget;this.La=a.k?this.La+1:this.La-1;a.k!=this.k&&(a.k?(this.k=!0,sd(this,!0)):0==this.La&&(this.k=!1,sd(this,!1)))};qd.prototype.S=function(a){this.k=a;a=0;for(var b;b=this.va[a];a++)b.k!=this.k&&b.S(this.k);sd(this)};
-var sd=function(a,b){(void 0!==b?b:a.k)?(La(a.P,"ae-plus"),Ja(a.P,"ae-minus"),ob(a.P,"Collapse All")):(La(a.P,"ae-minus"),Ja(a.P,"ae-plus"),ob(a.P,"Expand All"))},td=function(a){this.cc=a;this.Db={};var b,c=lb("div",{},b=lb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),lb("div",{className:"goog-tab-bar-clear"}),a=lb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new Y;d.K(b);E(d,"select",this.Cb,!1,this);E(d,"unselect",this.Cb,!1,this);
-b=0;for(var e;e=this.cc[b];b++)if(e=gb(document,"ae-stats-details-"+e)){var g=hb("h2",null,e)[0],h;h=g;var k=void 0;cb&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],vb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");cb||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new jd(h);this.Db[ia(g)]=e;d.Da(g,!0);a.appendChild(e);0==b?d.V(g):J(e,!1)}gb(document,"bd").appendChild(c)};
-td.prototype.Cb=function(a){var b=this.Db[ia(a.target)];J(b,"select"==a.type)};ka("ae.Stats.Details.Tabs",td);ka("goog.ui.Zippy",Z);Z.prototype.setExpanded=Z.prototype.S;ka("ae.Stats.MakeZippys",rd);rd.prototype.getExpandCollapse=rd.prototype.lc;rd.prototype.getZippys=rd.prototype.mc;qd.prototype.setExpanded=qd.prototype.S;var $=function(){this.cb=[];this.jb=[]},ud=[[5,.2,1],[6,.2,1.2],[5,.25,1.25],[6,.25,1.5],[4,.5,2],[5,.5,2.5],[6,.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],vd=function(a){if(0>=a)return[2,.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<ud.length;c++)if(a<=ud[c][2])return[ud[c][0],ud[c][1]*b,ud[c][2]*b];return[5,2*b,10*b]};$.prototype.ib="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.gb=0;$.prototype.write=function(a){this.jb.push(a)};
-var wd=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.w+'tick" src="'+a.ib+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
-$.prototype.kc=function(){this.jb=[];var a=vd(this.gb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');wd(this,b,c,a);for(var d=0;d<this.cb.length;d++){var e=this.cb[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');0<e.label.length&&(0<e.ja.length&&this.write('<a class="'+this.w+'link" href="'+e.ja+'">'),this.write(e.label),0<e.ja.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');0<e.ja.length&&this.write('<a class="'+
+f.ob=function(a){var b=N(this,this.e);if(b&&"function"==typeof b.J&&b.J(a)||this.i&&this.i!=b&&"function"==typeof this.i.J&&this.i.J(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.I())this.j().blur();else return!1;break;case 36:ed(this);break;case 35:fd(this);break;case 38:if("vertical"==this.L)gd(this);else return!1;break;case 37:if("horizontal"==this.L)jc(this)?hd(this):gd(this);else return!1;break;case 40:if("vertical"==this.L)hd(this);else return!1;
+break;case 39:if("horizontal"==this.L)jc(this)?gd(this):hd(this);else return!1;break;default:return!1}return!0};var bd=function(a,b){var c=b.a(),c=c.id||(c.id=dc(b));a.M||(a.M={});a.M[c]=b};W.prototype.Da=function(a,b){Ba(a,T,"The child of a container must be a control");W.f.Da.call(this,a,b)};
+W.prototype.Va=function(a,b,c){Ba(a,T);a.T|=2;a.T|=64;!this.I()&&this.Sb||Zc(a,32,!1);a.Pa(!1);var d=a.getParent()==this?kc(this,a):-1;W.f.Va.call(this,a,b,c);a.g&&this.g&&bd(this,a);a=d;-1==a&&(a=M(this));a==this.e?this.e=Math.min(M(this)-1,b):a>this.e&&b<=this.e?this.e++:a<this.e&&b>this.e&&this.e--};
+W.prototype.removeChild=function(a,b){a=m(a)?gc(this,a):a;Ba(a,T);if(a){var c=kc(this,a);-1!=c&&(c==this.e?(a.B(!1),this.e=-1):c<this.e&&this.e--);var d=a.a();d&&d.id&&this.M&&(c=this.M,d=d.id,d in c&&delete c[d])}a=W.f.removeChild.call(this,a,b);a.Pa(!0);return a};var Lc=function(a,b){if(a.a())throw Error("Component already rendered");a.L=b};f=W.prototype;f.s=function(){return this.n};
+f.ka=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){this.n=a;var c=this.a();c&&(J(c,a),this.I()&&Jc(this.j(),this.U&&this.n),b||this.dispatchEvent(this.n?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.U};f.ca=function(a){this.U!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.U=!0,ic(this,function(a){a.xb?delete a.xb:a.ca(!0)})):(ic(this,function(a){a.isEnabled()?a.ca(!1):a.xb=!0}),this.aa=this.U=!1),this.I()&&Jc(this.j(),a&&this.n))};
+f.I=function(){return this.$a};f.oa=function(a){a!=this.$a&&this.g&&cd(this,a);this.$a=a;this.U&&this.n&&Jc(this.j(),a)};var dd=function(a,b){var c=N(a,b);c?c.B(!0):-1<a.e&&N(a,a.e).B(!1)};W.prototype.B=function(a){dd(this,kc(this,a))};
+var ed=function(a){id(a,function(a,c){return(a+1)%c},M(a)-1)},fd=function(a){id(a,function(a,c){a--;return 0>a?c-1:a},0)},hd=function(a){id(a,function(a,c){return(a+1)%c},a.e)},gd=function(a){id(a,function(a,c){a--;return 0>a?c-1:a},a.e)},id=function(a,b,c){c=0>c?kc(a,a.i):c;var d=M(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=N(a,c);if(g&&g.s()&&g.isEnabled()&&g.l&2){a.Wa(c);break}e++;c=b.call(a,c,d)}};W.prototype.Wa=function(a){dd(this,a)};var jd=function(){};p(jd,S);ba(jd);f=jd.prototype;f.v=function(){return"goog-tab"};f.fa=function(){return"tab"};f.o=function(a){var b=jd.f.o.call(this,a);(a=a.Ua())&&this.Xa(b,a);return b};f.K=function(a,b){b=jd.f.K.call(this,a,b);var c=this.Ua(b);c&&(a.tb=c);a.h&8&&(c=a.getParent())&&n(c.V)&&(a.t(8,!1),c.V(a));return b};f.Ua=function(a){return a.title||""};f.Xa=function(a,b){a&&(a.title=b||"")};var kd=function(a,b,c){T.call(this,a,b||jd.ia(),c);Zc(this,8,!0);this.T|=9};p(kd,T);kd.prototype.Ua=function(){return this.tb};kd.prototype.Xa=function(a){this.Ab().Xa(this.a(),a);this.tb=a};Hc("goog-tab",function(){return new kd(null)});var X=function(){this.Hb="tablist"};p(X,R);ba(X);X.prototype.v=function(){return"goog-tab-bar"};X.prototype.bb=function(a,b,c){this.Bb||(this.Ka||ld(this),this.Bb=Qa(this.Ka));var d=this.Bb[b];d?(Lc(a,md(d)),a.yb=d):X.f.bb.call(this,a,b,c)};X.prototype.ta=function(a){var b=X.f.ta.call(this,a);this.Ka||ld(this);b.push(this.Ka[a.yb]);return b};var ld=function(a){var b=a.v();a.Ka={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var Y=function(a,b,c){a=a||"top";Lc(this,md(a));this.yb=a;W.call(this,this.L,b||X.ia(),c);nd(this)};p(Y,W);f=Y.prototype;f.ac=!0;f.H=null;f.D=function(){Y.f.D.call(this);nd(this)};f.removeChild=function(a,b){od(this,a);return Y.f.removeChild.call(this,a,b)};f.Wa=function(a){Y.f.Wa.call(this,a);this.ac&&this.V(N(this,a))};f.V=function(a){a?Xc(a,!0):this.H&&Xc(this.H,!1)};
+var od=function(a,b){if(b&&b==a.H){for(var c=kc(a,b),d=c-1;b=N(a,d);d--)if(b.s()&&b.isEnabled()){a.V(b);return}for(c+=1;b=N(a,c);c++)if(b.s()&&b.isEnabled()){a.V(b);return}a.V(null)}};f=Y.prototype;f.Zb=function(a){this.H&&this.H!=a.target&&Xc(this.H,!1);this.H=a.target};f.$b=function(a){a.target==this.H&&(this.H=null)};f.Xb=function(a){od(this,a.target)};f.Yb=function(a){od(this,a.target)};f.na=function(){N(this,this.e)||this.B(this.H||N(this,0))};
+var nd=function(a){fc(a).c(a,"select",a.Zb).c(a,"unselect",a.$b).c(a,"disable",a.Xb).c(a,"hide",a.Yb)},md=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Hc("goog-tab-bar",function(){return new Y});var Z=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,oc(a,h.fa()),Cc(a,"goog-zippy-header"),pd(h,a),a&&h.Pb.c(a,"keydown",h.Qb))}H.call(this);this.A=e||gb();this.R=this.A.a(a)||null;this.Aa=this.A.a(d||null);this.ea=(this.Qa=n(b)?b:null)||!b?null:this.A.a(b);this.k=1==c;this.Pb=new G(this);this.rb=new G(this);var h=this;g(this.R);g(this.Aa);this.S(this.k)};p(Z,H);f=Z.prototype;f.Z=!0;f.fa=function(){return"tab"};f.C=function(){return this.ea};f.toggle=function(){this.S(!this.k)};
+f.S=function(a){this.ea?J(this.ea,a):a&&this.Qa&&(this.ea=this.Qa());this.ea&&Cc(this.ea,"goog-zippy-content");if(this.Aa)J(this.R,!a),J(this.Aa,a);else if(this.R){var b=this.R;a?Cc(b,"goog-zippy-expanded"):Ec(b,"goog-zippy-expanded");b=this.R;a?Ec(b,"goog-zippy-collapsed"):Cc(b,"goog-zippy-collapsed");qc(this.R,"expanded",a)}this.k=a;this.dispatchEvent(new qd("toggle",this))};f.qb=function(){return this.Z};f.Pa=function(a){this.Z!=a&&((this.Z=a)?(pd(this,this.R),pd(this,this.Aa)):this.rb.ab())};
+var pd=function(a,b){b&&a.rb.c(b,"click",a.bc)};Z.prototype.Qb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new B("action",this)),a.preventDefault(),a.stopPropagation()};Z.prototype.bc=function(){this.toggle();this.dispatchEvent(new B("action",this))};var qd=function(a,b){B.call(this,a,b)};p(qd,B);var sd=function(a,b){this.pb=[];for(var c=ib("span","ae-zippy",hb(document,a)),d=0,e;e=c[d];d++){var g=e.parentNode.parentNode.parentNode;if(void 0!=g.nextElementSibling)g=g.nextElementSibling;else for(g=g.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Z(e,g,!1);this.pb.push(e)}this.hc=new rd(this.pb,hb(document,b))};sd.prototype.lc=function(){return this.hc};sd.prototype.mc=function(){return this.pb};
+var rd=function(a,b){this.va=a;if(this.va.length)for(var c=0,d;d=this.va[c];c++)E(d,"toggle",this.Wb,!1,this);this.La=0;this.k=!1;c="ae-toggle ae-plus ae-action";this.va.length||(c+=" ae-disabled");this.P=mb("span",{className:c},"Expand All");E(this.P,"click",this.Vb,!1,this);b&&b.appendChild(this.P)};rd.prototype.Vb=function(){this.va.length&&this.S(!this.k)};
+rd.prototype.Wb=function(a){a=a.currentTarget;this.La=a.k?this.La+1:this.La-1;a.k!=this.k&&(a.k?(this.k=!0,td(this,!0)):0==this.La&&(this.k=!1,td(this,!1)))};rd.prototype.S=function(a){this.k=a;a=0;for(var b;b=this.va[a];a++)b.k!=this.k&&b.S(this.k);td(this)};
+var td=function(a,b){(void 0!==b?b:a.k)?(Ma(a.P,"ae-plus"),Ka(a.P,"ae-minus"),pb(a.P,"Collapse All")):(Ma(a.P,"ae-minus"),Ka(a.P,"ae-plus"),pb(a.P,"Expand All"))},ud=function(a){this.cc=a;this.Db={};var b,c=mb("div",{},b=mb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),mb("div",{className:"goog-tab-bar-clear"}),a=mb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new Y;d.K(b);E(d,"select",this.Cb,!1,this);E(d,"unselect",this.Cb,!1,this);
+b=0;for(var e;e=this.cc[b];b++)if(e=hb(document,"ae-stats-details-"+e)){var g=ib("h2",null,e)[0],h;h=g;var k=void 0;db&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],wb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");db||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new kd(h);this.Db[ia(g)]=e;d.Da(g,!0);a.appendChild(e);0==b?d.V(g):J(e,!1)}hb(document,"bd").appendChild(c)};
+ud.prototype.Cb=function(a){var b=this.Db[ia(a.target)];J(b,"select"==a.type)};ka("ae.Stats.Details.Tabs",ud);ka("goog.ui.Zippy",Z);Z.prototype.setExpanded=Z.prototype.S;ka("ae.Stats.MakeZippys",sd);sd.prototype.getExpandCollapse=sd.prototype.lc;sd.prototype.getZippys=sd.prototype.mc;rd.prototype.setExpanded=rd.prototype.S;var $=function(){this.cb=[];this.jb=[]},vd=[[5,.2,1],[6,.2,1.2],[5,.25,1.25],[6,.25,1.5],[4,.5,2],[5,.5,2.5],[6,.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],wd=function(a){if(0>=a)return[2,.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<vd.length;c++)if(a<=vd[c][2])return[vd[c][0],vd[c][1]*b,vd[c][2]*b];return[5,2*b,10*b]};$.prototype.ib="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.gb=0;$.prototype.write=function(a){this.jb.push(a)};
+var xd=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.w+'tick" src="'+a.ib+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
+$.prototype.kc=function(){this.jb=[];var a=wd(this.gb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');xd(this,b,c,a);for(var d=0;d<this.cb.length;d++){var e=this.cb[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');0<e.label.length&&(0<e.ja.length&&this.write('<a class="'+this.w+'link" href="'+e.ja+'">'),this.write(e.label),0<e.ja.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');0<e.ja.length&&this.write('<a class="'+
 this.w+'link" href="'+e.ja+'"\n>');this.write('<img class="'+this.w+'bar" src="'+this.ib+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.fb&&(this.write('<img class="'+this.w+'extra" src="'+this.ib+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.fb*a+'%"\n>'));0<e.zb.length&&(this.write('<span class="'+this.w+'inline" style="left:'+(e.start+Math.max(e.duration,e.fb))*a+'%">&nbsp;'),this.write(e.zb),this.write("</span>"));0<e.ja.length&&
-this.write("</a>");this.write("</div></td></tr>\n")}wd(this,b,c,a);this.write("</table>\n");return this.jb.join("")};$.prototype.jc=function(a,b,c,d,e,g){this.gb=Math.max(this.gb,Math.max(b+c,b+d));this.cb.push({label:a,start:b,duration:c,fb:d,zb:e,ja:g})};ka("Gantt",$);$.prototype.add_bar=$.prototype.jc;$.prototype.draw=$.prototype.kc;})();
+this.write("</a>");this.write("</div></td></tr>\n")}xd(this,b,c,a);this.write("</table>\n");return this.jb.join("")};$.prototype.jc=function(a,b,c,d,e,g){this.gb=Math.max(this.gb,Math.max(b+c,b+d));this.cb.push({label:a,start:b,duration:c,fb:d,zb:e,ja:g})};ka("Gantt",$);$.prototype.add_bar=$.prototype.jc;$.prototype.draw=$.prototype.kc;})();
diff --git a/google/appengine/ext/appstats/ui.py b/google/appengine/ext/appstats/ui.py
index e22dbfb..cdf4175 100644
--- a/google/appengine/ext/appstats/ui.py
+++ b/google/appengine/ext/appstats/ui.py
@@ -32,6 +32,7 @@
 """
 
 
+
 import cgi
 import cStringIO
 import email.Utils
diff --git a/google/appengine/ext/blobstore/blobstore.py b/google/appengine/ext/blobstore/blobstore.py
index 97461f5..72a2265 100644
--- a/google/appengine/ext/blobstore/blobstore.py
+++ b/google/appengine/ext/blobstore/blobstore.py
@@ -34,6 +34,7 @@
 
 
 
+
 import base64
 import email
 import email.message
diff --git a/google/appengine/ext/builtins/__init__.py b/google/appengine/ext/builtins/__init__.py
index 1a9719e..13c7a02 100644
--- a/google/appengine/ext/builtins/__init__.py
+++ b/google/appengine/ext/builtins/__init__.py
@@ -37,6 +37,7 @@
 
 
 
+
 import logging
 import os
 
diff --git a/google/appengine/ext/bulkload/bulkload_deprecated.py b/google/appengine/ext/bulkload/bulkload_deprecated.py
index 8ed0f3c..e56d060 100644
--- a/google/appengine/ext/bulkload/bulkload_deprecated.py
+++ b/google/appengine/ext/bulkload/bulkload_deprecated.py
@@ -31,6 +31,7 @@
 
 
 
+
 import Cookie
 import StringIO
 import csv
diff --git a/google/appengine/ext/bulkload/bulkloader_config.py b/google/appengine/ext/bulkload/bulkloader_config.py
index 29b1954..91159a4 100644
--- a/google/appengine/ext/bulkload/bulkloader_config.py
+++ b/google/appengine/ext/bulkload/bulkloader_config.py
@@ -35,6 +35,7 @@
 
 
 
+
 import copy
 import os
 import sys
diff --git a/google/appengine/ext/bulkload/bulkloader_errors.py b/google/appengine/ext/bulkload/bulkloader_errors.py
index 067aefd..19b30f8 100644
--- a/google/appengine/ext/bulkload/bulkloader_errors.py
+++ b/google/appengine/ext/bulkload/bulkloader_errors.py
@@ -28,6 +28,7 @@
 
 
 
+
 class Error(Exception):
   """Base bulkloader error type."""
 
diff --git a/google/appengine/ext/bulkload/bulkloader_parser.py b/google/appengine/ext/bulkload/bulkloader_parser.py
index f04754e..07b1c48 100644
--- a/google/appengine/ext/bulkload/bulkloader_parser.py
+++ b/google/appengine/ext/bulkload/bulkloader_parser.py
@@ -37,6 +37,7 @@
 
 
 
+
 import inspect
 import sys
 
diff --git a/google/appengine/ext/bulkload/connector_interface.py b/google/appengine/ext/bulkload/connector_interface.py
index 47b1438..c3fb9bd 100644
--- a/google/appengine/ext/bulkload/connector_interface.py
+++ b/google/appengine/ext/bulkload/connector_interface.py
@@ -31,6 +31,7 @@
 
 
 
+
 class ConnectorInterface(object):
   """Abstract base class describing the external Connector interface.
 
diff --git a/google/appengine/ext/bulkload/constants.py b/google/appengine/ext/bulkload/constants.py
index 8af9d18..31a663e 100644
--- a/google/appengine/ext/bulkload/constants.py
+++ b/google/appengine/ext/bulkload/constants.py
@@ -23,5 +23,6 @@
 """
 
 
+
 KIND_PARAM = 'kind'
 CSV_PARAM = 'csv'
diff --git a/google/appengine/ext/bulkload/csv_connector.py b/google/appengine/ext/bulkload/csv_connector.py
index 973dc13..0df8b05 100644
--- a/google/appengine/ext/bulkload/csv_connector.py
+++ b/google/appengine/ext/bulkload/csv_connector.py
@@ -34,6 +34,7 @@
 
 
 
+
 import codecs
 import cStringIO
 import csv
diff --git a/google/appengine/ext/bulkload/simpletext_connector.py b/google/appengine/ext/bulkload/simpletext_connector.py
index b934790..c3db4ad 100644
--- a/google/appengine/ext/bulkload/simpletext_connector.py
+++ b/google/appengine/ext/bulkload/simpletext_connector.py
@@ -33,6 +33,7 @@
 
 
 
+
 from google.appengine.ext.bulkload import bulkloader_errors
 from google.appengine.ext.bulkload import connector_interface
 
diff --git a/google/appengine/ext/bulkload/simplexml_connector.py b/google/appengine/ext/bulkload/simplexml_connector.py
index 4ea05cb..f015ca1 100644
--- a/google/appengine/ext/bulkload/simplexml_connector.py
+++ b/google/appengine/ext/bulkload/simplexml_connector.py
@@ -33,6 +33,7 @@
 
 
 
+
 import codecs
 import logging
 import re
diff --git a/google/appengine/ext/bulkload/transform.py b/google/appengine/ext/bulkload/transform.py
index 6348d73..18fa4f9 100644
--- a/google/appengine/ext/bulkload/transform.py
+++ b/google/appengine/ext/bulkload/transform.py
@@ -34,6 +34,7 @@
 
 
 
+
 import base64
 
 import datetime
diff --git a/google/appengine/ext/cloudstorage/cloudstorage_stub.py b/google/appengine/ext/cloudstorage/cloudstorage_stub.py
index fbb15ef..069b7b6 100644
--- a/google/appengine/ext/cloudstorage/cloudstorage_stub.py
+++ b/google/appengine/ext/cloudstorage/cloudstorage_stub.py
@@ -19,6 +19,7 @@
 
 
 
+
 import calendar
 import datetime
 import hashlib
diff --git a/google/appengine/ext/cloudstorage/common.py b/google/appengine/ext/cloudstorage/common.py
index 19e91fb..8379365 100644
--- a/google/appengine/ext/cloudstorage/common.py
+++ b/google/appengine/ext/cloudstorage/common.py
@@ -38,6 +38,7 @@
 
 
 
+
 __all__ = ['CS_XML_NS',
            'CSFileStat',
            'dt_str_to_posix',
diff --git a/google/appengine/ext/cloudstorage/stub_dispatcher.py b/google/appengine/ext/cloudstorage/stub_dispatcher.py
index 3d84853..373ffd7 100644
--- a/google/appengine/ext/cloudstorage/stub_dispatcher.py
+++ b/google/appengine/ext/cloudstorage/stub_dispatcher.py
@@ -26,6 +26,7 @@
 
 
 
+
 import httplib
 import re
 import threading
diff --git a/google/appengine/ext/datastore_admin/backup_handler.py b/google/appengine/ext/datastore_admin/backup_handler.py
index f74f60c..5752398 100644
--- a/google/appengine/ext/datastore_admin/backup_handler.py
+++ b/google/appengine/ext/datastore_admin/backup_handler.py
@@ -34,6 +34,7 @@
 
 
 
+
 import cStringIO
 import datetime
 import itertools
diff --git a/google/appengine/ext/datastore_admin/backup_pb2.py b/google/appengine/ext/datastore_admin/backup_pb2.py
index 8b2f4f9..c934d38 100644
--- a/google/appengine/ext/datastore_admin/backup_pb2.py
+++ b/google/appengine/ext/datastore_admin/backup_pb2.py
@@ -19,6 +19,7 @@
 
 import sys
 _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+import google
 from google.net.proto2.python.public import descriptor as _descriptor
 from google.net.proto2.python.public import message as _message
 from google.net.proto2.python.public import reflection as _reflection
diff --git a/google/appengine/ext/datastore_admin/copy_handler.py b/google/appengine/ext/datastore_admin/copy_handler.py
index 4512830..9544887 100644
--- a/google/appengine/ext/datastore_admin/copy_handler.py
+++ b/google/appengine/ext/datastore_admin/copy_handler.py
@@ -31,6 +31,7 @@
 """
 
 
+
 import logging
 import urllib
 
diff --git a/google/appengine/ext/datastore_admin/delete_handler.py b/google/appengine/ext/datastore_admin/delete_handler.py
index e8a108c..c78917e 100644
--- a/google/appengine/ext/datastore_admin/delete_handler.py
+++ b/google/appengine/ext/datastore_admin/delete_handler.py
@@ -24,6 +24,7 @@
 
 
 
+
 import os
 import re
 import urllib
diff --git a/google/appengine/ext/datastore_admin/main.py b/google/appengine/ext/datastore_admin/main.py
index d743dd1..aa53b3b 100644
--- a/google/appengine/ext/datastore_admin/main.py
+++ b/google/appengine/ext/datastore_admin/main.py
@@ -26,6 +26,7 @@
 """
 
 
+
 import logging
 import operator
 import os
diff --git a/google/appengine/ext/datastore_admin/remote_api_put_stub.py b/google/appengine/ext/datastore_admin/remote_api_put_stub.py
index ef4ecde..b8dae85 100644
--- a/google/appengine/ext/datastore_admin/remote_api_put_stub.py
+++ b/google/appengine/ext/datastore_admin/remote_api_put_stub.py
@@ -38,6 +38,7 @@
 
 
 
+
 import logging
 import pickle
 import random
diff --git a/google/appengine/ext/datastore_admin/utils.py b/google/appengine/ext/datastore_admin/utils.py
index ba05098..fa3c19d 100644
--- a/google/appengine/ext/datastore_admin/utils.py
+++ b/google/appengine/ext/datastore_admin/utils.py
@@ -21,6 +21,7 @@
 """Used render templates for datastore admin."""
 
 
+
 import base64
 import datetime
 import logging
diff --git a/google/appengine/ext/db/__init__.py b/google/appengine/ext/db/__init__.py
index 7ce16ff..0551f18 100644
--- a/google/appengine/ext/db/__init__.py
+++ b/google/appengine/ext/db/__init__.py
@@ -87,6 +87,9 @@
 
 
 
+
+
+
 import copy
 import datetime
 import logging
diff --git a/google/appengine/ext/db/djangoforms.py b/google/appengine/ext/db/djangoforms.py
index 233dfb0..a89f642 100644
--- a/google/appengine/ext/db/djangoforms.py
+++ b/google/appengine/ext/db/djangoforms.py
@@ -87,6 +87,7 @@
 
 
 
+
 import itertools
 import logging
 
diff --git a/google/appengine/ext/db/metadata.py b/google/appengine/ext/db/metadata.py
index dac2357..c1005d4 100644
--- a/google/appengine/ext/db/metadata.py
+++ b/google/appengine/ext/db/metadata.py
@@ -53,6 +53,7 @@
 
 
 
+
 from google.appengine.api import datastore_types
 from google.appengine.ext import db
 
diff --git a/google/appengine/ext/db/polymodel.py b/google/appengine/ext/db/polymodel.py
index 77a7faa..ff7d1a0 100644
--- a/google/appengine/ext/db/polymodel.py
+++ b/google/appengine/ext/db/polymodel.py
@@ -34,6 +34,7 @@
 """
 
 
+
 from google.appengine.ext import db
 
 
diff --git a/google/appengine/ext/db/stats.py b/google/appengine/ext/db/stats.py
index 195dd7e..e044b60 100644
--- a/google/appengine/ext/db/stats.py
+++ b/google/appengine/ext/db/stats.py
@@ -33,6 +33,7 @@
 
 
 
+
 from google.appengine.ext import db
 
 
diff --git a/google/appengine/ext/deferred/__init__.py b/google/appengine/ext/deferred/__init__.py
index 3ee0ac3..227e9b0 100644
--- a/google/appengine/ext/deferred/__init__.py
+++ b/google/appengine/ext/deferred/__init__.py
@@ -26,6 +26,7 @@
 
 
 
+
 from deferred import *
 
 
diff --git a/google/appengine/ext/deferred/deferred.py b/google/appengine/ext/deferred/deferred.py
index dc71e2c..0875e37 100644
--- a/google/appengine/ext/deferred/deferred.py
+++ b/google/appengine/ext/deferred/deferred.py
@@ -91,6 +91,7 @@
 
 
 
+
 import logging
 import os
 import pickle
diff --git a/google/appengine/ext/deferred/handler.py b/google/appengine/ext/deferred/handler.py
index c0feeed..486fbe7 100644
--- a/google/appengine/ext/deferred/handler.py
+++ b/google/appengine/ext/deferred/handler.py
@@ -31,6 +31,7 @@
 
 
 
+
 from google.appengine.ext.deferred import deferred
 from google.appengine.ext.webapp.util import run_wsgi_app
 
diff --git a/google/appengine/ext/django/backends/rdbms/base.py b/google/appengine/ext/django/backends/rdbms/base.py
index 64adc24..c2d34f6 100644
--- a/google/appengine/ext/django/backends/rdbms/base.py
+++ b/google/appengine/ext/django/backends/rdbms/base.py
@@ -24,4 +24,5 @@
 """
 
 
+
 from google.storage.speckle.python.django.backend.base import *
diff --git a/google/appengine/ext/django/main.py b/google/appengine/ext/django/main.py
index a65aa21..ee6eacb 100644
--- a/google/appengine/ext/django/main.py
+++ b/google/appengine/ext/django/main.py
@@ -27,6 +27,7 @@
 
 
 
+
 import logging
 import os
 
diff --git a/google/appengine/ext/django/management/commands/getoauthtoken.py b/google/appengine/ext/django/management/commands/getoauthtoken.py
index e88b24c..5571be1 100644
--- a/google/appengine/ext/django/management/commands/getoauthtoken.py
+++ b/google/appengine/ext/django/management/commands/getoauthtoken.py
@@ -24,4 +24,5 @@
 """
 
 
+
 from google.storage.speckle.python.django.management.commands.getoauthtoken import *
diff --git a/google/appengine/ext/endpoints/__init__.py b/google/appengine/ext/endpoints/__init__.py
index 09529c2..d7135ab 100644
--- a/google/appengine/ext/endpoints/__init__.py
+++ b/google/appengine/ext/endpoints/__init__.py
@@ -65,6 +65,19 @@
 
 
 
+
+
+
+
+
+
+
+
+
+
+
+
+
 from endpoints import *
 
 
diff --git a/google/appengine/ext/ereporter/ereporter.py b/google/appengine/ext/ereporter/ereporter.py
index f9f1553..a93d495 100644
--- a/google/appengine/ext/ereporter/ereporter.py
+++ b/google/appengine/ext/ereporter/ereporter.py
@@ -78,6 +78,7 @@
 
 
 
+
 import datetime
 import logging
 import os
diff --git a/google/appengine/ext/ereporter/report_generator.py b/google/appengine/ext/ereporter/report_generator.py
index 8a8e88d..2885c03 100644
--- a/google/appengine/ext/ereporter/report_generator.py
+++ b/google/appengine/ext/ereporter/report_generator.py
@@ -44,6 +44,7 @@
 
 
 
+
 import datetime
 import itertools
 import os
diff --git a/google/appengine/ext/gql/__init__.py b/google/appengine/ext/gql/__init__.py
index 3d4a411..c072be3 100644
--- a/google/appengine/ext/gql/__init__.py
+++ b/google/appengine/ext/gql/__init__.py
@@ -34,6 +34,7 @@
 
 
 
+
 import calendar
 import datetime
 import itertools
diff --git a/google/appengine/ext/key_range/__init__.py b/google/appengine/ext/key_range/__init__.py
index 8e4ac60..2ab9087 100644
--- a/google/appengine/ext/key_range/__init__.py
+++ b/google/appengine/ext/key_range/__init__.py
@@ -24,6 +24,7 @@
 """Key range representation and splitting."""
 
 
+
 import os
 
 
diff --git a/google/appengine/ext/mapreduce/api/map_job/__init__.py b/google/appengine/ext/mapreduce/api/map_job/__init__.py
index bf17d2e..4c2c30e 100644
--- a/google/appengine/ext/mapreduce/api/map_job/__init__.py
+++ b/google/appengine/ext/mapreduce/api/map_job/__init__.py
@@ -24,9 +24,6 @@
 
 from .input_reader import InputReader
 from .map_job_config import JobConfig
-from .map_job_context import JobContext
-from .map_job_context import ShardContext
-from .map_job_context import SliceContext
 from .map_job_control import Job
 from .mapper import Mapper
 from .output_writer import OutputWriter
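
The three context classes dropped here are not gone: the rename hunk further
down moves map_job_context.py from the api/map_job package up to
google.appengine.ext.mapreduce. A minimal sketch of the updated import path,
matching the usage in the handlers.py hunk below:

    from google.appengine.ext.mapreduce import map_job_context

    # job_config is a map_job.JobConfig, as before.
    job_context = map_job_context.JobContext(job_config)
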
diff --git a/google/appengine/ext/mapreduce/api/map_job/input_reader.py b/google/appengine/ext/mapreduce/api/map_job/input_reader.py
index 01b9655..7fb60d7 100644
--- a/google/appengine/ext/mapreduce/api/map_job/input_reader.py
+++ b/google/appengine/ext/mapreduce/api/map_job/input_reader.py
@@ -16,9 +16,9 @@
 #
 """Input Reader interface for map job."""
 
-from . import shard_life_cycle
 from google.appengine.ext.mapreduce import errors
 from google.appengine.ext.mapreduce import json_util
+from google.appengine.ext.mapreduce import shard_life_cycle
 
 
 
diff --git a/google/appengine/ext/mapreduce/api/map_job/mapper.py b/google/appengine/ext/mapreduce/api/map_job/mapper.py
index 95217e2..d80392a 100644
--- a/google/appengine/ext/mapreduce/api/map_job/mapper.py
+++ b/google/appengine/ext/mapreduce/api/map_job/mapper.py
@@ -16,7 +16,8 @@
 #
 """Interface for user defined mapper."""
 
-from . import shard_life_cycle
+from google.appengine.ext.mapreduce import shard_life_cycle
+
 
 
 
@@ -66,7 +67,7 @@
     CAUTION! Carefully tune so as not to exceed the memory limit or request deadline.
 
     Args:
-      slice_ctx: map_job.SliceContext object.
+      slice_ctx: map_job_context.SliceContext object.
       val: a single value yielded by your input reader. The type
         depends on the input reader. For example, some may yield a single
         datastore entity, others may yield a (int, str) tuple.
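
The docstring above describes the map_job.Mapper hook that receives one input
value per call. A minimal sketch of a user-defined mapper against this
interface (assuming, as in the SDK's map_job API, that the documented method
is Mapper.__call__ and that yielded values flow to the output writer):

    from google.appengine.ext.mapreduce.api import map_job

    class EchoMapper(map_job.Mapper):
      """Illustrative only: emits the string form of each input value."""

      def __call__(self, slice_ctx, val):
        # val is whatever the configured input reader yields, e.g. a
        # datastore entity or an (int, str) tuple.
        yield str(val)
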
diff --git a/google/appengine/ext/mapreduce/api/map_job/output_writer.py b/google/appengine/ext/mapreduce/api/map_job/output_writer.py
index 4e9cef6..785d8ba 100644
--- a/google/appengine/ext/mapreduce/api/map_job/output_writer.py
+++ b/google/appengine/ext/mapreduce/api/map_job/output_writer.py
@@ -16,9 +16,9 @@
 #
 """Output writer interface for map job."""
 
-from . import shard_life_cycle
 from google.appengine.ext.mapreduce import errors
 from google.appengine.ext.mapreduce import json_util
+from google.appengine.ext.mapreduce import shard_life_cycle
 
 
 
@@ -95,7 +95,7 @@
     """Create new writer for a shard.
 
     Args:
-      shard_ctx: map_job.ShardContext for this shard.
+      shard_ctx: map_job_context.ShardContext for this shard.
     """
     raise NotImplementedError("create() not implemented in %s" % cls)
 
@@ -118,7 +118,7 @@
     (e.g. a list of filenames)
 
     Args:
-      shard_ctx: map_job.ShardContext for this shard.
+      shard_ctx: map_job_context.ShardContext for this shard.
       iterator: an iterator that yields json serializable
         references to the outputs from this shard.
         Contents from the iterator can be accessed later via
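
The create() and commit_output() docstrings above now take contexts from
map_job_context rather than the map_job package. A toy writer under that
contract (the class body is illustrative, not part of this change; only the
create(shard_ctx) classmethod signature comes from the hunk above):

    from google.appengine.ext.mapreduce.api import map_job

    class InMemoryWriter(map_job.OutputWriter):
      """Illustrative only: buffers a shard's output in memory."""

      def __init__(self):
        self._buf = []

      @classmethod
      def create(cls, shard_ctx):
        # shard_ctx is a map_job_context.ShardContext for this shard.
        return cls()

      def write(self, data):
        self._buf.append(data)
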
diff --git a/google/appengine/ext/mapreduce/base_handler.py b/google/appengine/ext/mapreduce/base_handler.py
index fdb4096..7e8377a 100644
--- a/google/appengine/ext/mapreduce/base_handler.py
+++ b/google/appengine/ext/mapreduce/base_handler.py
@@ -35,6 +35,7 @@
 
 
 
+
 import httplib
 import logging
 
diff --git a/google/appengine/ext/mapreduce/context.py b/google/appengine/ext/mapreduce/context.py
index d535ca5..cd662d5 100644
--- a/google/appengine/ext/mapreduce/context.py
+++ b/google/appengine/ext/mapreduce/context.py
@@ -38,6 +38,7 @@
 """
 
 
+
 __all__ = ["get",
            "Pool",
            "Context",
diff --git a/google/appengine/ext/mapreduce/control.py b/google/appengine/ext/mapreduce/control.py
index 1515a66..380b3fc 100644
--- a/google/appengine/ext/mapreduce/control.py
+++ b/google/appengine/ext/mapreduce/control.py
@@ -33,6 +33,7 @@
 """API for controlling MapReduce execution outside of MapReduce framework."""
 
 
+
 __all__ = ["start_map"]
 
 
diff --git a/google/appengine/ext/mapreduce/datastore_range_iterators.py b/google/appengine/ext/mapreduce/datastore_range_iterators.py
index 6d035e1..ebce79f 100644
--- a/google/appengine/ext/mapreduce/datastore_range_iterators.py
+++ b/google/appengine/ext/mapreduce/datastore_range_iterators.py
@@ -19,6 +19,7 @@
 
 
 
+
 from google.appengine.datastore import datastore_query
 from google.appengine.datastore import datastore_rpc
 from google.appengine.ext import db
diff --git a/google/appengine/ext/mapreduce/errors.py b/google/appengine/ext/mapreduce/errors.py
index 8b0cb6f..8be958b 100644
--- a/google/appengine/ext/mapreduce/errors.py
+++ b/google/appengine/ext/mapreduce/errors.py
@@ -31,6 +31,7 @@
 """Map Reduce framework errors."""
 
 
+
 __all__ = [
     "BadCombinerOutputError",
     "BadParamsError",
diff --git a/google/appengine/ext/mapreduce/file_format_parser.py b/google/appengine/ext/mapreduce/file_format_parser.py
index 30826e7..f44040f 100644
--- a/google/appengine/ext/mapreduce/file_format_parser.py
+++ b/google/appengine/ext/mapreduce/file_format_parser.py
@@ -33,6 +33,7 @@
 
 
 
+
 __all__ = ['parse']
 
 import re
diff --git a/google/appengine/ext/mapreduce/file_format_root.py b/google/appengine/ext/mapreduce/file_format_root.py
index f37d475..ef57408 100644
--- a/google/appengine/ext/mapreduce/file_format_root.py
+++ b/google/appengine/ext/mapreduce/file_format_root.py
@@ -34,6 +34,7 @@
 
 
 
+
 __all__ = ['FileFormatRoot',
            'split']
 
diff --git a/google/appengine/ext/mapreduce/file_formats.py b/google/appengine/ext/mapreduce/file_formats.py
index d183f8e..25ecc3f 100644
--- a/google/appengine/ext/mapreduce/file_formats.py
+++ b/google/appengine/ext/mapreduce/file_formats.py
@@ -33,6 +33,7 @@
 
 
 
+
 __all__ = ['FileFormat',
            'FORMATS']
 
diff --git a/google/appengine/ext/mapreduce/handlers.py b/google/appengine/ext/mapreduce/handlers.py
index a544736..b27188d 100644
--- a/google/appengine/ext/mapreduce/handlers.py
+++ b/google/appengine/ext/mapreduce/handlers.py
@@ -35,6 +35,7 @@
 
 
 
+
 import datetime
 import logging
 import math
@@ -57,13 +58,14 @@
 from google.appengine.ext.mapreduce import context
 from google.appengine.ext.mapreduce import errors
 from google.appengine.ext.mapreduce import input_readers
+from google.appengine.ext.mapreduce import map_job_context
 from google.appengine.ext.mapreduce import model
 from google.appengine.ext.mapreduce import operation
 from google.appengine.ext.mapreduce import output_writers
 from google.appengine.ext.mapreduce import parameters
+from google.appengine.ext.mapreduce import shard_life_cycle
 from google.appengine.ext.mapreduce import util
 from google.appengine.ext.mapreduce.api import map_job
-from google.appengine.ext.mapreduce.api.map_job import shard_life_cycle
 from google.appengine.runtime import apiproxy_errors
 
 
@@ -143,6 +145,8 @@
     """Constructor."""
     super(MapperWorkerCallbackHandler, self).__init__(*args)
     self._time = time.time
+    self.slice_context = None
+    self.shard_context = None
 
   def _drop_gracefully(self):
     """Drop worker task gracefully.
@@ -407,6 +411,19 @@
       if last_slice:
         obj.end_shard(shard_context)
 
+  def _lc_start_slice(self, tstate, slice_id):
+    self._maintain_LC(tstate.output_writer, slice_id)
+    self._maintain_LC(tstate.input_reader, slice_id)
+    self._maintain_LC(tstate.handler, slice_id)
+
+  def _lc_end_slice(self, tstate, slice_id, last_slice=False):
+    self._maintain_LC(tstate.handler, slice_id, last_slice=last_slice,
+                      begin_slice=False)
+    self._maintain_LC(tstate.input_reader, slice_id, last_slice=last_slice,
+                      begin_slice=False)
+    self._maintain_LC(tstate.output_writer, slice_id, last_slice=last_slice,
+                      begin_slice=False)
+
   def handle(self):
     """Handle request.
 
@@ -463,33 +480,17 @@
     job_config = map_job.JobConfig._to_map_job_config(
         spec,
         os.environ.get("HTTP_X_APPENGINE_QUEUENAME"))
-    job_context = map_job.JobContext(job_config)
-    self.shard_context = map_job.ShardContext(job_context, shard_state)
-    self.slice_context = map_job.SliceContext(self.shard_context,
-                                              shard_state,
-                                              tstate)
+    job_context = map_job_context.JobContext(job_config)
+    self.shard_context = map_job_context.ShardContext(job_context, shard_state)
+    self.slice_context = map_job_context.SliceContext(self.shard_context,
+                                                      shard_state,
+                                                      tstate)
     try:
       slice_id = tstate.slice_id
-      self._maintain_LC(tstate.handler, slice_id)
-      self._maintain_LC(tstate.input_reader, slice_id)
-      self._maintain_LC(tstate.output_writer, slice_id)
+      self._lc_start_slice(tstate, slice_id)
 
-      if is_this_a_retry:
-        task_directive = self._attempt_slice_recovery(shard_state, tstate)
-        if task_directive != self._TASK_DIRECTIVE.PROCEED_TASK:
-          return self.__return(shard_state, tstate, task_directive)
-
-      last_slice = self._process_inputs(
-          tstate.input_reader, shard_state, tstate, ctx)
-
-      self._maintain_LC(tstate.handler, slice_id, last_slice, False)
-      self._maintain_LC(tstate.input_reader, slice_id, last_slice, False)
-      self._maintain_LC(tstate.output_writer, slice_id, last_slice, False)
-
-      ctx.flush()
-
-      if last_slice:
-
+      if shard_state.is_input_finished():
+        self._lc_end_slice(tstate, slice_id, last_slice=True)
 
         if (tstate.output_writer and
             isinstance(tstate.output_writer, output_writers.OutputWriter)):
@@ -499,6 +500,25 @@
 
           tstate.output_writer.finalize(ctx, shard_state)
         shard_state.set_for_success()
+        return self.__return(shard_state, tstate, task_directive)
+
+      if is_this_a_retry:
+        task_directive = self._attempt_slice_recovery(shard_state, tstate)
+        if task_directive != self._TASK_DIRECTIVE.PROCEED_TASK:
+          return self.__return(shard_state, tstate, task_directive)
+
+      last_slice = self._process_inputs(
+          tstate.input_reader, shard_state, tstate, ctx)
+
+      self._lc_end_slice(tstate, slice_id)
+
+      ctx.flush()
+
+      if last_slice:
+
+
+
+        shard_state.set_input_finished()
 
     except Exception, e:
       logging.warning("Shard %s got error.", shard_state.shard_id)
diff --git a/google/appengine/ext/mapreduce/hooks.py b/google/appengine/ext/mapreduce/hooks.py
index 0aa33aa..f39d7f0 100644
--- a/google/appengine/ext/mapreduce/hooks.py
+++ b/google/appengine/ext/mapreduce/hooks.py
@@ -33,6 +33,7 @@
 """API allowing control over some mapreduce implementation details."""
 
 
+
 __all__ = ["Hooks"]
 
 
diff --git a/google/appengine/ext/mapreduce/input_readers.py b/google/appengine/ext/mapreduce/input_readers.py
index ba3c4e4..1acec33 100644
--- a/google/appengine/ext/mapreduce/input_readers.py
+++ b/google/appengine/ext/mapreduce/input_readers.py
@@ -31,6 +31,7 @@
 """Defines input readers for MapReduce."""
 
 
+
 __all__ = [
     "AbstractDatastoreInputReader",
     "ALLOW_CHECKPOINT",
@@ -2554,6 +2555,16 @@
       self._bucket_iter = iter(self._bucket)
 
   @classmethod
+  def get_params(cls, mapper_spec, allowed_keys=None, allow_old=True):
+    params = _get_params(mapper_spec, allowed_keys, allow_old)
+
+
+    if (mapper_spec.params.get(cls.BUCKET_NAME_PARAM) is not None and
+        params.get(cls.BUCKET_NAME_PARAM) is None):
+      params[cls.BUCKET_NAME_PARAM] = mapper_spec.params[cls.BUCKET_NAME_PARAM]
+    return params
+
+  @classmethod
   def validate(cls, mapper_spec):
     """Validate mapper specification.
 
@@ -2564,7 +2575,7 @@
       BadReaderParamsError: if the specification is invalid for any reason such
         as missing the bucket name or providing an invalid bucket name.
     """
-    reader_spec = _get_params(mapper_spec, allow_old=False)
+    reader_spec = cls.get_params(mapper_spec, allow_old=False)
 
 
     if cls.BUCKET_NAME_PARAM not in reader_spec:
@@ -2614,7 +2625,7 @@
     Returns:
       A list of InputReaders. None when no input data can be found.
     """
-    reader_spec = _get_params(mapper_spec, allow_old=False)
+    reader_spec = cls.get_params(mapper_spec, allow_old=False)
     bucket = reader_spec[cls.BUCKET_NAME_PARAM]
     filenames = reader_spec[cls.OBJECT_NAMES_PARAM]
     delimiter = reader_spec.get(cls.DELIMITER_PARAM)
@@ -2764,14 +2775,15 @@
         self._record_reader = None
 
 
-
-class _ReducerReader(RecordsReader):
-  """Reader to read KeyValues records files from Files API."""
+class _ReducerReader(_GoogleCloudStorageRecordInputReader):
+  """Reader to read KeyValues records from GCS."""
 
   expand_parameters = True
 
-  def __init__(self, filenames, position):
-    super(_ReducerReader, self).__init__(filenames, position)
+  def __init__(self, filenames, index=0, buffer_size=None, _account_id=None,
+               delimiter=None):
+    super(_ReducerReader, self).__init__(filenames, index, buffer_size,
+                                         _account_id, delimiter)
     self.current_key = None
     self.current_values = None
 
@@ -2784,50 +2796,54 @@
       if combiner_spec:
         combiner = util.handler_for_name(combiner_spec)
 
-    for binary_record in super(_ReducerReader, self).__iter__():
-      proto = file_service_pb.KeyValues()
-      proto.ParseFromString(binary_record)
+    try:
+      while True:
+        binary_record = super(_ReducerReader, self).next()
+        proto = file_service_pb.KeyValues()
+        proto.ParseFromString(binary_record)
 
-      to_yield = None
-      if self.current_key is not None and self.current_key != proto.key():
-        to_yield = (self.current_key, self.current_values)
-        self.current_key = None
-        self.current_values = None
+        to_yield = None
+        if self.current_key is not None and self.current_key != proto.key():
+          to_yield = (self.current_key, self.current_values)
+          self.current_key = None
+          self.current_values = None
 
-      if self.current_key is None:
-        self.current_key = proto.key()
-        self.current_values = []
+        if self.current_key is None:
+          self.current_key = proto.key()
+          self.current_values = []
 
-      if combiner:
-        combiner_result = combiner(
-            self.current_key, proto.value_list(), self.current_values)
+        if combiner:
+          combiner_result = combiner(
+              self.current_key, proto.value_list(), self.current_values)
 
-        if not util.is_generator(combiner_result):
-          raise errors.BadCombinerOutputError(
-              "Combiner %s should yield values instead of returning them (%s)" %
-              (combiner, combiner_result))
+          if not util.is_generator(combiner_result):
+            raise errors.BadCombinerOutputError(
+                "Combiner %s should yield values instead of returning them "
+                "(%s)" % (combiner, combiner_result))
 
-        self.current_values = []
-        for value in combiner_result:
-          if isinstance(value, operation.Operation):
-            value(ctx)
-          else:
+          self.current_values = []
+          for value in combiner_result:
+            if isinstance(value, operation.Operation):
+              value(ctx)
+            else:
 
-            self.current_values.append(value)
+              self.current_values.append(value)
 
 
 
 
-        if not to_yield:
+          if not to_yield:
+            yield ALLOW_CHECKPOINT
+        else:
+
+          self.current_values.extend(proto.value_list())
+
+        if to_yield:
+          yield to_yield
+
           yield ALLOW_CHECKPOINT
-      else:
-
-        self.current_values.extend(proto.value_list())
-
-      if to_yield:
-        yield to_yield
-
-        yield ALLOW_CHECKPOINT
+    except StopIteration:
+      pass
 
 
 
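
_ReducerReader now pulls records from GCS instead of the Files API, but the
grouping contract is unchanged: records arrive sorted by key, consecutive
values for the same key are accumulated (optionally folded through a
combiner, which must be a generator), and a (key, values) pair is emitted
whenever the key changes. The grouping logic reduced to a standalone sketch
(record parsing, checkpoints, and the combiner path omitted; the real reader
carries the trailing group in its serialized state rather than flushing it
in-line):

    def group_by_key(records):
      """records: iterable of (key, value_list) pairs in key-sorted order."""
      current_key, current_values = None, None
      for key, values in records:
        if current_key is not None and current_key != key:
          yield current_key, current_values   # key changed: flush the group
          current_key, current_values = None, None
        if current_key is None:
          current_key, current_values = key, []
        current_values.extend(values)
      if current_key is not None:
        yield current_key, current_values     # sketch only: flush the tail

The new get_params override earlier in this hunk serves a related purpose:
it falls back to the top-level mapper_spec.params for bucket_name, so older
job configurations that set the bucket outside the input_reader dict keep
working.
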
diff --git a/google/appengine/ext/mapreduce/key_ranges.py b/google/appengine/ext/mapreduce/key_ranges.py
index 8a23425..9f79e14 100644
--- a/google/appengine/ext/mapreduce/key_ranges.py
+++ b/google/appengine/ext/mapreduce/key_ranges.py
@@ -17,6 +17,7 @@
 """An abstract for a collection of key_range.KeyRange objects."""
 
 
+
 from google.appengine.ext import key_range
 from google.appengine.ext.mapreduce import namespace_range
 
diff --git a/google/appengine/ext/mapreduce/main.py b/google/appengine/ext/mapreduce/main.py
index 223b41a..7a6ed91 100644
--- a/google/appengine/ext/mapreduce/main.py
+++ b/google/appengine/ext/mapreduce/main.py
@@ -42,6 +42,7 @@
 
 
 
+
 import google
 
 
diff --git a/google/appengine/ext/mapreduce/api/map_job/map_job_context.py b/google/appengine/ext/mapreduce/map_job_context.py
similarity index 100%
rename from google/appengine/ext/mapreduce/api/map_job/map_job_context.py
rename to google/appengine/ext/mapreduce/map_job_context.py
diff --git a/google/appengine/ext/mapreduce/mapper_pipeline.py b/google/appengine/ext/mapreduce/mapper_pipeline.py
index 9fadcd1..65573ae 100644
--- a/google/appengine/ext/mapreduce/mapper_pipeline.py
+++ b/google/appengine/ext/mapreduce/mapper_pipeline.py
@@ -28,18 +28,16 @@
 
 
 
-
-
 """Pipelines for mapreduce library."""
 
 
+
 __all__ = [
     "MapperPipeline",
     ]
 
 import google
 
-from google.appengine.api import files
 from google.appengine.ext.mapreduce import control
 from google.appengine.ext.mapreduce import model
 from google.appengine.ext.mapreduce import parameters
@@ -141,27 +139,3 @@
     self.fill(self.outputs.result_status, mapreduce_state.result_status)
     self.fill(self.outputs.counters, mapreduce_state.counters_map.to_dict())
     self.complete(outputs)
-
-
-class _CleanupPipeline(pipeline_base.PipelineBase):
-  """A pipeline to do a cleanup for mapreduce jobs.
-
-  Args:
-    filename_or_list: list of files or file lists to delete.
-  """
-
-  def delete_file_or_list(self, filename_or_list):
-    if isinstance(filename_or_list, list):
-      for filename in filename_or_list:
-        self.delete_file_or_list(filename)
-    else:
-      filename = filename_or_list
-      for _ in range(10):
-        try:
-          files.delete(filename)
-          break
-        except:
-          pass
-
-  def run(self, temp_files):
-    self.delete_file_or_list(temp_files)
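
The Files API cleanup pipeline is removed outright; as the
mapreduce_pipeline.py hunk below shows, CleanupPipeline is rebound to
shuffler._GCSCleanupPipeline. A rough equivalent of the deleted retry loop
against GCS, assuming the cloudstorage client library (illustrative, not the
shuffler implementation):

    import cloudstorage

    def delete_with_retries(filename, attempts=10):
      """filename: a '/bucket/object' GCS path."""
      for _ in range(attempts):
        try:
          cloudstorage.delete(filename)
          return
        except cloudstorage.NotFoundError:
          return                      # already gone
        except Exception:
          pass                        # transient error: try again
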
diff --git a/google/appengine/ext/mapreduce/mapreduce_pipeline.py b/google/appengine/ext/mapreduce/mapreduce_pipeline.py
index 1c802d5..86393ed 100644
--- a/google/appengine/ext/mapreduce/mapreduce_pipeline.py
+++ b/google/appengine/ext/mapreduce/mapreduce_pipeline.py
@@ -28,13 +28,12 @@
 
 
 
-
-
 """Pipelines for mapreduce library."""
 
 from __future__ import with_statement
 
 
+
 __all__ = [
     "CleanupPipeline",
     "MapPipeline",
@@ -48,13 +47,15 @@
 
 from appengine_pipeline.src import pipeline
 from appengine_pipeline.src.pipeline import common as pipeline_common
-from google.appengine.api import files
+from google.appengine.api import app_identity
+from google.appengine.ext.mapreduce import errors
 from google.appengine.ext.mapreduce import input_readers
 from google.appengine.ext.mapreduce import mapper_pipeline
 from google.appengine.ext.mapreduce import model
 from google.appengine.ext.mapreduce import output_writers
 from google.appengine.ext.mapreduce import pipeline_base
 from google.appengine.ext.mapreduce import shuffler
+from google.appengine.ext.mapreduce import util
 
 
 
@@ -65,7 +66,7 @@
 
 ShufflePipeline = shuffler.ShufflePipeline
 
-CleanupPipeline = mapper_pipeline._CleanupPipeline
+CleanupPipeline = shuffler._GCSCleanupPipeline
 
 
 _ReducerReader = input_readers._ReducerReader
@@ -95,13 +96,17 @@
           input_reader_spec,
           params,
           shards=None):
+    new_params = dict(params or {})
+
+
+    new_params.update({"output_writer": {}})
     yield MapperPipeline(
         job_name + "-map",
         mapper_spec,
         input_reader_spec,
-        output_writer_spec=
-            output_writers.__name__ + ".KeyValueBlobstoreOutputWriter",
-        params=params,
+        output_writer_spec=(output_writers.__name__ +
+                            "._GoogleCloudStorageKeyValueOutputWriter"),
+        params=new_params,
         shards=shards)
 
 
@@ -117,6 +122,7 @@
     output_writer_spec: specification of output write to use with reduce
       function.
     params: mapper parameters to use as dict.
+    bucket_name: The name of the Google Cloud Storage bucket.
     filenames: list of filenames to reduce.
     combiner_spec: Optional. Specification of a combine function. If not
       supplied, no combine step will take place. The combine function takes a
@@ -136,13 +142,18 @@
           reducer_spec,
           output_writer_spec,
           params,
+          bucket_name,
           filenames,
           combiner_spec=None,
           shards=None):
+    filenames_only = (
+        util.strip_prefix_from_items("/%s/" % bucket_name, filenames))
     new_params = dict(params or {})
     new_params.update({
-        "files": filenames
-        })
+        "input_reader": {
+            "bucket_name": bucket_name,
+            "objects": filenames_only,
+        }})
     if combiner_spec:
       new_params.update({
           "combiner_spec": combiner_spec,
@@ -165,6 +176,10 @@
                         pipeline_base.PipelineBase):
   """Pipeline to execute MapReduce jobs.
 
+  The Shuffle stage uses Google Cloud Storage (GCS). For newly created projects,
+  GCS is activated automatically. To activate GCS, follow these instructions:
+  https://cloud.google.com/storage/docs/signup#activate
+
   Args:
     job_name: job name as string.
     mapper_spec: specification of mapper to use.
@@ -184,7 +199,7 @@
   Returns:
     result_status: one of model.MapreduceState._RESULTS. Check this to see
       if the job is successful.
-    default: a list of filenames if the mapreduce was sucesssful and
+    default: a list of filenames if the mapreduce was successful and
       was outputting files. An empty list otherwise.
   """
 
@@ -198,18 +213,34 @@
           reducer_params=None,
           shards=None,
           combiner_spec=None):
+
+
+    if mapper_params.get("bucket_name") is None:
+      try:
+        mapper_params["bucket_name"] = (
+            app_identity.get_default_gcs_bucket_name())
+      except Exception, e:
+        raise errors.Error("Unable to get the GCS default bucket name. "
+                           "Check to see that GCS is properly activated. "
+                           + str(e))
+    if mapper_params["bucket_name"] is None:
+      raise errors.Error("There is no GCS default bucket name. "
+                         "Check to see that GCS is properly activated.")
+
+
     map_pipeline = yield MapPipeline(job_name,
                                      mapper_spec,
                                      input_reader_spec,
                                      params=mapper_params,
                                      shards=shards)
     shuffler_pipeline = yield ShufflePipeline(
-        job_name, map_pipeline)
+        job_name, mapper_params, map_pipeline)
     reducer_pipeline = yield ReducePipeline(
         job_name,
         reducer_spec,
         output_writer_spec,
         reducer_params,
+        mapper_params["bucket_name"],
         shuffler_pipeline,
         combiner_spec=combiner_spec)
     with pipeline.After(reducer_pipeline):
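
With the shuffle stage on GCS, MapreducePipeline now resolves a bucket up
front: an explicit mapper_params["bucket_name"] wins, otherwise the app's
default GCS bucket is used, and a job with no resolvable bucket fails fast.
A sketch of starting a job under the new scheme (the handler and reader
names are placeholders):

    from google.appengine.ext.mapreduce import mapreduce_pipeline

    job = mapreduce_pipeline.MapreducePipeline(
        "word-count",
        mapper_spec="main.word_count_map",
        reducer_spec="main.word_count_reduce",
        input_reader_spec=("google.appengine.ext.mapreduce"
                           ".input_readers.DatastoreInputReader"),
        mapper_params={"entity_kind": "Page"},  # no bucket_name given, so
                                                # the default bucket is used
        shards=16)
    job.start()
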
diff --git a/google/appengine/ext/mapreduce/model.py b/google/appengine/ext/mapreduce/model.py
index 4174931..c66b6c3 100644
--- a/google/appengine/ext/mapreduce/model.py
+++ b/google/appengine/ext/mapreduce/model.py
@@ -41,6 +41,7 @@
 
 
 
+
 __all__ = ["MapreduceState",
            "MapperSpec",
            "MapreduceControl",
@@ -910,6 +911,7 @@
 
   mapreduce_id = db.StringProperty(required=True)
   active = db.BooleanProperty(default=True, indexed=False)
+  input_finished = db.BooleanProperty(default=False, indexed=False)
   counters_map = json_util.JsonProperty(
       CountersMap, default=CountersMap(), indexed=False)
   result_status = db.StringProperty(choices=_RESULTS, indexed=False)
@@ -958,6 +960,7 @@
     self.last_work_item = ""
     self.active = True
     self.result_status = None
+    self.input_finished = False
     self.counters_map = CountersMap()
     self.slice_id = 0
     self.slice_start_time = None
@@ -990,6 +993,12 @@
     self.active = False
     self.result_status = self.RESULT_ABORTED
 
+  def set_input_finished(self):
+    self.input_finished = True
+
+  def is_input_finished(self):
+    return self.input_finished
+
   def set_for_success(self):
     self.active = False
     self.result_status = self.RESULT_SUCCESS
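
input_finished gives ShardState a persisted phase between "processing" and
"done": the worker records that input is exhausted, and only a later slice
finalizes output and flips the shard to success (see the handlers.py hunk
above). The transition, in miniature:

    # End of the slice that drained the input:
    shard_state.set_input_finished()    # persisted with the shard state

    # Start of the following slice:
    if shard_state.is_input_finished():
      finalize_output_writer()          # placeholder for writer finalization
      shard_state.set_for_success()
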
diff --git a/google/appengine/ext/mapreduce/namespace_range.py b/google/appengine/ext/mapreduce/namespace_range.py
index b2d2992..5af2417 100644
--- a/google/appengine/ext/mapreduce/namespace_range.py
+++ b/google/appengine/ext/mapreduce/namespace_range.py
@@ -33,6 +33,7 @@
 
 
 
+
 __all__ = [
     'NAMESPACE_CHARACTERS',
     'MAX_NAMESPACE_LENGTH',
diff --git a/google/appengine/ext/mapreduce/operation/__init__.py b/google/appengine/ext/mapreduce/operation/__init__.py
index 1f99022..ae4793f 100644
--- a/google/appengine/ext/mapreduce/operation/__init__.py
+++ b/google/appengine/ext/mapreduce/operation/__init__.py
@@ -39,6 +39,7 @@
 
 
 
+
 import db
 import counters
 from base import Operation
diff --git a/google/appengine/ext/mapreduce/operation/base.py b/google/appengine/ext/mapreduce/operation/base.py
index a312488..cad2472 100644
--- a/google/appengine/ext/mapreduce/operation/base.py
+++ b/google/appengine/ext/mapreduce/operation/base.py
@@ -33,6 +33,7 @@
 """Base operation class."""
 
 
+
 __all__ = ['Operation']
 
 
diff --git a/google/appengine/ext/mapreduce/operation/counters.py b/google/appengine/ext/mapreduce/operation/counters.py
index e2bc7e8..32945a8 100644
--- a/google/appengine/ext/mapreduce/operation/counters.py
+++ b/google/appengine/ext/mapreduce/operation/counters.py
@@ -33,6 +33,7 @@
 """Counters-related operations."""
 
 
+
 __all__ = ['Increment']
 
 
diff --git a/google/appengine/ext/mapreduce/operation/db.py b/google/appengine/ext/mapreduce/operation/db.py
index 31899b0..bb53f78 100644
--- a/google/appengine/ext/mapreduce/operation/db.py
+++ b/google/appengine/ext/mapreduce/operation/db.py
@@ -33,6 +33,7 @@
 """DB-related operations."""
 
 
+
 __all__ = ['Put', 'Delete']
 
 
diff --git a/google/appengine/ext/mapreduce/output_writers.py b/google/appengine/ext/mapreduce/output_writers.py
index a7270ab..5f321bf 100644
--- a/google/appengine/ext/mapreduce/output_writers.py
+++ b/google/appengine/ext/mapreduce/output_writers.py
@@ -33,6 +33,7 @@
 from __future__ import with_statement
 
 
+
 __all__ = [
     "BlobstoreOutputWriter",
     "BlobstoreOutputWriterBase",
@@ -40,12 +41,15 @@
     "FileOutputWriter",
     "FileOutputWriterBase",
     "FileRecordsOutputWriter",
+    "GoogleCloudStorageConsistentOutputWriter",
+    "GoogleCloudStorageConsistentRecordOutputWriter",
     "KeyValueBlobstoreOutputWriter",
     "KeyValueFileOutputWriter",
     "COUNTER_IO_WRITE_BYTES",
     "COUNTER_IO_WRITE_MSEC",
     "OutputWriter",
     "RecordsPool",
+    "GCSRecordsPool"
     ]
 
 
@@ -55,6 +59,7 @@
 import gc
 import logging
 import pickle
+import random
 import string
 import time
 
@@ -66,21 +71,32 @@
 from google.appengine.ext.mapreduce import model
 from google.appengine.ext.mapreduce import operation
 from google.appengine.ext.mapreduce import records
+from google.appengine.ext.mapreduce import shard_life_cycle
 
 
 
 try:
 
+  cloudstorage = None
   from google.appengine.ext import cloudstorage
   if hasattr(cloudstorage, "_STUB"):
     cloudstorage = None
 
   if cloudstorage:
     from google.appengine.ext.cloudstorage import cloudstorage_api
+    from google.appengine.ext.cloudstorage import errors as cloud_errors
 except ImportError:
   pass
 
 
+if cloudstorage is None:
+  try:
+    import cloudstorage
+    from cloudstorage import cloudstorage_api
+  except ImportError:
+    pass
+
+
 
 COUNTER_IO_WRITE_BYTES = "io-write-bytes"
 
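
The import dance above prefers the SDK-bundled
google.appengine.ext.cloudstorage, treats the local stub (flagged by _STUB)
as unavailable, and only then falls back to a standalone cloudstorage package
shipped with the app. Either way the module ends up bound to the same
cloudstorage name used by GCSRecordsPool and the GCS writers below.
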
@@ -285,10 +301,10 @@
 
 
 
-_FILES_API_FLUSH_SIZE = 128*1024
+_FILE_POOL_FLUSH_SIZE = 128*1024
 
 
-_FILES_API_MAX_SIZE = 1000*1024
+_FILE_POOL_MAX_SIZE = 1000*1024
 
 
 def _get_params(mapper_spec, allowed_keys=None, allow_old=True):
@@ -336,7 +352,7 @@
 class _FilePool(context.Pool):
   """Pool of file append operations."""
 
-  def __init__(self, flush_size_chars=_FILES_API_FLUSH_SIZE, ctx=None):
+  def __init__(self, flush_size_chars=_FILE_POOL_FLUSH_SIZE, ctx=None):
     """Constructor.
 
     Args:
@@ -368,10 +384,10 @@
     if self._size + len(data) > self._flush_size:
       self.flush()
 
-    if len(data) > _FILES_API_MAX_SIZE:
+    if len(data) > _FILE_POOL_MAX_SIZE:
       raise errors.Error(
           "Can't write more than %s bytes in one request: "
-          "risk of writes interleaving." % _FILES_API_MAX_SIZE)
+          "risk of writes interleaving." % _FILE_POOL_MAX_SIZE)
     else:
       self.__append(filename, data)
 
@@ -383,7 +399,7 @@
     start_time = time.time()
     for filename, data in self._append_buffer.iteritems():
       with files.open(filename, "a") as f:
-        if len(data) > _FILES_API_MAX_SIZE:
+        if len(data) > _FILE_POOL_MAX_SIZE:
           raise errors.Error("Bad data of length: %s" % len(data))
         if self._ctx:
           operation.counters.Increment(
@@ -397,20 +413,21 @@
     self._size = 0
 
 
-class RecordsPool(context.Pool):
-  """Pool of append operations for records files."""
+class _RecordsPoolBase(context.Pool):
+  """Base class for Pool of append operations for records files."""
 
 
   _RECORD_OVERHEAD_BYTES = 10
 
-  def __init__(self, filename,
-               flush_size_chars=_FILES_API_FLUSH_SIZE,
+  def __init__(self,
+               flush_size_chars=_FILE_POOL_FLUSH_SIZE,
                ctx=None,
                exclusive=False):
     """Constructor.
 
+    Any classes that subclass this will need to implement the _write() function.
+
     Args:
-      filename: file name to write data to as string.
       flush_size_chars: buffer flush threshold as int.
       ctx: mapreduce context as context.Context.
       exclusive: a boolean flag indicating if the pool has an exclusive
@@ -420,7 +437,6 @@
     self._flush_size = flush_size_chars
     self._buffer = []
     self._size = 0
-    self._filename = filename
     self._ctx = ctx
     self._exclusive = exclusive
 
@@ -430,9 +446,9 @@
     if self._size + data_length > self._flush_size:
       self.flush()
 
-    if not self._exclusive and data_length > _FILES_API_MAX_SIZE:
+    if not self._exclusive and data_length > _FILE_POOL_MAX_SIZE:
       raise errors.Error(
-          "Too big input %s (%s)."  % (data_length, _FILES_API_MAX_SIZE))
+          "Too big input %s (%s)."  % (data_length, _FILE_POOL_MAX_SIZE))
     else:
       self._buffer.append(data)
       self._size += data_length
@@ -451,22 +467,20 @@
     str_buf = buf.getvalue()
     buf.close()
 
-    if not self._exclusive and len(str_buf) > _FILES_API_MAX_SIZE:
+    if not self._exclusive and len(str_buf) > _FILE_POOL_MAX_SIZE:
 
       raise errors.Error(
           "Buffer too big. Can't write more than %s bytes in one request: "
           "risk of writes interleaving. Got: %s" %
-          (_FILES_API_MAX_SIZE, len(str_buf)))
+          (_FILE_POOL_MAX_SIZE, len(str_buf)))
 
 
     start_time = time.time()
-    with files.open(self._filename, "a", exclusive_lock=self._exclusive) as f:
-      f.write(str_buf)
-      if self._ctx:
-        operation.counters.Increment(
-            COUNTER_IO_WRITE_BYTES, len(str_buf))(self._ctx)
+    self._write(str_buf)
     if self._ctx:
       operation.counters.Increment(
+          COUNTER_IO_WRITE_BYTES, len(str_buf))(self._ctx)
+      operation.counters.Increment(
           COUNTER_IO_WRITE_MSEC,
           int((time.time() - start_time) * 1000))(self._ctx)
 
@@ -475,6 +489,9 @@
     self._size = 0
     gc.collect()
 
+  def _write(self, str_buf):
+    raise NotImplementedError("_write() not implemented in %s" % type(self))
+
   def __enter__(self):
     return self
 
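
The pool refactor is a template-method split: buffering, size limits, record
framing, and the flush counters all live in _RecordsPoolBase, and a backend
only supplies _write(). A toy subclass to make the contract concrete
(illustrative; the real backends follow in the next hunk):

    class _InMemoryRecordsPool(_RecordsPoolBase):
      """Illustrative only: collects flushed chunks in a list."""

      def __init__(self, **kwargs):
        super(_InMemoryRecordsPool, self).__init__(**kwargs)
        self.chunks = []

      def _write(self, str_buf):
        self.chunks.append(str_buf)   # base class already framed the records
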
@@ -482,6 +499,60 @@
     self.flush()
 
 
+class RecordsPool(_RecordsPoolBase):
+  """Pool of append operations for records using Files API."""
+
+  def __init__(self,
+               filename,
+               flush_size_chars=_FILE_POOL_FLUSH_SIZE,
+               ctx=None,
+               exclusive=False):
+    """Requires the filename of the file to write to via the Filaes API."""
+    super(RecordsPool, self).__init__(flush_size_chars, ctx, exclusive)
+    self._filename = filename
+
+  def _write(self, str_buf):
+    """Opens and appends to the filename."""
+    with files.open(self._filename, "a", exclusive_lock=self._exclusive) as f:
+      f.write(str_buf)
+
+
+class GCSRecordsPool(_RecordsPoolBase):
+  """Pool of append operations for records using GCS."""
+
+
+  _GCS_BLOCK_SIZE = 256 * 1024
+
+  def __init__(self,
+               filehandle,
+               flush_size_chars=_FILE_POOL_FLUSH_SIZE,
+               ctx=None,
+               exclusive=False):
+    """Requires the filehandle of an open GCS file to write to."""
+    super(GCSRecordsPool, self).__init__(flush_size_chars, ctx, exclusive)
+    self._filehandle = filehandle
+    self._buf_size = 0
+
+  def _write(self, str_buf):
+    """Uses the filehandle to the file in GCS to write to it."""
+    self._filehandle.write(str_buf)
+    self._buf_size += len(str_buf)
+
+  def flush(self, force=False):
+    """Flush pool contents.
+
+    Args:
+      force: boolean. If True, pad the data written so far to a multiple of
+        the minimum GCS block size before flushing the filehandle.
+    """
+    super(GCSRecordsPool, self).flush()
+    if force:
+      extra_padding = self._buf_size % self._GCS_BLOCK_SIZE
+      if extra_padding > 0:
+        self._write("\x00" * (self._GCS_BLOCK_SIZE - extra_padding))
+    self._filehandle.flush()
+
+
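The refactor above reduces each storage backend to a single hook: subclasses of _RecordsPoolBase only supply _write(). A minimal sketch of a custom pool under that contract (the in-memory class is hypothetical, not part of the SDK):

    class _InMemoryRecordsPool(_RecordsPoolBase):
      """Hypothetical pool that keeps flushed blocks in memory."""

      def __init__(self, **kwargs):
        super(_InMemoryRecordsPool, self).__init__(**kwargs)
        self.blocks = []

      def _write(self, str_buf):
        # The base class owns buffering, size checks and IO counters;
        # this hook only receives each serialized block.
        self.blocks.append(str_buf)

GCSRecordsPool layers one extra concern on top: a forced flush pads the data to a multiple of _GCS_BLOCK_SIZE, since buffered GCS writers expect intermediate writes to be block-aligned.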
 class FileOutputWriterBase(OutputWriter):
   """Base class for all file output writers."""
 
@@ -876,11 +947,9 @@
   """Output writer for KeyValue records files in blobstore."""
 
 
-class _GoogleCloudStorageOutputWriter(OutputWriter):
-  """Output writer to Google Cloud Storage using the cloudstorage library.
-
-  This class is expected to be subclassed with a writer that applies formatting
-  to user-level records.
+class _GoogleCloudStorageBase(shard_life_cycle._ShardLifeCycle,
+                              OutputWriter):
+  """Base abstract class for all GCS writers.
 
   Required configuration in the mapper_spec.output_writer dictionary.
     BUCKET_NAME_PARAM: name of the bucket to use (with no extra delimiters or
@@ -900,15 +969,8 @@
       be applied by the writer.
     CONTENT_TYPE_PARAM: mime type to apply on the files. If not provided, Google
       Cloud Storage will apply its default.
-    _NO_DUPLICATE: if True, slice recovery logic will be used to ensure
-      output files has no duplicates. Every shard should have only one final
-      output in user specified location. But it may produce many smaller
-      files (named "seg") due to slice recovery. These segs live in a
-      tmp directory and should be combined and renamed to the final location.
-      In current impl, they are not combined.
   """
 
-
   BUCKET_NAME_PARAM = "bucket_name"
   ACL_PARAM = "acl"
   NAMING_FORMAT_PARAM = "naming_format"
@@ -916,13 +978,179 @@
   _NO_DUPLICATE = "no_duplicate"
 
 
-  DEFAULT_NAMING_FORMAT = "$name/$id/output-$num"
+  _ACCOUNT_ID_PARAM = "account_id"
+
+
+class _GoogleCloudStorageOutputWriterBase(_GoogleCloudStorageBase):
+  """Base class for GCS writers directly interacting with GCS.
+
+  Base class for both _GoogleCloudStorageOutputWriter and
+  GoogleCloudStorageConsistentOutputWriter.
+
+  This class is expected to be subclassed with a writer that applies formatting
+  to user-level records.
+
+  Subclasses need to define to_json, from_json, create, finalize and
+  _get_write_buffer methods.
+
+  See _GoogleCloudStorageBase for config options.
+  """
+
+
+  _DEFAULT_NAMING_FORMAT = "$name/$id/output-$num"
 
 
   _MR_TMP = "gae_mr_tmp"
   _TMP_FILE_NAMING_FORMAT = (
       _MR_TMP + "/$name/$id/attempt-$attempt/output-$num/seg-$seg")
-  _ACCOUNT_ID_PARAM = "account_id"
+
+  @classmethod
+  def _generate_filename(cls, writer_spec, name, job_id, num,
+                         attempt=None, seg_index=None):
+    """Generates a filename for a particular output.
+
+    Args:
+      writer_spec: specification dictionary for the output writer.
+      name: name of the job.
+      job_id: the ID number assigned to the job.
+      num: shard number.
+      attempt: the shard attempt number.
+      seg_index: index of the seg. None means the final output.
+
+    Returns:
+      a string containing the filename.
+
+    Raises:
+      BadWriterParamsError: if the template contains any errors such as invalid
+        syntax or contains unknown substitution placeholders.
+    """
+    naming_format = cls._TMP_FILE_NAMING_FORMAT
+    if seg_index is None:
+      naming_format = writer_spec.get(cls.NAMING_FORMAT_PARAM,
+                                      cls._DEFAULT_NAMING_FORMAT)
+
+    template = string.Template(naming_format)
+    try:
+
+      if seg_index is None:
+        return template.substitute(name=name, id=job_id, num=num)
+      else:
+        return template.substitute(name=name, id=job_id, num=num,
+                                   attempt=attempt,
+                                   seg=seg_index)
+    except ValueError, error:
+      raise errors.BadWriterParamsError("Naming template is bad, %s" % (error))
+    except KeyError, error:
+      raise errors.BadWriterParamsError("Naming template '%s' has extra "
+                                        "mappings, %s" % (naming_format, error))
+
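The naming formats resolve through string.Template substitution; a short illustration with placeholder values:

    import string

    template = string.Template("$name/$id/output-$num")  # the default format
    print template.substitute(name="word-count", id="1234567890", num=0)
    # -> word-count/1234567890/output-0

An unknown placeholder such as $foo raises KeyError, which _generate_filename surfaces as BadWriterParamsError; malformed templates raise ValueError and are reported the same way.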
+  @classmethod
+  def get_params(cls, mapper_spec, allowed_keys=None, allow_old=True):
+    params = _get_params(mapper_spec, allowed_keys, allow_old)
+
+
+    if (mapper_spec.params.get(cls.BUCKET_NAME_PARAM) is not None and
+        params.get(cls.BUCKET_NAME_PARAM) is None):
+      params[cls.BUCKET_NAME_PARAM] = mapper_spec.params[cls.BUCKET_NAME_PARAM]
+    return params
+
+  @classmethod
+  def validate(cls, mapper_spec):
+    """Validate mapper specification.
+
+    Args:
+      mapper_spec: an instance of model.MapperSpec.
+
+    Raises:
+      BadWriterParamsError: if the specification is invalid for any reason such
+        as missing the bucket name or providing an invalid bucket name.
+    """
+    writer_spec = cls.get_params(mapper_spec, allow_old=False)
+
+
+    if cls.BUCKET_NAME_PARAM not in writer_spec:
+      raise errors.BadWriterParamsError(
+          "%s is required for Google Cloud Storage" %
+          cls.BUCKET_NAME_PARAM)
+    try:
+      cloudstorage.validate_bucket_name(
+          writer_spec[cls.BUCKET_NAME_PARAM])
+    except ValueError, error:
+      raise errors.BadWriterParamsError("Bad bucket name, %s" % (error))
+
+    if writer_spec.get(cls._NO_DUPLICATE, False) not in (True, False):
+      raise errors.BadWriterParamsError("No duplicate must a boolean.")
+
+
+    cls._generate_filename(writer_spec, "name", "id", 0)
+    cls._generate_filename(writer_spec, "name", "id", 0, 1, 0)
+
+  @classmethod
+  def _open_file(cls, writer_spec, filename_suffix):
+    """Opens a new gcs file for writing."""
+
+    filename = "/%s/%s" % (writer_spec[cls.BUCKET_NAME_PARAM], filename_suffix)
+
+    content_type = writer_spec.get(cls.CONTENT_TYPE_PARAM, None)
+
+    options = {}
+    if cls.ACL_PARAM in writer_spec:
+      options["x-goog-acl"] = writer_spec.get(cls.ACL_PARAM)
+
+    account_id = writer_spec.get(cls._ACCOUNT_ID_PARAM, None)
+
+    return cloudstorage.open(filename, mode="w", content_type=content_type,
+                             options=options, _account_id=account_id)
+
+  @classmethod
+  def _get_filename(cls, shard_state):
+    return shard_state.writer_state["filename"]
+
+  @classmethod
+  def get_filenames(cls, mapreduce_state):
+    filenames = []
+    for shard in model.ShardState.find_all_by_mapreduce_state(mapreduce_state):
+      if shard.result_status == model.ShardState.RESULT_SUCCESS:
+        filenames.append(cls._get_filename(shard))
+    return filenames
+
+  def _get_write_buffer(self):
+    """Returns a buffer to be used by the write() method."""
+    raise NotImplementedError()
+
+  def write(self, data):
+    """Write data to the GoogleCloudStorage file.
+
+    Args:
+      data: string containing the data to be written.
+    """
+    start_time = time.time()
+    self._get_write_buffer().write(data)
+    ctx = context.get()
+    operation.counters.Increment(COUNTER_IO_WRITE_BYTES, len(data))(ctx)
+    operation.counters.Increment(
+        COUNTER_IO_WRITE_MSEC, int((time.time() - start_time) * 1000))(ctx)
+
+
+  def _supports_shard_retry(self, tstate):
+    return True
+
+
+class _GoogleCloudStorageOutputWriter(_GoogleCloudStorageOutputWriterBase):
+  """Naive version of GoogleCloudStorageWriter.
+
+  This version is known to create inconsistent outputs if the input changes
+  during slice retries. Consider using GoogleCloudStorageConsistentOutputWriter
+  instead.
+
+  Optional configuration in the mapper_spec.output_writer dictionary:
+    _NO_DUPLICATE: if True, slice recovery logic will be used to ensure
+      output files have no duplicates. Every shard should have only one final
+      output in the user-specified location, but it may produce many smaller
+      files (named "seg") due to slice recovery. These segs live in a
+      tmp directory and should be combined and renamed to the final location.
+      In the current implementation, they are not combined.
+  """
   _SEG_PREFIX = "seg_prefix"
   _LAST_SEG_INDEX = "last_seg_index"
   _JSON_GCS_BUFFER = "buffer"
@@ -955,81 +1183,13 @@
 
       self._seg_valid_length = 0
 
-  @classmethod
-  def _generate_filename(cls, writer_spec, name, job_id, num,
-                         attempt=None, seg_index=None):
-    """Generates a filename for a particular output.
-
-    Args:
-      writer_spec: specification dictionary for the output writer.
-      name: name of the job.
-      job_id: the ID number assigned to the job.
-      num: shard number.
-      attempt: the shard attempt number.
-      seg_index: index of the seg. None means the final output.
-
-    Returns:
-      a string containing the filename.
-
-    Raises:
-      BadWriterParamsError: if the template contains any errors such as invalid
-        syntax or contains unknown substitution placeholders.
-    """
-    naming_format = cls._TMP_FILE_NAMING_FORMAT
-    if seg_index is None:
-      naming_format = writer_spec.get(cls.NAMING_FORMAT_PARAM,
-                                      cls.DEFAULT_NAMING_FORMAT)
-
-    template = string.Template(naming_format)
-    try:
-
-      if seg_index is None:
-        return template.substitute(name=name, id=job_id, num=num)
-      else:
-        return template.substitute(name=name, id=job_id, num=num,
-                                   attempt=attempt,
-                                   seg=seg_index)
-    except ValueError, error:
-      raise errors.BadWriterParamsError("Naming template is bad, %s" % (error))
-    except KeyError, error:
-      raise errors.BadWriterParamsError("Naming template '%s' has extra "
-                                        "mappings, %s" % (naming_format, error))
-
-  @classmethod
-  def validate(cls, mapper_spec):
-    """Validate mapper specification.
-
-    Args:
-      mapper_spec: an instance of model.MapperSpec.
-
-    Raises:
-      BadWriterParamsError: if the specification is invalid for any reason such
-        as missing the bucket name or providing an invalid bucket name.
-    """
-    writer_spec = _get_params(mapper_spec, allow_old=False)
-
-
-    if cls.BUCKET_NAME_PARAM not in writer_spec:
-      raise errors.BadWriterParamsError(
-          "%s is required for Google Cloud Storage" %
-          cls.BUCKET_NAME_PARAM)
-    try:
-      cloudstorage.validate_bucket_name(
-          writer_spec[cls.BUCKET_NAME_PARAM])
-    except ValueError, error:
-      raise errors.BadWriterParamsError("Bad bucket name, %s" % (error))
-
-    if writer_spec.get(cls._NO_DUPLICATE, False) not in (True, False):
-      raise errors.BadWriterParamsError("No duplicate must a boolean.")
-
-
-    cls._generate_filename(writer_spec, "name", "id", 0)
-    cls._generate_filename(writer_spec, "name", "id", 0, 1, 0)
+  def _get_write_buffer(self):
+    return self._streaming_buffer
 
   @classmethod
   def create(cls, mr_spec, shard_number, shard_attempt, _writer_state=None):
     """Inherit docs."""
-    writer_spec = _get_params(mr_spec.mapper, allow_old=False)
+    writer_spec = cls.get_params(mr_spec.mapper, allow_old=False)
     seg_index = None
     if writer_spec.get(cls._NO_DUPLICATE, False):
       seg_index = 0
@@ -1044,37 +1204,10 @@
   @classmethod
   def _create(cls, writer_spec, filename_suffix):
     """Helper method that actually creates the file in cloud storage."""
-
-    filename = "/%s/%s" % (writer_spec[cls.BUCKET_NAME_PARAM], filename_suffix)
-
-    content_type = writer_spec.get(cls.CONTENT_TYPE_PARAM, None)
-
-    options = {}
-    if cls.ACL_PARAM in writer_spec:
-      options["x-goog-acl"] = writer_spec.get(cls.ACL_PARAM)
-
-    account_id = writer_spec.get(cls._ACCOUNT_ID_PARAM, None)
-
-    writer = cloudstorage.open(filename, mode="w",
-                               content_type=content_type,
-                               options=options,
-                               _account_id=account_id)
-
+    writer = cls._open_file(writer_spec, filename_suffix)
     return cls(writer, writer_spec=writer_spec)
 
   @classmethod
-  def _get_filename(cls, shard_state):
-    return shard_state.writer_state["filename"]
-
-  @classmethod
-  def get_filenames(cls, mapreduce_state):
-    filenames = []
-    for shard in model.ShardState.find_all_by_mapreduce_state(mapreduce_state):
-      if shard.result_status == model.ShardState.RESULT_SUCCESS:
-        filenames.append(cls._get_filename(shard))
-    return filenames
-
-  @classmethod
   def from_json(cls, state):
     writer = cls(pickle.loads(state[cls._JSON_GCS_BUFFER]))
     no_dup = state.get(cls._JSON_NO_DUP, False)
@@ -1084,10 +1217,11 @@
       writer._seg_index = state[cls._JSON_SEG_INDEX]
     return writer
 
-  def to_json(self):
-
+  def end_slice(self, slice_ctx):
     if not self._streaming_buffer.closed:
       self._streaming_buffer.flush()
+
+  def to_json(self):
     result = {self._JSON_GCS_BUFFER: pickle.dumps(self._streaming_buffer),
               self._JSON_NO_DUP: self._no_dup}
     if self._no_dup:
@@ -1101,19 +1235,6 @@
           self._JSON_SEG_INDEX: self._seg_index})
     return result
 
-  def write(self, data):
-    """Write data to the GoogleCloudStorage file.
-
-    Args:
-      data: string containing the data to be written.
-    """
-    start_time = time.time()
-    self._streaming_buffer.write(data)
-    ctx = context.get()
-    operation.counters.Increment(COUNTER_IO_WRITE_BYTES, len(data))(ctx)
-    operation.counters.Increment(
-        COUNTER_IO_WRITE_MSEC, int((time.time() - start_time) * 1000))(ctx)
-
   def finalize(self, ctx, shard_state):
     self._streaming_buffer.close()
 
@@ -1125,7 +1246,7 @@
 
 
       mr_spec = ctx.mapreduce_spec
-      writer_spec = _get_params(mr_spec.mapper, allow_old=False)
+      writer_spec = self.get_params(mr_spec.mapper, allow_old=False)
       filename = self._generate_filename(writer_spec,
                                          mr_spec.name,
                                          mr_spec.mapreduce_id,
@@ -1141,12 +1262,8 @@
     else:
       shard_state.writer_state = {"filename": self._streaming_buffer.name}
 
-
-  def _supports_shard_retry(self, tstate):
-    return True
-
   def _supports_slice_recovery(self, mapper_spec):
-    writer_spec = _get_params(mapper_spec, allow_old=False)
+    writer_spec = self.get_params(mapper_spec, allow_old=False)
     return writer_spec.get(self._NO_DUPLICATE, False)
 
   def _recover(self, mr_spec, shard_number, shard_attempt):
@@ -1173,7 +1290,7 @@
                     self._seg_valid_length})
       next_seg_index = self._seg_index + 1
 
-    writer_spec = _get_params(mr_spec.mapper, allow_old=False)
+    writer_spec = self.get_params(mr_spec.mapper, allow_old=False)
 
     key = self._generate_filename(
         writer_spec, mr_spec.name,
@@ -1185,45 +1302,279 @@
     new_writer._seg_index = next_seg_index
     return new_writer
 
+  def _get_filename_for_test(self):
+    return self._streaming_buffer.name
 
-class _GoogleCloudStorageRecordOutputWriter(_GoogleCloudStorageOutputWriter):
-  """Write data to the Google Cloud Storage file using LevelDB format.
 
-  Data are written to cloudstorage in record format. On writer serializaton,
-  up to 32KB padding may be added to ensure the next slice aligns with
-  record boundary.
+class _ConsistentStatus(object):
+  """Object used to pass status to the next slice."""
 
-  See the _GoogleCloudStorageOutputWriter for configuration options.
+  def __init__(self):
+    self.writer_spec = None
+    self.mapreduce_id = None
+    self.shard = None
+    self.mainfile = None
+    self.tmpfile = None
+    self.tmpfile_1ago = None
+
+
+class GoogleCloudStorageConsistentOutputWriter(
+    _GoogleCloudStorageOutputWriterBase):
+  """Output writer to Google Cloud Storage using the cloudstorage library.
+
+  This version ensures that the output written to GCS is consistent.
   """
 
-  def __init__(self,
-               streaming_buffer,
-               writer_spec=None):
-    """Initialize a CloudStorageOutputWriter instance.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+  _JSON_STATUS = "status"
+  _RAND_BITS = 128
+  _REWRITE_BLOCK_SIZE = 1024 * 256
+  _REWRITE_MR_TMP = "gae_mr_tmp"
+  _TMPFILE_PATTERN = _REWRITE_MR_TMP + "/$id-tmp-$shard-$random"
+  _TMPFILE_PREFIX = _REWRITE_MR_TMP + "/$id-tmp-$shard-"
+
+  def __init__(self, status):
+    """Initialize a GoogleCloudStorageConsistentOutputWriter instance.
 
     Args:
-      streaming_buffer: an instance of writable buffer from cloudstorage_api.
-      writer_spec: the specification for the writer.
+      status: an instance of _ConsistentStatus with initialized tmpfile
+              and mainfile.
     """
-    super(_GoogleCloudStorageRecordOutputWriter, self).__init__(
-        streaming_buffer, writer_spec)
-    self._record_writer = records.RecordsWriter(
-        super(_GoogleCloudStorageRecordOutputWriter, self))
+
+    self.status = status
+    self._data_written_to_slice = False
+
+  def _get_write_buffer(self):
+    if not self.status.tmpfile:
+      raise errors.FailJobError(
+          "write buffer called but empty, begin_slice missing?")
+    return self.status.tmpfile
+
+  def _get_filename_for_test(self):
+    return self.status.mainfile.name
+
+  @classmethod
+  def create(cls, mr_spec, shard_number, shard_attempt, _writer_state=None):
+    """Inherit docs."""
+    writer_spec = cls.get_params(mr_spec.mapper, allow_old=False)
+
+
+    key = cls._generate_filename(writer_spec, mr_spec.name,
+                                 mr_spec.mapreduce_id,
+                                 shard_number, shard_attempt)
+
+    status = _ConsistentStatus()
+    status.writer_spec = writer_spec
+    status.mainfile = cls._open_file(writer_spec, key)
+    status.mapreduce_id = mr_spec.mapreduce_id
+    status.shard = shard_number
+
+    return cls(status)
+
+  def _remove_file(self, filename, writer_spec):
+    if not filename:
+      return
+    account_id = writer_spec.get(self._ACCOUNT_ID_PARAM, None)
+    try:
+      cloudstorage_api.delete(filename, _account_id=account_id)
+    except cloud_errors.NotFoundError:
+      pass
+
+  def _rewrite_tmpfile(self, mainfile, tmpfile, writer_spec):
+    """Copies contents of tmpfile (name) to mainfile (buffer)."""
+    if mainfile.closed:
+
+      return
+
+    account_id = writer_spec.get(self._ACCOUNT_ID_PARAM, None)
+    f = cloudstorage_api.open(tmpfile, _account_id=account_id)
+
+    data = f.read(self._REWRITE_BLOCK_SIZE)
+    while data:
+      mainfile.write(data)
+      data = f.read(self._REWRITE_BLOCK_SIZE)
+    f.close()
+    mainfile.flush()
+
+  @classmethod
+  def _create_tmpfile(cls, status):
+    """Creates a new random-named tmpfile."""
+
+
+
+
+
+
+
+    tmpl = string.Template(cls._TMPFILE_PATTERN)
+    filename = tmpl.substitute(
+        id=status.mapreduce_id, shard=status.shard,
+        random=random.getrandbits(cls._RAND_BITS))
+
+    return cls._open_file(status.writer_spec, filename)
+
+  def begin_slice(self, slice_ctx):
+    status = self.status
+    writer_spec = status.writer_spec
+
+
+    if status.tmpfile_1ago:
+      self._remove_file(status.tmpfile_1ago.name, writer_spec)
+
+
+    if status.tmpfile:
+      self._rewrite_tmpfile(status.mainfile, status.tmpfile.name, writer_spec)
+
+
+    self._try_to_clean_garbage(writer_spec)
+
+
+    status.tmpfile_1ago = status.tmpfile
+    status.tmpfile = self._create_tmpfile(status)
+
+
+    if status.mainfile.closed:
+      status.tmpfile.close()
+      self._remove_file(status.tmpfile.name, writer_spec)
+
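begin_slice is where the consistency guarantee lives: the previous slice's tmpfile is replayed into mainfile exactly once before a fresh tmpfile is opened. A self-contained model of that rotation (plain lists stand in for GCS buffers; all names illustrative):

    class _RotationModel(object):
      """Models the tmpfile / tmpfile_1ago bookkeeping with lists."""

      def __init__(self):
        self.mainfile = []
        self.tmpfile = None       # data written during the current slice
        self.tmpfile_1ago = None  # previous tmpfile, kept until replayed

      def begin_slice(self):
        self.tmpfile_1ago = None             # already replayed; safe to drop
        if self.tmpfile is not None:
          self.mainfile.extend(self.tmpfile)  # replay exactly once
        self.tmpfile_1ago = self.tmpfile
        self.tmpfile = []

      def write(self, data):
        self.tmpfile.append(data)

Because to_json() pickles the entire status (mainfile buffer included) between slices, a failed slice rolls back to the previous checkpoint, so the replay cannot double-count data.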
+  @classmethod
+  def from_json(cls, state):
+    return cls(pickle.loads(state[cls._JSON_STATUS]))
+
+  def end_slice(self, slice_ctx):
+    self.status.tmpfile.close()
 
   def to_json(self):
-
-    if not self._streaming_buffer.closed:
-      self._record_writer._pad_block()
-    return super(_GoogleCloudStorageRecordOutputWriter, self).to_json()
+    return {self._JSON_STATUS: pickle.dumps(self.status)}
 
   def write(self, data):
-    """Write a single record of data to the file using LevelDB format.
+    super(GoogleCloudStorageConsistentOutputWriter, self).write(data)
+    self._data_written_to_slice = True
 
-    Args:
-      data: string containing the data to be written.
-    """
+  def _try_to_clean_garbage(self, writer_spec):
+
+
+    tmpl = string.Template(self._TMPFILE_PREFIX)
+    prefix = tmpl.substitute(
+        id=self.status.mapreduce_id, shard=self.status.shard)
+    bucket = self.status.writer_spec[self.BUCKET_NAME_PARAM]
+    account_id = writer_spec.get(self._ACCOUNT_ID_PARAM, None)
+    for f in cloudstorage.listbucket("/%s/%s" % (bucket, prefix),
+                                     _account_id=account_id):
+      self._remove_file(f.filename, self.status.writer_spec)
+
+  def finalize(self, ctx, shard_state):
+    if self._data_written_to_slice:
+      raise errors.FailJobError(
+          "finalize() called after data was written")
+
+    if self.status.tmpfile:
+      self.status.tmpfile.close()
+    self.status.mainfile.close()
+
+
+    if self.status.tmpfile_1ago:
+      self._remove_file(self.status.tmpfile_1ago.name, self.status.writer_spec)
+    if self.status.tmpfile:
+      self._remove_file(self.status.tmpfile.name, self.status.writer_spec)
+
+    self._try_to_clean_garbage(self.status.writer_spec)
+
+    shard_state.writer_state = {"filename": self.status.mainfile.name}
+
+
+class _GoogleCloudStorageRecordOutputWriterBase(_GoogleCloudStorageBase):
+  """Wraps a GCS writer with a records.RecordsWriter.
+
+  This class wraps a WRITER_CLS (and its instance) and delegates most calls
+  to it. write() calls are done using records.RecordsWriter.
+
+  WRITER_CLS has to be set to a subclass of _GoogleCloudStorageOutputWriterBase.
+
+  For the list of supported parameters, see _GoogleCloudStorageBase.
+  """
+
+  WRITER_CLS = None
+
+  def __init__(self, writer):
+    self._writer = writer
+    self._record_writer = records.RecordsWriter(writer)
+
+  @classmethod
+  def validate(cls, mapper_spec):
+    return cls.WRITER_CLS.validate(mapper_spec)
+
+  @classmethod
+  def init_job(cls, mapreduce_state):
+    return cls.WRITER_CLS.init_job(mapreduce_state)
+
+  @classmethod
+  def finalize_job(cls, mapreduce_state):
+    return cls.WRITER_CLS.finalize_job(mapreduce_state)
+
+  @classmethod
+  def from_json(cls, state):
+    return cls(cls.WRITER_CLS.from_json(state))
+
+  def to_json(self):
+    return self._writer.to_json()
+
+  @classmethod
+  def create(cls, mr_spec, shard_number, shard_attempt, _writer_state=None):
+    return cls(cls.WRITER_CLS.create(mr_spec, shard_number, shard_attempt,
+                                     _writer_state))
+
+  def write(self, data):
     self._record_writer.write(data)
 
+  def finalize(self, ctx, shard_state):
+    return self._writer.finalize(ctx, shard_state)
+
+  @classmethod
+  def get_filenames(cls, mapreduce_state):
+    return cls.WRITER_CLS.get_filenames(mapreduce_state)
+
+  def _supports_shard_retry(self, tstate):
+    return self._writer._supports_shard_retry(tstate)
+
+  def _supports_slice_recovery(self, mapper_spec):
+    return self._writer._supports_slice_recovery(mapper_spec)
+
+  def _recover(self, mr_spec, shard_number, shard_attempt):
+    return self._writer._recover(mr_spec, shard_number, shard_attempt)
+
+  def begin_slice(self, slice_ctx):
+    return self._writer.begin_slice(slice_ctx)
+
+  def end_slice(self, slice_ctx):
+
+    if not self._writer._get_write_buffer().closed:
+      self._record_writer._pad_block()
+    return self._writer.end_slice(slice_ctx)
+
+
+class _GoogleCloudStorageRecordOutputWriter(
+    _GoogleCloudStorageRecordOutputWriterBase):
+  WRITER_CLS = _GoogleCloudStorageOutputWriter
+
+
+class GoogleCloudStorageConsistentRecordOutputWriter(
+    _GoogleCloudStorageRecordOutputWriterBase):
+  WRITER_CLS = GoogleCloudStorageConsistentOutputWriter
+
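The two declarations above show the whole pattern: a records-format variant of any writer is just WRITER_CLS plus the delegating wrapper. As a design choice, the delegation is spelled out method by method rather than via __getattr__, because classmethods such as validate, create and from_json cannot be forwarded through instance attribute lookup. A hypothetical further combination would look the same:

    class MyRecordOutputWriter(_GoogleCloudStorageRecordOutputWriterBase):
      # Any _GoogleCloudStorageOutputWriterBase subclass works here;
      # _GoogleCloudStorageOutputWriter is used purely as an example.
      WRITER_CLS = _GoogleCloudStorageOutputWriter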
 
 
 class _GoogleCloudStorageKeyValueOutputWriter(
diff --git a/google/appengine/ext/mapreduce/property_range.py b/google/appengine/ext/mapreduce/property_range.py
index 08631e2..ee7b132 100644
--- a/google/appengine/ext/mapreduce/property_range.py
+++ b/google/appengine/ext/mapreduce/property_range.py
@@ -20,6 +20,7 @@
 
 
 
+
 import datetime
 
 from google.appengine.ext import ndb
diff --git a/google/appengine/ext/mapreduce/api/map_job/shard_life_cycle.py b/google/appengine/ext/mapreduce/shard_life_cycle.py
similarity index 91%
rename from google/appengine/ext/mapreduce/api/map_job/shard_life_cycle.py
rename to google/appengine/ext/mapreduce/shard_life_cycle.py
index ba3e8c1..18a2144 100644
--- a/google/appengine/ext/mapreduce/api/map_job/shard_life_cycle.py
+++ b/google/appengine/ext/mapreduce/shard_life_cycle.py
@@ -45,7 +45,7 @@
     Make it idempotent.
 
     Args:
-      shard_ctx: map_job.ShardContext object.
+      shard_ctx: map_job_context.ShardContext object.
     """
     pass
 
@@ -59,7 +59,7 @@
     won't be called.
 
     Args:
-      shard_ctx: map_job.ShardContext object.
+      shard_ctx: map_job_context.ShardContext object.
     """
     pass
 
@@ -70,7 +70,7 @@
     Make it idempotent.
 
     Args:
-      slice_ctx: map_job.SliceContext object.
+      slice_ctx: map_job_context.SliceContext object.
     """
     pass
 
@@ -84,6 +84,6 @@
     won't be called.
 
     Args:
-      slice_ctx: map_job.SliceContext object.
+      slice_ctx: map_job_context.SliceContext object.
     """
     pass
diff --git a/google/appengine/ext/mapreduce/shuffler.py b/google/appengine/ext/mapreduce/shuffler.py
index 8901207..000dafc 100644
--- a/google/appengine/ext/mapreduce/shuffler.py
+++ b/google/appengine/ext/mapreduce/shuffler.py
@@ -29,37 +29,50 @@
 
 
 
-
 """Mapreduce shuffler implementation."""
 
 from __future__ import with_statement
 
 
 
+
 __all__ = [
     "ShufflePipeline",
     ]
 
+
+
 import gc
 import heapq
 import logging
-import os
+import pickle
 import time
 
 from appengine_pipeline.src import pipeline
 from appengine_pipeline.src.pipeline import common as pipeline_common
-from google.appengine.api import files
-from google.appengine.api import modules
 from google.appengine.api.files import file_service_pb
 from google.appengine.ext import db
 from google.appengine.ext.mapreduce import context
 from google.appengine.ext.mapreduce import errors
 from google.appengine.ext.mapreduce import input_readers
 from google.appengine.ext.mapreduce import mapper_pipeline
+from google.appengine.ext.mapreduce import model
 from google.appengine.ext.mapreduce import operation
 from google.appengine.ext.mapreduce import output_writers
 from google.appengine.ext.mapreduce import pipeline_base
 from google.appengine.ext.mapreduce import records
+from google.appengine.ext.mapreduce import util
+
+
+
+try:
+
+  from google.appengine.ext import cloudstorage
+  if hasattr(cloudstorage, "_STUB"):
+    cloudstorage = None
+except ImportError:
+  pass
+
 
 
 
@@ -94,33 +107,40 @@
   return cmp(key_record1[0], key_record2[0])
 
 
-class _BatchRecordsReader(input_readers.RecordsReader):
-  """Records reader that reads in big batches."""
+class _BatchGCSRecordsReader(
+    input_readers._GoogleCloudStorageRecordInputReader):
+  """GCS Records reader that reads in big batches."""
 
-  BATCH_SIZE = 1024*1024 * 3
+  BATCH_SIZE = 1024 * 1024 * 3
 
   def __iter__(self):
+
     records = []
     size = 0
-    for record in input_readers.RecordsReader.__iter__(self):
-      records.append(record)
-      size += len(record)
-      if size > self.BATCH_SIZE:
-        yield records
-        size = 0
-        records = []
-        gc.collect()
+    try:
+      while True:
+        record = super(_BatchGCSRecordsReader, self).next()
+        records.append(record)
+        size += len(record)
+        if size > self.BATCH_SIZE:
+          yield records
+          size = 0
+          records = []
+          gc.collect()
+    except StopIteration:
+      pass
     if records:
       yield records
       records = []
       gc.collect()
 
 
+
 def _sort_records_map(records):
   """Map function sorting records.
 
   Converts records to KeyValue protos, sorts them by key and writes them
-  into new blobstore file. Creates _OutputFile entity to record resulting
+  into new GCS file. Creates _OutputFile entity to record resulting
   file name.
 
   Args:
@@ -140,20 +160,21 @@
   key_records.sort(cmp=_compare_keys)
 
   logging.debug("Writing")
-  blob_file_name = (ctx.mapreduce_spec.name + "-" +
-                    ctx.mapreduce_id + "-output")
-  output_path = files.blobstore.create(
-      _blobinfo_uploaded_filename=blob_file_name)
-  with output_writers.RecordsPool(output_path, ctx=ctx) as pool:
+  mapper_spec = ctx.mapreduce_spec.mapper
+  params = input_readers._get_params(mapper_spec)
+  bucket_name = params.get("bucket_name")
+  filename = (ctx.mapreduce_spec.name + "/" + ctx.mapreduce_id + "/output-" +
+              ctx.shard_id + "-" + str(int(time.time())))
+  full_filename = "/%s/%s" % (bucket_name, filename)
+  filehandle = cloudstorage.open(full_filename, mode="w")
+  with output_writers.GCSRecordsPool(filehandle, ctx=ctx) as pool:
     for key_record in key_records:
       pool.append(key_record[1])
 
   logging.debug("Finalizing")
-  files.finalize(output_path)
-  output_path = files.blobstore.get_file_name(
-      files.blobstore.get_blob_key(output_path))
+  filehandle.close()
 
-  entity = _OutputFile(key_name=output_path,
+  entity = _OutputFile(key_name=full_filename,
                        parent=_OutputFile.get_root_key(ctx.mapreduce_id))
   entity.put()
 
@@ -163,24 +184,30 @@
 
   Args:
     job_name: root job name.
-    filenames: list of filenames to sort.
+    bucket_name: The name of the Google Cloud Storage bucket.
+    filenames: list of a list of filenames (hashed/bucketed) to sort,
+      as produced by _HashingGCSOutputWriter.
 
   Returns:
-    The list of lists of sorted filenames. Each list corresponds to one
-    input file. Each filenames contains a chunk of sorted data.
+    The list of lists of sorted filenames. Each list corresponds to each
+    list of input files. Each filenames contains a chunk of sorted data.
   """
-  def run(self, job_name, filenames):
+
+  def run(self, job_name, bucket_name, filenames):
     sort_mappers = []
     for i in range(len(filenames)):
-      filename = filenames[i]
+      filenames_only = util.strip_prefix_from_items("/%s/" % bucket_name,
+                                                    filenames[i])
       sort_mapper = yield mapper_pipeline.MapperPipeline(
           "%s-shuffle-sort-%s" % (job_name, str(i)),
           __name__ + "._sort_records_map",
-          __name__ + "._BatchRecordsReader",
+          __name__ + "._BatchGCSRecordsReader",
           None,
           {
-              "files": [filename],
-              "processing_rate": 1000000,
+              "input_reader": {
+                  "bucket_name": bucket_name,
+                  "objects": filenames_only,
+              },
           },
           shards=1)
       sort_mappers.append(sort_mapper)
@@ -219,7 +246,6 @@
   """
 
   def run(self, job_ids):
-    result = []
     for job_id in job_ids:
       db.delete(_OutputFile.all().ancestor(_OutputFile.get_root_key(job_id)))
 
@@ -242,6 +268,9 @@
   MAX_VALUES_COUNT_PARAM = "max_values_count"
   MAX_VALUES_SIZE_PARAM = "max_values_size"
 
+
+  GCS_BUFFER_SIZE = 256 * 1024
+
   def __init__(self,
                offsets,
                max_values_count,
@@ -263,6 +292,12 @@
 
     self._offsets is always correctly updated so that stopping iterations
     doesn't skip records and doesn't read the same record twice.
+
+    Raises:
+      Exception: when the files list and the offsets do not match.
+
+    Yields:
+      Merged values, one [key, values, partial] entry at a time.
     """
     ctx = context.get()
     mapper_spec = ctx.mapreduce_spec.mapper
@@ -278,7 +313,10 @@
 
     for (i, filename) in enumerate(filenames):
       offset = self._offsets[i]
-      reader = records.RecordsReader(files.BufferedFile(filename))
+
+
+      reader = records.RecordsReader(
+          cloudstorage.open(filename, read_buffer_size=self.GCS_BUFFER_SIZE))
       reader.seek(offset)
       readers.append((None, None, i, reader))
 
@@ -302,12 +340,12 @@
 
             should_yield = True
           elif (self._max_values_count != -1 and
-              current_count >= self._max_values_count):
+                current_count >= self._max_values_count):
 
             current_result[2] = True
             should_yield = True
           elif (self._max_values_size != -1 and
-              current_size >= self._max_values_size):
+                current_size >= self._max_values_size):
 
             current_result[2] = True
             should_yield = True
@@ -374,24 +412,33 @@
     if mapper_spec.input_reader_class() != cls:
       raise errors.BadReaderParamsError("Input reader class mismatch")
     params = mapper_spec.params
-    if not cls.FILES_PARAM in params:
+    if cls.FILES_PARAM not in params:
       raise errors.BadReaderParamsError("Missing files parameter.")
 
 
-class _HashingBlobstoreOutputWriter(output_writers.BlobstoreOutputWriterBase):
-  """An OutputWriter which outputs data into blobstore in key-value format.
+class _HashingGCSOutputWriter(output_writers.OutputWriter):
+  """An OutputWriter which outputs data into GCS in key-value format.
 
   The output is tailored towards shuffler needs. It shards key/values using
-  key hash modulo number of output files.
+  key hash modulo number of output files. Each shard hashes its keys into
+  one of shard_count files (buckets) specific to that shard. The same key
+  is hashed to the same logical file across all of the shards. The lists of
+  matching logical files are then assembled, and a list of those lists is
+  returned.
   """
 
-  def __init__(self, filenames):
+
+  BUCKET_NAME_PARAM = "bucket_name"
+
+
+  def __init__(self, filehandles):
     """Constructor.
 
     Args:
-      filenames: list of filenames that this writer outputs to.
+      filehandles: list of file handles that this writer outputs to.
     """
-    self._filenames = filenames
+    self._filehandles = filehandles
+    self._pools = [None] * len(filehandles)
 
   @classmethod
   def validate(cls, mapper_spec):
@@ -399,29 +446,17 @@
 
     Args:
       mapper_spec: an instance of model.MapperSpec to validate.
+    Raises:
+      BadWriterParamsError: when the output writer class does not match.
     """
     if mapper_spec.output_writer_class() != cls:
       raise errors.BadWriterParamsError("Output writer class mismatch")
+    params = output_writers._get_params(mapper_spec)
 
-  @classmethod
-  def init_job(cls, mapreduce_state):
-    """Initialize job-level writer state.
-
-    Args:
-      mapreduce_state: an instance of model.MapreduceState describing current
-      job. State can be modified during initialization.
-    """
-    shards = mapreduce_state.mapreduce_spec.mapper.shard_count
-
-    filenames = []
-    for i in range(shards):
-      blob_file_name = (mapreduce_state.mapreduce_spec.name +
-                        "-" + mapreduce_state.mapreduce_spec.mapreduce_id +
-                        "-output-" + str(i))
-      filenames.append(
-          files.blobstore.create(
-              _blobinfo_uploaded_filename=blob_file_name))
-    mapreduce_state.writer_state = {"filenames": filenames}
+    if cls.BUCKET_NAME_PARAM not in params:
+      raise errors.BadWriterParamsError(
+          "%s is required for the _HashingGCSOutputWriter" %
+          cls.BUCKET_NAME_PARAM)
 
   @classmethod
   def finalize_job(cls, mapreduce_state):
@@ -431,13 +466,16 @@
       mapreduce_state: an instance of model.MapreduceState describing current
         job. State can be modified during finalization.
     """
-    finalized_filenames = []
-    for filename in mapreduce_state.writer_state["filenames"]:
-      files.finalize(filename)
-      finalized_filenames.append(
-          files.blobstore.get_file_name(
-              files.blobstore.get_blob_key(filename)))
-    mapreduce_state.writer_state = {"filenames": finalized_filenames}
+    shards = mapreduce_state.mapreduce_spec.mapper.shard_count
+    filenames = []
+    for _ in range(shards):
+      filenames.append([None] * shards)
+    shard_states = model.ShardState.find_all_by_mapreduce_state(mapreduce_state)
+    for x, shard_state in enumerate(shard_states):
+      shard_filenames = shard_state.writer_state["shard_filenames"]
+      for y in range(shards):
+        filenames[y][x] = shard_filenames[y]
+    mapreduce_state.writer_state = {"filenames": filenames}
 
   @classmethod
   def from_json(cls, json):
@@ -449,7 +487,7 @@
     Returns:
       An instance of the OutputWriter configured using the values of json.
     """
-    return cls(json["filenames"])
+    return cls(pickle.loads(json["filehandles"]))
 
   def to_json(self):
     """Returns writer state to serialize in json.
@@ -457,12 +495,29 @@
     Returns:
       A json-izable version of the OutputWriter state.
     """
-    return {"filenames": self._filenames}
+
+
+
+    for pool in self._pools:
+      if pool is not None:
+        pool.flush(True)
+    return {"filehandles": pickle.dumps(self._filehandles)}
 
   @classmethod
   def create(cls, mr_spec, shard_number, shard_attempt, _writer_state=None):
     """Inherit docs."""
-    return cls(_writer_state["filenames"])
+    mapper_spec = mr_spec.mapper
+    params = output_writers._get_params(mapper_spec)
+    bucket_name = params.get(cls.BUCKET_NAME_PARAM)
+    shards = mapper_spec.shard_count
+
+    filehandles = []
+    filename = (mr_spec.name + "/" + mr_spec.mapreduce_id +
+                "/shard-" + str(shard_number) + "-bucket-")
+    for i in range(shards):
+      full_filename = "/%s/%s%d" % (bucket_name, filename, i)
+      filehandles.append(cloudstorage.open(full_filename, mode="w"))
+    return cls(filehandles)
 
   @classmethod
   def get_filenames(cls, mapreduce_state):
@@ -472,7 +527,12 @@
     return []
 
   def finalize(self, ctx, shard_state):
-    pass
+    """See parent class."""
+    filenames = []
+    for filehandle in self._filehandles:
+      filenames.append(filehandle.name)
+      filehandle.close()
+    shard_state.writer_state = {"shard_filenames": filenames}
 
   def write(self, data):
     """Write data.
@@ -492,21 +552,28 @@
       logging.error("Expecting a tuple, but got %s: %s",
                     data.__class__.__name__, data)
 
-    file_index = key.__hash__() % len(self._filenames)
-    pool_name = "kv_pool%d" % file_index
-    filename = self._filenames[file_index]
+    file_index = key.__hash__() % len(self._filehandles)
 
-    if ctx.get_pool(pool_name) is None:
-      ctx.register_pool(pool_name,
-                        output_writers.RecordsPool(filename=filename, ctx=ctx))
+
+
+
+
+    pool = self._pools[file_index]
+    if pool is None:
+      filehandle = self._filehandles[file_index]
+      pool = output_writers.GCSRecordsPool(filehandle=filehandle, ctx=ctx)
+      self._pools[file_index] = pool
+
     proto = file_service_pb.KeyValue()
     proto.set_key(key)
     proto.set_value(value)
-    ctx.get_pool(pool_name).append(proto.Encode())
+    pool.append(proto.Encode())
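Bucket selection depends only on the key, never on the shard, which is what lets the merge phase line the buckets up later; a quick runnable illustration (values arbitrary):

    num_buckets = 4
    for key in ("apple", "banana", "apple"):
      file_index = key.__hash__() % num_buckets
      # "apple" gets the same file_index both times -- and would in
      # every shard -- because only the key determines the bucket.
      print key, "->", file_index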
 
 
 class _ShardOutputs(pipeline_base.PipelineBase):
-  """Takes a flat list of filenames, returns a list of lists, each with
+  """Shards the ouputs.
+
+  Takes a flat list of filenames, returns a list of lists, each with
   one member each.
   """
 
@@ -526,6 +593,9 @@
     key: values key.
     values: values themselves.
     partial: True if more values for this key will follow. False otherwise.
+
+  Yields:
+    The serialized KeyValues proto.
   """
   proto = file_service_pb.KeyValues()
   proto.set_key(key)
@@ -544,7 +614,7 @@
       shard. Each file in the list should have keys sorted and should contain
       records with KeyValue serialized entity.
 
-  Returns:
+  Yields:
     The list of filenames, where each filename is fully merged and will contain
     records with KeyValues serialized entity.
   """
@@ -554,25 +624,34 @@
 
   _MAX_VALUES_SIZE = 1000000
 
-  def run(self, job_name, filenames):
+  def run(self, job_name, bucket_name, filenames):
     yield mapper_pipeline.MapperPipeline(
         job_name + "-shuffle-merge",
         __name__ + "._merge_map",
         __name__ + "._MergingReader",
         output_writer_spec=
-        output_writers.__name__ + ".BlobstoreRecordsOutputWriter",
+        output_writers.__name__ + "._GoogleCloudStorageRecordOutputWriter",
         params={
-          _MergingReader.FILES_PARAM: filenames,
-          _MergingReader.MAX_VALUES_COUNT_PARAM: self._MAX_VALUES_COUNT,
-          _MergingReader.MAX_VALUES_SIZE_PARAM: self._MAX_VALUES_SIZE,
-          },
+            _MergingReader.FILES_PARAM: filenames,
+            _MergingReader.MAX_VALUES_COUNT_PARAM: self._MAX_VALUES_COUNT,
+            _MergingReader.MAX_VALUES_SIZE_PARAM: self._MAX_VALUES_SIZE,
+            "output_writer": {
+                "bucket_name": bucket_name,
+            },
+        },
         shards=len(filenames))
 
 
 def _hashing_map(binary_record):
   """A map function used in hash phase.
 
-  Reads KeyValue from binary record and yields (key, value).
+  Reads KeyValue from binary record.
+
+  Args:
+    binary_record: The binary record.
+
+  Yields:
+    The (key, value) tuple parsed from the record.
   """
   proto = file_service_pb.KeyValue()
   proto.ParseFromString(binary_record)
@@ -584,116 +663,50 @@
 
   Args:
     job_name: root mapreduce job name.
+    bucket_name: The name of the Google Cloud Storage bucket.
     filenames: filenames of mapper output. Should be of records format
       with serialized KeyValue proto.
     shards: Optional. Number of output shards to generate. Defaults
       to the number of input files.
 
-  Returns:
+  Yields:
     The list of filenames. Each file is of records formad with serialized
     KeyValue proto. For each proto its output file is decided based on key
     hash. Thus all equal keys would end up in the same file.
   """
-  def run(self, job_name, filenames, shards=None):
+
+  def run(self, job_name, bucket_name, filenames, shards=None):
+    filenames_only = (
+        util.strip_prefix_from_items("/%s/" % bucket_name, filenames))
     if shards is None:
       shards = len(filenames)
     yield mapper_pipeline.MapperPipeline(
         job_name + "-shuffle-hash",
         __name__ + "._hashing_map",
-        input_readers.__name__ + ".RecordsReader",
-        output_writer_spec= __name__ + "._HashingBlobstoreOutputWriter",
-        params={'files': filenames},
+        input_readers.__name__ + "._GoogleCloudStorageRecordInputReader",
+        output_writer_spec=__name__ + "._HashingGCSOutputWriter",
+        params={
+            "input_reader": {
+                "bucket_name": bucket_name,
+                "objects": filenames_only,
+            },
+            "output_writer": {
+                "bucket_name": bucket_name,
+            },
+        },
         shards=shards)
 
 
-class _ShuffleServicePipeline(pipeline_base.PipelineBase):
-  """A pipeline to invoke shuffle service.
-
-  Args:
-    input_files: list of file names to shuffle.
-
-  Returns:
-    list of shuffled file names. Empty list if there is no input.
-  """
-  async = True
-
-  output_names = [
-
-      "_output_files",
-      ]
-
-  def run(self, job_name, input_files):
-
-
-    empty = True
-    for filename in input_files:
-      if files.stat(filename).st_size > 0:
-        empty = False
-        break
-    if empty:
-      self.complete([])
-      return
-
-    shard_number = len(input_files)
-    output_files = []
-    for i in range(shard_number):
-      blob_file_name = (job_name + "-shuffle-output-" + str(i))
-      file_name = files.blobstore.create(
-          _blobinfo_uploaded_filename=blob_file_name)
-      output_files.append(file_name)
-    self.fill(self.outputs._output_files, output_files)
-
-
-
-    target = modules.get_current_version_name()
-    module_name = modules.get_current_module_name()
-    if module_name != "default":
-
-
-
-      target = "%s.%s." % (target, module_name)
-
-    files.shuffler.shuffle("%s-%s" % (job_name, int(time.time())),
-                           input_files,
-                           output_files,
-                           {
-                               "url": self.get_callback_url(),
-
-
-
-                               "method": "GET",
-                               "queue": self.queue_name,
-                               "version": target,
-                           })
-
-  def callback(self, **kwargs):
-    if "error" in kwargs:
-      self.retry("Error from shuffle service: %s" % kwargs["error"])
-      return
-
-    output_files = self.outputs._output_files.value
-    for filename in output_files:
-      files.finalize(filename)
-
-    finalized_file_names = []
-    for filename in output_files:
-      finalized_file_names.append(
-          files.blobstore.get_file_name(
-              files.blobstore.get_blob_key(filename)))
-    self.complete(finalized_file_names)
-
-  def try_cancel(self):
-    return True
-
-
 class ShufflePipeline(pipeline_base.PipelineBase):
   """A pipeline to shuffle multiple key-value files.
 
   Args:
     job_name: The descriptive name of the overall job.
+    mapper_params: parameters for the mapper phase; must include
+      "bucket_name", the GCS bucket used for intermediate files.
     filenames: list of file names to sort. Files have to be of records format
       defined by Files API and contain serialized file_service_pb.KeyValue
-      protocol messages.
+      protocol messages. The filenames may or may not contain the
+      GCS bucket name in their path.
     shards: Optional. Number of output shards to generate. Defaults
       to the number of input files.
 
@@ -705,15 +718,48 @@
       in memory shuffler.
   """
 
-  def run(self, job_name, filenames, shards=None):
-    hashed_files = yield _HashPipeline(job_name, filenames, shards=shards)
-    sorted_files = yield _SortChunksPipeline(job_name, hashed_files)
+  def run(self, job_name, mapper_params, filenames, shards=None):
+    bucket_name = mapper_params["bucket_name"]
+    hashed_files = yield _HashPipeline(job_name, bucket_name,
+                                       filenames, shards=shards)
+    sorted_files = yield _SortChunksPipeline(job_name, bucket_name,
+                                             hashed_files)
     temp_files = [hashed_files, sorted_files]
 
-    merged_files = yield _MergePipeline(job_name, sorted_files)
+    merged_files = yield _MergePipeline(job_name, bucket_name, sorted_files)
 
     with pipeline.After(merged_files):
       all_temp_files = yield pipeline_common.Extend(*temp_files)
-      yield mapper_pipeline._CleanupPipeline(all_temp_files)
+      yield _GCSCleanupPipeline(all_temp_files)
 
     yield pipeline_common.Return(merged_files)
+
+
+class _GCSCleanupPipeline(pipeline_base.PipelineBase):
+  """A pipeline to do a cleanup for mapreduce jobs that use GCS.
+
+  Args:
+    filename_or_list: list of files or file lists to delete.
+  """
+
+
+  _MIN_RETRIES = 5
+
+  _MAX_RETRIES = 10
+
+  def delete_file_or_list(self, filename_or_list):
+    if isinstance(filename_or_list, list):
+      for filename in filename_or_list:
+        self.delete_file_or_list(filename)
+    else:
+      filename = filename_or_list
+      retry_params = cloudstorage.RetryParams(min_retries=self._MIN_RETRIES,
+                                              max_retries=self._MAX_RETRIES)
+
+      try:
+        cloudstorage.delete(filename, retry_params)
+      except:
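+        # Cleanup is best-effort; deletion failures are deliberately ignored.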
+        pass
+
+  def run(self, temp_files):
+    self.delete_file_or_list(temp_files)
diff --git a/google/appengine/ext/mapreduce/status.py b/google/appengine/ext/mapreduce/status.py
index 480e783..b4abce1 100644
--- a/google/appengine/ext/mapreduce/status.py
+++ b/google/appengine/ext/mapreduce/status.py
@@ -33,6 +33,8 @@
 """Status page handler for mapreduce framework."""
 
 
+
+
 import os
 import pkgutil
 import time
diff --git a/google/appengine/ext/mapreduce/test_support.py b/google/appengine/ext/mapreduce/test_support.py
index d46e0b3..b57f013 100644
--- a/google/appengine/ext/mapreduce/test_support.py
+++ b/google/appengine/ext/mapreduce/test_support.py
@@ -33,6 +33,7 @@
 """Utilities to aid in testing mapreduces."""
 
 
+
 import base64
 import collections
 import logging
@@ -99,9 +100,13 @@
   url = task["url"]
   handler = None
 
+  params = []
+
   for (re_str, handler_class) in handlers_map:
     re_str = "^" + re_str + "($|\\?)"
-    if re.match(re_str, url):
+    m = re.match(re_str, url)
+    if m:
+      params = m.groups()[:-1]
       break
   else:
     raise Exception("Can't determine handler for %s" % task)
@@ -166,9 +171,9 @@
     os.environ = copy_os_environ
 
     if task["method"] == "POST":
-      handler.post()
+      handler.post(*params)
     elif task["method"] == "GET":
-      handler.get()
+      handler.get(*params)
     else:
       raise Exception("Unsupported method: %s" % task.method)
   finally:
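The new params plumbing works because every pattern in handlers_map has "($|\?)" appended; m.groups()[:-1] strips that trailing anchor capture before the remaining groups are passed to the handler. A quick check (pattern and URL invented):

    import re

    re_str = "^" + r"/mapreduce/command/(\w+)" + "($|\\?)"
    m = re.match(re_str, "/mapreduce/command/start?job=1")
    print m.groups()[:-1]  # ('start',) -- the ($|\?) capture is dropped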
diff --git a/google/appengine/ext/mapreduce/util.py b/google/appengine/ext/mapreduce/util.py
index 63a1ced..92dbc4b 100644
--- a/google/appengine/ext/mapreduce/util.py
+++ b/google/appengine/ext/mapreduce/util.py
@@ -28,13 +28,12 @@
 
 
 
-
-
 """Utility functions for use with the mapreduce library."""
 
 
 
 
+
 __all__ = [
     "create_datastore_write_config",
     "for_name",
@@ -47,6 +46,7 @@
     "try_serialize_handler",
     "try_deserialize_handler",
     "CALLBACK_MR_ID_TASK_HEADER",
+    "strip_prefix_from_items"
     ]
 
 import inspect
@@ -202,7 +202,8 @@
   name doesn't contain '.', the current module will be used.
 
   Args:
-    fq_name: fully qualified name of something to find
+    fq_name: fully qualified name of something to find.
+    recursive: whether to resolve the name recursively.
 
   Returns:
     class object or None if fq_name is None.
@@ -238,7 +239,7 @@
       raise
     else:
       raise ImportError("Could not find '%s' on path '%s'" % (
-                        short_name, module_name))
+          short_name, module_name))
   except ImportError:
 
 
@@ -251,7 +252,7 @@
         raise KeyError()
     except KeyError:
       raise ImportError("Could not find '%s' on path '%s'" % (
-                        short_name, module_name))
+          short_name, module_name))
     except ImportError:
 
 
@@ -422,3 +423,25 @@
           "Object %r must be defined on the top level of a module." % obj)
     return "%s.%s" % (obj.__module__, obj.__name__)
   raise TypeError("Unexpected type %s." % type(obj))
+
+
+def strip_prefix_from_items(prefix, items):
+  """Strips out the prefix from each of the items if it is present.
+
+  Args:
+    prefix: the string that you wish to strip from the beginning of each
+      of the items.
+    items: a list of strings that may or may not contain the prefix you want
+      to strip out.
+
+  Returns:
+    items_no_prefix: a copy of the list of items (same order) without the
+      prefix (if present).
+  """
+  items_no_prefix = []
+  for item in items:
+    if item.startswith(prefix):
+      items_no_prefix.append(item[len(prefix):])
+    else:
+      items_no_prefix.append(item)
+  return items_no_prefix
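A usage sketch of the new helper (bucket and object names are placeholders):

    from google.appengine.ext.mapreduce import util

    print util.strip_prefix_from_items(
        "/my-bucket/", ["/my-bucket/a/output-0", "b/output-1"])
    # -> ['a/output-0', 'b/output-1']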
diff --git a/google/appengine/ext/ndb/query.py b/google/appengine/ext/ndb/query.py
index 8d6d47c..e558d89 100644
--- a/google/appengine/ext/ndb/query.py
+++ b/google/appengine/ext/ndb/query.py
@@ -1293,6 +1293,8 @@
     total = 0
     while rpc is not None:
       batch = yield rpc
+      options = QueryOptions(offset=options.offset - batch.skipped_results,
+                             config=options)
       rpc = batch.next_batch_async(options)
       total += batch.skipped_results
     raise tasklets.Return(total)
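The fix above matters because each batch may skip only part of the requested offset; the remainder must shrink by batch.skipped_results before the next batch is fetched, or the query would keep re-applying the full offset. A toy trace of the bookkeeping (numbers invented):

    offset = 1000                         # offset requested by the caller
    total = 0
    for skipped in (300, 300, 250, 150):  # skipped_results per batch
      total += skipped
      offset -= skipped                   # remaining offset for next batch
    assert total == 1000 and offset == 0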
diff --git a/google/appengine/ext/preload/__init__.py b/google/appengine/ext/preload/__init__.py
index 257e729..ea1771a 100644
--- a/google/appengine/ext/preload/__init__.py
+++ b/google/appengine/ext/preload/__init__.py
@@ -34,6 +34,7 @@
 
 
 
+
 import os
 _original_os_urandom = os.urandom
 def os_urandom_replacement(n):
diff --git a/google/appengine/ext/remote_api/handler.py b/google/appengine/ext/remote_api/handler.py
index 8e81f95..ee9ee3b 100644
--- a/google/appengine/ext/remote_api/handler.py
+++ b/google/appengine/ext/remote_api/handler.py
@@ -59,6 +59,7 @@
 
 
 
+
 import google
 import hashlib
 import logging
diff --git a/google/appengine/ext/remote_api/remote_api_services.py b/google/appengine/ext/remote_api/remote_api_services.py
index 289116b..628f724 100644
--- a/google/appengine/ext/remote_api/remote_api_services.py
+++ b/google/appengine/ext/remote_api/remote_api_services.py
@@ -240,6 +240,8 @@
                         search_service_pb.ListIndexesResponse),
         'Search': (search_service_pb.SearchRequest,
                    search_service_pb.SearchResponse),
+        'DeleteSchema': (search_service_pb.DeleteSchemaRequest,
+                         search_service_pb.DeleteSchemaResponse),
     },
     'modules': {
         'GetModules': (modules_service_pb.GetModulesRequest,
diff --git a/google/appengine/ext/remote_api/remote_api_stub.py b/google/appengine/ext/remote_api/remote_api_stub.py
index 1304812..7715008 100644
--- a/google/appengine/ext/remote_api/remote_api_stub.py
+++ b/google/appengine/ext/remote_api/remote_api_stub.py
@@ -65,6 +65,7 @@
 
 
 
+
 import google
 import os
 import pickle
@@ -660,6 +661,155 @@
   return app_id, server
 
 
+
+_OAUTH_SCOPES = [
+    'https://www.googleapis.com/auth/appengine.apis',
+    'https://www.googleapis.com/auth/userinfo.email',
+    ]
+
+
+
+def _ConfigureRemoteApiWithKeyFile(servername,
+                                   path,
+                                   service_account,
+                                   key_file_path):
+  """Does necessary setup to allow easy remote access to App Engine APIs.
+
+  This function uses OAuth2 with a credential derived from service_account and
+  key_file_path to communicate with App Engine APIs.
+
+  Use of this method requires an encryption library to be installed.
+
+  Args:
+    servername: The hostname your app is deployed on (typically,
+        <app_id>.appspot.com).
+    path: The path to the remote_api handler for your app
+      (for example, '/_ah/remote_api').
+    service_account: The email address of the service account to use for
+      making OAuth requests.
+    key_file_path: The path to a .p12 file containing the private key for
+      service_account.
+
+  Returns:
+    server, a server which may be useful for calling the application directly.
+
+  Raises:
+    urllib2.HTTPError: if app_id is not provided and there is an error while
+      retrieving it.
+    ConfigurationError: if there is an error configuring the DatastoreFileStub.
+    ImportError: if the oauth2client module is not available or an appropriate
+      encryption library cannot be found.
+    IOError: if key_file_path does not exist or cannot be read.
+  """
+  try:
+
+    import oauth2client.client
+  except ImportError, e:
+    raise ImportError('Use of a key file to access the Remote API '
+                      'requires the oauth2client module: %s' % e)
+
+  if not oauth2client.client.HAS_CRYPTO:
+    raise ImportError('Use of a key file to access the Remote API '
+                      'requires an encryption library. Please install '
+                      'either PyOpenSSL or PyCrypto 2.6 or later.')
+
+  with open(key_file_path, 'rb') as key_file:
+    key = key_file.read()
+    credentials = oauth2client.client.SignedJwtAssertionCredentials(
+        service_account,
+        key,
+        _OAUTH_SCOPES)
+    return _ConfigureRemoteApiWithOAuthCredentials(servername,
+                                                   path,
+                                                   credentials)
+
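A usage sketch for the key-file variant (the helper is module-private and every value below is a placeholder):

    from google.appengine.ext.remote_api import remote_api_stub

    remote_api_stub._ConfigureRemoteApiWithKeyFile(
        servername='my-app.appspot.com',
        path='/_ah/remote_api',
        service_account='my-sa@developer.gserviceaccount.com',
        key_file_path='/path/to/key.p12')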
+
+
+def _ConfigureRemoteApiWithComputeEngineCredential(servername,
+                                                   path):
+  """Does necessary setup to allow easy remote access to App Engine APIs.
+
+  This function uses OAuth2 with a credential from the Compute Engine metadata
+  server to communicate with App Engine APIs.
+
+  Args:
+    servername: The hostname your app is deployed on (typically,
+        <app_id>.appspot.com).
+    path: The path to the remote_api handler for your app
+      (for example, '/_ah/remote_api').
+
+  Returns:
+    server, a server which may be useful for calling the application directly.
+
+  Raises:
+    urllib2.HTTPError: if app_id is not provided and there is an error while
+      retrieving it.
+    ConfigurationError: if there is an error configuring the DatastoreFileStub.
+    ImportError: if the oauth2client or httplib2 module is not available.
+  """
+  try:
+
+    import httplib2
+    import oauth2client
+  except ImportError, e:
+    raise ImportError('Use of Compute Engine credentials requires the '
+                      'oauth2client and httplib2 modules: %s' % e)
+  credentials = oauth2client.gce.AppAssertionCredentials(_OAUTH_SCOPES)
+  http = httplib2.Http()
+  credentials.authorize(http)
+  credentials.refresh(http)
+  return _ConfigureRemoteApiWithOAuthCredentials(servername,
+                                                 path,
+                                                 credentials)
+
+
+def _ConfigureRemoteApiWithOAuthCredentials(servername,
+                                            path,
+                                            credentials):
+  """Does necessary setup to allow easy remote access to App Engine APIs.
+
+  Args:
+    servername: The hostname your app is deployed on (typically,
+        <app_id>.appspot.com).
+    path: The path to the remote_api handler for your app
+      (for example, '/_ah/remote_api').
+    credentials: An oauth2client.OAuth2Credentials object.
+
+  Returns:
+    server, a server which may be useful for calling the application directly.
+
+  Raises:
+    urllib2.HTTPError: if app_id is not provided and there is an error while
+      retrieving it.
+    ConfigurationError: if there is an error configuring the DatastoreFileStub.
+    ImportError: if the appengine_rpc_httplib2 module is not available.
+  """
+  try:
+
+    from google.appengine.tools import appengine_rpc_httplib2
+  except ImportError, e:
+    raise ImportError('Use of OAuth credentials requires the '
+                      'appengine_rpc_httplib2 module. %s' % e)
+  if not servername:
+    raise ConfigurationError('servername required')
+
+  oauth2_parameters = (
+      appengine_rpc_httplib2.HttpRpcServerOAuth2.OAuth2Parameters(
+          access_token=None,
+          client_id=None,
+          client_secret=None,
+          scope=None,
+          refresh_token=None,
+          credential_file=None,
+          credentials=credentials))
+  return ConfigureRemoteApi(
+      app_id=None,
+      path=path,
+      auth_func=oauth2_parameters,
+      servername=servername,
+      rpc_server_factory=appengine_rpc_httplib2.HttpRpcServerOAuth2)
+
+
 def ConfigureRemoteApi(app_id,
                        path,
                        auth_func,
@@ -679,8 +829,7 @@
 
   Note that if the app_id is specified, the internal appid must be used;
   this may include a partition and a domain. It is often easier to let
-  remote_api_stub retreive the app_id automatically.
-
+  remote_api_stub retrieve the app_id automatically.
 
   Args:
     app_id: The app_id of your app, as declared in app.yaml, or None.
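A minimal usage sketch for the new key-file helper (not part of the patch; the
hostname, service account, and key path below are hypothetical, and the helper
is module-private, so the public entry point may differ):

    from google.appengine.ext.remote_api import remote_api_stub

    # Authenticate with a service account's .p12 key and route subsequent
    # App Engine API calls through the deployed app's remote_api handler.
    remote_api_stub._ConfigureRemoteApiWithKeyFile(
        servername='myapp.appspot.com',
        path='/_ah/remote_api',
        service_account='my-sa@developer.gserviceaccount.com',
        key_file_path='/path/to/key.p12')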
diff --git a/google/appengine/ext/remote_api/throttle.py b/google/appengine/ext/remote_api/throttle.py
index 9f56ce9..e8b552b 100644
--- a/google/appengine/ext/remote_api/throttle.py
+++ b/google/appengine/ext/remote_api/throttle.py
@@ -63,6 +63,7 @@
 """
 
 
+
 import logging
 import os
 import threading
diff --git a/google/appengine/ext/search/__init__.py b/google/appengine/ext/search/__init__.py
index a8f804a..c149bf5 100644
--- a/google/appengine/ext/search/__init__.py
+++ b/google/appengine/ext/search/__init__.py
@@ -126,6 +126,7 @@
 
 
 
+
 import re
 import string
 import sys
diff --git a/google/appengine/ext/testbed/__init__.py b/google/appengine/ext/testbed/__init__.py
index d66ad0b..9dae4ca 100644
--- a/google/appengine/ext/testbed/__init__.py
+++ b/google/appengine/ext/testbed/__init__.py
@@ -101,6 +101,9 @@
 
 
 
+
+
+
 import os
 import unittest
 
diff --git a/google/appengine/ext/vendor/__init__.py b/google/appengine/ext/vendor/__init__.py
new file mode 100644
index 0000000..58a23f6
--- /dev/null
+++ b/google/appengine/ext/vendor/__init__.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Dependencies vendoring helpers."""
+
+import os.path
+import site
+import sys
+
+
+PYTHON_VERSION = 'python%d.%d' % (sys.version_info[0], sys.version_info[1])
+
+
+def add(path, index=1):
+  """Insert site dir or virtualenv at a given index in sys.path.
+
+  Args:
+    path: relative path to a site dir or virtualenv.
+    index: sys.path position to insert the site dir.
+
+  Raises:
+    ValueError: path doesn't exist.
+  """
+  venv_path = os.path.join(path, 'lib', PYTHON_VERSION, 'site-packages')
+  if os.path.isdir(venv_path):
+    site_dir = venv_path
+  elif os.path.isdir(path):
+    site_dir = path
+  else:
+    raise ValueError('virtualenv: cannot access %s: '
+                     'No such virtualenv or site directory' % path)
+
+
+
+  sys_path = sys.path[:]
+  del sys.path[index:]
+  site.addsitedir(site_dir)
+  sys.path.extend(sys_path[index:])
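A minimal usage sketch of the new vendoring helper (not part of the patch;
assumes a ./lib site directory, or a virtualenv, shipped alongside the app):

    # Typically placed in appengine_config.py so it runs before app imports.
    from google.appengine.ext import vendor

    # Inserts ./lib (a plain site dir) or a virtualenv's site-packages at
    # index 1 of sys.path; raises ValueError if the path does not exist.
    vendor.add('lib')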
diff --git a/google/appengine/ext/webapp/__init__.py b/google/appengine/ext/webapp/__init__.py
index 9f5ce27..69c2438 100644
--- a/google/appengine/ext/webapp/__init__.py
+++ b/google/appengine/ext/webapp/__init__.py
@@ -66,6 +66,7 @@
 """
 
 
+
 import logging
 import os
 
diff --git a/google/appengine/ext/webapp/_template.py b/google/appengine/ext/webapp/_template.py
index 0dbc044..e5fc9df 100644
--- a/google/appengine/ext/webapp/_template.py
+++ b/google/appengine/ext/webapp/_template.py
@@ -27,6 +27,7 @@
 
 
 
+
 import warnings
 warnings.filterwarnings('ignore',
                         '',
diff --git a/google/appengine/ext/webapp/_webapp25.py b/google/appengine/ext/webapp/_webapp25.py
index 71dc2c7..cc9764c 100644
--- a/google/appengine/ext/webapp/_webapp25.py
+++ b/google/appengine/ext/webapp/_webapp25.py
@@ -65,6 +65,7 @@
 
 
 
+
 import cgi
 import logging
 import re
diff --git a/google/appengine/ext/webapp/blobstore_handlers.py b/google/appengine/ext/webapp/blobstore_handlers.py
index 5da27f2..dfb33c3 100644
--- a/google/appengine/ext/webapp/blobstore_handlers.py
+++ b/google/appengine/ext/webapp/blobstore_handlers.py
@@ -41,6 +41,7 @@
 
 
 
+
 import cgi
 import collections
 import re
diff --git a/google/appengine/ext/webapp/mail_handlers.py b/google/appengine/ext/webapp/mail_handlers.py
index baa2edc..266209b 100644
--- a/google/appengine/ext/webapp/mail_handlers.py
+++ b/google/appengine/ext/webapp/mail_handlers.py
@@ -37,6 +37,7 @@
 
 
 
+
 from google.appengine.api import mail
 from google.appengine.ext import webapp
 
diff --git a/google/appengine/ext/webapp/mock_webapp.py b/google/appengine/ext/webapp/mock_webapp.py
index 91effd8..458c25a 100644
--- a/google/appengine/ext/webapp/mock_webapp.py
+++ b/google/appengine/ext/webapp/mock_webapp.py
@@ -24,6 +24,7 @@
 """
 
 
+
 import StringIO
 import urlparse
 
diff --git a/google/appengine/ext/webapp/template.py b/google/appengine/ext/webapp/template.py
index e7c5702..c5a7020 100644
--- a/google/appengine/ext/webapp/template.py
+++ b/google/appengine/ext/webapp/template.py
@@ -49,6 +49,8 @@
 
 
 
+
+
 import logging
 import os
 import warnings
diff --git a/google/appengine/ext/webapp/util.py b/google/appengine/ext/webapp/util.py
index 7f9d21f..adfc9a6 100644
--- a/google/appengine/ext/webapp/util.py
+++ b/google/appengine/ext/webapp/util.py
@@ -28,6 +28,7 @@
 
 
 
+
 __all__ = ['login_required',
            'run_wsgi_app',
            'add_wsgi_middleware',
diff --git a/google/appengine/ext/webapp/xmpp_handlers.py b/google/appengine/ext/webapp/xmpp_handlers.py
index 3f7658b..07eb0d4 100644
--- a/google/appengine/ext/webapp/xmpp_handlers.py
+++ b/google/appengine/ext/webapp/xmpp_handlers.py
@@ -30,6 +30,7 @@
 
 
 
+
 import logging
 from google.appengine.api import xmpp
 from google.appengine.ext import webapp
diff --git a/google/appengine/ext/zipserve/__init__.py b/google/appengine/ext/zipserve/__init__.py
index b7ecc2d..f63be94 100644
--- a/google/appengine/ext/zipserve/__init__.py
+++ b/google/appengine/ext/zipserve/__init__.py
@@ -58,6 +58,7 @@
 """
 
 
+
 import email.Utils
 import logging
 import mimetypes
diff --git a/google/appengine/runtime/__init__.py b/google/appengine/runtime/__init__.py
index a365006..1a2e446 100644
--- a/google/appengine/runtime/__init__.py
+++ b/google/appengine/runtime/__init__.py
@@ -23,6 +23,7 @@
 
 
 
+
 try:
   BaseException
 except NameError:
diff --git a/google/appengine/runtime/apiproxy.py b/google/appengine/runtime/apiproxy.py
index ee085ea..4d6f1fb 100644
--- a/google/appengine/runtime/apiproxy.py
+++ b/google/appengine/runtime/apiproxy.py
@@ -26,6 +26,7 @@
 """
 
 
+
 import sys
 
 apiproxy_stub_map_loaded = (
diff --git a/google/appengine/runtime/apiproxy_errors.py b/google/appengine/runtime/apiproxy_errors.py
index b0ad72c..9aec93a 100644
--- a/google/appengine/runtime/apiproxy_errors.py
+++ b/google/appengine/runtime/apiproxy_errors.py
@@ -22,6 +22,7 @@
 """
 
 
+
 class Error(Exception):
   """Base APIProxy error type."""
 
diff --git a/google/appengine/runtime/background.py b/google/appengine/runtime/background.py
index fac550c..6d39aea 100644
--- a/google/appengine/runtime/background.py
+++ b/google/appengine/runtime/background.py
@@ -23,6 +23,7 @@
 """
 
 
+
 import logging
 import sys
 import thread
diff --git a/google/appengine/runtime/cgi.py b/google/appengine/runtime/cgi.py
index 2c458ad..af99bf9 100644
--- a/google/appengine/runtime/cgi.py
+++ b/google/appengine/runtime/cgi.py
@@ -27,6 +27,7 @@
 from __future__ import with_statement
 
 
+
 import cStringIO
 from email import feedparser
 import imp
diff --git a/google/appengine/runtime/features.py b/google/appengine/runtime/features.py
new file mode 100644
index 0000000..0fc303f
--- /dev/null
+++ b/google/appengine/runtime/features.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""features module.
+
+This module simplifies access to the App Engine feature flags.
+"""
+
+import __builtin__
+
+
+def IsEnabled(feature_name, default=False):
+  """Indicates if a specific feature flag is enabled.
+
+  Args:
+    feature_name: The name of the feature flag to check.
+    default: Default value if the flags are not initialized (in a test
+             environment, for example).
+
+  Returns:
+    True/False if the flag is set/not set or default if the feature flags
+    were not initialized.
+  """
+  try:
+
+    return feature_name in __builtin__._APPENGINE_FEATURE_FLAGS
+  except AttributeError:
+    return default
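A short sketch of how the new flag lookup behaves (not part of the patch; the
flag names are hypothetical, and flags are normally initialized by the runtime
rather than by user code):

    import __builtin__
    from google.appengine.runtime import features

    features.IsEnabled('my_flag')                # False: flags uninitialized
    features.IsEnabled('my_flag', default=True)  # True: falls back to default
    __builtin__._APPENGINE_FEATURE_FLAGS = frozenset(['my_flag'])
    features.IsEnabled('my_flag')                # True: flag is set
    features.IsEnabled('other_flag')             # False: initialized but unset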
diff --git a/google/appengine/runtime/request_environment.py b/google/appengine/runtime/request_environment.py
index 85ffc7b..9a15e3f 100644
--- a/google/appengine/runtime/request_environment.py
+++ b/google/appengine/runtime/request_environment.py
@@ -26,6 +26,7 @@
 
 
 
+
 import collections
 import os
 import sys
diff --git a/google/appengine/runtime/runtime.py b/google/appengine/runtime/runtime.py
index 2c9268f..6b3c32f 100644
--- a/google/appengine/runtime/runtime.py
+++ b/google/appengine/runtime/runtime.py
@@ -30,6 +30,7 @@
 
 
 
+
 import cStringIO
 import thread
 import threading
diff --git a/google/appengine/runtime/shutdown.py b/google/appengine/runtime/shutdown.py
index 86194f5..34cc70d 100644
--- a/google/appengine/runtime/shutdown.py
+++ b/google/appengine/runtime/shutdown.py
@@ -29,6 +29,7 @@
 
 
 
+
 import logging
 import sys
 import traceback
diff --git a/google/appengine/runtime/wsgi.py b/google/appengine/runtime/wsgi.py
index 87a5a35..f65e5d3 100644
--- a/google/appengine/runtime/wsgi.py
+++ b/google/appengine/runtime/wsgi.py
@@ -30,6 +30,7 @@
 
 
 
+
 import logging
 import sys
 import types
diff --git a/google/appengine/tools/adaptive_thread_pool.py b/google/appengine/tools/adaptive_thread_pool.py
index 3f730ec..b30b2ef 100644
--- a/google/appengine/tools/adaptive_thread_pool.py
+++ b/google/appengine/tools/adaptive_thread_pool.py
@@ -26,6 +26,7 @@
 
 
 
+
 import logging
 import Queue
 import sys
diff --git a/google/appengine/tools/api_server.py b/google/appengine/tools/api_server.py
index 3b51b04..b26d641 100644
--- a/google/appengine/tools/api_server.py
+++ b/google/appengine/tools/api_server.py
@@ -22,6 +22,7 @@
 from __future__ import with_statement
 
 
+
 import BaseHTTPServer
 import httplib
 import logging
@@ -382,8 +383,6 @@
   except ImportError:
 
 
-    logging.warning('Could not initialize images API; you are likely missing '
-                    'the Python "PIL" module.')
 
     from google.appengine.api.images import images_not_implemented_stub
     apiproxy_stub_map.apiproxy.RegisterStub(
diff --git a/google/appengine/tools/appcfg.py b/google/appengine/tools/appcfg.py
index 591527c..6471489 100644
--- a/google/appengine/tools/appcfg.py
+++ b/google/appengine/tools/appcfg.py
@@ -29,6 +29,7 @@
 from __future__ import with_statement
 
 
+
 import calendar
 import contextlib
 import copy
diff --git a/google/appengine/tools/appcfg_java.py b/google/appengine/tools/appcfg_java.py
index c6b6e53..0953501 100644
--- a/google/appengine/tools/appcfg_java.py
+++ b/google/appengine/tools/appcfg_java.py
@@ -34,6 +34,7 @@
 from google.appengine.tools import dispatch_xml_parser
 from google.appengine.tools import dos_xml_parser
 from google.appengine.tools import jarfile
+from google.appengine.tools import java_utils
 from google.appengine.tools import queue_xml_parser
 from google.appengine.tools import web_xml_parser
 from google.appengine.tools import yaml_translator
@@ -134,7 +135,7 @@
 
       self.options.no_symlinks = True
 
-    java_home, exec_suffix = JavaHomeAndSuffix()
+    java_home, exec_suffix = java_utils.JavaHomeAndSuffix()
     self.java_command = os.path.join(java_home, 'bin', 'java' + exec_suffix)
     self.javac_command = os.path.join(java_home, 'bin', 'javac' + exec_suffix)
 
@@ -589,61 +590,6 @@
   return matches
 
 
-def JavaHomeAndSuffix():
-  """Find the directory that the JDK is installed in.
-
-  The JDK install directory is expected to have a bin directory that contains
-  at a minimum the java and javac executables. If the environment variable
-  JAVA_HOME is set then it must point to such a directory. Otherwise, we look
-  for javac on the PATH and check that it is inside a JDK install directory.
-
-  Returns:
-    A tuple where the first element is the JDK install directory and the second
-    element is a suffix that must be appended to executables in that directory
-    ('' on Unix-like systems, '.exe' on Windows).
-
-  Raises:
-    RuntimeError: If JAVA_HOME is set but is not a JDK install directory, or
-    otherwise if a JDK install directory cannot be found based on the PATH.
-  """
-  def ResultForJdkAt(path):
-    """Return (path, suffix) if path is a JDK install directory, else None."""
-    def IsExecutable(binary):
-      return os.path.isfile(binary) and os.access(binary, os.X_OK)
-
-    def ResultFor(path):
-      for suffix in ['', '.exe']:
-        if all(IsExecutable(os.path.join(path, 'bin', binary + suffix))
-               for binary in ['java', 'javac', 'jar']):
-          return (path, suffix)
-      return None
-
-    result = ResultFor(path)
-    if not result:
-
-
-      head, tail = os.path.split(path)
-      if tail == 'jre':
-        result = ResultFor(head)
-    return result
-
-  java_home = os.getenv('JAVA_HOME')
-  if java_home:
-    result = ResultForJdkAt(java_home)
-    if result:
-      return result
-    else:
-      raise RuntimeError(
-          'JAVA_HOME is set but does not reference a valid JDK: %s' % java_home)
-  for path_dir in os.environ['PATH'].split(os.pathsep):
-    maybe_root, last = os.path.split(path_dir)
-    if last == 'bin':
-      result = ResultForJdkAt(maybe_root)
-      if result:
-        return result
-  raise RuntimeError('Did not find JDK in PATH and JAVA_HOME is not set')
-
-
 def _FindApiJars(lib_dir):
   """Find the appengine-api-*.jar and its version.
 
diff --git a/google/appengine/tools/appengine_rpc.py b/google/appengine/tools/appengine_rpc.py
index 2f8c738..a040d30 100644
--- a/google/appengine/tools/appengine_rpc.py
+++ b/google/appengine/tools/appengine_rpc.py
@@ -17,6 +17,7 @@
 """Tool for performing authenticated RPCs against App Engine."""
 
 
+
 import google
 
 import cookielib
diff --git a/google/appengine/tools/appengine_rpc_httplib2.py b/google/appengine/tools/appengine_rpc_httplib2.py
index 2f8cece..6c4d4af 100644
--- a/google/appengine/tools/appengine_rpc_httplib2.py
+++ b/google/appengine/tools/appengine_rpc_httplib2.py
@@ -291,7 +291,8 @@
     """Class encapsulating parameters related to OAuth2 authentication."""
 
     def __init__(self, access_token, client_id, client_secret, scope,
-                 refresh_token, credential_file, token_uri=None):
+                 refresh_token, credential_file, token_uri=None,
+                 credentials=None):
       self.access_token = access_token
       self.client_id = client_id
       self.client_secret = client_secret
@@ -299,6 +300,7 @@
       self.refresh_token = refresh_token
       self.credential_file = credential_file
       self.token_uri = token_uri
+      self.credentials = credentials
 
   def __init__(self, host, oauth2_parameters, user_agent, source,
                host_override=None, extra_headers=None, save_cookies=False,
@@ -344,8 +346,10 @@
     else:
       self.storage = NoStorage()
 
-    if any((oauth2_parameters.access_token, oauth2_parameters.refresh_token,
-            oauth2_parameters.token_uri)):
+    if oauth2_parameters.credentials:
+      self.credentials = oauth2_parameters.credentials
+    elif any((oauth2_parameters.access_token, oauth2_parameters.refresh_token,
+              oauth2_parameters.token_uri)):
       token_uri = (oauth2_parameters.token_uri or
                    ('https://%s/o/oauth2/token' %
                     os.getenv('APPENGINE_AUTH_SERVER', 'accounts.google.com')))
diff --git a/google/appengine/tools/bulkload_client.py b/google/appengine/tools/bulkload_client.py
index 65273ce..a6a5acc 100644
--- a/google/appengine/tools/bulkload_client.py
+++ b/google/appengine/tools/bulkload_client.py
@@ -37,6 +37,7 @@
 """
 
 
+
 import StringIO
 import httplib
 import logging
diff --git a/google/appengine/tools/dev-channel-js.js b/google/appengine/tools/dev-channel-js.js
index db6866c..beb5743 100644
--- a/google/appengine/tools/dev-channel-js.js
+++ b/google/appengine/tools/dev-channel-js.js
@@ -126,7 +126,7 @@
   } else {
     if (goog.inHtmlDocument_()) {
       for (var doc = goog.global.document, scripts = doc.getElementsByTagName("script"), i = scripts.length - 1;0 <= i;--i) {
-        var src = scripts[i].src, qmark = src.lastIndexOf("?"), l = -1 == qmark ? src.length : qmark;
+        var script = scripts[i], src = script.src, qmark = src.lastIndexOf("?"), l = -1 == qmark ? src.length : qmark;
         if ("base.js" == src.substr(l - 7, 7)) {
           goog.basePath = src.substr(0, l - 7);
           break;
@@ -195,7 +195,7 @@
     goog.loadedModules_[moduleName] = exports;
     if (goog.moduleLoaderState_.declareTestMethods) {
       for (var entry in exports) {
-        if (0 === entry.indexOf("test", 0) || "tearDown" == entry || "setup" == entry) {
+        if (0 === entry.indexOf("test", 0) || "tearDown" == entry || "setUp" == entry || "setUpPage" == entry || "tearDownPage" == entry) {
           goog.global[entry] = exports[entry];
         }
       }
@@ -594,12 +594,17 @@
 goog.string.collapseWhitespace = function(str) {
   return str.replace(/[\s\xa0]+/g, " ").replace(/^\s+|\s+$/g, "");
 };
-goog.string.isEmpty = function(str) {
+goog.string.isEmptyOrWhitespace = function(str) {
   return/^[\s\xa0]*$/.test(str);
 };
-goog.string.isEmptySafe = function(str) {
-  return goog.string.isEmpty(goog.string.makeSafe(str));
+goog.string.isEmptyString = function(str) {
+  return 0 == str.length;
 };
+goog.string.isEmpty = goog.string.isEmptyOrWhitespace;
+goog.string.isEmptyOrWhitespaceSafe = function(str) {
+  return goog.string.isEmptyOrWhitespace(goog.string.makeSafe(str));
+};
+goog.string.isEmptySafe = goog.string.isEmptyOrWhitespaceSafe;
 goog.string.isBreakingWhitespace = function(str) {
   return!/[^\t\n\r ]/.test(str);
 };
@@ -1378,7 +1383,15 @@
 goog.array.flatten = function(var_args) {
   for (var result = [], i = 0;i < arguments.length;i++) {
     var element = arguments[i];
-    goog.isArray(element) ? result.push.apply(result, goog.array.flatten.apply(null, element)) : result.push(element);
+    if (goog.isArray(element)) {
+      for (var c = 0;c < element.length;c += 8192) {
+        for (var chunk = goog.array.slice(element, c, c + 8192), recurseResult = goog.array.flatten.apply(null, chunk), r = 0;r < recurseResult.length;r++) {
+          result.push(recurseResult[r]);
+        }
+      }
+    } else {
+      result.push(element);
+    }
   }
   return result;
 };
@@ -1754,11 +1767,13 @@
   return key in obj ? obj[key] : obj[key] = value;
 };
 goog.object.equals = function(a, b) {
-  if (!goog.array.equals(goog.object.getKeys(a), goog.object.getKeys(b))) {
-    return!1;
-  }
   for (var k in a) {
-    if (a[k] !== b[k]) {
+    if (!(k in b) || a[k] !== b[k]) {
+      return!1;
+    }
+  }
+  for (k in b) {
+    if (!(k in a)) {
       return!1;
     }
   }
@@ -1885,6 +1900,12 @@
 goog.labs.userAgent.browser.matchSafari_ = function() {
   return goog.labs.userAgent.util.matchUserAgent("Safari") && !goog.labs.userAgent.util.matchUserAgent("Chrome") && !goog.labs.userAgent.util.matchUserAgent("CriOS") && !goog.labs.userAgent.util.matchUserAgent("Android");
 };
+goog.labs.userAgent.browser.matchCoast_ = function() {
+  return goog.labs.userAgent.util.matchUserAgent("Coast");
+};
+goog.labs.userAgent.browser.matchIosWebview_ = function() {
+  return(goog.labs.userAgent.util.matchUserAgent("iPad") || goog.labs.userAgent.util.matchUserAgent("iPhone")) && !goog.labs.userAgent.browser.matchSafari_() && !goog.labs.userAgent.browser.matchChrome_() && !goog.labs.userAgent.browser.matchCoast_() && goog.labs.userAgent.util.matchUserAgent("AppleWebKit");
+};
 goog.labs.userAgent.browser.matchChrome_ = function() {
   return goog.labs.userAgent.util.matchUserAgent("Chrome") || goog.labs.userAgent.util.matchUserAgent("CriOS");
 };
@@ -1895,6 +1916,8 @@
 goog.labs.userAgent.browser.isIE = goog.labs.userAgent.browser.matchIE_;
 goog.labs.userAgent.browser.isFirefox = goog.labs.userAgent.browser.matchFirefox_;
 goog.labs.userAgent.browser.isSafari = goog.labs.userAgent.browser.matchSafari_;
+goog.labs.userAgent.browser.isCoast = goog.labs.userAgent.browser.matchCoast_;
+goog.labs.userAgent.browser.isIosWebview = goog.labs.userAgent.browser.matchIosWebview_;
 goog.labs.userAgent.browser.isChrome = goog.labs.userAgent.browser.matchChrome_;
 goog.labs.userAgent.browser.isAndroidBrowser = goog.labs.userAgent.browser.matchAndroidBrowser_;
 goog.labs.userAgent.browser.isSilk = function() {
@@ -2038,7 +2061,6 @@
   goog.userAgent.detectedMac_ = goog.string.contains(goog.userAgent.PLATFORM, "Mac");
   goog.userAgent.detectedWindows_ = goog.string.contains(goog.userAgent.PLATFORM, "Win");
   goog.userAgent.detectedLinux_ = goog.string.contains(goog.userAgent.PLATFORM, "Linux");
-  goog.userAgent.detectedX11_ = !!goog.userAgent.getNavigator() && goog.string.contains(goog.userAgent.getNavigator().appVersion || "", "X11");
   var ua = goog.userAgent.getUserAgentString();
   goog.userAgent.detectedAndroid_ = !!ua && goog.string.contains(ua, "Android");
   goog.userAgent.detectedIPhone_ = !!ua && goog.string.contains(ua, "iPhone");
@@ -2048,7 +2070,11 @@
 goog.userAgent.MAC = goog.userAgent.PLATFORM_KNOWN_ ? goog.userAgent.ASSUME_MAC : goog.userAgent.detectedMac_;
 goog.userAgent.WINDOWS = goog.userAgent.PLATFORM_KNOWN_ ? goog.userAgent.ASSUME_WINDOWS : goog.userAgent.detectedWindows_;
 goog.userAgent.LINUX = goog.userAgent.PLATFORM_KNOWN_ ? goog.userAgent.ASSUME_LINUX : goog.userAgent.detectedLinux_;
-goog.userAgent.X11 = goog.userAgent.PLATFORM_KNOWN_ ? goog.userAgent.ASSUME_X11 : goog.userAgent.detectedX11_;
+goog.userAgent.isX11_ = function() {
+  var navigator = goog.userAgent.getNavigator();
+  return!!navigator && goog.string.contains(navigator.appVersion || "", "X11");
+};
+goog.userAgent.X11 = goog.userAgent.PLATFORM_KNOWN_ ? goog.userAgent.ASSUME_X11 : goog.userAgent.isX11_();
 goog.userAgent.ANDROID = goog.userAgent.PLATFORM_KNOWN_ ? goog.userAgent.ASSUME_ANDROID : goog.userAgent.detectedAndroid_;
 goog.userAgent.IPHONE = goog.userAgent.PLATFORM_KNOWN_ ? goog.userAgent.ASSUME_IPHONE : goog.userAgent.detectedIPhone_;
 goog.userAgent.IPAD = goog.userAgent.PLATFORM_KNOWN_ ? goog.userAgent.ASSUME_IPAD : goog.userAgent.detectedIPad_;
@@ -3010,9 +3036,9 @@
 CHANGE:"change", SELECT:"select", SUBMIT:"submit", INPUT:"input", PROPERTYCHANGE:"propertychange", DRAGSTART:"dragstart", DRAG:"drag", DRAGENTER:"dragenter", DRAGOVER:"dragover", DRAGLEAVE:"dragleave", DROP:"drop", DRAGEND:"dragend", TOUCHSTART:"touchstart", TOUCHMOVE:"touchmove", TOUCHEND:"touchend", TOUCHCANCEL:"touchcancel", BEFOREUNLOAD:"beforeunload", CONSOLEMESSAGE:"consolemessage", CONTEXTMENU:"contextmenu", DOMCONTENTLOADED:"DOMContentLoaded", ERROR:"error", HELP:"help", LOAD:"load", LOSECAPTURE:"losecapture", 
 ORIENTATIONCHANGE:"orientationchange", READYSTATECHANGE:"readystatechange", RESIZE:"resize", SCROLL:"scroll", UNLOAD:"unload", HASHCHANGE:"hashchange", PAGEHIDE:"pagehide", PAGESHOW:"pageshow", POPSTATE:"popstate", COPY:"copy", PASTE:"paste", CUT:"cut", BEFORECOPY:"beforecopy", BEFORECUT:"beforecut", BEFOREPASTE:"beforepaste", ONLINE:"online", OFFLINE:"offline", MESSAGE:"message", CONNECT:"connect", ANIMATIONSTART:goog.events.getVendorPrefixedName_("AnimationStart"), ANIMATIONEND:goog.events.getVendorPrefixedName_("AnimationEnd"), 
 ANIMATIONITERATION:goog.events.getVendorPrefixedName_("AnimationIteration"), TRANSITIONEND:goog.events.getVendorPrefixedName_("TransitionEnd"), POINTERDOWN:"pointerdown", POINTERUP:"pointerup", POINTERCANCEL:"pointercancel", POINTERMOVE:"pointermove", POINTEROVER:"pointerover", POINTEROUT:"pointerout", POINTERENTER:"pointerenter", POINTERLEAVE:"pointerleave", GOTPOINTERCAPTURE:"gotpointercapture", LOSTPOINTERCAPTURE:"lostpointercapture", MSGESTURECHANGE:"MSGestureChange", MSGESTUREEND:"MSGestureEnd", 
-MSGESTUREHOLD:"MSGestureHold", MSGESTURESTART:"MSGestureStart", MSGESTURETAP:"MSGestureTap", MSGOTPOINTERCAPTURE:"MSGotPointerCapture", MSINERTIASTART:"MSInertiaStart", MSLOSTPOINTERCAPTURE:"MSLostPointerCapture", MSPOINTERCANCEL:"MSPointerCancel", MSPOINTERDOWN:"MSPointerDown", MSPOINTERENTER:"MSPointerEnter", MSPOINTERHOVER:"MSPointerHover", MSPOINTERLEAVE:"MSPointerLeave", MSPOINTERMOVE:"MSPointerMove", MSPOINTEROUT:"MSPointerOut", MSPOINTEROVER:"MSPointerOver", MSPOINTERUP:"MSPointerUp", TEXTINPUT:"textinput", 
-COMPOSITIONSTART:"compositionstart", COMPOSITIONUPDATE:"compositionupdate", COMPOSITIONEND:"compositionend", EXIT:"exit", LOADABORT:"loadabort", LOADCOMMIT:"loadcommit", LOADREDIRECT:"loadredirect", LOADSTART:"loadstart", LOADSTOP:"loadstop", RESPONSIVE:"responsive", SIZECHANGED:"sizechanged", UNRESPONSIVE:"unresponsive", VISIBILITYCHANGE:"visibilitychange", STORAGE:"storage", DOMSUBTREEMODIFIED:"DOMSubtreeModified", DOMNODEINSERTED:"DOMNodeInserted", DOMNODEREMOVED:"DOMNodeRemoved", DOMNODEREMOVEDFROMDOCUMENT:"DOMNodeRemovedFromDocument", 
-DOMNODEINSERTEDINTODOCUMENT:"DOMNodeInsertedIntoDocument", DOMATTRMODIFIED:"DOMAttrModified", DOMCHARACTERDATAMODIFIED:"DOMCharacterDataModified"};
+MSGESTUREHOLD:"MSGestureHold", MSGESTURESTART:"MSGestureStart", MSGESTURETAP:"MSGestureTap", MSGOTPOINTERCAPTURE:"MSGotPointerCapture", MSINERTIASTART:"MSInertiaStart", MSLOSTPOINTERCAPTURE:"MSLostPointerCapture", MSPOINTERCANCEL:"MSPointerCancel", MSPOINTERDOWN:"MSPointerDown", MSPOINTERENTER:"MSPointerEnter", MSPOINTERHOVER:"MSPointerHover", MSPOINTERLEAVE:"MSPointerLeave", MSPOINTERMOVE:"MSPointerMove", MSPOINTEROUT:"MSPointerOut", MSPOINTEROVER:"MSPointerOver", MSPOINTERUP:"MSPointerUp", TEXT:"text", 
+TEXTINPUT:"textInput", COMPOSITIONSTART:"compositionstart", COMPOSITIONUPDATE:"compositionupdate", COMPOSITIONEND:"compositionend", EXIT:"exit", LOADABORT:"loadabort", LOADCOMMIT:"loadcommit", LOADREDIRECT:"loadredirect", LOADSTART:"loadstart", LOADSTOP:"loadstop", RESPONSIVE:"responsive", SIZECHANGED:"sizechanged", UNRESPONSIVE:"unresponsive", VISIBILITYCHANGE:"visibilitychange", STORAGE:"storage", DOMSUBTREEMODIFIED:"DOMSubtreeModified", DOMNODEINSERTED:"DOMNodeInserted", DOMNODEREMOVED:"DOMNodeRemoved", 
+DOMNODEREMOVEDFROMDOCUMENT:"DOMNodeRemovedFromDocument", DOMNODEINSERTEDINTODOCUMENT:"DOMNodeInsertedIntoDocument", DOMATTRMODIFIED:"DOMAttrModified", DOMCHARACTERDATAMODIFIED:"DOMCharacterDataModified"};
 goog.events.BrowserEvent = function(opt_e, opt_currentTarget) {
   goog.events.Event.call(this, opt_e ? opt_e.type : "");
   this.relatedTarget = this.currentTarget = this.target = null;
@@ -3942,21 +3968,15 @@
   return newIter;
 };
 goog.iter.takeWhile = function(iterable, f, opt_obj) {
-  var iterator = goog.iter.toIterator(iterable), newIter = new goog.iter.Iterator, taking = !0;
-  newIter.next = function() {
-    for (;;) {
-      if (taking) {
-        var val = iterator.next();
-        if (f.call(opt_obj, val, void 0, iterator)) {
-          return val;
-        }
-        taking = !1;
-      } else {
-        throw goog.iter.StopIteration;
-      }
+  var iterator = goog.iter.toIterator(iterable), iter = new goog.iter.Iterator;
+  iter.next = function() {
+    var val = iterator.next();
+    if (f.call(opt_obj, val, void 0, iterator)) {
+      return val;
     }
+    throw goog.iter.StopIteration;
   };
-  return newIter;
+  return iter;
 };
 goog.iter.toArray = function(iterable) {
   if (goog.isArrayLike(iterable)) {
@@ -5080,7 +5100,7 @@
     doc.write("");
     doc.close();
     var message = "callImmediate" + Math.random(), origin = "file:" == win.location.protocol ? "*" : win.location.protocol + "//" + win.location.host, onmessage = goog.bind(function(e) {
-      if (e.origin == origin || e.data == message) {
+      if (("*" == origin || e.origin == origin) && e.data == message) {
         this.port1.onmessage();
       }
     }, this);
@@ -5093,10 +5113,12 @@
   if ("undefined" !== typeof Channel && !goog.labs.userAgent.browser.isIE()) {
     var channel = new Channel, head = {}, tail = head;
     channel.port1.onmessage = function() {
-      head = head.next;
-      var cb = head.cb;
-      head.cb = null;
-      cb();
+      if (goog.isDef(head.next)) {
+        head = head.next;
+        var cb = head.cb;
+        head.cb = null;
+        cb();
+      }
     };
     return function(cb) {
       tail.next = {cb:cb};
diff --git a/google/appengine/tools/dev_appserver_apiserver.py b/google/appengine/tools/dev_appserver_apiserver.py
index 34a8b0b..7a1b5a8 100644
--- a/google/appengine/tools/dev_appserver_apiserver.py
+++ b/google/appengine/tools/dev_appserver_apiserver.py
@@ -30,6 +30,7 @@
 
 
 
+
 import base64
 import cgi
 import cStringIO
diff --git a/google/appengine/tools/dev_appserver_blobimage.py b/google/appengine/tools/dev_appserver_blobimage.py
index a8b3c79..5130be1 100644
--- a/google/appengine/tools/dev_appserver_blobimage.py
+++ b/google/appengine/tools/dev_appserver_blobimage.py
@@ -24,6 +24,7 @@
 """
 
 
+
 import logging
 import re
 import urlparse
diff --git a/google/appengine/tools/dev_appserver_blobstore.py b/google/appengine/tools/dev_appserver_blobstore.py
index 753565d..cf78da6 100644
--- a/google/appengine/tools/dev_appserver_blobstore.py
+++ b/google/appengine/tools/dev_appserver_blobstore.py
@@ -29,6 +29,7 @@
 
 
 
+
 import cgi
 import cStringIO
 import logging
diff --git a/google/appengine/tools/dev_appserver_channel.py b/google/appengine/tools/dev_appserver_channel.py
index fb846bd..3ee9366 100644
--- a/google/appengine/tools/dev_appserver_channel.py
+++ b/google/appengine/tools/dev_appserver_channel.py
@@ -25,6 +25,7 @@
 
 
 
+
 import cgi
 import os
 import urlparse
diff --git a/google/appengine/tools/dev_appserver_index.py b/google/appengine/tools/dev_appserver_index.py
index fb80ee9..a5c3857 100644
--- a/google/appengine/tools/dev_appserver_index.py
+++ b/google/appengine/tools/dev_appserver_index.py
@@ -22,6 +22,7 @@
 
 
 
+
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.datastore.datastore_stub_index import *
 
diff --git a/google/appengine/tools/dev_appserver_login.py b/google/appengine/tools/dev_appserver_login.py
index d72bb86..a4d6df5 100644
--- a/google/appengine/tools/dev_appserver_login.py
+++ b/google/appengine/tools/dev_appserver_login.py
@@ -28,6 +28,7 @@
 """
 
 
+
 import cgi
 import Cookie
 import os
diff --git a/google/appengine/tools/dev_appserver_main.py b/google/appengine/tools/dev_appserver_main.py
index fbf6432..850b998 100644
--- a/google/appengine/tools/dev_appserver_main.py
+++ b/google/appengine/tools/dev_appserver_main.py
@@ -148,6 +148,7 @@
 
 
 
+
 from google.appengine.tools import os_compat
 
 import getopt
diff --git a/google/appengine/tools/dev_appserver_multiprocess.py b/google/appengine/tools/dev_appserver_multiprocess.py
index 0442da2..36ef5ec 100644
--- a/google/appengine/tools/dev_appserver_multiprocess.py
+++ b/google/appengine/tools/dev_appserver_multiprocess.py
@@ -34,6 +34,7 @@
 """
 
 
+
 import BaseHTTPServer
 import copy
 import cStringIO
diff --git a/google/appengine/tools/dev_appserver_oauth.py b/google/appengine/tools/dev_appserver_oauth.py
index 66aaa7e..93ed470 100644
--- a/google/appengine/tools/dev_appserver_oauth.py
+++ b/google/appengine/tools/dev_appserver_oauth.py
@@ -18,6 +18,7 @@
 
 
 
+
 import cgi
 
 
diff --git a/google/appengine/tools/dev_appserver_upload.py b/google/appengine/tools/dev_appserver_upload.py
index db53c5d..0bb1b60 100644
--- a/google/appengine/tools/dev_appserver_upload.py
+++ b/google/appengine/tools/dev_appserver_upload.py
@@ -25,6 +25,7 @@
 """
 
 
+
 import base64
 import cStringIO
 import datetime
diff --git a/google/appengine/tools/devappserver2/admin/admin_request_handler.py b/google/appengine/tools/devappserver2/admin/admin_request_handler.py
index 1470ff9..021837e 100644
--- a/google/appengine/tools/devappserver2/admin/admin_request_handler.py
+++ b/google/appengine/tools/devappserver2/admin/admin_request_handler.py
@@ -17,6 +17,7 @@
 """A base class for all Admin UI request handlers and related utilities."""
 
 
+
 import os.path
 import random
 import string
@@ -122,7 +123,7 @@
     For example, if the request URL is
     "http://foo/bar?animal=cat&color=redirect" then
     _construct_url(['animal'], {'vehicle': 'car'}) will return
-    "http://foo/bar?vehicle=car&color=redirect"
+    "http://foo/bar?color=redirect&vehicle=car"
 
     Args:
       remove: A sequence of query parameters to remove from the query string.
diff --git a/google/appengine/tools/devappserver2/admin/admin_request_handler_test.py b/google/appengine/tools/devappserver2/admin/admin_request_handler_test.py
index 439804a..1e322e7 100644
--- a/google/appengine/tools/devappserver2/admin/admin_request_handler_test.py
+++ b/google/appengine/tools/devappserver2/admin/admin_request_handler_test.py
@@ -17,6 +17,7 @@
 """Tests for devappserver2.admin.admin_request_handler."""
 
 
+
 import os.path
 import tempfile
 import unittest
diff --git a/google/appengine/tools/devappserver2/admin/admin_server.py b/google/appengine/tools/devappserver2/admin/admin_server.py
index d25f10a..b7a9ebc 100644
--- a/google/appengine/tools/devappserver2/admin/admin_server.py
+++ b/google/appengine/tools/devappserver2/admin/admin_server.py
@@ -17,13 +17,13 @@
 """Run a server displaying the administrative UI for the application."""
 
 
+
 import logging
-import os.path
 
 import google
-import jinja2
 import webapp2
 
+from google.appengine.tools.devappserver2 import wsgi_server
 from google.appengine.tools.devappserver2.admin import admin_request_handler
 from google.appengine.tools.devappserver2.admin import blobstore_viewer
 from google.appengine.tools.devappserver2.admin import console
@@ -31,6 +31,7 @@
 from google.appengine.tools.devappserver2.admin import datastore_indexes_viewer
 from google.appengine.tools.devappserver2.admin import datastore_stats_handler
 from google.appengine.tools.devappserver2.admin import datastore_viewer
+from google.appengine.tools.devappserver2.admin import logs_handler
 from google.appengine.tools.devappserver2.admin import mail_request_handler
 from google.appengine.tools.devappserver2.admin import memcache_viewer
 from google.appengine.tools.devappserver2.admin import modules_handler
@@ -40,7 +41,6 @@
 from google.appengine.tools.devappserver2.admin import taskqueue_queues_handler
 from google.appengine.tools.devappserver2.admin import taskqueue_tasks_handler
 from google.appengine.tools.devappserver2.admin import xmpp_request_handler
-from google.appengine.tools.devappserver2 import wsgi_server
 
 
 class AdminApplication(webapp2.WSGIApplication):
@@ -79,6 +79,7 @@
          ('/search/index', search_handler.SearchIndexHandler),
          ('/assets/(.+)', static_file_handler.StaticFileHandler),
          ('/instances', modules_handler.ModulesHandler),
+         ('/instances/logs', logs_handler.LogsHandler),
          webapp2.Route('/',
                        webapp2.RedirectHandler,
                        defaults={'_uri': '/instances'})],
diff --git a/google/appengine/tools/devappserver2/admin/console.py b/google/appengine/tools/devappserver2/admin/console.py
index 1fa62d3..e3f4fca 100644
--- a/google/appengine/tools/devappserver2/admin/console.py
+++ b/google/appengine/tools/devappserver2/admin/console.py
@@ -17,6 +17,7 @@
 """A handler that provides an interactive console."""
 
 
+
 import threading
 
 from google.appengine.tools.devappserver2 import module
diff --git a/google/appengine/tools/devappserver2/admin/console_test.py b/google/appengine/tools/devappserver2/admin/console_test.py
index 652be69..621c009 100644
--- a/google/appengine/tools/devappserver2/admin/console_test.py
+++ b/google/appengine/tools/devappserver2/admin/console_test.py
@@ -17,6 +17,7 @@
 """Tests for devappserver2.admin.console."""
 
 
+
 import unittest
 
 import google
diff --git a/google/appengine/tools/devappserver2/admin/datastore_viewer.py b/google/appengine/tools/devappserver2/admin/datastore_viewer.py
index 452ffdd..5b97d0f 100644
--- a/google/appengine/tools/devappserver2/admin/datastore_viewer.py
+++ b/google/appengine/tools/devappserver2/admin/datastore_viewer.py
@@ -17,6 +17,7 @@
 """A handler that displays information about datastore entities."""
 
 
+
 import cgi
 import datetime
 import math
diff --git a/google/appengine/tools/devappserver2/admin/datastore_viewer_test.py b/google/appengine/tools/devappserver2/admin/datastore_viewer_test.py
index 5876e72..b0919ac 100644
--- a/google/appengine/tools/devappserver2/admin/datastore_viewer_test.py
+++ b/google/appengine/tools/devappserver2/admin/datastore_viewer_test.py
@@ -17,6 +17,7 @@
 """Tests for devappserver2.admin.datastore_viewer."""
 
 
+
 import datetime
 import os
 import unittest
diff --git a/google/appengine/tools/devappserver2/admin/logs_handler.py b/google/appengine/tools/devappserver2/admin/logs_handler.py
new file mode 100644
index 0000000..7280a44
--- /dev/null
+++ b/google/appengine/tools/devappserver2/admin/logs_handler.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""A handler that displays logs for instances."""
+
+import json
+import os
+
+import google
+import requests
+
+from google.appengine.tools.devappserver2 import log_manager
+from google.appengine.tools.devappserver2.admin import admin_request_handler
+
+
+class LogsHandler(admin_request_handler.AdminRequestHandler):
+  _REQUIRED_PARAMS = ['app', 'module', 'version', 'instance', 'log_type']
+
+  def get(self):
+    try:
+      ps = self.request.params
+      params = dict([(p, ps[p]) for p in self._REQUIRED_PARAMS])
+    except KeyError, e:
+      self.abort(404, detail='Missing log request parameter %s.' % e.message)
+
+    # Forward request to LogServer
+    host = os.environ.get(log_manager.APP_ENGINE_LOG_SERVER_HOST)
+    port = os.environ.get(log_manager.APP_ENGINE_LOG_SERVER_PORT)
+    if not host or not port:
+      self.abort(404, detail='LogServer Host and Port must be set')
+
+    r = requests.get('http://{host}:{port}'.format(host=host, port=port),
+                     params=params)
+    params['logs'] = json.loads(r.text)
+
+    self.response.write(self.render('instance_logs.html', params))
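A hedged sketch of the request shape the new handler expects (not part of the
patch; the admin server address and parameter values are hypothetical, and all
five parameters in _REQUIRED_PARAMS must be present or the handler responds
with a 404):

    import requests

    resp = requests.get('http://localhost:8000/instances/logs',
                        params={'app': 'myapp', 'module': 'default',
                                'version': 'v1', 'instance': '0',
                                'log_type': 'request'})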
diff --git a/google/appengine/tools/devappserver2/admin/memcache_viewer.py b/google/appengine/tools/devappserver2/admin/memcache_viewer.py
index bceac87..308a2df 100644
--- a/google/appengine/tools/devappserver2/admin/memcache_viewer.py
+++ b/google/appengine/tools/devappserver2/admin/memcache_viewer.py
@@ -26,6 +26,7 @@
 """
 
 
+
 import datetime
 import logging
 import urllib
diff --git a/google/appengine/tools/devappserver2/admin/static_file_handler.py b/google/appengine/tools/devappserver2/admin/static_file_handler.py
index fef47d3..0d25b2f 100644
--- a/google/appengine/tools/devappserver2/admin/static_file_handler.py
+++ b/google/appengine/tools/devappserver2/admin/static_file_handler.py
@@ -17,6 +17,7 @@
 """A simple handler to serve static assets."""
 
 
+
 import logging
 import mimetypes
 import os
diff --git a/google/appengine/tools/devappserver2/admin/taskqueue_queues_handler.py b/google/appengine/tools/devappserver2/admin/taskqueue_queues_handler.py
index f574bb6..9229ba0 100644
--- a/google/appengine/tools/devappserver2/admin/taskqueue_queues_handler.py
+++ b/google/appengine/tools/devappserver2/admin/taskqueue_queues_handler.py
@@ -17,6 +17,7 @@
 """A handler that displays queue information for the application."""
 
 
+
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.api.taskqueue import taskqueue_service_pb
 
diff --git a/google/appengine/tools/devappserver2/admin/taskqueue_queues_handler_test.py b/google/appengine/tools/devappserver2/admin/taskqueue_queues_handler_test.py
index 904f44a..1227c43 100644
--- a/google/appengine/tools/devappserver2/admin/taskqueue_queues_handler_test.py
+++ b/google/appengine/tools/devappserver2/admin/taskqueue_queues_handler_test.py
@@ -17,6 +17,7 @@
 """Tests for devappserver2.admin.taskqueue_queues_handler."""
 
 
+
 import unittest
 
 import google
diff --git a/google/appengine/tools/devappserver2/admin/taskqueue_tasks_handler.py b/google/appengine/tools/devappserver2/admin/taskqueue_tasks_handler.py
index c384ce5..80d1fa3 100644
--- a/google/appengine/tools/devappserver2/admin/taskqueue_tasks_handler.py
+++ b/google/appengine/tools/devappserver2/admin/taskqueue_tasks_handler.py
@@ -17,6 +17,7 @@
 """A handler that displays task information for a single queue."""
 
 
+
 import datetime
 import urllib
 
diff --git a/google/appengine/tools/devappserver2/admin/taskqueue_utils.py b/google/appengine/tools/devappserver2/admin/taskqueue_utils.py
index ae3bc28..5ba67af 100644
--- a/google/appengine/tools/devappserver2/admin/taskqueue_utils.py
+++ b/google/appengine/tools/devappserver2/admin/taskqueue_utils.py
@@ -17,6 +17,7 @@
 """Shared utilities for dealing with taskqueue."""
 
 
+
 import datetime
 
 from google.appengine.api import apiproxy_stub_map
diff --git a/google/appengine/tools/devappserver2/admin/taskqueue_utils_test.py b/google/appengine/tools/devappserver2/admin/taskqueue_utils_test.py
index 38aeb26..bd39239 100644
--- a/google/appengine/tools/devappserver2/admin/taskqueue_utils_test.py
+++ b/google/appengine/tools/devappserver2/admin/taskqueue_utils_test.py
@@ -17,6 +17,7 @@
 """Tests for devappserver2.admin.taskqueue_utils."""
 
 
+
 import unittest
 
 import google
diff --git a/google/appengine/tools/devappserver2/admin/templates/instance_logs.css b/google/appengine/tools/devappserver2/admin/templates/instance_logs.css
new file mode 100644
index 0000000..923bae4
--- /dev/null
+++ b/google/appengine/tools/devappserver2/admin/templates/instance_logs.css
@@ -0,0 +1,16 @@
+#tabs {
+  border-bottom: .5em solid #EEEEEE;
+  margin: 0;
+  padding: 0;
+}
+
+#tabs li {
+  display:inline;
+}
+
+#tabs li a {
+  text-decoration: none;
+  padding: 0.25em 1em;
+  color: #000;
+  font-size: 150%;
+}
diff --git a/google/appengine/tools/devappserver2/admin/templates/instance_logs.html b/google/appengine/tools/devappserver2/admin/templates/instance_logs.html
new file mode 100644
index 0000000..8981a56
--- /dev/null
+++ b/google/appengine/tools/devappserver2/admin/templates/instance_logs.html
@@ -0,0 +1,41 @@
+{% extends "skeleton.html" %}
+{% block page_title %}Logs{% endblock %}
+
+{% block page_head %}
+  <style type="text/css">
+    {% include "instance_logs.css" %}
+  </style>
+{% endblock %}
+
+{% block body %}
+  <h3>
+    Logs for instance
+    {% if module.supports_individually_addressable_instances %}
+      <a href="//{{ module.get_instance_address(instance.instance_id) }}"
+          target="_blank">
+    {% endif %}
+      {{ instance.instance_id }}
+    {% if module.supports_individually_addressable_instances %}
+      </a>
+    {% endif %}
+    of module
+    <a href="//{{ module.balanced_address }}" target="_blank">
+      {{ module.name }}
+    </a>.
+  </h3>
+  <ul id="tabs">
+    <!--TODO(gloom): pass the list of logtypes here-->
+    {% for log_type in ['request', 'app', 'appjson'] %}
+      <li id={{ log_type }}>
+        <a href="/instances/logs?app={{ module.app_name_external }}&module={{ module.name }}&version={{ module.version }}&instance={{ instance.instance_id }}&log_type={{ log_type }}">
+          {{ log_type }}
+        </a>
+      </li>
+    {% endfor %}
+  </ul>
+  {% for l in logs %}
+  <li>
+    {{ l }}
+  </li>
+  {% endfor %}
+{% endblock %}
diff --git a/google/appengine/tools/devappserver2/admin/templates/modules.html b/google/appengine/tools/devappserver2/admin/templates/modules.html
index ee72953..8a976df 100644
--- a/google/appengine/tools/devappserver2/admin/templates/modules.html
+++ b/google/appengine/tools/devappserver2/admin/templates/modules.html
@@ -15,46 +15,55 @@
         <th>Latency (ms)</th>
         <th>QPS</th>
         <th>Total Requests</th>
+        <th>Logs</th>
         <th>Runtime</th>
       </tr>
     </thead>
     <tbody>
       {% for module in modules|sort(attribute='name') %}
-      <tr>
-        <th colspan=4>
-          <a href="//{{ module.balanced_address }}" target="_blank">
-            {{ module.name }}</a>
-        </th>
-        <td>
-          {% if module.runtime == "vm" %}
-            vm:
-          {% endif %}
-          {{ module.effective_runtime }}
-        </td>
-      </tr>
-      {% for instance in module.instances|sort(attribute='instance_id') %}
-      <tr class="instance">
-        <td class="instance-id">
-          {% if module.supports_individually_addressable_instances %}
-          <a href="//{{ module.get_instance_address(instance.instance_id) }}"
-            target="_blank">
-          {% endif %}
-          {{ instance.instance_id }}
-          {% if module.supports_individually_addressable_instances %}
-          </a>
-          {% endif %}
-        </td>
-        <td>
-          {{ '%0.1f'|format(instance.get_latency_60s() * 1000) }}
-        </td>
-        <td>
-          {{ '%0.2f'|format(instance.get_qps_60s()) }}
-        </td>
-        <td>
-          {{ instance.total_requests }}
-        </td>
-      </tr>
-      {% endfor %}
+        <tr>
+          <th colspan=5>
+            <a href="//{{ module.balanced_address }}" target="_blank">
+              {{ module.name }}</a>
+          </th>
+          <td>
+            {% if module.runtime == "vm" %}
+              vm:
+            {% endif %}
+            {{ module.effective_runtime }}
+          </td>
+        </tr>
+        {% for instance in module.instances|sort(attribute='instance_id') %}
+          <tr class="instance">
+            <td class="instance-id">
+              {% if module.supports_individually_addressable_instances %}
+                <a href="//{{ module.get_instance_address(instance.instance_id) }}"
+                  target="_blank">
+              {% endif %}
+                {{ instance.instance_id }}
+              {% if module.supports_individually_addressable_instances %}
+                </a>
+              {% endif %}
+            </td>
+            <td>
+              {{ '%0.1f'|format(instance.get_latency_60s() * 1000) }}
+            </td>
+            <td>
+              {{ '%0.2f'|format(instance.get_qps_60s()) }}
+            </td>
+            <td>
+              {{ instance.total_requests }}
+            </td>
+            {% if module.runtime == "vm" %}
+            <td>
+              <a href="/instances/logs?app={{ module.app_name_external }}&module={{ module.name }}&version={{ module.version }}&instance={{ instance.instance_id }}&log_type=request">
+                Logs
+              </a>
+            </td>
+            {% endif %}
+            <td></td>
+          </tr>
+        {% endfor %}
       {% endfor %}
     </tbody>
   </table>
diff --git a/google/appengine/tools/devappserver2/admin/xmpp_request_handler.py b/google/appengine/tools/devappserver2/admin/xmpp_request_handler.py
index 7ca5ef4..63cb6d2 100644
--- a/google/appengine/tools/devappserver2/admin/xmpp_request_handler.py
+++ b/google/appengine/tools/devappserver2/admin/xmpp_request_handler.py
@@ -17,6 +17,7 @@
 """A handler that allows the user to send XMPP messages to their application."""
 
 
+
 import cStringIO
 import random
 import string
diff --git a/google/appengine/tools/devappserver2/admin/xmpp_request_handler_test.py b/google/appengine/tools/devappserver2/admin/xmpp_request_handler_test.py
index 97611cc..43a95fa 100644
--- a/google/appengine/tools/devappserver2/admin/xmpp_request_handler_test.py
+++ b/google/appengine/tools/devappserver2/admin/xmpp_request_handler_test.py
@@ -17,6 +17,7 @@
 """Tests for devappserver2.admin.xmpp_request_handler."""
 
 
+
 import unittest
 
 import google
diff --git a/google/appengine/tools/devappserver2/api_server.py b/google/appengine/tools/devappserver2/api_server.py
index 325d9e1..2b7dfc3 100644
--- a/google/appengine/tools/devappserver2/api_server.py
+++ b/google/appengine/tools/devappserver2/api_server.py
@@ -20,6 +20,7 @@
 """
 
 
+
 import logging
 import os
 import pickle
@@ -334,6 +335,9 @@
     default_gcs_bucket_name: A str, overriding the default bucket behavior.
   """
 
+
+
+
   identity_stub = app_identity_stub.AppIdentityServiceStub.Create(
       email_address=appidentity_email_address,
       private_key_path=appidentity_private_key_path)
@@ -380,8 +384,9 @@
     from google.appengine.api.images import images_stub
   except ImportError:
 
-    logging.warning('Could not initialize images API; you are likely missing '
-                    'the Python "PIL" module.')
+
+
+
     # We register a stub which throws a NotImplementedError for most RPCs.
     from google.appengine.api.images import images_not_implemented_stub
     apiproxy_stub_map.apiproxy.RegisterStub(
diff --git a/google/appengine/tools/devappserver2/api_server_test.py b/google/appengine/tools/devappserver2/api_server_test.py
index 878cda4..1887217 100644
--- a/google/appengine/tools/devappserver2/api_server_test.py
+++ b/google/appengine/tools/devappserver2/api_server_test.py
@@ -17,6 +17,7 @@
 """Tests for google.apphosting.tools.devappserver2.api_server."""
 
 
+
 import cStringIO
 import pickle
 import re
diff --git a/google/appengine/tools/devappserver2/application_configuration.py b/google/appengine/tools/devappserver2/application_configuration.py
index 442b560..6e15648 100644
--- a/google/appengine/tools/devappserver2/application_configuration.py
+++ b/google/appengine/tools/devappserver2/application_configuration.py
@@ -19,6 +19,8 @@
 # TODO: Support more than just app.yaml.
 
 
+
+import datetime
 import errno
 import logging
 import os
@@ -48,6 +50,10 @@
 NOBUILD_FILES_CHANGED = 7
 
 
+
+
+
+
 _HEALTH_CHECK_DEFAULTS = {
     'enable_health_check': True,
     'check_interval_sec': 5,
@@ -361,6 +367,11 @@
         config, files = appinfo_includes.ParseAndReturnIncludePaths(f)
     if self._forced_app_id:
       config.application = self._forced_app_id
+
+    if config.runtime == 'vm' and not config.version:
+      config.version = generate_version_id()
+      logging.info('No version specified. Generated version id: %s',
+                   config.version)
     return config, [configuration_path] + files
 
   def _parse_java_configuration(self, app_engine_web_xml_path):
@@ -859,3 +870,15 @@
       return os.path.join(module_configuration.application_root,
                           error_handler.file)
   return None
+
+
+def generate_version_id(datetime_getter=datetime.datetime.now):
+  """Generates a version id based off the current time.
+
+  Args:
+    datetime_getter: A function that returns a datetime.datetime instance.
+
+  Returns:
+    A version string based on the current time.
+  """
+  return datetime_getter().isoformat().lower().translate(None, ':-')[:15]
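For illustration, the string transformation the new helper performs (this
mirrors the GenerateVersionIdTest expectation added below):

    # datetime(2014, 9, 18, 17, 31, 45, 92949).isoformat()
    #                                -> '2014-09-18T17:31:45.092949'
    # .lower().translate(None, ':-') -> '20140918t173145.092949'
    # [:15]                          -> '20140918t173145'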
diff --git a/google/appengine/tools/devappserver2/application_configuration_test.py b/google/appengine/tools/devappserver2/application_configuration_test.py
index 8471704..75f40b6 100644
--- a/google/appengine/tools/devappserver2/application_configuration_test.py
+++ b/google/appengine/tools/devappserver2/application_configuration_test.py
@@ -17,8 +17,10 @@
 """Tests for google.apphosting.tools.devappserver2.application_configuration."""
 
 
+
 import collections
 from contextlib import contextmanager
+import datetime
 import io
 import os.path
 import shutil
@@ -171,6 +173,29 @@
     self.assertEqual({'/appdir/app.yaml': 10}, config._mtimes)
     self.assertEqual(info.vm_health_check, config.vm_health_check)
 
+  def test_vm_no_version(self):
+    manual_scaling = appinfo.ManualScaling()
+    info = appinfo.AppInfoExternal(
+        application='app',
+        module='module1',
+        runtime='vm',
+        threadsafe=False,
+        manual_scaling=manual_scaling,
+    )
+
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, []))
+    os.path.getmtime('/appdir/app.yaml').AndReturn(10)
+
+    self.mox.StubOutWithMock(application_configuration, 'generate_version_id')
+    application_configuration.generate_version_id().AndReturn(
+        'generated-version')
+    self.mox.ReplayAll()
+    config = application_configuration.ModuleConfiguration('/appdir/app.yaml')
+
+    self.mox.VerifyAll()
+    self.assertEqual(config.major_version, 'generated-version')
+
   def test_set_health_check_defaults(self):
     # Pass nothing in.
     self.assertEqual(
@@ -1148,5 +1173,15 @@
     self.assertEqual(dispatch_config, config.dispatch)
 
 
+class GenerateVersionIdTest(unittest.TestCase):
+  """Tests the GenerateVersionId function."""
+
+  def test_generate_version_id(self):
+    datetime_getter = lambda: datetime.datetime(2014, 9, 18, 17, 31, 45, 92949)
+    generated_version = application_configuration.generate_version_id(
+        datetime_getter)
+    self.assertEqual(generated_version, '20140918t173145')
+
+
 if __name__ == '__main__':
   unittest.main()
diff --git a/google/appengine/tools/devappserver2/blob_download.py b/google/appengine/tools/devappserver2/blob_download.py
index 07455f4..32c7269 100644
--- a/google/appengine/tools/devappserver2/blob_download.py
+++ b/google/appengine/tools/devappserver2/blob_download.py
@@ -17,6 +17,7 @@
 """Rewrites blob download headers in the response with full blob contents."""
 
 
+
 import logging
 
 from google.appengine.api import apiproxy_stub_map
diff --git a/google/appengine/tools/devappserver2/blob_upload.py b/google/appengine/tools/devappserver2/blob_upload.py
index afdc1ba..d8aac17 100644
--- a/google/appengine/tools/devappserver2/blob_upload.py
+++ b/google/appengine/tools/devappserver2/blob_upload.py
@@ -21,6 +21,7 @@
 """
 
 
+
 import base64
 import cgi
 import cStringIO
diff --git a/google/appengine/tools/devappserver2/blob_upload_test.py b/google/appengine/tools/devappserver2/blob_upload_test.py
index eba18ac..f5d4b99 100644
--- a/google/appengine/tools/devappserver2/blob_upload_test.py
+++ b/google/appengine/tools/devappserver2/blob_upload_test.py
@@ -17,6 +17,7 @@
 """Tests for devappserver2.blob_upload."""
 
 
+
 import base64
 import cgi
 import cStringIO
diff --git a/google/appengine/tools/devappserver2/channel.py b/google/appengine/tools/devappserver2/channel.py
index c2cb2e4..7a4039b 100644
--- a/google/appengine/tools/devappserver2/channel.py
+++ b/google/appengine/tools/devappserver2/channel.py
@@ -21,6 +21,7 @@
 """
 
 
+
 import os
 
 import google
diff --git a/google/appengine/tools/devappserver2/channel_test.py b/google/appengine/tools/devappserver2/channel_test.py
index 95fedd5..ca1e2bc 100644
--- a/google/appengine/tools/devappserver2/channel_test.py
+++ b/google/appengine/tools/devappserver2/channel_test.py
@@ -18,6 +18,7 @@
 
 
 
+
 import StringIO
 import unittest
 import urllib
diff --git a/google/appengine/tools/devappserver2/constants.py b/google/appengine/tools/devappserver2/constants.py
index ffe6eb6..779a967 100644
--- a/google/appengine/tools/devappserver2/constants.py
+++ b/google/appengine/tools/devappserver2/constants.py
@@ -17,6 +17,7 @@
 """Constants used by the devappserver2."""
 
 
+
 # These statuses must not include a response body (RFC 2616).
 NO_BODY_RESPONSE_STATUSES = frozenset([100, 101, 204, 304])
 
@@ -29,6 +30,9 @@
 # See:
 # https://developers.google.com/appengine/docs/python/runtime#Request_Headers
 
+
+
+
 IGNORED_REQUEST_HEADERS = frozenset([
     'accept-encoding',
     'connection',
@@ -46,6 +50,9 @@
 # Note: Content-Length is set by a subsequent rewriter or removed.
 # Note: Server and Date are then set by devappserver2.
 
+
+
+
 _COMMON_IGNORED_RESPONSE_HEADERS = frozenset([
     'connection',
     'content-encoding',
diff --git a/google/appengine/tools/devappserver2/devappserver2.py b/google/appengine/tools/devappserver2/devappserver2.py
index ffbc988..29eb023 100644
--- a/google/appengine/tools/devappserver2/devappserver2.py
+++ b/google/appengine/tools/devappserver2/devappserver2.py
@@ -17,6 +17,7 @@
 """The main entry point for the new development server."""
 
 
+
 import argparse
 import errno
 import getpass
@@ -371,7 +372,11 @@
       'can be a boolean, in which case all modules threadsafe setting will '
       'be overridden or a comma-separated list of module:threadsafe_override '
       'e.g. "default:False,backend:True"')
-  common_group.add_argument('--docker_daemon_url', help=argparse.SUPPRESS)
+  common_group.add_argument('--enable_mvm_logs',
+                            action=boolean_action.BooleanAction,
+                            const=True,
+                            default=False,
+                            help=argparse.SUPPRESS)
 
   # PHP
   php_group = parser.add_argument_group('PHP')
@@ -631,7 +636,12 @@
       'decide)')
   misc_group.add_argument(
       '--default_gcs_bucket_name', default=None,
-      help='default Google Cloud Storgage bucket name')
+      help='default Google Cloud Storage bucket name')
+
+
+
+
+
 
 
   return parser
@@ -702,6 +712,8 @@
   def module_to_address(self, module_name, instance=None):
     """Returns the address of a module."""
 
+
+
     if module_name is None:
       return self._dispatcher.dispatch_address
     return self._dispatcher.get_hostname(
@@ -918,8 +930,6 @@
   @staticmethod
   def _create_vm_config(options):
     vm_config = runtime_config_pb2.VMConfig()
-    if options.docker_daemon_url:
-      vm_config.docker_daemon_url = options.docker_daemon_url
     if options.dart_sdk:
       vm_config.dart_config.dart_sdk = os.path.abspath(options.dart_sdk)
     if options.dart_dev_mode:
@@ -928,6 +938,7 @@
       vm_config.dart_config.dart_pub_serve_host = options.dart_pub_serve_host
     if options.dart_pub_serve_port:
       vm_config.dart_config.dart_pub_serve_port = options.dart_pub_serve_port
+    vm_config.enable_logs = options.enable_mvm_logs
     return vm_config
 
   @staticmethod
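
The --docker_daemon_url flag is removed; a hidden boolean flag,
--enable_mvm_logs, takes its place and is copied onto the VM runtime config
(see _create_vm_config above). A minimal sketch of the effect, where
`options` stands for the parsed command-line namespace and runtime_config_pb2
is imported as in this file (illustrative):

    # enable_logs is False unless --enable_mvm_logs was passed.
    vm_config = runtime_config_pb2.VMConfig()
    vm_config.enable_logs = options.enable_mvm_logs
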
diff --git a/google/appengine/tools/devappserver2/devappserver2_test.py b/google/appengine/tools/devappserver2/devappserver2_test.py
index 08ffd26..516226a 100644
--- a/google/appengine/tools/devappserver2/devappserver2_test.py
+++ b/google/appengine/tools/devappserver2/devappserver2_test.py
@@ -17,6 +17,7 @@
 """Tests for google.apphosting.tools.devappserver2.devappserver2."""
 
 
+
 import argparse
 import getpass
 import itertools
diff --git a/google/appengine/tools/devappserver2/dispatcher.py b/google/appengine/tools/devappserver2/dispatcher.py
index c4cd393..057eee3 100644
--- a/google/appengine/tools/devappserver2/dispatcher.py
+++ b/google/appengine/tools/devappserver2/dispatcher.py
@@ -107,8 +107,7 @@
           required configuration for local Google Cloud SQL development. If None
           then Cloud SQL will not be available.
       vm_config: A runtime_config_pb2.VMConfig instance containing
-          VM runtime-specific configuration. If vm_config does not have
-          docker_daemon_url specified all docker-related stuff is disabled.
+          VM runtime-specific configuration.
       module_to_max_instances: A mapping between a module name and the maximum
           number of instances that can be created (this overrides the settings
           found in the configuration argument) e.g.
@@ -661,6 +660,10 @@
     if not hostname or hostname == default_address:
       return self._module_for_request(path), None
 
+
+
+
+
     default_address_offset = hostname.find(default_address)
     if default_address_offset > 0:
       prefix = hostname[:default_address_offset - 1]
diff --git a/google/appengine/tools/devappserver2/dispatcher_test.py b/google/appengine/tools/devappserver2/dispatcher_test.py
index ba1b88a..7afdb7c 100644
--- a/google/appengine/tools/devappserver2/dispatcher_test.py
+++ b/google/appengine/tools/devappserver2/dispatcher_test.py
@@ -47,6 +47,7 @@
   def __init__(self, application, module_name, version, manual_scaling):
     self.application_root = '/'
     self.application = application
+    self.application_external_name = 'app'
     self.module_name = module_name
     self.major_version = version
     self.version_id = '%s:%s.%s' % (module_name, version, '12345')
diff --git a/google/appengine/tools/devappserver2/endpoints/api_config_manager.py b/google/appengine/tools/devappserver2/endpoints/api_config_manager.py
index 72577eb..ad49ddf 100644
--- a/google/appengine/tools/devappserver2/endpoints/api_config_manager.py
+++ b/google/appengine/tools/devappserver2/endpoints/api_config_manager.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import base64
 import json
 import logging
@@ -82,6 +86,9 @@
       body: A string, the JSON body of the getApiConfigs response.
     """
 
+
+
+
     try:
       response_obj = json.loads(body)
     except ValueError, unused_err:
@@ -152,6 +159,12 @@
           The score for the given path.
         """
 
+
+
+
+
+
+
         score = 0
         parts = path.split('/')
         for part in parts:
diff --git a/google/appengine/tools/devappserver2/endpoints/api_config_manager_test.py b/google/appengine/tools/devappserver2/endpoints/api_config_manager_test.py
index 2e43533..d76d320 100644
--- a/google/appengine/tools/devappserver2/endpoints/api_config_manager_test.py
+++ b/google/appengine/tools/devappserver2/endpoints/api_config_manager_test.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import json
 import re
 import unittest
@@ -268,6 +272,8 @@
 class ParameterizedPathTest(unittest.TestCase):
 
 
+
+
   def test_invalid_variable_name_leading_digit(self):
     self.assertEqual(
         None, re.match(api_config_manager._PATH_VARIABLE_PATTERN, '1abc'))
diff --git a/google/appengine/tools/devappserver2/endpoints/api_request.py b/google/appengine/tools/devappserver2/endpoints/api_request.py
index 6e24394..65a4ba5 100644
--- a/google/appengine/tools/devappserver2/endpoints/api_request.py
+++ b/google/appengine/tools/devappserver2/endpoints/api_request.py
@@ -21,6 +21,9 @@
 
 
 
+
+
+
 import cgi
 import copy
 import json
diff --git a/google/appengine/tools/devappserver2/endpoints/api_request_test.py b/google/appengine/tools/devappserver2/endpoints/api_request_test.py
index 2856b76..000778b 100644
--- a/google/appengine/tools/devappserver2/endpoints/api_request_test.py
+++ b/google/appengine/tools/devappserver2/endpoints/api_request_test.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import cStringIO
 import json
 import unittest
diff --git a/google/appengine/tools/devappserver2/endpoints/discovery_api_proxy.py b/google/appengine/tools/devappserver2/endpoints/discovery_api_proxy.py
index c01032b..293b156 100644
--- a/google/appengine/tools/devappserver2/endpoints/discovery_api_proxy.py
+++ b/google/appengine/tools/devappserver2/endpoints/discovery_api_proxy.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import httplib
 import json
 import logging
diff --git a/google/appengine/tools/devappserver2/endpoints/discovery_api_proxy_test.py b/google/appengine/tools/devappserver2/endpoints/discovery_api_proxy_test.py
index 20fc2d4..c34d19c 100644
--- a/google/appengine/tools/devappserver2/endpoints/discovery_api_proxy_test.py
+++ b/google/appengine/tools/devappserver2/endpoints/discovery_api_proxy_test.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import httplib
 import json
 import os
diff --git a/google/appengine/tools/devappserver2/endpoints/discovery_service.py b/google/appengine/tools/devappserver2/endpoints/discovery_service.py
index 32ba9cd..5490485 100644
--- a/google/appengine/tools/devappserver2/endpoints/discovery_service.py
+++ b/google/appengine/tools/devappserver2/endpoints/discovery_service.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import json
 import logging
 
diff --git a/google/appengine/tools/devappserver2/endpoints/discovery_service_test.py b/google/appengine/tools/devappserver2/endpoints/discovery_service_test.py
index 303d9c2..b0439c5 100644
--- a/google/appengine/tools/devappserver2/endpoints/discovery_service_test.py
+++ b/google/appengine/tools/devappserver2/endpoints/discovery_service_test.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import json
 import os
 import unittest
diff --git a/google/appengine/tools/devappserver2/endpoints/endpoints_server.py b/google/appengine/tools/devappserver2/endpoints/endpoints_server.py
index 4edeb3b..dda565c 100644
--- a/google/appengine/tools/devappserver2/endpoints/endpoints_server.py
+++ b/google/appengine/tools/devappserver2/endpoints/endpoints_server.py
@@ -27,6 +27,10 @@
 
 
 
+
+
+
+
 import httplib
 import json
 import logging
@@ -600,6 +604,11 @@
       # we need to call transform_parameter_value on them before calling
       # _add_message_field.
 
+
+
+
+
+
       body_json[key] = parameter_converter.transform_parameter_value(
           key, body_json[key], current_parameter)
       # Remove the old key and try to convert to nested message value
diff --git a/google/appengine/tools/devappserver2/endpoints/endpoints_server_test.py b/google/appengine/tools/devappserver2/endpoints/endpoints_server_test.py
index 1a26184..6e65c40 100644
--- a/google/appengine/tools/devappserver2/endpoints/endpoints_server_test.py
+++ b/google/appengine/tools/devappserver2/endpoints/endpoints_server_test.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import httplib
 import json
 import logging
diff --git a/google/appengine/tools/devappserver2/endpoints/errors.py b/google/appengine/tools/devappserver2/endpoints/errors.py
index 678f194..0248c50 100644
--- a/google/appengine/tools/devappserver2/endpoints/errors.py
+++ b/google/appengine/tools/devappserver2/endpoints/errors.py
@@ -19,6 +19,10 @@
 
 
 
+
+
+
+
 import json
 import logging
 
@@ -203,6 +207,9 @@
 
 
 
+
+
+
 class EnumRejectionError(InvalidParameterError):
   """Custom request rejection exception for enum values."""
 
diff --git a/google/appengine/tools/devappserver2/endpoints/generated_error_info.py b/google/appengine/tools/devappserver2/endpoints/generated_error_info.py
index b3540cd..462cbd7 100644
--- a/google/appengine/tools/devappserver2/endpoints/generated_error_info.py
+++ b/google/appengine/tools/devappserver2/endpoints/generated_error_info.py
@@ -25,6 +25,12 @@
 
 
 
+
+
+
+
+
+
 import collections
 
 
diff --git a/google/appengine/tools/devappserver2/endpoints/parameter_converter.py b/google/appengine/tools/devappserver2/endpoints/parameter_converter.py
index 0535002..579cd42 100644
--- a/google/appengine/tools/devappserver2/endpoints/parameter_converter.py
+++ b/google/appengine/tools/devappserver2/endpoints/parameter_converter.py
@@ -23,6 +23,10 @@
 
 
 
+
+
+
+
 from google.appengine.tools.devappserver2.endpoints import errors
 
 
@@ -83,6 +87,9 @@
     return
 
 
+
+
+
   if value.lower() not in ('1', 'true', '0', 'false'):
     raise errors.BasicTypeParameterError(parameter_name, value, 'boolean')
 
diff --git a/google/appengine/tools/devappserver2/endpoints/test_utils.py b/google/appengine/tools/devappserver2/endpoints/test_utils.py
index 3fae96e..a0eebc9 100644
--- a/google/appengine/tools/devappserver2/endpoints/test_utils.py
+++ b/google/appengine/tools/devappserver2/endpoints/test_utils.py
@@ -19,6 +19,9 @@
 
 
 
+
+
+
 import cStringIO
 import unittest
 import urlparse
diff --git a/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py b/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py
index ce1bcb5..0a329df 100644
--- a/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py
+++ b/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py
@@ -16,6 +16,8 @@
 #
 """Test service for regression testing of Cloud Endpoints support."""
 
+
+
 import logging
 
 import endpoints
diff --git a/google/appengine/tools/devappserver2/endpoints/util.py b/google/appengine/tools/devappserver2/endpoints/util.py
index ceb644a..002043d 100644
--- a/google/appengine/tools/devappserver2/endpoints/util.py
+++ b/google/appengine/tools/devappserver2/endpoints/util.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import json
 
 
diff --git a/google/appengine/tools/devappserver2/endpoints/util_test.py b/google/appengine/tools/devappserver2/endpoints/util_test.py
index 9a41cda..c747ce5 100644
--- a/google/appengine/tools/devappserver2/endpoints/util_test.py
+++ b/google/appengine/tools/devappserver2/endpoints/util_test.py
@@ -18,6 +18,10 @@
 
 
 
+
+
+
+
 import unittest
 
 from google.appengine.tools.devappserver2.endpoints import test_utils
diff --git a/google/appengine/tools/devappserver2/errors.py b/google/appengine/tools/devappserver2/errors.py
index 8ce24ef..b8c2677 100644
--- a/google/appengine/tools/devappserver2/errors.py
+++ b/google/appengine/tools/devappserver2/errors.py
@@ -18,6 +18,7 @@
 
 
 
+
 class Error(Exception):
   """Base class for exceptions in this package."""
 
diff --git a/google/appengine/tools/devappserver2/file_watcher.py b/google/appengine/tools/devappserver2/file_watcher.py
index b0efcdc..e38511b 100644
--- a/google/appengine/tools/devappserver2/file_watcher.py
+++ b/google/appengine/tools/devappserver2/file_watcher.py
@@ -17,6 +17,7 @@
 """Monitors a directory tree for changes."""
 
 
+
 import logging
 import sys
 import types
@@ -50,11 +51,11 @@
     for watcher in self._file_watchers:
       watcher.quit()
 
-  def has_changes(self):
-    # .has_changes() returns True if there has been any changes since the
-    # last call to .has_changes() so it must be called for every FileWatcher
-    # to prevent spurious change notifications on subsequent calls.
-    return any([watcher.has_changes() for watcher in self._file_watchers])
+  def changes(self):
+    # .changes() returns the set of file paths that have changed since the
+    # last call to .changes(), so it must be called for every
+    # FileWatcher to prevent spurious change notifications on subsequent calls.
+    return set.union(*[watcher.changes() for watcher in self._file_watchers])
 
 
 def _create_watcher(directories, watcher_class):
@@ -122,7 +123,7 @@
 
   Returns:
     A FileWatcher appropriate for the current platform. start() must be called
-    before has_changes().
+    before changes().
   """
   assert not isinstance(directories, types.StringTypes), 'expected list got str'
 
diff --git a/google/appengine/tools/devappserver2/gcd_application.py b/google/appengine/tools/devappserver2/gcd_application.py
index 03f40aa..8efc48f 100644
--- a/google/appengine/tools/devappserver2/gcd_application.py
+++ b/google/appengine/tools/devappserver2/gcd_application.py
@@ -17,6 +17,7 @@
 """Cloud datastore Web Application handling."""
 
 
+
 import getpass
 import os
 import shutil
diff --git a/google/appengine/tools/devappserver2/gcs_server.py b/google/appengine/tools/devappserver2/gcs_server.py
index e444628..09c3665 100644
--- a/google/appengine/tools/devappserver2/gcs_server.py
+++ b/google/appengine/tools/devappserver2/gcs_server.py
@@ -21,6 +21,7 @@
 """
 
 
+
 import httplib
 import logging
 import webob
diff --git a/google/appengine/tools/devappserver2/go_application.py b/google/appengine/tools/devappserver2/go_application.py
index c3e20c8..33c207a 100644
--- a/google/appengine/tools/devappserver2/go_application.py
+++ b/google/appengine/tools/devappserver2/go_application.py
@@ -17,6 +17,7 @@
 """An abstraction around the source and executable for a Go application."""
 
 
+
 import atexit
 import errno
 import logging
diff --git a/google/appengine/tools/devappserver2/go_runtime.py b/google/appengine/tools/devappserver2/go_runtime.py
index 2d44ae8..4774156 100644
--- a/google/appengine/tools/devappserver2/go_runtime.py
+++ b/google/appengine/tools/devappserver2/go_runtime.py
@@ -17,6 +17,7 @@
 """Serves content for "script" handlers using the Go runtime."""
 
 
+
 import logging
 import os
 import os.path
diff --git a/google/appengine/tools/devappserver2/http_proxy.py b/google/appengine/tools/devappserver2/http_proxy.py
index 8bdbaf0..de22dd5 100644
--- a/google/appengine/tools/devappserver2/http_proxy.py
+++ b/google/appengine/tools/devappserver2/http_proxy.py
@@ -23,24 +23,16 @@
 import contextlib
 import httplib
 import logging
-import socket
 import urllib
 import wsgiref.headers
 
 from google.appengine.tools.devappserver2 import http_runtime_constants
+from google.appengine.tools.devappserver2 import http_utils
 from google.appengine.tools.devappserver2 import instance
 from google.appengine.tools.devappserver2 import login
 from google.appengine.tools.devappserver2 import util
 
 
-class Error(Exception):
-  """Base class for errors in this module."""
-
-
-class HostNotReachable(Error):
-  """Raised if host can't be reached at given port."""
-
-
 class HttpProxy:
   """Forwards HTTP requests to an application instance."""
   def __init__(self, host, port, instance_died_unexpectedly,
@@ -87,29 +79,14 @@
       retries: int, Number of connection retries.
 
     Raises:
-      HostNotReachable: if host:port can't be reached after given number of
-        retries.
+      http_utils.HostNotReachable: if host:port can't be reached after given
+          number of retries.
     """
     # If there was a prior error, we don't need to wait for a connection.
     if self._prior_error:
       return
 
-    def ping():
-      connection = httplib.HTTPConnection(self._host, self._port)
-      with contextlib.closing(connection):
-        try:
-          connection.connect()
-        except (socket.error, httplib.HTTPException):
-          return False
-        else:
-          return True
-
-    while not ping() and retries > 0:
-      retries -= 1
-    if not retries:
-      raise HostNotReachable(
-          'Cannot connect to the instance on {host}:{port}'.format(
-              host=self._host, port=self._port))
+    http_utils.wait_for_connection(self._host, self._port, retries)
 
   def handle(self, environ, start_response, url_map, match, request_id,
              request_type):
diff --git a/google/appengine/tools/devappserver2/http_proxy_test.py b/google/appengine/tools/devappserver2/http_proxy_test.py
index 6281d3c..962e137 100644
--- a/google/appengine/tools/devappserver2/http_proxy_test.py
+++ b/google/appengine/tools/devappserver2/http_proxy_test.py
@@ -98,30 +98,6 @@
     shutil.rmtree(self.tmpdir)
     self.mox.UnsetStubs()
 
-  def test_wait_for_connection_retries_used_up(self):
-    retries = 5
-    for _ in xrange(0, retries + 1):
-      httplib.HTTPConnection.connect().AndRaise(socket.error)
-      httplib.HTTPConnection.close()
-
-    self.mox.ReplayAll()
-    self.assertRaises(http_proxy.HostNotReachable,
-                      self.proxy.wait_for_connection, retries)
-    self.mox.VerifyAll()
-
-  def test_wait_for_connection_worked(self):
-    retries = 5
-    for _ in xrange(0, retries):
-      httplib.HTTPConnection.connect().AndRaise(socket.error)
-      httplib.HTTPConnection.close()
-
-    httplib.HTTPConnection.connect()
-    httplib.HTTPConnection.close()
-
-    self.mox.ReplayAll()
-    self.proxy.wait_for_connection(retries + 1)
-    self.mox.VerifyAll()
-
   def test_handle_get(self):
     response = FakeHttpResponse(200,
                                 'OK',
diff --git a/google/appengine/tools/devappserver2/http_runtime.py b/google/appengine/tools/devappserver2/http_runtime.py
index 3960374..0beff17 100644
--- a/google/appengine/tools/devappserver2/http_runtime.py
+++ b/google/appengine/tools/devappserver2/http_runtime.py
@@ -34,6 +34,7 @@
 """
 
 
+
 import base64
 import logging
 import os
@@ -116,6 +117,9 @@
   # TODO: Determine if we can always use SIGTERM.
   # Set this to True to quit with SIGTERM rather than SIGKILL
 
+
+
+
   _quit_with_sigterm = False
 
   @classmethod
diff --git a/google/appengine/tools/devappserver2/http_runtime_constants.py b/google/appengine/tools/devappserver2/http_runtime_constants.py
index 19b1312..7fe5e5e 100644
--- a/google/appengine/tools/devappserver2/http_runtime_constants.py
+++ b/google/appengine/tools/devappserver2/http_runtime_constants.py
@@ -17,6 +17,7 @@
 """Constants used for communicating with the Python devappserver2 runtime."""
 
 
+
 SERVER_SOFTWARE = 'Development/2.0'
 
 # Internal AppEngine prefix for Headers (Environment variables)
diff --git a/google/appengine/tools/devappserver2/http_runtime_test.py b/google/appengine/tools/devappserver2/http_runtime_test.py
index 3d9ce09..4e4c5ed 100644
--- a/google/appengine/tools/devappserver2/http_runtime_test.py
+++ b/google/appengine/tools/devappserver2/http_runtime_test.py
@@ -17,6 +17,7 @@
 """Tests for google.appengine.tools.devappserver2.http_runtime."""
 
 
+
 import base64
 import os
 import re
diff --git a/google/appengine/tools/devappserver2/http_utils.py b/google/appengine/tools/devappserver2/http_utils.py
new file mode 100644
index 0000000..8d25d42
--- /dev/null
+++ b/google/appengine/tools/devappserver2/http_utils.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""HTTP utils for devappserver."""
+
+import contextlib
+import httplib
+import socket
+import time
+
+
+class Error(Exception):
+  """Base class for errors in this module."""
+
+
+class HostNotReachable(Error):
+  """Raised if host can't be reached at given port."""
+
+
+def wait_for_connection(host, port, retries=1):
+  """Tries to connect to the given host and port.
+
+  Retries until success or the number of retries is used up.
+
+  Args:
+    host: str, Host to connect to.
+    port: int, Port to connect to.
+    retries: int, Number of connection retries.
+
+  Raises:
+    HostNotReachable: if host:port can't be reached after the given number of
+      retries.
+  """
+  def ping():
+    connection = httplib.HTTPConnection(host, port)
+    with contextlib.closing(connection):
+      try:
+        connection.connect()
+      except (socket.error, httplib.HTTPException):
+        return False
+      else:
+        return True
+
+  while not ping() and retries > 0:
+    retries -= 1
+    if not retries:
+      raise HostNotReachable(
+          'Cannot connect to the instance on {host}:{port}'.format(
+              host=host, port=port))
+    time.sleep(1)
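
Connection polling now lives in this shared helper rather than in http_proxy.
A minimal usage sketch; the host and port are placeholders (illustrative):

    from google.appengine.tools.devappserver2 import http_utils

    try:
      # Ping localhost:8080 up to 100 times, sleeping 1s between attempts.
      http_utils.wait_for_connection('localhost', 8080, retries=100)
    except http_utils.HostNotReachable:
      pass  # the instance never came up
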
diff --git a/google/appengine/tools/devappserver2/inotify_file_watcher.py b/google/appengine/tools/devappserver2/inotify_file_watcher.py
index 83d70f6..a579200 100644
--- a/google/appengine/tools/devappserver2/inotify_file_watcher.py
+++ b/google/appengine/tools/devappserver2/inotify_file_watcher.py
@@ -20,6 +20,7 @@
 """
 
 
+
 import ctypes
 import ctypes.util
 import errno
@@ -45,7 +46,7 @@
 _INOTIFY_EVENT = struct.Struct('iIII')
 _INOTIFY_EVENT_SIZE = _INOTIFY_EVENT.size
 _INTERESTING_INOTIFY_EVENTS = (
-    IN_ATTRIB|IN_MODIFY|IN_MOVED_FROM|IN_MOVED_TO|IN_CREATE|IN_DELETE)
+    IN_ATTRIB | IN_MODIFY | IN_MOVED_FROM | IN_MOVED_TO | IN_CREATE | IN_DELETE)
 
 # inotify only available on Linux and a ctypes.CDLL will raise if code tries to
 # specify the arg types or return type for a non-existent function.
@@ -210,18 +211,25 @@
     """Stop watching the directory for changes."""
     os.close(self._inotify_fd)
 
-  def _get_changed_paths(self):
+  def changes(self, timeout_ms=0):
     """Return paths for changed files and directories.
 
     start() must be called before this method.
 
+    Args:
+      timeout_ms: a timeout in milliseconds for which this watcher will block
+                  waiting for a change. This lets external polling threads
+                  react immediately to a change instead of waiting out
+                  a random polling delay.
+
     Returns:
       A set of strings representing file and directory paths that have changed
       since the last call to changes().
     """
     paths = set()
     while True:
-      if not self._inotify_poll.poll(0):
+      # Don't wait to detect subsequent changes after the initial one.
+      if not self._inotify_poll.poll(0 if paths else timeout_ms):
         break
 
       self._inotify_events += os.read(self._inotify_fd, 1024)
@@ -232,14 +240,15 @@
           break
 
         name = self._inotify_events[
-            _INOTIFY_EVENT_SIZE:_INOTIFY_EVENT_SIZE+length]
+            _INOTIFY_EVENT_SIZE:_INOTIFY_EVENT_SIZE + length]
         name = name.rstrip('\0')
 
         logging.debug('wd=%s, mask=%s, cookie=%s, length=%s, name=%r',
                       wd, _bit_str(mask, _ATTRIBUTE_MASK_NAMES), cookie, length,
                       name)
 
-        self._inotify_events = self._inotify_events[_INOTIFY_EVENT_SIZE+length:]
+        self._inotify_events = self._inotify_events[
+            _INOTIFY_EVENT_SIZE + length:]
 
         if mask & IN_IGNORED:
           continue
@@ -262,6 +271,3 @@
         if path not in paths and not watcher_common.ignore_file(path):
           paths.add(path)
     return paths
-
-  def has_changes(self):
-    return bool(self._get_changed_paths())
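
The private _get_changed_paths()/has_changes() pair collapses into a single
public changes() method that can optionally block. A sketch of the new polling
behavior (illustrative):

    # Block up to 1s for the first event; once one arrives, any queued
    # events are drained with poll(0), so a batch is returned together.
    paths = watcher.changes(timeout_ms=1000)
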
diff --git a/google/appengine/tools/devappserver2/inotify_file_watcher_test.py b/google/appengine/tools/devappserver2/inotify_file_watcher_test.py
index 4a1a4c6..914083e 100644
--- a/google/appengine/tools/devappserver2/inotify_file_watcher_test.py
+++ b/google/appengine/tools/devappserver2/inotify_file_watcher_test.py
@@ -17,6 +17,7 @@
 """Tests for google.apphosting.tools.devappserver2.inotify_file_watcher."""
 
 
+
 import logging
 import os
 import os.path
@@ -66,7 +67,7 @@
     # Divide the remaining number of directories to create among 4
     # subdirectories in an approximate even fashion.
     for i in range(4, 0, -1):
-      sub_dir_size = num_directories/i
+      sub_dir_size = num_directories / i
       self._create_directory_tree(os.path.join(path, 'dir%d' % i), sub_dir_size)
       num_directories -= sub_dir_size
 
@@ -75,7 +76,7 @@
     path = self._create_file('test')
     self.assertEqual(
         set([path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_file_modified(self):
     path = self._create_file('test')
@@ -84,7 +85,7 @@
       f.write('testing')
     self.assertEqual(
         set([path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_file_read(self):
     path = self._create_file('test')
@@ -96,7 +97,7 @@
     # Reads should not trigger updates.
     self.assertEqual(
         set(),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_file_deleted(self):
     path = self._create_file('test')
@@ -104,7 +105,7 @@
     os.remove(path)
     self.assertEqual(
         set([path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_file_renamed(self):
     source = self._create_file('test')
@@ -113,14 +114,14 @@
     os.rename(source, target)
     self.assertEqual(
         set([source, target]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_create_directory(self):
     self._watcher.start()
     directory = self._create_directory('test')
     self.assertEqual(
         set([directory]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_file_created_in_directory(self):
     directory = self._create_directory('test')
@@ -128,7 +129,7 @@
     path = self._create_file('test/file')
     self.assertEqual(
         set([path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_move_directory(self):
     source = self._create_directory('test')
@@ -137,7 +138,7 @@
     os.rename(source, target)
     self.assertEqual(
         set([source, target]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_move_directory_out_of_watched(self):
     source = self._create_directory('test')
@@ -146,13 +147,13 @@
     os.rename(source, target)
     self.assertEqual(
         set([source]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
     with open(os.path.join(target, 'file'), 'w'):
       pass
     # Changes to files in subdirectories that have been moved should be ignored.
     self.assertEqual(
         set([]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_move_directory_into_watched(self):
     source = os.path.join(self._junk_directory, 'source')
@@ -162,13 +163,13 @@
     os.rename(source, target)
     self.assertEqual(
         set([target]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
     file_path = os.path.join(target, 'file')
     with open(file_path, 'w+'):
       pass
     self.assertEqual(
         set([file_path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_directory_deleted(self):
     path = self._create_directory('test')
@@ -176,7 +177,7 @@
     os.rmdir(path)
     self.assertEqual(
         set([path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_subdirectory_deleted(self):
     """Tests that internal _directory_to_subdirs is updated on delete."""
@@ -190,7 +191,7 @@
     os.rmdir(sub_path)
     self.assertEqual(
         set([sub_path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
     self.assertEqual(
         set(),
         self._watcher._directory_to_subdirs[path])
@@ -198,7 +199,7 @@
     os.rmdir(path)
     self.assertEqual(
         set([path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_symlink_directory(self):
     sym_target = os.path.join(self._directory, 'test')
@@ -209,27 +210,27 @@
     os.symlink(self._junk_directory, sym_target)
     self.assertEqual(
         set([sym_target]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
     # Check that a file added to the symlinked directory is reported.
     with open(os.path.join(self._junk_directory, 'file1'), 'w'):
       pass
     self.assertEqual(
         set([os.path.join(self._directory, 'test', 'file1')]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
     # Check that modifying the file in the symlinked directory is reported.
     with open(os.path.join(self._junk_directory, 'file1'), 'w') as fp:
       fp.write('some data')
     self.assertEqual(
         set([os.path.join(self._directory, 'test', 'file1')]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
     # Check that a removed symlinked directory is reported.
     os.remove(sym_target)
     self.assertEqual(
         set([sym_target]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
     # Check that a file added to the removed symlinked directory is *not*
     # reported.
@@ -237,7 +238,7 @@
       pass
     self.assertEqual(
         set(),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   @unittest.skip('b/11896748')
   def test_symlink_file(self):
@@ -251,20 +252,20 @@
     os.symlink(actual_file, symbolic_link)
     self.assertEqual(
         set([symbolic_link]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
     # Check that modifying the source file is reported.
     with open(actual_file, 'w') as fp:
       fp.write('some data')
     self.assertEqual(
         set([symbolic_link]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
     # Check that deleting the source file is reported.
     os.unlink(actual_file)
     self.assertEqual(
         set([symbolic_link]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_many_directories(self):
     # Linux supports a limited number of watches per file descriptor. The
@@ -274,7 +275,7 @@
     path = self._create_file('bigdir/dir4/dir4/file')
     self.assertEqual(
         set([path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def test_internal_symlinks_relative(self):
     dir_a_b = self._create_directory('a/b')
@@ -283,8 +284,8 @@
     self._create_directory('p/q/r')
     self._watcher.start()
     shutil.rmtree(dir_p)
-    self._watcher._get_changed_paths()
-    # TODO: validate the value returned from _get_changed_paths once
+    self._watcher.changes()
+    # TODO: validate the value returned from self._watcher.changes() once
     # a solution is designed.
 
   def test_internal_symlinks_absolute(self):
@@ -294,8 +295,8 @@
     self._create_directory('p/q/r')
     self._watcher.start()
     shutil.rmtree(dir_p)
-    self._watcher._get_changed_paths()
-    # TODO: validate the value returned from _get_changed_paths once
+    self._watcher.changes()
+    # TODO: validate the value returned from self._watcher.changes() once
     # a solution is designed.
 
   @unittest.skip('b/14583335')
@@ -319,7 +320,7 @@
       f.write('change1')
     self.assertEqual(
         set([watched_junk_file]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
     # Temporarily create a second symlink to the junk directory. We don't
     # care which changed paths are reported, we just need to make sure the
@@ -327,16 +328,16 @@
     # when it is removed.
     symlink_junkdir_2 = os.path.join(self._directory, 'junk2')
     os.symlink(self._junk_directory, symlink_junkdir_2)
-    self._watcher._get_changed_paths()
+    self._watcher.changes()
     os.unlink(symlink_junkdir_2)
-    self._watcher._get_changed_paths()
+    self._watcher.changes()
 
     # And make sure changes to the file are still reported.
     with open(junk_file, 'w') as f:
       f.write('change2')
     self.assertEqual(
         set([watched_junk_file]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
 
 @unittest.skipUnless(sys.platform.startswith('linux'), 'requires linux')
@@ -364,20 +365,20 @@
     path = self._create_file(self._directories[0], 'moo')
     self.assertEqual(
         set([path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def testInDir2(self):
     path = self._create_file(self._directories[2], 'moo')
     self.assertEqual(
         set([path]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
   def testInDir1And3(self):
     path1 = self._create_file(self._directories[1], 'moo')
     path3 = self._create_file(self._directories[3], 'moo')
     self.assertEqual(
         set([path1, path3]),
-        self._watcher._get_changed_paths())
+        self._watcher.changes())
 
 
 class TestBitStr(unittest.TestCase):
diff --git a/google/appengine/tools/devappserver2/instance.py b/google/appengine/tools/devappserver2/instance.py
index 2eebbec..d3d7888 100644
--- a/google/appengine/tools/devappserver2/instance.py
+++ b/google/appengine/tools/devappserver2/instance.py
@@ -17,6 +17,7 @@
 """Manage the lifecycle of runtime processes and dispatch requests to them."""
 
 
+
 import collections
 import logging
 import threading
diff --git a/google/appengine/tools/devappserver2/instance_test.py b/google/appengine/tools/devappserver2/instance_test.py
index 871379d..58e9cde 100644
--- a/google/appengine/tools/devappserver2/instance_test.py
+++ b/google/appengine/tools/devappserver2/instance_test.py
@@ -17,6 +17,7 @@
 """Tests for google.apphosting.tools.devappserver2.instance."""
 
 
+
 import time
 import unittest
 
diff --git a/google/appengine/tools/devappserver2/java_application.py b/google/appengine/tools/devappserver2/java_application.py
index 27e9f9d..5494f57 100644
--- a/google/appengine/tools/devappserver2/java_application.py
+++ b/google/appengine/tools/devappserver2/java_application.py
@@ -17,6 +17,8 @@
 """An abstraction around the source and classfiles for a Java application."""
 
 
+
+
 import os
 import os.path
 import google
diff --git a/google/appengine/tools/devappserver2/java_runtime.py b/google/appengine/tools/devappserver2/java_runtime.py
index ee4e741..f510d23 100644
--- a/google/appengine/tools/devappserver2/java_runtime.py
+++ b/google/appengine/tools/devappserver2/java_runtime.py
@@ -17,6 +17,8 @@
 """Serves content for "script" handlers using the Java runtime."""
 
 
+
+
 import os
 import os.path
 import sys
diff --git a/google/appengine/tools/devappserver2/log_manager.py b/google/appengine/tools/devappserver2/log_manager.py
new file mode 100644
index 0000000..1966572
--- /dev/null
+++ b/google/appengine/tools/devappserver2/log_manager.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""LogManager for Managed VMs modules.
+
+Should be accessed by get() function.
+"""
+
+import httplib
+import logging
+import os
+import threading
+import urllib
+
+import google
+
+from google.pyglib import singleton
+
+from google.appengine.tools.devappserver2 import http_utils
+from google.appengine.tools.docker import containers
+
+
+APP_ENGINE_LOG_SERVER_HOST = 'APP_ENGINE_LOG_SERVER_HOST'
+APP_ENGINE_LOG_SERVER_PORT = 'APP_ENGINE_LOG_SERVER_PORT'
+
+_DB_PATH = '/var/log/sqlite'
+_LOGS_PATH = '/var/log/app_engine'
+_TD_AGENT_PATH = '/var/tmp/td-agent'
+
+_LOG_PROCESSOR_IMAGE = 'google/appengine-log-processor'
+_LOG_SERVER_IMAGE = 'google/appengine-log-server'
+_DEFAULT_LOG_SERVER_PORT = 8080
+
+_LOG_TYPES = ['app', 'appjson', 'request']
+
+
+# TODO: more escaping.
+def _escape(s):
+  return s.replace('-', '_')
+
+
+def _make_container_name(app, module, version, instance):
+  tmpl = '{app}_{module}_{version}_{instance}'
+  return _escape(tmpl.format(app=app, module=module,
+                             version=version, instance=instance))
+
+
+def _make_external_logs_path(app, module, version, instance):
+  return os.path.join(_LOGS_PATH,
+                      app, module, version, instance)
+
+
+def _describe_volume(internal, external=None):
+  return (external if external else internal), {'bind': internal}
+
+
+class _LogManagerDisabled(object):
+  """Base class for Log Managers. Logs are disabled by default."""
+
+  def __init__(self, docker_client, log_server_port):
+    pass
+
+  def start(self):
+    pass
+
+  def add(self, app, module, version, instance):
+    pass
+
+  @property
+  def host(self):
+    return ''
+
+  @property
+  def port(self):
+    return -1
+
+
+class _LogManager(_LogManagerDisabled):
+  """Manages creation of log server and log processors for each instance."""
+
+  def __init__(self, docker_client, log_server_port):
+    super(_LogManager, self).__init__(docker_client, log_server_port)
+
+    self._docker_client = docker_client
+
+    volumes = [_describe_volume(_DB_PATH)]
+    self._server = containers.Container(
+        self._docker_client,
+        containers.ContainerOptions(
+            image_opts=containers.ImageOptions(tag=_LOG_SERVER_IMAGE),
+            port=log_server_port,
+            volumes=dict(volumes)))
+
+    self._lock = threading.RLock()
+    self._containers = {}
+
+  def __del__(self):
+    for c in self._containers.itervalues():
+      c.Stop()
+    self._server.Stop()
+
+  def start(self):
+    self._server.Start()
+    http_utils.wait_for_connection(self._server.host, self._server.port, 100)
+
+  def add(self, app, module, version, instance):
+    container_name = _make_container_name(app, module, version, instance)
+
+    def _create_table(log_type):
+      """Sends a request to log-server container to create a table if needed."""
+      params = urllib.urlencode({
+          'app': _escape(app), 'module': _escape(module),
+          'version': _escape(version), 'instance': _escape(instance),
+          'log_type': log_type})
+      headers = {
+          'Content-Type': 'application/x-www-form-urlencoded',
+          'Accept': 'text/plain'}
+
+      conn = httplib.HTTPConnection(self._server.host, self._server.port)
+      conn.request('POST', '/submit', params, headers)
+      response = conn.getresponse()
+      logging.debug(
+          'Sent table creation request to {host}:{port}?{params}. '
+          'Received {status}, reason: {reason}.'.format(
+              host=self._server.host, port=self._server.port,
+              params=params, status=response.status, reason=response.reason))
+
+    def _make_logs_container():
+      """Creates a log-processor container."""
+
+      environment = {
+          'LOGS_PATH': _LOGS_PATH,
+          'PREFIX': container_name
+      }
+
+      volumes = [
+          _describe_volume(_LOGS_PATH,
+                           _make_external_logs_path(app, module,
+                                                    version, instance)),
+          _describe_volume(_DB_PATH),
+          _describe_volume(_TD_AGENT_PATH)
+      ]
+
+      return containers.Container(
+          self._docker_client,
+          containers.ContainerOptions(
+              image_opts=containers.ImageOptions(tag=_LOG_PROCESSOR_IMAGE),
+              environment=environment,
+              volumes=dict(volumes)))
+    with self._lock:
+      if container_name in self._containers:
+        return
+
+      for l in _LOG_TYPES:
+        _create_table(l)
+
+      container = _make_logs_container()
+      self._containers[container_name] = container
+    container.Start()
+
+  @property
+  def host(self):
+    return self._server.host
+
+  @property
+  def port(self):
+    return self._server.port
+
+
+@singleton.Singleton
+class LogManagerDisabled(_LogManagerDisabled):
+  """Singleton instance of _LogManagerDisabled."""
+
+
+@singleton.Singleton
+class LogManager(_LogManager):
+  """Singleton instance of _LogManager."""
+
+
+# TODO: images lookup before confirming that logs are enabled.
+def get(docker_client=None, log_server_port=_DEFAULT_LOG_SERVER_PORT,
+        enable_logging=False):
+  """Returns a LogManager/LogManagerDisabled instance. Creates one if needed."""
+  c = LogManager if enable_logging else LogManagerDisabled
+  try:
+    instance = c(docker_client, log_server_port)
+    instance.start()
+
+    # To pass these values to Admin Server to query logs.
+    os.environ[APP_ENGINE_LOG_SERVER_HOST] = instance.host
+    os.environ[APP_ENGINE_LOG_SERVER_PORT] = str(instance.port)
+  except singleton.ConstructorCalledAgainError:
+    instance = c.Singleton()
+  return instance
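
A hypothetical usage sketch for the new log manager; the docker client
construction happens outside this file, and the app/module/version/instance
names below are placeholders (illustrative, not SDK code):

    manager = log_manager.get(docker_client=docker_client,
                              enable_logging=True)
    # Creates the per-instance log-processor container and its tables; the
    # log server address is also exported via the APP_ENGINE_LOG_SERVER_*
    # environment variables for the admin server.
    manager.add('myapp', 'default', 'v1', '0')
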
diff --git a/google/appengine/tools/devappserver2/login.py b/google/appengine/tools/devappserver2/login.py
index 717e2fb..2e6d0c7 100644
--- a/google/appengine/tools/devappserver2/login.py
+++ b/google/appengine/tools/devappserver2/login.py
@@ -29,6 +29,7 @@
 """
 
 
+
 import cgi
 import Cookie
 import hashlib
@@ -207,6 +208,9 @@
   else:
     login_message = 'Not logged in'
 
+
+
+
     email = 'test\x40example.com'
   admin_checked = 'checked' if admin else ''
 
diff --git a/google/appengine/tools/devappserver2/login_test.py b/google/appengine/tools/devappserver2/login_test.py
index b2428b4..1273e04 100644
--- a/google/appengine/tools/devappserver2/login_test.py
+++ b/google/appengine/tools/devappserver2/login_test.py
@@ -17,6 +17,7 @@
 """Tests for devappserver2.login."""
 
 
+
 import Cookie
 import unittest
 import urllib
diff --git a/google/appengine/tools/devappserver2/module.py b/google/appengine/tools/devappserver2/module.py
index 162ed7e..e1f562c 100644
--- a/google/appengine/tools/devappserver2/module.py
+++ b/google/appengine/tools/devappserver2/module.py
@@ -17,6 +17,7 @@
 """Manage the lifecycle of runtime processes and dispatch requests to them."""
 
 
+
 import collections
 import cStringIO
 import functools
@@ -108,6 +109,13 @@
 <A HREF="%(correct-url)s">here</A>.
 </BODY></HTML>'''
 
+_TIMEOUT_HTML = '<HTML><BODY>503 - This request has timed out.</BODY></HTML>'
+
+# Factor applied to the request timeouts to compensate for the
+# long vm_engine reloads. TODO: remove this once the vm_engine
+# reload has been optimized.
+_VMENGINE_SLOWDOWN_FACTOR = 2
+
 
 def _static_files_regex_from_handlers(handlers):
   patterns = []
@@ -172,6 +180,18 @@
         'java7': java_runtime.JavaRuntimeInstanceFactory,
     })
 
+  _MAX_REQUEST_WAIT_TIME = 10
+
+  def _get_wait_time(self):
+    """Gets the wait time before timing out a request.
+
+    Returns:
+      The timeout value in seconds.
+    """
+    if self.vm_enabled():
+      return self._MAX_REQUEST_WAIT_TIME * _VMENGINE_SLOWDOWN_FACTOR
+    return self._MAX_REQUEST_WAIT_TIME
+
   def _create_instance_factory(self,
                                module_configuration):
     """Create an instance.InstanceFactory.
@@ -356,14 +376,15 @@
     # Always check for config and file changes because checking also clears
     # pending changes.
     config_changes = self._module_configuration.check_for_updates()
-    has_file_changes = self._watcher.has_changes()
-
+    file_changes = self._watcher.changes()
     if application_configuration.HANDLERS_CHANGED in config_changes:
       handlers = self._create_url_handlers()
       with self._handler_lock:
         self._handlers = handlers
 
-    if has_file_changes:
+    if file_changes:
+      logging.info(
+          'Detected file changes:\n  %s', '\n  '.join(sorted(file_changes)))
       self._instance_factory.files_changed()
 
     if config_changes & _RESTART_INSTANCES_CONFIG_CHANGES:
@@ -371,7 +392,7 @@
 
     self._maybe_restart_instances(
         config_changed=bool(config_changes & _RESTART_INSTANCES_CONFIG_CHANGES),
-        file_changed=has_file_changes)
+        file_changed=bool(file_changes))
 
   def __init__(self,
                module_configuration,
@@ -443,6 +464,8 @@
     """
     self._module_configuration = module_configuration
     self._name = module_configuration.module_name
+    self._version = module_configuration.major_version
+    self._app_name_external = module_configuration.application_external_name
     self._host = host
     self._api_host = api_host
     self._api_port = api_port
@@ -464,8 +487,7 @@
     self._default_version_port = default_version_port
     self._port_registry = port_registry
 
-    # TODO: remove when GA.
-    if self._vm_config and self._vm_config.HasField('docker_daemon_url'):
+    if self.vm_enabled():
       self._RUNTIME_INSTANCE_FACTORIES['vm'] = (
           vm_runtime_factory.VMRuntimeInstanceFactory)
 
@@ -484,6 +506,10 @@
         (self._host, self._balanced_port), self)
     self._quit_event = threading.Event()  # Set when quit() has been called.
 
+  def vm_enabled(self):
+    # TODO: change when GA
+    return self._vm_config
+
   @property
   def name(self):
     """The name of the module, as defined in app.yaml.
@@ -494,6 +520,24 @@
     return self._name
 
   @property
+  def version(self):
+    """The version of the module, as defined in app.yaml.
+
+    This value will be constant for the lifetime of the module even if the
+    module configuration changes.
+    """
+    return self._version
+
+  @property
+  def app_name_external(self):
+    """The external application name of the module, as defined in app.yaml.
+
+    This value will be constant for the lifetime of the module even if the
+    module configuration changes.
+    """
+    return self._app_name_external
+
+  @property
   def ready(self):
     """The module is ready to handle HTTP requests."""
     return self._balanced_module.ready
@@ -570,7 +614,13 @@
     start_response('404 Not Found', [('Content-Type', 'text/plain')])
     return ['The url "%s" does not match any handlers.' % environ['PATH_INFO']]
 
-  def _error_response(self, environ, start_response, status):
+  def _error_response(self, environ, start_response, status, body=None):
+    if body:
+      start_response(
+          '%d %s' % (status, httplib.responses[status]),
+          [('Content-Type', 'text/html'),
+           ('Content-Length', str(len(body)))])
+      return body
     start_response('%d %s' % (status, httplib.responses[status]), [])
     return []
 
@@ -679,6 +729,12 @@
         # all the data. To match the behavior of production, for large files
         # < 64M read the data to prevent the client bug from being triggered.
 
+
+
+
+
+
+
         if content_length <= _MAX_UPLOAD_NO_TRIGGER_BAD_CLIENT_BYTES:
           environ['wsgi.input'].read(content_length)
         status = '%d %s' % (httplib.REQUEST_ENTITY_TOO_LARGE,
@@ -1098,7 +1154,6 @@
                                             allow_skipped_files,
                                             threadsafe_override)
 
-
     self._process_automatic_scaling(
         self._module_configuration.automatic_scaling)
 
@@ -1435,7 +1490,6 @@
   """A pool of instances that is manually-scaled."""
 
   _DEFAULT_MANUAL_SCALING = appinfo.ManualScaling(instances='1')
-  _MAX_REQUEST_WAIT_TIME = 10
 
   @classmethod
   def _populate_default_manual_scaling(cls, manual_scaling):
@@ -1473,7 +1527,6 @@
                automatic_restarts,
                allow_skipped_files,
                threadsafe_override):
-
     """Initializer for ManualScalingModule.
 
     Args:
@@ -1543,7 +1596,6 @@
                                               allow_skipped_files,
                                               threadsafe_override)
 
-
     self._process_manual_scaling(module_configuration.manual_scaling)
 
     self._instances = []  # Protected by self._condition.
@@ -1638,7 +1690,7 @@
       An iterable over strings containing the body of the HTTP response.
     """
     start_time = time.time()
-    timeout_time = start_time + self._MAX_REQUEST_WAIT_TIME
+    timeout_time = start_time + self._get_wait_time()
     try:
       while time.time() < timeout_time:
         logging.debug('Dispatching request to %s after %0.4fs pending',
@@ -1696,7 +1748,8 @@
           request_type)
 
     start_time = time.time()
-    timeout_time = start_time + self._MAX_REQUEST_WAIT_TIME
+    timeout_time = start_time + self._get_wait_time()
+
     while time.time() < timeout_time:
       if ((request_type in (instance.NORMAL_REQUEST, instance.READY_REQUEST) and
            self._suspended) or self._quit_event.is_set()):
@@ -1714,7 +1767,7 @@
           with self._condition:
             self._condition.notify()
     else:
-      return self._error_response(environ, start_response, 503)
+      return self._error_response(environ, start_response, 503, _TIMEOUT_HTML)
 
   def _add_instance(self):
     """Creates and adds a new instance.Instance to the Module.
@@ -1809,20 +1862,21 @@
     # Always check for config and file changes because checking also clears
     # pending changes.
     config_changes = self._module_configuration.check_for_updates()
-    has_file_changes = self._watcher.has_changes()
-
+    file_changes = self._watcher.changes()
     if application_configuration.HANDLERS_CHANGED in config_changes:
       handlers = self._create_url_handlers()
       with self._handler_lock:
         self._handlers = handlers
 
-    if has_file_changes:
+    if file_changes:
+      logging.info(
+          'Detected file changes:\n  %s', '\n  '.join(sorted(file_changes)))
       self._instance_factory.files_changed()
 
     if config_changes & _RESTART_INSTANCES_CONFIG_CHANGES:
       self._instance_factory.configuration_changed(config_changes)
 
-    if config_changes & _RESTART_INSTANCES_CONFIG_CHANGES or has_file_changes:
+    if config_changes & _RESTART_INSTANCES_CONFIG_CHANGES or file_changes:
       with self._instances_change_lock:
         if not self._suspended:
           self.restart()
@@ -1997,7 +2051,6 @@
 
   _DEFAULT_BASIC_SCALING = appinfo.BasicScaling(max_instances='1',
                                                 idle_timeout='15m')
-  _MAX_REQUEST_WAIT_TIME = 10
 
   @staticmethod
   def _parse_idle_timeout(timing):
@@ -2056,7 +2109,6 @@
                automatic_restarts,
                allow_skipped_files,
                threadsafe_override):
-
     """Initializer for BasicScalingModule.
 
     Args:
@@ -2221,7 +2273,7 @@
     """
     instance_id = inst.instance_id
     start_time = time.time()
-    timeout_time = start_time + self._MAX_REQUEST_WAIT_TIME
+    timeout_time = start_time + self._get_wait_time()
     try:
       while time.time() < timeout_time:
         logging.debug('Dispatching request to %s after %0.4fs pending',
@@ -2287,7 +2339,7 @@
           request_type)
 
     start_time = time.time()
-    timeout_time = start_time + self._MAX_REQUEST_WAIT_TIME
+    timeout_time = start_time + self._get_wait_time()
     while time.time() < timeout_time:
       if self._quit_event.is_set():
         return self._error_response(environ, start_response, 404)
@@ -2304,7 +2356,7 @@
           with self._condition:
             self._condition.notify()
     else:
-      return self._error_response(environ, start_response, 503)
+      return self._error_response(environ, start_response, 503, _TIMEOUT_HTML)
 
   def _start_any_instance(self):
     """Choose an inactive instance and start it asynchronously.
@@ -2373,20 +2425,20 @@
     # Always check for config and file changes because checking also clears
     # pending changes.
     config_changes = self._module_configuration.check_for_updates()
-    has_file_changes = self._watcher.has_changes()
+    file_changes = self._watcher.changes()
 
     if application_configuration.HANDLERS_CHANGED in config_changes:
       handlers = self._create_url_handlers()
       with self._handler_lock:
         self._handlers = handlers
 
-    if has_file_changes:
+    if file_changes:
       self._instance_factory.files_changed()
 
     if config_changes & _RESTART_INSTANCES_CONFIG_CHANGES:
       self._instance_factory.configuration_changed(config_changes)
 
-    if config_changes & _RESTART_INSTANCES_CONFIG_CHANGES or has_file_changes:
+    if config_changes & _RESTART_INSTANCES_CONFIG_CHANGES or file_changes:
       self.restart()
 
   def _loop_watching_for_changes_and_idle_instances(self):
@@ -2603,7 +2655,7 @@
     assert request_type == instance.INTERACTIVE_REQUEST
 
     start_time = time.time()
-    timeout_time = start_time + self._MAX_REQUEST_WAIT_TIME
+    timeout_time = start_time + self._get_wait_time()
 
     while time.time() < timeout_time:
       new_instance = False
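
The module.py hunks above share one pattern: every dispatch loop now takes its deadline from self._get_wait_time() rather than reading _MAX_REQUEST_WAIT_TIME at the call site, and on timeout _error_response is handed a 503 plus an explanatory _TIMEOUT_HTML body. A minimal sketch of that pattern, assuming simplified stand-ins for the real module.Module members (the actual methods take more arguments):

    import httplib
    import time

    _TIMEOUT_HTML = '<html><body>The request timed out.</body></html>'

    class ModuleSketch(object):
      _MAX_REQUEST_WAIT_TIME = 10

      def _get_wait_time(self):
        # One override point for the deadline, instead of reading the
        # class constant at every call site.
        return self._MAX_REQUEST_WAIT_TIME

      def _error_response(self, environ, start_response, status, body=''):
        start_response('%d %s' % (status, httplib.responses.get(status, '')),
                       [('Content-Type', 'text/html'),
                        ('Content-Length', str(len(body)))])
        return [body]

      def dispatch(self, environ, start_response, try_once):
        timeout_time = time.time() + self._get_wait_time()
        while time.time() < timeout_time:
          response = try_once(environ, start_response)
          if response is not None:
            return response
          time.sleep(0.01)  # brief backoff before retrying
        # Deadline passed: serve 503 with the explanatory timeout page.
        return self._error_response(environ, start_response, 503,
                                    _TIMEOUT_HTML)
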
diff --git a/google/appengine/tools/devappserver2/module_test.py b/google/appengine/tools/devappserver2/module_test.py
index 92b035b..3887596 100644
--- a/google/appengine/tools/devappserver2/module_test.py
+++ b/google/appengine/tools/devappserver2/module_test.py
@@ -17,6 +17,7 @@
 """Tests for google.apphosting.tools.devappserver2.module."""
 
 
+
 import functools
 import httplib
 import logging
@@ -44,6 +45,7 @@
 
 
 class ModuleConfigurationStub(object):
+
   def __init__(self,
                application_root='/root',
                application='app',
@@ -60,7 +62,8 @@
                env_variables=None,
                manual_scaling=None,
                basic_scaling=None,
-               vm_health_check=None):
+               vm_health_check=None,
+               application_external_name='app'):
     self.application_root = application_root
     self.application = application
     self.module_name = module_name
@@ -78,12 +81,14 @@
     self.version_id = '%s:%s.%s' % (module_name, version, '12345')
     self.is_backend = False
     self.vm_health_check = vm_health_check
+    self.application_external_name = application_external_name
 
   def check_for_updates(self):
     return set()
 
 
 class ModuleFacade(module.Module):
+
   def __init__(self,
                module_configuration=ModuleConfigurationStub(),
                instance_factory=None,
@@ -126,6 +131,7 @@
 
 
 class AutoScalingModuleFacade(module.AutoScalingModule):
+
   def __init__(self,
                module_configuration=ModuleConfigurationStub(),
                balanced_port=0,
@@ -168,6 +174,7 @@
 
 
 class ManualScalingModuleFacade(module.ManualScalingModule):
+
   def __init__(self,
                module_configuration=None,
                balanced_port=0,
@@ -212,6 +219,7 @@
 
 
 class BasicScalingModuleFacade(module.BasicScalingModule):
+
   def __init__(self,
                host='fakehost',
                module_configuration=ModuleConfigurationStub(),
@@ -432,7 +440,7 @@
                        static_files=r'app_readable_static_images/\\1',
                        upload=r'app_readable_static_images/*.png',
                        application_readable=True),
-        ]
+    ]
     self.instance_factory = instance.InstanceFactory(None, 1)
 
   def test_static_files_regex(self):
@@ -562,7 +570,7 @@
         handlers=handlers,
         inbound_services=['warmup'],
         env_variables=appinfo.EnvironmentVariables(),
-        )
+    )
     config_path = '/appdir/app.yaml'
     application_configuration.ModuleConfiguration._parse_configuration(
         config_path).AndReturn((info, [config_path]))
@@ -591,7 +599,7 @@
         threadsafe=False,
         manual_scaling=manual_scaling,
         handlers=handlers,
-        )
+    )
     config_path = '/appdir/app.yaml'
     application_configuration.ModuleConfiguration._parse_configuration(
         config_path).AndReturn((info, [config_path]))
@@ -879,6 +887,7 @@
   """Tests for module.AutoScalingModule._split_instances."""
 
   class Instance(object):
+
     def __init__(self, num_outstanding_requests, can_accept_requests=True):
       self.num_outstanding_requests = num_outstanding_requests
       self.can_accept_requests = can_accept_requests
@@ -971,6 +980,7 @@
   """Tests for module.AutoScalingModule._choose_instance."""
 
   class Instance(object):
+
     def __init__(self, num_outstanding_requests, can_accept_requests=True):
       self.num_outstanding_requests = num_outstanding_requests
       self.remaining_request_capacity = 10 - num_outstanding_requests
@@ -1055,6 +1065,7 @@
   """Tests for module.AutoScalingModule._adjust_instances."""
 
   class Instance(object):
+
     def __init__(self, num_outstanding_requests):
       self.num_outstanding_requests = num_outstanding_requests
 
@@ -1148,14 +1159,14 @@
     self.mox.StubOutWithMock(self.servr, '_create_url_handlers')
     self.mox.StubOutWithMock(self.servr._module_configuration,
                              'check_for_updates')
-    self.mox.StubOutWithMock(self.servr._watcher, 'has_changes')
+    self.mox.StubOutWithMock(self.servr._watcher, 'changes')
 
   def tearDown(self):
     self.mox.UnsetStubs()
 
   def test_no_changes(self):
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.servr._maybe_restart_instances(config_changed=False,
                                         file_changed=False)
     self.mox.ReplayAll()
@@ -1164,7 +1175,7 @@
 
   def test_irrelevant_config_change(self):
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.servr._maybe_restart_instances(config_changed=False,
                                         file_changed=False)
 
@@ -1175,7 +1186,7 @@
   def test_restart_config_change(self):
     conf_change = frozenset([application_configuration.ENV_VARIABLES_CHANGED])
     self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.instance_factory.configuration_changed(conf_change)
     self.servr._maybe_restart_instances(config_changed=True, file_changed=False)
 
@@ -1186,7 +1197,7 @@
   def test_handler_change(self):
     conf_change = frozenset([application_configuration.HANDLERS_CHANGED])
     self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.servr._create_url_handlers()
     self.instance_factory.configuration_changed(conf_change)
     self.servr._maybe_restart_instances(config_changed=True, file_changed=False)
@@ -1197,7 +1208,7 @@
 
   def test_file_change(self):
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(True)
+    self.servr._watcher.changes().AndReturn({'-'})
     self.instance_factory.files_changed()
     self.servr._maybe_restart_instances(config_changed=False, file_changed=True)
 
@@ -1416,6 +1427,7 @@
   """Tests for module.ManualScalingModule._add_instance."""
 
   class WsgiServer(object):
+
     def __init__(self, port):
       self.port = port
 
@@ -1449,7 +1461,7 @@
 
   def test_add_with_health_checks(self):
     servr = ManualScalingModuleFacade(instance_factory=self.factory)
-    servr.vm_config = runtime_config_pb2.VMConfig(docker_daemon_url='unused')
+    servr.vm_config = runtime_config_pb2.VMConfig()
     servr.module_configuration.runtime = 'vm'
     servr.module_configuration.vm_health_check = appinfo.VmHealthCheck(
         enable_health_check=True)
@@ -1697,8 +1709,9 @@
     self.mox.StubOutWithMock(self.manual_module, '_error_response')
 
     self.manual_module._choose_instance(10.0).WithSideEffects(advance_time)
-    self.manual_module._error_response(self.environ, self.start_response,
-                                       503).AndReturn(self.response)
+    self.manual_module._error_response(
+        self.environ, self.start_response, 503, mox.IgnoreArg()).AndReturn(
+            self.response)
     self.mox.ReplayAll()
     self.assertEqual(
         self.response,
@@ -1714,6 +1727,7 @@
   """Tests for module.ManualScalingModule._choose_instance."""
 
   class Instance(object):
+
     def __init__(self, can_accept_requests):
       self.can_accept_requests = can_accept_requests
 
@@ -1909,14 +1923,14 @@
     self.mox.StubOutWithMock(self.servr, '_create_url_handlers')
     self.mox.StubOutWithMock(self.servr._module_configuration,
                              'check_for_updates')
-    self.mox.StubOutWithMock(self.servr._watcher, 'has_changes')
+    self.mox.StubOutWithMock(self.servr._watcher, 'changes')
 
   def tearDown(self):
     self.mox.UnsetStubs()
 
   def test_no_changes(self):
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
 
     self.mox.ReplayAll()
     self.servr._handle_changes()
@@ -1924,7 +1938,7 @@
 
   def test_irrelevant_config_change(self):
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
 
     self.mox.ReplayAll()
     self.servr._handle_changes()
@@ -1933,7 +1947,7 @@
   def test_restart_config_change(self):
     conf_change = frozenset([application_configuration.ENV_VARIABLES_CHANGED])
     self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.instance_factory.configuration_changed(conf_change)
     self.servr.restart()
 
@@ -1944,7 +1958,7 @@
   def test_handler_change(self):
     conf_change = frozenset([application_configuration.HANDLERS_CHANGED])
     self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.servr._create_url_handlers()
     self.instance_factory.configuration_changed(conf_change)
 
@@ -1956,7 +1970,7 @@
 
   def test_file_change(self):
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(True)
+    self.servr._watcher.changes().AndReturn({'-'})
     self.instance_factory.files_changed()
     self.servr.restart()
 
@@ -1968,7 +1982,7 @@
     self.servr._suspended = True
     conf_change = frozenset([application_configuration.ENV_VARIABLES_CHANGED])
     self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.instance_factory.configuration_changed(conf_change)
 
     self.mox.ReplayAll()
@@ -1979,7 +1993,7 @@
     self.servr._suspended = True
     conf_change = frozenset([application_configuration.HANDLERS_CHANGED])
     self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.servr._create_url_handlers()
     self.instance_factory.configuration_changed(conf_change)
 
@@ -1990,7 +2004,7 @@
   def test_file_change_suspended(self):
     self.servr._suspended = True
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(True)
+    self.servr._watcher.changes().AndReturn({'-'})
     self.instance_factory.files_changed()
 
     self.mox.ReplayAll()
@@ -2153,8 +2167,9 @@
     self.mox.StubOutWithMock(self.basic_module, '_error_response')
 
     self.basic_module._choose_instance(20).WithSideEffects(self.advance_time)
-    self.basic_module._error_response(self.environ, self.start_response,
-                                      503).AndReturn(self.response)
+    self.basic_module._error_response(
+        self.environ, self.start_response, 503, mox.IgnoreArg()).AndReturn(
+            self.response)
 
     self.mox.ReplayAll()
     self.assertEqual(
@@ -2272,6 +2287,7 @@
   """Tests for module.BasicScalingModule._choose_instance."""
 
   class Instance(object):
+
     def __init__(self, can_accept_requests):
       self.can_accept_requests = can_accept_requests
 
@@ -2422,14 +2438,14 @@
     self.mox.StubOutWithMock(self.servr, '_create_url_handlers')
     self.mox.StubOutWithMock(self.servr._module_configuration,
                              'check_for_updates')
-    self.mox.StubOutWithMock(self.servr._watcher, 'has_changes')
+    self.mox.StubOutWithMock(self.servr._watcher.__class__, 'changes')
 
   def tearDown(self):
     self.mox.UnsetStubs()
 
   def test_no_changes(self):
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
 
     self.mox.ReplayAll()
     self.servr._handle_changes()
@@ -2437,7 +2453,7 @@
 
   def test_irrelevant_config_change(self):
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
 
     self.mox.ReplayAll()
     self.servr._handle_changes()
@@ -2446,7 +2462,7 @@
   def test_restart_config_change(self):
     conf_change = frozenset([application_configuration.ENV_VARIABLES_CHANGED])
     self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.instance_factory.configuration_changed(conf_change)
     self.servr.restart()
 
@@ -2457,7 +2473,7 @@
   def test_handler_change(self):
     conf_change = frozenset([application_configuration.HANDLERS_CHANGED])
     self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
-    self.servr._watcher.has_changes().AndReturn(False)
+    self.servr._watcher.changes().AndReturn(set())
     self.servr._create_url_handlers()
     self.instance_factory.configuration_changed(conf_change)
     self.servr.restart()
@@ -2468,7 +2484,7 @@
 
   def test_file_change(self):
     self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
-    self.servr._watcher.has_changes().AndReturn(True)
+    self.servr._watcher.changes().AndReturn({'-'})
     self.instance_factory.files_changed().AndReturn(True)
     self.servr.restart()
 
@@ -2478,6 +2494,7 @@
 
 
 class TestInteractiveCommandModule(unittest.TestCase):
+
   def setUp(self):
     api_server.test_setup_stubs()
 
@@ -2623,20 +2640,24 @@
     self.mox.VerifyAll()
 
   def test_handle_script_request_timeout(self):
-    self.servr._MAX_REQUEST_WAIT_TIME = 0
-    start_response = start_response_utils.CapturingStartResponse()
+    old_timeout = self.servr._MAX_REQUEST_WAIT_TIME
+    try:
+      self.servr._MAX_REQUEST_WAIT_TIME = 0
+      start_response = start_response_utils.CapturingStartResponse()
 
-    self.mox.ReplayAll()
-    self.assertEqual(
-        ['The command timed-out while waiting for another one to complete'],
-        self.servr._handle_script_request(self.environ,
-                                          start_response,
-                                          self.url_map,
-                                          self.match,
-                                          self.request_id))
-    self.mox.VerifyAll()
-    self.assertEqual('503 Service Unavailable',
-                     start_response.status)
+      self.mox.ReplayAll()
+      self.assertEqual(
+          ['The command timed-out while waiting for another one to complete'],
+          self.servr._handle_script_request(self.environ,
+                                            start_response,
+                                            self.url_map,
+                                            self.match,
+                                            self.request_id))
+      self.mox.VerifyAll()
+      self.assertEqual('503 Service Unavailable',
+                       start_response.status)
+    finally:
+      self.servr._MAX_REQUEST_WAIT_TIME = old_timeout
 
   def test_handle_script_request_restart(self):
     def restart_and_raise(*args):
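
The rewritten test_handle_script_request_timeout above wraps its override of _MAX_REQUEST_WAIT_TIME in try/finally so the patched value is restored even if an assertion fails; without it the zero timeout would leak into later tests. The same pattern in isolation, with server_under_test and exercise as hypothetical stand-ins:

    def run_with_zero_timeout(server_under_test, exercise):
      """Runs exercise() with the wait time forced to 0, then restores it."""
      old_timeout = server_under_test._MAX_REQUEST_WAIT_TIME
      try:
        server_under_test._MAX_REQUEST_WAIT_TIME = 0
        exercise()
      finally:
        # Restore even on assertion failure so later tests see the
        # original value.
        server_under_test._MAX_REQUEST_WAIT_TIME = old_timeout
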
diff --git a/google/appengine/tools/devappserver2/mtime_file_watcher.py b/google/appengine/tools/devappserver2/mtime_file_watcher.py
index 20c9002..9f654ba 100644
--- a/google/appengine/tools/devappserver2/mtime_file_watcher.py
+++ b/google/appengine/tools/devappserver2/mtime_file_watcher.py
@@ -22,6 +22,8 @@
 
 from google.appengine.tools.devappserver2 import watcher_common
 
+_MAX_MONITORED_FILES = 10000
+
 
 class MtimeFileWatcher(object):
   """Monitors a directory tree for changes using mtime polling."""
@@ -31,60 +33,61 @@
 
   def __init__(self, directory):
     self._directory = directory
-    self._quit_event = threading.Event()
     self._filename_to_mtime = None
-    self._has_changes = False
-    self._has_changes_lock = threading.Lock()
-    self._watcher_thread = threading.Thread(target=self._watch_changes)
-    self._watcher_thread.daemon = True
+    self._startup_thread = None
+
+  def _first_pass(self):
+    self._filename_to_mtime = (
+        MtimeFileWatcher._generate_filename_to_mtime(self._directory))
 
   def start(self):
     """Start watching a directory for changes."""
-    self._watcher_thread.start()
+    self._startup_thread = threading.Thread(target=self._first_pass)
+    self._startup_thread.start()
 
   def quit(self):
     """Stop watching a directory for changes."""
-    self._quit_event.set()
+    # TODO: interrupt any in-progress crawl and join the startup thread.
 
-  def has_changes(self):
-    """Returns True if the watched directory has changed since the last call.
+  def changes(self):
+    """Returns a set of changed files if the watched directory has changed.
 
+    The changes set is reset at every call.
     start() must be called before this method.
 
     Returns:
-      Returns True if the watched directory has changed since the last call to
-      has_changes or, if has_changes has never been called, since start was
-      called.
+      The set of file paths that changed since the last call to changes
+      or, if changes has never been called, since start was called.
     """
-    with self._has_changes_lock:
-      has_changes = self._has_changes
-      self._has_changes = False
-    return has_changes
+    self._startup_thread.join()
+    old_filename_to_mtime = self._filename_to_mtime
+    self._filename_to_mtime = (
+        MtimeFileWatcher._generate_filename_to_mtime(self._directory))
+    diff_items = set(self._filename_to_mtime.items()).symmetric_difference(
+        old_filename_to_mtime.items())
+    return {k for k, _ in diff_items}
 
-  def _watch_changes(self):
-    while not self._quit_event.wait(1):
-      self._check_for_changes()
+  @staticmethod
+  def _generate_filename_to_mtime(directory):
+    """Records the state of a directory.
 
-  def _check_for_changes(self):
-    if self._has_changed_paths():
-      with self._has_changes_lock:
-        self._has_changes = True
+    Args:
+      directory: the root directory to traverse.
 
-  def _has_changed_paths(self):
-    self._filename_to_mtime, old_filename_to_mtime = (
-        self._generate_filename_to_mtime(), self._filename_to_mtime)
-    return (old_filename_to_mtime is not None and
-            self._filename_to_mtime != old_filename_to_mtime)
-
-  def _generate_filename_to_mtime(self):
+    Returns:
+      A dictionary mapping each subdirectory and file under directory to
+      its modification time; keys are paths, values are epoch timestamps.
+    """
     filename_to_mtime = {}
     num_files = 0
-    for dirname, dirnames, filenames in os.walk(self._directory,
+    for dirname, dirnames, filenames in os.walk(directory,
                                                 followlinks=True):
       watcher_common.skip_ignored_dirs(dirnames)
       filenames = [f for f in filenames if not watcher_common.ignore_file(f)]
       for filename in filenames + dirnames:
-        if num_files == 10000:
+        if num_files == _MAX_MONITORED_FILES:
           warnings.warn(
               'There are too many files in your application for '
               'changes in all of them to be monitored. You may have to '
@@ -94,9 +97,7 @@
         num_files += 1
         path = os.path.join(dirname, filename)
         try:
-          mtime = os.path.getmtime(path)
+          filename_to_mtime[path] = os.path.getmtime(path)
         except (IOError, OSError):
           pass
-        else:
-          filename_to_mtime[path] = mtime
     return filename_to_mtime
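
The watcher no longer answers a boolean: changes() diffs the previous and current {path: mtime} snapshots and returns every path whose entry differs. Taking the symmetric difference of the two item sets covers all three cases at once, because an added, deleted, or re-stamped path contributes a (path, mtime) pair to exactly one snapshot. The core of that diff in isolation:

    def diff_snapshots(old_snapshot, new_snapshot):
      """Returns the paths that differ between two {path: mtime} dicts."""
      diff_items = set(new_snapshot.items()).symmetric_difference(
          old_snapshot.items())
      return {path for path, _ in diff_items}

    # 'b' was modified, 'c' was deleted, 'd' was created; 'a' is unchanged.
    old = {'a': 1.0, 'b': 1.0, 'c': 1.0}
    new = {'a': 1.0, 'b': 2.0, 'd': 1.0}
    assert diff_snapshots(old, new) == {'b', 'c', 'd'}
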
diff --git a/google/appengine/tools/devappserver2/mtime_file_watcher_test.py b/google/appengine/tools/devappserver2/mtime_file_watcher_test.py
index c13a11b..7b74f02 100644
--- a/google/appengine/tools/devappserver2/mtime_file_watcher_test.py
+++ b/google/appengine/tools/devappserver2/mtime_file_watcher_test.py
@@ -16,7 +16,6 @@
 #
 """Tests for google.appengine.tools.devappserver2.mtime_file_watcher."""
 
-import logging
 import os
 import os.path
 import shutil
@@ -27,9 +26,8 @@
 from google.appengine.tools.devappserver2 import mtime_file_watcher
 
 
-class FakeThread(object):
-  def start(self):
-    pass
+def _sync():
+  time.sleep(.1)  # sleep past the filesystem timestamp resolution
 
 
 class TestMtimeFileWatcher(unittest.TestCase):
@@ -39,9 +37,6 @@
     self._directory = tempfile.mkdtemp()  # The watched directory
     self._junk_directory = tempfile.mkdtemp()  # A scrap directory.
     self._watcher = mtime_file_watcher.MtimeFileWatcher(self._directory)
-    self._watcher._watcher_thread = FakeThread()
-    logging.debug('watched directory=%r, junk directory=%r',
-                  self._directory, self._junk_directory)
 
   def tearDown(self):
     self._watcher.quit()
@@ -61,143 +56,158 @@
 
   def test_file_created(self):
     self._watcher.start()
-    self._watcher._has_changed_paths()
-    self._create_file('test')
-    self.assertTrue(self._watcher._has_changed_paths())
+    self._watcher._startup_thread.join()
+    path = self._create_file('test')
+    self.assertEqual(self._watcher.changes(), {path})
 
   def test_file_modified(self):
     path = self._create_file('test')
+    _sync()
     self._watcher.start()
-    self._watcher._has_changed_paths()
-    time.sleep(1)
+    self._watcher._startup_thread.join()
     with open(path, 'w') as f:
       f.write('testing')
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), {path})
 
   def test_file_read(self):
     path = self._create_file('test')
     with open(path, 'w') as f:
       f.write('testing')
     self._watcher.start()
-    self._watcher._has_changed_paths()
+    self._watcher._startup_thread.join()
     with open(path, 'r') as f:
       f.read()
     # Reads should not trigger updates.
-    self.assertFalse(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), set())
 
   def test_file_deleted(self):
     path = self._create_file('test')
     self._watcher.start()
-    self._watcher._has_changed_paths()
+    self._watcher._startup_thread.join()
     os.remove(path)
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), {path})
 
   def test_file_renamed(self):
     source = self._create_file('test')
     target = os.path.join(os.path.dirname(source), 'test2')
     self._watcher.start()
-    self._watcher._has_changed_paths()
+    self._watcher._startup_thread.join()
     os.rename(source, target)
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), {source, target})
 
   def test_create_directory(self):
     self._watcher.start()
-    self._watcher._has_changed_paths()
-    self._create_directory('test')
-    self.assertTrue(self._watcher._has_changed_paths())
+    self._watcher._startup_thread.join()
+    path = self._create_directory('test')
+    self.assertEqual(self._watcher.changes(), {path})
 
   def test_file_created_in_directory(self):
-    self._create_directory('test')
+    dir_path = self._create_directory('test')
+    _sync()
     self._watcher.start()
-    self._watcher._has_changed_paths()
-    self._create_file('test/file')
-    self.assertTrue(self._watcher._has_changed_paths())
+    self._watcher._startup_thread.join()
+    path = self._create_file('test/file')
+    # Note: creating an entry in a directory updates the directory's own
+    # mtime, so reporting dir_path here is expected.
+    self.assertEqual(self._watcher.changes(), {dir_path, path})
 
   def test_move_directory(self):
     source = self._create_directory('test')
     target = os.path.join(os.path.dirname(source), 'test2')
     self._watcher.start()
-    self._watcher._has_changed_paths()
+    self._watcher._startup_thread.join()
     os.rename(source, target)
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), {source, target})
 
   def test_move_directory_out_of_watched(self):
     source = self._create_directory('test')
     target = os.path.join(self._junk_directory, 'test')
     self._watcher.start()
-    self._watcher._has_changed_paths()
+    self._watcher._startup_thread.join()
     os.rename(source, target)
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), {source})
     with open(os.path.join(target, 'file'), 'w'):
       pass
     # Changes to files in subdirectories that have been moved should be ignored.
-    self.assertFalse(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), set())
 
   def test_move_directory_into_watched(self):
     source = os.path.join(self._junk_directory, 'source')
     target = os.path.join(self._directory, 'target')
     os.mkdir(source)
+    _sync()
     self._watcher.start()
-    self._watcher._has_changed_paths()
+    self._watcher._startup_thread.join()
     os.rename(source, target)
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), {target})
     file_path = os.path.join(target, 'file')
     with open(file_path, 'w+'):
       pass
-    self.assertTrue(self._watcher._has_changed_paths())
+    # creating the file also updates target's mtime, so both are reported
+    self.assertEqual(self._watcher.changes(), {target, file_path})
 
   def test_directory_deleted(self):
     path = self._create_directory('test')
+    _sync()
     self._watcher.start()
-    self._watcher._has_changed_paths()
+    self._watcher._startup_thread.join()
     os.rmdir(path)
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), {path})
 
   @unittest.skipUnless(hasattr(os, 'symlink'), 'requires os.symlink')
   def test_symlink(self):
     sym_target = os.path.join(self._directory, 'test')
     os.mkdir(os.path.join(self._junk_directory, 'subdir'))
+    # the junk subdir's path as seen through the symlink
+    sym_subdir_path = os.path.join(sym_target, 'subdir')
+    _sync()
     self._watcher.start()
-    self._watcher._has_changed_paths()
+    self._watcher._startup_thread.join()
 
     # Check that an added symlinked directory is reported.
     os.symlink(self._junk_directory, sym_target)
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(
+        self._watcher.changes(),
+        {sym_target, os.path.join(sym_target, 'subdir')})
 
     # Check that a file added to the symlinked directory is reported.
     with open(os.path.join(self._junk_directory, 'file1'), 'w'):
       pass
-    self.assertTrue(self._watcher._has_changed_paths())
+    sym_file_path = os.path.join(sym_target, 'file1')
+    # creating file1 also updates sym_target's mtime, so both are reported
+    self.assertEqual(
+        self._watcher.changes(), {sym_target, sym_file_path})
 
     # Check that a removed symlinked directory is reported.
     os.remove(sym_target)
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(
+        self._watcher.changes(), {sym_target, sym_file_path, sym_subdir_path})
 
     # Check that a file added to the removed symlinked directory is *not*
     # reported.
     with open(os.path.join(self._junk_directory, 'subdir', 'file2'), 'w'):
       pass
-    self.assertFalse(self._watcher._has_changed_paths())
+    self.assertEqual(self._watcher.changes(), set())
 
   def test_too_many_files(self):
     self._watcher.start()
-    self._watcher._has_changed_paths()
-
+    self._watcher._startup_thread.join()
     for i in range(10001):
       self._create_file('file%d' % i)
-    self.assertTrue(self._watcher._has_changed_paths())
+    self.assertEqual(len(self._watcher.changes()), 10000)
 
   @unittest.skipUnless(hasattr(os, 'symlink'), 'requires os.symlink')
   def test_symlink_loop(self):
     self._watcher.start()
-    self._watcher._has_changed_paths()
+    self._watcher._startup_thread.join()
 
     for i in range(1000):
       self._create_file('file%d' % i)
 
     for i in range(11):
       os.symlink(self._directory, os.path.join(self._directory, 'test%d' % i))
-    self.assertTrue(self._watcher._has_changed_paths())
+    # A symlink loop makes the reported set unpredictable; only the
+    # _MAX_MONITORED_FILES cap on its size is stable.
+    self.assertEqual(len(self._watcher.changes()), 10000)
 
 
 if __name__ == '__main__':
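
The reworked tests all follow the same sequence: start the watcher, join _startup_thread so the initial snapshot is complete, mutate the tree, then assert on the exact set returned by changes(). The _sync() pause is needed only when a path is created before start(), so its mtime falls in an earlier timestamp tick than the first snapshot. A minimal usage sketch along those lines (paths are illustrative):

    import os
    import tempfile

    from google.appengine.tools.devappserver2 import mtime_file_watcher

    root = tempfile.mkdtemp()
    watcher = mtime_file_watcher.MtimeFileWatcher(root)
    watcher.start()
    watcher._startup_thread.join()  # wait for the first full snapshot

    path = os.path.join(root, 'app.yaml')
    with open(path, 'w') as f:
      f.write('runtime: python27\n')

    # Only the new file is reported; the watched root itself is never an
    # entry in the snapshot, so its mtime change goes unrecorded.
    assert watcher.changes() == set([path])
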
diff --git a/google/appengine/tools/devappserver2/php/runtime.py b/google/appengine/tools/devappserver2/php/runtime.py
index bd5d039..1ac84ae 100644
--- a/google/appengine/tools/devappserver2/php/runtime.py
+++ b/google/appengine/tools/devappserver2/php/runtime.py
@@ -17,6 +17,7 @@
 """A PHP devappserver2 runtime."""
 
 
+
 import base64
 import cStringIO
 import httplib
@@ -39,7 +40,7 @@
 from google.appengine.tools.devappserver2 import wsgi_server
 
 SDK_PATH = os.path.abspath(
-    os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'php/sdk'))
+os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'php/sdk'))
 
 
 if not os.path.exists(SDK_PATH):
diff --git a/google/appengine/tools/devappserver2/php_runtime.py b/google/appengine/tools/devappserver2/php_runtime.py
index 2891815..c0faf8a 100644
--- a/google/appengine/tools/devappserver2/php_runtime.py
+++ b/google/appengine/tools/devappserver2/php_runtime.py
@@ -17,6 +17,7 @@
 """Serves content for "script" handlers using the PHP runtime."""
 
 
+
 import cgi
 import logging
 import os
diff --git a/google/appengine/tools/devappserver2/python/request_handler.py b/google/appengine/tools/devappserver2/python/request_handler.py
index 5276a96..9110afb 100644
--- a/google/appengine/tools/devappserver2/python/request_handler.py
+++ b/google/appengine/tools/devappserver2/python/request_handler.py
@@ -21,6 +21,7 @@
 """
 
 
+
 import cStringIO
 import os
 import sys
diff --git a/google/appengine/tools/devappserver2/python/runtime.py b/google/appengine/tools/devappserver2/python/runtime.py
index c9a1f59..cc98288 100644
--- a/google/appengine/tools/devappserver2/python/runtime.py
+++ b/google/appengine/tools/devappserver2/python/runtime.py
@@ -17,6 +17,7 @@
 """A Python devappserver2 runtime."""
 
 
+
 import os
 import sys
 import time
diff --git a/google/appengine/tools/devappserver2/python/runtime_test.py b/google/appengine/tools/devappserver2/python/runtime_test.py
index 31f564c..fad23a1 100644
--- a/google/appengine/tools/devappserver2/python/runtime_test.py
+++ b/google/appengine/tools/devappserver2/python/runtime_test.py
@@ -17,6 +17,7 @@
 """Tests for google.appengine.tools.devappserver2.python.runtime."""
 
 
+
 import unittest
 
 import google
diff --git a/google/appengine/tools/devappserver2/python/sandbox.py b/google/appengine/tools/devappserver2/python/sandbox.py
index 7a17f19..7a51eaf 100644
--- a/google/appengine/tools/devappserver2/python/sandbox.py
+++ b/google/appengine/tools/devappserver2/python/sandbox.py
@@ -17,6 +17,7 @@
 """A sandbox implementation that emulates production App Engine."""
 
 
+
 import __builtin__
 import imp
 import os
diff --git a/google/appengine/tools/devappserver2/python/sandbox_test.py b/google/appengine/tools/devappserver2/python/sandbox_test.py
index 93e9584..2c260fe 100644
--- a/google/appengine/tools/devappserver2/python/sandbox_test.py
+++ b/google/appengine/tools/devappserver2/python/sandbox_test.py
@@ -17,6 +17,7 @@
 """Tests for google.appengine.tools.devappserver2.python.sandbox."""
 
 
+
 import __builtin__
 import imp
 import os
diff --git a/google/appengine/tools/devappserver2/python/stubs.py b/google/appengine/tools/devappserver2/python/stubs.py
index aa426a3..6b2db5f 100644
--- a/google/appengine/tools/devappserver2/python/stubs.py
+++ b/google/appengine/tools/devappserver2/python/stubs.py
@@ -17,6 +17,7 @@
 """Stub implementations of restricted functions."""
 
 
+
 import errno
 import functools
 import inspect
diff --git a/google/appengine/tools/devappserver2/python/stubs_test.py b/google/appengine/tools/devappserver2/python/stubs_test.py
index fb6c593..9a6681c 100644
--- a/google/appengine/tools/devappserver2/python/stubs_test.py
+++ b/google/appengine/tools/devappserver2/python/stubs_test.py
@@ -17,6 +17,7 @@
 """Tests for google.appengine.tools.devappserver2.python.stubs."""
 
 
+
 import errno
 import locale
 import mimetypes
diff --git a/google/appengine/tools/devappserver2/python_runtime.py b/google/appengine/tools/devappserver2/python_runtime.py
index 86fb637..5b9f4af 100644
--- a/google/appengine/tools/devappserver2/python_runtime.py
+++ b/google/appengine/tools/devappserver2/python_runtime.py
@@ -17,6 +17,7 @@
 """Serves content for "script" handlers using the Python runtime."""
 
 
+
 import os
 import sys
 
diff --git a/google/appengine/tools/devappserver2/request_rewriter.py b/google/appengine/tools/devappserver2/request_rewriter.py
index e46d128..388a1e5 100644
--- a/google/appengine/tools/devappserver2/request_rewriter.py
+++ b/google/appengine/tools/devappserver2/request_rewriter.py
@@ -30,6 +30,7 @@
 """
 
 
+
 import calendar
 import cStringIO
 import email
@@ -183,6 +184,8 @@
       state.headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'
 
 
+
+
   if 'Set-Cookie' in state.headers:
     # It is a security risk to have any caching with Set-Cookie.
     # If Expires is omitted or set to a future date, and response code is
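
For context on the Set-Cookie branch above: letting a shared cache store a response that sets a cookie risks replaying one user's session cookie to another, so such responses are forced to be uncacheable. A hedged sketch of that rule over a plain header dict (the real rewriter works on its own state object and handles more cases):

    def uncache_cookie_responses(headers):
      """Forces responses that set cookies to be uncacheable."""
      if 'Set-Cookie' in headers:
        # A cached Set-Cookie response could hand one user's cookie to
        # another, so expire it in the past and forbid caching outright.
        headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'
        headers['Cache-Control'] = 'no-cache'
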
diff --git a/google/appengine/tools/devappserver2/request_rewriter_test.py b/google/appengine/tools/devappserver2/request_rewriter_test.py
index 1dda5f0..6c72f43 100644
--- a/google/appengine/tools/devappserver2/request_rewriter_test.py
+++ b/google/appengine/tools/devappserver2/request_rewriter_test.py
@@ -17,6 +17,7 @@
 """Tests for devappserver2.request_rewriter."""
 
 
+
 import functools
 import sys
 import time
diff --git a/google/appengine/tools/devappserver2/runtime_config_pb2.py b/google/appengine/tools/devappserver2/runtime_config_pb2.py
index b63767b..0660743 100644
--- a/google/appengine/tools/devappserver2/runtime_config_pb2.py
+++ b/google/appengine/tools/devappserver2/runtime_config_pb2.py
@@ -19,6 +19,7 @@
 
 import sys
 _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+import google
 from google.net.proto2.python.public import descriptor as _descriptor
 from google.net.proto2.python.public import message as _message
 from google.net.proto2.python.public import reflection as _reflection
@@ -34,7 +35,7 @@
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='apphosting/tools/devappserver2/runtime_config.proto',
   package='apphosting.tools.devappserver2',
-  serialized_pb=_b('\n3apphosting/tools/devappserver2/runtime_config.proto\x12\x1e\x61pphosting.tools.devappserver2\"\x85\x06\n\x06\x43onfig\x12\x0e\n\x06\x61pp_id\x18\x01 \x02(\x0c\x12\x12\n\nversion_id\x18\x02 \x02(\x0c\x12\x18\n\x10\x61pplication_root\x18\x03 \x02(\x0c\x12\x19\n\nthreadsafe\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x08\x61pi_host\x18\x11 \x01(\t:\tlocalhost\x12\x10\n\x08\x61pi_port\x18\x05 \x02(\x05\x12:\n\tlibraries\x18\x06 \x03(\x0b\x32\'.apphosting.tools.devappserver2.Library\x12\x16\n\nskip_files\x18\x07 \x01(\t:\x02^$\x12\x18\n\x0cstatic_files\x18\x08 \x01(\t:\x02^$\x12\x43\n\rpython_config\x18\x0e \x01(\x0b\x32,.apphosting.tools.devappserver2.PythonConfig\x12=\n\nphp_config\x18\t \x01(\x0b\x32).apphosting.tools.devappserver2.PhpConfig\x12?\n\x0bjava_config\x18\x15 \x01(\x0b\x32*.apphosting.tools.devappserver2.JavaConfig\x12\x38\n\x07\x65nviron\x18\n \x03(\x0b\x32\'.apphosting.tools.devappserver2.Environ\x12\x42\n\x10\x63loud_sql_config\x18\x0b \x01(\x0b\x32(.apphosting.tools.devappserver2.CloudSQL\x12\x12\n\ndatacenter\x18\x0c \x02(\t\x12\x13\n\x0binstance_id\x18\r \x02(\t\x12\x1b\n\x10stderr_log_level\x18\x0f \x01(\x03:\x01\x31\x12\x13\n\x0b\x61uth_domain\x18\x10 \x02(\t\x12\x15\n\rmax_instances\x18\x12 \x01(\x05\x12;\n\tvm_config\x18\x13 \x01(\x0b\x32(.apphosting.tools.devappserver2.VMConfig\x12\x13\n\x0bserver_port\x18\x14 \x01(\x05\"A\n\tPhpConfig\x12\x1b\n\x13php_executable_path\x18\x01 \x01(\x0c\x12\x17\n\x0f\x65nable_debugger\x18\x03 \x02(\x08\"<\n\x0cPythonConfig\x12\x16\n\x0estartup_script\x18\x01 \x01(\t\x12\x14\n\x0cstartup_args\x18\x02 \x01(\t\"\x1e\n\nJavaConfig\x12\x10\n\x08jvm_args\x18\x01 \x03(\t\"t\n\x08\x43loudSQL\x12\x12\n\nmysql_host\x18\x01 \x02(\t\x12\x12\n\nmysql_port\x18\x02 \x02(\x05\x12\x12\n\nmysql_user\x18\x03 \x02(\t\x12\x16\n\x0emysql_password\x18\x04 \x02(\t\x12\x14\n\x0cmysql_socket\x18\x05 \x01(\t\"(\n\x07Library\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\t\"%\n\x07\x45nviron\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x02(\x0c\"f\n\x08VMConfig\x12\x19\n\x11\x64ocker_daemon_url\x18\x01 \x01(\t\x12?\n\x0b\x64\x61rt_config\x18\x02 \x01(\x0b\x32*.apphosting.tools.devappserver2.DartConfig\"o\n\nDartConfig\x12\x10\n\x08\x64\x61rt_sdk\x18\x01 \x01(\t\x12\x15\n\rdart_dev_mode\x18\x02 \x01(\t\x12\x1b\n\x13\x64\x61rt_pub_serve_host\x18\x03 \x01(\t\x12\x1b\n\x13\x64\x61rt_pub_serve_port\x18\x04 \x01(\x05\x42\x32\n,com.google.appengine.tools.development.proto \x02P\x01')
+  serialized_pb=_b('\n3apphosting/tools/devappserver2/runtime_config.proto\x12\x1e\x61pphosting.tools.devappserver2\"\x85\x06\n\x06\x43onfig\x12\x0e\n\x06\x61pp_id\x18\x01 \x02(\x0c\x12\x12\n\nversion_id\x18\x02 \x02(\x0c\x12\x18\n\x10\x61pplication_root\x18\x03 \x02(\x0c\x12\x19\n\nthreadsafe\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x08\x61pi_host\x18\x11 \x01(\t:\tlocalhost\x12\x10\n\x08\x61pi_port\x18\x05 \x02(\x05\x12:\n\tlibraries\x18\x06 \x03(\x0b\x32\'.apphosting.tools.devappserver2.Library\x12\x16\n\nskip_files\x18\x07 \x01(\t:\x02^$\x12\x18\n\x0cstatic_files\x18\x08 \x01(\t:\x02^$\x12\x43\n\rpython_config\x18\x0e \x01(\x0b\x32,.apphosting.tools.devappserver2.PythonConfig\x12=\n\nphp_config\x18\t \x01(\x0b\x32).apphosting.tools.devappserver2.PhpConfig\x12?\n\x0bjava_config\x18\x15 \x01(\x0b\x32*.apphosting.tools.devappserver2.JavaConfig\x12\x38\n\x07\x65nviron\x18\n \x03(\x0b\x32\'.apphosting.tools.devappserver2.Environ\x12\x42\n\x10\x63loud_sql_config\x18\x0b \x01(\x0b\x32(.apphosting.tools.devappserver2.CloudSQL\x12\x12\n\ndatacenter\x18\x0c \x02(\t\x12\x13\n\x0binstance_id\x18\r \x02(\t\x12\x1b\n\x10stderr_log_level\x18\x0f \x01(\x03:\x01\x31\x12\x13\n\x0b\x61uth_domain\x18\x10 \x02(\t\x12\x15\n\rmax_instances\x18\x12 \x01(\x05\x12;\n\tvm_config\x18\x13 \x01(\x0b\x32(.apphosting.tools.devappserver2.VMConfig\x12\x13\n\x0bserver_port\x18\x14 \x01(\x05\"A\n\tPhpConfig\x12\x1b\n\x13php_executable_path\x18\x01 \x01(\x0c\x12\x17\n\x0f\x65nable_debugger\x18\x03 \x02(\x08\"<\n\x0cPythonConfig\x12\x16\n\x0estartup_script\x18\x01 \x01(\t\x12\x14\n\x0cstartup_args\x18\x02 \x01(\t\"\x1e\n\nJavaConfig\x12\x10\n\x08jvm_args\x18\x01 \x03(\t\"t\n\x08\x43loudSQL\x12\x12\n\nmysql_host\x18\x01 \x02(\t\x12\x12\n\nmysql_port\x18\x02 \x02(\x05\x12\x12\n\nmysql_user\x18\x03 \x02(\t\x12\x16\n\x0emysql_password\x18\x04 \x02(\t\x12\x14\n\x0cmysql_socket\x18\x05 \x01(\t\"(\n\x07Library\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\t\"%\n\x07\x45nviron\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x02(\x0c\"{\n\x08VMConfig\x12\x19\n\x11\x64ocker_daemon_url\x18\x01 \x01(\t\x12?\n\x0b\x64\x61rt_config\x18\x02 \x01(\x0b\x32*.apphosting.tools.devappserver2.DartConfig\x12\x13\n\x0b\x65nable_logs\x18\x03 \x01(\x08\"o\n\nDartConfig\x12\x10\n\x08\x64\x61rt_sdk\x18\x01 \x01(\t\x12\x15\n\rdart_dev_mode\x18\x02 \x01(\t\x12\x1b\n\x13\x64\x61rt_pub_serve_host\x18\x03 \x01(\t\x12\x1b\n\x13\x64\x61rt_pub_serve_port\x18\x04 \x01(\x05\x42\x32\n,com.google.appengine.tools.development.proto \x02P\x01')
 )
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
@@ -468,6 +469,13 @@
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
+    _descriptor.FieldDescriptor(
+      name='enable_logs', full_name='apphosting.tools.devappserver2.VMConfig.enable_logs', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
   ],
   extensions=[
   ],
@@ -480,7 +488,7 @@
   oneofs=[
   ],
   serialized_start=1223,
-  serialized_end=1325,
+  serialized_end=1346,
 )
 
 
@@ -530,8 +538,8 @@
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1327,
-  serialized_end=1438,
+  serialized_start=1348,
+  serialized_end=1459,
 )
 
 _CONFIG.fields_by_name['libraries'].message_type = _LIBRARY
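
The regenerated descriptor adds a single optional bool, enable_logs (field number 3), to VMConfig, which is why the serialized bytes and the serialized_start/serialized_end offsets shift. Setting the new field follows the usual proto2 pattern:

    from google.appengine.tools.devappserver2 import runtime_config_pb2

    vm_config = runtime_config_pb2.VMConfig()
    vm_config.enable_logs = True  # the new optional bool, field number 3
    assert vm_config.HasField('enable_logs')
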
diff --git a/google/appengine/tools/devappserver2/shutdown.py b/google/appengine/tools/devappserver2/shutdown.py
index 33f1cc9..4cca09d 100644
--- a/google/appengine/tools/devappserver2/shutdown.py
+++ b/google/appengine/tools/devappserver2/shutdown.py
@@ -17,6 +17,7 @@
 """Helper functions to quit the development server."""
 
 
+
 import logging
 import os
 import signal
diff --git a/google/appengine/tools/devappserver2/start_response_utils.py b/google/appengine/tools/devappserver2/start_response_utils.py
index 821fbac..05bb574 100644
--- a/google/appengine/tools/devappserver2/start_response_utils.py
+++ b/google/appengine/tools/devappserver2/start_response_utils.py
@@ -17,6 +17,7 @@
 """Implementations of start_response callables as defined in PEP-333."""
 
 
+
 import cStringIO
 
 
diff --git a/google/appengine/tools/devappserver2/start_response_utils_test.py b/google/appengine/tools/devappserver2/start_response_utils_test.py
index 0a4ece2..e101555 100644
--- a/google/appengine/tools/devappserver2/start_response_utils_test.py
+++ b/google/appengine/tools/devappserver2/start_response_utils_test.py
@@ -17,6 +17,7 @@
 """Tests for google.apphosting.tools.devappserver2.start_response_utils."""
 
 
+
 import unittest
 
 from google.appengine.tools.devappserver2 import start_response_utils
diff --git a/google/appengine/tools/devappserver2/static_files_handler.py b/google/appengine/tools/devappserver2/static_files_handler.py
index 7cf0458..fc0ab5b 100644
--- a/google/appengine/tools/devappserver2/static_files_handler.py
+++ b/google/appengine/tools/devappserver2/static_files_handler.py
@@ -17,6 +17,7 @@
 """Serves static content for "static_dir" and "static_files" handlers."""
 
 
+
 import base64
 import errno
 import httplib