App Engine Python SDK version 1.9.0

git-svn-id: http://googleappengine.googlecode.com/svn/trunk/python@411 80f5ef21-4148-0410-bacc-cfb02402ada8
diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index 5db6916..4fee26b 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -3,6 +3,65 @@
 
 App Engine SDK - Release Notes
 
+Version 1.9.0
+
+All
+==============================
+- Modules Service API and application management features are now GA.
+- New App Engine Application Identifiers must now start with a letter,
+  in addition to the existing requirements that the identifier be 6-30
+  characters which are letters, numbers, and hyphens, and not start or end with
+  a hyphen.
+
+Python
+==============================
+- The size limit on the Search API is now computed and enforced on a per-index
+  basis, rather than for the app as a whole. The per-index limit is now 10GB.
+  There is no fixed limit on the number of indexes, or on the total amount of
+  Search API storage an application may use.
+- Newly created App Engine applications now have a Google Cloud Storage bucket
+  created automatically as part of the same Google Cloud Project. For more
+  information please see:
+  https://developers.google.com/appengine/docs/python/
+  googlecloudstorageclient/activate#Using_the_default_Gcs_bucket
+- Users now have the ability to embed images in emails via the Content-Id
+  attachment header.
+    https://code.google.com/p/googleappengine/issues/detail?id=965
+    https://code.google.com/p/googleappengine/issues/detail?id=10503
+- Removed deprecated modules.py methods start_module, start_module_async,
+  stop_module, stop_module_async, get_modules_async, get_versions_async,
+  get_default_version_async, get_num_instances_async, and get_hostname_async.
+- Fixed an issue with NDB backup/restore corrupting certain compressed
+  entities.
+    https://code.google.com/p/googleappengine/issues/detail?id=8599
+
+PHP
+==============================
+- The PHP interpreter has been upgraded from PHP 5.4.19 to PHP 5.4.22.
+- Autoloading is now available in the SDK so developers will no longer need to
+  explicitly require SDK files.
+- Expanded php.ini setting google_appengine.allow_include_gs_buckets to allow
+  a path filter to be included for improved security.
+- Newly created App Engine applications now have a Google Cloud Storage bucket
+  created automatically as part of the same Google Cloud Project. For more
+  information please see:
+  https://developers.google.com/appengine/docs/php/googlestorage/
+  #before_you_begin
+- An "E_USER_WARNING" warning message is now triggered if an application moves
+  a user-uploaded file to a Google Cloud Storage bucket/path. This is because
+  the moved file may later be included as code, leading to a local file
+  inclusion vulnerability.
+- Added API functions CloudStorageTools::getMetadata() and
+  CloudStorageTools::getContentType() for retrieving the metadata and content
+  type of Google Cloud Storage objects.
+    https://code.google.com/p/googleappengine/issues/detail?id=10182
+- Fixed an issue with empty Google Cloud Storage folders not displaying
+  correctly in the Developers Console. New folders are now created using the
+  "/" suffix instead of "_$folder_" so that they are displayed correctly.
+- Fixed an issue with PHP_SELF and SCRIPT_NAME not being implemented correctly.
+    https://code.google.com/p/googleappengine/issues/detail?id=9989
+    https://code.google.com/p/googleappengine/issues/detail?id=10478
+
 Version 1.8.9
 
 All
diff --git a/VERSION b/VERSION
index 99302d0..4e88d5a 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
-release: "1.8.9"
-timestamp: 1386206330
+release: "1.9.0"
+timestamp: 1389815587
 api_versions: ['1']
 supported_api_versions:
   python:
diff --git a/_php_runtime.py b/_php_runtime.py
index 483d3ed..c47660d 100644
--- a/_php_runtime.py
+++ b/_php_runtime.py
@@ -21,151 +21,27 @@
 import sys
 
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.7.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 7):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.7.\n' % version_tuple)
-  sys.exit(1)
 
 
-def _get_dir_path(sibling):
-  """Get a path to the directory of this script.
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-  By default, the canonical path (symlinks resolved) will be returned. In some
-  environments the canonical directory is not sufficient because different
-  parts of the SDK are referenced by symlinks, including this very module's
-  file. In this case, the non-canonical path to this file's directory will be
-  returned (i.e., the directory where the symlink lives, not the directory
-  where it points).
+  import wrapper_util
 
-  Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
-    that is potentially symlinked into the parent directory.
+finally:
+  sys.path = sys_path
 
-  Returns:
-    A directory name.
+wrapper_util.reject_old_python_versions((2, 7))
 
-  Raises:
-    ValueError: If no proper path could be determined.
-  """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
-
-
-
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+_DIR_PATH = wrapper_util.get_dir_path(__file__, os.path.join('lib', 'ipaddr'))
+_PATHS = wrapper_util.Paths(_DIR_PATH)
 
 
 
 
 
-
-
-
-
-_DIR_PATH = _get_dir_path(os.path.join('lib', 'ipaddr'))
-_SCRIPT_DIR = os.path.join(_DIR_PATH, 'google', 'appengine', 'tools')
-_DEVAPPSERVER2_DIR = os.path.join(
-    _DIR_PATH, 'google', 'appengine', 'tools', 'devappserver2')
-_PHP_RUNTIME_DIR = os.path.join(_DEVAPPSERVER2_DIR, 'php')
-_PYTHON_RUNTIME_DIR = os.path.join(_DEVAPPSERVER2_DIR, 'python')
-
-_STUB_DEPENDENCIES = [
-    os.path.join(_DIR_PATH, 'lib', 'antlr3'),
-    os.path.join(_DIR_PATH, 'lib', 'fancy_urllib'),
-    os.path.join(_DIR_PATH, 'lib', 'ipaddr'),
-    os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
-    os.path.join(_DIR_PATH, 'lib', 'rsa'),
-    os.path.join(_DIR_PATH, 'lib', 'pyasn1'),
-    os.path.join(_DIR_PATH, 'lib', 'pyasn1_modules'),
-    ]
-
-
-
-
-
-EXTRA_PATHS = _STUB_DEPENDENCIES + [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'simplejson'),
-
-
-    os.path.join(_DIR_PATH, 'lib', 'django-1.4'),
-    os.path.join(_DIR_PATH, 'lib', 'endpoints-1.0'),
-    os.path.join(_DIR_PATH, 'lib', 'jinja2-2.6'),
-    os.path.join(_DIR_PATH, 'lib', 'protorpc-1.0'),
-    os.path.join(_DIR_PATH, 'lib', 'PyAMF-0.6.1'),
-    os.path.join(_DIR_PATH, 'lib', 'markupsafe-0.15'),
-    os.path.join(_DIR_PATH, 'lib', 'webob-1.2.3'),
-    os.path.join(_DIR_PATH, 'lib', 'webapp2-2.5.2'),
-    ]
-
-_DEVAPPSERVER2_PATHS = _STUB_DEPENDENCIES + [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'concurrent'),
-    os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
-    os.path.join(_DIR_PATH, 'lib', 'jinja2-2.6'),
-    os.path.join(_DIR_PATH, 'lib', 'webob-1.2.3'),
-    os.path.join(_DIR_PATH, 'lib', 'webapp2-2.5.1'),
-    ]
-
-_PHP_RUNTIME_PATHS = [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'concurrent'),
-    os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
-    os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
-    ]
-
-_PYTHON_RUNTIME_PATHS = [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'concurrent'),
-    os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
-    os.path.join(_DIR_PATH, 'lib', 'fancy_urllib'),
-    os.path.join(_DIR_PATH, 'lib', 'protorpc-1.0'),
-    os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
-    ]
-
-
-_BOOTSTAP_NAME_TO_REAL_NAME = {
-    'dev_appserver.py': 'devappserver2.py',
-    '_php_runtime.py': 'runtime.py',
-    '_python_runtime.py': 'runtime.py',
-    }
-
-_SCRIPT_TO_DIR = {
-    'dev_appserver.py': _DEVAPPSERVER2_DIR,
-    '_php_runtime.py': _PHP_RUNTIME_DIR,
-    '_python_runtime.py': _PYTHON_RUNTIME_DIR,
-    }
-
-_SYS_PATH_ADDITIONS = {
-    'dev_appserver.py': _DEVAPPSERVER2_PATHS,
-    '_php_runtime.py': _PHP_RUNTIME_PATHS,
-    '_python_runtime.py': _PYTHON_RUNTIME_PATHS,
-    }
+EXTRA_PATHS = _PATHS.v2_extra_paths
 
 
 def fix_sys_path(extra_extra_paths=()):
@@ -177,10 +53,18 @@
   sys.path[1:1] = EXTRA_PATHS
 
 
-def _run_file(file_path, globals_, script_dir=_SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def _run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
-  sys.path = _SYS_PATH_ADDITIONS[script_name] + sys.path
+
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -191,10 +75,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_dir = _SCRIPT_TO_DIR.get(script_name, script_dir)
-  script_name = _BOOTSTAP_NAME_TO_REAL_NAME.get(script_name, script_name)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/_python_runtime.py b/_python_runtime.py
index 483d3ed..c47660d 100644
--- a/_python_runtime.py
+++ b/_python_runtime.py
@@ -21,151 +21,27 @@
 import sys
 
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.7.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 7):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.7.\n' % version_tuple)
-  sys.exit(1)
 
 
-def _get_dir_path(sibling):
-  """Get a path to the directory of this script.
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-  By default, the canonical path (symlinks resolved) will be returned. In some
-  environments the canonical directory is not sufficient because different
-  parts of the SDK are referenced by symlinks, including this very module's
-  file. In this case, the non-canonical path to this file's directory will be
-  returned (i.e., the directory where the symlink lives, not the directory
-  where it points).
+  import wrapper_util
 
-  Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
-    that is potentially symlinked into the parent directory.
+finally:
+  sys.path = sys_path
 
-  Returns:
-    A directory name.
+wrapper_util.reject_old_python_versions((2, 7))
 
-  Raises:
-    ValueError: If no proper path could be determined.
-  """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
-
-
-
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+_DIR_PATH = wrapper_util.get_dir_path(__file__, os.path.join('lib', 'ipaddr'))
+_PATHS = wrapper_util.Paths(_DIR_PATH)
 
 
 
 
 
-
-
-
-
-_DIR_PATH = _get_dir_path(os.path.join('lib', 'ipaddr'))
-_SCRIPT_DIR = os.path.join(_DIR_PATH, 'google', 'appengine', 'tools')
-_DEVAPPSERVER2_DIR = os.path.join(
-    _DIR_PATH, 'google', 'appengine', 'tools', 'devappserver2')
-_PHP_RUNTIME_DIR = os.path.join(_DEVAPPSERVER2_DIR, 'php')
-_PYTHON_RUNTIME_DIR = os.path.join(_DEVAPPSERVER2_DIR, 'python')
-
-_STUB_DEPENDENCIES = [
-    os.path.join(_DIR_PATH, 'lib', 'antlr3'),
-    os.path.join(_DIR_PATH, 'lib', 'fancy_urllib'),
-    os.path.join(_DIR_PATH, 'lib', 'ipaddr'),
-    os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
-    os.path.join(_DIR_PATH, 'lib', 'rsa'),
-    os.path.join(_DIR_PATH, 'lib', 'pyasn1'),
-    os.path.join(_DIR_PATH, 'lib', 'pyasn1_modules'),
-    ]
-
-
-
-
-
-EXTRA_PATHS = _STUB_DEPENDENCIES + [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'simplejson'),
-
-
-    os.path.join(_DIR_PATH, 'lib', 'django-1.4'),
-    os.path.join(_DIR_PATH, 'lib', 'endpoints-1.0'),
-    os.path.join(_DIR_PATH, 'lib', 'jinja2-2.6'),
-    os.path.join(_DIR_PATH, 'lib', 'protorpc-1.0'),
-    os.path.join(_DIR_PATH, 'lib', 'PyAMF-0.6.1'),
-    os.path.join(_DIR_PATH, 'lib', 'markupsafe-0.15'),
-    os.path.join(_DIR_PATH, 'lib', 'webob-1.2.3'),
-    os.path.join(_DIR_PATH, 'lib', 'webapp2-2.5.2'),
-    ]
-
-_DEVAPPSERVER2_PATHS = _STUB_DEPENDENCIES + [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'concurrent'),
-    os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
-    os.path.join(_DIR_PATH, 'lib', 'jinja2-2.6'),
-    os.path.join(_DIR_PATH, 'lib', 'webob-1.2.3'),
-    os.path.join(_DIR_PATH, 'lib', 'webapp2-2.5.1'),
-    ]
-
-_PHP_RUNTIME_PATHS = [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'concurrent'),
-    os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
-    os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
-    ]
-
-_PYTHON_RUNTIME_PATHS = [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'concurrent'),
-    os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
-    os.path.join(_DIR_PATH, 'lib', 'fancy_urllib'),
-    os.path.join(_DIR_PATH, 'lib', 'protorpc-1.0'),
-    os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
-    ]
-
-
-_BOOTSTAP_NAME_TO_REAL_NAME = {
-    'dev_appserver.py': 'devappserver2.py',
-    '_php_runtime.py': 'runtime.py',
-    '_python_runtime.py': 'runtime.py',
-    }
-
-_SCRIPT_TO_DIR = {
-    'dev_appserver.py': _DEVAPPSERVER2_DIR,
-    '_php_runtime.py': _PHP_RUNTIME_DIR,
-    '_python_runtime.py': _PYTHON_RUNTIME_DIR,
-    }
-
-_SYS_PATH_ADDITIONS = {
-    'dev_appserver.py': _DEVAPPSERVER2_PATHS,
-    '_php_runtime.py': _PHP_RUNTIME_PATHS,
-    '_python_runtime.py': _PYTHON_RUNTIME_PATHS,
-    }
+EXTRA_PATHS = _PATHS.v2_extra_paths
 
 
 def fix_sys_path(extra_extra_paths=()):
@@ -177,10 +53,18 @@
   sys.path[1:1] = EXTRA_PATHS
 
 
-def _run_file(file_path, globals_, script_dir=_SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def _run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
-  sys.path = _SYS_PATH_ADDITIONS[script_name] + sys.path
+
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -191,10 +75,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_dir = _SCRIPT_TO_DIR.get(script_name, script_dir)
-  script_name = _BOOTSTAP_NAME_TO_REAL_NAME.get(script_name, script_name)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/api_server.py b/api_server.py
index 2af168b..de775b6 100644
--- a/api_server.py
+++ b/api_server.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/appcfg.py b/appcfg.py
index 2af168b..de775b6 100644
--- a/appcfg.py
+++ b/appcfg.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/backends_conversion.py b/backends_conversion.py
index 2af168b..de775b6 100644
--- a/backends_conversion.py
+++ b/backends_conversion.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/bulkload_client.py b/bulkload_client.py
index 2af168b..de775b6 100644
--- a/bulkload_client.py
+++ b/bulkload_client.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/bulkloader.py b/bulkloader.py
index 2af168b..de775b6 100644
--- a/bulkloader.py
+++ b/bulkloader.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/demos/php/mod_rewrite/app.yaml b/demos/php/mod_rewrite/app.yaml
new file mode 100644
index 0000000..828f21e
--- /dev/null
+++ b/demos/php/mod_rewrite/app.yaml
@@ -0,0 +1,15 @@
+application: mod_rewrite_simulator
+version: 1
+runtime: php
+api_version: 1
+
+handlers:
+# Example of handler which should be placed above the catch-all handler.
+- url: /downloads
+  static_dir: downloads
+
+# Catch all unhandled requests and pass to mod_rewrite.php which will simulate
+# mod_rewrite by forwarding the requests to index.php?q=... (or other root-level
+# PHP file if specified in incoming URL.
+- url: /.*
+  script: mod_rewrite.php
diff --git a/demos/php/mod_rewrite/index.php b/demos/php/mod_rewrite/index.php
new file mode 100644
index 0000000..5fc775a
--- /dev/null
+++ b/demos/php/mod_rewrite/index.php
@@ -0,0 +1,22 @@
+<?php
+/**
+ * Copyright 2007 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+if ($_GET['q'] == '/help') {
+  echo 'This is some help text.';
+  exit;
+}
+
+echo 'Welcome to the site!';
diff --git a/demos/php/mod_rewrite/mod_rewrite.php b/demos/php/mod_rewrite/mod_rewrite.php
new file mode 100644
index 0000000..e3e44d3
--- /dev/null
+++ b/demos/php/mod_rewrite/mod_rewrite.php
@@ -0,0 +1,50 @@
+<?php
+/**
+ * Copyright 2007 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * @file
+ * Provide basic mod_rewrite like functionality.
+ *
+ * Pass through requests for root php files and forward all other requests to
+ * index.php with $_GET['q'] equal to path. The following are examples that
+ * demonstrate how a request using mod_rewrite.php will appear to a PHP script.
+ *
+ * - /install.php: install.php
+ * - /update.php?op=info: update.php?op=info
+ * - /foo/bar: index.php?q=/foo/bar
+ * - /: index.php?q=/
+ */
+
+$path = parse_url($_SERVER['REQUEST_URI'], PHP_URL_PATH);
+
+// Provide mod_rewrite like functionality. If a php file in the root directory
+// is explicitly requested then load the file, otherwise load index.php and
+// set get variable 'q' to $_SERVER['REQUEST_URI'].
+if (dirname($path) == '/' && pathinfo($path, PATHINFO_EXTENSION) == 'php') {
+  $file = pathinfo($path, PATHINFO_BASENAME);
+} else {
+  $file = 'index.php';
+
+  // Provide mod_rewrite like functionality by using the path which excludes
+  // any other part of the request query (ie. ignores ?foo=bar).
+  $_GET['q'] = $path;
+}
+
+// Override the script name to simulate the behavior without mod_rewrite.php.
+// Ensure that $_SERVER['SCRIPT_NAME'] always begins with a / to be consistent
+// with HTTP request and the value that is normally provided.
+$_SERVER['SCRIPT_NAME'] = '/' . $file;
+require $file;
diff --git a/demos/php/mod_rewrite/other.php b/demos/php/mod_rewrite/other.php
new file mode 100644
index 0000000..1cca0b3
--- /dev/null
+++ b/demos/php/mod_rewrite/other.php
@@ -0,0 +1,17 @@
+<?php
+/**
+ * Copyright 2007 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+echo 'Welcome to the other site.';
diff --git a/dev_appserver.py b/dev_appserver.py
index 483d3ed..c47660d 100644
--- a/dev_appserver.py
+++ b/dev_appserver.py
@@ -21,151 +21,27 @@
 import sys
 
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.7.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 7):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.7.\n' % version_tuple)
-  sys.exit(1)
 
 
-def _get_dir_path(sibling):
-  """Get a path to the directory of this script.
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-  By default, the canonical path (symlinks resolved) will be returned. In some
-  environments the canonical directory is not sufficient because different
-  parts of the SDK are referenced by symlinks, including this very module's
-  file. In this case, the non-canonical path to this file's directory will be
-  returned (i.e., the directory where the symlink lives, not the directory
-  where it points).
+  import wrapper_util
 
-  Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
-    that is potentially symlinked into the parent directory.
+finally:
+  sys.path = sys_path
 
-  Returns:
-    A directory name.
+wrapper_util.reject_old_python_versions((2, 7))
 
-  Raises:
-    ValueError: If no proper path could be determined.
-  """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
-
-
-
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+_DIR_PATH = wrapper_util.get_dir_path(__file__, os.path.join('lib', 'ipaddr'))
+_PATHS = wrapper_util.Paths(_DIR_PATH)
 
 
 
 
 
-
-
-
-
-_DIR_PATH = _get_dir_path(os.path.join('lib', 'ipaddr'))
-_SCRIPT_DIR = os.path.join(_DIR_PATH, 'google', 'appengine', 'tools')
-_DEVAPPSERVER2_DIR = os.path.join(
-    _DIR_PATH, 'google', 'appengine', 'tools', 'devappserver2')
-_PHP_RUNTIME_DIR = os.path.join(_DEVAPPSERVER2_DIR, 'php')
-_PYTHON_RUNTIME_DIR = os.path.join(_DEVAPPSERVER2_DIR, 'python')
-
-_STUB_DEPENDENCIES = [
-    os.path.join(_DIR_PATH, 'lib', 'antlr3'),
-    os.path.join(_DIR_PATH, 'lib', 'fancy_urllib'),
-    os.path.join(_DIR_PATH, 'lib', 'ipaddr'),
-    os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
-    os.path.join(_DIR_PATH, 'lib', 'rsa'),
-    os.path.join(_DIR_PATH, 'lib', 'pyasn1'),
-    os.path.join(_DIR_PATH, 'lib', 'pyasn1_modules'),
-    ]
-
-
-
-
-
-EXTRA_PATHS = _STUB_DEPENDENCIES + [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'simplejson'),
-
-
-    os.path.join(_DIR_PATH, 'lib', 'django-1.4'),
-    os.path.join(_DIR_PATH, 'lib', 'endpoints-1.0'),
-    os.path.join(_DIR_PATH, 'lib', 'jinja2-2.6'),
-    os.path.join(_DIR_PATH, 'lib', 'protorpc-1.0'),
-    os.path.join(_DIR_PATH, 'lib', 'PyAMF-0.6.1'),
-    os.path.join(_DIR_PATH, 'lib', 'markupsafe-0.15'),
-    os.path.join(_DIR_PATH, 'lib', 'webob-1.2.3'),
-    os.path.join(_DIR_PATH, 'lib', 'webapp2-2.5.2'),
-    ]
-
-_DEVAPPSERVER2_PATHS = _STUB_DEPENDENCIES + [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'concurrent'),
-    os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
-    os.path.join(_DIR_PATH, 'lib', 'jinja2-2.6'),
-    os.path.join(_DIR_PATH, 'lib', 'webob-1.2.3'),
-    os.path.join(_DIR_PATH, 'lib', 'webapp2-2.5.1'),
-    ]
-
-_PHP_RUNTIME_PATHS = [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'concurrent'),
-    os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
-    os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
-    ]
-
-_PYTHON_RUNTIME_PATHS = [
-    _DIR_PATH,
-
-    os.path.join(_DIR_PATH, 'lib', 'concurrent'),
-    os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
-    os.path.join(_DIR_PATH, 'lib', 'fancy_urllib'),
-    os.path.join(_DIR_PATH, 'lib', 'protorpc-1.0'),
-    os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
-    ]
-
-
-_BOOTSTAP_NAME_TO_REAL_NAME = {
-    'dev_appserver.py': 'devappserver2.py',
-    '_php_runtime.py': 'runtime.py',
-    '_python_runtime.py': 'runtime.py',
-    }
-
-_SCRIPT_TO_DIR = {
-    'dev_appserver.py': _DEVAPPSERVER2_DIR,
-    '_php_runtime.py': _PHP_RUNTIME_DIR,
-    '_python_runtime.py': _PYTHON_RUNTIME_DIR,
-    }
-
-_SYS_PATH_ADDITIONS = {
-    'dev_appserver.py': _DEVAPPSERVER2_PATHS,
-    '_php_runtime.py': _PHP_RUNTIME_PATHS,
-    '_python_runtime.py': _PYTHON_RUNTIME_PATHS,
-    }
+EXTRA_PATHS = _PATHS.v2_extra_paths
 
 
 def fix_sys_path(extra_extra_paths=()):
@@ -177,10 +53,18 @@
   sys.path[1:1] = EXTRA_PATHS
 
 
-def _run_file(file_path, globals_, script_dir=_SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def _run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
-  sys.path = _SYS_PATH_ADDITIONS[script_name] + sys.path
+
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -191,10 +75,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_dir = _SCRIPT_TO_DIR.get(script_name, script_dir)
-  script_name = _BOOTSTAP_NAME_TO_REAL_NAME.get(script_name, script_name)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/download_appstats.py b/download_appstats.py
index 2af168b..de775b6 100644
--- a/download_appstats.py
+++ b/download_appstats.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/endpointscfg.py b/endpointscfg.py
index 2af168b..de775b6 100644
--- a/endpointscfg.py
+++ b/endpointscfg.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/gen_protorpc.py b/gen_protorpc.py
index 2af168b..de775b6 100644
--- a/gen_protorpc.py
+++ b/gen_protorpc.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/google/appengine/api/app_identity/app_identity_service_pb.py b/google/appengine/api/app_identity/app_identity_service_pb.py
index b9427dc..f21f759 100644
--- a/google/appengine/api/app_identity/app_identity_service_pb.py
+++ b/google/appengine/api/app_identity/app_identity_service_pb.py
@@ -2034,31 +2034,31 @@
         SignForAppRequest,
         SignForAppResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.GetPublicCertificatesForApp.im_func,
         GetPublicCertificateForAppRequest,
         GetPublicCertificateForAppResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.GetServiceAccountName.im_func,
         GetServiceAccountNameRequest,
         GetServiceAccountNameResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.GetAccessToken.im_func,
         GetAccessTokenRequest,
         GetAccessTokenResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.GetDefaultGcsBucketName.im_func,
         GetDefaultGcsBucketNameRequest,
         GetDefaultGcsBucketNameResponse,
         None,
-        'none')
+        'INTEGRITY')
 
 if _extension_runtime:
   pass
diff --git a/google/appengine/api/app_identity/app_identity_stub.py b/google/appengine/api/app_identity/app_identity_stub.py
index 1c204d8..f50776c 100644
--- a/google/appengine/api/app_identity/app_identity_stub.py
+++ b/google/appengine/api/app_identity/app_identity_stub.py
@@ -45,6 +45,12 @@
 except ImportError, e:
   CRYPTO_LIB_INSTALLED = False
 
+try:
+  import rsa
+  RSA_LIB_INSTALLED = True
+except ImportError, e:
+  RSA_LIB_INSTALLED = False
+
 from google.appengine.api import apiproxy_stub
 
 APP_SERVICE_ACCOUNT_NAME = 'test@localhost'
@@ -107,17 +113,31 @@
 
   def _Dynamic_SignForApp(self, request, response):
     """Implementation of AppIdentityService::SignForApp."""
-    if not CRYPTO_LIB_INSTALLED:
+    bytes_to_sign = request.bytes_to_sign()
+    if RSA_LIB_INSTALLED:
+
+
+
+
+      signature_bytes = rsa.pkcs1.sign(
+          bytes_to_sign,
+          rsa.key.PrivateKey(N, E, D, 3, 5),
+          'SHA-256')
+    elif CRYPTO_LIB_INSTALLED:
+
+
+      rsa_obj = RSA.construct((N, E, D))
+      hash_obj = SHA256.new()
+      hash_obj.update(bytes_to_sign)
+      padding_length = MODULUS_BYTES - LEN_OF_PREFIX - LENGTH_OF_SHA256_HASH - 3
+      emsa = (HEADER1 + (PADDING * padding_length) + HEADER2 +
+              PREFIX + hash_obj.hexdigest())
+      sig = rsa_obj.sign(binascii.a2b_hex(emsa), '')
+      signature_bytes = number.long_to_bytes(sig[0])
+    else:
       raise NotImplementedError("""Unable to import the pycrypto module,
                                 SignForApp is disabled.""")
-    rsa_obj = RSA.construct((N, E, D))
-    hashObj = SHA256.new()
-    hashObj.update(request.bytes_to_sign())
-    padding_length = MODULUS_BYTES - LEN_OF_PREFIX - LENGTH_OF_SHA256_HASH - 3
-    emsa = (HEADER1 + (PADDING * padding_length) + HEADER2 +
-            PREFIX + hashObj.hexdigest())
-    sig = rsa_obj.sign(binascii.a2b_hex(emsa), '')
-    response.set_signature_bytes(number.long_to_bytes(sig[0]))
+    response.set_signature_bytes(signature_bytes)
     response.set_key_name(SIGNING_KEY_NAME)
 
   def _Dynamic_GetPublicCertificatesForApp(self, request, response):
diff --git a/google/appengine/api/appinfo.py b/google/appengine/api/appinfo.py
index 6b433af..85bcc2f 100644
--- a/google/appengine/api/appinfo.py
+++ b/google/appengine/api/appinfo.py
@@ -217,6 +217,7 @@
 BASIC_SCALING = 'basic_scaling'
 VM = 'vm'
 VM_SETTINGS = 'vm_settings'
+VM_HEALTH_CHECK = 'vm_health_check'
 VERSION = 'version'
 MAJOR_VERSION = 'major_version'
 MINOR_VERSION = 'minor_version'
@@ -275,6 +276,17 @@
 OFF_ALIASES = ['no', 'n', 'False', 'f', '0', 'false']
 
 
+
+
+ENABLE_HEALTH_CHECK = 'enable_health_check'
+CHECK_INTERVAL_SEC = 'check_interval_sec'
+TIMEOUT_SEC = 'timeout_sec'
+UNHEALTHY_THRESHOLD = 'unhealthy_threshold'
+HEALTHY_THRESHOLD = 'healthy_threshold'
+RESTART_THRESHOLD = 'restart_threshold'
+HOST = 'host'
+
+
 class _VersionedLibrary(object):
   """A versioned library supported by App Engine."""
 
@@ -842,6 +854,8 @@
 
       HandlerTypeMissingAttribute: when the handler is missing a
         required attribute for its handler type.
+
+      MissingHandlerAttribute: when a URL handler is missing an attribute
     """
 
 
@@ -968,6 +982,10 @@
 
   def ErrorOnPositionForAppInfo(self):
     """Raises an error if position is specified outside of AppInclude objects.
+
+    Raises:
+      PositionUsedInAppYamlHandler: when position attribute is specified for an
+      app.yaml file instead of an include.yaml file.
     """
     if self.position:
       raise appinfo_errors.PositionUsedInAppYamlHandler(
@@ -1093,6 +1111,10 @@
     Whenever validate calls iteritems(), it is always called on ATTRIBUTES,
     not on __dict__, so this override is important to ensure that functions
     such as ToYAML() return the correct set of keys.
+
+    Raises:
+      MultipleBuiltinsSpecified: when more than one builtin is defined in a list
+      element.
     """
     if key == 'builtin_name':
       object.__setattr__(self, key, value)
@@ -1156,9 +1178,9 @@
           deprecated in the given runtime.
 
     Raises:
-      InvalidBuiltinFormat if the name of a Builtinhandler object
+      InvalidBuiltinFormat: if the name of a Builtinhandler object
           cannot be determined.
-      DuplicateBuiltinSpecified if a builtin handler name is used
+      DuplicateBuiltinsSpecified: if a builtin handler name is used
           more than once in the list.
     """
     seen = set()
@@ -1334,6 +1356,19 @@
   return appyaml
 
 
+class VmHealthCheck(validation.Validated):
+  """Class representing the configuration of a single library."""
+
+  ATTRIBUTES = {
+      ENABLE_HEALTH_CHECK: validation.Optional(validation.TYPE_BOOL),
+      CHECK_INTERVAL_SEC: validation.Optional(validation.TYPE_INT),
+      TIMEOUT_SEC: validation.Optional(validation.TYPE_INT),
+      UNHEALTHY_THRESHOLD: validation.Optional(validation.TYPE_INT),
+      HEALTHY_THRESHOLD: validation.Optional(validation.TYPE_INT),
+      RESTART_THRESHOLD: validation.Optional(validation.TYPE_INT),
+      HOST: validation.Optional(validation.TYPE_STR)}
+
+
 class AppInclude(validation.Validated):
   """Class representing the contents of an included app.yaml file.
 
@@ -1528,6 +1563,7 @@
       BASIC_SCALING: validation.Optional(BasicScaling),
       VM: validation.Optional(bool),
       VM_SETTINGS: validation.Optional(VmSettings),
+      VM_HEALTH_CHECK: validation.Optional(VmHealthCheck),
       BUILTINS: validation.Optional(validation.Repeated(BuiltinHandler)),
       INCLUDES: validation.Optional(validation.Type(list)),
       HANDLERS: validation.Optional(validation.Repeated(URLMap)),
@@ -1582,6 +1618,8 @@
           and CGI handlers are specified.
       TooManyScalingSettingsError: if more than one scaling settings block is
           present.
+      RuntimeDoesNotSupportLibraries: if libraries clause is used for a runtime
+          that does not support it (e.g. python25).
     """
     super(AppInfoExternal, self).CheckInitialized()
     if not self.handlers and not self.builtins and not self.includes:
@@ -1613,7 +1651,12 @@
           + datastore_auto_ids_url + '\n' + appcfg_auto_ids_url + '\n')
 
     if self.libraries:
-      if self.runtime != 'python27' and not self._skip_runtime_checks:
+      vm_runtime_python27 = (
+          self.runtime == 'vm' and
+          hasattr(self, 'vm_settings') and
+          self.vm_settings['vm_runtime'] == 'python27')
+      if not self._skip_runtime_checks and not (
+          vm_runtime_python27 or self.runtime == 'python27'):
         raise appinfo_errors.RuntimeDoesNotSupportLibraries(
             'libraries entries are only supported by the "python27" runtime')
 
@@ -1711,7 +1754,8 @@
       backend_name: The name of a backend defined in 'backends'.
 
     Raises:
-      BackendNotFound: If the indicated backend was not listed in 'backends'.
+      BackendNotFound: if the indicated backend was not listed in 'backends'.
+      DuplicateBackend: if backend is found more than once in 'backends'.
     """
     if backend_name is None:
       return
@@ -1774,6 +1818,7 @@
     EmptyConfigurationFile: when there are no documents in YAML file.
     MultipleConfigurationFile: when there is more than one document in YAML
     file.
+    DuplicateBackend: if backend is found more than once in 'backends'.
   """
   builder = yaml_object.ObjectBuilder(AppInfoExternal)
   handler = yaml_builder.BuilderHandler(builder)
diff --git a/google/appengine/api/logservice/logservice.py b/google/appengine/api/logservice/logservice.py
index 56b9cb3..aadb38c 100644
--- a/google/appengine/api/logservice/logservice.py
+++ b/google/appengine/api/logservice/logservice.py
@@ -496,13 +496,6 @@
     return self.__pb.app_id()
 
   @property
-  def server_id(self):
-    """Module id that handled this request, as a string."""
-    logging.warning('The server_id property is deprecated, please use '
-                    'the module_id property instead.')
-    return self.__pb.module_id()
-
-  @property
   def module_id(self):
     """Module id that handled this request, as a string."""
     return self.__pb.module_id()
diff --git a/google/appengine/api/mail.py b/google/appengine/api/mail.py
index fca0ce4..1928ff2 100644
--- a/google/appengine/api/mail.py
+++ b/google/appengine/api/mail.py
@@ -656,8 +656,8 @@
 class Attachment(object):
   """Attachment object.
 
-  Subclasses tuple to retain compatibility with existing code. An Attachment
-  object is largely interchangeable with a (filename, payload) tuple.
+  An Attachment object is largely interchangeable with a (filename, payload)
+  tuple.
 
   Note that the behavior is a bit asymmetric with respect to unpacking and
   equality comparison. An Attachment object without a content ID will be
@@ -731,6 +731,15 @@
   def __iter__(self):
     return iter((self.filename, self.payload))
 
+  def __getitem__(self, i):
+    return tuple(iter(self))[i]
+
+  def __contains__(self, val):
+    return val in (self.filename, self.payload)
+
+  def __len__(self):
+    return 2
+
 
 class EncodedPayload(object):
   """Wrapper for a payload that contains encoding information.
@@ -1052,7 +1061,7 @@
     if hasattr(self, 'attachments'):
       for attachment in _attachment_sequence(self.attachments):
         if isinstance(attachment.payload, EncodedPayload):
-          data = data.decode()
+          attachment.payload = attachment.payload.decode()
         protoattachment = message.add_attachment()
         protoattachment.set_filename(_to_str(attachment.filename))
         protoattachment.set_data(_to_str(attachment.payload))
diff --git a/google/appengine/api/modules/modules.py b/google/appengine/api/modules/modules.py
index 02b7eb4..4f482d4 100644
--- a/google/appengine/api/modules/modules.py
+++ b/google/appengine/api/modules/modules.py
@@ -28,25 +28,16 @@
     'get_current_version_name',
     'get_current_instance_id',
     'get_modules',
-    'get_modules_async',
     'get_versions',
-    'get_versions_async',
     'get_default_version',
-    'get_default_version_async',
     'get_num_instances',
-    'get_num_instances_async',
     'set_num_instances',
     'set_num_instances_async',
-    'start_module',
-    'start_module_async',
     'start_version',
     'start_version_async',
-    'stop_module',
-    'stop_module_async',
     'stop_version',
     'stop_version_async',
-    'get_hostname',
-    'get_hostname_async']
+    'get_hostname']
 
 
 import logging
@@ -179,30 +170,6 @@
                         _ResultHook).get_result()
 
 
-def get_modules_async():
-  """Returns a UserRPC whose result contains this application's module names.
-
-  DEPRECATED. Please use get_modules instead.
-
-  Returns:
-    A UserRPC whose result contains a list of strings containing the names
-    of modules associated with this application. The 'default' module will be
-    included if it exists, as will the name of the module that is associated
-    with the instance that calls this function.
-  """
-  logging.warning('The get_modules_async function is deprecated. Please '
-                  'use get_modules instead.')
-  def _ResultHook(rpc):
-    _CheckAsyncResult(rpc, [], {})
-
-
-    return list(rpc.response.module_list())
-
-  request = modules_service_pb.GetModulesRequest()
-  response = modules_service_pb.GetModulesResponse()
-  return _MakeAsyncCall('GetModules', request, response, _ResultHook)
-
-
 def get_versions(module=None):
   """Returns a list of versions for a given module.
 
@@ -236,36 +203,6 @@
                         _ResultHook).get_result()
 
 
-def get_versions_async(module=None):
-  """Returns a UserRPC whose result contains list of versions for a module.
-
-  DEPRECATED. Please use get_versions instead.
-
-  Args:
-    module: Module to retrieve version for, if None then the current module will
-      be used.
-
-  Returns:
-    Returns a UserRPC whose result contains the list of strings containing
-    the names of versions associated with the specified module.
-  """
-  logging.warning('The get_versions_async function is deprecated. Please '
-                  'use get_versions instead.')
-  def _ResultHook(rpc):
-    mapped_errors = [modules_service_pb.ModulesServiceError.INVALID_MODULE,
-                     modules_service_pb.ModulesServiceError.TRANSIENT_ERROR]
-    _CheckAsyncResult(rpc, mapped_errors, {})
-
-
-    return list(rpc.response.version_list())
-
-  request = modules_service_pb.GetVersionsRequest()
-  if module:
-    request.set_module(module)
-  response = modules_service_pb.GetVersionsResponse()
-  return _MakeAsyncCall('GetVersions', request, response, _ResultHook)
-
-
 def get_default_version(module=None):
   """Returns the name of the default version for the module.
 
@@ -296,35 +233,6 @@
                         _ResultHook).get_result()
 
 
-def get_default_version_async(
-    module=None):
-  """Returns a UserRPC whose result contains a module's default version.
-
-  DEPRECATED. Please use get_default_version instead.
-
-  Args:
-    module: Module to retrieve the default version for, if None then the current
-      module will be used.
-
-  Returns:
-    Returns a UserRPC whose result contains a string holding the name of the
-    default version of the specified module.
-  """
-  logging.warning('The get_default_version_async function is deprecated. '
-                  'Please use get_default_version instead.')
-  def _ResultHook(rpc):
-    mapped_errors = [modules_service_pb.ModulesServiceError.INVALID_MODULE,
-                     modules_service_pb.ModulesServiceError.INVALID_VERSION]
-    _CheckAsyncResult(rpc, mapped_errors, {})
-    return rpc.response.version()
-
-  request = modules_service_pb.GetDefaultVersionRequest()
-  if module:
-    request.set_module(module)
-  response = modules_service_pb.GetDefaultVersionResponse()
-  return _MakeAsyncCall('GetDefaultVersion', request, response, _ResultHook)
-
-
 def get_num_instances(module=None,
                       version=None):
   """Return the number of instances that are set for the given module version.
@@ -363,42 +271,6 @@
                         _ResultHook).get_result()
 
 
-def get_num_instances_async(
-    module=None, version=None):
-  """Returns a UserRPC whose result holds the number of instances for a version.
-
-  DEPRECATED. Please use get_num_instances instead.
-
-  This is only valid for fixed modules, an error will be raised for
-  automatically-scaled modules.  Support for automatically-scaled modules may be
-  supported in the future.
-
-  Args:
-    module: String containing the name of the module to fetch this info for, if
-      None the module of the current instance will be used.
-    version: String containing the name of the version to fetch this info for,
-      if None the version of the current instance will be used.  If that version
-      does not exist in the other module, then an InvalidVersionError is raised.
-
-  Returns:
-    A UserRPC whose result holds the number of instances for a version.
-  """
-  logging.warning('The get_num_instances_async function is deprecated. '
-                  'Please use get_num_instances instead.')
-  def _ResultHook(rpc):
-    mapped_errors = [modules_service_pb.ModulesServiceError.INVALID_VERSION]
-    _CheckAsyncResult(rpc, mapped_errors, {})
-    return rpc.response.instances()
-
-  request = modules_service_pb.GetNumInstancesRequest()
-  if module:
-    request.set_module(module)
-  if version:
-    request.set_version(version)
-  response = modules_service_pb.GetNumInstancesResponse()
-  return _MakeAsyncCall('GetNumInstances', request, response, _ResultHook)
-
-
 def set_num_instances(instances,
                       module=None, version=None):
   """Sets the number of instances on the module and version.
@@ -465,25 +337,6 @@
   rpc.get_result()
 
 
-def start_module(module,
-                 version):
-  """Start all instances for the given version of the module.
-
-  DEPRECATED. Please use start_version instead.
-
-  Args:
-    module: String containing the name of the module to affect.
-    version: String containing the name of the version of the module to start.
-
-  Raises:
-    InvalidVersionError if the given module version is invalid.
-    TransientError if there is a problem persisting the change.
-  """
-  logging.warning('The start_module function is deprecated, please use the '
-                  'start_version function instead.')
-  start_version(module, version)
-
-
 def start_version_async(module,
                         version):
   """Returns a UserRPC  to start all instances for the given module version.
@@ -512,24 +365,6 @@
   return _MakeAsyncCall('StartModule', request, response, _ResultHook)
 
 
-def start_module_async(module,
-                       version):
-  """Returns a UserRPC  to start all instances for the given module version.
-
-  DEPRECATED. Please use start_version_async instead.
-
-  Args:
-    module: String containing the name of the module to affect.
-    version: String containing the name of the version of the module to start.
-
-  Returns:
-    A UserRPC  to start all instances for the given module version.
-  """
-  logging.warning('The start_module_async function is deprecated, please use '
-                  'the start_version_async function isntead.')
-  return start_version_async(module, version)
-
-
 def stop_version(module=None,
                  version=None):
   """Stops all instances for the given version of the module.
@@ -547,27 +382,6 @@
   rpc.get_result()
 
 
-def stop_module(module=None,
-                version=None):
-  """Stops all instances for the given version of the module.
-
-  DEPRECATED. Please use stop_version instead.
-
-  Args:
-    module: The module to affect, if None the current module is used.
-    version: The version of the given module to affect, if None the current
-      version is used.
-
-  Raises:
-    InvalidVersionError if the given module version is invalid.
-    UnexpectedStateError if the module is already stopped, or cannot be stopped.
-    TransientError if there is a problem persisting the change.
-  """
-  logging.warning('The stop_module function is deprecated, please use '
-                  'the stop_version function isntead.')
-  stop_version(module, version)
-
-
 def stop_version_async(module=None,
                        version=None):
   """Returns a UserRPC  to stop all instances for the given module version.
@@ -599,25 +413,6 @@
   return _MakeAsyncCall('StopModule', request, response, _ResultHook)
 
 
-def stop_module_async(module=None,
-                      version=None):
-  """Returns a UserRPC  to stop all instances for the given module version.
-
-  DEPRECATED. Please use stop_version_async instead.
-
-  Args:
-    module: The module to affect, if None the current module is used.
-    version: The version of the given module to affect, if None the current
-      version is used.
-
-  Returns:
-    A UserRPC  to stop all instances for the given module version.
-  """
-  logging.warning('The stop_module_async function is deprecated. Please use '
-                  'the stop_version_async function instead.')
-  return stop_version_async(module, version)
-
-
 def get_hostname(module=None,
                  version=None, instance=None):
   """Returns a hostname to use to contact the module.
@@ -662,48 +457,3 @@
                         response,
                         _ResultHook).get_result()
 
-
-def get_hostname_async(module=None,
-                       version=None, instance=None):
-  """Returns a UserRPC whose result contains the hostname to contact a module.
-
-  DEPRECATED. Please use get_hostname instead.
-
-  Args:
-    module: Name of module, if None, take module of the current instance.
-    version: Name of version, if version is None then either use the version of
-      the current instance if that version exists for the target module,
-      otherwise use the default version of the target module.
-    instance: Instance to construct a hostname for, if instance is None, a
-      loadbalanced hostname for the module will be returned.  If the target
-      module is not a fixed module, then instance is not considered valid.
-
-  Returns:
-    Returns a UserRPC whose result contains a valid canonical hostname that
-    can be used to communicate with the given module/version/instance.
-    E.g. 0.v1.module5.myapp.appspot.com
-
-  Raises:
-    TypeError: If the given instance type is invalid.
-  """
-  logging.warning('The get_hostname_async function is deprecated. Please '
-                  'use get_hostname instead.')
-  def _ResultHook(rpc):
-    mapped_errors = [modules_service_pb.ModulesServiceError.INVALID_MODULE,
-                     modules_service_pb.ModulesServiceError.INVALID_INSTANCES]
-    _CheckAsyncResult(rpc, mapped_errors, [])
-    return rpc.response.hostname()
-
-  request = modules_service_pb.GetHostnameRequest()
-  if module:
-    request.set_module(module)
-  if version:
-    request.set_version(version)
-  if instance:
-    if not isinstance(instance, (basestring, long, int)):
-      raise TypeError(
-          "'instance' arg must be of type basestring, long or int.")
-    request.set_instance(str(instance))
-  response = modules_service_pb.GetHostnameResponse()
-  return _MakeAsyncCall('GetHostname', request, response, _ResultHook)
-
diff --git a/google/appengine/api/search/search.py b/google/appengine/api/search/search.py
index ddfacf2..2800573 100644
--- a/google/appengine/api/search/search.py
+++ b/google/appengine/api/search/search.py
@@ -757,7 +757,6 @@
   _CheckStatus(response.status())
   return response
 
-
 class Field(object):
   """An abstract base class which represents a field of a document.
 
@@ -2394,6 +2393,8 @@
       self._namespace = u''
     namespace_manager.validate_namespace(self._namespace, exception=ValueError)
     self._schema = None
+    self._storage_usage = None
+    self._storage_limit = None
 
   @property
   def schema(self):
@@ -2403,6 +2404,25 @@
     return self._schema
 
   @property
+  def storage_usage(self):
+    """The approximate number of bytes used by this index.
+
+    The number may be slightly stale, as it may not reflect the
+    results of recent changes.
+
+    Returns None for indexes not obtained from search.get_indexes.
+
+    """
+    return self._storage_usage
+
+  @property
+  def storage_limit(self):
+    """The maximum allowable storage for this index, in bytes.
+
+    Returns None for indexes not obtained from search.get_indexes."""
+    return self._storage_limit
+
+  @property
   def name(self):
     """Returns the name of the index."""
     return self._name
@@ -2434,7 +2454,9 @@
 
     return _Repr(self, [('name', self.name), ('namespace', self.namespace),
                         ('source', self._source),
-                        ('schema', self.schema)])
+                        ('schema', self.schema),
+                        ('storage_usage', self.storage_usage),
+                        ('storage_limit', self.storage_limit)])
 
   def _NewPutResultFromPb(self, status_pb, doc_id):
     """Constructs PutResult from RequestStatus pb and doc_id."""
@@ -2920,6 +2942,9 @@
   index = _NewIndexFromIndexSpecPb(index_metadata_pb.index_spec())
   if index_metadata_pb.field_list():
     index._schema = _NewSchemaFromPb(index_metadata_pb.field_list())
+  if index_metadata_pb.has_storage():
+    index._storage_usage = index_metadata_pb.storage().amount_used()
+    index._storage_limit = index_metadata_pb.storage().limit()
   return index
 
 
diff --git a/google/appengine/api/search/search_service_pb.py b/google/appengine/api/search/search_service_pb.py
index 5fc8266..be791ae 100644
--- a/google/appengine/api/search/search_service_pb.py
+++ b/google/appengine/api/search/search_service_pb.py
@@ -566,12 +566,144 @@
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.IndexSpec'
+class IndexMetadata_Storage(ProtocolBuffer.ProtocolMessage):
+  has_amount_used_ = 0
+  amount_used_ = 0
+  has_limit_ = 0
+  limit_ = 0
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def amount_used(self): return self.amount_used_
+
+  def set_amount_used(self, x):
+    self.has_amount_used_ = 1
+    self.amount_used_ = x
+
+  def clear_amount_used(self):
+    if self.has_amount_used_:
+      self.has_amount_used_ = 0
+      self.amount_used_ = 0
+
+  def has_amount_used(self): return self.has_amount_used_
+
+  def limit(self): return self.limit_
+
+  def set_limit(self, x):
+    self.has_limit_ = 1
+    self.limit_ = x
+
+  def clear_limit(self):
+    if self.has_limit_:
+      self.has_limit_ = 0
+      self.limit_ = 0
+
+  def has_limit(self): return self.has_limit_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_amount_used()): self.set_amount_used(x.amount_used())
+    if (x.has_limit()): self.set_limit(x.limit())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_amount_used_ != x.has_amount_used_: return 0
+    if self.has_amount_used_ and self.amount_used_ != x.amount_used_: return 0
+    if self.has_limit_ != x.has_limit_: return 0
+    if self.has_limit_ and self.limit_ != x.limit_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_amount_used_): n += 1 + self.lengthVarInt64(self.amount_used_)
+    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_amount_used_): n += 1 + self.lengthVarInt64(self.amount_used_)
+    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
+    return n
+
+  def Clear(self):
+    self.clear_amount_used()
+    self.clear_limit()
+
+  def OutputUnchecked(self, out):
+    if (self.has_amount_used_):
+      out.putVarInt32(8)
+      out.putVarInt64(self.amount_used_)
+    if (self.has_limit_):
+      out.putVarInt32(16)
+      out.putVarInt64(self.limit_)
+
+  def OutputPartial(self, out):
+    if (self.has_amount_used_):
+      out.putVarInt32(8)
+      out.putVarInt64(self.amount_used_)
+    if (self.has_limit_):
+      out.putVarInt32(16)
+      out.putVarInt64(self.limit_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_amount_used(d.getVarInt64())
+        continue
+      if tt == 16:
+        self.set_limit(d.getVarInt64())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_amount_used_: res+=prefix+("amount_used: %s\n" % self.DebugFormatInt64(self.amount_used_))
+    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt64(self.limit_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kamount_used = 1
+  klimit = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "amount_used",
+    2: "limit",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.IndexMetadata_Storage'
 class IndexMetadata(ProtocolBuffer.ProtocolMessage):
   has_index_spec_ = 0
+  has_storage_ = 0
+  storage_ = None
 
   def __init__(self, contents=None):
     self.index_spec_ = IndexSpec()
     self.field_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
   def index_spec(self): return self.index_spec_
@@ -598,11 +730,31 @@
 
   def clear_field(self):
     self.field_ = []
+  def storage(self):
+    if self.storage_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.storage_ is None: self.storage_ = IndexMetadata_Storage()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.storage_
+
+  def mutable_storage(self): self.has_storage_ = 1; return self.storage()
+
+  def clear_storage(self):
+
+    if self.has_storage_:
+      self.has_storage_ = 0;
+      if self.storage_ is not None: self.storage_.Clear()
+
+  def has_storage(self): return self.has_storage_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_index_spec()): self.mutable_index_spec().MergeFrom(x.index_spec())
     for i in xrange(x.field_size()): self.add_field().CopyFrom(x.field(i))
+    if (x.has_storage()): self.mutable_storage().MergeFrom(x.storage())
 
   def Equals(self, x):
     if x is self: return 1
@@ -611,6 +763,8 @@
     if len(self.field_) != len(x.field_): return 0
     for e1, e2 in zip(self.field_, x.field_):
       if e1 != e2: return 0
+    if self.has_storage_ != x.has_storage_: return 0
+    if self.has_storage_ and self.storage_ != x.storage_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -622,6 +776,7 @@
     elif not self.index_spec_.IsInitialized(debug_strs): initialized = 0
     for p in self.field_:
       if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_storage_ and not self.storage_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
@@ -629,6 +784,7 @@
     n += self.lengthString(self.index_spec_.ByteSize())
     n += 1 * len(self.field_)
     for i in xrange(len(self.field_)): n += self.lengthString(self.field_[i].ByteSize())
+    if (self.has_storage_): n += 1 + self.lengthString(self.storage_.ByteSize())
     return n + 1
 
   def ByteSizePartial(self):
@@ -638,11 +794,13 @@
       n += self.lengthString(self.index_spec_.ByteSizePartial())
     n += 1 * len(self.field_)
     for i in xrange(len(self.field_)): n += self.lengthString(self.field_[i].ByteSizePartial())
+    if (self.has_storage_): n += 1 + self.lengthString(self.storage_.ByteSizePartial())
     return n
 
   def Clear(self):
     self.clear_index_spec()
     self.clear_field()
+    self.clear_storage()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -652,6 +810,10 @@
       out.putVarInt32(18)
       out.putVarInt32(self.field_[i].ByteSize())
       self.field_[i].OutputUnchecked(out)
+    if (self.has_storage_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.storage_.ByteSize())
+      self.storage_.OutputUnchecked(out)
 
   def OutputPartial(self, out):
     if (self.has_index_spec_):
@@ -662,6 +824,10 @@
       out.putVarInt32(18)
       out.putVarInt32(self.field_[i].ByteSizePartial())
       self.field_[i].OutputPartial(out)
+    if (self.has_storage_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.storage_.ByteSizePartial())
+      self.storage_.OutputPartial(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -678,6 +844,12 @@
         d.skip(length)
         self.add_field().TryMerge(tmp)
         continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_storage().TryMerge(tmp)
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -698,6 +870,10 @@
       res+=e.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
       cnt+=1
+    if self.has_storage_:
+      res+=prefix+"storage <\n"
+      res+=self.storage_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     return res
 
 
@@ -706,18 +882,21 @@
 
   kindex_spec = 1
   kfield = 2
+  kstorage = 3
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
     1: "index_spec",
     2: "field",
-  }, 2)
+    3: "storage",
+  }, 3)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
     1: ProtocolBuffer.Encoder.STRING,
     2: ProtocolBuffer.Encoder.STRING,
-  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
@@ -3889,6 +4068,8 @@
   keys_only_ = 0
   has_parsing_mode_ = 0
   parsing_mode_ = 0
+  has_faceted_search_ = 0
+  faceted_search_ = 0
 
   def __init__(self, contents=None):
     self.index_spec_ = IndexSpec()
@@ -4062,6 +4243,19 @@
 
   def has_parsing_mode(self): return self.has_parsing_mode_
 
+  def faceted_search(self): return self.faceted_search_
+
+  def set_faceted_search(self, x):
+    self.has_faceted_search_ = 1
+    self.faceted_search_ = x
+
+  def clear_faceted_search(self):
+    if self.has_faceted_search_:
+      self.has_faceted_search_ = 0
+      self.faceted_search_ = 0
+
+  def has_faceted_search(self): return self.has_faceted_search_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -4077,6 +4271,7 @@
     if (x.has_field_spec()): self.mutable_field_spec().MergeFrom(x.field_spec())
     if (x.has_keys_only()): self.set_keys_only(x.keys_only())
     if (x.has_parsing_mode()): self.set_parsing_mode(x.parsing_mode())
+    if (x.has_faceted_search()): self.set_faceted_search(x.faceted_search())
 
   def Equals(self, x):
     if x is self: return 1
@@ -4105,6 +4300,8 @@
     if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
     if self.has_parsing_mode_ != x.has_parsing_mode_: return 0
     if self.has_parsing_mode_ and self.parsing_mode_ != x.parsing_mode_: return 0
+    if self.has_faceted_search_ != x.has_faceted_search_: return 0
+    if self.has_faceted_search_ and self.faceted_search_ != x.faceted_search_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -4139,6 +4336,7 @@
     if (self.has_field_spec_): n += 1 + self.lengthString(self.field_spec_.ByteSize())
     if (self.has_keys_only_): n += 2
     if (self.has_parsing_mode_): n += 1 + self.lengthVarInt64(self.parsing_mode_)
+    if (self.has_faceted_search_): n += 2
     return n + 2
 
   def ByteSizePartial(self):
@@ -4160,6 +4358,7 @@
     if (self.has_field_spec_): n += 1 + self.lengthString(self.field_spec_.ByteSizePartial())
     if (self.has_keys_only_): n += 2
     if (self.has_parsing_mode_): n += 1 + self.lengthVarInt64(self.parsing_mode_)
+    if (self.has_faceted_search_): n += 2
     return n
 
   def Clear(self):
@@ -4175,6 +4374,7 @@
     self.clear_field_spec()
     self.clear_keys_only()
     self.clear_parsing_mode()
+    self.clear_faceted_search()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -4215,6 +4415,9 @@
     if (self.has_parsing_mode_):
       out.putVarInt32(104)
       out.putVarInt32(self.parsing_mode_)
+    if (self.has_faceted_search_):
+      out.putVarInt32(112)
+      out.putBoolean(self.faceted_search_)
 
   def OutputPartial(self, out):
     if (self.has_index_spec_):
@@ -4257,6 +4460,9 @@
     if (self.has_parsing_mode_):
       out.putVarInt32(104)
       out.putVarInt32(self.parsing_mode_)
+    if (self.has_faceted_search_):
+      out.putVarInt32(112)
+      out.putBoolean(self.faceted_search_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -4309,6 +4515,9 @@
       if tt == 104:
         self.set_parsing_mode(d.getVarInt32())
         continue
+      if tt == 112:
+        self.set_faceted_search(d.getBoolean())
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -4345,6 +4554,7 @@
       res+=prefix+">\n"
     if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
     if self.has_parsing_mode_: res+=prefix+("parsing_mode: %s\n" % self.DebugFormatInt32(self.parsing_mode_))
+    if self.has_faceted_search_: res+=prefix+("faceted_search: %s\n" % self.DebugFormatBool(self.faceted_search_))
     return res
 
 
@@ -4363,6 +4573,7 @@
   kfield_spec = 10
   kkeys_only = 12
   kparsing_mode = 13
+  kfaceted_search = 14
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
@@ -4378,7 +4589,8 @@
     11: "offset",
     12: "keys_only",
     13: "parsing_mode",
-  }, 13)
+    14: "faceted_search",
+  }, 14)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
@@ -4394,7 +4606,8 @@
     11: ProtocolBuffer.Encoder.NUMERIC,
     12: ProtocolBuffer.Encoder.NUMERIC,
     13: ProtocolBuffer.Encoder.NUMERIC,
-  }, 13, ProtocolBuffer.Encoder.MAX_TYPE)
+    14: ProtocolBuffer.Encoder.NUMERIC,
+  }, 14, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
@@ -4538,6 +4751,341 @@
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.SearchRequest'
+class FacetResultValue(ProtocolBuffer.ProtocolMessage):
+  has_name_ = 0
+  name_ = ""
+  has_count_ = 0
+  count_ = 0
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def name(self): return self.name_
+
+  def set_name(self, x):
+    self.has_name_ = 1
+    self.name_ = x
+
+  def clear_name(self):
+    if self.has_name_:
+      self.has_name_ = 0
+      self.name_ = ""
+
+  def has_name(self): return self.has_name_
+
+  def count(self): return self.count_
+
+  def set_count(self, x):
+    self.has_count_ = 1
+    self.count_ = x
+
+  def clear_count(self):
+    if self.has_count_:
+      self.has_count_ = 0
+      self.count_ = 0
+
+  def has_count(self): return self.has_count_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_name()): self.set_name(x.name())
+    if (x.has_count()): self.set_count(x.count())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_name_ != x.has_name_: return 0
+    if self.has_name_ and self.name_ != x.name_: return 0
+    if self.has_count_ != x.has_count_: return 0
+    if self.has_count_ and self.count_ != x.count_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_name_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: name not set.')
+    if (not self.has_count_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: count not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.name_))
+    n += self.lengthVarInt64(self.count_)
+    return n + 2
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_name_):
+      n += 1
+      n += self.lengthString(len(self.name_))
+    if (self.has_count_):
+      n += 1
+      n += self.lengthVarInt64(self.count_)
+    return n
+
+  def Clear(self):
+    self.clear_name()
+    self.clear_count()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.name_)
+    out.putVarInt32(16)
+    out.putVarInt32(self.count_)
+
+  def OutputPartial(self, out):
+    if (self.has_name_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.name_)
+    if (self.has_count_):
+      out.putVarInt32(16)
+      out.putVarInt32(self.count_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_name(d.getPrefixedString())
+        continue
+      if tt == 16:
+        self.set_count(d.getVarInt32())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
+    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kname = 1
+  kcount = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "name",
+    2: "count",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.FacetResultValue'
+class FacetResult(ProtocolBuffer.ProtocolMessage):
+  has_name_ = 0
+  name_ = ""
+  has_type_ = 0
+
+  def __init__(self, contents=None):
+    self.type_ = ContentType()
+    self.value_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def name(self): return self.name_
+
+  def set_name(self, x):
+    self.has_name_ = 1
+    self.name_ = x
+
+  def clear_name(self):
+    if self.has_name_:
+      self.has_name_ = 0
+      self.name_ = ""
+
+  def has_name(self): return self.has_name_
+
+  def type(self): return self.type_
+
+  def mutable_type(self): self.has_type_ = 1; return self.type_
+
+  def clear_type(self):self.has_type_ = 0; self.type_.Clear()
+
+  def has_type(self): return self.has_type_
+
+  def value_size(self): return len(self.value_)
+  def value_list(self): return self.value_
+
+  def value(self, i):
+    return self.value_[i]
+
+  def mutable_value(self, i):
+    return self.value_[i]
+
+  def add_value(self):
+    x = FacetResultValue()
+    self.value_.append(x)
+    return x
+
+  def clear_value(self):
+    self.value_ = []
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_name()): self.set_name(x.name())
+    if (x.has_type()): self.mutable_type().MergeFrom(x.type())
+    for i in xrange(x.value_size()): self.add_value().CopyFrom(x.value(i))
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_name_ != x.has_name_: return 0
+    if self.has_name_ and self.name_ != x.name_: return 0
+    if self.has_type_ != x.has_type_: return 0
+    if self.has_type_ and self.type_ != x.type_: return 0
+    if len(self.value_) != len(x.value_): return 0
+    for e1, e2 in zip(self.value_, x.value_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_name_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: name not set.')
+    if (not self.has_type_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: type not set.')
+    elif not self.type_.IsInitialized(debug_strs): initialized = 0
+    for p in self.value_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.name_))
+    n += self.lengthString(self.type_.ByteSize())
+    n += 1 * len(self.value_)
+    for i in xrange(len(self.value_)): n += self.lengthString(self.value_[i].ByteSize())
+    return n + 2
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_name_):
+      n += 1
+      n += self.lengthString(len(self.name_))
+    if (self.has_type_):
+      n += 1
+      n += self.lengthString(self.type_.ByteSizePartial())
+    n += 1 * len(self.value_)
+    for i in xrange(len(self.value_)): n += self.lengthString(self.value_[i].ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_name()
+    self.clear_type()
+    self.clear_value()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.name_)
+    out.putVarInt32(18)
+    out.putVarInt32(self.type_.ByteSize())
+    self.type_.OutputUnchecked(out)
+    for i in xrange(len(self.value_)):
+      out.putVarInt32(26)
+      out.putVarInt32(self.value_[i].ByteSize())
+      self.value_[i].OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_name_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.name_)
+    if (self.has_type_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.type_.ByteSizePartial())
+      self.type_.OutputPartial(out)
+    for i in xrange(len(self.value_)):
+      out.putVarInt32(26)
+      out.putVarInt32(self.value_[i].ByteSizePartial())
+      self.value_[i].OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_name(d.getPrefixedString())
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_type().TryMerge(tmp)
+        continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_value().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
+    if self.has_type_:
+      res+=prefix+"type <\n"
+      res+=self.type_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.value_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("value%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kname = 1
+  ktype = 2
+  kvalue = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "name",
+    2: "type",
+    3: "value",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.FacetResult'
 class SearchResult(ProtocolBuffer.ProtocolMessage):
   has_document_ = 0
   has_cursor_ = 0
@@ -4782,6 +5330,7 @@
       self._extension_fields = {}
     self.result_ = []
     self.status_ = RequestStatus()
+    self.facet_result_ = []
     if contents is not None: self.MergeFromString(contents)
 
   def result_size(self): return len(self.result_)
@@ -4834,6 +5383,22 @@
 
   def has_cursor(self): return self.has_cursor_
 
+  def facet_result_size(self): return len(self.facet_result_)
+  def facet_result_list(self): return self.facet_result_
+
+  def facet_result(self, i):
+    return self.facet_result_[i]
+
+  def mutable_facet_result(self, i):
+    return self.facet_result_[i]
+
+  def add_facet_result(self):
+    x = FacetResult()
+    self.facet_result_.append(x)
+    return x
+
+  def clear_facet_result(self):
+    self.facet_result_ = []
 
   def MergeFrom(self, x):
     assert x is not self
@@ -4841,6 +5406,7 @@
     if (x.has_matched_count()): self.set_matched_count(x.matched_count())
     if (x.has_status()): self.mutable_status().MergeFrom(x.status())
     if (x.has_cursor()): self.set_cursor(x.cursor())
+    for i in xrange(x.facet_result_size()): self.add_facet_result().CopyFrom(x.facet_result(i))
     if _extension_runtime: self._MergeExtensionFields(x)
 
   def Equals(self, x):
@@ -4854,6 +5420,9 @@
     if self.has_status_ and self.status_ != x.status_: return 0
     if self.has_cursor_ != x.has_cursor_: return 0
     if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
+    if len(self.facet_result_) != len(x.facet_result_): return 0
+    for e1, e2 in zip(self.facet_result_, x.facet_result_):
+      if e1 != e2: return 0
     if _extension_runtime and not self._ExtensionEquals(x): return 0
     return 1
 
@@ -4870,6 +5439,8 @@
       if debug_strs is not None:
         debug_strs.append('Required field: status not set.')
     elif not self.status_.IsInitialized(debug_strs): initialized = 0
+    for p in self.facet_result_:
+      if not p.IsInitialized(debug_strs): initialized=0
     return initialized
 
   def ByteSize(self):
@@ -4879,6 +5450,8 @@
     n += self.lengthVarInt64(self.matched_count_)
     n += self.lengthString(self.status_.ByteSize())
     if (self.has_cursor_): n += 1 + self.lengthString(len(self.cursor_))
+    n += 1 * len(self.facet_result_)
+    for i in xrange(len(self.facet_result_)): n += self.lengthString(self.facet_result_[i].ByteSize())
     if _extension_runtime:
       n += self._ExtensionByteSize(False)
     return n + 2
@@ -4894,6 +5467,8 @@
       n += 1
       n += self.lengthString(self.status_.ByteSizePartial())
     if (self.has_cursor_): n += 1 + self.lengthString(len(self.cursor_))
+    n += 1 * len(self.facet_result_)
+    for i in xrange(len(self.facet_result_)): n += self.lengthString(self.facet_result_[i].ByteSizePartial())
     if _extension_runtime:
       n += self._ExtensionByteSize(True)
     return n
@@ -4903,6 +5478,7 @@
     self.clear_matched_count()
     self.clear_status()
     self.clear_cursor()
+    self.clear_facet_result()
     if _extension_runtime: self._extension_fields.clear()
 
   def OutputUnchecked(self, out):
@@ -4921,6 +5497,10 @@
     if (self.has_cursor_):
       out.putVarInt32(34)
       out.putPrefixedString(self.cursor_)
+    for i in xrange(len(self.facet_result_)):
+      out.putVarInt32(42)
+      out.putVarInt32(self.facet_result_[i].ByteSize())
+      self.facet_result_[i].OutputUnchecked(out)
     if _extension_runtime:
       extension_index = self._OutputExtensionFields(out, False, extensions, extension_index, 10000)
 
@@ -4942,6 +5522,10 @@
     if (self.has_cursor_):
       out.putVarInt32(34)
       out.putPrefixedString(self.cursor_)
+    for i in xrange(len(self.facet_result_)):
+      out.putVarInt32(42)
+      out.putVarInt32(self.facet_result_[i].ByteSizePartial())
+      self.facet_result_[i].OutputPartial(out)
     if _extension_runtime:
       extension_index = self._OutputExtensionFields(out, True, extensions, extension_index, 10000)
 
@@ -4966,6 +5550,12 @@
       if tt == 34:
         self.set_cursor(d.getPrefixedString())
         continue
+      if tt == 42:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_facet_result().TryMerge(tmp)
+        continue
       if _extension_runtime:
         if (1000 <= tt and tt < 10000):
           self._ParseOneExtensionField(tt, d)
@@ -4992,6 +5582,14 @@
       res+=self.status_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
     if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatString(self.cursor_))
+    cnt=0
+    for e in self.facet_result_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("facet_result%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
     if _extension_runtime:
       res+=self._ExtensionDebugString(prefix, printElemNumber)
     return res
@@ -5006,6 +5604,7 @@
   kmatched_count = 2
   kstatus = 3
   kcursor = 4
+  kfacet_result = 5
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
@@ -5013,7 +5612,8 @@
     2: "matched_count",
     3: "status",
     4: "cursor",
-  }, 4)
+    5: "facet_result",
+  }, 5)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
@@ -5021,7 +5621,8 @@
     2: ProtocolBuffer.Encoder.NUMERIC,
     3: ProtocolBuffer.Encoder.STRING,
     4: ProtocolBuffer.Encoder.STRING,
-  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
@@ -5030,4 +5631,4 @@
 if _extension_runtime:
   pass
 
-__all__ = ['SearchServiceError','RequestStatus','IndexSpec','IndexMetadata','IndexDocumentParams','IndexDocumentRequest','IndexDocumentResponse','DeleteDocumentParams','DeleteDocumentRequest','DeleteDocumentResponse','ListDocumentsParams','ListDocumentsRequest','ListDocumentsResponse','ListIndexesParams','ListIndexesRequest','ListIndexesResponse','DeleteSchemaParams','DeleteSchemaRequest','DeleteSchemaResponse','SortSpec','ScorerSpec','FieldSpec','FieldSpec_Expression','SearchParams','SearchRequest','SearchResult','SearchResponse']
+__all__ = ['SearchServiceError','RequestStatus','IndexSpec','IndexMetadata_Storage','IndexMetadata','IndexDocumentParams','IndexDocumentRequest','IndexDocumentResponse','DeleteDocumentParams','DeleteDocumentRequest','DeleteDocumentResponse','ListDocumentsParams','ListDocumentsRequest','ListDocumentsResponse','ListIndexesParams','ListIndexesRequest','ListIndexesResponse','DeleteSchemaParams','DeleteSchemaRequest','DeleteSchemaResponse','SortSpec','ScorerSpec','FieldSpec','FieldSpec_Expression','SearchParams','SearchRequest','FacetResultValue','FacetResult','SearchResult','SearchResponse']
diff --git a/google/appengine/api/search/simple_search_stub.py b/google/appengine/api/search/simple_search_stub.py
index 5f5bf25..e05b3b8 100644
--- a/google/appengine/api/search/simple_search_stub.py
+++ b/google/appengine/api/search/simple_search_stub.py
@@ -593,6 +593,12 @@
 
   _VERSION = 1
 
+
+
+
+
+  _MAX_STORAGE_LIMIT = 1024 * 1024 * 1024
+
   def __init__(self, service_name='search', index_file=None):
     """Constructor.
 
@@ -731,6 +737,7 @@
       new_index_spec.set_namespace(index_spec.namespace())
       if params.fetch_schema():
         self._AddSchemaInformation(index, metadata)
+      self._AddStorageInformation(index, metadata)
 
   def _AddSchemaInformation(self, index, metadata_pb):
     schema = index.GetSchema()
@@ -739,6 +746,19 @@
       new_field_types = metadata_pb.add_field()
       new_field_types.MergeFrom(field_types)
 
+  def _AddStorageInformation(self, index, metadata_pb):
+    total_usage = 0
+    for document in index.Documents():
+
+
+
+      for field in document.field_list():
+        total_usage += field.ByteSize()
+      total_usage += len(document.id())
+    storage = metadata_pb.mutable_storage()
+    storage.set_amount_used(total_usage)
+    storage.set_limit(self._MAX_STORAGE_LIMIT)
+
   def _AddDocument(self, response, document, ids_only):
     doc = response.add_document()
     if ids_only:
diff --git a/google/appengine/api/taskqueue/taskqueue_stub.py b/google/appengine/api/taskqueue/taskqueue_stub.py
index b017662..c25ae30 100644
--- a/google/appengine/api/taskqueue/taskqueue_stub.py
+++ b/google/appengine/api/taskqueue/taskqueue_stub.py
@@ -482,7 +482,8 @@
 
         self._ConstructQueue(queue_name, bucket_capacity=bucket_size,
                              user_specified_rate=max_rate, queue_mode=mode,
-                             acl=acl, retry_parameters=retry_parameters)
+                             acl=acl, retry_parameters=retry_parameters,
+                             target=entry.target)
       else:
 
 
@@ -979,7 +980,7 @@
                user_specified_rate=DEFAULT_RATE, retry_parameters=None,
                max_concurrent_requests=None, paused=False,
                queue_mode=QUEUE_MODE.PUSH, acl=None,
-               _testing_validate_state=None):
+               _testing_validate_state=None, target=None):
 
     self.queue_name = queue_name
     self.bucket_refill_per_second = bucket_refill_per_second
@@ -990,7 +991,7 @@
     self.paused = paused
     self.queue_mode = queue_mode
     self.acl = acl
-
+    self.target = target
     self._testing_validate_state = _testing_validate_state
 
 
@@ -1831,18 +1832,18 @@
       queue: The queue that this task belongs to, an _Queue instance.
 
     Returns:
-      A tuple of (header_dict, headers), where:
-        header_dict: A mapping from lowercase header name to a list of values.
-        headers: a list of tuples containing the http header and value. There
-            may be be mutiple entries with the same key.
+      A list of tuples containing the http header and value. There
+          may be be mutiple entries with the same key.
     """
     headers = []
-    header_dict = {}
     for header in task.header_list():
       header_key_lower = header.key().lower()
-      if header_key_lower not in BUILT_IN_HEADERS:
+
+      if header_key_lower == 'host' and queue.target is not None:
+        headers.append(
+            (header.key(), '.'.join([queue.target, self._default_host])))
+      elif header_key_lower not in BUILT_IN_HEADERS:
         headers.append((header.key(), header.value()))
-        header_dict.setdefault(header_key_lower, []).append(header.value())
 
 
     headers.append(('X-AppEngine-QueueName', queue.queue_name))
@@ -1852,7 +1853,8 @@
                     str(_UsecToSec(task.eta_usec()))))
     headers.append(('X-AppEngine-Fake-Is-Admin', '1'))
     headers.append(('Content-Length', str(len(task.body()))))
-    if task.has_body() and 'content-type' not in header_dict:
+    if (task.has_body() and 'content-type' not in
+        [key.lower() for key, _ in headers]):
       headers.append(('Content-Type', 'application/octet-stream'))
     headers.append(('X-AppEngine-TaskExecutionCount',
                     str(task.execution_count())))
@@ -1860,7 +1862,7 @@
       headers.append(('X-AppEngine-TaskPreviousResponse',
                       str(task.runlog().response_code())))
 
-    return header_dict, headers
+    return headers
 
   def ExecuteTask(self, task, queue):
     """Construct a http request from the task and dispatch it.
@@ -1874,15 +1876,7 @@
       Http Response code from the task's execution, 0 if an exception occurred.
     """
     method = task.RequestMethod_Name(task.method())
-    header_dict, headers = self._HeadersFromTask(task, queue)
-    connection_host, = header_dict.get('host', [self._default_host])
-    if connection_host is None:
-      logging.error('Could not determine where to send the task "%s" '
-                    '(Url: "%s") in queue "%s". Treating as an error.',
-                    task.task_name(), task.url(), queue.queue_name)
-      return False
-    else:
-      header_dict['Host'] = connection_host
+    headers = self._HeadersFromTask(task, queue)
     dispatcher = self._request_data.get_dispatcher()
     try:
       response = dispatcher.add_request(method, task.url(), headers,
diff --git a/google/appengine/api/urlfetch.py b/google/appengine/api/urlfetch.py
index 1c38f7d..df485fa 100644
--- a/google/appengine/api/urlfetch.py
+++ b/google/appengine/api/urlfetch.py
@@ -278,8 +278,15 @@
   The first argument is a UserRPC instance.  See urlfetch.fetch for a
   thorough description of remaining arguments.
 
+  Raises:
+    InvalidMethodError: if requested method is not in _VALID_METHODS
+    ResponseTooLargeError: if the response payload is too large
+    InvalidURLError: if there are issues with the content/size of the
+      requested URL
+
   Returns:
     The rpc object passed into the function.
+
   """
 
   assert rpc.service == 'urlfetch', repr(rpc.service)
@@ -350,9 +357,9 @@
     rpc: A UserRPC object.
 
   Raises:
-    InvalidURLError if the url was invalid.
-    DownloadError if there was a problem fetching the url.
-    ResponseTooLargeError if the response was either truncated (and
+    InvalidURLError: if the url was invalid.
+    DownloadError: if there was a problem fetching the url.
+    ResponseTooLargeError: if the response was either truncated (and
       allow_truncated=False was passed to make_fetch_call()), or if it
       was too big for us to download.
 
@@ -366,6 +373,12 @@
 
   try:
     rpc.check_success()
+  except apiproxy_errors.RequestTooLargeError, err:
+    error_detail = ''
+    if err.error_detail:
+      error_detail = ' Error: ' + err.error_detail
+    raise InvalidURLError(
+        'Invalid request URL: ' + url + error_detail)
   except apiproxy_errors.ApplicationError, err:
     error_detail = ''
     if err.error_detail:
diff --git a/google/appengine/datastore/datastore_pb.py b/google/appengine/datastore/datastore_pb.py
index 5ba578f..ad1cab8 100644
--- a/google/appengine/datastore/datastore_pb.py
+++ b/google/appengine/datastore/datastore_pb.py
@@ -17,7782 +17,32 @@
 
 
 
-from google.net.proto import ProtocolBuffer
-import array
-import dummy_thread as thread
 
-__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
-                   unusednames=printElemNumber,debug_strs no-special"""
 
-if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
-  _extension_runtime = True
-  _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
-else:
-  _extension_runtime = False
-  _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
+"""The Python datastore protocol buffer definition (old name)."""
 
-from google.appengine.api.api_base_pb import Integer64Proto;
-from google.appengine.api.api_base_pb import StringProto;
-from google.appengine.api.api_base_pb import VoidProto;
+
+
+
+
+
+
 from google.appengine.datastore.action_pb import Action
 from google.appengine.datastore.entity_pb import CompositeIndex
 from google.appengine.datastore.entity_pb import EntityProto
 from google.appengine.datastore.entity_pb import Index
+from google.appengine.datastore.entity_pb import Path
 from google.appengine.datastore.entity_pb import Property
 from google.appengine.datastore.entity_pb import PropertyValue
-from google.appengine.datastore.entity_pb import Path
 from google.appengine.datastore.entity_pb import Reference
 from google.appengine.datastore.snapshot_pb import Snapshot
-class Transaction(ProtocolBuffer.ProtocolMessage):
-  has_handle_ = 0
-  handle_ = 0
-  has_app_ = 0
-  app_ = ""
-  has_mark_changes_ = 0
-  mark_changes_ = 0
 
-  def __init__(self, contents=None):
-    if contents is not None: self.MergeFromString(contents)
+from google.appengine.api.api_base_pb import Integer64Proto
+from google.appengine.api.api_base_pb import StringProto
+from google.appengine.api.api_base_pb import VoidProto
+from google.appengine.datastore import datastore_v3_pb
+from google.appengine.datastore.datastore_v3_pb import *
 
-  def handle(self): return self.handle_
 
-  def set_handle(self, x):
-    self.has_handle_ = 1
-    self.handle_ = x
-
-  def clear_handle(self):
-    if self.has_handle_:
-      self.has_handle_ = 0
-      self.handle_ = 0
-
-  def has_handle(self): return self.has_handle_
-
-  def app(self): return self.app_
-
-  def set_app(self, x):
-    self.has_app_ = 1
-    self.app_ = x
-
-  def clear_app(self):
-    if self.has_app_:
-      self.has_app_ = 0
-      self.app_ = ""
-
-  def has_app(self): return self.has_app_
-
-  def mark_changes(self): return self.mark_changes_
-
-  def set_mark_changes(self, x):
-    self.has_mark_changes_ = 1
-    self.mark_changes_ = x
-
-  def clear_mark_changes(self):
-    if self.has_mark_changes_:
-      self.has_mark_changes_ = 0
-      self.mark_changes_ = 0
-
-  def has_mark_changes(self): return self.has_mark_changes_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_handle()): self.set_handle(x.handle())
-    if (x.has_app()): self.set_app(x.app())
-    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_handle_ != x.has_handle_: return 0
-    if self.has_handle_ and self.handle_ != x.handle_: return 0
-    if self.has_app_ != x.has_app_: return 0
-    if self.has_app_ and self.app_ != x.app_: return 0
-    if self.has_mark_changes_ != x.has_mark_changes_: return 0
-    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_handle_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: handle not set.')
-    if (not self.has_app_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: app not set.')
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(len(self.app_))
-    if (self.has_mark_changes_): n += 2
-    return n + 10
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_handle_):
-      n += 9
-    if (self.has_app_):
-      n += 1
-      n += self.lengthString(len(self.app_))
-    if (self.has_mark_changes_): n += 2
-    return n
-
-  def Clear(self):
-    self.clear_handle()
-    self.clear_app()
-    self.clear_mark_changes()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(9)
-    out.put64(self.handle_)
-    out.putVarInt32(18)
-    out.putPrefixedString(self.app_)
-    if (self.has_mark_changes_):
-      out.putVarInt32(24)
-      out.putBoolean(self.mark_changes_)
-
-  def OutputPartial(self, out):
-    if (self.has_handle_):
-      out.putVarInt32(9)
-      out.put64(self.handle_)
-    if (self.has_app_):
-      out.putVarInt32(18)
-      out.putPrefixedString(self.app_)
-    if (self.has_mark_changes_):
-      out.putVarInt32(24)
-      out.putBoolean(self.mark_changes_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 9:
-        self.set_handle(d.get64())
-        continue
-      if tt == 18:
-        self.set_app(d.getPrefixedString())
-        continue
-      if tt == 24:
-        self.set_mark_changes(d.getBoolean())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_handle_: res+=prefix+("handle: %s\n" % self.DebugFormatFixed64(self.handle_))
-    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
-    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  khandle = 1
-  kapp = 2
-  kmark_changes = 3
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "handle",
-    2: "app",
-    3: "mark_changes",
-  }, 3)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.DOUBLE,
-    2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Transaction'
-class Query_Filter(ProtocolBuffer.ProtocolMessage):
-
-
-  LESS_THAN    =    1
-  LESS_THAN_OR_EQUAL =    2
-  GREATER_THAN =    3
-  GREATER_THAN_OR_EQUAL =    4
-  EQUAL        =    5
-  IN           =    6
-  EXISTS       =    7
-
-  _Operator_NAMES = {
-    1: "LESS_THAN",
-    2: "LESS_THAN_OR_EQUAL",
-    3: "GREATER_THAN",
-    4: "GREATER_THAN_OR_EQUAL",
-    5: "EQUAL",
-    6: "IN",
-    7: "EXISTS",
-  }
-
-  def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "")
-  Operator_Name = classmethod(Operator_Name)
-
-  has_op_ = 0
-  op_ = 0
-
-  def __init__(self, contents=None):
-    self.property_ = []
-    if contents is not None: self.MergeFromString(contents)
-
-  def op(self): return self.op_
-
-  def set_op(self, x):
-    self.has_op_ = 1
-    self.op_ = x
-
-  def clear_op(self):
-    if self.has_op_:
-      self.has_op_ = 0
-      self.op_ = 0
-
-  def has_op(self): return self.has_op_
-
-  def property_size(self): return len(self.property_)
-  def property_list(self): return self.property_
-
-  def property(self, i):
-    return self.property_[i]
-
-  def mutable_property(self, i):
-    return self.property_[i]
-
-  def add_property(self):
-    x = Property()
-    self.property_.append(x)
-    return x
-
-  def clear_property(self):
-    self.property_ = []
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_op()): self.set_op(x.op())
-    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_op_ != x.has_op_: return 0
-    if self.has_op_ and self.op_ != x.op_: return 0
-    if len(self.property_) != len(x.property_): return 0
-    for e1, e2 in zip(self.property_, x.property_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_op_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: op not set.')
-    for p in self.property_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthVarInt64(self.op_)
-    n += 1 * len(self.property_)
-    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_op_):
-      n += 1
-      n += self.lengthVarInt64(self.op_)
-    n += 1 * len(self.property_)
-    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_op()
-    self.clear_property()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(48)
-    out.putVarInt32(self.op_)
-    for i in xrange(len(self.property_)):
-      out.putVarInt32(114)
-      out.putVarInt32(self.property_[i].ByteSize())
-      self.property_[i].OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_op_):
-      out.putVarInt32(48)
-      out.putVarInt32(self.op_)
-    for i in xrange(len(self.property_)):
-      out.putVarInt32(114)
-      out.putVarInt32(self.property_[i].ByteSizePartial())
-      self.property_[i].OutputPartial(out)
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 36: break
-      if tt == 48:
-        self.set_op(d.getVarInt32())
-        continue
-      if tt == 114:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_property().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_))
-    cnt=0
-    for e in self.property_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("property%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    return res
-
-class Query_Order(ProtocolBuffer.ProtocolMessage):
-
-
-  ASCENDING    =    1
-  DESCENDING   =    2
-
-  _Direction_NAMES = {
-    1: "ASCENDING",
-    2: "DESCENDING",
-  }
-
-  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
-  Direction_Name = classmethod(Direction_Name)
-
-  has_property_ = 0
-  property_ = ""
-  has_direction_ = 0
-  direction_ = 1
-
-  def __init__(self, contents=None):
-    if contents is not None: self.MergeFromString(contents)
-
-  def property(self): return self.property_
-
-  def set_property(self, x):
-    self.has_property_ = 1
-    self.property_ = x
-
-  def clear_property(self):
-    if self.has_property_:
-      self.has_property_ = 0
-      self.property_ = ""
-
-  def has_property(self): return self.has_property_
-
-  def direction(self): return self.direction_
-
-  def set_direction(self, x):
-    self.has_direction_ = 1
-    self.direction_ = x
-
-  def clear_direction(self):
-    if self.has_direction_:
-      self.has_direction_ = 0
-      self.direction_ = 1
-
-  def has_direction(self): return self.has_direction_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_property()): self.set_property(x.property())
-    if (x.has_direction()): self.set_direction(x.direction())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_property_ != x.has_property_: return 0
-    if self.has_property_ and self.property_ != x.property_: return 0
-    if self.has_direction_ != x.has_direction_: return 0
-    if self.has_direction_ and self.direction_ != x.direction_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_property_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: property not set.')
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(len(self.property_))
-    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_property_):
-      n += 1
-      n += self.lengthString(len(self.property_))
-    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
-    return n
-
-  def Clear(self):
-    self.clear_property()
-    self.clear_direction()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(82)
-    out.putPrefixedString(self.property_)
-    if (self.has_direction_):
-      out.putVarInt32(88)
-      out.putVarInt32(self.direction_)
-
-  def OutputPartial(self, out):
-    if (self.has_property_):
-      out.putVarInt32(82)
-      out.putPrefixedString(self.property_)
-    if (self.has_direction_):
-      out.putVarInt32(88)
-      out.putVarInt32(self.direction_)
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 76: break
-      if tt == 82:
-        self.set_property(d.getPrefixedString())
-        continue
-      if tt == 88:
-        self.set_direction(d.getVarInt32())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
-    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
-    return res
-
-class Query(ProtocolBuffer.ProtocolMessage):
-
-
-  ORDER_FIRST  =    1
-  ANCESTOR_FIRST =    2
-  FILTER_FIRST =    3
-
-  _Hint_NAMES = {
-    1: "ORDER_FIRST",
-    2: "ANCESTOR_FIRST",
-    3: "FILTER_FIRST",
-  }
-
-  def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "")
-  Hint_Name = classmethod(Hint_Name)
-
-  has_app_ = 0
-  app_ = ""
-  has_name_space_ = 0
-  name_space_ = ""
-  has_kind_ = 0
-  kind_ = ""
-  has_ancestor_ = 0
-  ancestor_ = None
-  has_search_query_ = 0
-  search_query_ = ""
-  has_hint_ = 0
-  hint_ = 0
-  has_count_ = 0
-  count_ = 0
-  has_offset_ = 0
-  offset_ = 0
-  has_limit_ = 0
-  limit_ = 0
-  has_compiled_cursor_ = 0
-  compiled_cursor_ = None
-  has_end_compiled_cursor_ = 0
-  end_compiled_cursor_ = None
-  has_require_perfect_plan_ = 0
-  require_perfect_plan_ = 0
-  has_keys_only_ = 0
-  keys_only_ = 0
-  has_transaction_ = 0
-  transaction_ = None
-  has_compile_ = 0
-  compile_ = 0
-  has_failover_ms_ = 0
-  failover_ms_ = 0
-  has_strong_ = 0
-  strong_ = 0
-  has_distinct_ = 0
-  distinct_ = 0
-  has_min_safe_time_seconds_ = 0
-  min_safe_time_seconds_ = 0
-  has_persist_offset_ = 0
-  persist_offset_ = 0
-
-  def __init__(self, contents=None):
-    self.filter_ = []
-    self.order_ = []
-    self.composite_index_ = []
-    self.property_name_ = []
-    self.group_by_property_name_ = []
-    self.safe_replica_name_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def app(self): return self.app_
-
-  def set_app(self, x):
-    self.has_app_ = 1
-    self.app_ = x
-
-  def clear_app(self):
-    if self.has_app_:
-      self.has_app_ = 0
-      self.app_ = ""
-
-  def has_app(self): return self.has_app_
-
-  def name_space(self): return self.name_space_
-
-  def set_name_space(self, x):
-    self.has_name_space_ = 1
-    self.name_space_ = x
-
-  def clear_name_space(self):
-    if self.has_name_space_:
-      self.has_name_space_ = 0
-      self.name_space_ = ""
-
-  def has_name_space(self): return self.has_name_space_
-
-  def kind(self): return self.kind_
-
-  def set_kind(self, x):
-    self.has_kind_ = 1
-    self.kind_ = x
-
-  def clear_kind(self):
-    if self.has_kind_:
-      self.has_kind_ = 0
-      self.kind_ = ""
-
-  def has_kind(self): return self.has_kind_
-
-  def ancestor(self):
-    if self.ancestor_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.ancestor_ is None: self.ancestor_ = Reference()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.ancestor_
-
-  def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
-
-  def clear_ancestor(self):
-
-    if self.has_ancestor_:
-      self.has_ancestor_ = 0;
-      if self.ancestor_ is not None: self.ancestor_.Clear()
-
-  def has_ancestor(self): return self.has_ancestor_
-
-  def filter_size(self): return len(self.filter_)
-  def filter_list(self): return self.filter_
-
-  def filter(self, i):
-    return self.filter_[i]
-
-  def mutable_filter(self, i):
-    return self.filter_[i]
-
-  def add_filter(self):
-    x = Query_Filter()
-    self.filter_.append(x)
-    return x
-
-  def clear_filter(self):
-    self.filter_ = []
-  def search_query(self): return self.search_query_
-
-  def set_search_query(self, x):
-    self.has_search_query_ = 1
-    self.search_query_ = x
-
-  def clear_search_query(self):
-    if self.has_search_query_:
-      self.has_search_query_ = 0
-      self.search_query_ = ""
-
-  def has_search_query(self): return self.has_search_query_
-
-  def order_size(self): return len(self.order_)
-  def order_list(self): return self.order_
-
-  def order(self, i):
-    return self.order_[i]
-
-  def mutable_order(self, i):
-    return self.order_[i]
-
-  def add_order(self):
-    x = Query_Order()
-    self.order_.append(x)
-    return x
-
-  def clear_order(self):
-    self.order_ = []
-  def hint(self): return self.hint_
-
-  def set_hint(self, x):
-    self.has_hint_ = 1
-    self.hint_ = x
-
-  def clear_hint(self):
-    if self.has_hint_:
-      self.has_hint_ = 0
-      self.hint_ = 0
-
-  def has_hint(self): return self.has_hint_
-
-  def count(self): return self.count_
-
-  def set_count(self, x):
-    self.has_count_ = 1
-    self.count_ = x
-
-  def clear_count(self):
-    if self.has_count_:
-      self.has_count_ = 0
-      self.count_ = 0
-
-  def has_count(self): return self.has_count_
-
-  def offset(self): return self.offset_
-
-  def set_offset(self, x):
-    self.has_offset_ = 1
-    self.offset_ = x
-
-  def clear_offset(self):
-    if self.has_offset_:
-      self.has_offset_ = 0
-      self.offset_ = 0
-
-  def has_offset(self): return self.has_offset_
-
-  def limit(self): return self.limit_
-
-  def set_limit(self, x):
-    self.has_limit_ = 1
-    self.limit_ = x
-
-  def clear_limit(self):
-    if self.has_limit_:
-      self.has_limit_ = 0
-      self.limit_ = 0
-
-  def has_limit(self): return self.has_limit_
-
-  def compiled_cursor(self):
-    if self.compiled_cursor_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.compiled_cursor_
-
-  def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
-
-  def clear_compiled_cursor(self):
-
-    if self.has_compiled_cursor_:
-      self.has_compiled_cursor_ = 0;
-      if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
-
-  def has_compiled_cursor(self): return self.has_compiled_cursor_
-
-  def end_compiled_cursor(self):
-    if self.end_compiled_cursor_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.end_compiled_cursor_ is None: self.end_compiled_cursor_ = CompiledCursor()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.end_compiled_cursor_
-
-  def mutable_end_compiled_cursor(self): self.has_end_compiled_cursor_ = 1; return self.end_compiled_cursor()
-
-  def clear_end_compiled_cursor(self):
-
-    if self.has_end_compiled_cursor_:
-      self.has_end_compiled_cursor_ = 0;
-      if self.end_compiled_cursor_ is not None: self.end_compiled_cursor_.Clear()
-
-  def has_end_compiled_cursor(self): return self.has_end_compiled_cursor_
-
-  def composite_index_size(self): return len(self.composite_index_)
-  def composite_index_list(self): return self.composite_index_
-
-  def composite_index(self, i):
-    return self.composite_index_[i]
-
-  def mutable_composite_index(self, i):
-    return self.composite_index_[i]
-
-  def add_composite_index(self):
-    x = CompositeIndex()
-    self.composite_index_.append(x)
-    return x
-
-  def clear_composite_index(self):
-    self.composite_index_ = []
-  def require_perfect_plan(self): return self.require_perfect_plan_
-
-  def set_require_perfect_plan(self, x):
-    self.has_require_perfect_plan_ = 1
-    self.require_perfect_plan_ = x
-
-  def clear_require_perfect_plan(self):
-    if self.has_require_perfect_plan_:
-      self.has_require_perfect_plan_ = 0
-      self.require_perfect_plan_ = 0
-
-  def has_require_perfect_plan(self): return self.has_require_perfect_plan_
-
-  def keys_only(self): return self.keys_only_
-
-  def set_keys_only(self, x):
-    self.has_keys_only_ = 1
-    self.keys_only_ = x
-
-  def clear_keys_only(self):
-    if self.has_keys_only_:
-      self.has_keys_only_ = 0
-      self.keys_only_ = 0
-
-  def has_keys_only(self): return self.has_keys_only_
-
-  def transaction(self):
-    if self.transaction_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.transaction_ is None: self.transaction_ = Transaction()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.transaction_
-
-  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
-
-  def clear_transaction(self):
-
-    if self.has_transaction_:
-      self.has_transaction_ = 0;
-      if self.transaction_ is not None: self.transaction_.Clear()
-
-  def has_transaction(self): return self.has_transaction_
-
-  def compile(self): return self.compile_
-
-  def set_compile(self, x):
-    self.has_compile_ = 1
-    self.compile_ = x
-
-  def clear_compile(self):
-    if self.has_compile_:
-      self.has_compile_ = 0
-      self.compile_ = 0
-
-  def has_compile(self): return self.has_compile_
-
-  def failover_ms(self): return self.failover_ms_
-
-  def set_failover_ms(self, x):
-    self.has_failover_ms_ = 1
-    self.failover_ms_ = x
-
-  def clear_failover_ms(self):
-    if self.has_failover_ms_:
-      self.has_failover_ms_ = 0
-      self.failover_ms_ = 0
-
-  def has_failover_ms(self): return self.has_failover_ms_
-
-  def strong(self): return self.strong_
-
-  def set_strong(self, x):
-    self.has_strong_ = 1
-    self.strong_ = x
-
-  def clear_strong(self):
-    if self.has_strong_:
-      self.has_strong_ = 0
-      self.strong_ = 0
-
-  def has_strong(self): return self.has_strong_
-
-  def property_name_size(self): return len(self.property_name_)
-  def property_name_list(self): return self.property_name_
-
-  def property_name(self, i):
-    return self.property_name_[i]
-
-  def set_property_name(self, i, x):
-    self.property_name_[i] = x
-
-  def add_property_name(self, x):
-    self.property_name_.append(x)
-
-  def clear_property_name(self):
-    self.property_name_ = []
-
-  def group_by_property_name_size(self): return len(self.group_by_property_name_)
-  def group_by_property_name_list(self): return self.group_by_property_name_
-
-  def group_by_property_name(self, i):
-    return self.group_by_property_name_[i]
-
-  def set_group_by_property_name(self, i, x):
-    self.group_by_property_name_[i] = x
-
-  def add_group_by_property_name(self, x):
-    self.group_by_property_name_.append(x)
-
-  def clear_group_by_property_name(self):
-    self.group_by_property_name_ = []
-
-  def distinct(self): return self.distinct_
-
-  def set_distinct(self, x):
-    self.has_distinct_ = 1
-    self.distinct_ = x
-
-  def clear_distinct(self):
-    if self.has_distinct_:
-      self.has_distinct_ = 0
-      self.distinct_ = 0
-
-  def has_distinct(self): return self.has_distinct_
-
-  def min_safe_time_seconds(self): return self.min_safe_time_seconds_
-
-  def set_min_safe_time_seconds(self, x):
-    self.has_min_safe_time_seconds_ = 1
-    self.min_safe_time_seconds_ = x
-
-  def clear_min_safe_time_seconds(self):
-    if self.has_min_safe_time_seconds_:
-      self.has_min_safe_time_seconds_ = 0
-      self.min_safe_time_seconds_ = 0
-
-  def has_min_safe_time_seconds(self): return self.has_min_safe_time_seconds_
-
-  def safe_replica_name_size(self): return len(self.safe_replica_name_)
-  def safe_replica_name_list(self): return self.safe_replica_name_
-
-  def safe_replica_name(self, i):
-    return self.safe_replica_name_[i]
-
-  def set_safe_replica_name(self, i, x):
-    self.safe_replica_name_[i] = x
-
-  def add_safe_replica_name(self, x):
-    self.safe_replica_name_.append(x)
-
-  def clear_safe_replica_name(self):
-    self.safe_replica_name_ = []
-
-  def persist_offset(self): return self.persist_offset_
-
-  def set_persist_offset(self, x):
-    self.has_persist_offset_ = 1
-    self.persist_offset_ = x
-
-  def clear_persist_offset(self):
-    if self.has_persist_offset_:
-      self.has_persist_offset_ = 0
-      self.persist_offset_ = 0
-
-  def has_persist_offset(self): return self.has_persist_offset_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_app()): self.set_app(x.app())
-    if (x.has_name_space()): self.set_name_space(x.name_space())
-    if (x.has_kind()): self.set_kind(x.kind())
-    if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
-    for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i))
-    if (x.has_search_query()): self.set_search_query(x.search_query())
-    for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
-    if (x.has_hint()): self.set_hint(x.hint())
-    if (x.has_count()): self.set_count(x.count())
-    if (x.has_offset()): self.set_offset(x.offset())
-    if (x.has_limit()): self.set_limit(x.limit())
-    if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
-    if (x.has_end_compiled_cursor()): self.mutable_end_compiled_cursor().MergeFrom(x.end_compiled_cursor())
-    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
-    if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
-    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
-    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
-    if (x.has_compile()): self.set_compile(x.compile())
-    if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
-    if (x.has_strong()): self.set_strong(x.strong())
-    for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
-    for i in xrange(x.group_by_property_name_size()): self.add_group_by_property_name(x.group_by_property_name(i))
-    if (x.has_distinct()): self.set_distinct(x.distinct())
-    if (x.has_min_safe_time_seconds()): self.set_min_safe_time_seconds(x.min_safe_time_seconds())
-    for i in xrange(x.safe_replica_name_size()): self.add_safe_replica_name(x.safe_replica_name(i))
-    if (x.has_persist_offset()): self.set_persist_offset(x.persist_offset())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_app_ != x.has_app_: return 0
-    if self.has_app_ and self.app_ != x.app_: return 0
-    if self.has_name_space_ != x.has_name_space_: return 0
-    if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
-    if self.has_kind_ != x.has_kind_: return 0
-    if self.has_kind_ and self.kind_ != x.kind_: return 0
-    if self.has_ancestor_ != x.has_ancestor_: return 0
-    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
-    if len(self.filter_) != len(x.filter_): return 0
-    for e1, e2 in zip(self.filter_, x.filter_):
-      if e1 != e2: return 0
-    if self.has_search_query_ != x.has_search_query_: return 0
-    if self.has_search_query_ and self.search_query_ != x.search_query_: return 0
-    if len(self.order_) != len(x.order_): return 0
-    for e1, e2 in zip(self.order_, x.order_):
-      if e1 != e2: return 0
-    if self.has_hint_ != x.has_hint_: return 0
-    if self.has_hint_ and self.hint_ != x.hint_: return 0
-    if self.has_count_ != x.has_count_: return 0
-    if self.has_count_ and self.count_ != x.count_: return 0
-    if self.has_offset_ != x.has_offset_: return 0
-    if self.has_offset_ and self.offset_ != x.offset_: return 0
-    if self.has_limit_ != x.has_limit_: return 0
-    if self.has_limit_ and self.limit_ != x.limit_: return 0
-    if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
-    if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
-    if self.has_end_compiled_cursor_ != x.has_end_compiled_cursor_: return 0
-    if self.has_end_compiled_cursor_ and self.end_compiled_cursor_ != x.end_compiled_cursor_: return 0
-    if len(self.composite_index_) != len(x.composite_index_): return 0
-    for e1, e2 in zip(self.composite_index_, x.composite_index_):
-      if e1 != e2: return 0
-    if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
-    if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
-    if self.has_keys_only_ != x.has_keys_only_: return 0
-    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
-    if self.has_transaction_ != x.has_transaction_: return 0
-    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
-    if self.has_compile_ != x.has_compile_: return 0
-    if self.has_compile_ and self.compile_ != x.compile_: return 0
-    if self.has_failover_ms_ != x.has_failover_ms_: return 0
-    if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
-    if self.has_strong_ != x.has_strong_: return 0
-    if self.has_strong_ and self.strong_ != x.strong_: return 0
-    if len(self.property_name_) != len(x.property_name_): return 0
-    for e1, e2 in zip(self.property_name_, x.property_name_):
-      if e1 != e2: return 0
-    if len(self.group_by_property_name_) != len(x.group_by_property_name_): return 0
-    for e1, e2 in zip(self.group_by_property_name_, x.group_by_property_name_):
-      if e1 != e2: return 0
-    if self.has_distinct_ != x.has_distinct_: return 0
-    if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
-    if self.has_min_safe_time_seconds_ != x.has_min_safe_time_seconds_: return 0
-    if self.has_min_safe_time_seconds_ and self.min_safe_time_seconds_ != x.min_safe_time_seconds_: return 0
-    if len(self.safe_replica_name_) != len(x.safe_replica_name_): return 0
-    for e1, e2 in zip(self.safe_replica_name_, x.safe_replica_name_):
-      if e1 != e2: return 0
-    if self.has_persist_offset_ != x.has_persist_offset_: return 0
-    if self.has_persist_offset_ and self.persist_offset_ != x.persist_offset_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_app_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: app not set.')
-    if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
-    for p in self.filter_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    for p in self.order_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
-    if (self.has_end_compiled_cursor_ and not self.end_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
-    for p in self.composite_index_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(len(self.app_))
-    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
-    if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
-    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
-    n += 2 * len(self.filter_)
-    for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSize()
-    if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
-    n += 2 * len(self.order_)
-    for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
-    if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
-    if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
-    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
-    if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
-    if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSize())
-    if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSize())
-    n += 2 * len(self.composite_index_)
-    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
-    if (self.has_require_perfect_plan_): n += 3
-    if (self.has_keys_only_): n += 3
-    if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSize())
-    if (self.has_compile_): n += 3
-    if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
-    if (self.has_strong_): n += 3
-    n += 2 * len(self.property_name_)
-    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
-    n += 2 * len(self.group_by_property_name_)
-    for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
-    if (self.has_distinct_): n += 3
-    if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
-    n += 2 * len(self.safe_replica_name_)
-    for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
-    if (self.has_persist_offset_): n += 3
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_app_):
-      n += 1
-      n += self.lengthString(len(self.app_))
-    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
-    if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
-    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
-    n += 2 * len(self.filter_)
-    for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSizePartial()
-    if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
-    n += 2 * len(self.order_)
-    for i in xrange(len(self.order_)): n += self.order_[i].ByteSizePartial()
-    if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
-    if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
-    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
-    if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
-    if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
-    if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSizePartial())
-    n += 2 * len(self.composite_index_)
-    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
-    if (self.has_require_perfect_plan_): n += 3
-    if (self.has_keys_only_): n += 3
-    if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSizePartial())
-    if (self.has_compile_): n += 3
-    if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
-    if (self.has_strong_): n += 3
-    n += 2 * len(self.property_name_)
-    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
-    n += 2 * len(self.group_by_property_name_)
-    for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
-    if (self.has_distinct_): n += 3
-    if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
-    n += 2 * len(self.safe_replica_name_)
-    for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
-    if (self.has_persist_offset_): n += 3
-    return n
-
-  def Clear(self):
-    self.clear_app()
-    self.clear_name_space()
-    self.clear_kind()
-    self.clear_ancestor()
-    self.clear_filter()
-    self.clear_search_query()
-    self.clear_order()
-    self.clear_hint()
-    self.clear_count()
-    self.clear_offset()
-    self.clear_limit()
-    self.clear_compiled_cursor()
-    self.clear_end_compiled_cursor()
-    self.clear_composite_index()
-    self.clear_require_perfect_plan()
-    self.clear_keys_only()
-    self.clear_transaction()
-    self.clear_compile()
-    self.clear_failover_ms()
-    self.clear_strong()
-    self.clear_property_name()
-    self.clear_group_by_property_name()
-    self.clear_distinct()
-    self.clear_min_safe_time_seconds()
-    self.clear_safe_replica_name()
-    self.clear_persist_offset()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(10)
-    out.putPrefixedString(self.app_)
-    if (self.has_kind_):
-      out.putVarInt32(26)
-      out.putPrefixedString(self.kind_)
-    for i in xrange(len(self.filter_)):
-      out.putVarInt32(35)
-      self.filter_[i].OutputUnchecked(out)
-      out.putVarInt32(36)
-    if (self.has_search_query_):
-      out.putVarInt32(66)
-      out.putPrefixedString(self.search_query_)
-    for i in xrange(len(self.order_)):
-      out.putVarInt32(75)
-      self.order_[i].OutputUnchecked(out)
-      out.putVarInt32(76)
-    if (self.has_offset_):
-      out.putVarInt32(96)
-      out.putVarInt32(self.offset_)
-    if (self.has_limit_):
-      out.putVarInt32(128)
-      out.putVarInt32(self.limit_)
-    if (self.has_ancestor_):
-      out.putVarInt32(138)
-      out.putVarInt32(self.ancestor_.ByteSize())
-      self.ancestor_.OutputUnchecked(out)
-    if (self.has_hint_):
-      out.putVarInt32(144)
-      out.putVarInt32(self.hint_)
-    for i in xrange(len(self.composite_index_)):
-      out.putVarInt32(154)
-      out.putVarInt32(self.composite_index_[i].ByteSize())
-      self.composite_index_[i].OutputUnchecked(out)
-    if (self.has_require_perfect_plan_):
-      out.putVarInt32(160)
-      out.putBoolean(self.require_perfect_plan_)
-    if (self.has_keys_only_):
-      out.putVarInt32(168)
-      out.putBoolean(self.keys_only_)
-    if (self.has_transaction_):
-      out.putVarInt32(178)
-      out.putVarInt32(self.transaction_.ByteSize())
-      self.transaction_.OutputUnchecked(out)
-    if (self.has_count_):
-      out.putVarInt32(184)
-      out.putVarInt32(self.count_)
-    if (self.has_distinct_):
-      out.putVarInt32(192)
-      out.putBoolean(self.distinct_)
-    if (self.has_compile_):
-      out.putVarInt32(200)
-      out.putBoolean(self.compile_)
-    if (self.has_failover_ms_):
-      out.putVarInt32(208)
-      out.putVarInt64(self.failover_ms_)
-    if (self.has_name_space_):
-      out.putVarInt32(234)
-      out.putPrefixedString(self.name_space_)
-    if (self.has_compiled_cursor_):
-      out.putVarInt32(242)
-      out.putVarInt32(self.compiled_cursor_.ByteSize())
-      self.compiled_cursor_.OutputUnchecked(out)
-    if (self.has_end_compiled_cursor_):
-      out.putVarInt32(250)
-      out.putVarInt32(self.end_compiled_cursor_.ByteSize())
-      self.end_compiled_cursor_.OutputUnchecked(out)
-    if (self.has_strong_):
-      out.putVarInt32(256)
-      out.putBoolean(self.strong_)
-    for i in xrange(len(self.property_name_)):
-      out.putVarInt32(266)
-      out.putPrefixedString(self.property_name_[i])
-    for i in xrange(len(self.group_by_property_name_)):
-      out.putVarInt32(274)
-      out.putPrefixedString(self.group_by_property_name_[i])
-    if (self.has_min_safe_time_seconds_):
-      out.putVarInt32(280)
-      out.putVarInt64(self.min_safe_time_seconds_)
-    for i in xrange(len(self.safe_replica_name_)):
-      out.putVarInt32(290)
-      out.putPrefixedString(self.safe_replica_name_[i])
-    if (self.has_persist_offset_):
-      out.putVarInt32(296)
-      out.putBoolean(self.persist_offset_)
-
-  def OutputPartial(self, out):
-    if (self.has_app_):
-      out.putVarInt32(10)
-      out.putPrefixedString(self.app_)
-    if (self.has_kind_):
-      out.putVarInt32(26)
-      out.putPrefixedString(self.kind_)
-    for i in xrange(len(self.filter_)):
-      out.putVarInt32(35)
-      self.filter_[i].OutputPartial(out)
-      out.putVarInt32(36)
-    if (self.has_search_query_):
-      out.putVarInt32(66)
-      out.putPrefixedString(self.search_query_)
-    for i in xrange(len(self.order_)):
-      out.putVarInt32(75)
-      self.order_[i].OutputPartial(out)
-      out.putVarInt32(76)
-    if (self.has_offset_):
-      out.putVarInt32(96)
-      out.putVarInt32(self.offset_)
-    if (self.has_limit_):
-      out.putVarInt32(128)
-      out.putVarInt32(self.limit_)
-    if (self.has_ancestor_):
-      out.putVarInt32(138)
-      out.putVarInt32(self.ancestor_.ByteSizePartial())
-      self.ancestor_.OutputPartial(out)
-    if (self.has_hint_):
-      out.putVarInt32(144)
-      out.putVarInt32(self.hint_)
-    for i in xrange(len(self.composite_index_)):
-      out.putVarInt32(154)
-      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
-      self.composite_index_[i].OutputPartial(out)
-    if (self.has_require_perfect_plan_):
-      out.putVarInt32(160)
-      out.putBoolean(self.require_perfect_plan_)
-    if (self.has_keys_only_):
-      out.putVarInt32(168)
-      out.putBoolean(self.keys_only_)
-    if (self.has_transaction_):
-      out.putVarInt32(178)
-      out.putVarInt32(self.transaction_.ByteSizePartial())
-      self.transaction_.OutputPartial(out)
-    if (self.has_count_):
-      out.putVarInt32(184)
-      out.putVarInt32(self.count_)
-    if (self.has_distinct_):
-      out.putVarInt32(192)
-      out.putBoolean(self.distinct_)
-    if (self.has_compile_):
-      out.putVarInt32(200)
-      out.putBoolean(self.compile_)
-    if (self.has_failover_ms_):
-      out.putVarInt32(208)
-      out.putVarInt64(self.failover_ms_)
-    if (self.has_name_space_):
-      out.putVarInt32(234)
-      out.putPrefixedString(self.name_space_)
-    if (self.has_compiled_cursor_):
-      out.putVarInt32(242)
-      out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
-      self.compiled_cursor_.OutputPartial(out)
-    if (self.has_end_compiled_cursor_):
-      out.putVarInt32(250)
-      out.putVarInt32(self.end_compiled_cursor_.ByteSizePartial())
-      self.end_compiled_cursor_.OutputPartial(out)
-    if (self.has_strong_):
-      out.putVarInt32(256)
-      out.putBoolean(self.strong_)
-    for i in xrange(len(self.property_name_)):
-      out.putVarInt32(266)
-      out.putPrefixedString(self.property_name_[i])
-    for i in xrange(len(self.group_by_property_name_)):
-      out.putVarInt32(274)
-      out.putPrefixedString(self.group_by_property_name_[i])
-    if (self.has_min_safe_time_seconds_):
-      out.putVarInt32(280)
-      out.putVarInt64(self.min_safe_time_seconds_)
-    for i in xrange(len(self.safe_replica_name_)):
-      out.putVarInt32(290)
-      out.putPrefixedString(self.safe_replica_name_[i])
-    if (self.has_persist_offset_):
-      out.putVarInt32(296)
-      out.putBoolean(self.persist_offset_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        self.set_app(d.getPrefixedString())
-        continue
-      if tt == 26:
-        self.set_kind(d.getPrefixedString())
-        continue
-      if tt == 35:
-        self.add_filter().TryMerge(d)
-        continue
-      if tt == 66:
-        self.set_search_query(d.getPrefixedString())
-        continue
-      if tt == 75:
-        self.add_order().TryMerge(d)
-        continue
-      if tt == 96:
-        self.set_offset(d.getVarInt32())
-        continue
-      if tt == 128:
-        self.set_limit(d.getVarInt32())
-        continue
-      if tt == 138:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_ancestor().TryMerge(tmp)
-        continue
-      if tt == 144:
-        self.set_hint(d.getVarInt32())
-        continue
-      if tt == 154:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_composite_index().TryMerge(tmp)
-        continue
-      if tt == 160:
-        self.set_require_perfect_plan(d.getBoolean())
-        continue
-      if tt == 168:
-        self.set_keys_only(d.getBoolean())
-        continue
-      if tt == 178:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_transaction().TryMerge(tmp)
-        continue
-      if tt == 184:
-        self.set_count(d.getVarInt32())
-        continue
-      if tt == 192:
-        self.set_distinct(d.getBoolean())
-        continue
-      if tt == 200:
-        self.set_compile(d.getBoolean())
-        continue
-      if tt == 208:
-        self.set_failover_ms(d.getVarInt64())
-        continue
-      if tt == 234:
-        self.set_name_space(d.getPrefixedString())
-        continue
-      if tt == 242:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_compiled_cursor().TryMerge(tmp)
-        continue
-      if tt == 250:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_end_compiled_cursor().TryMerge(tmp)
-        continue
-      if tt == 256:
-        self.set_strong(d.getBoolean())
-        continue
-      if tt == 266:
-        self.add_property_name(d.getPrefixedString())
-        continue
-      if tt == 274:
-        self.add_group_by_property_name(d.getPrefixedString())
-        continue
-      if tt == 280:
-        self.set_min_safe_time_seconds(d.getVarInt64())
-        continue
-      if tt == 290:
-        self.add_safe_replica_name(d.getPrefixedString())
-        continue
-      if tt == 296:
-        self.set_persist_offset(d.getBoolean())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
-    if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
-    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
-    if self.has_ancestor_:
-      res+=prefix+"ancestor <\n"
-      res+=self.ancestor_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    cnt=0
-    for e in self.filter_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("Filter%s {\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-      cnt+=1
-    if self.has_search_query_: res+=prefix+("search_query: %s\n" % self.DebugFormatString(self.search_query_))
-    cnt=0
-    for e in self.order_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("Order%s {\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-      cnt+=1
-    if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
-    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
-    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
-    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
-    if self.has_compiled_cursor_:
-      res+=prefix+"compiled_cursor <\n"
-      res+=self.compiled_cursor_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_end_compiled_cursor_:
-      res+=prefix+"end_compiled_cursor <\n"
-      res+=self.end_compiled_cursor_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    cnt=0
-    for e in self.composite_index_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("composite_index%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
-    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
-    if self.has_transaction_:
-      res+=prefix+"transaction <\n"
-      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
-    if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
-    if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
-    cnt=0
-    for e in self.property_name_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
-      cnt+=1
-    cnt=0
-    for e in self.group_by_property_name_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("group_by_property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
-      cnt+=1
-    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
-    if self.has_min_safe_time_seconds_: res+=prefix+("min_safe_time_seconds: %s\n" % self.DebugFormatInt64(self.min_safe_time_seconds_))
-    cnt=0
-    for e in self.safe_replica_name_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("safe_replica_name%s: %s\n" % (elm, self.DebugFormatString(e)))
-      cnt+=1
-    if self.has_persist_offset_: res+=prefix+("persist_offset: %s\n" % self.DebugFormatBool(self.persist_offset_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kapp = 1
-  kname_space = 29
-  kkind = 3
-  kancestor = 17
-  kFilterGroup = 4
-  kFilterop = 6
-  kFilterproperty = 14
-  ksearch_query = 8
-  kOrderGroup = 9
-  kOrderproperty = 10
-  kOrderdirection = 11
-  khint = 18
-  kcount = 23
-  koffset = 12
-  klimit = 16
-  kcompiled_cursor = 30
-  kend_compiled_cursor = 31
-  kcomposite_index = 19
-  krequire_perfect_plan = 20
-  kkeys_only = 21
-  ktransaction = 22
-  kcompile = 25
-  kfailover_ms = 26
-  kstrong = 32
-  kproperty_name = 33
-  kgroup_by_property_name = 34
-  kdistinct = 24
-  kmin_safe_time_seconds = 35
-  ksafe_replica_name = 36
-  kpersist_offset = 37
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "app",
-    3: "kind",
-    4: "Filter",
-    6: "op",
-    8: "search_query",
-    9: "Order",
-    10: "property",
-    11: "direction",
-    12: "offset",
-    14: "property",
-    16: "limit",
-    17: "ancestor",
-    18: "hint",
-    19: "composite_index",
-    20: "require_perfect_plan",
-    21: "keys_only",
-    22: "transaction",
-    23: "count",
-    24: "distinct",
-    25: "compile",
-    26: "failover_ms",
-    29: "name_space",
-    30: "compiled_cursor",
-    31: "end_compiled_cursor",
-    32: "strong",
-    33: "property_name",
-    34: "group_by_property_name",
-    35: "min_safe_time_seconds",
-    36: "safe_replica_name",
-    37: "persist_offset",
-  }, 37)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.STRING,
-    4: ProtocolBuffer.Encoder.STARTGROUP,
-    6: ProtocolBuffer.Encoder.NUMERIC,
-    8: ProtocolBuffer.Encoder.STRING,
-    9: ProtocolBuffer.Encoder.STARTGROUP,
-    10: ProtocolBuffer.Encoder.STRING,
-    11: ProtocolBuffer.Encoder.NUMERIC,
-    12: ProtocolBuffer.Encoder.NUMERIC,
-    14: ProtocolBuffer.Encoder.STRING,
-    16: ProtocolBuffer.Encoder.NUMERIC,
-    17: ProtocolBuffer.Encoder.STRING,
-    18: ProtocolBuffer.Encoder.NUMERIC,
-    19: ProtocolBuffer.Encoder.STRING,
-    20: ProtocolBuffer.Encoder.NUMERIC,
-    21: ProtocolBuffer.Encoder.NUMERIC,
-    22: ProtocolBuffer.Encoder.STRING,
-    23: ProtocolBuffer.Encoder.NUMERIC,
-    24: ProtocolBuffer.Encoder.NUMERIC,
-    25: ProtocolBuffer.Encoder.NUMERIC,
-    26: ProtocolBuffer.Encoder.NUMERIC,
-    29: ProtocolBuffer.Encoder.STRING,
-    30: ProtocolBuffer.Encoder.STRING,
-    31: ProtocolBuffer.Encoder.STRING,
-    32: ProtocolBuffer.Encoder.NUMERIC,
-    33: ProtocolBuffer.Encoder.STRING,
-    34: ProtocolBuffer.Encoder.STRING,
-    35: ProtocolBuffer.Encoder.NUMERIC,
-    36: ProtocolBuffer.Encoder.STRING,
-    37: ProtocolBuffer.Encoder.NUMERIC,
-  }, 37, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Query'
-class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage):
-  has_index_name_ = 0
-  index_name_ = ""
-  has_start_key_ = 0
-  start_key_ = ""
-  has_start_inclusive_ = 0
-  start_inclusive_ = 0
-  has_end_key_ = 0
-  end_key_ = ""
-  has_end_inclusive_ = 0
-  end_inclusive_ = 0
-  has_end_unapplied_log_timestamp_us_ = 0
-  end_unapplied_log_timestamp_us_ = 0
-
-  def __init__(self, contents=None):
-    self.start_postfix_value_ = []
-    self.end_postfix_value_ = []
-    if contents is not None: self.MergeFromString(contents)
-
-  def index_name(self): return self.index_name_
-
-  def set_index_name(self, x):
-    self.has_index_name_ = 1
-    self.index_name_ = x
-
-  def clear_index_name(self):
-    if self.has_index_name_:
-      self.has_index_name_ = 0
-      self.index_name_ = ""
-
-  def has_index_name(self): return self.has_index_name_
-
-  def start_key(self): return self.start_key_
-
-  def set_start_key(self, x):
-    self.has_start_key_ = 1
-    self.start_key_ = x
-
-  def clear_start_key(self):
-    if self.has_start_key_:
-      self.has_start_key_ = 0
-      self.start_key_ = ""
-
-  def has_start_key(self): return self.has_start_key_
-
-  def start_inclusive(self): return self.start_inclusive_
-
-  def set_start_inclusive(self, x):
-    self.has_start_inclusive_ = 1
-    self.start_inclusive_ = x
-
-  def clear_start_inclusive(self):
-    if self.has_start_inclusive_:
-      self.has_start_inclusive_ = 0
-      self.start_inclusive_ = 0
-
-  def has_start_inclusive(self): return self.has_start_inclusive_
-
-  def end_key(self): return self.end_key_
-
-  def set_end_key(self, x):
-    self.has_end_key_ = 1
-    self.end_key_ = x
-
-  def clear_end_key(self):
-    if self.has_end_key_:
-      self.has_end_key_ = 0
-      self.end_key_ = ""
-
-  def has_end_key(self): return self.has_end_key_
-
-  def end_inclusive(self): return self.end_inclusive_
-
-  def set_end_inclusive(self, x):
-    self.has_end_inclusive_ = 1
-    self.end_inclusive_ = x
-
-  def clear_end_inclusive(self):
-    if self.has_end_inclusive_:
-      self.has_end_inclusive_ = 0
-      self.end_inclusive_ = 0
-
-  def has_end_inclusive(self): return self.has_end_inclusive_
-
-  def start_postfix_value_size(self): return len(self.start_postfix_value_)
-  def start_postfix_value_list(self): return self.start_postfix_value_
-
-  def start_postfix_value(self, i):
-    return self.start_postfix_value_[i]
-
-  def set_start_postfix_value(self, i, x):
-    self.start_postfix_value_[i] = x
-
-  def add_start_postfix_value(self, x):
-    self.start_postfix_value_.append(x)
-
-  def clear_start_postfix_value(self):
-    self.start_postfix_value_ = []
-
-  def end_postfix_value_size(self): return len(self.end_postfix_value_)
-  def end_postfix_value_list(self): return self.end_postfix_value_
-
-  def end_postfix_value(self, i):
-    return self.end_postfix_value_[i]
-
-  def set_end_postfix_value(self, i, x):
-    self.end_postfix_value_[i] = x
-
-  def add_end_postfix_value(self, x):
-    self.end_postfix_value_.append(x)
-
-  def clear_end_postfix_value(self):
-    self.end_postfix_value_ = []
-
-  def end_unapplied_log_timestamp_us(self): return self.end_unapplied_log_timestamp_us_
-
-  def set_end_unapplied_log_timestamp_us(self, x):
-    self.has_end_unapplied_log_timestamp_us_ = 1
-    self.end_unapplied_log_timestamp_us_ = x
-
-  def clear_end_unapplied_log_timestamp_us(self):
-    if self.has_end_unapplied_log_timestamp_us_:
-      self.has_end_unapplied_log_timestamp_us_ = 0
-      self.end_unapplied_log_timestamp_us_ = 0
-
-  def has_end_unapplied_log_timestamp_us(self): return self.has_end_unapplied_log_timestamp_us_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_index_name()): self.set_index_name(x.index_name())
-    if (x.has_start_key()): self.set_start_key(x.start_key())
-    if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
-    if (x.has_end_key()): self.set_end_key(x.end_key())
-    if (x.has_end_inclusive()): self.set_end_inclusive(x.end_inclusive())
-    for i in xrange(x.start_postfix_value_size()): self.add_start_postfix_value(x.start_postfix_value(i))
-    for i in xrange(x.end_postfix_value_size()): self.add_end_postfix_value(x.end_postfix_value(i))
-    if (x.has_end_unapplied_log_timestamp_us()): self.set_end_unapplied_log_timestamp_us(x.end_unapplied_log_timestamp_us())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_index_name_ != x.has_index_name_: return 0
-    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
-    if self.has_start_key_ != x.has_start_key_: return 0
-    if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
-    if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
-    if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
-    if self.has_end_key_ != x.has_end_key_: return 0
-    if self.has_end_key_ and self.end_key_ != x.end_key_: return 0
-    if self.has_end_inclusive_ != x.has_end_inclusive_: return 0
-    if self.has_end_inclusive_ and self.end_inclusive_ != x.end_inclusive_: return 0
-    if len(self.start_postfix_value_) != len(x.start_postfix_value_): return 0
-    for e1, e2 in zip(self.start_postfix_value_, x.start_postfix_value_):
-      if e1 != e2: return 0
-    if len(self.end_postfix_value_) != len(x.end_postfix_value_): return 0
-    for e1, e2 in zip(self.end_postfix_value_, x.end_postfix_value_):
-      if e1 != e2: return 0
-    if self.has_end_unapplied_log_timestamp_us_ != x.has_end_unapplied_log_timestamp_us_: return 0
-    if self.has_end_unapplied_log_timestamp_us_ and self.end_unapplied_log_timestamp_us_ != x.end_unapplied_log_timestamp_us_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
-    if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
-    if (self.has_start_inclusive_): n += 2
-    if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
-    if (self.has_end_inclusive_): n += 2
-    n += 2 * len(self.start_postfix_value_)
-    for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
-    n += 2 * len(self.end_postfix_value_)
-    for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
-    if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
-    if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
-    if (self.has_start_inclusive_): n += 2
-    if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
-    if (self.has_end_inclusive_): n += 2
-    n += 2 * len(self.start_postfix_value_)
-    for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
-    n += 2 * len(self.end_postfix_value_)
-    for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
-    if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
-    return n
-
-  def Clear(self):
-    self.clear_index_name()
-    self.clear_start_key()
-    self.clear_start_inclusive()
-    self.clear_end_key()
-    self.clear_end_inclusive()
-    self.clear_start_postfix_value()
-    self.clear_end_postfix_value()
-    self.clear_end_unapplied_log_timestamp_us()
-
-  def OutputUnchecked(self, out):
-    if (self.has_index_name_):
-      out.putVarInt32(18)
-      out.putPrefixedString(self.index_name_)
-    if (self.has_start_key_):
-      out.putVarInt32(26)
-      out.putPrefixedString(self.start_key_)
-    if (self.has_start_inclusive_):
-      out.putVarInt32(32)
-      out.putBoolean(self.start_inclusive_)
-    if (self.has_end_key_):
-      out.putVarInt32(42)
-      out.putPrefixedString(self.end_key_)
-    if (self.has_end_inclusive_):
-      out.putVarInt32(48)
-      out.putBoolean(self.end_inclusive_)
-    if (self.has_end_unapplied_log_timestamp_us_):
-      out.putVarInt32(152)
-      out.putVarInt64(self.end_unapplied_log_timestamp_us_)
-    for i in xrange(len(self.start_postfix_value_)):
-      out.putVarInt32(178)
-      out.putPrefixedString(self.start_postfix_value_[i])
-    for i in xrange(len(self.end_postfix_value_)):
-      out.putVarInt32(186)
-      out.putPrefixedString(self.end_postfix_value_[i])
-
-  def OutputPartial(self, out):
-    if (self.has_index_name_):
-      out.putVarInt32(18)
-      out.putPrefixedString(self.index_name_)
-    if (self.has_start_key_):
-      out.putVarInt32(26)
-      out.putPrefixedString(self.start_key_)
-    if (self.has_start_inclusive_):
-      out.putVarInt32(32)
-      out.putBoolean(self.start_inclusive_)
-    if (self.has_end_key_):
-      out.putVarInt32(42)
-      out.putPrefixedString(self.end_key_)
-    if (self.has_end_inclusive_):
-      out.putVarInt32(48)
-      out.putBoolean(self.end_inclusive_)
-    if (self.has_end_unapplied_log_timestamp_us_):
-      out.putVarInt32(152)
-      out.putVarInt64(self.end_unapplied_log_timestamp_us_)
-    for i in xrange(len(self.start_postfix_value_)):
-      out.putVarInt32(178)
-      out.putPrefixedString(self.start_postfix_value_[i])
-    for i in xrange(len(self.end_postfix_value_)):
-      out.putVarInt32(186)
-      out.putPrefixedString(self.end_postfix_value_[i])
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 12: break
-      if tt == 18:
-        self.set_index_name(d.getPrefixedString())
-        continue
-      if tt == 26:
-        self.set_start_key(d.getPrefixedString())
-        continue
-      if tt == 32:
-        self.set_start_inclusive(d.getBoolean())
-        continue
-      if tt == 42:
-        self.set_end_key(d.getPrefixedString())
-        continue
-      if tt == 48:
-        self.set_end_inclusive(d.getBoolean())
-        continue
-      if tt == 152:
-        self.set_end_unapplied_log_timestamp_us(d.getVarInt64())
-        continue
-      if tt == 178:
-        self.add_start_postfix_value(d.getPrefixedString())
-        continue
-      if tt == 186:
-        self.add_end_postfix_value(d.getPrefixedString())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
-    if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
-    if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
-    if self.has_end_key_: res+=prefix+("end_key: %s\n" % self.DebugFormatString(self.end_key_))
-    if self.has_end_inclusive_: res+=prefix+("end_inclusive: %s\n" % self.DebugFormatBool(self.end_inclusive_))
-    cnt=0
-    for e in self.start_postfix_value_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("start_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
-      cnt+=1
-    cnt=0
-    for e in self.end_postfix_value_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("end_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
-      cnt+=1
-    if self.has_end_unapplied_log_timestamp_us_: res+=prefix+("end_unapplied_log_timestamp_us: %s\n" % self.DebugFormatInt64(self.end_unapplied_log_timestamp_us_))
-    return res
-
-class CompiledQuery_MergeJoinScan(ProtocolBuffer.ProtocolMessage):
-  has_index_name_ = 0
-  index_name_ = ""
-  has_value_prefix_ = 0
-  value_prefix_ = 0
-
-  def __init__(self, contents=None):
-    self.prefix_value_ = []
-    if contents is not None: self.MergeFromString(contents)
-
-  def index_name(self): return self.index_name_
-
-  def set_index_name(self, x):
-    self.has_index_name_ = 1
-    self.index_name_ = x
-
-  def clear_index_name(self):
-    if self.has_index_name_:
-      self.has_index_name_ = 0
-      self.index_name_ = ""
-
-  def has_index_name(self): return self.has_index_name_
-
-  def prefix_value_size(self): return len(self.prefix_value_)
-  def prefix_value_list(self): return self.prefix_value_
-
-  def prefix_value(self, i):
-    return self.prefix_value_[i]
-
-  def set_prefix_value(self, i, x):
-    self.prefix_value_[i] = x
-
-  def add_prefix_value(self, x):
-    self.prefix_value_.append(x)
-
-  def clear_prefix_value(self):
-    self.prefix_value_ = []
-
-  def value_prefix(self): return self.value_prefix_
-
-  def set_value_prefix(self, x):
-    self.has_value_prefix_ = 1
-    self.value_prefix_ = x
-
-  def clear_value_prefix(self):
-    if self.has_value_prefix_:
-      self.has_value_prefix_ = 0
-      self.value_prefix_ = 0
-
-  def has_value_prefix(self): return self.has_value_prefix_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_index_name()): self.set_index_name(x.index_name())
-    for i in xrange(x.prefix_value_size()): self.add_prefix_value(x.prefix_value(i))
-    if (x.has_value_prefix()): self.set_value_prefix(x.value_prefix())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_index_name_ != x.has_index_name_: return 0
-    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
-    if len(self.prefix_value_) != len(x.prefix_value_): return 0
-    for e1, e2 in zip(self.prefix_value_, x.prefix_value_):
-      if e1 != e2: return 0
-    if self.has_value_prefix_ != x.has_value_prefix_: return 0
-    if self.has_value_prefix_ and self.value_prefix_ != x.value_prefix_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_index_name_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: index_name not set.')
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(len(self.index_name_))
-    n += 1 * len(self.prefix_value_)
-    for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
-    if (self.has_value_prefix_): n += 3
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_index_name_):
-      n += 1
-      n += self.lengthString(len(self.index_name_))
-    n += 1 * len(self.prefix_value_)
-    for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
-    if (self.has_value_prefix_): n += 3
-    return n
-
-  def Clear(self):
-    self.clear_index_name()
-    self.clear_prefix_value()
-    self.clear_value_prefix()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(66)
-    out.putPrefixedString(self.index_name_)
-    for i in xrange(len(self.prefix_value_)):
-      out.putVarInt32(74)
-      out.putPrefixedString(self.prefix_value_[i])
-    if (self.has_value_prefix_):
-      out.putVarInt32(160)
-      out.putBoolean(self.value_prefix_)
-
-  def OutputPartial(self, out):
-    if (self.has_index_name_):
-      out.putVarInt32(66)
-      out.putPrefixedString(self.index_name_)
-    for i in xrange(len(self.prefix_value_)):
-      out.putVarInt32(74)
-      out.putPrefixedString(self.prefix_value_[i])
-    if (self.has_value_prefix_):
-      out.putVarInt32(160)
-      out.putBoolean(self.value_prefix_)
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 60: break
-      if tt == 66:
-        self.set_index_name(d.getPrefixedString())
-        continue
-      if tt == 74:
-        self.add_prefix_value(d.getPrefixedString())
-        continue
-      if tt == 160:
-        self.set_value_prefix(d.getBoolean())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
-    cnt=0
-    for e in self.prefix_value_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("prefix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
-      cnt+=1
-    if self.has_value_prefix_: res+=prefix+("value_prefix: %s\n" % self.DebugFormatBool(self.value_prefix_))
-    return res
-
-class CompiledQuery_EntityFilter(ProtocolBuffer.ProtocolMessage):
-  has_distinct_ = 0
-  distinct_ = 0
-  has_kind_ = 0
-  kind_ = ""
-  has_ancestor_ = 0
-  ancestor_ = None
-
-  def __init__(self, contents=None):
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def distinct(self): return self.distinct_
-
-  def set_distinct(self, x):
-    self.has_distinct_ = 1
-    self.distinct_ = x
-
-  def clear_distinct(self):
-    if self.has_distinct_:
-      self.has_distinct_ = 0
-      self.distinct_ = 0
-
-  def has_distinct(self): return self.has_distinct_
-
-  def kind(self): return self.kind_
-
-  def set_kind(self, x):
-    self.has_kind_ = 1
-    self.kind_ = x
-
-  def clear_kind(self):
-    if self.has_kind_:
-      self.has_kind_ = 0
-      self.kind_ = ""
-
-  def has_kind(self): return self.has_kind_
-
-  def ancestor(self):
-    if self.ancestor_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.ancestor_ is None: self.ancestor_ = Reference()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.ancestor_
-
-  def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
-
-  def clear_ancestor(self):
-
-    if self.has_ancestor_:
-      self.has_ancestor_ = 0;
-      if self.ancestor_ is not None: self.ancestor_.Clear()
-
-  def has_ancestor(self): return self.has_ancestor_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_distinct()): self.set_distinct(x.distinct())
-    if (x.has_kind()): self.set_kind(x.kind())
-    if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_distinct_ != x.has_distinct_: return 0
-    if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
-    if self.has_kind_ != x.has_kind_: return 0
-    if self.has_kind_ and self.kind_ != x.kind_: return 0
-    if self.has_ancestor_ != x.has_ancestor_: return 0
-    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_distinct_): n += 2
-    if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
-    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_distinct_): n += 2
-    if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
-    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_distinct()
-    self.clear_kind()
-    self.clear_ancestor()
-
-  def OutputUnchecked(self, out):
-    if (self.has_distinct_):
-      out.putVarInt32(112)
-      out.putBoolean(self.distinct_)
-    if (self.has_kind_):
-      out.putVarInt32(138)
-      out.putPrefixedString(self.kind_)
-    if (self.has_ancestor_):
-      out.putVarInt32(146)
-      out.putVarInt32(self.ancestor_.ByteSize())
-      self.ancestor_.OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_distinct_):
-      out.putVarInt32(112)
-      out.putBoolean(self.distinct_)
-    if (self.has_kind_):
-      out.putVarInt32(138)
-      out.putPrefixedString(self.kind_)
-    if (self.has_ancestor_):
-      out.putVarInt32(146)
-      out.putVarInt32(self.ancestor_.ByteSizePartial())
-      self.ancestor_.OutputPartial(out)
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 108: break
-      if tt == 112:
-        self.set_distinct(d.getBoolean())
-        continue
-      if tt == 138:
-        self.set_kind(d.getPrefixedString())
-        continue
-      if tt == 146:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_ancestor().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
-    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
-    if self.has_ancestor_:
-      res+=prefix+"ancestor <\n"
-      res+=self.ancestor_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    return res
-
-class CompiledQuery(ProtocolBuffer.ProtocolMessage):
-  has_primaryscan_ = 0
-  has_index_def_ = 0
-  index_def_ = None
-  has_offset_ = 0
-  offset_ = 0
-  has_limit_ = 0
-  limit_ = 0
-  has_keys_only_ = 0
-  keys_only_ = 0
-  has_distinct_infix_size_ = 0
-  distinct_infix_size_ = 0
-  has_entityfilter_ = 0
-  entityfilter_ = None
-
-  def __init__(self, contents=None):
-    self.primaryscan_ = CompiledQuery_PrimaryScan()
-    self.mergejoinscan_ = []
-    self.property_name_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def primaryscan(self): return self.primaryscan_
-
-  def mutable_primaryscan(self): self.has_primaryscan_ = 1; return self.primaryscan_
-
-  def clear_primaryscan(self):self.has_primaryscan_ = 0; self.primaryscan_.Clear()
-
-  def has_primaryscan(self): return self.has_primaryscan_
-
-  def mergejoinscan_size(self): return len(self.mergejoinscan_)
-  def mergejoinscan_list(self): return self.mergejoinscan_
-
-  def mergejoinscan(self, i):
-    return self.mergejoinscan_[i]
-
-  def mutable_mergejoinscan(self, i):
-    return self.mergejoinscan_[i]
-
-  def add_mergejoinscan(self):
-    x = CompiledQuery_MergeJoinScan()
-    self.mergejoinscan_.append(x)
-    return x
-
-  def clear_mergejoinscan(self):
-    self.mergejoinscan_ = []
-  def index_def(self):
-    if self.index_def_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.index_def_ is None: self.index_def_ = Index()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.index_def_
-
-  def mutable_index_def(self): self.has_index_def_ = 1; return self.index_def()
-
-  def clear_index_def(self):
-
-    if self.has_index_def_:
-      self.has_index_def_ = 0;
-      if self.index_def_ is not None: self.index_def_.Clear()
-
-  def has_index_def(self): return self.has_index_def_
-
-  def offset(self): return self.offset_
-
-  def set_offset(self, x):
-    self.has_offset_ = 1
-    self.offset_ = x
-
-  def clear_offset(self):
-    if self.has_offset_:
-      self.has_offset_ = 0
-      self.offset_ = 0
-
-  def has_offset(self): return self.has_offset_
-
-  def limit(self): return self.limit_
-
-  def set_limit(self, x):
-    self.has_limit_ = 1
-    self.limit_ = x
-
-  def clear_limit(self):
-    if self.has_limit_:
-      self.has_limit_ = 0
-      self.limit_ = 0
-
-  def has_limit(self): return self.has_limit_
-
-  def keys_only(self): return self.keys_only_
-
-  def set_keys_only(self, x):
-    self.has_keys_only_ = 1
-    self.keys_only_ = x
-
-  def clear_keys_only(self):
-    if self.has_keys_only_:
-      self.has_keys_only_ = 0
-      self.keys_only_ = 0
-
-  def has_keys_only(self): return self.has_keys_only_
-
-  def property_name_size(self): return len(self.property_name_)
-  def property_name_list(self): return self.property_name_
-
-  def property_name(self, i):
-    return self.property_name_[i]
-
-  def set_property_name(self, i, x):
-    self.property_name_[i] = x
-
-  def add_property_name(self, x):
-    self.property_name_.append(x)
-
-  def clear_property_name(self):
-    self.property_name_ = []
-
-  def distinct_infix_size(self): return self.distinct_infix_size_
-
-  def set_distinct_infix_size(self, x):
-    self.has_distinct_infix_size_ = 1
-    self.distinct_infix_size_ = x
-
-  def clear_distinct_infix_size(self):
-    if self.has_distinct_infix_size_:
-      self.has_distinct_infix_size_ = 0
-      self.distinct_infix_size_ = 0
-
-  def has_distinct_infix_size(self): return self.has_distinct_infix_size_
-
-  def entityfilter(self):
-    if self.entityfilter_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.entityfilter_ is None: self.entityfilter_ = CompiledQuery_EntityFilter()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.entityfilter_
-
-  def mutable_entityfilter(self): self.has_entityfilter_ = 1; return self.entityfilter()
-
-  def clear_entityfilter(self):
-
-    if self.has_entityfilter_:
-      self.has_entityfilter_ = 0;
-      if self.entityfilter_ is not None: self.entityfilter_.Clear()
-
-  def has_entityfilter(self): return self.has_entityfilter_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_primaryscan()): self.mutable_primaryscan().MergeFrom(x.primaryscan())
-    for i in xrange(x.mergejoinscan_size()): self.add_mergejoinscan().CopyFrom(x.mergejoinscan(i))
-    if (x.has_index_def()): self.mutable_index_def().MergeFrom(x.index_def())
-    if (x.has_offset()): self.set_offset(x.offset())
-    if (x.has_limit()): self.set_limit(x.limit())
-    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
-    for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
-    if (x.has_distinct_infix_size()): self.set_distinct_infix_size(x.distinct_infix_size())
-    if (x.has_entityfilter()): self.mutable_entityfilter().MergeFrom(x.entityfilter())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_primaryscan_ != x.has_primaryscan_: return 0
-    if self.has_primaryscan_ and self.primaryscan_ != x.primaryscan_: return 0
-    if len(self.mergejoinscan_) != len(x.mergejoinscan_): return 0
-    for e1, e2 in zip(self.mergejoinscan_, x.mergejoinscan_):
-      if e1 != e2: return 0
-    if self.has_index_def_ != x.has_index_def_: return 0
-    if self.has_index_def_ and self.index_def_ != x.index_def_: return 0
-    if self.has_offset_ != x.has_offset_: return 0
-    if self.has_offset_ and self.offset_ != x.offset_: return 0
-    if self.has_limit_ != x.has_limit_: return 0
-    if self.has_limit_ and self.limit_ != x.limit_: return 0
-    if self.has_keys_only_ != x.has_keys_only_: return 0
-    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
-    if len(self.property_name_) != len(x.property_name_): return 0
-    for e1, e2 in zip(self.property_name_, x.property_name_):
-      if e1 != e2: return 0
-    if self.has_distinct_infix_size_ != x.has_distinct_infix_size_: return 0
-    if self.has_distinct_infix_size_ and self.distinct_infix_size_ != x.distinct_infix_size_: return 0
-    if self.has_entityfilter_ != x.has_entityfilter_: return 0
-    if self.has_entityfilter_ and self.entityfilter_ != x.entityfilter_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_primaryscan_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: primaryscan not set.')
-    elif not self.primaryscan_.IsInitialized(debug_strs): initialized = 0
-    for p in self.mergejoinscan_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    if (self.has_index_def_ and not self.index_def_.IsInitialized(debug_strs)): initialized = 0
-    if (not self.has_keys_only_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: keys_only not set.')
-    if (self.has_entityfilter_ and not self.entityfilter_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.primaryscan_.ByteSize()
-    n += 2 * len(self.mergejoinscan_)
-    for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSize()
-    if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSize())
-    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
-    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
-    n += 2 * len(self.property_name_)
-    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
-    if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
-    if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSize()
-    return n + 4
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_primaryscan_):
-      n += 2
-      n += self.primaryscan_.ByteSizePartial()
-    n += 2 * len(self.mergejoinscan_)
-    for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSizePartial()
-    if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSizePartial())
-    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
-    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
-    if (self.has_keys_only_):
-      n += 2
-    n += 2 * len(self.property_name_)
-    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
-    if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
-    if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSizePartial()
-    return n
-
-  def Clear(self):
-    self.clear_primaryscan()
-    self.clear_mergejoinscan()
-    self.clear_index_def()
-    self.clear_offset()
-    self.clear_limit()
-    self.clear_keys_only()
-    self.clear_property_name()
-    self.clear_distinct_infix_size()
-    self.clear_entityfilter()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(11)
-    self.primaryscan_.OutputUnchecked(out)
-    out.putVarInt32(12)
-    for i in xrange(len(self.mergejoinscan_)):
-      out.putVarInt32(59)
-      self.mergejoinscan_[i].OutputUnchecked(out)
-      out.putVarInt32(60)
-    if (self.has_offset_):
-      out.putVarInt32(80)
-      out.putVarInt32(self.offset_)
-    if (self.has_limit_):
-      out.putVarInt32(88)
-      out.putVarInt32(self.limit_)
-    out.putVarInt32(96)
-    out.putBoolean(self.keys_only_)
-    if (self.has_entityfilter_):
-      out.putVarInt32(107)
-      self.entityfilter_.OutputUnchecked(out)
-      out.putVarInt32(108)
-    if (self.has_index_def_):
-      out.putVarInt32(170)
-      out.putVarInt32(self.index_def_.ByteSize())
-      self.index_def_.OutputUnchecked(out)
-    for i in xrange(len(self.property_name_)):
-      out.putVarInt32(194)
-      out.putPrefixedString(self.property_name_[i])
-    if (self.has_distinct_infix_size_):
-      out.putVarInt32(200)
-      out.putVarInt32(self.distinct_infix_size_)
-
-  def OutputPartial(self, out):
-    if (self.has_primaryscan_):
-      out.putVarInt32(11)
-      self.primaryscan_.OutputPartial(out)
-      out.putVarInt32(12)
-    for i in xrange(len(self.mergejoinscan_)):
-      out.putVarInt32(59)
-      self.mergejoinscan_[i].OutputPartial(out)
-      out.putVarInt32(60)
-    if (self.has_offset_):
-      out.putVarInt32(80)
-      out.putVarInt32(self.offset_)
-    if (self.has_limit_):
-      out.putVarInt32(88)
-      out.putVarInt32(self.limit_)
-    if (self.has_keys_only_):
-      out.putVarInt32(96)
-      out.putBoolean(self.keys_only_)
-    if (self.has_entityfilter_):
-      out.putVarInt32(107)
-      self.entityfilter_.OutputPartial(out)
-      out.putVarInt32(108)
-    if (self.has_index_def_):
-      out.putVarInt32(170)
-      out.putVarInt32(self.index_def_.ByteSizePartial())
-      self.index_def_.OutputPartial(out)
-    for i in xrange(len(self.property_name_)):
-      out.putVarInt32(194)
-      out.putPrefixedString(self.property_name_[i])
-    if (self.has_distinct_infix_size_):
-      out.putVarInt32(200)
-      out.putVarInt32(self.distinct_infix_size_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 11:
-        self.mutable_primaryscan().TryMerge(d)
-        continue
-      if tt == 59:
-        self.add_mergejoinscan().TryMerge(d)
-        continue
-      if tt == 80:
-        self.set_offset(d.getVarInt32())
-        continue
-      if tt == 88:
-        self.set_limit(d.getVarInt32())
-        continue
-      if tt == 96:
-        self.set_keys_only(d.getBoolean())
-        continue
-      if tt == 107:
-        self.mutable_entityfilter().TryMerge(d)
-        continue
-      if tt == 170:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_index_def().TryMerge(tmp)
-        continue
-      if tt == 194:
-        self.add_property_name(d.getPrefixedString())
-        continue
-      if tt == 200:
-        self.set_distinct_infix_size(d.getVarInt32())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_primaryscan_:
-      res+=prefix+"PrimaryScan {\n"
-      res+=self.primaryscan_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-    cnt=0
-    for e in self.mergejoinscan_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("MergeJoinScan%s {\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-      cnt+=1
-    if self.has_index_def_:
-      res+=prefix+"index_def <\n"
-      res+=self.index_def_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
-    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
-    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
-    cnt=0
-    for e in self.property_name_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
-      cnt+=1
-    if self.has_distinct_infix_size_: res+=prefix+("distinct_infix_size: %s\n" % self.DebugFormatInt32(self.distinct_infix_size_))
-    if self.has_entityfilter_:
-      res+=prefix+"EntityFilter {\n"
-      res+=self.entityfilter_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kPrimaryScanGroup = 1
-  kPrimaryScanindex_name = 2
-  kPrimaryScanstart_key = 3
-  kPrimaryScanstart_inclusive = 4
-  kPrimaryScanend_key = 5
-  kPrimaryScanend_inclusive = 6
-  kPrimaryScanstart_postfix_value = 22
-  kPrimaryScanend_postfix_value = 23
-  kPrimaryScanend_unapplied_log_timestamp_us = 19
-  kMergeJoinScanGroup = 7
-  kMergeJoinScanindex_name = 8
-  kMergeJoinScanprefix_value = 9
-  kMergeJoinScanvalue_prefix = 20
-  kindex_def = 21
-  koffset = 10
-  klimit = 11
-  kkeys_only = 12
-  kproperty_name = 24
-  kdistinct_infix_size = 25
-  kEntityFilterGroup = 13
-  kEntityFilterdistinct = 14
-  kEntityFilterkind = 17
-  kEntityFilterancestor = 18
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "PrimaryScan",
-    2: "index_name",
-    3: "start_key",
-    4: "start_inclusive",
-    5: "end_key",
-    6: "end_inclusive",
-    7: "MergeJoinScan",
-    8: "index_name",
-    9: "prefix_value",
-    10: "offset",
-    11: "limit",
-    12: "keys_only",
-    13: "EntityFilter",
-    14: "distinct",
-    17: "kind",
-    18: "ancestor",
-    19: "end_unapplied_log_timestamp_us",
-    20: "value_prefix",
-    21: "index_def",
-    22: "start_postfix_value",
-    23: "end_postfix_value",
-    24: "property_name",
-    25: "distinct_infix_size",
-  }, 25)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STARTGROUP,
-    2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.STRING,
-    4: ProtocolBuffer.Encoder.NUMERIC,
-    5: ProtocolBuffer.Encoder.STRING,
-    6: ProtocolBuffer.Encoder.NUMERIC,
-    7: ProtocolBuffer.Encoder.STARTGROUP,
-    8: ProtocolBuffer.Encoder.STRING,
-    9: ProtocolBuffer.Encoder.STRING,
-    10: ProtocolBuffer.Encoder.NUMERIC,
-    11: ProtocolBuffer.Encoder.NUMERIC,
-    12: ProtocolBuffer.Encoder.NUMERIC,
-    13: ProtocolBuffer.Encoder.STARTGROUP,
-    14: ProtocolBuffer.Encoder.NUMERIC,
-    17: ProtocolBuffer.Encoder.STRING,
-    18: ProtocolBuffer.Encoder.STRING,
-    19: ProtocolBuffer.Encoder.NUMERIC,
-    20: ProtocolBuffer.Encoder.NUMERIC,
-    21: ProtocolBuffer.Encoder.STRING,
-    22: ProtocolBuffer.Encoder.STRING,
-    23: ProtocolBuffer.Encoder.STRING,
-    24: ProtocolBuffer.Encoder.STRING,
-    25: ProtocolBuffer.Encoder.NUMERIC,
-  }, 25, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledQuery'
-class CompiledCursor_PositionIndexValue(ProtocolBuffer.ProtocolMessage):
-  has_property_ = 0
-  property_ = ""
-  has_value_ = 0
-
-  def __init__(self, contents=None):
-    self.value_ = PropertyValue()
-    if contents is not None: self.MergeFromString(contents)
-
-  def property(self): return self.property_
-
-  def set_property(self, x):
-    self.has_property_ = 1
-    self.property_ = x
-
-  def clear_property(self):
-    if self.has_property_:
-      self.has_property_ = 0
-      self.property_ = ""
-
-  def has_property(self): return self.has_property_
-
-  def value(self): return self.value_
-
-  def mutable_value(self): self.has_value_ = 1; return self.value_
-
-  def clear_value(self):self.has_value_ = 0; self.value_.Clear()
-
-  def has_value(self): return self.has_value_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_property()): self.set_property(x.property())
-    if (x.has_value()): self.mutable_value().MergeFrom(x.value())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_property_ != x.has_property_: return 0
-    if self.has_property_ and self.property_ != x.property_: return 0
-    if self.has_value_ != x.has_value_: return 0
-    if self.has_value_ and self.value_ != x.value_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_value_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: value not set.')
-    elif not self.value_.IsInitialized(debug_strs): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
-    n += self.lengthString(self.value_.ByteSize())
-    return n + 2
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
-    if (self.has_value_):
-      n += 2
-      n += self.lengthString(self.value_.ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_property()
-    self.clear_value()
-
-  def OutputUnchecked(self, out):
-    if (self.has_property_):
-      out.putVarInt32(242)
-      out.putPrefixedString(self.property_)
-    out.putVarInt32(250)
-    out.putVarInt32(self.value_.ByteSize())
-    self.value_.OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_property_):
-      out.putVarInt32(242)
-      out.putPrefixedString(self.property_)
-    if (self.has_value_):
-      out.putVarInt32(250)
-      out.putVarInt32(self.value_.ByteSizePartial())
-      self.value_.OutputPartial(out)
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 236: break
-      if tt == 242:
-        self.set_property(d.getPrefixedString())
-        continue
-      if tt == 250:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_value().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
-    if self.has_value_:
-      res+=prefix+"value <\n"
-      res+=self.value_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    return res
-
-class CompiledCursor_Position(ProtocolBuffer.ProtocolMessage):
-  has_start_key_ = 0
-  start_key_ = ""
-  has_key_ = 0
-  key_ = None
-  has_start_inclusive_ = 0
-  start_inclusive_ = 1
-
-  def __init__(self, contents=None):
-    self.indexvalue_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def start_key(self): return self.start_key_
-
-  def set_start_key(self, x):
-    self.has_start_key_ = 1
-    self.start_key_ = x
-
-  def clear_start_key(self):
-    if self.has_start_key_:
-      self.has_start_key_ = 0
-      self.start_key_ = ""
-
-  def has_start_key(self): return self.has_start_key_
-
-  def indexvalue_size(self): return len(self.indexvalue_)
-  def indexvalue_list(self): return self.indexvalue_
-
-  def indexvalue(self, i):
-    return self.indexvalue_[i]
-
-  def mutable_indexvalue(self, i):
-    return self.indexvalue_[i]
-
-  def add_indexvalue(self):
-    x = CompiledCursor_PositionIndexValue()
-    self.indexvalue_.append(x)
-    return x
-
-  def clear_indexvalue(self):
-    self.indexvalue_ = []
-  def key(self):
-    if self.key_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.key_ is None: self.key_ = Reference()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.key_
-
-  def mutable_key(self): self.has_key_ = 1; return self.key()
-
-  def clear_key(self):
-
-    if self.has_key_:
-      self.has_key_ = 0;
-      if self.key_ is not None: self.key_.Clear()
-
-  def has_key(self): return self.has_key_
-
-  def start_inclusive(self): return self.start_inclusive_
-
-  def set_start_inclusive(self, x):
-    self.has_start_inclusive_ = 1
-    self.start_inclusive_ = x
-
-  def clear_start_inclusive(self):
-    if self.has_start_inclusive_:
-      self.has_start_inclusive_ = 0
-      self.start_inclusive_ = 1
-
-  def has_start_inclusive(self): return self.has_start_inclusive_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_start_key()): self.set_start_key(x.start_key())
-    for i in xrange(x.indexvalue_size()): self.add_indexvalue().CopyFrom(x.indexvalue(i))
-    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
-    if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_start_key_ != x.has_start_key_: return 0
-    if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
-    if len(self.indexvalue_) != len(x.indexvalue_): return 0
-    for e1, e2 in zip(self.indexvalue_, x.indexvalue_):
-      if e1 != e2: return 0
-    if self.has_key_ != x.has_key_: return 0
-    if self.has_key_ and self.key_ != x.key_: return 0
-    if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
-    if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    for p in self.indexvalue_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
-    n += 4 * len(self.indexvalue_)
-    for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSize()
-    if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSize())
-    if (self.has_start_inclusive_): n += 3
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
-    n += 4 * len(self.indexvalue_)
-    for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSizePartial()
-    if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSizePartial())
-    if (self.has_start_inclusive_): n += 3
-    return n
-
-  def Clear(self):
-    self.clear_start_key()
-    self.clear_indexvalue()
-    self.clear_key()
-    self.clear_start_inclusive()
-
-  def OutputUnchecked(self, out):
-    if (self.has_start_key_):
-      out.putVarInt32(218)
-      out.putPrefixedString(self.start_key_)
-    if (self.has_start_inclusive_):
-      out.putVarInt32(224)
-      out.putBoolean(self.start_inclusive_)
-    for i in xrange(len(self.indexvalue_)):
-      out.putVarInt32(235)
-      self.indexvalue_[i].OutputUnchecked(out)
-      out.putVarInt32(236)
-    if (self.has_key_):
-      out.putVarInt32(258)
-      out.putVarInt32(self.key_.ByteSize())
-      self.key_.OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_start_key_):
-      out.putVarInt32(218)
-      out.putPrefixedString(self.start_key_)
-    if (self.has_start_inclusive_):
-      out.putVarInt32(224)
-      out.putBoolean(self.start_inclusive_)
-    for i in xrange(len(self.indexvalue_)):
-      out.putVarInt32(235)
-      self.indexvalue_[i].OutputPartial(out)
-      out.putVarInt32(236)
-    if (self.has_key_):
-      out.putVarInt32(258)
-      out.putVarInt32(self.key_.ByteSizePartial())
-      self.key_.OutputPartial(out)
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 20: break
-      if tt == 218:
-        self.set_start_key(d.getPrefixedString())
-        continue
-      if tt == 224:
-        self.set_start_inclusive(d.getBoolean())
-        continue
-      if tt == 235:
-        self.add_indexvalue().TryMerge(d)
-        continue
-      if tt == 258:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_key().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
-    cnt=0
-    for e in self.indexvalue_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("IndexValue%s {\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-      cnt+=1
-    if self.has_key_:
-      res+=prefix+"key <\n"
-      res+=self.key_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
-    return res
-
-class CompiledCursor(ProtocolBuffer.ProtocolMessage):
-  has_multiquery_index_ = 0
-  multiquery_index_ = 0
-
-  def __init__(self, contents=None):
-    self.position_ = []
-    if contents is not None: self.MergeFromString(contents)
-
-  def multiquery_index(self): return self.multiquery_index_
-
-  def set_multiquery_index(self, x):
-    self.has_multiquery_index_ = 1
-    self.multiquery_index_ = x
-
-  def clear_multiquery_index(self):
-    if self.has_multiquery_index_:
-      self.has_multiquery_index_ = 0
-      self.multiquery_index_ = 0
-
-  def has_multiquery_index(self): return self.has_multiquery_index_
-
-  def position_size(self): return len(self.position_)
-  def position_list(self): return self.position_
-
-  def position(self, i):
-    return self.position_[i]
-
-  def mutable_position(self, i):
-    return self.position_[i]
-
-  def add_position(self):
-    x = CompiledCursor_Position()
-    self.position_.append(x)
-    return x
-
-  def clear_position(self):
-    self.position_ = []
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_multiquery_index()): self.set_multiquery_index(x.multiquery_index())
-    for i in xrange(x.position_size()): self.add_position().CopyFrom(x.position(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_multiquery_index_ != x.has_multiquery_index_: return 0
-    if self.has_multiquery_index_ and self.multiquery_index_ != x.multiquery_index_: return 0
-    if len(self.position_) != len(x.position_): return 0
-    for e1, e2 in zip(self.position_, x.position_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    for p in self.position_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_multiquery_index_): n += 1 + self.lengthVarInt64(self.multiquery_index_)
-    n += 2 * len(self.position_)
-    for i in xrange(len(self.position_)): n += self.position_[i].ByteSize()
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_multiquery_index_): n += 1 + self.lengthVarInt64(self.multiquery_index_)
-    n += 2 * len(self.position_)
-    for i in xrange(len(self.position_)): n += self.position_[i].ByteSizePartial()
-    return n
-
-  def Clear(self):
-    self.clear_multiquery_index()
-    self.clear_position()
-
-  def OutputUnchecked(self, out):
-    if (self.has_multiquery_index_):
-      out.putVarInt32(8)
-      out.putVarInt32(self.multiquery_index_)
-    for i in xrange(len(self.position_)):
-      out.putVarInt32(19)
-      self.position_[i].OutputUnchecked(out)
-      out.putVarInt32(20)
-
-  def OutputPartial(self, out):
-    if (self.has_multiquery_index_):
-      out.putVarInt32(8)
-      out.putVarInt32(self.multiquery_index_)
-    for i in xrange(len(self.position_)):
-      out.putVarInt32(19)
-      self.position_[i].OutputPartial(out)
-      out.putVarInt32(20)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 8:
-        self.set_multiquery_index(d.getVarInt32())
-        continue
-      if tt == 19:
-        self.add_position().TryMerge(d)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_multiquery_index_: res+=prefix+("multiquery_index: %s\n" % self.DebugFormatInt32(self.multiquery_index_))
-    cnt=0
-    for e in self.position_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("Position%s {\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-      cnt+=1
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kmultiquery_index = 1
-  kPositionGroup = 2
-  kPositionstart_key = 27
-  kPositionIndexValueGroup = 29
-  kPositionIndexValueproperty = 30
-  kPositionIndexValuevalue = 31
-  kPositionkey = 32
-  kPositionstart_inclusive = 28
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "multiquery_index",
-    2: "Position",
-    27: "start_key",
-    28: "start_inclusive",
-    29: "IndexValue",
-    30: "property",
-    31: "value",
-    32: "key",
-  }, 32)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.NUMERIC,
-    2: ProtocolBuffer.Encoder.STARTGROUP,
-    27: ProtocolBuffer.Encoder.STRING,
-    28: ProtocolBuffer.Encoder.NUMERIC,
-    29: ProtocolBuffer.Encoder.STARTGROUP,
-    30: ProtocolBuffer.Encoder.STRING,
-    31: ProtocolBuffer.Encoder.STRING,
-    32: ProtocolBuffer.Encoder.STRING,
-  }, 32, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledCursor'
-class Cursor(ProtocolBuffer.ProtocolMessage):
-  has_cursor_ = 0
-  cursor_ = 0
-  has_app_ = 0
-  app_ = ""
-
-  def __init__(self, contents=None):
-    if contents is not None: self.MergeFromString(contents)
-
-  def cursor(self): return self.cursor_
-
-  def set_cursor(self, x):
-    self.has_cursor_ = 1
-    self.cursor_ = x
-
-  def clear_cursor(self):
-    if self.has_cursor_:
-      self.has_cursor_ = 0
-      self.cursor_ = 0
-
-  def has_cursor(self): return self.has_cursor_
-
-  def app(self): return self.app_
-
-  def set_app(self, x):
-    self.has_app_ = 1
-    self.app_ = x
-
-  def clear_app(self):
-    if self.has_app_:
-      self.has_app_ = 0
-      self.app_ = ""
-
-  def has_app(self): return self.has_app_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_cursor()): self.set_cursor(x.cursor())
-    if (x.has_app()): self.set_app(x.app())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_cursor_ != x.has_cursor_: return 0
-    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
-    if self.has_app_ != x.has_app_: return 0
-    if self.has_app_ and self.app_ != x.app_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_cursor_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: cursor not set.')
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
-    return n + 9
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_cursor_):
-      n += 9
-    if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
-    return n
-
-  def Clear(self):
-    self.clear_cursor()
-    self.clear_app()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(9)
-    out.put64(self.cursor_)
-    if (self.has_app_):
-      out.putVarInt32(18)
-      out.putPrefixedString(self.app_)
-
-  def OutputPartial(self, out):
-    if (self.has_cursor_):
-      out.putVarInt32(9)
-      out.put64(self.cursor_)
-    if (self.has_app_):
-      out.putVarInt32(18)
-      out.putPrefixedString(self.app_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 9:
-        self.set_cursor(d.get64())
-        continue
-      if tt == 18:
-        self.set_app(d.getPrefixedString())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_))
-    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kcursor = 1
-  kapp = 2
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "cursor",
-    2: "app",
-  }, 2)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.DOUBLE,
-    2: ProtocolBuffer.Encoder.STRING,
-  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cursor'
-class Error(ProtocolBuffer.ProtocolMessage):
-
-
-  BAD_REQUEST  =    1
-  CONCURRENT_TRANSACTION =    2
-  INTERNAL_ERROR =    3
-  NEED_INDEX   =    4
-  TIMEOUT      =    5
-  PERMISSION_DENIED =    6
-  BIGTABLE_ERROR =    7
-  COMMITTED_BUT_STILL_APPLYING =    8
-  CAPABILITY_DISABLED =    9
-  TRY_ALTERNATE_BACKEND =   10
-  SAFE_TIME_TOO_OLD =   11
-
-  _ErrorCode_NAMES = {
-    1: "BAD_REQUEST",
-    2: "CONCURRENT_TRANSACTION",
-    3: "INTERNAL_ERROR",
-    4: "NEED_INDEX",
-    5: "TIMEOUT",
-    6: "PERMISSION_DENIED",
-    7: "BIGTABLE_ERROR",
-    8: "COMMITTED_BUT_STILL_APPLYING",
-    9: "CAPABILITY_DISABLED",
-    10: "TRY_ALTERNATE_BACKEND",
-    11: "SAFE_TIME_TOO_OLD",
-  }
-
-  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
-  ErrorCode_Name = classmethod(ErrorCode_Name)
-
-
-  def __init__(self, contents=None):
-    pass
-    if contents is not None: self.MergeFromString(contents)
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-
-  def Equals(self, x):
-    if x is self: return 1
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    return n
-
-  def Clear(self):
-    pass
-
-  def OutputUnchecked(self, out):
-    pass
-
-  def OutputPartial(self, out):
-    pass
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-  }, 0)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Error'
-class Cost_CommitCost(ProtocolBuffer.ProtocolMessage):
-  has_requested_entity_puts_ = 0
-  requested_entity_puts_ = 0
-  has_requested_entity_deletes_ = 0
-  requested_entity_deletes_ = 0
-
-  def __init__(self, contents=None):
-    if contents is not None: self.MergeFromString(contents)
-
-  def requested_entity_puts(self): return self.requested_entity_puts_
-
-  def set_requested_entity_puts(self, x):
-    self.has_requested_entity_puts_ = 1
-    self.requested_entity_puts_ = x
-
-  def clear_requested_entity_puts(self):
-    if self.has_requested_entity_puts_:
-      self.has_requested_entity_puts_ = 0
-      self.requested_entity_puts_ = 0
-
-  def has_requested_entity_puts(self): return self.has_requested_entity_puts_
-
-  def requested_entity_deletes(self): return self.requested_entity_deletes_
-
-  def set_requested_entity_deletes(self, x):
-    self.has_requested_entity_deletes_ = 1
-    self.requested_entity_deletes_ = x
-
-  def clear_requested_entity_deletes(self):
-    if self.has_requested_entity_deletes_:
-      self.has_requested_entity_deletes_ = 0
-      self.requested_entity_deletes_ = 0
-
-  def has_requested_entity_deletes(self): return self.has_requested_entity_deletes_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_requested_entity_puts()): self.set_requested_entity_puts(x.requested_entity_puts())
-    if (x.has_requested_entity_deletes()): self.set_requested_entity_deletes(x.requested_entity_deletes())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_requested_entity_puts_ != x.has_requested_entity_puts_: return 0
-    if self.has_requested_entity_puts_ and self.requested_entity_puts_ != x.requested_entity_puts_: return 0
-    if self.has_requested_entity_deletes_ != x.has_requested_entity_deletes_: return 0
-    if self.has_requested_entity_deletes_ and self.requested_entity_deletes_ != x.requested_entity_deletes_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
-    if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
-    if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
-    return n
-
-  def Clear(self):
-    self.clear_requested_entity_puts()
-    self.clear_requested_entity_deletes()
-
-  def OutputUnchecked(self, out):
-    if (self.has_requested_entity_puts_):
-      out.putVarInt32(48)
-      out.putVarInt32(self.requested_entity_puts_)
-    if (self.has_requested_entity_deletes_):
-      out.putVarInt32(56)
-      out.putVarInt32(self.requested_entity_deletes_)
-
-  def OutputPartial(self, out):
-    if (self.has_requested_entity_puts_):
-      out.putVarInt32(48)
-      out.putVarInt32(self.requested_entity_puts_)
-    if (self.has_requested_entity_deletes_):
-      out.putVarInt32(56)
-      out.putVarInt32(self.requested_entity_deletes_)
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 44: break
-      if tt == 48:
-        self.set_requested_entity_puts(d.getVarInt32())
-        continue
-      if tt == 56:
-        self.set_requested_entity_deletes(d.getVarInt32())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_requested_entity_puts_: res+=prefix+("requested_entity_puts: %s\n" % self.DebugFormatInt32(self.requested_entity_puts_))
-    if self.has_requested_entity_deletes_: res+=prefix+("requested_entity_deletes: %s\n" % self.DebugFormatInt32(self.requested_entity_deletes_))
-    return res
-
-class Cost(ProtocolBuffer.ProtocolMessage):
-  has_index_writes_ = 0
-  index_writes_ = 0
-  has_index_write_bytes_ = 0
-  index_write_bytes_ = 0
-  has_entity_writes_ = 0
-  entity_writes_ = 0
-  has_entity_write_bytes_ = 0
-  entity_write_bytes_ = 0
-  has_commitcost_ = 0
-  commitcost_ = None
-  has_approximate_storage_delta_ = 0
-  approximate_storage_delta_ = 0
-  has_id_sequence_updates_ = 0
-  id_sequence_updates_ = 0
-
-  def __init__(self, contents=None):
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def index_writes(self): return self.index_writes_
-
-  def set_index_writes(self, x):
-    self.has_index_writes_ = 1
-    self.index_writes_ = x
-
-  def clear_index_writes(self):
-    if self.has_index_writes_:
-      self.has_index_writes_ = 0
-      self.index_writes_ = 0
-
-  def has_index_writes(self): return self.has_index_writes_
-
-  def index_write_bytes(self): return self.index_write_bytes_
-
-  def set_index_write_bytes(self, x):
-    self.has_index_write_bytes_ = 1
-    self.index_write_bytes_ = x
-
-  def clear_index_write_bytes(self):
-    if self.has_index_write_bytes_:
-      self.has_index_write_bytes_ = 0
-      self.index_write_bytes_ = 0
-
-  def has_index_write_bytes(self): return self.has_index_write_bytes_
-
-  def entity_writes(self): return self.entity_writes_
-
-  def set_entity_writes(self, x):
-    self.has_entity_writes_ = 1
-    self.entity_writes_ = x
-
-  def clear_entity_writes(self):
-    if self.has_entity_writes_:
-      self.has_entity_writes_ = 0
-      self.entity_writes_ = 0
-
-  def has_entity_writes(self): return self.has_entity_writes_
-
-  def entity_write_bytes(self): return self.entity_write_bytes_
-
-  def set_entity_write_bytes(self, x):
-    self.has_entity_write_bytes_ = 1
-    self.entity_write_bytes_ = x
-
-  def clear_entity_write_bytes(self):
-    if self.has_entity_write_bytes_:
-      self.has_entity_write_bytes_ = 0
-      self.entity_write_bytes_ = 0
-
-  def has_entity_write_bytes(self): return self.has_entity_write_bytes_
-
-  def commitcost(self):
-    if self.commitcost_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.commitcost_ is None: self.commitcost_ = Cost_CommitCost()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.commitcost_
-
-  def mutable_commitcost(self): self.has_commitcost_ = 1; return self.commitcost()
-
-  def clear_commitcost(self):
-
-    if self.has_commitcost_:
-      self.has_commitcost_ = 0;
-      if self.commitcost_ is not None: self.commitcost_.Clear()
-
-  def has_commitcost(self): return self.has_commitcost_
-
-  def approximate_storage_delta(self): return self.approximate_storage_delta_
-
-  def set_approximate_storage_delta(self, x):
-    self.has_approximate_storage_delta_ = 1
-    self.approximate_storage_delta_ = x
-
-  def clear_approximate_storage_delta(self):
-    if self.has_approximate_storage_delta_:
-      self.has_approximate_storage_delta_ = 0
-      self.approximate_storage_delta_ = 0
-
-  def has_approximate_storage_delta(self): return self.has_approximate_storage_delta_
-
-  def id_sequence_updates(self): return self.id_sequence_updates_
-
-  def set_id_sequence_updates(self, x):
-    self.has_id_sequence_updates_ = 1
-    self.id_sequence_updates_ = x
-
-  def clear_id_sequence_updates(self):
-    if self.has_id_sequence_updates_:
-      self.has_id_sequence_updates_ = 0
-      self.id_sequence_updates_ = 0
-
-  def has_id_sequence_updates(self): return self.has_id_sequence_updates_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_index_writes()): self.set_index_writes(x.index_writes())
-    if (x.has_index_write_bytes()): self.set_index_write_bytes(x.index_write_bytes())
-    if (x.has_entity_writes()): self.set_entity_writes(x.entity_writes())
-    if (x.has_entity_write_bytes()): self.set_entity_write_bytes(x.entity_write_bytes())
-    if (x.has_commitcost()): self.mutable_commitcost().MergeFrom(x.commitcost())
-    if (x.has_approximate_storage_delta()): self.set_approximate_storage_delta(x.approximate_storage_delta())
-    if (x.has_id_sequence_updates()): self.set_id_sequence_updates(x.id_sequence_updates())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_index_writes_ != x.has_index_writes_: return 0
-    if self.has_index_writes_ and self.index_writes_ != x.index_writes_: return 0
-    if self.has_index_write_bytes_ != x.has_index_write_bytes_: return 0
-    if self.has_index_write_bytes_ and self.index_write_bytes_ != x.index_write_bytes_: return 0
-    if self.has_entity_writes_ != x.has_entity_writes_: return 0
-    if self.has_entity_writes_ and self.entity_writes_ != x.entity_writes_: return 0
-    if self.has_entity_write_bytes_ != x.has_entity_write_bytes_: return 0
-    if self.has_entity_write_bytes_ and self.entity_write_bytes_ != x.entity_write_bytes_: return 0
-    if self.has_commitcost_ != x.has_commitcost_: return 0
-    if self.has_commitcost_ and self.commitcost_ != x.commitcost_: return 0
-    if self.has_approximate_storage_delta_ != x.has_approximate_storage_delta_: return 0
-    if self.has_approximate_storage_delta_ and self.approximate_storage_delta_ != x.approximate_storage_delta_: return 0
-    if self.has_id_sequence_updates_ != x.has_id_sequence_updates_: return 0
-    if self.has_id_sequence_updates_ and self.id_sequence_updates_ != x.id_sequence_updates_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (self.has_commitcost_ and not self.commitcost_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
-    if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
-    if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
-    if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
-    if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSize()
-    if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
-    if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
-    if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
-    if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
-    if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
-    if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSizePartial()
-    if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
-    if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
-    return n
-
-  def Clear(self):
-    self.clear_index_writes()
-    self.clear_index_write_bytes()
-    self.clear_entity_writes()
-    self.clear_entity_write_bytes()
-    self.clear_commitcost()
-    self.clear_approximate_storage_delta()
-    self.clear_id_sequence_updates()
-
-  def OutputUnchecked(self, out):
-    if (self.has_index_writes_):
-      out.putVarInt32(8)
-      out.putVarInt32(self.index_writes_)
-    if (self.has_index_write_bytes_):
-      out.putVarInt32(16)
-      out.putVarInt32(self.index_write_bytes_)
-    if (self.has_entity_writes_):
-      out.putVarInt32(24)
-      out.putVarInt32(self.entity_writes_)
-    if (self.has_entity_write_bytes_):
-      out.putVarInt32(32)
-      out.putVarInt32(self.entity_write_bytes_)
-    if (self.has_commitcost_):
-      out.putVarInt32(43)
-      self.commitcost_.OutputUnchecked(out)
-      out.putVarInt32(44)
-    if (self.has_approximate_storage_delta_):
-      out.putVarInt32(64)
-      out.putVarInt32(self.approximate_storage_delta_)
-    if (self.has_id_sequence_updates_):
-      out.putVarInt32(72)
-      out.putVarInt32(self.id_sequence_updates_)
-
-  def OutputPartial(self, out):
-    if (self.has_index_writes_):
-      out.putVarInt32(8)
-      out.putVarInt32(self.index_writes_)
-    if (self.has_index_write_bytes_):
-      out.putVarInt32(16)
-      out.putVarInt32(self.index_write_bytes_)
-    if (self.has_entity_writes_):
-      out.putVarInt32(24)
-      out.putVarInt32(self.entity_writes_)
-    if (self.has_entity_write_bytes_):
-      out.putVarInt32(32)
-      out.putVarInt32(self.entity_write_bytes_)
-    if (self.has_commitcost_):
-      out.putVarInt32(43)
-      self.commitcost_.OutputPartial(out)
-      out.putVarInt32(44)
-    if (self.has_approximate_storage_delta_):
-      out.putVarInt32(64)
-      out.putVarInt32(self.approximate_storage_delta_)
-    if (self.has_id_sequence_updates_):
-      out.putVarInt32(72)
-      out.putVarInt32(self.id_sequence_updates_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 8:
-        self.set_index_writes(d.getVarInt32())
-        continue
-      if tt == 16:
-        self.set_index_write_bytes(d.getVarInt32())
-        continue
-      if tt == 24:
-        self.set_entity_writes(d.getVarInt32())
-        continue
-      if tt == 32:
-        self.set_entity_write_bytes(d.getVarInt32())
-        continue
-      if tt == 43:
-        self.mutable_commitcost().TryMerge(d)
-        continue
-      if tt == 64:
-        self.set_approximate_storage_delta(d.getVarInt32())
-        continue
-      if tt == 72:
-        self.set_id_sequence_updates(d.getVarInt32())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_index_writes_: res+=prefix+("index_writes: %s\n" % self.DebugFormatInt32(self.index_writes_))
-    if self.has_index_write_bytes_: res+=prefix+("index_write_bytes: %s\n" % self.DebugFormatInt32(self.index_write_bytes_))
-    if self.has_entity_writes_: res+=prefix+("entity_writes: %s\n" % self.DebugFormatInt32(self.entity_writes_))
-    if self.has_entity_write_bytes_: res+=prefix+("entity_write_bytes: %s\n" % self.DebugFormatInt32(self.entity_write_bytes_))
-    if self.has_commitcost_:
-      res+=prefix+"CommitCost {\n"
-      res+=self.commitcost_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-    if self.has_approximate_storage_delta_: res+=prefix+("approximate_storage_delta: %s\n" % self.DebugFormatInt32(self.approximate_storage_delta_))
-    if self.has_id_sequence_updates_: res+=prefix+("id_sequence_updates: %s\n" % self.DebugFormatInt32(self.id_sequence_updates_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kindex_writes = 1
-  kindex_write_bytes = 2
-  kentity_writes = 3
-  kentity_write_bytes = 4
-  kCommitCostGroup = 5
-  kCommitCostrequested_entity_puts = 6
-  kCommitCostrequested_entity_deletes = 7
-  kapproximate_storage_delta = 8
-  kid_sequence_updates = 9
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "index_writes",
-    2: "index_write_bytes",
-    3: "entity_writes",
-    4: "entity_write_bytes",
-    5: "CommitCost",
-    6: "requested_entity_puts",
-    7: "requested_entity_deletes",
-    8: "approximate_storage_delta",
-    9: "id_sequence_updates",
-  }, 9)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.NUMERIC,
-    2: ProtocolBuffer.Encoder.NUMERIC,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-    4: ProtocolBuffer.Encoder.NUMERIC,
-    5: ProtocolBuffer.Encoder.STARTGROUP,
-    6: ProtocolBuffer.Encoder.NUMERIC,
-    7: ProtocolBuffer.Encoder.NUMERIC,
-    8: ProtocolBuffer.Encoder.NUMERIC,
-    9: ProtocolBuffer.Encoder.NUMERIC,
-  }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cost'
-class GetRequest(ProtocolBuffer.ProtocolMessage):
-  has_transaction_ = 0
-  transaction_ = None
-  has_failover_ms_ = 0
-  failover_ms_ = 0
-  has_strong_ = 0
-  strong_ = 0
-  has_allow_deferred_ = 0
-  allow_deferred_ = 0
-
-  def __init__(self, contents=None):
-    self.key_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def key_size(self): return len(self.key_)
-  def key_list(self): return self.key_
-
-  def key(self, i):
-    return self.key_[i]
-
-  def mutable_key(self, i):
-    return self.key_[i]
-
-  def add_key(self):
-    x = Reference()
-    self.key_.append(x)
-    return x
-
-  def clear_key(self):
-    self.key_ = []
-  def transaction(self):
-    if self.transaction_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.transaction_ is None: self.transaction_ = Transaction()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.transaction_
-
-  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
-
-  def clear_transaction(self):
-
-    if self.has_transaction_:
-      self.has_transaction_ = 0;
-      if self.transaction_ is not None: self.transaction_.Clear()
-
-  def has_transaction(self): return self.has_transaction_
-
-  def failover_ms(self): return self.failover_ms_
-
-  def set_failover_ms(self, x):
-    self.has_failover_ms_ = 1
-    self.failover_ms_ = x
-
-  def clear_failover_ms(self):
-    if self.has_failover_ms_:
-      self.has_failover_ms_ = 0
-      self.failover_ms_ = 0
-
-  def has_failover_ms(self): return self.has_failover_ms_
-
-  def strong(self): return self.strong_
-
-  def set_strong(self, x):
-    self.has_strong_ = 1
-    self.strong_ = x
-
-  def clear_strong(self):
-    if self.has_strong_:
-      self.has_strong_ = 0
-      self.strong_ = 0
-
-  def has_strong(self): return self.has_strong_
-
-  def allow_deferred(self): return self.allow_deferred_
-
-  def set_allow_deferred(self, x):
-    self.has_allow_deferred_ = 1
-    self.allow_deferred_ = x
-
-  def clear_allow_deferred(self):
-    if self.has_allow_deferred_:
-      self.has_allow_deferred_ = 0
-      self.allow_deferred_ = 0
-
-  def has_allow_deferred(self): return self.has_allow_deferred_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
-    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
-    if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
-    if (x.has_strong()): self.set_strong(x.strong())
-    if (x.has_allow_deferred()): self.set_allow_deferred(x.allow_deferred())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if len(self.key_) != len(x.key_): return 0
-    for e1, e2 in zip(self.key_, x.key_):
-      if e1 != e2: return 0
-    if self.has_transaction_ != x.has_transaction_: return 0
-    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
-    if self.has_failover_ms_ != x.has_failover_ms_: return 0
-    if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
-    if self.has_strong_ != x.has_strong_: return 0
-    if self.has_strong_ and self.strong_ != x.strong_: return 0
-    if self.has_allow_deferred_ != x.has_allow_deferred_: return 0
-    if self.has_allow_deferred_ and self.allow_deferred_ != x.allow_deferred_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    for p in self.key_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += 1 * len(self.key_)
-    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
-    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
-    if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
-    if (self.has_strong_): n += 2
-    if (self.has_allow_deferred_): n += 2
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    n += 1 * len(self.key_)
-    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
-    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
-    if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
-    if (self.has_strong_): n += 2
-    if (self.has_allow_deferred_): n += 2
-    return n
-
-  def Clear(self):
-    self.clear_key()
-    self.clear_transaction()
-    self.clear_failover_ms()
-    self.clear_strong()
-    self.clear_allow_deferred()
-
-  def OutputUnchecked(self, out):
-    for i in xrange(len(self.key_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.key_[i].ByteSize())
-      self.key_[i].OutputUnchecked(out)
-    if (self.has_transaction_):
-      out.putVarInt32(18)
-      out.putVarInt32(self.transaction_.ByteSize())
-      self.transaction_.OutputUnchecked(out)
-    if (self.has_failover_ms_):
-      out.putVarInt32(24)
-      out.putVarInt64(self.failover_ms_)
-    if (self.has_strong_):
-      out.putVarInt32(32)
-      out.putBoolean(self.strong_)
-    if (self.has_allow_deferred_):
-      out.putVarInt32(40)
-      out.putBoolean(self.allow_deferred_)
-
-  def OutputPartial(self, out):
-    for i in xrange(len(self.key_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.key_[i].ByteSizePartial())
-      self.key_[i].OutputPartial(out)
-    if (self.has_transaction_):
-      out.putVarInt32(18)
-      out.putVarInt32(self.transaction_.ByteSizePartial())
-      self.transaction_.OutputPartial(out)
-    if (self.has_failover_ms_):
-      out.putVarInt32(24)
-      out.putVarInt64(self.failover_ms_)
-    if (self.has_strong_):
-      out.putVarInt32(32)
-      out.putBoolean(self.strong_)
-    if (self.has_allow_deferred_):
-      out.putVarInt32(40)
-      out.putBoolean(self.allow_deferred_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_key().TryMerge(tmp)
-        continue
-      if tt == 18:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_transaction().TryMerge(tmp)
-        continue
-      if tt == 24:
-        self.set_failover_ms(d.getVarInt64())
-        continue
-      if tt == 32:
-        self.set_strong(d.getBoolean())
-        continue
-      if tt == 40:
-        self.set_allow_deferred(d.getBoolean())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    cnt=0
-    for e in self.key_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("key%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_transaction_:
-      res+=prefix+"transaction <\n"
-      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
-    if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
-    if self.has_allow_deferred_: res+=prefix+("allow_deferred: %s\n" % self.DebugFormatBool(self.allow_deferred_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kkey = 1
-  ktransaction = 2
-  kfailover_ms = 3
-  kstrong = 4
-  kallow_deferred = 5
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "key",
-    2: "transaction",
-    3: "failover_ms",
-    4: "strong",
-    5: "allow_deferred",
-  }, 5)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-    4: ProtocolBuffer.Encoder.NUMERIC,
-    5: ProtocolBuffer.Encoder.NUMERIC,
-  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetRequest'
-class GetResponse_Entity(ProtocolBuffer.ProtocolMessage):
-  has_entity_ = 0
-  entity_ = None
-  has_key_ = 0
-  key_ = None
-  has_version_ = 0
-  version_ = 0
-
-  def __init__(self, contents=None):
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def entity(self):
-    if self.entity_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.entity_ is None: self.entity_ = EntityProto()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.entity_
-
-  def mutable_entity(self): self.has_entity_ = 1; return self.entity()
-
-  def clear_entity(self):
-
-    if self.has_entity_:
-      self.has_entity_ = 0;
-      if self.entity_ is not None: self.entity_.Clear()
-
-  def has_entity(self): return self.has_entity_
-
-  def key(self):
-    if self.key_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.key_ is None: self.key_ = Reference()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.key_
-
-  def mutable_key(self): self.has_key_ = 1; return self.key()
-
-  def clear_key(self):
-
-    if self.has_key_:
-      self.has_key_ = 0;
-      if self.key_ is not None: self.key_.Clear()
-
-  def has_key(self): return self.has_key_
-
-  def version(self): return self.version_
-
-  def set_version(self, x):
-    self.has_version_ = 1
-    self.version_ = x
-
-  def clear_version(self):
-    if self.has_version_:
-      self.has_version_ = 0
-      self.version_ = 0
-
-  def has_version(self): return self.has_version_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity())
-    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
-    if (x.has_version()): self.set_version(x.version())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_entity_ != x.has_entity_: return 0
-    if self.has_entity_ and self.entity_ != x.entity_: return 0
-    if self.has_key_ != x.has_key_: return 0
-    if self.has_key_ and self.key_ != x.key_: return 0
-    if self.has_version_ != x.has_version_: return 0
-    if self.has_version_ and self.version_ != x.version_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
-    if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
-    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
-    if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSizePartial())
-    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
-    if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
-    return n
-
-  def Clear(self):
-    self.clear_entity()
-    self.clear_key()
-    self.clear_version()
-
-  def OutputUnchecked(self, out):
-    if (self.has_entity_):
-      out.putVarInt32(18)
-      out.putVarInt32(self.entity_.ByteSize())
-      self.entity_.OutputUnchecked(out)
-    if (self.has_version_):
-      out.putVarInt32(24)
-      out.putVarInt64(self.version_)
-    if (self.has_key_):
-      out.putVarInt32(34)
-      out.putVarInt32(self.key_.ByteSize())
-      self.key_.OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_entity_):
-      out.putVarInt32(18)
-      out.putVarInt32(self.entity_.ByteSizePartial())
-      self.entity_.OutputPartial(out)
-    if (self.has_version_):
-      out.putVarInt32(24)
-      out.putVarInt64(self.version_)
-    if (self.has_key_):
-      out.putVarInt32(34)
-      out.putVarInt32(self.key_.ByteSizePartial())
-      self.key_.OutputPartial(out)
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 12: break
-      if tt == 18:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_entity().TryMerge(tmp)
-        continue
-      if tt == 24:
-        self.set_version(d.getVarInt64())
-        continue
-      if tt == 34:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_key().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_entity_:
-      res+=prefix+"entity <\n"
-      res+=self.entity_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_key_:
-      res+=prefix+"key <\n"
-      res+=self.key_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
-    return res
-
-class GetResponse(ProtocolBuffer.ProtocolMessage):
-  has_in_order_ = 0
-  in_order_ = 1
-
-  def __init__(self, contents=None):
-    self.entity_ = []
-    self.deferred_ = []
-    if contents is not None: self.MergeFromString(contents)
-
-  def entity_size(self): return len(self.entity_)
-  def entity_list(self): return self.entity_
-
-  def entity(self, i):
-    return self.entity_[i]
-
-  def mutable_entity(self, i):
-    return self.entity_[i]
-
-  def add_entity(self):
-    x = GetResponse_Entity()
-    self.entity_.append(x)
-    return x
-
-  def clear_entity(self):
-    self.entity_ = []
-  def deferred_size(self): return len(self.deferred_)
-  def deferred_list(self): return self.deferred_
-
-  def deferred(self, i):
-    return self.deferred_[i]
-
-  def mutable_deferred(self, i):
-    return self.deferred_[i]
-
-  def add_deferred(self):
-    x = Reference()
-    self.deferred_.append(x)
-    return x
-
-  def clear_deferred(self):
-    self.deferred_ = []
-  def in_order(self): return self.in_order_
-
-  def set_in_order(self, x):
-    self.has_in_order_ = 1
-    self.in_order_ = x
-
-  def clear_in_order(self):
-    if self.has_in_order_:
-      self.has_in_order_ = 0
-      self.in_order_ = 1
-
-  def has_in_order(self): return self.has_in_order_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
-    for i in xrange(x.deferred_size()): self.add_deferred().CopyFrom(x.deferred(i))
-    if (x.has_in_order()): self.set_in_order(x.in_order())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if len(self.entity_) != len(x.entity_): return 0
-    for e1, e2 in zip(self.entity_, x.entity_):
-      if e1 != e2: return 0
-    if len(self.deferred_) != len(x.deferred_): return 0
-    for e1, e2 in zip(self.deferred_, x.deferred_):
-      if e1 != e2: return 0
-    if self.has_in_order_ != x.has_in_order_: return 0
-    if self.has_in_order_ and self.in_order_ != x.in_order_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    for p in self.entity_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    for p in self.deferred_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += 2 * len(self.entity_)
-    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSize()
-    n += 1 * len(self.deferred_)
-    for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSize())
-    if (self.has_in_order_): n += 2
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    n += 2 * len(self.entity_)
-    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSizePartial()
-    n += 1 * len(self.deferred_)
-    for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSizePartial())
-    if (self.has_in_order_): n += 2
-    return n
-
-  def Clear(self):
-    self.clear_entity()
-    self.clear_deferred()
-    self.clear_in_order()
-
-  def OutputUnchecked(self, out):
-    for i in xrange(len(self.entity_)):
-      out.putVarInt32(11)
-      self.entity_[i].OutputUnchecked(out)
-      out.putVarInt32(12)
-    for i in xrange(len(self.deferred_)):
-      out.putVarInt32(42)
-      out.putVarInt32(self.deferred_[i].ByteSize())
-      self.deferred_[i].OutputUnchecked(out)
-    if (self.has_in_order_):
-      out.putVarInt32(48)
-      out.putBoolean(self.in_order_)
-
-  def OutputPartial(self, out):
-    for i in xrange(len(self.entity_)):
-      out.putVarInt32(11)
-      self.entity_[i].OutputPartial(out)
-      out.putVarInt32(12)
-    for i in xrange(len(self.deferred_)):
-      out.putVarInt32(42)
-      out.putVarInt32(self.deferred_[i].ByteSizePartial())
-      self.deferred_[i].OutputPartial(out)
-    if (self.has_in_order_):
-      out.putVarInt32(48)
-      out.putBoolean(self.in_order_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 11:
-        self.add_entity().TryMerge(d)
-        continue
-      if tt == 42:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_deferred().TryMerge(tmp)
-        continue
-      if tt == 48:
-        self.set_in_order(d.getBoolean())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    cnt=0
-    for e in self.entity_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("Entity%s {\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-      cnt+=1
-    cnt=0
-    for e in self.deferred_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("deferred%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_in_order_: res+=prefix+("in_order: %s\n" % self.DebugFormatBool(self.in_order_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kEntityGroup = 1
-  kEntityentity = 2
-  kEntitykey = 4
-  kEntityversion = 3
-  kdeferred = 5
-  kin_order = 6
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "Entity",
-    2: "entity",
-    3: "version",
-    4: "key",
-    5: "deferred",
-    6: "in_order",
-  }, 6)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STARTGROUP,
-    2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-    4: ProtocolBuffer.Encoder.STRING,
-    5: ProtocolBuffer.Encoder.STRING,
-    6: ProtocolBuffer.Encoder.NUMERIC,
-  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetResponse'
-class PutRequest(ProtocolBuffer.ProtocolMessage):
-
-
-  CURRENT      =    0
-  SEQUENTIAL   =    1
-
-  _AutoIdPolicy_NAMES = {
-    0: "CURRENT",
-    1: "SEQUENTIAL",
-  }
-
-  def AutoIdPolicy_Name(cls, x): return cls._AutoIdPolicy_NAMES.get(x, "")
-  AutoIdPolicy_Name = classmethod(AutoIdPolicy_Name)
-
-  has_transaction_ = 0
-  transaction_ = None
-  has_trusted_ = 0
-  trusted_ = 0
-  has_force_ = 0
-  force_ = 0
-  has_mark_changes_ = 0
-  mark_changes_ = 0
-  has_auto_id_policy_ = 0
-  auto_id_policy_ = 0
-
-  def __init__(self, contents=None):
-    self.entity_ = []
-    self.composite_index_ = []
-    self.snapshot_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def entity_size(self): return len(self.entity_)
-  def entity_list(self): return self.entity_
-
-  def entity(self, i):
-    return self.entity_[i]
-
-  def mutable_entity(self, i):
-    return self.entity_[i]
-
-  def add_entity(self):
-    x = EntityProto()
-    self.entity_.append(x)
-    return x
-
-  def clear_entity(self):
-    self.entity_ = []
-  def transaction(self):
-    if self.transaction_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.transaction_ is None: self.transaction_ = Transaction()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.transaction_
-
-  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
-
-  def clear_transaction(self):
-
-    if self.has_transaction_:
-      self.has_transaction_ = 0;
-      if self.transaction_ is not None: self.transaction_.Clear()
-
-  def has_transaction(self): return self.has_transaction_
-
-  def composite_index_size(self): return len(self.composite_index_)
-  def composite_index_list(self): return self.composite_index_
-
-  def composite_index(self, i):
-    return self.composite_index_[i]
-
-  def mutable_composite_index(self, i):
-    return self.composite_index_[i]
-
-  def add_composite_index(self):
-    x = CompositeIndex()
-    self.composite_index_.append(x)
-    return x
-
-  def clear_composite_index(self):
-    self.composite_index_ = []
-  def trusted(self): return self.trusted_
-
-  def set_trusted(self, x):
-    self.has_trusted_ = 1
-    self.trusted_ = x
-
-  def clear_trusted(self):
-    if self.has_trusted_:
-      self.has_trusted_ = 0
-      self.trusted_ = 0
-
-  def has_trusted(self): return self.has_trusted_
-
-  def force(self): return self.force_
-
-  def set_force(self, x):
-    self.has_force_ = 1
-    self.force_ = x
-
-  def clear_force(self):
-    if self.has_force_:
-      self.has_force_ = 0
-      self.force_ = 0
-
-  def has_force(self): return self.has_force_
-
-  def mark_changes(self): return self.mark_changes_
-
-  def set_mark_changes(self, x):
-    self.has_mark_changes_ = 1
-    self.mark_changes_ = x
-
-  def clear_mark_changes(self):
-    if self.has_mark_changes_:
-      self.has_mark_changes_ = 0
-      self.mark_changes_ = 0
-
-  def has_mark_changes(self): return self.has_mark_changes_
-
-  def snapshot_size(self): return len(self.snapshot_)
-  def snapshot_list(self): return self.snapshot_
-
-  def snapshot(self, i):
-    return self.snapshot_[i]
-
-  def mutable_snapshot(self, i):
-    return self.snapshot_[i]
-
-  def add_snapshot(self):
-    x = Snapshot()
-    self.snapshot_.append(x)
-    return x
-
-  def clear_snapshot(self):
-    self.snapshot_ = []
-  def auto_id_policy(self): return self.auto_id_policy_
-
-  def set_auto_id_policy(self, x):
-    self.has_auto_id_policy_ = 1
-    self.auto_id_policy_ = x
-
-  def clear_auto_id_policy(self):
-    if self.has_auto_id_policy_:
-      self.has_auto_id_policy_ = 0
-      self.auto_id_policy_ = 0
-
-  def has_auto_id_policy(self): return self.has_auto_id_policy_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
-    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
-    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
-    if (x.has_trusted()): self.set_trusted(x.trusted())
-    if (x.has_force()): self.set_force(x.force())
-    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
-    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
-    if (x.has_auto_id_policy()): self.set_auto_id_policy(x.auto_id_policy())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if len(self.entity_) != len(x.entity_): return 0
-    for e1, e2 in zip(self.entity_, x.entity_):
-      if e1 != e2: return 0
-    if self.has_transaction_ != x.has_transaction_: return 0
-    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
-    if len(self.composite_index_) != len(x.composite_index_): return 0
-    for e1, e2 in zip(self.composite_index_, x.composite_index_):
-      if e1 != e2: return 0
-    if self.has_trusted_ != x.has_trusted_: return 0
-    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
-    if self.has_force_ != x.has_force_: return 0
-    if self.has_force_ and self.force_ != x.force_: return 0
-    if self.has_mark_changes_ != x.has_mark_changes_: return 0
-    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
-    if len(self.snapshot_) != len(x.snapshot_): return 0
-    for e1, e2 in zip(self.snapshot_, x.snapshot_):
-      if e1 != e2: return 0
-    if self.has_auto_id_policy_ != x.has_auto_id_policy_: return 0
-    if self.has_auto_id_policy_ and self.auto_id_policy_ != x.auto_id_policy_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    for p in self.entity_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
-    for p in self.composite_index_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    for p in self.snapshot_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += 1 * len(self.entity_)
-    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSize())
-    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
-    n += 1 * len(self.composite_index_)
-    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
-    if (self.has_trusted_): n += 2
-    if (self.has_force_): n += 2
-    if (self.has_mark_changes_): n += 2
-    n += 1 * len(self.snapshot_)
-    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
-    if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    n += 1 * len(self.entity_)
-    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSizePartial())
-    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
-    n += 1 * len(self.composite_index_)
-    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
-    if (self.has_trusted_): n += 2
-    if (self.has_force_): n += 2
-    if (self.has_mark_changes_): n += 2
-    n += 1 * len(self.snapshot_)
-    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
-    if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
-    return n
-
-  def Clear(self):
-    self.clear_entity()
-    self.clear_transaction()
-    self.clear_composite_index()
-    self.clear_trusted()
-    self.clear_force()
-    self.clear_mark_changes()
-    self.clear_snapshot()
-    self.clear_auto_id_policy()
-
-  def OutputUnchecked(self, out):
-    for i in xrange(len(self.entity_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.entity_[i].ByteSize())
-      self.entity_[i].OutputUnchecked(out)
-    if (self.has_transaction_):
-      out.putVarInt32(18)
-      out.putVarInt32(self.transaction_.ByteSize())
-      self.transaction_.OutputUnchecked(out)
-    for i in xrange(len(self.composite_index_)):
-      out.putVarInt32(26)
-      out.putVarInt32(self.composite_index_[i].ByteSize())
-      self.composite_index_[i].OutputUnchecked(out)
-    if (self.has_trusted_):
-      out.putVarInt32(32)
-      out.putBoolean(self.trusted_)
-    if (self.has_force_):
-      out.putVarInt32(56)
-      out.putBoolean(self.force_)
-    if (self.has_mark_changes_):
-      out.putVarInt32(64)
-      out.putBoolean(self.mark_changes_)
-    for i in xrange(len(self.snapshot_)):
-      out.putVarInt32(74)
-      out.putVarInt32(self.snapshot_[i].ByteSize())
-      self.snapshot_[i].OutputUnchecked(out)
-    if (self.has_auto_id_policy_):
-      out.putVarInt32(80)
-      out.putVarInt32(self.auto_id_policy_)
-
-  def OutputPartial(self, out):
-    for i in xrange(len(self.entity_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.entity_[i].ByteSizePartial())
-      self.entity_[i].OutputPartial(out)
-    if (self.has_transaction_):
-      out.putVarInt32(18)
-      out.putVarInt32(self.transaction_.ByteSizePartial())
-      self.transaction_.OutputPartial(out)
-    for i in xrange(len(self.composite_index_)):
-      out.putVarInt32(26)
-      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
-      self.composite_index_[i].OutputPartial(out)
-    if (self.has_trusted_):
-      out.putVarInt32(32)
-      out.putBoolean(self.trusted_)
-    if (self.has_force_):
-      out.putVarInt32(56)
-      out.putBoolean(self.force_)
-    if (self.has_mark_changes_):
-      out.putVarInt32(64)
-      out.putBoolean(self.mark_changes_)
-    for i in xrange(len(self.snapshot_)):
-      out.putVarInt32(74)
-      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
-      self.snapshot_[i].OutputPartial(out)
-    if (self.has_auto_id_policy_):
-      out.putVarInt32(80)
-      out.putVarInt32(self.auto_id_policy_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_entity().TryMerge(tmp)
-        continue
-      if tt == 18:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_transaction().TryMerge(tmp)
-        continue
-      if tt == 26:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_composite_index().TryMerge(tmp)
-        continue
-      if tt == 32:
-        self.set_trusted(d.getBoolean())
-        continue
-      if tt == 56:
-        self.set_force(d.getBoolean())
-        continue
-      if tt == 64:
-        self.set_mark_changes(d.getBoolean())
-        continue
-      if tt == 74:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_snapshot().TryMerge(tmp)
-        continue
-      if tt == 80:
-        self.set_auto_id_policy(d.getVarInt32())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    cnt=0
-    for e in self.entity_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("entity%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_transaction_:
-      res+=prefix+"transaction <\n"
-      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    cnt=0
-    for e in self.composite_index_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("composite_index%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
-    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
-    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
-    cnt=0
-    for e in self.snapshot_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("snapshot%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_auto_id_policy_: res+=prefix+("auto_id_policy: %s\n" % self.DebugFormatInt32(self.auto_id_policy_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kentity = 1
-  ktransaction = 2
-  kcomposite_index = 3
-  ktrusted = 4
-  kforce = 7
-  kmark_changes = 8
-  ksnapshot = 9
-  kauto_id_policy = 10
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "entity",
-    2: "transaction",
-    3: "composite_index",
-    4: "trusted",
-    7: "force",
-    8: "mark_changes",
-    9: "snapshot",
-    10: "auto_id_policy",
-  }, 10)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.STRING,
-    4: ProtocolBuffer.Encoder.NUMERIC,
-    7: ProtocolBuffer.Encoder.NUMERIC,
-    8: ProtocolBuffer.Encoder.NUMERIC,
-    9: ProtocolBuffer.Encoder.STRING,
-    10: ProtocolBuffer.Encoder.NUMERIC,
-  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutRequest'
-class PutResponse(ProtocolBuffer.ProtocolMessage):
-  has_cost_ = 0
-  cost_ = None
-
-  def __init__(self, contents=None):
-    self.key_ = []
-    self.version_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def key_size(self): return len(self.key_)
-  def key_list(self): return self.key_
-
-  def key(self, i):
-    return self.key_[i]
-
-  def mutable_key(self, i):
-    return self.key_[i]
-
-  def add_key(self):
-    x = Reference()
-    self.key_.append(x)
-    return x
-
-  def clear_key(self):
-    self.key_ = []
-  def cost(self):
-    if self.cost_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.cost_ is None: self.cost_ = Cost()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.cost_
-
-  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
-
-  def clear_cost(self):
-
-    if self.has_cost_:
-      self.has_cost_ = 0;
-      if self.cost_ is not None: self.cost_.Clear()
-
-  def has_cost(self): return self.has_cost_
-
-  def version_size(self): return len(self.version_)
-  def version_list(self): return self.version_
-
-  def version(self, i):
-    return self.version_[i]
-
-  def set_version(self, i, x):
-    self.version_[i] = x
-
-  def add_version(self, x):
-    self.version_.append(x)
-
-  def clear_version(self):
-    self.version_ = []
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
-    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
-    for i in xrange(x.version_size()): self.add_version(x.version(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if len(self.key_) != len(x.key_): return 0
-    for e1, e2 in zip(self.key_, x.key_):
-      if e1 != e2: return 0
-    if self.has_cost_ != x.has_cost_: return 0
-    if self.has_cost_ and self.cost_ != x.cost_: return 0
-    if len(self.version_) != len(x.version_): return 0
-    for e1, e2 in zip(self.version_, x.version_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    for p in self.key_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += 1 * len(self.key_)
-    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
-    n += 1 * len(self.version_)
-    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    n += 1 * len(self.key_)
-    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
-    n += 1 * len(self.version_)
-    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
-    return n
-
-  def Clear(self):
-    self.clear_key()
-    self.clear_cost()
-    self.clear_version()
-
-  def OutputUnchecked(self, out):
-    for i in xrange(len(self.key_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.key_[i].ByteSize())
-      self.key_[i].OutputUnchecked(out)
-    if (self.has_cost_):
-      out.putVarInt32(18)
-      out.putVarInt32(self.cost_.ByteSize())
-      self.cost_.OutputUnchecked(out)
-    for i in xrange(len(self.version_)):
-      out.putVarInt32(24)
-      out.putVarInt64(self.version_[i])
-
-  def OutputPartial(self, out):
-    for i in xrange(len(self.key_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.key_[i].ByteSizePartial())
-      self.key_[i].OutputPartial(out)
-    if (self.has_cost_):
-      out.putVarInt32(18)
-      out.putVarInt32(self.cost_.ByteSizePartial())
-      self.cost_.OutputPartial(out)
-    for i in xrange(len(self.version_)):
-      out.putVarInt32(24)
-      out.putVarInt64(self.version_[i])
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_key().TryMerge(tmp)
-        continue
-      if tt == 18:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_cost().TryMerge(tmp)
-        continue
-      if tt == 24:
-        self.add_version(d.getVarInt64())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    cnt=0
-    for e in self.key_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("key%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_cost_:
-      res+=prefix+"cost <\n"
-      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    cnt=0
-    for e in self.version_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
-      cnt+=1
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kkey = 1
-  kcost = 2
-  kversion = 3
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "key",
-    2: "cost",
-    3: "version",
-  }, 3)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutResponse'
-class TouchRequest(ProtocolBuffer.ProtocolMessage):
-  has_force_ = 0
-  force_ = 0
-
-  def __init__(self, contents=None):
-    self.key_ = []
-    self.composite_index_ = []
-    self.snapshot_ = []
-    if contents is not None: self.MergeFromString(contents)
-
-  def key_size(self): return len(self.key_)
-  def key_list(self): return self.key_
-
-  def key(self, i):
-    return self.key_[i]
-
-  def mutable_key(self, i):
-    return self.key_[i]
-
-  def add_key(self):
-    x = Reference()
-    self.key_.append(x)
-    return x
-
-  def clear_key(self):
-    self.key_ = []
-  def composite_index_size(self): return len(self.composite_index_)
-  def composite_index_list(self): return self.composite_index_
-
-  def composite_index(self, i):
-    return self.composite_index_[i]
-
-  def mutable_composite_index(self, i):
-    return self.composite_index_[i]
-
-  def add_composite_index(self):
-    x = CompositeIndex()
-    self.composite_index_.append(x)
-    return x
-
-  def clear_composite_index(self):
-    self.composite_index_ = []
-  def force(self): return self.force_
-
-  def set_force(self, x):
-    self.has_force_ = 1
-    self.force_ = x
-
-  def clear_force(self):
-    if self.has_force_:
-      self.has_force_ = 0
-      self.force_ = 0
-
-  def has_force(self): return self.has_force_
-
-  def snapshot_size(self): return len(self.snapshot_)
-  def snapshot_list(self): return self.snapshot_
-
-  def snapshot(self, i):
-    return self.snapshot_[i]
-
-  def mutable_snapshot(self, i):
-    return self.snapshot_[i]
-
-  def add_snapshot(self):
-    x = Snapshot()
-    self.snapshot_.append(x)
-    return x
-
-  def clear_snapshot(self):
-    self.snapshot_ = []
-
-  def MergeFrom(self, x):
-    assert x is not self
-    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
-    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
-    if (x.has_force()): self.set_force(x.force())
-    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if len(self.key_) != len(x.key_): return 0
-    for e1, e2 in zip(self.key_, x.key_):
-      if e1 != e2: return 0
-    if len(self.composite_index_) != len(x.composite_index_): return 0
-    for e1, e2 in zip(self.composite_index_, x.composite_index_):
-      if e1 != e2: return 0
-    if self.has_force_ != x.has_force_: return 0
-    if self.has_force_ and self.force_ != x.force_: return 0
-    if len(self.snapshot_) != len(x.snapshot_): return 0
-    for e1, e2 in zip(self.snapshot_, x.snapshot_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    for p in self.key_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    for p in self.composite_index_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    for p in self.snapshot_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += 1 * len(self.key_)
-    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
-    n += 1 * len(self.composite_index_)
-    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
-    if (self.has_force_): n += 2
-    n += 1 * len(self.snapshot_)
-    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    n += 1 * len(self.key_)
-    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
-    n += 1 * len(self.composite_index_)
-    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
-    if (self.has_force_): n += 2
-    n += 1 * len(self.snapshot_)
-    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_key()
-    self.clear_composite_index()
-    self.clear_force()
-    self.clear_snapshot()
-
-  def OutputUnchecked(self, out):
-    for i in xrange(len(self.key_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.key_[i].ByteSize())
-      self.key_[i].OutputUnchecked(out)
-    for i in xrange(len(self.composite_index_)):
-      out.putVarInt32(18)
-      out.putVarInt32(self.composite_index_[i].ByteSize())
-      self.composite_index_[i].OutputUnchecked(out)
-    if (self.has_force_):
-      out.putVarInt32(24)
-      out.putBoolean(self.force_)
-    for i in xrange(len(self.snapshot_)):
-      out.putVarInt32(74)
-      out.putVarInt32(self.snapshot_[i].ByteSize())
-      self.snapshot_[i].OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    for i in xrange(len(self.key_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.key_[i].ByteSizePartial())
-      self.key_[i].OutputPartial(out)
-    for i in xrange(len(self.composite_index_)):
-      out.putVarInt32(18)
-      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
-      self.composite_index_[i].OutputPartial(out)
-    if (self.has_force_):
-      out.putVarInt32(24)
-      out.putBoolean(self.force_)
-    for i in xrange(len(self.snapshot_)):
-      out.putVarInt32(74)
-      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
-      self.snapshot_[i].OutputPartial(out)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_key().TryMerge(tmp)
-        continue
-      if tt == 18:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_composite_index().TryMerge(tmp)
-        continue
-      if tt == 24:
-        self.set_force(d.getBoolean())
-        continue
-      if tt == 74:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_snapshot().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    cnt=0
-    for e in self.key_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("key%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    cnt=0
-    for e in self.composite_index_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("composite_index%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
-    cnt=0
-    for e in self.snapshot_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("snapshot%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kkey = 1
-  kcomposite_index = 2
-  kforce = 3
-  ksnapshot = 9
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "key",
-    2: "composite_index",
-    3: "force",
-    9: "snapshot",
-  }, 9)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-    9: ProtocolBuffer.Encoder.STRING,
-  }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchRequest'
-class TouchResponse(ProtocolBuffer.ProtocolMessage):
-  has_cost_ = 0
-  cost_ = None
-
-  def __init__(self, contents=None):
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def cost(self):
-    if self.cost_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.cost_ is None: self.cost_ = Cost()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.cost_
-
-  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
-
-  def clear_cost(self):
-
-    if self.has_cost_:
-      self.has_cost_ = 0;
-      if self.cost_ is not None: self.cost_.Clear()
-
-  def has_cost(self): return self.has_cost_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_cost_ != x.has_cost_: return 0
-    if self.has_cost_ and self.cost_ != x.cost_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_cost()
-
-  def OutputUnchecked(self, out):
-    if (self.has_cost_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.cost_.ByteSize())
-      self.cost_.OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_cost_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.cost_.ByteSizePartial())
-      self.cost_.OutputPartial(out)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_cost().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_cost_:
-      res+=prefix+"cost <\n"
-      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kcost = 1
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "cost",
-  }, 1)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchResponse'
-class DeleteRequest(ProtocolBuffer.ProtocolMessage):
-  has_transaction_ = 0
-  transaction_ = None
-  has_trusted_ = 0
-  trusted_ = 0
-  has_force_ = 0
-  force_ = 0
-  has_mark_changes_ = 0
-  mark_changes_ = 0
-
-  def __init__(self, contents=None):
-    self.key_ = []
-    self.snapshot_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def key_size(self): return len(self.key_)
-  def key_list(self): return self.key_
-
-  def key(self, i):
-    return self.key_[i]
-
-  def mutable_key(self, i):
-    return self.key_[i]
-
-  def add_key(self):
-    x = Reference()
-    self.key_.append(x)
-    return x
-
-  def clear_key(self):
-    self.key_ = []
-  def transaction(self):
-    if self.transaction_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.transaction_ is None: self.transaction_ = Transaction()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.transaction_
-
-  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
-
-  def clear_transaction(self):
-
-    if self.has_transaction_:
-      self.has_transaction_ = 0;
-      if self.transaction_ is not None: self.transaction_.Clear()
-
-  def has_transaction(self): return self.has_transaction_
-
-  def trusted(self): return self.trusted_
-
-  def set_trusted(self, x):
-    self.has_trusted_ = 1
-    self.trusted_ = x
-
-  def clear_trusted(self):
-    if self.has_trusted_:
-      self.has_trusted_ = 0
-      self.trusted_ = 0
-
-  def has_trusted(self): return self.has_trusted_
-
-  def force(self): return self.force_
-
-  def set_force(self, x):
-    self.has_force_ = 1
-    self.force_ = x
-
-  def clear_force(self):
-    if self.has_force_:
-      self.has_force_ = 0
-      self.force_ = 0
-
-  def has_force(self): return self.has_force_
-
-  def mark_changes(self): return self.mark_changes_
-
-  def set_mark_changes(self, x):
-    self.has_mark_changes_ = 1
-    self.mark_changes_ = x
-
-  def clear_mark_changes(self):
-    if self.has_mark_changes_:
-      self.has_mark_changes_ = 0
-      self.mark_changes_ = 0
-
-  def has_mark_changes(self): return self.has_mark_changes_
-
-  def snapshot_size(self): return len(self.snapshot_)
-  def snapshot_list(self): return self.snapshot_
-
-  def snapshot(self, i):
-    return self.snapshot_[i]
-
-  def mutable_snapshot(self, i):
-    return self.snapshot_[i]
-
-  def add_snapshot(self):
-    x = Snapshot()
-    self.snapshot_.append(x)
-    return x
-
-  def clear_snapshot(self):
-    self.snapshot_ = []
-
-  def MergeFrom(self, x):
-    assert x is not self
-    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
-    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
-    if (x.has_trusted()): self.set_trusted(x.trusted())
-    if (x.has_force()): self.set_force(x.force())
-    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
-    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if len(self.key_) != len(x.key_): return 0
-    for e1, e2 in zip(self.key_, x.key_):
-      if e1 != e2: return 0
-    if self.has_transaction_ != x.has_transaction_: return 0
-    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
-    if self.has_trusted_ != x.has_trusted_: return 0
-    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
-    if self.has_force_ != x.has_force_: return 0
-    if self.has_force_ and self.force_ != x.force_: return 0
-    if self.has_mark_changes_ != x.has_mark_changes_: return 0
-    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
-    if len(self.snapshot_) != len(x.snapshot_): return 0
-    for e1, e2 in zip(self.snapshot_, x.snapshot_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    for p in self.key_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
-    for p in self.snapshot_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += 1 * len(self.key_)
-    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
-    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
-    if (self.has_trusted_): n += 2
-    if (self.has_force_): n += 2
-    if (self.has_mark_changes_): n += 2
-    n += 1 * len(self.snapshot_)
-    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    n += 1 * len(self.key_)
-    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
-    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
-    if (self.has_trusted_): n += 2
-    if (self.has_force_): n += 2
-    if (self.has_mark_changes_): n += 2
-    n += 1 * len(self.snapshot_)
-    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_key()
-    self.clear_transaction()
-    self.clear_trusted()
-    self.clear_force()
-    self.clear_mark_changes()
-    self.clear_snapshot()
-
-  def OutputUnchecked(self, out):
-    if (self.has_trusted_):
-      out.putVarInt32(32)
-      out.putBoolean(self.trusted_)
-    if (self.has_transaction_):
-      out.putVarInt32(42)
-      out.putVarInt32(self.transaction_.ByteSize())
-      self.transaction_.OutputUnchecked(out)
-    for i in xrange(len(self.key_)):
-      out.putVarInt32(50)
-      out.putVarInt32(self.key_[i].ByteSize())
-      self.key_[i].OutputUnchecked(out)
-    if (self.has_force_):
-      out.putVarInt32(56)
-      out.putBoolean(self.force_)
-    if (self.has_mark_changes_):
-      out.putVarInt32(64)
-      out.putBoolean(self.mark_changes_)
-    for i in xrange(len(self.snapshot_)):
-      out.putVarInt32(74)
-      out.putVarInt32(self.snapshot_[i].ByteSize())
-      self.snapshot_[i].OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_trusted_):
-      out.putVarInt32(32)
-      out.putBoolean(self.trusted_)
-    if (self.has_transaction_):
-      out.putVarInt32(42)
-      out.putVarInt32(self.transaction_.ByteSizePartial())
-      self.transaction_.OutputPartial(out)
-    for i in xrange(len(self.key_)):
-      out.putVarInt32(50)
-      out.putVarInt32(self.key_[i].ByteSizePartial())
-      self.key_[i].OutputPartial(out)
-    if (self.has_force_):
-      out.putVarInt32(56)
-      out.putBoolean(self.force_)
-    if (self.has_mark_changes_):
-      out.putVarInt32(64)
-      out.putBoolean(self.mark_changes_)
-    for i in xrange(len(self.snapshot_)):
-      out.putVarInt32(74)
-      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
-      self.snapshot_[i].OutputPartial(out)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 32:
-        self.set_trusted(d.getBoolean())
-        continue
-      if tt == 42:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_transaction().TryMerge(tmp)
-        continue
-      if tt == 50:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_key().TryMerge(tmp)
-        continue
-      if tt == 56:
-        self.set_force(d.getBoolean())
-        continue
-      if tt == 64:
-        self.set_mark_changes(d.getBoolean())
-        continue
-      if tt == 74:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_snapshot().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    cnt=0
-    for e in self.key_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("key%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_transaction_:
-      res+=prefix+"transaction <\n"
-      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
-    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
-    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
-    cnt=0
-    for e in self.snapshot_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("snapshot%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kkey = 6
-  ktransaction = 5
-  ktrusted = 4
-  kforce = 7
-  kmark_changes = 8
-  ksnapshot = 9
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    4: "trusted",
-    5: "transaction",
-    6: "key",
-    7: "force",
-    8: "mark_changes",
-    9: "snapshot",
-  }, 9)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    4: ProtocolBuffer.Encoder.NUMERIC,
-    5: ProtocolBuffer.Encoder.STRING,
-    6: ProtocolBuffer.Encoder.STRING,
-    7: ProtocolBuffer.Encoder.NUMERIC,
-    8: ProtocolBuffer.Encoder.NUMERIC,
-    9: ProtocolBuffer.Encoder.STRING,
-  }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteRequest'
-class DeleteResponse(ProtocolBuffer.ProtocolMessage):
-  has_cost_ = 0
-  cost_ = None
-
-  def __init__(self, contents=None):
-    self.version_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def cost(self):
-    if self.cost_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.cost_ is None: self.cost_ = Cost()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.cost_
-
-  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
-
-  def clear_cost(self):
-
-    if self.has_cost_:
-      self.has_cost_ = 0;
-      if self.cost_ is not None: self.cost_.Clear()
-
-  def has_cost(self): return self.has_cost_
-
-  def version_size(self): return len(self.version_)
-  def version_list(self): return self.version_
-
-  def version(self, i):
-    return self.version_[i]
-
-  def set_version(self, i, x):
-    self.version_[i] = x
-
-  def add_version(self, x):
-    self.version_.append(x)
-
-  def clear_version(self):
-    self.version_ = []
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
-    for i in xrange(x.version_size()): self.add_version(x.version(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_cost_ != x.has_cost_: return 0
-    if self.has_cost_ and self.cost_ != x.cost_: return 0
-    if len(self.version_) != len(x.version_): return 0
-    for e1, e2 in zip(self.version_, x.version_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
-    n += 1 * len(self.version_)
-    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
-    n += 1 * len(self.version_)
-    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
-    return n
-
-  def Clear(self):
-    self.clear_cost()
-    self.clear_version()
-
-  def OutputUnchecked(self, out):
-    if (self.has_cost_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.cost_.ByteSize())
-      self.cost_.OutputUnchecked(out)
-    for i in xrange(len(self.version_)):
-      out.putVarInt32(24)
-      out.putVarInt64(self.version_[i])
-
-  def OutputPartial(self, out):
-    if (self.has_cost_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.cost_.ByteSizePartial())
-      self.cost_.OutputPartial(out)
-    for i in xrange(len(self.version_)):
-      out.putVarInt32(24)
-      out.putVarInt64(self.version_[i])
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_cost().TryMerge(tmp)
-        continue
-      if tt == 24:
-        self.add_version(d.getVarInt64())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_cost_:
-      res+=prefix+"cost <\n"
-      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    cnt=0
-    for e in self.version_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
-      cnt+=1
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kcost = 1
-  kversion = 3
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "cost",
-    3: "version",
-  }, 3)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteResponse'
-class NextRequest(ProtocolBuffer.ProtocolMessage):
-  has_cursor_ = 0
-  has_count_ = 0
-  count_ = 0
-  has_offset_ = 0
-  offset_ = 0
-  has_compile_ = 0
-  compile_ = 0
-
-  def __init__(self, contents=None):
-    self.cursor_ = Cursor()
-    if contents is not None: self.MergeFromString(contents)
-
-  def cursor(self): return self.cursor_
-
-  def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor_
-
-  def clear_cursor(self):self.has_cursor_ = 0; self.cursor_.Clear()
-
-  def has_cursor(self): return self.has_cursor_
-
-  def count(self): return self.count_
-
-  def set_count(self, x):
-    self.has_count_ = 1
-    self.count_ = x
-
-  def clear_count(self):
-    if self.has_count_:
-      self.has_count_ = 0
-      self.count_ = 0
-
-  def has_count(self): return self.has_count_
-
-  def offset(self): return self.offset_
-
-  def set_offset(self, x):
-    self.has_offset_ = 1
-    self.offset_ = x
-
-  def clear_offset(self):
-    if self.has_offset_:
-      self.has_offset_ = 0
-      self.offset_ = 0
-
-  def has_offset(self): return self.has_offset_
-
-  def compile(self): return self.compile_
-
-  def set_compile(self, x):
-    self.has_compile_ = 1
-    self.compile_ = x
-
-  def clear_compile(self):
-    if self.has_compile_:
-      self.has_compile_ = 0
-      self.compile_ = 0
-
-  def has_compile(self): return self.has_compile_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
-    if (x.has_count()): self.set_count(x.count())
-    if (x.has_offset()): self.set_offset(x.offset())
-    if (x.has_compile()): self.set_compile(x.compile())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_cursor_ != x.has_cursor_: return 0
-    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
-    if self.has_count_ != x.has_count_: return 0
-    if self.has_count_ and self.count_ != x.count_: return 0
-    if self.has_offset_ != x.has_offset_: return 0
-    if self.has_offset_ and self.offset_ != x.offset_: return 0
-    if self.has_compile_ != x.has_compile_: return 0
-    if self.has_compile_ and self.compile_ != x.compile_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_cursor_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: cursor not set.')
-    elif not self.cursor_.IsInitialized(debug_strs): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(self.cursor_.ByteSize())
-    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
-    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
-    if (self.has_compile_): n += 2
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_cursor_):
-      n += 1
-      n += self.lengthString(self.cursor_.ByteSizePartial())
-    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
-    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
-    if (self.has_compile_): n += 2
-    return n
-
-  def Clear(self):
-    self.clear_cursor()
-    self.clear_count()
-    self.clear_offset()
-    self.clear_compile()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(10)
-    out.putVarInt32(self.cursor_.ByteSize())
-    self.cursor_.OutputUnchecked(out)
-    if (self.has_count_):
-      out.putVarInt32(16)
-      out.putVarInt32(self.count_)
-    if (self.has_compile_):
-      out.putVarInt32(24)
-      out.putBoolean(self.compile_)
-    if (self.has_offset_):
-      out.putVarInt32(32)
-      out.putVarInt32(self.offset_)
-
-  def OutputPartial(self, out):
-    if (self.has_cursor_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.cursor_.ByteSizePartial())
-      self.cursor_.OutputPartial(out)
-    if (self.has_count_):
-      out.putVarInt32(16)
-      out.putVarInt32(self.count_)
-    if (self.has_compile_):
-      out.putVarInt32(24)
-      out.putBoolean(self.compile_)
-    if (self.has_offset_):
-      out.putVarInt32(32)
-      out.putVarInt32(self.offset_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_cursor().TryMerge(tmp)
-        continue
-      if tt == 16:
-        self.set_count(d.getVarInt32())
-        continue
-      if tt == 24:
-        self.set_compile(d.getBoolean())
-        continue
-      if tt == 32:
-        self.set_offset(d.getVarInt32())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_cursor_:
-      res+=prefix+"cursor <\n"
-      res+=self.cursor_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
-    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
-    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kcursor = 1
-  kcount = 2
-  koffset = 4
-  kcompile = 3
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "cursor",
-    2: "count",
-    3: "compile",
-    4: "offset",
-  }, 4)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    2: ProtocolBuffer.Encoder.NUMERIC,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-    4: ProtocolBuffer.Encoder.NUMERIC,
-  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.NextRequest'
-class QueryResult(ProtocolBuffer.ProtocolMessage):
-  has_cursor_ = 0
-  cursor_ = None
-  has_skipped_results_ = 0
-  skipped_results_ = 0
-  has_more_results_ = 0
-  more_results_ = 0
-  has_keys_only_ = 0
-  keys_only_ = 0
-  has_index_only_ = 0
-  index_only_ = 0
-  has_small_ops_ = 0
-  small_ops_ = 0
-  has_compiled_query_ = 0
-  compiled_query_ = None
-  has_compiled_cursor_ = 0
-  compiled_cursor_ = None
-
-  def __init__(self, contents=None):
-    self.result_ = []
-    self.index_ = []
-    self.version_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def cursor(self):
-    if self.cursor_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.cursor_ is None: self.cursor_ = Cursor()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.cursor_
-
-  def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor()
-
-  def clear_cursor(self):
-
-    if self.has_cursor_:
-      self.has_cursor_ = 0;
-      if self.cursor_ is not None: self.cursor_.Clear()
-
-  def has_cursor(self): return self.has_cursor_
-
-  def result_size(self): return len(self.result_)
-  def result_list(self): return self.result_
-
-  def result(self, i):
-    return self.result_[i]
-
-  def mutable_result(self, i):
-    return self.result_[i]
-
-  def add_result(self):
-    x = EntityProto()
-    self.result_.append(x)
-    return x
-
-  def clear_result(self):
-    self.result_ = []
-  def skipped_results(self): return self.skipped_results_
-
-  def set_skipped_results(self, x):
-    self.has_skipped_results_ = 1
-    self.skipped_results_ = x
-
-  def clear_skipped_results(self):
-    if self.has_skipped_results_:
-      self.has_skipped_results_ = 0
-      self.skipped_results_ = 0
-
-  def has_skipped_results(self): return self.has_skipped_results_
-
-  def more_results(self): return self.more_results_
-
-  def set_more_results(self, x):
-    self.has_more_results_ = 1
-    self.more_results_ = x
-
-  def clear_more_results(self):
-    if self.has_more_results_:
-      self.has_more_results_ = 0
-      self.more_results_ = 0
-
-  def has_more_results(self): return self.has_more_results_
-
-  def keys_only(self): return self.keys_only_
-
-  def set_keys_only(self, x):
-    self.has_keys_only_ = 1
-    self.keys_only_ = x
-
-  def clear_keys_only(self):
-    if self.has_keys_only_:
-      self.has_keys_only_ = 0
-      self.keys_only_ = 0
-
-  def has_keys_only(self): return self.has_keys_only_
-
-  def index_only(self): return self.index_only_
-
-  def set_index_only(self, x):
-    self.has_index_only_ = 1
-    self.index_only_ = x
-
-  def clear_index_only(self):
-    if self.has_index_only_:
-      self.has_index_only_ = 0
-      self.index_only_ = 0
-
-  def has_index_only(self): return self.has_index_only_
-
-  def small_ops(self): return self.small_ops_
-
-  def set_small_ops(self, x):
-    self.has_small_ops_ = 1
-    self.small_ops_ = x
-
-  def clear_small_ops(self):
-    if self.has_small_ops_:
-      self.has_small_ops_ = 0
-      self.small_ops_ = 0
-
-  def has_small_ops(self): return self.has_small_ops_
-
-  def compiled_query(self):
-    if self.compiled_query_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.compiled_query_ is None: self.compiled_query_ = CompiledQuery()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.compiled_query_
-
-  def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query()
-
-  def clear_compiled_query(self):
-
-    if self.has_compiled_query_:
-      self.has_compiled_query_ = 0;
-      if self.compiled_query_ is not None: self.compiled_query_.Clear()
-
-  def has_compiled_query(self): return self.has_compiled_query_
-
-  def compiled_cursor(self):
-    if self.compiled_cursor_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.compiled_cursor_
-
-  def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
-
-  def clear_compiled_cursor(self):
-
-    if self.has_compiled_cursor_:
-      self.has_compiled_cursor_ = 0;
-      if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
-
-  def has_compiled_cursor(self): return self.has_compiled_cursor_
-
-  def index_size(self): return len(self.index_)
-  def index_list(self): return self.index_
-
-  def index(self, i):
-    return self.index_[i]
-
-  def mutable_index(self, i):
-    return self.index_[i]
-
-  def add_index(self):
-    x = CompositeIndex()
-    self.index_.append(x)
-    return x
-
-  def clear_index(self):
-    self.index_ = []
-  def version_size(self): return len(self.version_)
-  def version_list(self): return self.version_
-
-  def version(self, i):
-    return self.version_[i]
-
-  def set_version(self, i, x):
-    self.version_[i] = x
-
-  def add_version(self, x):
-    self.version_.append(x)
-
-  def clear_version(self):
-    self.version_ = []
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
-    for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
-    if (x.has_skipped_results()): self.set_skipped_results(x.skipped_results())
-    if (x.has_more_results()): self.set_more_results(x.more_results())
-    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
-    if (x.has_index_only()): self.set_index_only(x.index_only())
-    if (x.has_small_ops()): self.set_small_ops(x.small_ops())
-    if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
-    if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
-    for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
-    for i in xrange(x.version_size()): self.add_version(x.version(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_cursor_ != x.has_cursor_: return 0
-    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
-    if len(self.result_) != len(x.result_): return 0
-    for e1, e2 in zip(self.result_, x.result_):
-      if e1 != e2: return 0
-    if self.has_skipped_results_ != x.has_skipped_results_: return 0
-    if self.has_skipped_results_ and self.skipped_results_ != x.skipped_results_: return 0
-    if self.has_more_results_ != x.has_more_results_: return 0
-    if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
-    if self.has_keys_only_ != x.has_keys_only_: return 0
-    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
-    if self.has_index_only_ != x.has_index_only_: return 0
-    if self.has_index_only_ and self.index_only_ != x.index_only_: return 0
-    if self.has_small_ops_ != x.has_small_ops_: return 0
-    if self.has_small_ops_ and self.small_ops_ != x.small_ops_: return 0
-    if self.has_compiled_query_ != x.has_compiled_query_: return 0
-    if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
-    if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
-    if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
-    if len(self.index_) != len(x.index_): return 0
-    for e1, e2 in zip(self.index_, x.index_):
-      if e1 != e2: return 0
-    if len(self.version_) != len(x.version_): return 0
-    for e1, e2 in zip(self.version_, x.version_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (self.has_cursor_ and not self.cursor_.IsInitialized(debug_strs)): initialized = 0
-    for p in self.result_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    if (not self.has_more_results_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: more_results not set.')
-    if (self.has_compiled_query_ and not self.compiled_query_.IsInitialized(debug_strs)): initialized = 0
-    if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
-    for p in self.index_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
-    n += 1 * len(self.result_)
-    for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
-    if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
-    if (self.has_keys_only_): n += 2
-    if (self.has_index_only_): n += 2
-    if (self.has_small_ops_): n += 2
-    if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSize())
-    if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSize())
-    n += 1 * len(self.index_)
-    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
-    n += 1 * len(self.version_)
-    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
-    return n + 2
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSizePartial())
-    n += 1 * len(self.result_)
-    for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSizePartial())
-    if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
-    if (self.has_more_results_):
-      n += 2
-    if (self.has_keys_only_): n += 2
-    if (self.has_index_only_): n += 2
-    if (self.has_small_ops_): n += 2
-    if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSizePartial())
-    if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
-    n += 1 * len(self.index_)
-    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
-    n += 1 * len(self.version_)
-    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
-    return n
-
-  def Clear(self):
-    self.clear_cursor()
-    self.clear_result()
-    self.clear_skipped_results()
-    self.clear_more_results()
-    self.clear_keys_only()
-    self.clear_index_only()
-    self.clear_small_ops()
-    self.clear_compiled_query()
-    self.clear_compiled_cursor()
-    self.clear_index()
-    self.clear_version()
-
-  def OutputUnchecked(self, out):
-    if (self.has_cursor_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.cursor_.ByteSize())
-      self.cursor_.OutputUnchecked(out)
-    for i in xrange(len(self.result_)):
-      out.putVarInt32(18)
-      out.putVarInt32(self.result_[i].ByteSize())
-      self.result_[i].OutputUnchecked(out)
-    out.putVarInt32(24)
-    out.putBoolean(self.more_results_)
-    if (self.has_keys_only_):
-      out.putVarInt32(32)
-      out.putBoolean(self.keys_only_)
-    if (self.has_compiled_query_):
-      out.putVarInt32(42)
-      out.putVarInt32(self.compiled_query_.ByteSize())
-      self.compiled_query_.OutputUnchecked(out)
-    if (self.has_compiled_cursor_):
-      out.putVarInt32(50)
-      out.putVarInt32(self.compiled_cursor_.ByteSize())
-      self.compiled_cursor_.OutputUnchecked(out)
-    if (self.has_skipped_results_):
-      out.putVarInt32(56)
-      out.putVarInt32(self.skipped_results_)
-    for i in xrange(len(self.index_)):
-      out.putVarInt32(66)
-      out.putVarInt32(self.index_[i].ByteSize())
-      self.index_[i].OutputUnchecked(out)
-    if (self.has_index_only_):
-      out.putVarInt32(72)
-      out.putBoolean(self.index_only_)
-    if (self.has_small_ops_):
-      out.putVarInt32(80)
-      out.putBoolean(self.small_ops_)
-    for i in xrange(len(self.version_)):
-      out.putVarInt32(88)
-      out.putVarInt64(self.version_[i])
-
-  def OutputPartial(self, out):
-    if (self.has_cursor_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.cursor_.ByteSizePartial())
-      self.cursor_.OutputPartial(out)
-    for i in xrange(len(self.result_)):
-      out.putVarInt32(18)
-      out.putVarInt32(self.result_[i].ByteSizePartial())
-      self.result_[i].OutputPartial(out)
-    if (self.has_more_results_):
-      out.putVarInt32(24)
-      out.putBoolean(self.more_results_)
-    if (self.has_keys_only_):
-      out.putVarInt32(32)
-      out.putBoolean(self.keys_only_)
-    if (self.has_compiled_query_):
-      out.putVarInt32(42)
-      out.putVarInt32(self.compiled_query_.ByteSizePartial())
-      self.compiled_query_.OutputPartial(out)
-    if (self.has_compiled_cursor_):
-      out.putVarInt32(50)
-      out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
-      self.compiled_cursor_.OutputPartial(out)
-    if (self.has_skipped_results_):
-      out.putVarInt32(56)
-      out.putVarInt32(self.skipped_results_)
-    for i in xrange(len(self.index_)):
-      out.putVarInt32(66)
-      out.putVarInt32(self.index_[i].ByteSizePartial())
-      self.index_[i].OutputPartial(out)
-    if (self.has_index_only_):
-      out.putVarInt32(72)
-      out.putBoolean(self.index_only_)
-    if (self.has_small_ops_):
-      out.putVarInt32(80)
-      out.putBoolean(self.small_ops_)
-    for i in xrange(len(self.version_)):
-      out.putVarInt32(88)
-      out.putVarInt64(self.version_[i])
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_cursor().TryMerge(tmp)
-        continue
-      if tt == 18:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_result().TryMerge(tmp)
-        continue
-      if tt == 24:
-        self.set_more_results(d.getBoolean())
-        continue
-      if tt == 32:
-        self.set_keys_only(d.getBoolean())
-        continue
-      if tt == 42:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_compiled_query().TryMerge(tmp)
-        continue
-      if tt == 50:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_compiled_cursor().TryMerge(tmp)
-        continue
-      if tt == 56:
-        self.set_skipped_results(d.getVarInt32())
-        continue
-      if tt == 66:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_index().TryMerge(tmp)
-        continue
-      if tt == 72:
-        self.set_index_only(d.getBoolean())
-        continue
-      if tt == 80:
-        self.set_small_ops(d.getBoolean())
-        continue
-      if tt == 88:
-        self.add_version(d.getVarInt64())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_cursor_:
-      res+=prefix+"cursor <\n"
-      res+=self.cursor_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    cnt=0
-    for e in self.result_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("result%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    if self.has_skipped_results_: res+=prefix+("skipped_results: %s\n" % self.DebugFormatInt32(self.skipped_results_))
-    if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
-    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
-    if self.has_index_only_: res+=prefix+("index_only: %s\n" % self.DebugFormatBool(self.index_only_))
-    if self.has_small_ops_: res+=prefix+("small_ops: %s\n" % self.DebugFormatBool(self.small_ops_))
-    if self.has_compiled_query_:
-      res+=prefix+"compiled_query <\n"
-      res+=self.compiled_query_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_compiled_cursor_:
-      res+=prefix+"compiled_cursor <\n"
-      res+=self.compiled_cursor_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    cnt=0
-    for e in self.index_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("index%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    cnt=0
-    for e in self.version_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
-      cnt+=1
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kcursor = 1
-  kresult = 2
-  kskipped_results = 7
-  kmore_results = 3
-  kkeys_only = 4
-  kindex_only = 9
-  ksmall_ops = 10
-  kcompiled_query = 5
-  kcompiled_cursor = 6
-  kindex = 8
-  kversion = 11
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "cursor",
-    2: "result",
-    3: "more_results",
-    4: "keys_only",
-    5: "compiled_query",
-    6: "compiled_cursor",
-    7: "skipped_results",
-    8: "index",
-    9: "index_only",
-    10: "small_ops",
-    11: "version",
-  }, 11)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-    4: ProtocolBuffer.Encoder.NUMERIC,
-    5: ProtocolBuffer.Encoder.STRING,
-    6: ProtocolBuffer.Encoder.STRING,
-    7: ProtocolBuffer.Encoder.NUMERIC,
-    8: ProtocolBuffer.Encoder.STRING,
-    9: ProtocolBuffer.Encoder.NUMERIC,
-    10: ProtocolBuffer.Encoder.NUMERIC,
-    11: ProtocolBuffer.Encoder.NUMERIC,
-  }, 11, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.QueryResult'
-class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage):
-  has_model_key_ = 0
-  has_size_ = 0
-  size_ = 0
-  has_max_ = 0
-  max_ = 0
-
-  def __init__(self, contents=None):
-    self.model_key_ = Reference()
-    if contents is not None: self.MergeFromString(contents)
-
-  def model_key(self): return self.model_key_
-
-  def mutable_model_key(self): self.has_model_key_ = 1; return self.model_key_
-
-  def clear_model_key(self):self.has_model_key_ = 0; self.model_key_.Clear()
-
-  def has_model_key(self): return self.has_model_key_
-
-  def size(self): return self.size_
-
-  def set_size(self, x):
-    self.has_size_ = 1
-    self.size_ = x
-
-  def clear_size(self):
-    if self.has_size_:
-      self.has_size_ = 0
-      self.size_ = 0
-
-  def has_size(self): return self.has_size_
-
-  def max(self): return self.max_
-
-  def set_max(self, x):
-    self.has_max_ = 1
-    self.max_ = x
-
-  def clear_max(self):
-    if self.has_max_:
-      self.has_max_ = 0
-      self.max_ = 0
-
-  def has_max(self): return self.has_max_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_model_key()): self.mutable_model_key().MergeFrom(x.model_key())
-    if (x.has_size()): self.set_size(x.size())
-    if (x.has_max()): self.set_max(x.max())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_model_key_ != x.has_model_key_: return 0
-    if self.has_model_key_ and self.model_key_ != x.model_key_: return 0
-    if self.has_size_ != x.has_size_: return 0
-    if self.has_size_ and self.size_ != x.size_: return 0
-    if self.has_max_ != x.has_max_: return 0
-    if self.has_max_ and self.max_ != x.max_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_model_key_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: model_key not set.')
-    elif not self.model_key_.IsInitialized(debug_strs): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(self.model_key_.ByteSize())
-    if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
-    if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_model_key_):
-      n += 1
-      n += self.lengthString(self.model_key_.ByteSizePartial())
-    if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
-    if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
-    return n
-
-  def Clear(self):
-    self.clear_model_key()
-    self.clear_size()
-    self.clear_max()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(10)
-    out.putVarInt32(self.model_key_.ByteSize())
-    self.model_key_.OutputUnchecked(out)
-    if (self.has_size_):
-      out.putVarInt32(16)
-      out.putVarInt64(self.size_)
-    if (self.has_max_):
-      out.putVarInt32(24)
-      out.putVarInt64(self.max_)
-
-  def OutputPartial(self, out):
-    if (self.has_model_key_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.model_key_.ByteSizePartial())
-      self.model_key_.OutputPartial(out)
-    if (self.has_size_):
-      out.putVarInt32(16)
-      out.putVarInt64(self.size_)
-    if (self.has_max_):
-      out.putVarInt32(24)
-      out.putVarInt64(self.max_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_model_key().TryMerge(tmp)
-        continue
-      if tt == 16:
-        self.set_size(d.getVarInt64())
-        continue
-      if tt == 24:
-        self.set_max(d.getVarInt64())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_model_key_:
-      res+=prefix+"model_key <\n"
-      res+=self.model_key_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_size_: res+=prefix+("size: %s\n" % self.DebugFormatInt64(self.size_))
-    if self.has_max_: res+=prefix+("max: %s\n" % self.DebugFormatInt64(self.max_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kmodel_key = 1
-  ksize = 2
-  kmax = 3
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "model_key",
-    2: "size",
-    3: "max",
-  }, 3)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    2: ProtocolBuffer.Encoder.NUMERIC,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsRequest'
-class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage):
-  has_start_ = 0
-  start_ = 0
-  has_end_ = 0
-  end_ = 0
-  has_cost_ = 0
-  cost_ = None
-
-  def __init__(self, contents=None):
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def start(self): return self.start_
-
-  def set_start(self, x):
-    self.has_start_ = 1
-    self.start_ = x
-
-  def clear_start(self):
-    if self.has_start_:
-      self.has_start_ = 0
-      self.start_ = 0
-
-  def has_start(self): return self.has_start_
-
-  def end(self): return self.end_
-
-  def set_end(self, x):
-    self.has_end_ = 1
-    self.end_ = x
-
-  def clear_end(self):
-    if self.has_end_:
-      self.has_end_ = 0
-      self.end_ = 0
-
-  def has_end(self): return self.has_end_
-
-  def cost(self):
-    if self.cost_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.cost_ is None: self.cost_ = Cost()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.cost_
-
-  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
-
-  def clear_cost(self):
-
-    if self.has_cost_:
-      self.has_cost_ = 0;
-      if self.cost_ is not None: self.cost_.Clear()
-
-  def has_cost(self): return self.has_cost_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_start()): self.set_start(x.start())
-    if (x.has_end()): self.set_end(x.end())
-    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_start_ != x.has_start_: return 0
-    if self.has_start_ and self.start_ != x.start_: return 0
-    if self.has_end_ != x.has_end_: return 0
-    if self.has_end_ and self.end_ != x.end_: return 0
-    if self.has_cost_ != x.has_cost_: return 0
-    if self.has_cost_ and self.cost_ != x.cost_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_start_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: start not set.')
-    if (not self.has_end_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: end not set.')
-    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthVarInt64(self.start_)
-    n += self.lengthVarInt64(self.end_)
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
-    return n + 2
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_start_):
-      n += 1
-      n += self.lengthVarInt64(self.start_)
-    if (self.has_end_):
-      n += 1
-      n += self.lengthVarInt64(self.end_)
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_start()
-    self.clear_end()
-    self.clear_cost()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(8)
-    out.putVarInt64(self.start_)
-    out.putVarInt32(16)
-    out.putVarInt64(self.end_)
-    if (self.has_cost_):
-      out.putVarInt32(26)
-      out.putVarInt32(self.cost_.ByteSize())
-      self.cost_.OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_start_):
-      out.putVarInt32(8)
-      out.putVarInt64(self.start_)
-    if (self.has_end_):
-      out.putVarInt32(16)
-      out.putVarInt64(self.end_)
-    if (self.has_cost_):
-      out.putVarInt32(26)
-      out.putVarInt32(self.cost_.ByteSizePartial())
-      self.cost_.OutputPartial(out)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 8:
-        self.set_start(d.getVarInt64())
-        continue
-      if tt == 16:
-        self.set_end(d.getVarInt64())
-        continue
-      if tt == 26:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_cost().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_start_: res+=prefix+("start: %s\n" % self.DebugFormatInt64(self.start_))
-    if self.has_end_: res+=prefix+("end: %s\n" % self.DebugFormatInt64(self.end_))
-    if self.has_cost_:
-      res+=prefix+"cost <\n"
-      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kstart = 1
-  kend = 2
-  kcost = 3
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "start",
-    2: "end",
-    3: "cost",
-  }, 3)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.NUMERIC,
-    2: ProtocolBuffer.Encoder.NUMERIC,
-    3: ProtocolBuffer.Encoder.STRING,
-  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsResponse'
-class CompositeIndices(ProtocolBuffer.ProtocolMessage):
-
-  def __init__(self, contents=None):
-    self.index_ = []
-    if contents is not None: self.MergeFromString(contents)
-
-  def index_size(self): return len(self.index_)
-  def index_list(self): return self.index_
-
-  def index(self, i):
-    return self.index_[i]
-
-  def mutable_index(self, i):
-    return self.index_[i]
-
-  def add_index(self):
-    x = CompositeIndex()
-    self.index_.append(x)
-    return x
-
-  def clear_index(self):
-    self.index_ = []
-
-  def MergeFrom(self, x):
-    assert x is not self
-    for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if len(self.index_) != len(x.index_): return 0
-    for e1, e2 in zip(self.index_, x.index_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    for p in self.index_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += 1 * len(self.index_)
-    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    n += 1 * len(self.index_)
-    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_index()
-
-  def OutputUnchecked(self, out):
-    for i in xrange(len(self.index_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.index_[i].ByteSize())
-      self.index_[i].OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    for i in xrange(len(self.index_)):
-      out.putVarInt32(10)
-      out.putVarInt32(self.index_[i].ByteSizePartial())
-      self.index_[i].OutputPartial(out)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_index().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    cnt=0
-    for e in self.index_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("index%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kindex = 1
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "index",
-  }, 1)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompositeIndices'
-class AddActionsRequest(ProtocolBuffer.ProtocolMessage):
-  has_transaction_ = 0
-
-  def __init__(self, contents=None):
-    self.transaction_ = Transaction()
-    self.action_ = []
-    if contents is not None: self.MergeFromString(contents)
-
-  def transaction(self): return self.transaction_
-
-  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction_
-
-  def clear_transaction(self):self.has_transaction_ = 0; self.transaction_.Clear()
-
-  def has_transaction(self): return self.has_transaction_
-
-  def action_size(self): return len(self.action_)
-  def action_list(self): return self.action_
-
-  def action(self, i):
-    return self.action_[i]
-
-  def mutable_action(self, i):
-    return self.action_[i]
-
-  def add_action(self):
-    x = Action()
-    self.action_.append(x)
-    return x
-
-  def clear_action(self):
-    self.action_ = []
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
-    for i in xrange(x.action_size()): self.add_action().CopyFrom(x.action(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_transaction_ != x.has_transaction_: return 0
-    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
-    if len(self.action_) != len(x.action_): return 0
-    for e1, e2 in zip(self.action_, x.action_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_transaction_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: transaction not set.')
-    elif not self.transaction_.IsInitialized(debug_strs): initialized = 0
-    for p in self.action_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(self.transaction_.ByteSize())
-    n += 1 * len(self.action_)
-    for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSize())
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_transaction_):
-      n += 1
-      n += self.lengthString(self.transaction_.ByteSizePartial())
-    n += 1 * len(self.action_)
-    for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_transaction()
-    self.clear_action()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(10)
-    out.putVarInt32(self.transaction_.ByteSize())
-    self.transaction_.OutputUnchecked(out)
-    for i in xrange(len(self.action_)):
-      out.putVarInt32(18)
-      out.putVarInt32(self.action_[i].ByteSize())
-      self.action_[i].OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_transaction_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.transaction_.ByteSizePartial())
-      self.transaction_.OutputPartial(out)
-    for i in xrange(len(self.action_)):
-      out.putVarInt32(18)
-      out.putVarInt32(self.action_[i].ByteSizePartial())
-      self.action_[i].OutputPartial(out)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_transaction().TryMerge(tmp)
-        continue
-      if tt == 18:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_action().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_transaction_:
-      res+=prefix+"transaction <\n"
-      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    cnt=0
-    for e in self.action_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("action%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  ktransaction = 1
-  kaction = 2
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "transaction",
-    2: "action",
-  }, 2)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    2: ProtocolBuffer.Encoder.STRING,
-  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsRequest'
-class AddActionsResponse(ProtocolBuffer.ProtocolMessage):
-
-  def __init__(self, contents=None):
-    pass
-    if contents is not None: self.MergeFromString(contents)
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-
-  def Equals(self, x):
-    if x is self: return 1
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    return n
-
-  def Clear(self):
-    pass
-
-  def OutputUnchecked(self, out):
-    pass
-
-  def OutputPartial(self, out):
-    pass
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-  }, 0)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsResponse'
-class BeginTransactionRequest(ProtocolBuffer.ProtocolMessage):
-  has_app_ = 0
-  app_ = ""
-  has_allow_multiple_eg_ = 0
-  allow_multiple_eg_ = 0
-
-  def __init__(self, contents=None):
-    if contents is not None: self.MergeFromString(contents)
-
-  def app(self): return self.app_
-
-  def set_app(self, x):
-    self.has_app_ = 1
-    self.app_ = x
-
-  def clear_app(self):
-    if self.has_app_:
-      self.has_app_ = 0
-      self.app_ = ""
-
-  def has_app(self): return self.has_app_
-
-  def allow_multiple_eg(self): return self.allow_multiple_eg_
-
-  def set_allow_multiple_eg(self, x):
-    self.has_allow_multiple_eg_ = 1
-    self.allow_multiple_eg_ = x
-
-  def clear_allow_multiple_eg(self):
-    if self.has_allow_multiple_eg_:
-      self.has_allow_multiple_eg_ = 0
-      self.allow_multiple_eg_ = 0
-
-  def has_allow_multiple_eg(self): return self.has_allow_multiple_eg_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_app()): self.set_app(x.app())
-    if (x.has_allow_multiple_eg()): self.set_allow_multiple_eg(x.allow_multiple_eg())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_app_ != x.has_app_: return 0
-    if self.has_app_ and self.app_ != x.app_: return 0
-    if self.has_allow_multiple_eg_ != x.has_allow_multiple_eg_: return 0
-    if self.has_allow_multiple_eg_ and self.allow_multiple_eg_ != x.allow_multiple_eg_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_app_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: app not set.')
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(len(self.app_))
-    if (self.has_allow_multiple_eg_): n += 2
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_app_):
-      n += 1
-      n += self.lengthString(len(self.app_))
-    if (self.has_allow_multiple_eg_): n += 2
-    return n
-
-  def Clear(self):
-    self.clear_app()
-    self.clear_allow_multiple_eg()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(10)
-    out.putPrefixedString(self.app_)
-    if (self.has_allow_multiple_eg_):
-      out.putVarInt32(16)
-      out.putBoolean(self.allow_multiple_eg_)
-
-  def OutputPartial(self, out):
-    if (self.has_app_):
-      out.putVarInt32(10)
-      out.putPrefixedString(self.app_)
-    if (self.has_allow_multiple_eg_):
-      out.putVarInt32(16)
-      out.putBoolean(self.allow_multiple_eg_)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        self.set_app(d.getPrefixedString())
-        continue
-      if tt == 16:
-        self.set_allow_multiple_eg(d.getBoolean())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
-    if self.has_allow_multiple_eg_: res+=prefix+("allow_multiple_eg: %s\n" % self.DebugFormatBool(self.allow_multiple_eg_))
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kapp = 1
-  kallow_multiple_eg = 2
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "app",
-    2: "allow_multiple_eg",
-  }, 2)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    2: ProtocolBuffer.Encoder.NUMERIC,
-  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.BeginTransactionRequest'
-class CommitResponse_Version(ProtocolBuffer.ProtocolMessage):
-  has_root_entity_key_ = 0
-  has_version_ = 0
-  version_ = 0
-
-  def __init__(self, contents=None):
-    self.root_entity_key_ = Reference()
-    if contents is not None: self.MergeFromString(contents)
-
-  def root_entity_key(self): return self.root_entity_key_
-
-  def mutable_root_entity_key(self): self.has_root_entity_key_ = 1; return self.root_entity_key_
-
-  def clear_root_entity_key(self):self.has_root_entity_key_ = 0; self.root_entity_key_.Clear()
-
-  def has_root_entity_key(self): return self.has_root_entity_key_
-
-  def version(self): return self.version_
-
-  def set_version(self, x):
-    self.has_version_ = 1
-    self.version_ = x
-
-  def clear_version(self):
-    if self.has_version_:
-      self.has_version_ = 0
-      self.version_ = 0
-
-  def has_version(self): return self.has_version_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_root_entity_key()): self.mutable_root_entity_key().MergeFrom(x.root_entity_key())
-    if (x.has_version()): self.set_version(x.version())
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_root_entity_key_ != x.has_root_entity_key_: return 0
-    if self.has_root_entity_key_ and self.root_entity_key_ != x.root_entity_key_: return 0
-    if self.has_version_ != x.has_version_: return 0
-    if self.has_version_ and self.version_ != x.version_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_root_entity_key_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: root_entity_key not set.')
-    elif not self.root_entity_key_.IsInitialized(debug_strs): initialized = 0
-    if (not self.has_version_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: version not set.')
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(self.root_entity_key_.ByteSize())
-    n += self.lengthVarInt64(self.version_)
-    return n + 2
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_root_entity_key_):
-      n += 1
-      n += self.lengthString(self.root_entity_key_.ByteSizePartial())
-    if (self.has_version_):
-      n += 1
-      n += self.lengthVarInt64(self.version_)
-    return n
-
-  def Clear(self):
-    self.clear_root_entity_key()
-    self.clear_version()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(34)
-    out.putVarInt32(self.root_entity_key_.ByteSize())
-    self.root_entity_key_.OutputUnchecked(out)
-    out.putVarInt32(40)
-    out.putVarInt64(self.version_)
-
-  def OutputPartial(self, out):
-    if (self.has_root_entity_key_):
-      out.putVarInt32(34)
-      out.putVarInt32(self.root_entity_key_.ByteSizePartial())
-      self.root_entity_key_.OutputPartial(out)
-    if (self.has_version_):
-      out.putVarInt32(40)
-      out.putVarInt64(self.version_)
-
-  def TryMerge(self, d):
-    while 1:
-      tt = d.getVarInt32()
-      if tt == 28: break
-      if tt == 34:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_root_entity_key().TryMerge(tmp)
-        continue
-      if tt == 40:
-        self.set_version(d.getVarInt64())
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_root_entity_key_:
-      res+=prefix+"root_entity_key <\n"
-      res+=self.root_entity_key_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
-    return res
-
-class CommitResponse(ProtocolBuffer.ProtocolMessage):
-  has_cost_ = 0
-  cost_ = None
-
-  def __init__(self, contents=None):
-    self.version_ = []
-    self.lazy_init_lock_ = thread.allocate_lock()
-    if contents is not None: self.MergeFromString(contents)
-
-  def cost(self):
-    if self.cost_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.cost_ is None: self.cost_ = Cost()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.cost_
-
-  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
-
-  def clear_cost(self):
-
-    if self.has_cost_:
-      self.has_cost_ = 0;
-      if self.cost_ is not None: self.cost_.Clear()
-
-  def has_cost(self): return self.has_cost_
-
-  def version_size(self): return len(self.version_)
-  def version_list(self): return self.version_
-
-  def version(self, i):
-    return self.version_[i]
-
-  def mutable_version(self, i):
-    return self.version_[i]
-
-  def add_version(self):
-    x = CommitResponse_Version()
-    self.version_.append(x)
-    return x
-
-  def clear_version(self):
-    self.version_ = []
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
-    for i in xrange(x.version_size()): self.add_version().CopyFrom(x.version(i))
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_cost_ != x.has_cost_: return 0
-    if self.has_cost_ and self.cost_ != x.cost_: return 0
-    if len(self.version_) != len(x.version_): return 0
-    for e1, e2 in zip(self.version_, x.version_):
-      if e1 != e2: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
-    for p in self.version_:
-      if not p.IsInitialized(debug_strs): initialized=0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
-    n += 2 * len(self.version_)
-    for i in xrange(len(self.version_)): n += self.version_[i].ByteSize()
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
-    n += 2 * len(self.version_)
-    for i in xrange(len(self.version_)): n += self.version_[i].ByteSizePartial()
-    return n
-
-  def Clear(self):
-    self.clear_cost()
-    self.clear_version()
-
-  def OutputUnchecked(self, out):
-    if (self.has_cost_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.cost_.ByteSize())
-      self.cost_.OutputUnchecked(out)
-    for i in xrange(len(self.version_)):
-      out.putVarInt32(27)
-      self.version_[i].OutputUnchecked(out)
-      out.putVarInt32(28)
-
-  def OutputPartial(self, out):
-    if (self.has_cost_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.cost_.ByteSizePartial())
-      self.cost_.OutputPartial(out)
-    for i in xrange(len(self.version_)):
-      out.putVarInt32(27)
-      self.version_[i].OutputPartial(out)
-      out.putVarInt32(28)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_cost().TryMerge(tmp)
-        continue
-      if tt == 27:
-        self.add_version().TryMerge(d)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_cost_:
-      res+=prefix+"cost <\n"
-      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    cnt=0
-    for e in self.version_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("Version%s {\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+"}\n"
-      cnt+=1
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kcost = 1
-  kVersionGroup = 3
-  kVersionroot_entity_key = 4
-  kVersionversion = 5
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "cost",
-    3: "Version",
-    4: "root_entity_key",
-    5: "version",
-  }, 5)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.STARTGROUP,
-    4: ProtocolBuffer.Encoder.STRING,
-    5: ProtocolBuffer.Encoder.NUMERIC,
-  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CommitResponse'
-if _extension_runtime:
-  pass
-
-__all__ = ['Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_PositionIndexValue','CompiledCursor_Position','Cursor','Error','Cost','Cost_CommitCost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','TouchRequest','TouchResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse','CommitResponse_Version']
+if hasattr(datastore_v3_pb, '_DatastoreService_ClientStub'):
+  _DatastoreService_ClientStub = datastore_v3_pb._DatastoreService_ClientStub
diff --git a/google/appengine/datastore/datastore_query.py b/google/appengine/datastore/datastore_query.py
index d60edb0..daf19c7 100644
--- a/google/appengine/datastore/datastore_query.py
+++ b/google/appengine/datastore/datastore_query.py
@@ -1477,13 +1477,15 @@
 
   def reversed(self):
     """Creates a cursor for use in a query with a reversed sort order."""
-    for pos in self.__compiled_cursor.position_list():
+    if self.__compiled_cursor.has_position():
+      pos = self.__compiled_cursor.position()
       if pos.has_start_key():
         raise datastore_errors.BadRequestError('Cursor cannot be reversed.')
 
     rev_pb = datastore_pb.CompiledCursor()
     rev_pb.CopyFrom(self.__compiled_cursor)
-    for pos in rev_pb.position_list():
+    if rev_pb.has_position():
+      pos = rev_pb.position()
       pos.set_start_inclusive(not pos.start_inclusive())
     return Cursor(_cursor_pb=rev_pb)
 
diff --git a/google/appengine/datastore/datastore_rpc.py b/google/appengine/datastore/datastore_rpc.py
index b85d3ee..b6f4ced 100644
--- a/google/appengine/datastore/datastore_rpc.py
+++ b/google/appengine/datastore/datastore_rpc.py
@@ -1497,7 +1497,7 @@
   def __force(self, req):
     """Configure a request to force mutations."""
     if isinstance(req, datastore_v4_pb.CommitRequest):
-      req.mutable_mutation().set_force(True)
+      req.mutable_deprecated_mutation().set_force(True)
     else:
       req.set_force(True)
 
@@ -1756,12 +1756,12 @@
     def make_put_call(base_req, pbs, user_data=None):
       req = copy.deepcopy(base_req)
       if self._api_version == _DATASTORE_V4:
-        mutation = req.mutable_mutation()
+        deprecated_mutation = req.mutable_deprecated_mutation()
         for entity in pbs:
           if datastore_pbs.is_complete_v4_key(entity.key()):
-            mutation.upsert_list().append(entity)
+            deprecated_mutation.upsert_list().append(entity)
           else:
-            mutation.insert_auto_id_list().append(entity)
+            deprecated_mutation.insert_auto_id_list().append(entity)
         method = 'Commit'
         resp = datastore_v4_pb.CommitResponse()
       else:
@@ -1825,7 +1825,8 @@
         if datastore_pbs.is_complete_v4_key(entity.key()):
           keys.append(entity.key())
         else:
-          keys.append(rpc.response.mutation_result().insert_auto_id_key(i))
+          keys.append(
+              rpc.response.deprecated_mutation_result().insert_auto_id_key(i))
           i += 1
       keys = [self.__adapter.pb_v4_to_key(key) for key in keys]
     else:
@@ -1864,7 +1865,7 @@
     def make_delete_call(base_req, pbs, user_data=None):
       req = copy.deepcopy(base_req)
       if self._api_version == _DATASTORE_V4:
-        req.mutable_mutation().delete_list().extend(pbs)
+        req.mutable_deprecated_mutation().delete_list().extend(pbs)
         method = 'Commit'
         resp = datastore_v4_pb.CommitResponse()
       else:
@@ -2506,9 +2507,11 @@
         self.__force(req)
 
 
-      mutation = req.mutable_mutation()
-      mutation.upsert_list().extend(self.__pending_v4_upserts.itervalues())
-      mutation.delete_list().extend(self.__pending_v4_deletes.itervalues())
+      deprecated_mutation = req.mutable_deprecated_mutation()
+      deprecated_mutation.upsert_list().extend(
+          self.__pending_v4_upserts.itervalues())
+      deprecated_mutation.delete_list().extend(
+          self.__pending_v4_deletes.itervalues())
 
 
       self.__pending_v4_upserts.clear()
diff --git a/google/appengine/datastore/datastore_stub_util.py b/google/appengine/datastore/datastore_stub_util.py
index 3a76744..d9b1b71 100644
--- a/google/appengine/datastore/datastore_stub_util.py
+++ b/google/appengine/datastore/datastore_stub_util.py
@@ -1070,9 +1070,9 @@
       (cursor_entity, inclusive): a entity_pb.EntityProto and if it should
       be included in the result set.
     """
-    assert len(compiled_cursor.position_list()) == 1
+    assert compiled_cursor.has_position()
 
-    position = compiled_cursor.position(0)
+    position = compiled_cursor.position()
 
 
 
@@ -1111,7 +1111,7 @@
     if last_result is not None:
 
 
-      position = compiled_cursor.add_position()
+      position = compiled_cursor.mutable_position()
 
 
       if '__key__' in self.__cursor_properties:
@@ -1154,7 +1154,7 @@
           new_results.append(result)
       results = new_results
 
-    if query.has_compiled_cursor() and query.compiled_cursor().position_list():
+    if query.has_compiled_cursor() and query.compiled_cursor().has_position():
       start_cursor = self._DecodeCompiledCursor(query.compiled_cursor())
       self.__last_result = start_cursor[0]
       start_cursor_position = self._GetCursorOffset(results, start_cursor)
@@ -1163,7 +1163,7 @@
       start_cursor_position = 0
 
     if query.has_end_compiled_cursor():
-      if query.end_compiled_cursor().position_list():
+      if query.end_compiled_cursor().has_position():
         end_cursor = self._DecodeCompiledCursor(query.end_compiled_cursor())
         end_cursor_position = self._GetCursorOffset(results, end_cursor)
       else:
@@ -3055,16 +3055,32 @@
     self._datastore.DeleteIndex(index, self._trusted, self._app_id)
 
   def _Dynamic_AllocateIds(self, allocate_ids_request, allocate_ids_response):
-    CheckAppId(allocate_ids_request.model_key().app(),
-               self._trusted, self._app_id)
+    Check(not allocate_ids_request.has_model_key()
+          or not allocate_ids_request.reserve_list(),
+          'Cannot allocate and reserve IDs in the same request')
+    if allocate_ids_request.reserve_list():
+      Check(not allocate_ids_request.has_size(),
+            'Cannot specify size when reserving IDs')
+      Check(not allocate_ids_request.has_max(),
+            'Cannot specify max when reserving IDs')
 
-    reference = allocate_ids_request.model_key()
+    if allocate_ids_request.has_model_key():
+      CheckAppId(allocate_ids_request.model_key().app(),
+                 self._trusted, self._app_id)
 
-    (start, end) = self._datastore._AllocateSequentialIds(
-        reference, allocate_ids_request.size(), allocate_ids_request.max())
+      reference = allocate_ids_request.model_key()
 
-    allocate_ids_response.set_start(start)
-    allocate_ids_response.set_end(end)
+      (start, end) = self._datastore._AllocateSequentialIds(
+          reference, allocate_ids_request.size(), allocate_ids_request.max())
+
+      allocate_ids_response.set_start(start)
+      allocate_ids_response.set_end(end)
+    else:
+      for reference in allocate_ids_request.reserve_list():
+        CheckAppId(reference.app(), self._trusted, self._app_id)
+      self._datastore._AllocateIds(allocate_ids_request.reserve_list())
+      allocate_ids_response.set_start(0)
+      allocate_ids_response.set_end(0)
 
   def _SetupIndexes(self, _open=open):
     """Ensure that the set of existing composite indexes matches index.yaml.
@@ -4253,4 +4269,3 @@
   prop_copy.MergeFrom(prop)
   prop_copy.set_multiple(False)
   return prop_copy
-
diff --git a/google/appengine/datastore/datastore_v3_pb.py b/google/appengine/datastore/datastore_v3_pb.py
index 8e14771..7470db3 100644
--- a/google/appengine/datastore/datastore_v3_pb.py
+++ b/google/appengine/datastore/datastore_v3_pb.py
@@ -17,14 +17,8391 @@
 
 
 
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+                   unusednames=printElemNumber,debug_strs no-special"""
+
+if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
+  _extension_runtime = True
+  _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
+else:
+  _extension_runtime = False
+  _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
+
+from google.appengine.api.api_base_pb import *
+import google.appengine.api.api_base_pb
+from google.appengine.datastore.action_pb import *
+import google.appengine.datastore.action_pb
+from google.appengine.datastore.entity_pb import *
+import google.appengine.datastore.entity_pb
+from google.appengine.datastore.snapshot_pb import *
+import google.appengine.datastore.snapshot_pb
+class InternalHeader(ProtocolBuffer.ProtocolMessage):
+  has_qos_ = 0
+  qos_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def qos(self): return self.qos_
+
+  def set_qos(self, x):
+    self.has_qos_ = 1
+    self.qos_ = x
+
+  def clear_qos(self):
+    if self.has_qos_:
+      self.has_qos_ = 0
+      self.qos_ = ""
+
+  def has_qos(self): return self.has_qos_
 
 
-"""The Python datastore protocol buffer definition.
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_qos()): self.set_qos(x.qos())
 
-Proto2 compiler expects generated file names to follow specific pattern,
-which is not the case for the datastore_pb.py (should be datastore_v3_pb.py).
-This file with the expected name redirects to the real legacy file.
-"""
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_qos_ != x.has_qos_: return 0
+    if self.has_qos_ and self.qos_ != x.qos_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_qos_): n += 1 + self.lengthString(len(self.qos_))
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_qos_): n += 1 + self.lengthString(len(self.qos_))
+    return n
+
+  def Clear(self):
+    self.clear_qos()
+
+  def OutputUnchecked(self, out):
+    if (self.has_qos_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.qos_)
+
+  def OutputPartial(self, out):
+    if (self.has_qos_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.qos_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_qos(d.getPrefixedString())
+        continue
 
 
-from google.appengine.datastore.datastore_pb import *
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_qos_: res+=prefix+("qos: %s\n" % self.DebugFormatString(self.qos_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kqos = 1
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "qos",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.InternalHeader'
+class Transaction(ProtocolBuffer.ProtocolMessage):
+  has_header_ = 0
+  header_ = None
+  has_handle_ = 0
+  handle_ = 0
+  has_app_ = 0
+  app_ = ""
+  has_mark_changes_ = 0
+  mark_changes_ = 0
+
+  def __init__(self, contents=None):
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def header(self):
+    if self.header_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.header_ is None: self.header_ = InternalHeader()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.header_
+
+  def mutable_header(self): self.has_header_ = 1; return self.header()
+
+  def clear_header(self):
+
+    if self.has_header_:
+      self.has_header_ = 0;
+      if self.header_ is not None: self.header_.Clear()
+
+  def has_header(self): return self.has_header_
+
+  def handle(self): return self.handle_
+
+  def set_handle(self, x):
+    self.has_handle_ = 1
+    self.handle_ = x
+
+  def clear_handle(self):
+    if self.has_handle_:
+      self.has_handle_ = 0
+      self.handle_ = 0
+
+  def has_handle(self): return self.has_handle_
+
+  def app(self): return self.app_
+
+  def set_app(self, x):
+    self.has_app_ = 1
+    self.app_ = x
+
+  def clear_app(self):
+    if self.has_app_:
+      self.has_app_ = 0
+      self.app_ = ""
+
+  def has_app(self): return self.has_app_
+
+  def mark_changes(self): return self.mark_changes_
+
+  def set_mark_changes(self, x):
+    self.has_mark_changes_ = 1
+    self.mark_changes_ = x
+
+  def clear_mark_changes(self):
+    if self.has_mark_changes_:
+      self.has_mark_changes_ = 0
+      self.mark_changes_ = 0
+
+  def has_mark_changes(self): return self.has_mark_changes_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
+    if (x.has_handle()): self.set_handle(x.handle())
+    if (x.has_app()): self.set_app(x.app())
+    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_header_ != x.has_header_: return 0
+    if self.has_header_ and self.header_ != x.header_: return 0
+    if self.has_handle_ != x.has_handle_: return 0
+    if self.has_handle_ and self.handle_ != x.handle_: return 0
+    if self.has_app_ != x.has_app_: return 0
+    if self.has_app_ and self.app_ != x.app_: return 0
+    if self.has_mark_changes_ != x.has_mark_changes_: return 0
+    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
+    if (not self.has_handle_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: handle not set.')
+    if (not self.has_app_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: app not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
+    n += self.lengthString(len(self.app_))
+    if (self.has_mark_changes_): n += 2
+    return n + 10
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
+    if (self.has_handle_):
+      n += 9
+    if (self.has_app_):
+      n += 1
+      n += self.lengthString(len(self.app_))
+    if (self.has_mark_changes_): n += 2
+    return n
+
+  def Clear(self):
+    self.clear_header()
+    self.clear_handle()
+    self.clear_app()
+    self.clear_mark_changes()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(9)
+    out.put64(self.handle_)
+    out.putVarInt32(18)
+    out.putPrefixedString(self.app_)
+    if (self.has_mark_changes_):
+      out.putVarInt32(24)
+      out.putBoolean(self.mark_changes_)
+    if (self.has_header_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.header_.ByteSize())
+      self.header_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_handle_):
+      out.putVarInt32(9)
+      out.put64(self.handle_)
+    if (self.has_app_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.app_)
+    if (self.has_mark_changes_):
+      out.putVarInt32(24)
+      out.putBoolean(self.mark_changes_)
+    if (self.has_header_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.header_.ByteSizePartial())
+      self.header_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 9:
+        self.set_handle(d.get64())
+        continue
+      if tt == 18:
+        self.set_app(d.getPrefixedString())
+        continue
+      if tt == 24:
+        self.set_mark_changes(d.getBoolean())
+        continue
+      if tt == 34:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_header().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_header_:
+      res+=prefix+"header <\n"
+      res+=self.header_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_handle_: res+=prefix+("handle: %s\n" % self.DebugFormatFixed64(self.handle_))
+    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kheader = 4
+  khandle = 1
+  kapp = 2
+  kmark_changes = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "handle",
+    2: "app",
+    3: "mark_changes",
+    4: "header",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.DOUBLE,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Transaction'
+class Query_Filter(ProtocolBuffer.ProtocolMessage):
+
+
+  LESS_THAN    =    1
+  LESS_THAN_OR_EQUAL =    2
+  GREATER_THAN =    3
+  GREATER_THAN_OR_EQUAL =    4
+  EQUAL        =    5
+  IN           =    6
+  EXISTS       =    7
+
+  _Operator_NAMES = {
+    1: "LESS_THAN",
+    2: "LESS_THAN_OR_EQUAL",
+    3: "GREATER_THAN",
+    4: "GREATER_THAN_OR_EQUAL",
+    5: "EQUAL",
+    6: "IN",
+    7: "EXISTS",
+  }
+
+  def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "")
+  Operator_Name = classmethod(Operator_Name)
+
+  has_op_ = 0
+  op_ = 0
+
+  def __init__(self, contents=None):
+    self.property_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def op(self): return self.op_
+
+  def set_op(self, x):
+    self.has_op_ = 1
+    self.op_ = x
+
+  def clear_op(self):
+    if self.has_op_:
+      self.has_op_ = 0
+      self.op_ = 0
+
+  def has_op(self): return self.has_op_
+
+  def property_size(self): return len(self.property_)
+  def property_list(self): return self.property_
+
+  def property(self, i):
+    return self.property_[i]
+
+  def mutable_property(self, i):
+    return self.property_[i]
+
+  def add_property(self):
+    x = Property()
+    self.property_.append(x)
+    return x
+
+  def clear_property(self):
+    self.property_ = []
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_op()): self.set_op(x.op())
+    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_op_ != x.has_op_: return 0
+    if self.has_op_ and self.op_ != x.op_: return 0
+    if len(self.property_) != len(x.property_): return 0
+    for e1, e2 in zip(self.property_, x.property_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_op_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: op not set.')
+    for p in self.property_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthVarInt64(self.op_)
+    n += 1 * len(self.property_)
+    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
+    return n + 1
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_op_):
+      n += 1
+      n += self.lengthVarInt64(self.op_)
+    n += 1 * len(self.property_)
+    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_op()
+    self.clear_property()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(48)
+    out.putVarInt32(self.op_)
+    for i in xrange(len(self.property_)):
+      out.putVarInt32(114)
+      out.putVarInt32(self.property_[i].ByteSize())
+      self.property_[i].OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_op_):
+      out.putVarInt32(48)
+      out.putVarInt32(self.op_)
+    for i in xrange(len(self.property_)):
+      out.putVarInt32(114)
+      out.putVarInt32(self.property_[i].ByteSizePartial())
+      self.property_[i].OutputPartial(out)
+
+  def TryMerge(self, d):
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 36: break
+      if tt == 48:
+        self.set_op(d.getVarInt32())
+        continue
+      if tt == 114:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_property().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_))
+    cnt=0
+    for e in self.property_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("property%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    return res
+
+class Query_Order(ProtocolBuffer.ProtocolMessage):
+
+
+  ASCENDING    =    1
+  DESCENDING   =    2
+
+  _Direction_NAMES = {
+    1: "ASCENDING",
+    2: "DESCENDING",
+  }
+
+  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
+  Direction_Name = classmethod(Direction_Name)
+
+  has_property_ = 0
+  property_ = ""
+  has_direction_ = 0
+  direction_ = 1
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def property(self): return self.property_
+
+  def set_property(self, x):
+    self.has_property_ = 1
+    self.property_ = x
+
+  def clear_property(self):
+    if self.has_property_:
+      self.has_property_ = 0
+      self.property_ = ""
+
+  def has_property(self): return self.has_property_
+
+  def direction(self): return self.direction_
+
+  def set_direction(self, x):
+    self.has_direction_ = 1
+    self.direction_ = x
+
+  def clear_direction(self):
+    if self.has_direction_:
+      self.has_direction_ = 0
+      self.direction_ = 1
+
+  def has_direction(self): return self.has_direction_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_property()): self.set_property(x.property())
+    if (x.has_direction()): self.set_direction(x.direction())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_property_ != x.has_property_: return 0
+    if self.has_property_ and self.property_ != x.property_: return 0
+    if self.has_direction_ != x.has_direction_: return 0
+    if self.has_direction_ and self.direction_ != x.direction_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_property_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: property not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.property_))
+    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+    return n + 1
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_property_):
+      n += 1
+      n += self.lengthString(len(self.property_))
+    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+    return n
+
+  def Clear(self):
+    self.clear_property()
+    self.clear_direction()
+
+  def OutputUnchecked(self, out):
+    # Serialize to 'out'. Tag 82 = length-delimited 'property', tag 88 =
+    # varint 'direction'. "Unchecked": assumes required fields are set.
+    out.putVarInt32(82)
+    out.putPrefixedString(self.property_)
+    if (self.has_direction_):
+      out.putVarInt32(88)
+      out.putVarInt32(self.direction_)
+
+  def OutputPartial(self, out):
+    # Same wire encoding as OutputUnchecked, but silently skips unset
+    # required fields instead of assuming they are present.
+    if (self.has_property_):
+      out.putVarInt32(82)
+      out.putPrefixedString(self.property_)
+    if (self.has_direction_):
+      out.putVarInt32(88)
+      out.putVarInt32(self.direction_)
+
+  def TryMerge(self, d):
+    # Decode fields from decoder 'd' until the enclosing group's end tag
+    # (76) is seen. Unknown tags are skipped; tag 0 is a framing error.
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 76: break
+      if tt == 82:
+        self.set_property(d.getPrefixedString())
+        continue
+      if tt == 88:
+        self.set_direction(d.getVarInt32())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    # Debug text dump; only fields that are present are printed.
+    res=""
+    if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
+    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
+    return res
+
+class Query(ProtocolBuffer.ProtocolMessage):
+  # Generated protocol-message class for a datastore query request.
+  # Presence of each singular field is tracked by a parallel has_<field>_
+  # flag; the class attributes below are the shared per-field defaults.
+
+
+  # Query plan hint enum values.
+  ORDER_FIRST  =    1
+  ANCESTOR_FIRST =    2
+  FILTER_FIRST =    3
+
+  _Hint_NAMES = {
+    1: "ORDER_FIRST",
+    2: "ANCESTOR_FIRST",
+    3: "FILTER_FIRST",
+  }
+
+  # Maps a Hint enum value to its name, or "" if unknown.
+  def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "")
+  Hint_Name = classmethod(Hint_Name)
+
+  # Presence flags and default values for singular fields. Message-valued
+  # fields default to None and are lazily constructed by their accessors.
+  has_header_ = 0
+  header_ = None
+  has_app_ = 0
+  app_ = ""
+  has_name_space_ = 0
+  name_space_ = ""
+  has_kind_ = 0
+  kind_ = ""
+  has_ancestor_ = 0
+  ancestor_ = None
+  has_search_query_ = 0
+  search_query_ = ""
+  has_hint_ = 0
+  hint_ = 0
+  has_count_ = 0
+  count_ = 0
+  has_offset_ = 0
+  offset_ = 0
+  has_limit_ = 0
+  limit_ = 0
+  has_compiled_cursor_ = 0
+  compiled_cursor_ = None
+  has_end_compiled_cursor_ = 0
+  end_compiled_cursor_ = None
+  has_require_perfect_plan_ = 0
+  require_perfect_plan_ = 0
+  has_keys_only_ = 0
+  keys_only_ = 0
+  has_transaction_ = 0
+  transaction_ = None
+  has_compile_ = 0
+  compile_ = 0
+  has_failover_ms_ = 0
+  failover_ms_ = 0
+  has_strong_ = 0
+  strong_ = 0
+  has_distinct_ = 0
+  distinct_ = 0
+  has_min_safe_time_seconds_ = 0
+  min_safe_time_seconds_ = 0
+  has_persist_offset_ = 0
+  persist_offset_ = 0
+
+  def __init__(self, contents=None):
+    # Repeated fields get fresh per-instance lists; lazy_init_lock_ guards
+    # the lazy construction of optional message-valued fields.
+    self.filter_ = []
+    self.order_ = []
+    self.composite_index_ = []
+    self.property_name_ = []
+    self.group_by_property_name_ = []
+    self.safe_replica_name_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  # Accessors for the optional 'header' sub-message. The instance is
+  # created lazily (double-checked under lazy_init_lock_); note that
+  # header() alone does not mark the field present — mutable_header() does.
+  def header(self):
+    if self.header_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.header_ is None: self.header_ = InternalHeader()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.header_
+
+  def mutable_header(self): self.has_header_ = 1; return self.header()
+
+  def clear_header(self):
+
+    if self.has_header_:
+      self.has_header_ = 0;
+      if self.header_ is not None: self.header_.Clear()
+
+  def has_header(self): return self.has_header_
+
+  # Scalar string fields: app (required), name_space, kind.
+  def app(self): return self.app_
+
+  def set_app(self, x):
+    self.has_app_ = 1
+    self.app_ = x
+
+  def clear_app(self):
+    if self.has_app_:
+      self.has_app_ = 0
+      self.app_ = ""
+
+  def has_app(self): return self.has_app_
+
+  def name_space(self): return self.name_space_
+
+  def set_name_space(self, x):
+    self.has_name_space_ = 1
+    self.name_space_ = x
+
+  def clear_name_space(self):
+    if self.has_name_space_:
+      self.has_name_space_ = 0
+      self.name_space_ = ""
+
+  def has_name_space(self): return self.has_name_space_
+
+  def kind(self): return self.kind_
+
+  def set_kind(self, x):
+    self.has_kind_ = 1
+    self.kind_ = x
+
+  def clear_kind(self):
+    if self.has_kind_:
+      self.has_kind_ = 0
+      self.kind_ = ""
+
+  def has_kind(self): return self.has_kind_
+
+  # Optional 'ancestor' Reference sub-message; same lazy-init pattern as
+  # 'header' above.
+  def ancestor(self):
+    if self.ancestor_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.ancestor_ is None: self.ancestor_ = Reference()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.ancestor_
+
+  def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
+
+  def clear_ancestor(self):
+
+    if self.has_ancestor_:
+      self.has_ancestor_ = 0;
+      if self.ancestor_ is not None: self.ancestor_.Clear()
+
+  def has_ancestor(self): return self.has_ancestor_
+
+  # Repeated message field 'filter' (Query_Filter elements); repeated
+  # fields have no presence flag — emptiness of the list is the state.
+  def filter_size(self): return len(self.filter_)
+  def filter_list(self): return self.filter_
+
+  def filter(self, i):
+    return self.filter_[i]
+
+  def mutable_filter(self, i):
+    return self.filter_[i]
+
+  def add_filter(self):
+    x = Query_Filter()
+    self.filter_.append(x)
+    return x
+
+  def clear_filter(self):
+    self.filter_ = []
+  def search_query(self): return self.search_query_
+
+  def set_search_query(self, x):
+    self.has_search_query_ = 1
+    self.search_query_ = x
+
+  def clear_search_query(self):
+    if self.has_search_query_:
+      self.has_search_query_ = 0
+      self.search_query_ = ""
+
+  def has_search_query(self): return self.has_search_query_
+
+  # Repeated message field 'order' (Query_Order elements).
+  def order_size(self): return len(self.order_)
+  def order_list(self): return self.order_
+
+  def order(self, i):
+    return self.order_[i]
+
+  def mutable_order(self, i):
+    return self.order_[i]
+
+  def add_order(self):
+    x = Query_Order()
+    self.order_.append(x)
+    return x
+
+  def clear_order(self):
+    self.order_ = []
+  def hint(self): return self.hint_
+
+  def set_hint(self, x):
+    self.has_hint_ = 1
+    self.hint_ = x
+
+  def clear_hint(self):
+    if self.has_hint_:
+      self.has_hint_ = 0
+      self.hint_ = 0
+
+  def has_hint(self): return self.has_hint_
+
+  # Scalar integer fields: count, offset, limit.
+  def count(self): return self.count_
+
+  def set_count(self, x):
+    self.has_count_ = 1
+    self.count_ = x
+
+  def clear_count(self):
+    if self.has_count_:
+      self.has_count_ = 0
+      self.count_ = 0
+
+  def has_count(self): return self.has_count_
+
+  def offset(self): return self.offset_
+
+  def set_offset(self, x):
+    self.has_offset_ = 1
+    self.offset_ = x
+
+  def clear_offset(self):
+    if self.has_offset_:
+      self.has_offset_ = 0
+      self.offset_ = 0
+
+  def has_offset(self): return self.has_offset_
+
+  def limit(self): return self.limit_
+
+  def set_limit(self, x):
+    self.has_limit_ = 1
+    self.limit_ = x
+
+  def clear_limit(self):
+    if self.has_limit_:
+      self.has_limit_ = 0
+      self.limit_ = 0
+
+  def has_limit(self): return self.has_limit_
+
+  # Optional CompiledCursor sub-messages (start and end cursors); lazily
+  # constructed under lazy_init_lock_ like 'header' and 'ancestor'.
+  def compiled_cursor(self):
+    if self.compiled_cursor_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.compiled_cursor_
+
+  def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
+
+  def clear_compiled_cursor(self):
+
+    if self.has_compiled_cursor_:
+      self.has_compiled_cursor_ = 0;
+      if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
+
+  def has_compiled_cursor(self): return self.has_compiled_cursor_
+
+  def end_compiled_cursor(self):
+    if self.end_compiled_cursor_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.end_compiled_cursor_ is None: self.end_compiled_cursor_ = CompiledCursor()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.end_compiled_cursor_
+
+  def mutable_end_compiled_cursor(self): self.has_end_compiled_cursor_ = 1; return self.end_compiled_cursor()
+
+  def clear_end_compiled_cursor(self):
+
+    if self.has_end_compiled_cursor_:
+      self.has_end_compiled_cursor_ = 0;
+      if self.end_compiled_cursor_ is not None: self.end_compiled_cursor_.Clear()
+
+  def has_end_compiled_cursor(self): return self.has_end_compiled_cursor_
+
+  # Repeated message field 'composite_index' (CompositeIndex elements).
+  def composite_index_size(self): return len(self.composite_index_)
+  def composite_index_list(self): return self.composite_index_
+
+  def composite_index(self, i):
+    return self.composite_index_[i]
+
+  def mutable_composite_index(self, i):
+    return self.composite_index_[i]
+
+  def add_composite_index(self):
+    x = CompositeIndex()
+    self.composite_index_.append(x)
+    return x
+
+  def clear_composite_index(self):
+    self.composite_index_ = []
+  def require_perfect_plan(self): return self.require_perfect_plan_
+
+  def set_require_perfect_plan(self, x):
+    self.has_require_perfect_plan_ = 1
+    self.require_perfect_plan_ = x
+
+  def clear_require_perfect_plan(self):
+    if self.has_require_perfect_plan_:
+      self.has_require_perfect_plan_ = 0
+      self.require_perfect_plan_ = 0
+
+  def has_require_perfect_plan(self): return self.has_require_perfect_plan_
+
+  def keys_only(self): return self.keys_only_
+
+  def set_keys_only(self, x):
+    self.has_keys_only_ = 1
+    self.keys_only_ = x
+
+  def clear_keys_only(self):
+    if self.has_keys_only_:
+      self.has_keys_only_ = 0
+      self.keys_only_ = 0
+
+  def has_keys_only(self): return self.has_keys_only_
+
+  # Optional Transaction sub-message; lazy-init pattern again.
+  def transaction(self):
+    if self.transaction_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.transaction_ is None: self.transaction_ = Transaction()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.transaction_
+
+  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+  def clear_transaction(self):
+
+    if self.has_transaction_:
+      self.has_transaction_ = 0;
+      if self.transaction_ is not None: self.transaction_.Clear()
+
+  def has_transaction(self): return self.has_transaction_
+
+  # Scalar fields: compile, failover_ms, strong (read-policy knobs).
+  def compile(self): return self.compile_
+
+  def set_compile(self, x):
+    self.has_compile_ = 1
+    self.compile_ = x
+
+  def clear_compile(self):
+    if self.has_compile_:
+      self.has_compile_ = 0
+      self.compile_ = 0
+
+  def has_compile(self): return self.has_compile_
+
+  def failover_ms(self): return self.failover_ms_
+
+  def set_failover_ms(self, x):
+    self.has_failover_ms_ = 1
+    self.failover_ms_ = x
+
+  def clear_failover_ms(self):
+    if self.has_failover_ms_:
+      self.has_failover_ms_ = 0
+      self.failover_ms_ = 0
+
+  def has_failover_ms(self): return self.has_failover_ms_
+
+  def strong(self): return self.strong_
+
+  def set_strong(self, x):
+    self.has_strong_ = 1
+    self.strong_ = x
+
+  def clear_strong(self):
+    if self.has_strong_:
+      self.has_strong_ = 0
+      self.strong_ = 0
+
+  def has_strong(self): return self.has_strong_
+
+  # Repeated string fields: property_name (projection) and
+  # group_by_property_name.
+  def property_name_size(self): return len(self.property_name_)
+  def property_name_list(self): return self.property_name_
+
+  def property_name(self, i):
+    return self.property_name_[i]
+
+  def set_property_name(self, i, x):
+    self.property_name_[i] = x
+
+  def add_property_name(self, x):
+    self.property_name_.append(x)
+
+  def clear_property_name(self):
+    self.property_name_ = []
+
+  def group_by_property_name_size(self): return len(self.group_by_property_name_)
+  def group_by_property_name_list(self): return self.group_by_property_name_
+
+  def group_by_property_name(self, i):
+    return self.group_by_property_name_[i]
+
+  def set_group_by_property_name(self, i, x):
+    self.group_by_property_name_[i] = x
+
+  def add_group_by_property_name(self, x):
+    self.group_by_property_name_.append(x)
+
+  def clear_group_by_property_name(self):
+    self.group_by_property_name_ = []
+
+  def distinct(self): return self.distinct_
+
+  def set_distinct(self, x):
+    self.has_distinct_ = 1
+    self.distinct_ = x
+
+  def clear_distinct(self):
+    if self.has_distinct_:
+      self.has_distinct_ = 0
+      self.distinct_ = 0
+
+  def has_distinct(self): return self.has_distinct_
+
+  def min_safe_time_seconds(self): return self.min_safe_time_seconds_
+
+  def set_min_safe_time_seconds(self, x):
+    self.has_min_safe_time_seconds_ = 1
+    self.min_safe_time_seconds_ = x
+
+  def clear_min_safe_time_seconds(self):
+    if self.has_min_safe_time_seconds_:
+      self.has_min_safe_time_seconds_ = 0
+      self.min_safe_time_seconds_ = 0
+
+  def has_min_safe_time_seconds(self): return self.has_min_safe_time_seconds_
+
+  # Repeated string field 'safe_replica_name'.
+  def safe_replica_name_size(self): return len(self.safe_replica_name_)
+  def safe_replica_name_list(self): return self.safe_replica_name_
+
+  def safe_replica_name(self, i):
+    return self.safe_replica_name_[i]
+
+  def set_safe_replica_name(self, i, x):
+    self.safe_replica_name_[i] = x
+
+  def add_safe_replica_name(self, x):
+    self.safe_replica_name_.append(x)
+
+  def clear_safe_replica_name(self):
+    self.safe_replica_name_ = []
+
+  def persist_offset(self): return self.persist_offset_
+
+  def set_persist_offset(self, x):
+    self.has_persist_offset_ = 1
+    self.persist_offset_ = x
+
+  def clear_persist_offset(self):
+    if self.has_persist_offset_:
+      self.has_persist_offset_ = 0
+      self.persist_offset_ = 0
+
+  def has_persist_offset(self): return self.has_persist_offset_
+
+
+  def MergeFrom(self, x):
+    # Merge every present field of x into self: singular fields overwrite,
+    # message fields merge recursively, repeated fields are appended.
+    assert x is not self
+    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
+    if (x.has_app()): self.set_app(x.app())
+    if (x.has_name_space()): self.set_name_space(x.name_space())
+    if (x.has_kind()): self.set_kind(x.kind())
+    if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
+    for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i))
+    if (x.has_search_query()): self.set_search_query(x.search_query())
+    for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
+    if (x.has_hint()): self.set_hint(x.hint())
+    if (x.has_count()): self.set_count(x.count())
+    if (x.has_offset()): self.set_offset(x.offset())
+    if (x.has_limit()): self.set_limit(x.limit())
+    if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
+    if (x.has_end_compiled_cursor()): self.mutable_end_compiled_cursor().MergeFrom(x.end_compiled_cursor())
+    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
+    if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
+    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
+    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+    if (x.has_compile()): self.set_compile(x.compile())
+    if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
+    if (x.has_strong()): self.set_strong(x.strong())
+    for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
+    for i in xrange(x.group_by_property_name_size()): self.add_group_by_property_name(x.group_by_property_name(i))
+    if (x.has_distinct()): self.set_distinct(x.distinct())
+    if (x.has_min_safe_time_seconds()): self.set_min_safe_time_seconds(x.min_safe_time_seconds())
+    for i in xrange(x.safe_replica_name_size()): self.add_safe_replica_name(x.safe_replica_name(i))
+    if (x.has_persist_offset()): self.set_persist_offset(x.persist_offset())
+
+  def Equals(self, x):
+    # Deep field-by-field equality: presence flags must match, values are
+    # compared only when present, and repeated fields compare elementwise.
+    if x is self: return 1
+    if self.has_header_ != x.has_header_: return 0
+    if self.has_header_ and self.header_ != x.header_: return 0
+    if self.has_app_ != x.has_app_: return 0
+    if self.has_app_ and self.app_ != x.app_: return 0
+    if self.has_name_space_ != x.has_name_space_: return 0
+    if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
+    if self.has_kind_ != x.has_kind_: return 0
+    if self.has_kind_ and self.kind_ != x.kind_: return 0
+    if self.has_ancestor_ != x.has_ancestor_: return 0
+    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
+    if len(self.filter_) != len(x.filter_): return 0
+    for e1, e2 in zip(self.filter_, x.filter_):
+      if e1 != e2: return 0
+    if self.has_search_query_ != x.has_search_query_: return 0
+    if self.has_search_query_ and self.search_query_ != x.search_query_: return 0
+    if len(self.order_) != len(x.order_): return 0
+    for e1, e2 in zip(self.order_, x.order_):
+      if e1 != e2: return 0
+    if self.has_hint_ != x.has_hint_: return 0
+    if self.has_hint_ and self.hint_ != x.hint_: return 0
+    if self.has_count_ != x.has_count_: return 0
+    if self.has_count_ and self.count_ != x.count_: return 0
+    if self.has_offset_ != x.has_offset_: return 0
+    if self.has_offset_ and self.offset_ != x.offset_: return 0
+    if self.has_limit_ != x.has_limit_: return 0
+    if self.has_limit_ and self.limit_ != x.limit_: return 0
+    if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
+    if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
+    if self.has_end_compiled_cursor_ != x.has_end_compiled_cursor_: return 0
+    if self.has_end_compiled_cursor_ and self.end_compiled_cursor_ != x.end_compiled_cursor_: return 0
+    if len(self.composite_index_) != len(x.composite_index_): return 0
+    for e1, e2 in zip(self.composite_index_, x.composite_index_):
+      if e1 != e2: return 0
+    if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
+    if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
+    if self.has_keys_only_ != x.has_keys_only_: return 0
+    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
+    if self.has_transaction_ != x.has_transaction_: return 0
+    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+    if self.has_compile_ != x.has_compile_: return 0
+    if self.has_compile_ and self.compile_ != x.compile_: return 0
+    if self.has_failover_ms_ != x.has_failover_ms_: return 0
+    if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
+    if self.has_strong_ != x.has_strong_: return 0
+    if self.has_strong_ and self.strong_ != x.strong_: return 0
+    if len(self.property_name_) != len(x.property_name_): return 0
+    for e1, e2 in zip(self.property_name_, x.property_name_):
+      if e1 != e2: return 0
+    if len(self.group_by_property_name_) != len(x.group_by_property_name_): return 0
+    for e1, e2 in zip(self.group_by_property_name_, x.group_by_property_name_):
+      if e1 != e2: return 0
+    if self.has_distinct_ != x.has_distinct_: return 0
+    if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
+    if self.has_min_safe_time_seconds_ != x.has_min_safe_time_seconds_: return 0
+    if self.has_min_safe_time_seconds_ and self.min_safe_time_seconds_ != x.min_safe_time_seconds_: return 0
+    if len(self.safe_replica_name_) != len(x.safe_replica_name_): return 0
+    for e1, e2 in zip(self.safe_replica_name_, x.safe_replica_name_):
+      if e1 != e2: return 0
+    if self.has_persist_offset_ != x.has_persist_offset_: return 0
+    if self.has_persist_offset_ and self.persist_offset_ != x.persist_offset_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # 'app' is the only required scalar field; every present sub-message
+    # and every repeated-message element must itself be initialized.
+    initialized = 1
+    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
+    if (not self.has_app_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: app not set.')
+    if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.filter_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    for p in self.order_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_end_compiled_cursor_ and not self.end_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.composite_index_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    # Serialized size assuming required fields are set. Per-field cost is
+    # tag byte(s) + payload; group-encoded repeated fields (filter, order)
+    # cost 2 tag bytes per element. The trailing "+ 1" is the one-byte tag
+    # of the required 'app' field.
+    n = 0
+    if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSize())
+    n += self.lengthString(len(self.app_))
+    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
+    if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
+    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
+    n += 2 * len(self.filter_)
+    for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSize()
+    if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
+    n += 2 * len(self.order_)
+    for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
+    if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
+    if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
+    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+    if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
+    if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSize())
+    if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSize())
+    n += 2 * len(self.composite_index_)
+    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
+    if (self.has_require_perfect_plan_): n += 3
+    if (self.has_keys_only_): n += 3
+    if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSize())
+    if (self.has_compile_): n += 3
+    if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
+    if (self.has_strong_): n += 3
+    n += 2 * len(self.property_name_)
+    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
+    n += 2 * len(self.group_by_property_name_)
+    for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
+    if (self.has_distinct_): n += 3
+    if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
+    n += 2 * len(self.safe_replica_name_)
+    for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
+    if (self.has_persist_offset_): n += 3
+    return n + 1
+
+  def ByteSizePartial(self):
+    # Like ByteSize, but safe when required fields are unset: 'app' only
+    # contributes its tag byte and payload when actually present, and
+    # sub-messages are sized with their own ByteSizePartial.
+    n = 0
+    if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSizePartial())
+    if (self.has_app_):
+      n += 1
+      n += self.lengthString(len(self.app_))
+    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
+    if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
+    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
+    n += 2 * len(self.filter_)
+    for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSizePartial()
+    if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
+    n += 2 * len(self.order_)
+    for i in xrange(len(self.order_)): n += self.order_[i].ByteSizePartial()
+    if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
+    if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
+    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+    if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
+    if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
+    if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSizePartial())
+    n += 2 * len(self.composite_index_)
+    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
+    if (self.has_require_perfect_plan_): n += 3
+    if (self.has_keys_only_): n += 3
+    if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSizePartial())
+    if (self.has_compile_): n += 3
+    if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
+    if (self.has_strong_): n += 3
+    n += 2 * len(self.property_name_)
+    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
+    n += 2 * len(self.group_by_property_name_)
+    for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
+    if (self.has_distinct_): n += 3
+    if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
+    n += 2 * len(self.safe_replica_name_)
+    for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
+    if (self.has_persist_offset_): n += 3
+    return n
+
+  def Clear(self):
+    # Reset every field to its default and clear all presence flags.
+    self.clear_header()
+    self.clear_app()
+    self.clear_name_space()
+    self.clear_kind()
+    self.clear_ancestor()
+    self.clear_filter()
+    self.clear_search_query()
+    self.clear_order()
+    self.clear_hint()
+    self.clear_count()
+    self.clear_offset()
+    self.clear_limit()
+    self.clear_compiled_cursor()
+    self.clear_end_compiled_cursor()
+    self.clear_composite_index()
+    self.clear_require_perfect_plan()
+    self.clear_keys_only()
+    self.clear_transaction()
+    self.clear_compile()
+    self.clear_failover_ms()
+    self.clear_strong()
+    self.clear_property_name()
+    self.clear_group_by_property_name()
+    self.clear_distinct()
+    self.clear_min_safe_time_seconds()
+    self.clear_safe_replica_name()
+    self.clear_persist_offset()
+
+  def OutputUnchecked(self, out):
+    # Serialize to 'out'; assumes required fields are set (unchecked).
+    # 'filter' and 'order' are group-encoded (start tag 35/75, end tag
+    # 36/76); message-valued fields are length-prefixed.
+    out.putVarInt32(10)
+    out.putPrefixedString(self.app_)
+    if (self.has_kind_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.kind_)
+    for i in xrange(len(self.filter_)):
+      out.putVarInt32(35)
+      self.filter_[i].OutputUnchecked(out)
+      out.putVarInt32(36)
+    if (self.has_search_query_):
+      out.putVarInt32(66)
+      out.putPrefixedString(self.search_query_)
+    for i in xrange(len(self.order_)):
+      out.putVarInt32(75)
+      self.order_[i].OutputUnchecked(out)
+      out.putVarInt32(76)
+    if (self.has_offset_):
+      out.putVarInt32(96)
+      out.putVarInt32(self.offset_)
+    if (self.has_limit_):
+      out.putVarInt32(128)
+      out.putVarInt32(self.limit_)
+    if (self.has_ancestor_):
+      out.putVarInt32(138)
+      out.putVarInt32(self.ancestor_.ByteSize())
+      self.ancestor_.OutputUnchecked(out)
+    if (self.has_hint_):
+      out.putVarInt32(144)
+      out.putVarInt32(self.hint_)
+    for i in xrange(len(self.composite_index_)):
+      out.putVarInt32(154)
+      out.putVarInt32(self.composite_index_[i].ByteSize())
+      self.composite_index_[i].OutputUnchecked(out)
+    if (self.has_require_perfect_plan_):
+      out.putVarInt32(160)
+      out.putBoolean(self.require_perfect_plan_)
+    if (self.has_keys_only_):
+      out.putVarInt32(168)
+      out.putBoolean(self.keys_only_)
+    if (self.has_transaction_):
+      out.putVarInt32(178)
+      out.putVarInt32(self.transaction_.ByteSize())
+      self.transaction_.OutputUnchecked(out)
+    if (self.has_count_):
+      out.putVarInt32(184)
+      out.putVarInt32(self.count_)
+    if (self.has_distinct_):
+      out.putVarInt32(192)
+      out.putBoolean(self.distinct_)
+    if (self.has_compile_):
+      out.putVarInt32(200)
+      out.putBoolean(self.compile_)
+    if (self.has_failover_ms_):
+      out.putVarInt32(208)
+      out.putVarInt64(self.failover_ms_)
+    if (self.has_name_space_):
+      out.putVarInt32(234)
+      out.putPrefixedString(self.name_space_)
+    if (self.has_compiled_cursor_):
+      out.putVarInt32(242)
+      out.putVarInt32(self.compiled_cursor_.ByteSize())
+      self.compiled_cursor_.OutputUnchecked(out)
+    if (self.has_end_compiled_cursor_):
+      out.putVarInt32(250)
+      out.putVarInt32(self.end_compiled_cursor_.ByteSize())
+      self.end_compiled_cursor_.OutputUnchecked(out)
+    if (self.has_strong_):
+      out.putVarInt32(256)
+      out.putBoolean(self.strong_)
+    for i in xrange(len(self.property_name_)):
+      out.putVarInt32(266)
+      out.putPrefixedString(self.property_name_[i])
+    for i in xrange(len(self.group_by_property_name_)):
+      out.putVarInt32(274)
+      out.putPrefixedString(self.group_by_property_name_[i])
+    if (self.has_min_safe_time_seconds_):
+      out.putVarInt32(280)
+      out.putVarInt64(self.min_safe_time_seconds_)
+    for i in xrange(len(self.safe_replica_name_)):
+      out.putVarInt32(290)
+      out.putPrefixedString(self.safe_replica_name_[i])
+    if (self.has_persist_offset_):
+      out.putVarInt32(296)
+      out.putBoolean(self.persist_offset_)
+    if (self.has_header_):
+      out.putVarInt32(314)
+      out.putVarInt32(self.header_.ByteSize())
+      self.header_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    # Same wire encoding as OutputUnchecked, but skips unset required
+    # fields and sizes sub-messages with ByteSizePartial.
+    if (self.has_app_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.app_)
+    if (self.has_kind_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.kind_)
+    for i in xrange(len(self.filter_)):
+      out.putVarInt32(35)
+      self.filter_[i].OutputPartial(out)
+      out.putVarInt32(36)
+    if (self.has_search_query_):
+      out.putVarInt32(66)
+      out.putPrefixedString(self.search_query_)
+    for i in xrange(len(self.order_)):
+      out.putVarInt32(75)
+      self.order_[i].OutputPartial(out)
+      out.putVarInt32(76)
+    if (self.has_offset_):
+      out.putVarInt32(96)
+      out.putVarInt32(self.offset_)
+    if (self.has_limit_):
+      out.putVarInt32(128)
+      out.putVarInt32(self.limit_)
+    if (self.has_ancestor_):
+      out.putVarInt32(138)
+      out.putVarInt32(self.ancestor_.ByteSizePartial())
+      self.ancestor_.OutputPartial(out)
+    if (self.has_hint_):
+      out.putVarInt32(144)
+      out.putVarInt32(self.hint_)
+    for i in xrange(len(self.composite_index_)):
+      out.putVarInt32(154)
+      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
+      self.composite_index_[i].OutputPartial(out)
+    if (self.has_require_perfect_plan_):
+      out.putVarInt32(160)
+      out.putBoolean(self.require_perfect_plan_)
+    if (self.has_keys_only_):
+      out.putVarInt32(168)
+      out.putBoolean(self.keys_only_)
+    if (self.has_transaction_):
+      out.putVarInt32(178)
+      out.putVarInt32(self.transaction_.ByteSizePartial())
+      self.transaction_.OutputPartial(out)
+    if (self.has_count_):
+      out.putVarInt32(184)
+      out.putVarInt32(self.count_)
+    if (self.has_distinct_):
+      out.putVarInt32(192)
+      out.putBoolean(self.distinct_)
+    if (self.has_compile_):
+      out.putVarInt32(200)
+      out.putBoolean(self.compile_)
+    if (self.has_failover_ms_):
+      out.putVarInt32(208)
+      out.putVarInt64(self.failover_ms_)
+    if (self.has_name_space_):
+      out.putVarInt32(234)
+      out.putPrefixedString(self.name_space_)
+    if (self.has_compiled_cursor_):
+      out.putVarInt32(242)
+      out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
+      self.compiled_cursor_.OutputPartial(out)
+    if (self.has_end_compiled_cursor_):
+      out.putVarInt32(250)
+      out.putVarInt32(self.end_compiled_cursor_.ByteSizePartial())
+      self.end_compiled_cursor_.OutputPartial(out)
+    if (self.has_strong_):
+      out.putVarInt32(256)
+      out.putBoolean(self.strong_)
+    for i in xrange(len(self.property_name_)):
+      out.putVarInt32(266)
+      out.putPrefixedString(self.property_name_[i])
+    for i in xrange(len(self.group_by_property_name_)):
+      out.putVarInt32(274)
+      out.putPrefixedString(self.group_by_property_name_[i])
+    if (self.has_min_safe_time_seconds_):
+      out.putVarInt32(280)
+      out.putVarInt64(self.min_safe_time_seconds_)
+    for i in xrange(len(self.safe_replica_name_)):
+      out.putVarInt32(290)
+      out.putPrefixedString(self.safe_replica_name_[i])
+    if (self.has_persist_offset_):
+      out.putVarInt32(296)
+      out.putBoolean(self.persist_offset_)
+    if (self.has_header_):
+      out.putVarInt32(314)
+      out.putVarInt32(self.header_.ByteSizePartial())
+      self.header_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    """Merge wire-format data from Decoder `d` into this message.
+
+    Reads protobuf tag/value pairs until the buffer is exhausted.  Each
+    `tt` is a wire tag (field_number << 3 | wire_type).  Unknown tags are
+    skipped via skipData(); a zero tag indicates corrupt input and raises
+    ProtocolBufferDecodeError.
+    """
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_app(d.getPrefixedString())
+        continue
+      if tt == 26:
+        self.set_kind(d.getPrefixedString())
+        continue
+      # Filter and Order entries are group-encoded (see _TYPES: STARTGROUP);
+      # their TryMerge consumes the matching end-group tag.
+      if tt == 35:
+        self.add_filter().TryMerge(d)
+        continue
+      if tt == 66:
+        self.set_search_query(d.getPrefixedString())
+        continue
+      if tt == 75:
+        self.add_order().TryMerge(d)
+        continue
+      if tt == 96:
+        self.set_offset(d.getVarInt32())
+        continue
+      if tt == 128:
+        self.set_limit(d.getVarInt32())
+        continue
+      # Length-delimited submessage: decode from a bounded sub-Decoder so the
+      # nested TryMerge cannot read past the field's payload.
+      if tt == 138:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_ancestor().TryMerge(tmp)
+        continue
+      if tt == 144:
+        self.set_hint(d.getVarInt32())
+        continue
+      if tt == 154:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_composite_index().TryMerge(tmp)
+        continue
+      if tt == 160:
+        self.set_require_perfect_plan(d.getBoolean())
+        continue
+      if tt == 168:
+        self.set_keys_only(d.getBoolean())
+        continue
+      if tt == 178:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_transaction().TryMerge(tmp)
+        continue
+      if tt == 184:
+        self.set_count(d.getVarInt32())
+        continue
+      if tt == 192:
+        self.set_distinct(d.getBoolean())
+        continue
+      if tt == 200:
+        self.set_compile(d.getBoolean())
+        continue
+      if tt == 208:
+        self.set_failover_ms(d.getVarInt64())
+        continue
+      if tt == 234:
+        self.set_name_space(d.getPrefixedString())
+        continue
+      if tt == 242:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_compiled_cursor().TryMerge(tmp)
+        continue
+      if tt == 250:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_end_compiled_cursor().TryMerge(tmp)
+        continue
+      if tt == 256:
+        self.set_strong(d.getBoolean())
+        continue
+      if tt == 266:
+        self.add_property_name(d.getPrefixedString())
+        continue
+      if tt == 274:
+        self.add_group_by_property_name(d.getPrefixedString())
+        continue
+      if tt == 280:
+        self.set_min_safe_time_seconds(d.getVarInt64())
+        continue
+      if tt == 290:
+        self.add_safe_replica_name(d.getPrefixedString())
+        continue
+      if tt == 296:
+        self.set_persist_offset(d.getBoolean())
+        continue
+      if tt == 314:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_header().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Return a human-readable text rendering of every set field.
+
+    `prefix` is prepended to each output line (deepened by two spaces for
+    nested messages); when `printElemNumber` is true, repeated elements are
+    annotated with their zero-based index.
+    """
+    res=""
+    if self.has_header_:
+      res+=prefix+"header <\n"
+      res+=self.header_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+    if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
+    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
+    if self.has_ancestor_:
+      res+=prefix+"ancestor <\n"
+      res+=self.ancestor_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.filter_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("Filter%s {\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+      cnt+=1
+    if self.has_search_query_: res+=prefix+("search_query: %s\n" % self.DebugFormatString(self.search_query_))
+    cnt=0
+    for e in self.order_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("Order%s {\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+      cnt+=1
+    if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
+    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
+    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
+    if self.has_compiled_cursor_:
+      res+=prefix+"compiled_cursor <\n"
+      res+=self.compiled_cursor_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_end_compiled_cursor_:
+      res+=prefix+"end_compiled_cursor <\n"
+      res+=self.end_compiled_cursor_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.composite_index_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("composite_index%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
+    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
+    if self.has_transaction_:
+      res+=prefix+"transaction <\n"
+      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
+    if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
+    if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
+    cnt=0
+    for e in self.property_name_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    cnt=0
+    for e in self.group_by_property_name_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("group_by_property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
+    if self.has_min_safe_time_seconds_: res+=prefix+("min_safe_time_seconds: %s\n" % self.DebugFormatInt64(self.min_safe_time_seconds_))
+    cnt=0
+    for e in self.safe_replica_name_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("safe_replica_name%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    if self.has_persist_offset_: res+=prefix+("persist_offset: %s\n" % self.DebugFormatBool(self.persist_offset_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    # Expand a sparse {tag_number: value} dict into a dense tuple indexed by
+    # tag number (0..maxtag), filling gaps with `default`.  It is invoked
+    # below while the class body is still executing, so it is written as a
+    # plain function (no `self`), not a method.
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  # Proto field numbers for each Query field (k<field_name> = tag number).
+  kheader = 39
+  kapp = 1
+  kname_space = 29
+  kkind = 3
+  kancestor = 17
+  kFilterGroup = 4
+  kFilterop = 6
+  kFilterproperty = 14
+  ksearch_query = 8
+  kOrderGroup = 9
+  kOrderproperty = 10
+  kOrderdirection = 11
+  khint = 18
+  kcount = 23
+  koffset = 12
+  klimit = 16
+  kcompiled_cursor = 30
+  kend_compiled_cursor = 31
+  kcomposite_index = 19
+  krequire_perfect_plan = 20
+  kkeys_only = 21
+  ktransaction = 22
+  kcompile = 25
+  kfailover_ms = 26
+  kstrong = 32
+  kproperty_name = 33
+  kgroup_by_property_name = 34
+  kdistinct = 24
+  kmin_safe_time_seconds = 35
+  ksafe_replica_name = 36
+  kpersist_offset = 37
+
+  # Dense lookup: tag number -> field name, used for debug/text output.
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app",
+    3: "kind",
+    4: "Filter",
+    6: "op",
+    8: "search_query",
+    9: "Order",
+    10: "property",
+    11: "direction",
+    12: "offset",
+    14: "property",
+    16: "limit",
+    17: "ancestor",
+    18: "hint",
+    19: "composite_index",
+    20: "require_perfect_plan",
+    21: "keys_only",
+    22: "transaction",
+    23: "count",
+    24: "distinct",
+    25: "compile",
+    26: "failover_ms",
+    29: "name_space",
+    30: "compiled_cursor",
+    31: "end_compiled_cursor",
+    32: "strong",
+    33: "property_name",
+    34: "group_by_property_name",
+    35: "min_safe_time_seconds",
+    36: "safe_replica_name",
+    37: "persist_offset",
+  }, 39)
+
+  # Dense lookup: tag number -> wire type (NUMERIC/STRING/STARTGROUP);
+  # unassigned tag numbers map to MAX_TYPE.
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STARTGROUP,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.STARTGROUP,
+    10: ProtocolBuffer.Encoder.STRING,
+    11: ProtocolBuffer.Encoder.NUMERIC,
+    12: ProtocolBuffer.Encoder.NUMERIC,
+    14: ProtocolBuffer.Encoder.STRING,
+    16: ProtocolBuffer.Encoder.NUMERIC,
+    17: ProtocolBuffer.Encoder.STRING,
+    18: ProtocolBuffer.Encoder.NUMERIC,
+    19: ProtocolBuffer.Encoder.STRING,
+    20: ProtocolBuffer.Encoder.NUMERIC,
+    21: ProtocolBuffer.Encoder.NUMERIC,
+    22: ProtocolBuffer.Encoder.STRING,
+    23: ProtocolBuffer.Encoder.NUMERIC,
+    24: ProtocolBuffer.Encoder.NUMERIC,
+    25: ProtocolBuffer.Encoder.NUMERIC,
+    26: ProtocolBuffer.Encoder.NUMERIC,
+    29: ProtocolBuffer.Encoder.STRING,
+    30: ProtocolBuffer.Encoder.STRING,
+    31: ProtocolBuffer.Encoder.STRING,
+    32: ProtocolBuffer.Encoder.NUMERIC,
+    33: ProtocolBuffer.Encoder.STRING,
+    34: ProtocolBuffer.Encoder.STRING,
+    35: ProtocolBuffer.Encoder.NUMERIC,
+    36: ProtocolBuffer.Encoder.STRING,
+    37: ProtocolBuffer.Encoder.NUMERIC,
+    39: ProtocolBuffer.Encoder.STRING,
+  }, 39, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Query'
+class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage):
+  """Generated-style protobuf message: the primary index scan of a compiled
+  datastore query (index name, start/end key range with inclusivity flags,
+  optional postfix values, and an unapplied-log timestamp bound).
+
+  All fields are optional; presence is tracked with has_* flags.  This
+  message is group-encoded: TryMerge terminates on the end-group tag (12).
+  """
+  # Presence flags and default values for the optional scalar fields.
+  has_index_name_ = 0
+  index_name_ = ""
+  has_start_key_ = 0
+  start_key_ = ""
+  has_start_inclusive_ = 0
+  start_inclusive_ = 0
+  has_end_key_ = 0
+  end_key_ = ""
+  has_end_inclusive_ = 0
+  end_inclusive_ = 0
+  has_end_unapplied_log_timestamp_us_ = 0
+  end_unapplied_log_timestamp_us_ = 0
+
+  def __init__(self, contents=None):
+    # Repeated fields get per-instance lists; scalars use the class defaults.
+    self.start_postfix_value_ = []
+    self.end_postfix_value_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def index_name(self): return self.index_name_
+
+  def set_index_name(self, x):
+    self.has_index_name_ = 1
+    self.index_name_ = x
+
+  def clear_index_name(self):
+    if self.has_index_name_:
+      self.has_index_name_ = 0
+      self.index_name_ = ""
+
+  def has_index_name(self): return self.has_index_name_
+
+  def start_key(self): return self.start_key_
+
+  def set_start_key(self, x):
+    self.has_start_key_ = 1
+    self.start_key_ = x
+
+  def clear_start_key(self):
+    if self.has_start_key_:
+      self.has_start_key_ = 0
+      self.start_key_ = ""
+
+  def has_start_key(self): return self.has_start_key_
+
+  def start_inclusive(self): return self.start_inclusive_
+
+  def set_start_inclusive(self, x):
+    self.has_start_inclusive_ = 1
+    self.start_inclusive_ = x
+
+  def clear_start_inclusive(self):
+    if self.has_start_inclusive_:
+      self.has_start_inclusive_ = 0
+      self.start_inclusive_ = 0
+
+  def has_start_inclusive(self): return self.has_start_inclusive_
+
+  def end_key(self): return self.end_key_
+
+  def set_end_key(self, x):
+    self.has_end_key_ = 1
+    self.end_key_ = x
+
+  def clear_end_key(self):
+    if self.has_end_key_:
+      self.has_end_key_ = 0
+      self.end_key_ = ""
+
+  def has_end_key(self): return self.has_end_key_
+
+  def end_inclusive(self): return self.end_inclusive_
+
+  def set_end_inclusive(self, x):
+    self.has_end_inclusive_ = 1
+    self.end_inclusive_ = x
+
+  def clear_end_inclusive(self):
+    if self.has_end_inclusive_:
+      self.has_end_inclusive_ = 0
+      self.end_inclusive_ = 0
+
+  def has_end_inclusive(self): return self.has_end_inclusive_
+
+  def start_postfix_value_size(self): return len(self.start_postfix_value_)
+  def start_postfix_value_list(self): return self.start_postfix_value_
+
+  def start_postfix_value(self, i):
+    return self.start_postfix_value_[i]
+
+  def set_start_postfix_value(self, i, x):
+    self.start_postfix_value_[i] = x
+
+  def add_start_postfix_value(self, x):
+    self.start_postfix_value_.append(x)
+
+  def clear_start_postfix_value(self):
+    self.start_postfix_value_ = []
+
+  def end_postfix_value_size(self): return len(self.end_postfix_value_)
+  def end_postfix_value_list(self): return self.end_postfix_value_
+
+  def end_postfix_value(self, i):
+    return self.end_postfix_value_[i]
+
+  def set_end_postfix_value(self, i, x):
+    self.end_postfix_value_[i] = x
+
+  def add_end_postfix_value(self, x):
+    self.end_postfix_value_.append(x)
+
+  def clear_end_postfix_value(self):
+    self.end_postfix_value_ = []
+
+  def end_unapplied_log_timestamp_us(self): return self.end_unapplied_log_timestamp_us_
+
+  def set_end_unapplied_log_timestamp_us(self, x):
+    self.has_end_unapplied_log_timestamp_us_ = 1
+    self.end_unapplied_log_timestamp_us_ = x
+
+  def clear_end_unapplied_log_timestamp_us(self):
+    if self.has_end_unapplied_log_timestamp_us_:
+      self.has_end_unapplied_log_timestamp_us_ = 0
+      self.end_unapplied_log_timestamp_us_ = 0
+
+  def has_end_unapplied_log_timestamp_us(self): return self.has_end_unapplied_log_timestamp_us_
+
+
+  def MergeFrom(self, x):
+    # Field-wise merge: set scalars present in x, append x's repeated values.
+    assert x is not self
+    if (x.has_index_name()): self.set_index_name(x.index_name())
+    if (x.has_start_key()): self.set_start_key(x.start_key())
+    if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
+    if (x.has_end_key()): self.set_end_key(x.end_key())
+    if (x.has_end_inclusive()): self.set_end_inclusive(x.end_inclusive())
+    for i in xrange(x.start_postfix_value_size()): self.add_start_postfix_value(x.start_postfix_value(i))
+    for i in xrange(x.end_postfix_value_size()): self.add_end_postfix_value(x.end_postfix_value(i))
+    if (x.has_end_unapplied_log_timestamp_us()): self.set_end_unapplied_log_timestamp_us(x.end_unapplied_log_timestamp_us())
+
+  def Equals(self, x):
+    # Two messages are equal iff every field has matching presence and value.
+    if x is self: return 1
+    if self.has_index_name_ != x.has_index_name_: return 0
+    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
+    if self.has_start_key_ != x.has_start_key_: return 0
+    if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
+    if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
+    if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
+    if self.has_end_key_ != x.has_end_key_: return 0
+    if self.has_end_key_ and self.end_key_ != x.end_key_: return 0
+    if self.has_end_inclusive_ != x.has_end_inclusive_: return 0
+    if self.has_end_inclusive_ and self.end_inclusive_ != x.end_inclusive_: return 0
+    if len(self.start_postfix_value_) != len(x.start_postfix_value_): return 0
+    for e1, e2 in zip(self.start_postfix_value_, x.start_postfix_value_):
+      if e1 != e2: return 0
+    if len(self.end_postfix_value_) != len(x.end_postfix_value_): return 0
+    for e1, e2 in zip(self.end_postfix_value_, x.end_postfix_value_):
+      if e1 != e2: return 0
+    if self.has_end_unapplied_log_timestamp_us_ != x.has_end_unapplied_log_timestamp_us_: return 0
+    if self.has_end_unapplied_log_timestamp_us_ and self.end_unapplied_log_timestamp_us_ != x.end_unapplied_log_timestamp_us_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # No required fields, so this message is always initialized.
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    # Encoded size of all set fields, including each field's tag byte(s).
+    n = 0
+    if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
+    if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
+    if (self.has_start_inclusive_): n += 2
+    if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
+    if (self.has_end_inclusive_): n += 2
+    n += 2 * len(self.start_postfix_value_)
+    for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
+    n += 2 * len(self.end_postfix_value_)
+    for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
+    if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
+    return n
+
+  def ByteSizePartial(self):
+    # Identical to ByteSize() here because there are no required fields.
+    n = 0
+    if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
+    if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
+    if (self.has_start_inclusive_): n += 2
+    if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
+    if (self.has_end_inclusive_): n += 2
+    n += 2 * len(self.start_postfix_value_)
+    for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
+    n += 2 * len(self.end_postfix_value_)
+    for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
+    if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
+    return n
+
+  def Clear(self):
+    self.clear_index_name()
+    self.clear_start_key()
+    self.clear_start_inclusive()
+    self.clear_end_key()
+    self.clear_end_inclusive()
+    self.clear_start_postfix_value()
+    self.clear_end_postfix_value()
+    self.clear_end_unapplied_log_timestamp_us()
+
+  def OutputUnchecked(self, out):
+    # Emit each set field as its precomputed wire tag followed by the value.
+    if (self.has_index_name_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.index_name_)
+    if (self.has_start_key_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.start_key_)
+    if (self.has_start_inclusive_):
+      out.putVarInt32(32)
+      out.putBoolean(self.start_inclusive_)
+    if (self.has_end_key_):
+      out.putVarInt32(42)
+      out.putPrefixedString(self.end_key_)
+    if (self.has_end_inclusive_):
+      out.putVarInt32(48)
+      out.putBoolean(self.end_inclusive_)
+    if (self.has_end_unapplied_log_timestamp_us_):
+      out.putVarInt32(152)
+      out.putVarInt64(self.end_unapplied_log_timestamp_us_)
+    for i in xrange(len(self.start_postfix_value_)):
+      out.putVarInt32(178)
+      out.putPrefixedString(self.start_postfix_value_[i])
+    for i in xrange(len(self.end_postfix_value_)):
+      out.putVarInt32(186)
+      out.putPrefixedString(self.end_postfix_value_[i])
+
+  def OutputPartial(self, out):
+    # Same as OutputUnchecked(); no required fields means nothing to relax.
+    if (self.has_index_name_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.index_name_)
+    if (self.has_start_key_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.start_key_)
+    if (self.has_start_inclusive_):
+      out.putVarInt32(32)
+      out.putBoolean(self.start_inclusive_)
+    if (self.has_end_key_):
+      out.putVarInt32(42)
+      out.putPrefixedString(self.end_key_)
+    if (self.has_end_inclusive_):
+      out.putVarInt32(48)
+      out.putBoolean(self.end_inclusive_)
+    if (self.has_end_unapplied_log_timestamp_us_):
+      out.putVarInt32(152)
+      out.putVarInt64(self.end_unapplied_log_timestamp_us_)
+    for i in xrange(len(self.start_postfix_value_)):
+      out.putVarInt32(178)
+      out.putPrefixedString(self.start_postfix_value_[i])
+    for i in xrange(len(self.end_postfix_value_)):
+      out.putVarInt32(186)
+      out.putPrefixedString(self.end_postfix_value_[i])
+
+  def TryMerge(self, d):
+    # Decode until the end-group tag (12) that closes this group-encoded
+    # message; tag 0 is corrupt input, any other unknown tag is skipped.
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 12: break
+      if tt == 18:
+        self.set_index_name(d.getPrefixedString())
+        continue
+      if tt == 26:
+        self.set_start_key(d.getPrefixedString())
+        continue
+      if tt == 32:
+        self.set_start_inclusive(d.getBoolean())
+        continue
+      if tt == 42:
+        self.set_end_key(d.getPrefixedString())
+        continue
+      if tt == 48:
+        self.set_end_inclusive(d.getBoolean())
+        continue
+      if tt == 152:
+        self.set_end_unapplied_log_timestamp_us(d.getVarInt64())
+        continue
+      if tt == 178:
+        self.add_start_postfix_value(d.getPrefixedString())
+        continue
+      if tt == 186:
+        self.add_end_postfix_value(d.getPrefixedString())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Return a debug text rendering of all set fields (see Query.__str__)."""
+    res=""
+    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
+    if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
+    if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
+    if self.has_end_key_: res+=prefix+("end_key: %s\n" % self.DebugFormatString(self.end_key_))
+    if self.has_end_inclusive_: res+=prefix+("end_inclusive: %s\n" % self.DebugFormatBool(self.end_inclusive_))
+    cnt=0
+    for e in self.start_postfix_value_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("start_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    cnt=0
+    for e in self.end_postfix_value_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("end_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    if self.has_end_unapplied_log_timestamp_us_: res+=prefix+("end_unapplied_log_timestamp_us: %s\n" % self.DebugFormatInt64(self.end_unapplied_log_timestamp_us_))
+    return res
+
<br>
+class CompiledQuery_MergeJoinScan(ProtocolBuffer.ProtocolMessage):
+  """Generated-style protobuf message: one merge-join scan of a compiled
+  datastore query — a required index name plus the repeated prefix values
+  the scan must match, and an optional value_prefix flag.
+
+  Group-encoded: TryMerge terminates on the end-group tag (60).
+  """
+  # Presence flags and default values; index_name is the only required field.
+  has_index_name_ = 0
+  index_name_ = ""
+  has_value_prefix_ = 0
+  value_prefix_ = 0
+
+  def __init__(self, contents=None):
+    # Repeated field gets a per-instance list; scalars use class defaults.
+    self.prefix_value_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def index_name(self): return self.index_name_
+
+  def set_index_name(self, x):
+    self.has_index_name_ = 1
+    self.index_name_ = x
+
+  def clear_index_name(self):
+    if self.has_index_name_:
+      self.has_index_name_ = 0
+      self.index_name_ = ""
+
+  def has_index_name(self): return self.has_index_name_
+
+  def prefix_value_size(self): return len(self.prefix_value_)
+  def prefix_value_list(self): return self.prefix_value_
+
+  def prefix_value(self, i):
+    return self.prefix_value_[i]
+
+  def set_prefix_value(self, i, x):
+    self.prefix_value_[i] = x
+
+  def add_prefix_value(self, x):
+    self.prefix_value_.append(x)
+
+  def clear_prefix_value(self):
+    self.prefix_value_ = []
+
+  def value_prefix(self): return self.value_prefix_
+
+  def set_value_prefix(self, x):
+    self.has_value_prefix_ = 1
+    self.value_prefix_ = x
+
+  def clear_value_prefix(self):
+    if self.has_value_prefix_:
+      self.has_value_prefix_ = 0
+      self.value_prefix_ = 0
+
+  def has_value_prefix(self): return self.has_value_prefix_
+
+
+  def MergeFrom(self, x):
+    # Field-wise merge: set scalars present in x, append x's repeated values.
+    assert x is not self
+    if (x.has_index_name()): self.set_index_name(x.index_name())
+    for i in xrange(x.prefix_value_size()): self.add_prefix_value(x.prefix_value(i))
+    if (x.has_value_prefix()): self.set_value_prefix(x.value_prefix())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_index_name_ != x.has_index_name_: return 0
+    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
+    if len(self.prefix_value_) != len(x.prefix_value_): return 0
+    for e1, e2 in zip(self.prefix_value_, x.prefix_value_):
+      if e1 != e2: return 0
+    if self.has_value_prefix_ != x.has_value_prefix_: return 0
+    if self.has_value_prefix_ and self.value_prefix_ != x.value_prefix_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # index_name is required; report it if missing.
+    initialized = 1
+    if (not self.has_index_name_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: index_name not set.')
+    return initialized
+
+  def ByteSize(self):
+    # Full encoded size; the trailing +1 covers the required index_name tag.
+    n = 0
+    n += self.lengthString(len(self.index_name_))
+    n += 1 * len(self.prefix_value_)
+    for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
+    if (self.has_value_prefix_): n += 3
+    return n + 1
+
+  def ByteSizePartial(self):
+    # Like ByteSize(), but counts index_name only when it is actually set.
+    n = 0
+    if (self.has_index_name_):
+      n += 1
+      n += self.lengthString(len(self.index_name_))
+    n += 1 * len(self.prefix_value_)
+    for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
+    if (self.has_value_prefix_): n += 3
+    return n
+
+  def Clear(self):
+    self.clear_index_name()
+    self.clear_prefix_value()
+    self.clear_value_prefix()
+
+  def OutputUnchecked(self, out):
+    # index_name is required, so it is written unconditionally here.
+    out.putVarInt32(66)
+    out.putPrefixedString(self.index_name_)
+    for i in xrange(len(self.prefix_value_)):
+      out.putVarInt32(74)
+      out.putPrefixedString(self.prefix_value_[i])
+    if (self.has_value_prefix_):
+      out.putVarInt32(160)
+      out.putBoolean(self.value_prefix_)
+
+  def OutputPartial(self, out):
+    # Partial form skips the required index_name when it is not set.
+    if (self.has_index_name_):
+      out.putVarInt32(66)
+      out.putPrefixedString(self.index_name_)
+    for i in xrange(len(self.prefix_value_)):
+      out.putVarInt32(74)
+      out.putPrefixedString(self.prefix_value_[i])
+    if (self.has_value_prefix_):
+      out.putVarInt32(160)
+      out.putBoolean(self.value_prefix_)
+
+  def TryMerge(self, d):
+    # Decode until the end-group tag (60); tag 0 is corrupt input and any
+    # other unknown tag is skipped.
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 60: break
+      if tt == 66:
+        self.set_index_name(d.getPrefixedString())
+        continue
+      if tt == 74:
+        self.add_prefix_value(d.getPrefixedString())
+        continue
+      if tt == 160:
+        self.set_value_prefix(d.getBoolean())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Return a debug text rendering of all set fields (see Query.__str__)."""
+    res=""
+    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
+    cnt=0
+    for e in self.prefix_value_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("prefix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    if self.has_value_prefix_: res+=prefix+("value_prefix: %s\n" % self.DebugFormatBool(self.value_prefix_))
+    return res
+
+class CompiledQuery_EntityFilter(ProtocolBuffer.ProtocolMessage):
+  """Generated-style protobuf message: post-scan entity filtering for a
+  compiled datastore query — an optional distinct flag, kind, and ancestor
+  Reference.
+
+  Group-encoded: TryMerge terminates on the end-group tag (108).  The
+  ancestor submessage is created lazily under lazy_init_lock_.
+  """
+  # Presence flags and defaults; ancestor_ stays None until first accessed.
+  has_distinct_ = 0
+  distinct_ = 0
+  has_kind_ = 0
+  kind_ = ""
+  has_ancestor_ = 0
+  ancestor_ = None
+
+  def __init__(self, contents=None):
+    # Lock guarding the lazy construction of the ancestor submessage.
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def distinct(self): return self.distinct_
+
+  def set_distinct(self, x):
+    self.has_distinct_ = 1
+    self.distinct_ = x
+
+  def clear_distinct(self):
+    if self.has_distinct_:
+      self.has_distinct_ = 0
+      self.distinct_ = 0
+
+  def has_distinct(self): return self.has_distinct_
+
+  def kind(self): return self.kind_
+
+  def set_kind(self, x):
+    self.has_kind_ = 1
+    self.kind_ = x
+
+  def clear_kind(self):
+    if self.has_kind_:
+      self.has_kind_ = 0
+      self.kind_ = ""
+
+  def has_kind(self): return self.has_kind_
+
+  def ancestor(self):
+    # Lazily create the Reference under the lock (double-checked) so a bare
+    # read never returns None, without marking the field as present.
+    if self.ancestor_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.ancestor_ is None: self.ancestor_ = Reference()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.ancestor_
+
+  def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
+
+  def clear_ancestor(self):
+
+    # Clears the submessage in place rather than dropping it, so any
+    # outstanding reference obtained via ancestor() stays valid.
+    if self.has_ancestor_:
+      self.has_ancestor_ = 0;
+      if self.ancestor_ is not None: self.ancestor_.Clear()
+
+  def has_ancestor(self): return self.has_ancestor_
+
+
+  def MergeFrom(self, x):
+    # Field-wise merge; the ancestor submessage is merged recursively.
+    assert x is not self
+    if (x.has_distinct()): self.set_distinct(x.distinct())
+    if (x.has_kind()): self.set_kind(x.kind())
+    if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_distinct_ != x.has_distinct_: return 0
+    if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
+    if self.has_kind_ != x.has_kind_: return 0
+    if self.has_kind_ and self.kind_ != x.kind_: return 0
+    if self.has_ancestor_ != x.has_ancestor_: return 0
+    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # No required fields of its own; only the ancestor submessage (when set)
+    # must itself be initialized.
+    initialized = 1
+    if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    # Encoded size of all set fields; tags here need two bytes each.
+    n = 0
+    if (self.has_distinct_): n += 2
+    if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
+    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
+    return n
+
+  def ByteSizePartial(self):
+    # Same as ByteSize() but sizes the ancestor with its partial size.
+    n = 0
+    if (self.has_distinct_): n += 2
+    if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
+    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_distinct()
+    self.clear_kind()
+    self.clear_ancestor()
+
+  def OutputUnchecked(self, out):
+    # Emit each set field as its precomputed wire tag followed by the value.
+    if (self.has_distinct_):
+      out.putVarInt32(112)
+      out.putBoolean(self.distinct_)
+    if (self.has_kind_):
+      out.putVarInt32(138)
+      out.putPrefixedString(self.kind_)
+    if (self.has_ancestor_):
+      out.putVarInt32(146)
+      out.putVarInt32(self.ancestor_.ByteSize())
+      self.ancestor_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    # Same layout as OutputUnchecked(), using the ancestor's partial output.
+    if (self.has_distinct_):
+      out.putVarInt32(112)
+      out.putBoolean(self.distinct_)
+    if (self.has_kind_):
+      out.putVarInt32(138)
+      out.putPrefixedString(self.kind_)
+    if (self.has_ancestor_):
+      out.putVarInt32(146)
+      out.putVarInt32(self.ancestor_.ByteSizePartial())
+      self.ancestor_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    # Decode until the end-group tag (108); length-delimited ancestor is
+    # decoded via a bounded sub-Decoder; tag 0 is corrupt input.
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 108: break
+      if tt == 112:
+        self.set_distinct(d.getBoolean())
+        continue
+      if tt == 138:
+        self.set_kind(d.getPrefixedString())
+        continue
+      if tt == 146:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_ancestor().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Return a debug text rendering of all set fields (see Query.__str__)."""
+    res=""
+    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
+    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
+    if self.has_ancestor_:
+      res+=prefix+"ancestor <\n"
+      res+=self.ancestor_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
class CompiledQuery(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message for apphosting_datastore_v3.CompiledQuery.

  Hand-rolled wire-format codec (pre-protobuf-library style): each field has
  a has_* presence flag, and tag numbers are pre-encoded varints (e.g. 80 =
  field 10 <<3 | wiretype 0). Do not edit the tag constants by hand.
  """
  # Presence flags and scalar defaults; lazily-created submessages start as
  # None and are built under lazy_init_lock_ on first access.
  has_primaryscan_ = 0
  has_index_def_ = 0
  index_def_ = None
  has_offset_ = 0
  offset_ = 0
  has_limit_ = 0
  limit_ = 0
  has_keys_only_ = 0
  keys_only_ = 0
  has_distinct_infix_size_ = 0
  distinct_infix_size_ = 0
  has_entityfilter_ = 0
  entityfilter_ = None

  def __init__(self, contents=None):
    """Initialize; if `contents` (a serialized string) is given, parse it."""
    self.primaryscan_ = CompiledQuery_PrimaryScan()
    self.mergejoinscan_ = []
    self.property_name_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def primaryscan(self): return self.primaryscan_

  def mutable_primaryscan(self): self.has_primaryscan_ = 1; return self.primaryscan_

  def clear_primaryscan(self):self.has_primaryscan_ = 0; self.primaryscan_.Clear()

  def has_primaryscan(self): return self.has_primaryscan_

  def mergejoinscan_size(self): return len(self.mergejoinscan_)
  def mergejoinscan_list(self): return self.mergejoinscan_

  def mergejoinscan(self, i):
    return self.mergejoinscan_[i]

  def mutable_mergejoinscan(self, i):
    return self.mergejoinscan_[i]

  def add_mergejoinscan(self):
    # Append a new empty element and return it for in-place population.
    x = CompiledQuery_MergeJoinScan()
    self.mergejoinscan_.append(x)
    return x

  def clear_mergejoinscan(self):
    self.mergejoinscan_ = []
  def index_def(self):
    # Double-checked lazy init of the optional Index submessage.
    if self.index_def_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.index_def_ is None: self.index_def_ = Index()
      finally:
        self.lazy_init_lock_.release()
    return self.index_def_

  def mutable_index_def(self): self.has_index_def_ = 1; return self.index_def()

  def clear_index_def(self):

    if self.has_index_def_:
      self.has_index_def_ = 0;
      if self.index_def_ is not None: self.index_def_.Clear()

  def has_index_def(self): return self.has_index_def_

  def offset(self): return self.offset_

  def set_offset(self, x):
    self.has_offset_ = 1
    self.offset_ = x

  def clear_offset(self):
    if self.has_offset_:
      self.has_offset_ = 0
      self.offset_ = 0

  def has_offset(self): return self.has_offset_

  def limit(self): return self.limit_

  def set_limit(self, x):
    self.has_limit_ = 1
    self.limit_ = x

  def clear_limit(self):
    if self.has_limit_:
      self.has_limit_ = 0
      self.limit_ = 0

  def has_limit(self): return self.has_limit_

  def keys_only(self): return self.keys_only_

  def set_keys_only(self, x):
    self.has_keys_only_ = 1
    self.keys_only_ = x

  def clear_keys_only(self):
    if self.has_keys_only_:
      self.has_keys_only_ = 0
      self.keys_only_ = 0

  def has_keys_only(self): return self.has_keys_only_

  def property_name_size(self): return len(self.property_name_)
  def property_name_list(self): return self.property_name_

  def property_name(self, i):
    return self.property_name_[i]

  def set_property_name(self, i, x):
    self.property_name_[i] = x

  def add_property_name(self, x):
    self.property_name_.append(x)

  def clear_property_name(self):
    self.property_name_ = []

  def distinct_infix_size(self): return self.distinct_infix_size_

  def set_distinct_infix_size(self, x):
    self.has_distinct_infix_size_ = 1
    self.distinct_infix_size_ = x

  def clear_distinct_infix_size(self):
    if self.has_distinct_infix_size_:
      self.has_distinct_infix_size_ = 0
      self.distinct_infix_size_ = 0

  def has_distinct_infix_size(self): return self.has_distinct_infix_size_

  def entityfilter(self):
    # Double-checked lazy init of the optional EntityFilter group.
    if self.entityfilter_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.entityfilter_ is None: self.entityfilter_ = CompiledQuery_EntityFilter()
      finally:
        self.lazy_init_lock_.release()
    return self.entityfilter_

  def mutable_entityfilter(self): self.has_entityfilter_ = 1; return self.entityfilter()

  def clear_entityfilter(self):

    if self.has_entityfilter_:
      self.has_entityfilter_ = 0;
      if self.entityfilter_ is not None: self.entityfilter_.Clear()

  def has_entityfilter(self): return self.has_entityfilter_


  def MergeFrom(self, x):
    """Field-wise merge of another CompiledQuery `x` into self.

    Set singular fields overwrite; repeated fields are appended.
    """
    assert x is not self
    if (x.has_primaryscan()): self.mutable_primaryscan().MergeFrom(x.primaryscan())
    for i in xrange(x.mergejoinscan_size()): self.add_mergejoinscan().CopyFrom(x.mergejoinscan(i))
    if (x.has_index_def()): self.mutable_index_def().MergeFrom(x.index_def())
    if (x.has_offset()): self.set_offset(x.offset())
    if (x.has_limit()): self.set_limit(x.limit())
    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
    for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
    if (x.has_distinct_infix_size()): self.set_distinct_infix_size(x.distinct_infix_size())
    if (x.has_entityfilter()): self.mutable_entityfilter().MergeFrom(x.entityfilter())

  def Equals(self, x):
    """Deep field-by-field equality; returns 1/0 (generated-code convention)."""
    if x is self: return 1
    if self.has_primaryscan_ != x.has_primaryscan_: return 0
    if self.has_primaryscan_ and self.primaryscan_ != x.primaryscan_: return 0
    if len(self.mergejoinscan_) != len(x.mergejoinscan_): return 0
    for e1, e2 in zip(self.mergejoinscan_, x.mergejoinscan_):
      if e1 != e2: return 0
    if self.has_index_def_ != x.has_index_def_: return 0
    if self.has_index_def_ and self.index_def_ != x.index_def_: return 0
    if self.has_offset_ != x.has_offset_: return 0
    if self.has_offset_ and self.offset_ != x.offset_: return 0
    if self.has_limit_ != x.has_limit_: return 0
    if self.has_limit_ and self.limit_ != x.limit_: return 0
    if self.has_keys_only_ != x.has_keys_only_: return 0
    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
    if len(self.property_name_) != len(x.property_name_): return 0
    for e1, e2 in zip(self.property_name_, x.property_name_):
      if e1 != e2: return 0
    if self.has_distinct_infix_size_ != x.has_distinct_infix_size_: return 0
    if self.has_distinct_infix_size_ and self.distinct_infix_size_ != x.distinct_infix_size_: return 0
    if self.has_entityfilter_ != x.has_entityfilter_: return 0
    if self.has_entityfilter_ and self.entityfilter_ != x.entityfilter_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required fields (primaryscan, keys_only) are set and all
    submessages are themselves initialized; optionally append reasons to
    `debug_strs`."""
    initialized = 1
    if (not self.has_primaryscan_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: primaryscan not set.')
    elif not self.primaryscan_.IsInitialized(debug_strs): initialized = 0
    for p in self.mergejoinscan_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_index_def_ and not self.index_def_.IsInitialized(debug_strs)): initialized = 0
    if (not self.has_keys_only_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: keys_only not set.')
    if (self.has_entityfilter_ and not self.entityfilter_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    """Exact serialized size in bytes, assuming required fields are set."""
    n = 0
    n += self.primaryscan_.ByteSize()
    # Each repeated group element costs 2 bytes of start/end tags.
    n += 2 * len(self.mergejoinscan_)
    for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSize()
    if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSize())
    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
    n += 2 * len(self.property_name_)
    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
    if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
    if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSize()
    # +4 covers the always-written fields: primaryscan group tags (2 bytes)
    # and keys_only tag+boolean (2 bytes).
    return n + 4

  def ByteSizePartial(self):
    """Serialized size counting only the fields that are actually set."""
    n = 0
    if (self.has_primaryscan_):
      n += 2
      n += self.primaryscan_.ByteSizePartial()
    n += 2 * len(self.mergejoinscan_)
    for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSizePartial()
    if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSizePartial())
    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
    if (self.has_keys_only_):
      n += 2
    n += 2 * len(self.property_name_)
    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
    if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
    if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSizePartial()
    return n

  def Clear(self):
    """Reset every field to its unset/default state."""
    self.clear_primaryscan()
    self.clear_mergejoinscan()
    self.clear_index_def()
    self.clear_offset()
    self.clear_limit()
    self.clear_keys_only()
    self.clear_property_name()
    self.clear_distinct_infix_size()
    self.clear_entityfilter()

  def OutputUnchecked(self, out):
    """Serialize to `out` without checking presence of required fields.

    Tag constants are pre-encoded (field_number << 3 | wire_type); e.g.
    11/12 bracket the PrimaryScan group, 59/60 each MergeJoinScan group.
    """
    out.putVarInt32(11)
    self.primaryscan_.OutputUnchecked(out)
    out.putVarInt32(12)
    for i in xrange(len(self.mergejoinscan_)):
      out.putVarInt32(59)
      self.mergejoinscan_[i].OutputUnchecked(out)
      out.putVarInt32(60)
    if (self.has_offset_):
      out.putVarInt32(80)
      out.putVarInt32(self.offset_)
    if (self.has_limit_):
      out.putVarInt32(88)
      out.putVarInt32(self.limit_)
    out.putVarInt32(96)
    out.putBoolean(self.keys_only_)
    if (self.has_entityfilter_):
      out.putVarInt32(107)
      self.entityfilter_.OutputUnchecked(out)
      out.putVarInt32(108)
    if (self.has_index_def_):
      out.putVarInt32(170)
      out.putVarInt32(self.index_def_.ByteSize())
      self.index_def_.OutputUnchecked(out)
    for i in xrange(len(self.property_name_)):
      out.putVarInt32(194)
      out.putPrefixedString(self.property_name_[i])
    if (self.has_distinct_infix_size_):
      out.putVarInt32(200)
      out.putVarInt32(self.distinct_infix_size_)

  def OutputPartial(self, out):
    """Serialize only set fields (safe for partially-initialized messages)."""
    if (self.has_primaryscan_):
      out.putVarInt32(11)
      self.primaryscan_.OutputPartial(out)
      out.putVarInt32(12)
    for i in xrange(len(self.mergejoinscan_)):
      out.putVarInt32(59)
      self.mergejoinscan_[i].OutputPartial(out)
      out.putVarInt32(60)
    if (self.has_offset_):
      out.putVarInt32(80)
      out.putVarInt32(self.offset_)
    if (self.has_limit_):
      out.putVarInt32(88)
      out.putVarInt32(self.limit_)
    if (self.has_keys_only_):
      out.putVarInt32(96)
      out.putBoolean(self.keys_only_)
    if (self.has_entityfilter_):
      out.putVarInt32(107)
      self.entityfilter_.OutputPartial(out)
      out.putVarInt32(108)
    if (self.has_index_def_):
      out.putVarInt32(170)
      out.putVarInt32(self.index_def_.ByteSizePartial())
      self.index_def_.OutputPartial(out)
    for i in xrange(len(self.property_name_)):
      out.putVarInt32(194)
      out.putPrefixedString(self.property_name_[i])
    if (self.has_distinct_infix_size_):
      out.putVarInt32(200)
      out.putVarInt32(self.distinct_infix_size_)

  def TryMerge(self, d):
    """Parse wire-format data from decoder `d` until it is exhausted.

    Group-typed fields (11, 59, 107) recurse directly on `d`; the nested
    TryMerge consumes up to its matching end tag. Unknown tags are skipped.

    Raises:
      ProtocolBuffer.ProtocolBufferDecodeError: on a zero (invalid) tag.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.mutable_primaryscan().TryMerge(d)
        continue
      if tt == 59:
        self.add_mergejoinscan().TryMerge(d)
        continue
      if tt == 80:
        self.set_offset(d.getVarInt32())
        continue
      if tt == 88:
        self.set_limit(d.getVarInt32())
        continue
      if tt == 96:
        self.set_keys_only(d.getBoolean())
        continue
      if tt == 107:
        self.mutable_entityfilter().TryMerge(d)
        continue
      if tt == 170:
        # Length-delimited submessage: bounded sub-decoder, then skip ahead.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_index_def().TryMerge(tmp)
        continue
      if tt == 194:
        self.add_property_name(d.getPrefixedString())
        continue
      if tt == 200:
        self.set_distinct_infix_size(d.getVarInt32())
        continue


      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Return a human-readable text dump of the set fields."""
    res=""
    if self.has_primaryscan_:
      res+=prefix+"PrimaryScan {\n"
      res+=self.primaryscan_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    cnt=0
    for e in self.mergejoinscan_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("MergeJoinScan%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_index_def_:
      res+=prefix+"index_def <\n"
      res+=self.index_def_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
    cnt=0
    for e in self.property_name_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_distinct_infix_size_: res+=prefix+("distinct_infix_size: %s\n" % self.DebugFormatInt32(self.distinct_infix_size_))
    if self.has_entityfilter_:
      res+=prefix+"EntityFilter {\n"
      res+=self.entityfilter_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} dict into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants (not pre-shifted tag bytes).
  kPrimaryScanGroup = 1
  kPrimaryScanindex_name = 2
  kPrimaryScanstart_key = 3
  kPrimaryScanstart_inclusive = 4
  kPrimaryScanend_key = 5
  kPrimaryScanend_inclusive = 6
  kPrimaryScanstart_postfix_value = 22
  kPrimaryScanend_postfix_value = 23
  kPrimaryScanend_unapplied_log_timestamp_us = 19
  kMergeJoinScanGroup = 7
  kMergeJoinScanindex_name = 8
  kMergeJoinScanprefix_value = 9
  kMergeJoinScanvalue_prefix = 20
  kindex_def = 21
  koffset = 10
  klimit = 11
  kkeys_only = 12
  kproperty_name = 24
  kdistinct_infix_size = 25
  kEntityFilterGroup = 13
  kEntityFilterdistinct = 14
  kEntityFilterkind = 17
  kEntityFilterancestor = 18

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "PrimaryScan",
    2: "index_name",
    3: "start_key",
    4: "start_inclusive",
    5: "end_key",
    6: "end_inclusive",
    7: "MergeJoinScan",
    8: "index_name",
    9: "prefix_value",
    10: "offset",
    11: "limit",
    12: "keys_only",
    13: "EntityFilter",
    14: "distinct",
    17: "kind",
    18: "ancestor",
    19: "end_unapplied_log_timestamp_us",
    20: "value_prefix",
    21: "index_def",
    22: "start_postfix_value",
    23: "end_postfix_value",
    24: "property_name",
    25: "distinct_infix_size",
  }, 25)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.NUMERIC,
    7: ProtocolBuffer.Encoder.STARTGROUP,
    8: ProtocolBuffer.Encoder.STRING,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.NUMERIC,
    11: ProtocolBuffer.Encoder.NUMERIC,
    12: ProtocolBuffer.Encoder.NUMERIC,
    13: ProtocolBuffer.Encoder.STARTGROUP,
    14: ProtocolBuffer.Encoder.NUMERIC,
    17: ProtocolBuffer.Encoder.STRING,
    18: ProtocolBuffer.Encoder.STRING,
    19: ProtocolBuffer.Encoder.NUMERIC,
    20: ProtocolBuffer.Encoder.NUMERIC,
    21: ProtocolBuffer.Encoder.STRING,
    22: ProtocolBuffer.Encoder.STRING,
    23: ProtocolBuffer.Encoder.STRING,
    24: ProtocolBuffer.Encoder.STRING,
    25: ProtocolBuffer.Encoder.NUMERIC,
  }, 25, ProtocolBuffer.Encoder.MAX_TYPE)


  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledQuery'
class CompiledCursor_PositionIndexValue(ProtocolBuffer.ProtocolMessage):
  """Generated message for the IndexValue group nested in
  CompiledCursor.Position: an optional property name plus a required
  PropertyValue. Tag constants are pre-encoded varints."""
  has_property_ = 0
  property_ = ""
  has_value_ = 0

  def __init__(self, contents=None):
    """Initialize; if `contents` (a serialized string) is given, parse it."""
    self.value_ = PropertyValue()
    if contents is not None: self.MergeFromString(contents)

  def property(self): return self.property_

  def set_property(self, x):
    self.has_property_ = 1
    self.property_ = x

  def clear_property(self):
    if self.has_property_:
      self.has_property_ = 0
      self.property_ = ""

  def has_property(self): return self.has_property_

  def value(self): return self.value_

  def mutable_value(self): self.has_value_ = 1; return self.value_

  def clear_value(self):self.has_value_ = 0; self.value_.Clear()

  def has_value(self): return self.has_value_


  def MergeFrom(self, x):
    """Field-wise merge of another instance `x` into self."""
    assert x is not self
    if (x.has_property()): self.set_property(x.property())
    if (x.has_value()): self.mutable_value().MergeFrom(x.value())

  def Equals(self, x):
    """Deep field-by-field equality; returns 1/0."""
    if x is self: return 1
    if self.has_property_ != x.has_property_: return 0
    if self.has_property_ and self.property_ != x.property_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required field `value` is set and initialized."""
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    elif not self.value_.IsInitialized(debug_strs): initialized = 0
    return initialized

  def ByteSize(self):
    """Exact serialized size, assuming required fields are set."""
    n = 0
    if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
    n += self.lengthString(self.value_.ByteSize())
    # +2 is the always-written 2-byte tag (250) for `value`.
    return n + 2

  def ByteSizePartial(self):
    """Serialized size counting only the fields that are set."""
    n = 0
    if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
    if (self.has_value_):
      n += 2
      n += self.lengthString(self.value_.ByteSizePartial())
    return n

  def Clear(self):
    self.clear_property()
    self.clear_value()

  def OutputUnchecked(self, out):
    """Serialize to `out` without presence checks on required fields."""
    if (self.has_property_):
      out.putVarInt32(242)
      out.putPrefixedString(self.property_)
    out.putVarInt32(250)
    out.putVarInt32(self.value_.ByteSize())
    self.value_.OutputUnchecked(out)

  def OutputPartial(self, out):
    """Serialize only the fields that are set."""
    if (self.has_property_):
      out.putVarInt32(242)
      out.putPrefixedString(self.property_)
    if (self.has_value_):
      out.putVarInt32(250)
      out.putVarInt32(self.value_.ByteSizePartial())
      self.value_.OutputPartial(out)

  def TryMerge(self, d):
    """Parse fields from decoder `d` until the group's end tag (236).

    Raises:
      ProtocolBuffer.ProtocolBufferDecodeError: on a zero (invalid) tag.
    """
    while 1:
      tt = d.getVarInt32()
      # 236 is the ENDGROUP tag terminating this IndexValue group.
      if tt == 236: break
      if tt == 242:
        self.set_property(d.getPrefixedString())
        continue
      if tt == 250:
        # Length-delimited submessage: bounded sub-decoder, then skip ahead.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_value().TryMerge(tmp)
        continue


      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Return a human-readable text dump of the set fields."""
    res=""
    if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
    if self.has_value_:
      res+=prefix+"value <\n"
      res+=self.value_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
+
class CompiledCursor_Position(ProtocolBuffer.ProtocolMessage):
  """Generated message for the Position group nested in CompiledCursor:
  an optional start_key, repeated IndexValue groups, an optional key
  (Reference), and start_inclusive (default 1)."""
  has_start_key_ = 0
  start_key_ = ""
  has_key_ = 0
  key_ = None
  has_start_inclusive_ = 0
  # Note the non-zero default: start_inclusive defaults to true (1).
  start_inclusive_ = 1

  def __init__(self, contents=None):
    """Initialize; if `contents` (a serialized string) is given, parse it."""
    self.indexvalue_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def start_key(self): return self.start_key_

  def set_start_key(self, x):
    self.has_start_key_ = 1
    self.start_key_ = x

  def clear_start_key(self):
    if self.has_start_key_:
      self.has_start_key_ = 0
      self.start_key_ = ""

  def has_start_key(self): return self.has_start_key_

  def indexvalue_size(self): return len(self.indexvalue_)
  def indexvalue_list(self): return self.indexvalue_

  def indexvalue(self, i):
    return self.indexvalue_[i]

  def mutable_indexvalue(self, i):
    return self.indexvalue_[i]

  def add_indexvalue(self):
    # Append a new empty element and return it for in-place population.
    x = CompiledCursor_PositionIndexValue()
    self.indexvalue_.append(x)
    return x

  def clear_indexvalue(self):
    self.indexvalue_ = []
  def key(self):
    # Double-checked lazy init of the optional Reference submessage.
    if self.key_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.key_ is None: self.key_ = Reference()
      finally:
        self.lazy_init_lock_.release()
    return self.key_

  def mutable_key(self): self.has_key_ = 1; return self.key()

  def clear_key(self):

    if self.has_key_:
      self.has_key_ = 0;
      if self.key_ is not None: self.key_.Clear()

  def has_key(self): return self.has_key_

  def start_inclusive(self): return self.start_inclusive_

  def set_start_inclusive(self, x):
    self.has_start_inclusive_ = 1
    self.start_inclusive_ = x

  def clear_start_inclusive(self):
    if self.has_start_inclusive_:
      self.has_start_inclusive_ = 0
      self.start_inclusive_ = 1

  def has_start_inclusive(self): return self.has_start_inclusive_


  def MergeFrom(self, x):
    """Field-wise merge of another instance `x` into self."""
    assert x is not self
    if (x.has_start_key()): self.set_start_key(x.start_key())
    for i in xrange(x.indexvalue_size()): self.add_indexvalue().CopyFrom(x.indexvalue(i))
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())

  def Equals(self, x):
    """Deep field-by-field equality; returns 1/0."""
    if x is self: return 1
    if self.has_start_key_ != x.has_start_key_: return 0
    if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
    if len(self.indexvalue_) != len(x.indexvalue_): return 0
    for e1, e2 in zip(self.indexvalue_, x.indexvalue_):
      if e1 != e2: return 0
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
    if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all set submessages are initialized (no required fields
    of this message's own)."""
    initialized = 1
    for p in self.indexvalue_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    """Exact serialized size of the set fields."""
    n = 0
    if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
    # Each IndexValue group costs 4 bytes of start/end tags (2-byte tags
    # 235/236 each).
    n += 4 * len(self.indexvalue_)
    for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSize()
    if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSize())
    if (self.has_start_inclusive_): n += 3
    return n

  def ByteSizePartial(self):
    """Serialized size counting only the fields that are set."""
    n = 0
    if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
    n += 4 * len(self.indexvalue_)
    for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSizePartial()
    if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSizePartial())
    if (self.has_start_inclusive_): n += 3
    return n

  def Clear(self):
    self.clear_start_key()
    self.clear_indexvalue()
    self.clear_key()
    self.clear_start_inclusive()

  def OutputUnchecked(self, out):
    """Serialize to `out` without presence checks on required fields."""
    if (self.has_start_key_):
      out.putVarInt32(218)
      out.putPrefixedString(self.start_key_)
    if (self.has_start_inclusive_):
      out.putVarInt32(224)
      out.putBoolean(self.start_inclusive_)
    for i in xrange(len(self.indexvalue_)):
      out.putVarInt32(235)
      self.indexvalue_[i].OutputUnchecked(out)
      out.putVarInt32(236)
    if (self.has_key_):
      out.putVarInt32(258)
      out.putVarInt32(self.key_.ByteSize())
      self.key_.OutputUnchecked(out)

  def OutputPartial(self, out):
    """Serialize only the fields that are set."""
    if (self.has_start_key_):
      out.putVarInt32(218)
      out.putPrefixedString(self.start_key_)
    if (self.has_start_inclusive_):
      out.putVarInt32(224)
      out.putBoolean(self.start_inclusive_)
    for i in xrange(len(self.indexvalue_)):
      out.putVarInt32(235)
      self.indexvalue_[i].OutputPartial(out)
      out.putVarInt32(236)
    if (self.has_key_):
      out.putVarInt32(258)
      out.putVarInt32(self.key_.ByteSizePartial())
      self.key_.OutputPartial(out)

  def TryMerge(self, d):
    """Parse fields from decoder `d` until the Position group's end tag (20).

    Raises:
      ProtocolBuffer.ProtocolBufferDecodeError: on a zero (invalid) tag.
    """
    while 1:
      tt = d.getVarInt32()
      # 20 is the ENDGROUP tag terminating this Position group.
      if tt == 20: break
      if tt == 218:
        self.set_start_key(d.getPrefixedString())
        continue
      if tt == 224:
        self.set_start_inclusive(d.getBoolean())
        continue
      if tt == 235:
        self.add_indexvalue().TryMerge(d)
        continue
      if tt == 258:
        # Length-delimited submessage: bounded sub-decoder, then skip ahead.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue


      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Return a human-readable text dump of the set fields."""
    res=""
    if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
    cnt=0
    for e in self.indexvalue_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("IndexValue%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
    return res
+
class CompiledCursor(ProtocolBuffer.ProtocolMessage):
  """Generated message for apphosting_datastore_v3.CompiledCursor: a single
  optional Position group (field 2), serialized with group tags 19/20."""
  has_position_ = 0
  position_ = None

  def __init__(self, contents=None):
    """Initialize; if `contents` (a serialized string) is given, parse it."""
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def position(self):
    # Double-checked lazy init of the optional Position group.
    if self.position_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.position_ is None: self.position_ = CompiledCursor_Position()
      finally:
        self.lazy_init_lock_.release()
    return self.position_

  def mutable_position(self): self.has_position_ = 1; return self.position()

  def clear_position(self):

    if self.has_position_:
      self.has_position_ = 0;
      if self.position_ is not None: self.position_.Clear()

  def has_position(self): return self.has_position_


  def MergeFrom(self, x):
    """Field-wise merge of another CompiledCursor `x` into self."""
    assert x is not self
    if (x.has_position()): self.mutable_position().MergeFrom(x.position())

  def Equals(self, x):
    """Deep field-by-field equality; returns 1/0."""
    if x is self: return 1
    if self.has_position_ != x.has_position_: return 0
    if self.has_position_ and self.position_ != x.position_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the position submessage (when set) is initialized."""
    initialized = 1
    if (self.has_position_ and not self.position_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    """Exact serialized size: 2 tag bytes around the group, if set."""
    n = 0
    if (self.has_position_): n += 2 + self.position_.ByteSize()
    return n

  def ByteSizePartial(self):
    """Serialized size counting only the fields that are set."""
    n = 0
    if (self.has_position_): n += 2 + self.position_.ByteSizePartial()
    return n

  def Clear(self):
    self.clear_position()

  def OutputUnchecked(self, out):
    """Serialize to `out`; 19/20 are the Position group start/end tags."""
    if (self.has_position_):
      out.putVarInt32(19)
      self.position_.OutputUnchecked(out)
      out.putVarInt32(20)

  def OutputPartial(self, out):
    """Serialize only the fields that are set."""
    if (self.has_position_):
      out.putVarInt32(19)
      self.position_.OutputPartial(out)
      out.putVarInt32(20)

  def TryMerge(self, d):
    """Parse wire-format data from decoder `d` until it is exhausted.

    Raises:
      ProtocolBuffer.ProtocolBufferDecodeError: on a zero (invalid) tag.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 19:
        self.mutable_position().TryMerge(d)
        continue


      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Return a human-readable text dump of the set fields."""
    res=""
    if self.has_position_:
      res+=prefix+"Position {\n"
      res+=self.position_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} dict into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants (not pre-shifted tag bytes).
  kPositionGroup = 2
  kPositionstart_key = 27
  kPositionIndexValueGroup = 29
  kPositionIndexValueproperty = 30
  kPositionIndexValuevalue = 31
  kPositionkey = 32
  kPositionstart_inclusive = 28

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    2: "Position",
    27: "start_key",
    28: "start_inclusive",
    29: "IndexValue",
    30: "property",
    31: "value",
    32: "key",
  }, 32)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.STARTGROUP,
    27: ProtocolBuffer.Encoder.STRING,
    28: ProtocolBuffer.Encoder.NUMERIC,
    29: ProtocolBuffer.Encoder.STARTGROUP,
    30: ProtocolBuffer.Encoder.STRING,
    31: ProtocolBuffer.Encoder.STRING,
    32: ProtocolBuffer.Encoder.STRING,
  }, 32, ProtocolBuffer.Encoder.MAX_TYPE)


  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledCursor'
+class Cursor(ProtocolBuffer.ProtocolMessage):
+  has_cursor_ = 0
+  cursor_ = 0
+  has_app_ = 0
+  app_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def cursor(self): return self.cursor_
+
+  def set_cursor(self, x):
+    self.has_cursor_ = 1
+    self.cursor_ = x
+
+  def clear_cursor(self):
+    if self.has_cursor_:
+      self.has_cursor_ = 0
+      self.cursor_ = 0
+
+  def has_cursor(self): return self.has_cursor_
+
+  def app(self): return self.app_
+
+  def set_app(self, x):
+    self.has_app_ = 1
+    self.app_ = x
+
+  def clear_app(self):
+    if self.has_app_:
+      self.has_app_ = 0
+      self.app_ = ""
+
+  def has_app(self): return self.has_app_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_cursor()): self.set_cursor(x.cursor())
+    if (x.has_app()): self.set_app(x.app())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_cursor_ != x.has_cursor_: return 0
+    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
+    if self.has_app_ != x.has_app_: return 0
+    if self.has_app_ and self.app_ != x.app_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_cursor_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: cursor not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
+    return n + 9
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_cursor_):
+      n += 9
+    if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
+    return n
+
+  def Clear(self):
+    self.clear_cursor()
+    self.clear_app()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(9)
+    out.put64(self.cursor_)
+    if (self.has_app_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.app_)
+
+  def OutputPartial(self, out):
+    if (self.has_cursor_):
+      out.putVarInt32(9)
+      out.put64(self.cursor_)
+    if (self.has_app_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.app_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 9:
+        self.set_cursor(d.get64())
+        continue
+      if tt == 18:
+        self.set_app(d.getPrefixedString())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Debug text dump of the fields that are set."""
+    res=""
+    if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_))
+    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    """Expand a sparse {tag: value} dict into a dense tuple indexed 0..maxtag.
+    Called at class-definition time (no self), hence the bare signature."""
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  # Field-number constants.
+  kcursor = 1
+  kapp = 2
+
+  # Tag number -> field name (index 0 is the RPC error-code slot).
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cursor",
+    2: "app",
+  }, 2)
+
+  # Tag number -> wire type.
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.DOUBLE,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cursor'
+class Error(ProtocolBuffer.ProtocolMessage):
+  """Empty datastore_v3 message whose enum constants name RPC error codes."""
+
+
+  BAD_REQUEST  =    1
+  CONCURRENT_TRANSACTION =    2
+  INTERNAL_ERROR =    3
+  NEED_INDEX   =    4
+  TIMEOUT      =    5
+  PERMISSION_DENIED =    6
+  BIGTABLE_ERROR =    7
+  COMMITTED_BUT_STILL_APPLYING =    8
+  CAPABILITY_DISABLED =    9
+  TRY_ALTERNATE_BACKEND =   10
+  SAFE_TIME_TOO_OLD =   11
+
+  # Numeric code -> symbolic name, used by ErrorCode_Name below.
+  _ErrorCode_NAMES = {
+    1: "BAD_REQUEST",
+    2: "CONCURRENT_TRANSACTION",
+    3: "INTERNAL_ERROR",
+    4: "NEED_INDEX",
+    5: "TIMEOUT",
+    6: "PERMISSION_DENIED",
+    7: "BIGTABLE_ERROR",
+    8: "COMMITTED_BUT_STILL_APPLYING",
+    9: "CAPABILITY_DISABLED",
+    10: "TRY_ALTERNATE_BACKEND",
+    11: "SAFE_TIME_TOO_OLD",
+  }
+
+  # Name lookup for an error code; returns "" for unknown codes.
+  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+  ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+  def __init__(self, contents=None):
+    pass
+    if contents is not None: self.MergeFromString(contents)
+
+
+  def MergeFrom(self, x):
+    """No fields to merge; the message body is empty."""
+    assert x is not self
+
+  def Equals(self, x):
+    """All Error instances are equal (no fields)."""
+    if x is self: return 1
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    """Always initialized; there are no required fields."""
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    return n
+
+  def Clear(self):
+    pass
+
+  def OutputUnchecked(self, out):
+    pass
+
+  def OutputPartial(self, out):
+    pass
+
+  def TryMerge(self, d):
+    """Consume and skip any tags; tag 0 is a wire-format error."""
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    """Expand a sparse {tag: value} dict into a dense tuple indexed 0..maxtag."""
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Error'
+class Cost_CommitCost(ProtocolBuffer.ProtocolMessage):
+  """Cost.CommitCost group: entity put/delete counts requested by a commit."""
+  has_requested_entity_puts_ = 0
+  requested_entity_puts_ = 0
+  has_requested_entity_deletes_ = 0
+  requested_entity_deletes_ = 0
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def requested_entity_puts(self): return self.requested_entity_puts_
+
+  def set_requested_entity_puts(self, x):
+    self.has_requested_entity_puts_ = 1
+    self.requested_entity_puts_ = x
+
+  def clear_requested_entity_puts(self):
+    if self.has_requested_entity_puts_:
+      self.has_requested_entity_puts_ = 0
+      self.requested_entity_puts_ = 0
+
+  def has_requested_entity_puts(self): return self.has_requested_entity_puts_
+
+  def requested_entity_deletes(self): return self.requested_entity_deletes_
+
+  def set_requested_entity_deletes(self, x):
+    self.has_requested_entity_deletes_ = 1
+    self.requested_entity_deletes_ = x
+
+  def clear_requested_entity_deletes(self):
+    if self.has_requested_entity_deletes_:
+      self.has_requested_entity_deletes_ = 0
+      self.requested_entity_deletes_ = 0
+
+  def has_requested_entity_deletes(self): return self.has_requested_entity_deletes_
+
+
+  def MergeFrom(self, x):
+    """Merge the set fields of another CommitCost into this one."""
+    assert x is not self
+    if (x.has_requested_entity_puts()): self.set_requested_entity_puts(x.requested_entity_puts())
+    if (x.has_requested_entity_deletes()): self.set_requested_entity_deletes(x.requested_entity_deletes())
+
+  def Equals(self, x):
+    """Field-wise equality; returns 1 when equal, 0 otherwise."""
+    if x is self: return 1
+    if self.has_requested_entity_puts_ != x.has_requested_entity_puts_: return 0
+    if self.has_requested_entity_puts_ and self.requested_entity_puts_ != x.requested_entity_puts_: return 0
+    if self.has_requested_entity_deletes_ != x.has_requested_entity_deletes_: return 0
+    if self.has_requested_entity_deletes_ and self.requested_entity_deletes_ != x.requested_entity_deletes_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    """Always initialized; both fields are optional."""
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
+    if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
+    if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
+    return n
+
+  def Clear(self):
+    self.clear_requested_entity_puts()
+    self.clear_requested_entity_deletes()
+
+  # Wire tags: 48 = field 6 (varint), 56 = field 7 (varint).
+  def OutputUnchecked(self, out):
+    if (self.has_requested_entity_puts_):
+      out.putVarInt32(48)
+      out.putVarInt32(self.requested_entity_puts_)
+    if (self.has_requested_entity_deletes_):
+      out.putVarInt32(56)
+      out.putVarInt32(self.requested_entity_deletes_)
+
+  def OutputPartial(self, out):
+    if (self.has_requested_entity_puts_):
+      out.putVarInt32(48)
+      out.putVarInt32(self.requested_entity_puts_)
+    if (self.has_requested_entity_deletes_):
+      out.putVarInt32(56)
+      out.putVarInt32(self.requested_entity_deletes_)
+
+  def TryMerge(self, d):
+    # 44 is the ENDGROUP tag of the enclosing CommitCost group (field 5).
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 44: break
+      if tt == 48:
+        self.set_requested_entity_puts(d.getVarInt32())
+        continue
+      if tt == 56:
+        self.set_requested_entity_deletes(d.getVarInt32())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_requested_entity_puts_: res+=prefix+("requested_entity_puts: %s\n" % self.DebugFormatInt32(self.requested_entity_puts_))
+    if self.has_requested_entity_deletes_: res+=prefix+("requested_entity_deletes: %s\n" % self.DebugFormatInt32(self.requested_entity_deletes_))
+    return res
+
+class Cost(ProtocolBuffer.ProtocolMessage):
+  """Write-cost statistics (index/entity writes, commit cost, storage delta)
+  reported by datastore mutation RPCs."""
+  has_index_writes_ = 0
+  index_writes_ = 0
+  has_index_write_bytes_ = 0
+  index_write_bytes_ = 0
+  has_entity_writes_ = 0
+  entity_writes_ = 0
+  has_entity_write_bytes_ = 0
+  entity_write_bytes_ = 0
+  has_commitcost_ = 0
+  commitcost_ = None
+  has_approximate_storage_delta_ = 0
+  approximate_storage_delta_ = 0
+  has_id_sequence_updates_ = 0
+  id_sequence_updates_ = 0
+
+  def __init__(self, contents=None):
+    # Lock guards the lazy creation of the commitcost_ submessage.
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def index_writes(self): return self.index_writes_
+
+  def set_index_writes(self, x):
+    self.has_index_writes_ = 1
+    self.index_writes_ = x
+
+  def clear_index_writes(self):
+    if self.has_index_writes_:
+      self.has_index_writes_ = 0
+      self.index_writes_ = 0
+
+  def has_index_writes(self): return self.has_index_writes_
+
+  def index_write_bytes(self): return self.index_write_bytes_
+
+  def set_index_write_bytes(self, x):
+    self.has_index_write_bytes_ = 1
+    self.index_write_bytes_ = x
+
+  def clear_index_write_bytes(self):
+    if self.has_index_write_bytes_:
+      self.has_index_write_bytes_ = 0
+      self.index_write_bytes_ = 0
+
+  def has_index_write_bytes(self): return self.has_index_write_bytes_
+
+  def entity_writes(self): return self.entity_writes_
+
+  def set_entity_writes(self, x):
+    self.has_entity_writes_ = 1
+    self.entity_writes_ = x
+
+  def clear_entity_writes(self):
+    if self.has_entity_writes_:
+      self.has_entity_writes_ = 0
+      self.entity_writes_ = 0
+
+  def has_entity_writes(self): return self.has_entity_writes_
+
+  def entity_write_bytes(self): return self.entity_write_bytes_
+
+  def set_entity_write_bytes(self, x):
+    self.has_entity_write_bytes_ = 1
+    self.entity_write_bytes_ = x
+
+  def clear_entity_write_bytes(self):
+    if self.has_entity_write_bytes_:
+      self.has_entity_write_bytes_ = 0
+      self.entity_write_bytes_ = 0
+
+  def has_entity_write_bytes(self): return self.has_entity_write_bytes_
+
+  def commitcost(self):
+    # Double-checked lazy init of the CommitCost submessage.
+    if self.commitcost_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.commitcost_ is None: self.commitcost_ = Cost_CommitCost()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.commitcost_
+
+  def mutable_commitcost(self): self.has_commitcost_ = 1; return self.commitcost()
+
+  def clear_commitcost(self):
+
+    if self.has_commitcost_:
+      self.has_commitcost_ = 0;
+      if self.commitcost_ is not None: self.commitcost_.Clear()
+
+  def has_commitcost(self): return self.has_commitcost_
+
+  def approximate_storage_delta(self): return self.approximate_storage_delta_
+
+  def set_approximate_storage_delta(self, x):
+    self.has_approximate_storage_delta_ = 1
+    self.approximate_storage_delta_ = x
+
+  def clear_approximate_storage_delta(self):
+    if self.has_approximate_storage_delta_:
+      self.has_approximate_storage_delta_ = 0
+      self.approximate_storage_delta_ = 0
+
+  def has_approximate_storage_delta(self): return self.has_approximate_storage_delta_
+
+  def id_sequence_updates(self): return self.id_sequence_updates_
+
+  def set_id_sequence_updates(self, x):
+    self.has_id_sequence_updates_ = 1
+    self.id_sequence_updates_ = x
+
+  def clear_id_sequence_updates(self):
+    if self.has_id_sequence_updates_:
+      self.has_id_sequence_updates_ = 0
+      self.id_sequence_updates_ = 0
+
+  def has_id_sequence_updates(self): return self.has_id_sequence_updates_
+
+
+  def MergeFrom(self, x):
+    """Merge the set fields of another Cost into this one."""
+    assert x is not self
+    if (x.has_index_writes()): self.set_index_writes(x.index_writes())
+    if (x.has_index_write_bytes()): self.set_index_write_bytes(x.index_write_bytes())
+    if (x.has_entity_writes()): self.set_entity_writes(x.entity_writes())
+    if (x.has_entity_write_bytes()): self.set_entity_write_bytes(x.entity_write_bytes())
+    if (x.has_commitcost()): self.mutable_commitcost().MergeFrom(x.commitcost())
+    if (x.has_approximate_storage_delta()): self.set_approximate_storage_delta(x.approximate_storage_delta())
+    if (x.has_id_sequence_updates()): self.set_id_sequence_updates(x.id_sequence_updates())
+
+  def Equals(self, x):
+    """Field-wise equality; returns 1 when equal, 0 otherwise."""
+    if x is self: return 1
+    if self.has_index_writes_ != x.has_index_writes_: return 0
+    if self.has_index_writes_ and self.index_writes_ != x.index_writes_: return 0
+    if self.has_index_write_bytes_ != x.has_index_write_bytes_: return 0
+    if self.has_index_write_bytes_ and self.index_write_bytes_ != x.index_write_bytes_: return 0
+    if self.has_entity_writes_ != x.has_entity_writes_: return 0
+    if self.has_entity_writes_ and self.entity_writes_ != x.entity_writes_: return 0
+    if self.has_entity_write_bytes_ != x.has_entity_write_bytes_: return 0
+    if self.has_entity_write_bytes_ and self.entity_write_bytes_ != x.entity_write_bytes_: return 0
+    if self.has_commitcost_ != x.has_commitcost_: return 0
+    if self.has_commitcost_ and self.commitcost_ != x.commitcost_: return 0
+    if self.has_approximate_storage_delta_ != x.has_approximate_storage_delta_: return 0
+    if self.has_approximate_storage_delta_ and self.approximate_storage_delta_ != x.approximate_storage_delta_: return 0
+    if self.has_id_sequence_updates_ != x.has_id_sequence_updates_: return 0
+    if self.has_id_sequence_updates_ and self.id_sequence_updates_ != x.id_sequence_updates_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    """Initialized iff the optional CommitCost group, when set, is too."""
+    initialized = 1
+    if (self.has_commitcost_ and not self.commitcost_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    # CommitCost is a group: 2 bytes cover its STARTGROUP/ENDGROUP tags.
+    n = 0
+    if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
+    if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
+    if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
+    if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
+    if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSize()
+    if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
+    if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
+    if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
+    if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
+    if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
+    if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSizePartial()
+    if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
+    if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
+    return n
+
+  def Clear(self):
+    self.clear_index_writes()
+    self.clear_index_write_bytes()
+    self.clear_entity_writes()
+    self.clear_entity_write_bytes()
+    self.clear_commitcost()
+    self.clear_approximate_storage_delta()
+    self.clear_id_sequence_updates()
+
+  # Group field 5 is framed by tags 43 (STARTGROUP) and 44 (ENDGROUP).
+  def OutputUnchecked(self, out):
+    if (self.has_index_writes_):
+      out.putVarInt32(8)
+      out.putVarInt32(self.index_writes_)
+    if (self.has_index_write_bytes_):
+      out.putVarInt32(16)
+      out.putVarInt32(self.index_write_bytes_)
+    if (self.has_entity_writes_):
+      out.putVarInt32(24)
+      out.putVarInt32(self.entity_writes_)
+    if (self.has_entity_write_bytes_):
+      out.putVarInt32(32)
+      out.putVarInt32(self.entity_write_bytes_)
+    if (self.has_commitcost_):
+      out.putVarInt32(43)
+      self.commitcost_.OutputUnchecked(out)
+      out.putVarInt32(44)
+    if (self.has_approximate_storage_delta_):
+      out.putVarInt32(64)
+      out.putVarInt32(self.approximate_storage_delta_)
+    if (self.has_id_sequence_updates_):
+      out.putVarInt32(72)
+      out.putVarInt32(self.id_sequence_updates_)
+
+  def OutputPartial(self, out):
+    if (self.has_index_writes_):
+      out.putVarInt32(8)
+      out.putVarInt32(self.index_writes_)
+    if (self.has_index_write_bytes_):
+      out.putVarInt32(16)
+      out.putVarInt32(self.index_write_bytes_)
+    if (self.has_entity_writes_):
+      out.putVarInt32(24)
+      out.putVarInt32(self.entity_writes_)
+    if (self.has_entity_write_bytes_):
+      out.putVarInt32(32)
+      out.putVarInt32(self.entity_write_bytes_)
+    if (self.has_commitcost_):
+      out.putVarInt32(43)
+      self.commitcost_.OutputPartial(out)
+      out.putVarInt32(44)
+    if (self.has_approximate_storage_delta_):
+      out.putVarInt32(64)
+      out.putVarInt32(self.approximate_storage_delta_)
+    if (self.has_id_sequence_updates_):
+      out.putVarInt32(72)
+      out.putVarInt32(self.id_sequence_updates_)
+
+  def TryMerge(self, d):
+    """Decode fields from decoder d; the group member (tag 43) delegates
+    to Cost_CommitCost.TryMerge, which consumes the matching end tag 44."""
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_index_writes(d.getVarInt32())
+        continue
+      if tt == 16:
+        self.set_index_write_bytes(d.getVarInt32())
+        continue
+      if tt == 24:
+        self.set_entity_writes(d.getVarInt32())
+        continue
+      if tt == 32:
+        self.set_entity_write_bytes(d.getVarInt32())
+        continue
+      if tt == 43:
+        self.mutable_commitcost().TryMerge(d)
+        continue
+      if tt == 64:
+        self.set_approximate_storage_delta(d.getVarInt32())
+        continue
+      if tt == 72:
+        self.set_id_sequence_updates(d.getVarInt32())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Debug text dump of the fields that are set."""
+    res=""
+    if self.has_index_writes_: res+=prefix+("index_writes: %s\n" % self.DebugFormatInt32(self.index_writes_))
+    if self.has_index_write_bytes_: res+=prefix+("index_write_bytes: %s\n" % self.DebugFormatInt32(self.index_write_bytes_))
+    if self.has_entity_writes_: res+=prefix+("entity_writes: %s\n" % self.DebugFormatInt32(self.entity_writes_))
+    if self.has_entity_write_bytes_: res+=prefix+("entity_write_bytes: %s\n" % self.DebugFormatInt32(self.entity_write_bytes_))
+    if self.has_commitcost_:
+      res+=prefix+"CommitCost {\n"
+      res+=self.commitcost_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+    if self.has_approximate_storage_delta_: res+=prefix+("approximate_storage_delta: %s\n" % self.DebugFormatInt32(self.approximate_storage_delta_))
+    if self.has_id_sequence_updates_: res+=prefix+("id_sequence_updates: %s\n" % self.DebugFormatInt32(self.id_sequence_updates_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    """Expand a sparse {tag: value} dict into a dense tuple indexed 0..maxtag."""
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  # Field-number constants (CommitCost members 6 and 7 live in group 5).
+  kindex_writes = 1
+  kindex_write_bytes = 2
+  kentity_writes = 3
+  kentity_write_bytes = 4
+  kCommitCostGroup = 5
+  kCommitCostrequested_entity_puts = 6
+  kCommitCostrequested_entity_deletes = 7
+  kapproximate_storage_delta = 8
+  kid_sequence_updates = 9
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index_writes",
+    2: "index_write_bytes",
+    3: "entity_writes",
+    4: "entity_write_bytes",
+    5: "CommitCost",
+    6: "requested_entity_puts",
+    7: "requested_entity_deletes",
+    8: "approximate_storage_delta",
+    9: "id_sequence_updates",
+  }, 9)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STARTGROUP,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+    7: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.NUMERIC,
+    9: ProtocolBuffer.Encoder.NUMERIC,
+  }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cost'
+class GetRequest(ProtocolBuffer.ProtocolMessage):
+  """datastore_v3.Get request: repeated keys to fetch plus read options
+  (transaction, failover_ms, strong consistency, deferred results)."""
+  has_header_ = 0
+  header_ = None
+  has_transaction_ = 0
+  transaction_ = None
+  has_failover_ms_ = 0
+  failover_ms_ = 0
+  has_strong_ = 0
+  strong_ = 0
+  has_allow_deferred_ = 0
+  allow_deferred_ = 0
+
+  def __init__(self, contents=None):
+    self.key_ = []
+    # Lock guards lazy creation of the header_ and transaction_ submessages.
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def header(self):
+    if self.header_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.header_ is None: self.header_ = InternalHeader()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.header_
+
+  def mutable_header(self): self.has_header_ = 1; return self.header()
+
+  def clear_header(self):
+
+    if self.has_header_:
+      self.has_header_ = 0;
+      if self.header_ is not None: self.header_.Clear()
+
+  def has_header(self): return self.has_header_
+
+  def key_size(self): return len(self.key_)
+  def key_list(self): return self.key_
+
+  def key(self, i):
+    return self.key_[i]
+
+  def mutable_key(self, i):
+    return self.key_[i]
+
+  def add_key(self):
+    x = Reference()
+    self.key_.append(x)
+    return x
+
+  def clear_key(self):
+    self.key_ = []
+  def transaction(self):
+    if self.transaction_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.transaction_ is None: self.transaction_ = Transaction()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.transaction_
+
+  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+  def clear_transaction(self):
+
+    if self.has_transaction_:
+      self.has_transaction_ = 0;
+      if self.transaction_ is not None: self.transaction_.Clear()
+
+  def has_transaction(self): return self.has_transaction_
+
+  def failover_ms(self): return self.failover_ms_
+
+  def set_failover_ms(self, x):
+    self.has_failover_ms_ = 1
+    self.failover_ms_ = x
+
+  def clear_failover_ms(self):
+    if self.has_failover_ms_:
+      self.has_failover_ms_ = 0
+      self.failover_ms_ = 0
+
+  def has_failover_ms(self): return self.has_failover_ms_
+
+  def strong(self): return self.strong_
+
+  def set_strong(self, x):
+    self.has_strong_ = 1
+    self.strong_ = x
+
+  def clear_strong(self):
+    if self.has_strong_:
+      self.has_strong_ = 0
+      self.strong_ = 0
+
+  def has_strong(self): return self.has_strong_
+
+  def allow_deferred(self): return self.allow_deferred_
+
+  def set_allow_deferred(self, x):
+    self.has_allow_deferred_ = 1
+    self.allow_deferred_ = x
+
+  def clear_allow_deferred(self):
+    if self.has_allow_deferred_:
+      self.has_allow_deferred_ = 0
+      self.allow_deferred_ = 0
+
+  def has_allow_deferred(self): return self.has_allow_deferred_
+
+
+  def MergeFrom(self, x):
+    """Merge the set fields of another GetRequest into this one; keys
+    are appended (copied), not replaced."""
+    assert x is not self
+    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
+    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
+    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+    if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
+    if (x.has_strong()): self.set_strong(x.strong())
+    if (x.has_allow_deferred()): self.set_allow_deferred(x.allow_deferred())
+
+  def Equals(self, x):
+    """Field-wise equality; returns 1 when equal, 0 otherwise."""
+    if x is self: return 1
+    if self.has_header_ != x.has_header_: return 0
+    if self.has_header_ and self.header_ != x.header_: return 0
+    if len(self.key_) != len(x.key_): return 0
+    for e1, e2 in zip(self.key_, x.key_):
+      if e1 != e2: return 0
+    if self.has_transaction_ != x.has_transaction_: return 0
+    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+    if self.has_failover_ms_ != x.has_failover_ms_: return 0
+    if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
+    if self.has_strong_ != x.has_strong_: return 0
+    if self.has_strong_ and self.strong_ != x.strong_: return 0
+    if self.has_allow_deferred_ != x.has_allow_deferred_: return 0
+    if self.has_allow_deferred_ and self.allow_deferred_ != x.allow_deferred_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    """Initialized iff every set submessage and every key is."""
+    initialized = 1
+    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.key_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
+    n += 1 * len(self.key_)
+    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
+    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
+    if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
+    if (self.has_strong_): n += 2
+    if (self.has_allow_deferred_): n += 2
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
+    n += 1 * len(self.key_)
+    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
+    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
+    if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
+    if (self.has_strong_): n += 2
+    if (self.has_allow_deferred_): n += 2
+    return n
+
+  def Clear(self):
+    self.clear_header()
+    self.clear_key()
+    self.clear_transaction()
+    self.clear_failover_ms()
+    self.clear_strong()
+    self.clear_allow_deferred()
+
+  # Wire tags: 10=key(1), 18=transaction(2), 24=failover_ms(3),
+  # 32=strong(4), 40=allow_deferred(5), 50=header(6).
+  def OutputUnchecked(self, out):
+    for i in xrange(len(self.key_)):
+      out.putVarInt32(10)
+      out.putVarInt32(self.key_[i].ByteSize())
+      self.key_[i].OutputUnchecked(out)
+    if (self.has_transaction_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.transaction_.ByteSize())
+      self.transaction_.OutputUnchecked(out)
+    if (self.has_failover_ms_):
+      out.putVarInt32(24)
+      out.putVarInt64(self.failover_ms_)
+    if (self.has_strong_):
+      out.putVarInt32(32)
+      out.putBoolean(self.strong_)
+    if (self.has_allow_deferred_):
+      out.putVarInt32(40)
+      out.putBoolean(self.allow_deferred_)
+    if (self.has_header_):
+      out.putVarInt32(50)
+      out.putVarInt32(self.header_.ByteSize())
+      self.header_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    for i in xrange(len(self.key_)):
+      out.putVarInt32(10)
+      out.putVarInt32(self.key_[i].ByteSizePartial())
+      self.key_[i].OutputPartial(out)
+    if (self.has_transaction_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.transaction_.ByteSizePartial())
+      self.transaction_.OutputPartial(out)
+    if (self.has_failover_ms_):
+      out.putVarInt32(24)
+      out.putVarInt64(self.failover_ms_)
+    if (self.has_strong_):
+      out.putVarInt32(32)
+      out.putBoolean(self.strong_)
+    if (self.has_allow_deferred_):
+      out.putVarInt32(40)
+      out.putBoolean(self.allow_deferred_)
+    if (self.has_header_):
+      out.putVarInt32(50)
+      out.putVarInt32(self.header_.ByteSizePartial())
+      self.header_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    """Decode fields from decoder d; length-delimited submessages are
+    parsed from a bounded sub-decoder."""
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_key().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_transaction().TryMerge(tmp)
+        continue
+      if tt == 24:
+        self.set_failover_ms(d.getVarInt64())
+        continue
+      if tt == 32:
+        self.set_strong(d.getBoolean())
+        continue
+      if tt == 40:
+        self.set_allow_deferred(d.getBoolean())
+        continue
+      if tt == 50:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_header().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Debug text dump of the fields that are set."""
+    res=""
+    if self.has_header_:
+      res+=prefix+"header <\n"
+      res+=self.header_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.key_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("key%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    if self.has_transaction_:
+      res+=prefix+"transaction <\n"
+      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
+    if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
+    if self.has_allow_deferred_: res+=prefix+("allow_deferred: %s\n" % self.DebugFormatBool(self.allow_deferred_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    """Expand a sparse {tag: value} dict into a dense tuple indexed 0..maxtag."""
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  # Field-number constants (header was added later, hence number 6).
+  kheader = 6
+  kkey = 1
+  ktransaction = 2
+  kfailover_ms = 3
+  kstrong = 4
+  kallow_deferred = 5
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "transaction",
+    3: "failover_ms",
+    4: "strong",
+    5: "allow_deferred",
+    6: "header",
+  }, 6)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+    6: ProtocolBuffer.Encoder.STRING,
+  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetRequest'
+class GetResponse_Entity(ProtocolBuffer.ProtocolMessage):
+  has_entity_ = 0
+  entity_ = None
+  has_key_ = 0
+  key_ = None
+  has_version_ = 0
+  version_ = 0
+
+  def __init__(self, contents=None):
+    # Lock guards lazy creation of the entity_ and key_ submessages.
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  # Generated accessors: entity and key are lazily-created submessages
+  # (double-checked under lazy_init_lock_); version is a plain scalar.
+  def entity(self):
+    if self.entity_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.entity_ is None: self.entity_ = EntityProto()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.entity_
+
+  def mutable_entity(self): self.has_entity_ = 1; return self.entity()
+
+  def clear_entity(self):
+
+    if self.has_entity_:
+      self.has_entity_ = 0;
+      if self.entity_ is not None: self.entity_.Clear()
+
+  def has_entity(self): return self.has_entity_
+
+  def key(self):
+    if self.key_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.key_ is None: self.key_ = Reference()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.key_
+
+  def mutable_key(self): self.has_key_ = 1; return self.key()
+
+  def clear_key(self):
+
+    if self.has_key_:
+      self.has_key_ = 0;
+      if self.key_ is not None: self.key_.Clear()
+
+  def has_key(self): return self.has_key_
+
+  def version(self): return self.version_
+
+  def set_version(self, x):
+    self.has_version_ = 1
+    self.version_ = x
+
+  def clear_version(self):
+    if self.has_version_:
+      self.has_version_ = 0
+      self.version_ = 0
+
+  def has_version(self): return self.has_version_
+
+
+  def MergeFrom(self, x):
+    """Merge the set fields of another Entity group into this one."""
+    assert x is not self
+    if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity())
+    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
+    if (x.has_version()): self.set_version(x.version())
+
+  def Equals(self, x):
+    """Field-wise equality; returns 1 when equal, 0 otherwise."""
+    if x is self: return 1
+    if self.has_entity_ != x.has_entity_: return 0
+    if self.has_entity_ and self.entity_ != x.entity_: return 0
+    if self.has_key_ != x.has_key_: return 0
+    if self.has_key_ and self.key_ != x.key_: return 0
+    if self.has_version_ != x.has_version_: return 0
+    if self.has_version_ and self.version_ != x.version_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    """Initialized iff every set submessage is."""
+    initialized = 1
+    if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
+    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
+    if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSizePartial())
+    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
+    if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
+    return n
+
+  def Clear(self):
+    self.clear_entity()
+    self.clear_key()
+    self.clear_version()
+
+  def OutputUnchecked(self, out):
+    if (self.has_entity_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.entity_.ByteSize())
+      self.entity_.OutputUnchecked(out)
+    if (self.has_version_):
+      out.putVarInt32(24)
+      out.putVarInt64(self.version_)
+    if (self.has_key_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.key_.ByteSize())
+      self.key_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_entity_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.entity_.ByteSizePartial())
+      self.entity_.OutputPartial(out)
+    if (self.has_version_):
+      out.putVarInt32(24)
+      out.putVarInt64(self.version_)
+    if (self.has_key_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.key_.ByteSizePartial())
+      self.key_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 12: break
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_entity().TryMerge(tmp)
+        continue
+      if tt == 24:
+        self.set_version(d.getVarInt64())
+        continue
+      if tt == 34:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_key().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_entity_:
+      res+=prefix+"entity <\n"
+      res+=self.entity_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_key_:
+      res+=prefix+"key <\n"
+      res+=self.key_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
+    return res
+
+class GetResponse(ProtocolBuffer.ProtocolMessage):
+  has_in_order_ = 0
+  in_order_ = 1
+
+  def __init__(self, contents=None):
+    self.entity_ = []
+    self.deferred_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def entity_size(self): return len(self.entity_)
+  def entity_list(self): return self.entity_
+
+  def entity(self, i):
+    return self.entity_[i]
+
+  def mutable_entity(self, i):
+    return self.entity_[i]
+
+  def add_entity(self):
+    x = GetResponse_Entity()
+    self.entity_.append(x)
+    return x
+
+  def clear_entity(self):
+    self.entity_ = []
+  def deferred_size(self): return len(self.deferred_)
+  def deferred_list(self): return self.deferred_
+
+  def deferred(self, i):
+    return self.deferred_[i]
+
+  def mutable_deferred(self, i):
+    return self.deferred_[i]
+
+  def add_deferred(self):
+    x = Reference()
+    self.deferred_.append(x)
+    return x
+
+  def clear_deferred(self):
+    self.deferred_ = []
+  def in_order(self): return self.in_order_
+
+  def set_in_order(self, x):
+    self.has_in_order_ = 1
+    self.in_order_ = x
+
+  def clear_in_order(self):
+    if self.has_in_order_:
+      self.has_in_order_ = 0
+      self.in_order_ = 1
+
+  def has_in_order(self): return self.has_in_order_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
+    for i in xrange(x.deferred_size()): self.add_deferred().CopyFrom(x.deferred(i))
+    if (x.has_in_order()): self.set_in_order(x.in_order())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if len(self.entity_) != len(x.entity_): return 0
+    for e1, e2 in zip(self.entity_, x.entity_):
+      if e1 != e2: return 0
+    if len(self.deferred_) != len(x.deferred_): return 0
+    for e1, e2 in zip(self.deferred_, x.deferred_):
+      if e1 != e2: return 0
+    if self.has_in_order_ != x.has_in_order_: return 0
+    if self.has_in_order_ and self.in_order_ != x.in_order_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    for p in self.entity_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    for p in self.deferred_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += 2 * len(self.entity_)
+    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSize()
+    n += 1 * len(self.deferred_)
+    for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSize())
+    if (self.has_in_order_): n += 2
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    n += 2 * len(self.entity_)
+    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSizePartial()
+    n += 1 * len(self.deferred_)
+    for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSizePartial())
+    if (self.has_in_order_): n += 2
+    return n
+
+  def Clear(self):
+    self.clear_entity()
+    self.clear_deferred()
+    self.clear_in_order()
+
+  def OutputUnchecked(self, out):
+    for i in xrange(len(self.entity_)):
+      out.putVarInt32(11)
+      self.entity_[i].OutputUnchecked(out)
+      out.putVarInt32(12)
+    for i in xrange(len(self.deferred_)):
+      out.putVarInt32(42)
+      out.putVarInt32(self.deferred_[i].ByteSize())
+      self.deferred_[i].OutputUnchecked(out)
+    if (self.has_in_order_):
+      out.putVarInt32(48)
+      out.putBoolean(self.in_order_)
+
+  def OutputPartial(self, out):
+    for i in xrange(len(self.entity_)):
+      out.putVarInt32(11)
+      self.entity_[i].OutputPartial(out)
+      out.putVarInt32(12)
+    for i in xrange(len(self.deferred_)):
+      out.putVarInt32(42)
+      out.putVarInt32(self.deferred_[i].ByteSizePartial())
+      self.deferred_[i].OutputPartial(out)
+    if (self.has_in_order_):
+      out.putVarInt32(48)
+      out.putBoolean(self.in_order_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 11:
+        self.add_entity().TryMerge(d)
+        continue
+      if tt == 42:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_deferred().TryMerge(tmp)
+        continue
+      if tt == 48:
+        self.set_in_order(d.getBoolean())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    cnt=0
+    for e in self.entity_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("Entity%s {\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+      cnt+=1
+    cnt=0
+    for e in self.deferred_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("deferred%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    if self.has_in_order_: res+=prefix+("in_order: %s\n" % self.DebugFormatBool(self.in_order_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kEntityGroup = 1
+  kEntityentity = 2
+  kEntitykey = 4
+  kEntityversion = 3
+  kdeferred = 5
+  kin_order = 6
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Entity",
+    2: "entity",
+    3: "version",
+    4: "key",
+    5: "deferred",
+    6: "in_order",
+  }, 6)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetResponse'
+class PutRequest(ProtocolBuffer.ProtocolMessage):
+
+
+  CURRENT      =    0
+  SEQUENTIAL   =    1
+
+  _AutoIdPolicy_NAMES = {
+    0: "CURRENT",
+    1: "SEQUENTIAL",
+  }
+
+  def AutoIdPolicy_Name(cls, x): return cls._AutoIdPolicy_NAMES.get(x, "")
+  AutoIdPolicy_Name = classmethod(AutoIdPolicy_Name)
+
+  has_header_ = 0
+  header_ = None
+  has_transaction_ = 0
+  transaction_ = None
+  has_trusted_ = 0
+  trusted_ = 0
+  has_force_ = 0
+  force_ = 0
+  has_mark_changes_ = 0
+  mark_changes_ = 0
+  has_auto_id_policy_ = 0
+  auto_id_policy_ = 0
+
+  def __init__(self, contents=None):
+    self.entity_ = []
+    self.composite_index_ = []
+    self.snapshot_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def header(self):
+    if self.header_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.header_ is None: self.header_ = InternalHeader()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.header_
+
+  def mutable_header(self): self.has_header_ = 1; return self.header()
+
+  def clear_header(self):
+
+    if self.has_header_:
+      self.has_header_ = 0;
+      if self.header_ is not None: self.header_.Clear()
+
+  def has_header(self): return self.has_header_
+
+  def entity_size(self): return len(self.entity_)
+  def entity_list(self): return self.entity_
+
+  def entity(self, i):
+    return self.entity_[i]
+
+  def mutable_entity(self, i):
+    return self.entity_[i]
+
+  def add_entity(self):
+    x = EntityProto()
+    self.entity_.append(x)
+    return x
+
+  def clear_entity(self):
+    self.entity_ = []
+  def transaction(self):
+    if self.transaction_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.transaction_ is None: self.transaction_ = Transaction()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.transaction_
+
+  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+  def clear_transaction(self):
+
+    if self.has_transaction_:
+      self.has_transaction_ = 0;
+      if self.transaction_ is not None: self.transaction_.Clear()
+
+  def has_transaction(self): return self.has_transaction_
+
+  def composite_index_size(self): return len(self.composite_index_)
+  def composite_index_list(self): return self.composite_index_
+
+  def composite_index(self, i):
+    return self.composite_index_[i]
+
+  def mutable_composite_index(self, i):
+    return self.composite_index_[i]
+
+  def add_composite_index(self):
+    x = CompositeIndex()
+    self.composite_index_.append(x)
+    return x
+
+  def clear_composite_index(self):
+    self.composite_index_ = []
+  def trusted(self): return self.trusted_
+
+  def set_trusted(self, x):
+    self.has_trusted_ = 1
+    self.trusted_ = x
+
+  def clear_trusted(self):
+    if self.has_trusted_:
+      self.has_trusted_ = 0
+      self.trusted_ = 0
+
+  def has_trusted(self): return self.has_trusted_
+
+  def force(self): return self.force_
+
+  def set_force(self, x):
+    self.has_force_ = 1
+    self.force_ = x
+
+  def clear_force(self):
+    if self.has_force_:
+      self.has_force_ = 0
+      self.force_ = 0
+
+  def has_force(self): return self.has_force_
+
+  def mark_changes(self): return self.mark_changes_
+
+  def set_mark_changes(self, x):
+    self.has_mark_changes_ = 1
+    self.mark_changes_ = x
+
+  def clear_mark_changes(self):
+    if self.has_mark_changes_:
+      self.has_mark_changes_ = 0
+      self.mark_changes_ = 0
+
+  def has_mark_changes(self): return self.has_mark_changes_
+
+  def snapshot_size(self): return len(self.snapshot_)
+  def snapshot_list(self): return self.snapshot_
+
+  def snapshot(self, i):
+    return self.snapshot_[i]
+
+  def mutable_snapshot(self, i):
+    return self.snapshot_[i]
+
+  def add_snapshot(self):
+    x = Snapshot()
+    self.snapshot_.append(x)
+    return x
+
+  def clear_snapshot(self):
+    self.snapshot_ = []
+  def auto_id_policy(self): return self.auto_id_policy_
+
+  def set_auto_id_policy(self, x):
+    self.has_auto_id_policy_ = 1
+    self.auto_id_policy_ = x
+
+  def clear_auto_id_policy(self):
+    if self.has_auto_id_policy_:
+      self.has_auto_id_policy_ = 0
+      self.auto_id_policy_ = 0
+
+  def has_auto_id_policy(self): return self.has_auto_id_policy_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
+    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
+    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
+    if (x.has_trusted()): self.set_trusted(x.trusted())
+    if (x.has_force()): self.set_force(x.force())
+    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
+    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
+    if (x.has_auto_id_policy()): self.set_auto_id_policy(x.auto_id_policy())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_header_ != x.has_header_: return 0
+    if self.has_header_ and self.header_ != x.header_: return 0
+    if len(self.entity_) != len(x.entity_): return 0
+    for e1, e2 in zip(self.entity_, x.entity_):
+      if e1 != e2: return 0
+    if self.has_transaction_ != x.has_transaction_: return 0
+    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+    if len(self.composite_index_) != len(x.composite_index_): return 0
+    for e1, e2 in zip(self.composite_index_, x.composite_index_):
+      if e1 != e2: return 0
+    if self.has_trusted_ != x.has_trusted_: return 0
+    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
+    if self.has_force_ != x.has_force_: return 0
+    if self.has_force_ and self.force_ != x.force_: return 0
+    if self.has_mark_changes_ != x.has_mark_changes_: return 0
+    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
+    if len(self.snapshot_) != len(x.snapshot_): return 0
+    for e1, e2 in zip(self.snapshot_, x.snapshot_):
+      if e1 != e2: return 0
+    if self.has_auto_id_policy_ != x.has_auto_id_policy_: return 0
+    if self.has_auto_id_policy_ and self.auto_id_policy_ != x.auto_id_policy_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.entity_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.composite_index_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    for p in self.snapshot_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
+    n += 1 * len(self.entity_)
+    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSize())
+    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
+    n += 1 * len(self.composite_index_)
+    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
+    if (self.has_trusted_): n += 2
+    if (self.has_force_): n += 2
+    if (self.has_mark_changes_): n += 2
+    n += 1 * len(self.snapshot_)
+    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
+    if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
+    n += 1 * len(self.entity_)
+    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSizePartial())
+    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
+    n += 1 * len(self.composite_index_)
+    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
+    if (self.has_trusted_): n += 2
+    if (self.has_force_): n += 2
+    if (self.has_mark_changes_): n += 2
+    n += 1 * len(self.snapshot_)
+    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
+    if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
+    return n
+
+  def Clear(self):
+    self.clear_header()
+    self.clear_entity()
+    self.clear_transaction()
+    self.clear_composite_index()
+    self.clear_trusted()
+    self.clear_force()
+    self.clear_mark_changes()
+    self.clear_snapshot()
+    self.clear_auto_id_policy()
+
+  def OutputUnchecked(self, out):
+    for i in xrange(len(self.entity_)):
+      out.putVarInt32(10)
+      out.putVarInt32(self.entity_[i].ByteSize())
+      self.entity_[i].OutputUnchecked(out)
+    if (self.has_transaction_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.transaction_.ByteSize())
+      self.transaction_.OutputUnchecked(out)
+    for i in xrange(len(self.composite_index_)):
+      out.putVarInt32(26)
+      out.putVarInt32(self.composite_index_[i].ByteSize())
+      self.composite_index_[i].OutputUnchecked(out)
+    if (self.has_trusted_):
+      out.putVarInt32(32)
+      out.putBoolean(self.trusted_)
+    if (self.has_force_):
+      out.putVarInt32(56)
+      out.putBoolean(self.force_)
+    if (self.has_mark_changes_):
+      out.putVarInt32(64)
+      out.putBoolean(self.mark_changes_)
+    for i in xrange(len(self.snapshot_)):
+      out.putVarInt32(74)
+      out.putVarInt32(self.snapshot_[i].ByteSize())
+      self.snapshot_[i].OutputUnchecked(out)
+    if (self.has_auto_id_policy_):
+      out.putVarInt32(80)
+      out.putVarInt32(self.auto_id_policy_)
+    if (self.has_header_):
+      out.putVarInt32(90)
+      out.putVarInt32(self.header_.ByteSize())
+      self.header_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    for i in xrange(len(self.entity_)):
+      out.putVarInt32(10)
+      out.putVarInt32(self.entity_[i].ByteSizePartial())
+      self.entity_[i].OutputPartial(out)
+    if (self.has_transaction_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.transaction_.ByteSizePartial())
+      self.transaction_.OutputPartial(out)
+    for i in xrange(len(self.composite_index_)):
+      out.putVarInt32(26)
+      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
+      self.composite_index_[i].OutputPartial(out)
+    if (self.has_trusted_):
+      out.putVarInt32(32)
+      out.putBoolean(self.trusted_)
+    if (self.has_force_):
+      out.putVarInt32(56)
+      out.putBoolean(self.force_)
+    if (self.has_mark_changes_):
+      out.putVarInt32(64)
+      out.putBoolean(self.mark_changes_)
+    for i in xrange(len(self.snapshot_)):
+      out.putVarInt32(74)
+      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
+      self.snapshot_[i].OutputPartial(out)
+    if (self.has_auto_id_policy_):
+      out.putVarInt32(80)
+      out.putVarInt32(self.auto_id_policy_)
+    if (self.has_header_):
+      out.putVarInt32(90)
+      out.putVarInt32(self.header_.ByteSizePartial())
+      self.header_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_entity().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_transaction().TryMerge(tmp)
+        continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_composite_index().TryMerge(tmp)
+        continue
+      if tt == 32:
+        self.set_trusted(d.getBoolean())
+        continue
+      if tt == 56:
+        self.set_force(d.getBoolean())
+        continue
+      if tt == 64:
+        self.set_mark_changes(d.getBoolean())
+        continue
+      if tt == 74:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_snapshot().TryMerge(tmp)
+        continue
+      if tt == 80:
+        self.set_auto_id_policy(d.getVarInt32())
+        continue
+      if tt == 90:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_header().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_header_:
+      res+=prefix+"header <\n"
+      res+=self.header_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.entity_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("entity%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    if self.has_transaction_:
+      res+=prefix+"transaction <\n"
+      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.composite_index_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("composite_index%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
+    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
+    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
+    cnt=0
+    for e in self.snapshot_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("snapshot%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    if self.has_auto_id_policy_: res+=prefix+("auto_id_policy: %s\n" % self.DebugFormatInt32(self.auto_id_policy_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kheader = 11
+  kentity = 1
+  ktransaction = 2
+  kcomposite_index = 3
+  ktrusted = 4
+  kforce = 7
+  kmark_changes = 8
+  ksnapshot = 9
+  kauto_id_policy = 10
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "entity",
+    2: "transaction",
+    3: "composite_index",
+    4: "trusted",
+    7: "force",
+    8: "mark_changes",
+    9: "snapshot",
+    10: "auto_id_policy",
+    11: "header",
+  }, 11)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    7: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.NUMERIC,
+    9: ProtocolBuffer.Encoder.STRING,
+    10: ProtocolBuffer.Encoder.NUMERIC,
+    11: ProtocolBuffer.Encoder.STRING,
+  }, 11, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutRequest'
+class PutResponse(ProtocolBuffer.ProtocolMessage):
+  has_cost_ = 0
+  cost_ = None
+
+  def __init__(self, contents=None):
+    self.key_ = []
+    self.version_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def key_size(self): return len(self.key_)
+  def key_list(self): return self.key_
+
+  def key(self, i):
+    return self.key_[i]
+
+  def mutable_key(self, i):
+    return self.key_[i]
+
+  def add_key(self):
+    x = Reference()
+    self.key_.append(x)
+    return x
+
+  def clear_key(self):
+    self.key_ = []
+  def cost(self):
+    if self.cost_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.cost_ is None: self.cost_ = Cost()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.cost_
+
+  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
+
+  def clear_cost(self):
+
+    if self.has_cost_:
+      self.has_cost_ = 0;
+      if self.cost_ is not None: self.cost_.Clear()
+
+  def has_cost(self): return self.has_cost_
+
+  def version_size(self): return len(self.version_)
+  def version_list(self): return self.version_
+
+  def version(self, i):
+    return self.version_[i]
+
+  def set_version(self, i, x):
+    self.version_[i] = x
+
+  def add_version(self, x):
+    self.version_.append(x)
+
+  def clear_version(self):
+    self.version_ = []
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
+    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
+    for i in xrange(x.version_size()): self.add_version(x.version(i))
+
+  def Equals(self, x):
+    if x is self: return 1
+    if len(self.key_) != len(x.key_): return 0
+    for e1, e2 in zip(self.key_, x.key_):
+      if e1 != e2: return 0
+    if self.has_cost_ != x.has_cost_: return 0
+    if self.has_cost_ and self.cost_ != x.cost_: return 0
+    if len(self.version_) != len(x.version_): return 0
+    for e1, e2 in zip(self.version_, x.version_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    for p in self.key_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += 1 * len(self.key_)
+    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
+    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
+    n += 1 * len(self.version_)
+    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    n += 1 * len(self.key_)
+    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
+    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
+    n += 1 * len(self.version_)
+    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
+    return n
+
+  def Clear(self):
+    self.clear_key()
+    self.clear_cost()
+    self.clear_version()
+
+  def OutputUnchecked(self, out):
+    for i in xrange(len(self.key_)):
+      out.putVarInt32(10)
+      out.putVarInt32(self.key_[i].ByteSize())
+      self.key_[i].OutputUnchecked(out)
+    if (self.has_cost_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.cost_.ByteSize())
+      self.cost_.OutputUnchecked(out)
+    for i in xrange(len(self.version_)):
+      out.putVarInt32(24)
+      out.putVarInt64(self.version_[i])
+
+  def OutputPartial(self, out):
+    for i in xrange(len(self.key_)):
+      out.putVarInt32(10)
+      out.putVarInt32(self.key_[i].ByteSizePartial())
+      self.key_[i].OutputPartial(out)
+    if (self.has_cost_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.cost_.ByteSizePartial())
+      self.cost_.OutputPartial(out)
+    for i in xrange(len(self.version_)):
+      out.putVarInt32(24)
+      out.putVarInt64(self.version_[i])
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_key().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_cost().TryMerge(tmp)
+        continue
+      if tt == 24:
+        self.add_version(d.getVarInt64())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    cnt=0
+    for e in self.key_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("key%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    if self.has_cost_:
+      res+=prefix+"cost <\n"
+      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.version_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
+      cnt+=1
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kkey = 1
+  kcost = 2
+  kversion = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "cost",
+    3: "version",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutResponse'
class TouchRequest(ProtocolBuffer.ProtocolMessage):
  """Auto-generated message class for apphosting_datastore_v3.TouchRequest.

  Fields and their proto field numbers (see the k* constants near the end):
    key             = 1   repeated Reference
    composite_index = 2   repeated CompositeIndex
    force           = 3   optional bool
    snapshot        = 9   repeated Snapshot
    header          = 10  optional InternalHeader (lazily allocated)
  """

  # Presence flags and scalar defaults for the optional fields; the header_
  # submessage starts as None and is created on first access (see header()).
  has_header_ = 0
  header_ = None
  has_force_ = 0
  force_ = 0

  def __init__(self, contents=None):
    # Repeated fields need per-instance lists; `contents`, if supplied, is a
    # serialized TouchRequest that is parsed via MergeFromString.
    self.key_ = []
    self.composite_index_ = []
    self.snapshot_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def header(self):
    # Lazily allocates the InternalHeader, double-checked under
    # lazy_init_lock_ so it is created at most once.
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_

  def mutable_header(self): self.has_header_ = 1; return self.header()

  def clear_header(self):
    # Drops presence but keeps the allocated submessage (cleared in place).
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()

  def has_header(self): return self.has_header_

  # -- repeated `key` field (Reference messages) --
  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_

  def key(self, i):
    return self.key_[i]

  def mutable_key(self, i):
    return self.key_[i]

  def add_key(self):
    x = Reference()
    self.key_.append(x)
    return x

  def clear_key(self):
    self.key_ = []
  # -- repeated `composite_index` field (CompositeIndex messages) --
  def composite_index_size(self): return len(self.composite_index_)
  def composite_index_list(self): return self.composite_index_

  def composite_index(self, i):
    return self.composite_index_[i]

  def mutable_composite_index(self, i):
    return self.composite_index_[i]

  def add_composite_index(self):
    x = CompositeIndex()
    self.composite_index_.append(x)
    return x

  def clear_composite_index(self):
    self.composite_index_ = []
  # -- optional bool `force` field --
  def force(self): return self.force_

  def set_force(self, x):
    self.has_force_ = 1
    self.force_ = x

  def clear_force(self):
    if self.has_force_:
      self.has_force_ = 0
      self.force_ = 0

  def has_force(self): return self.has_force_

  # -- repeated `snapshot` field (Snapshot messages) --
  def snapshot_size(self): return len(self.snapshot_)
  def snapshot_list(self): return self.snapshot_

  def snapshot(self, i):
    return self.snapshot_[i]

  def mutable_snapshot(self, i):
    return self.snapshot_[i]

  def add_snapshot(self):
    x = Snapshot()
    self.snapshot_.append(x)
    return x

  def clear_snapshot(self):
    self.snapshot_ = []

  def MergeFrom(self, x):
    """Merges every set field of x into self; repeated fields are appended."""
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
    if (x.has_force()): self.set_force(x.force())
    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if len(self.composite_index_) != len(x.composite_index_): return 0
    for e1, e2 in zip(self.composite_index_, x.composite_index_):
      if e1 != e2: return 0
    if self.has_force_ != x.has_force_: return 0
    if self.has_force_ and self.force_ != x.force_: return 0
    if len(self.snapshot_) != len(x.snapshot_): return 0
    for e1, e2 in zip(self.snapshot_, x.snapshot_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff every present submessage is itself initialized;
    failure reasons are appended to debug_strs when given."""
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.composite_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.snapshot_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size in bytes: one tag byte per field occurrence, plus the
    length-prefixed submessage payloads; a set bool costs 2 (tag + value)."""
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
    if (self.has_force_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
    return n

  def ByteSizePartial(self):
    """Like ByteSize, but uses the submessages' partial sizes (tolerates
    uninitialized required fields inside them)."""
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
    if (self.has_force_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
    return n

  def Clear(self):
    """Resets every field to its default/empty state."""
    self.clear_header()
    self.clear_key()
    self.clear_composite_index()
    self.clear_force()
    self.clear_snapshot()

  def OutputUnchecked(self, out):
    """Writes the wire format without initialization checks, in ascending
    wire-tag order: key (tag 10), composite_index (18), force (24),
    snapshot (74), header (82)."""
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(18)
      out.putVarInt32(self.composite_index_[i].ByteSize())
      self.composite_index_[i].OutputUnchecked(out)
    if (self.has_force_):
      out.putVarInt32(24)
      out.putBoolean(self.force_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSize())
      self.snapshot_[i].OutputUnchecked(out)
    if (self.has_header_):
      out.putVarInt32(82)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)

  def OutputPartial(self, out):
    """Same tag order as OutputUnchecked, but emits partial submessages."""
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSizePartial())
      self.key_[i].OutputPartial(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(18)
      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
      self.composite_index_[i].OutputPartial(out)
    if (self.has_force_):
      out.putVarInt32(24)
      out.putBoolean(self.force_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
      self.snapshot_[i].OutputPartial(out)
    if (self.has_header_):
      out.putVarInt32(82)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)

  def TryMerge(self, d):
    """Decodes fields from decoder d; unknown tags are skipped via
    skipData, and tag 0 signals a corrupt buffer."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_force(d.getBoolean())
        continue
      if tt == 74:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_snapshot().TryMerge(tmp)
        continue
      if tt == 82:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue


      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Debug text form; printElemNumber appends (i) to repeated field names."""
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
    cnt=0
    for e in self.snapshot_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("snapshot%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res


  # Helper executed at class-definition time (hence no self): expands the
  # sparse {tag: value} dict into a dense tuple indexed by tag number.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Proto field numbers.
  kheader = 10
  kkey = 1
  kcomposite_index = 2
  kforce = 3
  ksnapshot = 9

  # Tag-number -> field-name table (index 0 is the ErrorCode sentinel).
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "composite_index",
    3: "force",
    9: "snapshot",
    10: "header",
  }, 10)

  # Tag-number -> wire-type table (STRING = length-delimited).
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.STRING,
  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)


  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchRequest'
class TouchResponse(ProtocolBuffer.ProtocolMessage):
  """Auto-generated message class for apphosting_datastore_v3.TouchResponse.

  Single field: cost = 1 (optional Cost submessage, lazily allocated).
  """

  # Presence flag and lazily-created submessage slot.
  has_cost_ = 0
  cost_ = None

  def __init__(self, contents=None):
    # `contents`, if supplied, is a serialized message parsed immediately.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def cost(self):
    # Lazily allocates the Cost submessage, double-checked under the lock.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_

  def mutable_cost(self): self.has_cost_ = 1; return self.cost()

  def clear_cost(self):
    # Drops presence but keeps the allocated submessage (cleared in place).
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()

  def has_cost(self): return self.has_cost_


  def MergeFrom(self, x):
    """Merges the cost field of x into self when set."""
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff the cost submessage, when present, is initialized."""
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    """Serialized size: tag byte plus length-prefixed cost payload."""
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    return n

  def ByteSizePartial(self):
    """Like ByteSize, but uses the submessage's partial size."""
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
    return n

  def Clear(self):
    self.clear_cost()

  def OutputUnchecked(self, out):
    """Writes the wire format: cost under tag 10 (field 1, length-delimited)."""
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)

  def OutputPartial(self, out):
    """Same as OutputUnchecked, but emits a partial submessage."""
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSizePartial())
      self.cost_.OutputPartial(out)

  def TryMerge(self, d):
    """Decodes fields from decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue


      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Debug text form of the message."""
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res


  # Helper executed at class-definition time (hence no self): expands the
  # sparse {tag: value} dict into a dense tuple indexed by tag number.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Proto field number.
  kcost = 1

  # Tag-number -> field-name table (index 0 is the ErrorCode sentinel).
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "cost",
  }, 1)

  # Tag-number -> wire-type table (STRING = length-delimited).
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)


  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchResponse'
class DeleteRequest(ProtocolBuffer.ProtocolMessage):
  """Auto-generated message class for apphosting_datastore_v3.DeleteRequest.

  Fields and their proto field numbers (see the k* constants near the end):
    trusted      = 4   optional bool
    transaction  = 5   optional Transaction (lazily allocated)
    key          = 6   repeated Reference
    force        = 7   optional bool
    mark_changes = 8   optional bool
    snapshot     = 9   repeated Snapshot
    header       = 10  optional InternalHeader (lazily allocated)
  """

  # Presence flags and scalar defaults; submessages start as None and are
  # created on first access under lazy_init_lock_.
  has_header_ = 0
  header_ = None
  has_transaction_ = 0
  transaction_ = None
  has_trusted_ = 0
  trusted_ = 0
  has_force_ = 0
  force_ = 0
  has_mark_changes_ = 0
  mark_changes_ = 0

  def __init__(self, contents=None):
    # Repeated fields need per-instance lists; `contents`, if supplied, is a
    # serialized DeleteRequest that is parsed via MergeFromString.
    self.key_ = []
    self.snapshot_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def header(self):
    # Lazily allocates the InternalHeader, double-checked under the lock.
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_

  def mutable_header(self): self.has_header_ = 1; return self.header()

  def clear_header(self):
    # Drops presence but keeps the allocated submessage (cleared in place).
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()

  def has_header(self): return self.has_header_

  # -- repeated `key` field (Reference messages) --
  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_

  def key(self, i):
    return self.key_[i]

  def mutable_key(self, i):
    return self.key_[i]

  def add_key(self):
    x = Reference()
    self.key_.append(x)
    return x

  def clear_key(self):
    self.key_ = []
  # -- optional `transaction` submessage, lazily allocated like header --
  def transaction(self):
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_

  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()

  def clear_transaction(self):
    # Drops presence but keeps the allocated submessage (cleared in place).
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()

  def has_transaction(self): return self.has_transaction_

  # -- optional bool `trusted` field --
  def trusted(self): return self.trusted_

  def set_trusted(self, x):
    self.has_trusted_ = 1
    self.trusted_ = x

  def clear_trusted(self):
    if self.has_trusted_:
      self.has_trusted_ = 0
      self.trusted_ = 0

  def has_trusted(self): return self.has_trusted_

  # -- optional bool `force` field --
  def force(self): return self.force_

  def set_force(self, x):
    self.has_force_ = 1
    self.force_ = x

  def clear_force(self):
    if self.has_force_:
      self.has_force_ = 0
      self.force_ = 0

  def has_force(self): return self.has_force_

  # -- optional bool `mark_changes` field --
  def mark_changes(self): return self.mark_changes_

  def set_mark_changes(self, x):
    self.has_mark_changes_ = 1
    self.mark_changes_ = x

  def clear_mark_changes(self):
    if self.has_mark_changes_:
      self.has_mark_changes_ = 0
      self.mark_changes_ = 0

  def has_mark_changes(self): return self.has_mark_changes_

  # -- repeated `snapshot` field (Snapshot messages) --
  def snapshot_size(self): return len(self.snapshot_)
  def snapshot_list(self): return self.snapshot_

  def snapshot(self, i):
    return self.snapshot_[i]

  def mutable_snapshot(self, i):
    return self.snapshot_[i]

  def add_snapshot(self):
    x = Snapshot()
    self.snapshot_.append(x)
    return x

  def clear_snapshot(self):
    self.snapshot_ = []

  def MergeFrom(self, x):
    """Merges every set field of x into self; repeated fields are appended."""
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
    if (x.has_trusted()): self.set_trusted(x.trusted())
    if (x.has_force()): self.set_force(x.force())
    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    if self.has_trusted_ != x.has_trusted_: return 0
    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
    if self.has_force_ != x.has_force_: return 0
    if self.has_force_ and self.force_ != x.force_: return 0
    if self.has_mark_changes_ != x.has_mark_changes_: return 0
    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
    if len(self.snapshot_) != len(x.snapshot_): return 0
    for e1, e2 in zip(self.snapshot_, x.snapshot_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff every present submessage is itself initialized;
    failure reasons are appended to debug_strs when given."""
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    for p in self.snapshot_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size in bytes: one tag byte per field occurrence, plus the
    length-prefixed submessage payloads; a set bool costs 2 (tag + value)."""
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    if (self.has_trusted_): n += 2
    if (self.has_force_): n += 2
    if (self.has_mark_changes_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
    return n

  def ByteSizePartial(self):
    """Like ByteSize, but uses the submessages' partial sizes."""
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
    if (self.has_trusted_): n += 2
    if (self.has_force_): n += 2
    if (self.has_mark_changes_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
    return n

  def Clear(self):
    """Resets every field to its default/empty state."""
    self.clear_header()
    self.clear_key()
    self.clear_transaction()
    self.clear_trusted()
    self.clear_force()
    self.clear_mark_changes()
    self.clear_snapshot()

  def OutputUnchecked(self, out):
    """Writes the wire format without initialization checks, in ascending
    wire-tag order: trusted (tag 32), transaction (42), key (50),
    force (56), mark_changes (64), snapshot (74), header (82)."""
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_transaction_):
      out.putVarInt32(42)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)
    for i in xrange(len(self.key_)):
      out.putVarInt32(50)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    if (self.has_force_):
      out.putVarInt32(56)
      out.putBoolean(self.force_)
    if (self.has_mark_changes_):
      out.putVarInt32(64)
      out.putBoolean(self.mark_changes_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSize())
      self.snapshot_[i].OutputUnchecked(out)
    if (self.has_header_):
      out.putVarInt32(82)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)

  def OutputPartial(self, out):
    """Same tag order as OutputUnchecked, but emits partial submessages."""
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_transaction_):
      out.putVarInt32(42)
      out.putVarInt32(self.transaction_.ByteSizePartial())
      self.transaction_.OutputPartial(out)
    for i in xrange(len(self.key_)):
      out.putVarInt32(50)
      out.putVarInt32(self.key_[i].ByteSizePartial())
      self.key_[i].OutputPartial(out)
    if (self.has_force_):
      out.putVarInt32(56)
      out.putBoolean(self.force_)
    if (self.has_mark_changes_):
      out.putVarInt32(64)
      out.putBoolean(self.mark_changes_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
      self.snapshot_[i].OutputPartial(out)
    if (self.has_header_):
      out.putVarInt32(82)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)

  def TryMerge(self, d):
    """Decodes fields from decoder d; unknown tags are skipped via
    skipData, and tag 0 signals a corrupt buffer."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 32:
        self.set_trusted(d.getBoolean())
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 50:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 56:
        self.set_force(d.getBoolean())
        continue
      if tt == 64:
        self.set_mark_changes(d.getBoolean())
        continue
      if tt == 74:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_snapshot().TryMerge(tmp)
        continue
      if tt == 82:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue


      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Debug text form; printElemNumber appends (i) to repeated field names."""
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
    cnt=0
    for e in self.snapshot_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("snapshot%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res


  # Helper executed at class-definition time (hence no self): expands the
  # sparse {tag: value} dict into a dense tuple indexed by tag number.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Proto field numbers.
  kheader = 10
  kkey = 6
  ktransaction = 5
  ktrusted = 4
  kforce = 7
  kmark_changes = 8
  ksnapshot = 9

  # Tag-number -> field-name table (index 0 is the ErrorCode sentinel).
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    4: "trusted",
    5: "transaction",
    6: "key",
    7: "force",
    8: "mark_changes",
    9: "snapshot",
    10: "header",
  }, 10)

  # Tag-number -> wire-type table (STRING = length-delimited).
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.STRING,
    7: ProtocolBuffer.Encoder.NUMERIC,
    8: ProtocolBuffer.Encoder.NUMERIC,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.STRING,
  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)


  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteRequest'
class DeleteResponse(ProtocolBuffer.ProtocolMessage):
  """Auto-generated message class for apphosting_datastore_v3.DeleteResponse.

  Fields: cost = 1 (optional Cost submessage, lazily allocated) and
  version = 3 (repeated int64, varint-encoded on the wire).
  """

  # Presence flag and lazily-created submessage slot.
  has_cost_ = 0
  cost_ = None

  def __init__(self, contents=None):
    # Repeated `version` field needs a per-instance list; `contents`, if
    # supplied, is a serialized message parsed via MergeFromString.
    self.version_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def cost(self):
    # Lazily allocates the Cost submessage, double-checked under the lock.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_

  def mutable_cost(self): self.has_cost_ = 1; return self.cost()

  def clear_cost(self):
    # Drops presence but keeps the allocated submessage (cleared in place).
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()

  def has_cost(self): return self.has_cost_

  # -- repeated scalar `version` field --
  def version_size(self): return len(self.version_)
  def version_list(self): return self.version_

  def version(self, i):
    return self.version_[i]

  def set_version(self, i, x):
    self.version_[i] = x

  def add_version(self, x):
    self.version_.append(x)

  def clear_version(self):
    self.version_ = []


  def MergeFrom(self, x):
    """Merges every set field of x into self; version values are appended."""
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
    for i in xrange(x.version_size()): self.add_version(x.version(i))

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 in the generated-code style."""
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    if len(self.version_) != len(x.version_): return 0
    for e1, e2 in zip(self.version_, x.version_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff the cost submessage, when present, is initialized."""
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    """Serialized size: tag byte + length-prefixed cost payload, and one
    tag byte + varint per version entry."""
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    return n

  def ByteSizePartial(self):
    """Like ByteSize, but uses the cost submessage's partial size."""
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    return n

  def Clear(self):
    self.clear_cost()
    self.clear_version()

  def OutputUnchecked(self, out):
    """Writes the wire format: cost under tag 10, each version under tag 24."""
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(24)
      out.putVarInt64(self.version_[i])

  def OutputPartial(self, out):
    """Same as OutputUnchecked, but emits a partial cost submessage."""
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSizePartial())
      self.cost_.OutputPartial(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(24)
      out.putVarInt64(self.version_[i])

  def TryMerge(self, d):
    """Decodes fields from decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if tt == 24:
        self.add_version(d.getVarInt64())
        continue


      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Debug text form; printElemNumber appends (i) to repeated field names."""
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.version_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
      cnt+=1
    return res


  # Helper executed at class-definition time (hence no self): expands the
  # sparse {tag: value} dict into a dense tuple indexed by tag number.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Proto field numbers.
  kcost = 1
  kversion = 3

  # Tag-number -> field-name table (index 0 is the ErrorCode sentinel).
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "cost",
    3: "version",
  }, 3)

  # Tag-number -> wire-type table (STRING = length-delimited).
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)


  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteResponse'
+class NextRequest(ProtocolBuffer.ProtocolMessage):
+  has_header_ = 0
+  header_ = None
+  has_cursor_ = 0
+  has_count_ = 0
+  count_ = 0
+  has_offset_ = 0
+  offset_ = 0
+  has_compile_ = 0
+  compile_ = 0
+
+  def __init__(self, contents=None):
+    self.cursor_ = Cursor()
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def header(self):
+    if self.header_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.header_ is None: self.header_ = InternalHeader()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.header_
+
+  def mutable_header(self): self.has_header_ = 1; return self.header()
+
+  def clear_header(self):
+
+    if self.has_header_:
+      self.has_header_ = 0;
+      if self.header_ is not None: self.header_.Clear()
+
+  def has_header(self): return self.has_header_
+
+  def cursor(self): return self.cursor_
+
+  def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor_
+
+  def clear_cursor(self):self.has_cursor_ = 0; self.cursor_.Clear()
+
+  def has_cursor(self): return self.has_cursor_
+
+  def count(self): return self.count_
+
+  def set_count(self, x):
+    self.has_count_ = 1
+    self.count_ = x
+
+  def clear_count(self):
+    if self.has_count_:
+      self.has_count_ = 0
+      self.count_ = 0
+
+  def has_count(self): return self.has_count_
+
+  def offset(self): return self.offset_
+
+  def set_offset(self, x):
+    self.has_offset_ = 1
+    self.offset_ = x
+
+  def clear_offset(self):
+    if self.has_offset_:
+      self.has_offset_ = 0
+      self.offset_ = 0
+
+  def has_offset(self): return self.has_offset_
+
+  def compile(self): return self.compile_
+
+  def set_compile(self, x):
+    self.has_compile_ = 1
+    self.compile_ = x
+
+  def clear_compile(self):
+    if self.has_compile_:
+      self.has_compile_ = 0
+      self.compile_ = 0
+
+  def has_compile(self): return self.has_compile_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
+    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
+    if (x.has_count()): self.set_count(x.count())
+    if (x.has_offset()): self.set_offset(x.offset())
+    if (x.has_compile()): self.set_compile(x.compile())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_header_ != x.has_header_: return 0
+    if self.has_header_ and self.header_ != x.header_: return 0
+    if self.has_cursor_ != x.has_cursor_: return 0
+    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
+    if self.has_count_ != x.has_count_: return 0
+    if self.has_count_ and self.count_ != x.count_: return 0
+    if self.has_offset_ != x.has_offset_: return 0
+    if self.has_offset_ and self.offset_ != x.offset_: return 0
+    if self.has_compile_ != x.has_compile_: return 0
+    if self.has_compile_ and self.compile_ != x.compile_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
+    if (not self.has_cursor_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: cursor not set.')
+    elif not self.cursor_.IsInitialized(debug_strs): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
+    n += self.lengthString(self.cursor_.ByteSize())
+    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
+    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+    if (self.has_compile_): n += 2
+    return n + 1
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
+    if (self.has_cursor_):
+      n += 1
+      n += self.lengthString(self.cursor_.ByteSizePartial())
+    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
+    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+    if (self.has_compile_): n += 2
+    return n
+
+  def Clear(self):
+    self.clear_header()
+    self.clear_cursor()
+    self.clear_count()
+    self.clear_offset()
+    self.clear_compile()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.cursor_.ByteSize())
+    self.cursor_.OutputUnchecked(out)
+    if (self.has_count_):
+      out.putVarInt32(16)
+      out.putVarInt32(self.count_)
+    if (self.has_compile_):
+      out.putVarInt32(24)
+      out.putBoolean(self.compile_)
+    if (self.has_offset_):
+      out.putVarInt32(32)
+      out.putVarInt32(self.offset_)
+    if (self.has_header_):
+      out.putVarInt32(42)
+      out.putVarInt32(self.header_.ByteSize())
+      self.header_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    """Serialize only the fields that are set (tolerates missing required `cursor`)."""
+    if (self.has_cursor_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.cursor_.ByteSizePartial())
+      self.cursor_.OutputPartial(out)
+    if (self.has_count_):
+      out.putVarInt32(16)
+      out.putVarInt32(self.count_)
+    if (self.has_compile_):
+      out.putVarInt32(24)
+      out.putBoolean(self.compile_)
+    if (self.has_offset_):
+      out.putVarInt32(32)
+      out.putVarInt32(self.offset_)
+    if (self.has_header_):
+      out.putVarInt32(42)
+      out.putVarInt32(self.header_.ByteSizePartial())
+      self.header_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    """Decode fields from Decoder `d` and merge into self; unknown tags are skipped, tag 0 raises."""
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        # Sub-decoder bounded to the submessage bytes; outer decoder skips past them.
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_cursor().TryMerge(tmp)
+        continue
+      if tt == 16:
+        self.set_count(d.getVarInt32())
+        continue
+      if tt == 24:
+        self.set_compile(d.getBoolean())
+        continue
+      if tt == 32:
+        self.set_offset(d.getVarInt32())
+        continue
+      if tt == 42:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_header().TryMerge(tmp)
+        continue
+
+
+      # Tag 0 is never valid: buffer is corrupt or truncated.
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Render set fields as indented protobuf text format."""
+    res=""
+    if self.has_header_:
+      res+=prefix+"header <\n"
+      res+=self.header_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_cursor_:
+      res+=prefix+"cursor <\n"
+      res+=self.cursor_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
+    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    # Densify a sparse {tag: value} map into a tuple indexed by tag number 0..maxtag.
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  # Proto field-number constants (k<field> = tag number on the wire).
+  kheader = 5
+  kcursor = 1
+  kcount = 2
+  koffset = 4
+  kcompile = 3
+
+  # Tag -> field-name table for text output.
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cursor",
+    2: "count",
+    3: "compile",
+    4: "offset",
+    5: "header",
+  }, 5)
+
+  # Tag -> wire-type table used by the generic decoder.
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.NextRequest'
+class QueryResult(ProtocolBuffer.ProtocolMessage):
+  """Auto-generated wire codec for apphosting_datastore_v3.QueryResult; do not hand-edit the logic (regenerated from the .proto)."""
+  # Presence flags and scalar defaults; message-typed fields default to None and are created lazily.
+  has_cursor_ = 0
+  cursor_ = None
+  has_skipped_results_ = 0
+  skipped_results_ = 0
+  has_more_results_ = 0
+  more_results_ = 0
+  has_keys_only_ = 0
+  keys_only_ = 0
+  has_index_only_ = 0
+  index_only_ = 0
+  has_small_ops_ = 0
+  small_ops_ = 0
+  has_compiled_query_ = 0
+  compiled_query_ = None
+  has_compiled_cursor_ = 0
+  compiled_cursor_ = None
+
+  def __init__(self, contents=None):
+    # Repeated fields live per-instance; lazy_init_lock_ guards lazy creation of optional submessages.
+    self.result_ = []
+    self.index_ = []
+    self.version_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def cursor(self):
+    # Double-checked lazy init under lazy_init_lock_.
+    if self.cursor_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.cursor_ is None: self.cursor_ = Cursor()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.cursor_
+
+  def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor()
+
+  def clear_cursor(self):
+
+    if self.has_cursor_:
+      self.has_cursor_ = 0;
+      if self.cursor_ is not None: self.cursor_.Clear()
+
+  def has_cursor(self): return self.has_cursor_
+
+  def result_size(self): return len(self.result_)
+  def result_list(self): return self.result_
+
+  def result(self, i):
+    return self.result_[i]
+
+  def mutable_result(self, i):
+    return self.result_[i]
+
+  def add_result(self):
+    x = EntityProto()
+    self.result_.append(x)
+    return x
+
+  def clear_result(self):
+    self.result_ = []
+  def skipped_results(self): return self.skipped_results_
+
+  def set_skipped_results(self, x):
+    self.has_skipped_results_ = 1
+    self.skipped_results_ = x
+
+  def clear_skipped_results(self):
+    if self.has_skipped_results_:
+      self.has_skipped_results_ = 0
+      self.skipped_results_ = 0
+
+  def has_skipped_results(self): return self.has_skipped_results_
+
+  def more_results(self): return self.more_results_
+
+  def set_more_results(self, x):
+    self.has_more_results_ = 1
+    self.more_results_ = x
+
+  def clear_more_results(self):
+    if self.has_more_results_:
+      self.has_more_results_ = 0
+      self.more_results_ = 0
+
+  def has_more_results(self): return self.has_more_results_
+
+  def keys_only(self): return self.keys_only_
+
+  def set_keys_only(self, x):
+    self.has_keys_only_ = 1
+    self.keys_only_ = x
+
+  def clear_keys_only(self):
+    if self.has_keys_only_:
+      self.has_keys_only_ = 0
+      self.keys_only_ = 0
+
+  def has_keys_only(self): return self.has_keys_only_
+
+  def index_only(self): return self.index_only_
+
+  def set_index_only(self, x):
+    self.has_index_only_ = 1
+    self.index_only_ = x
+
+  def clear_index_only(self):
+    if self.has_index_only_:
+      self.has_index_only_ = 0
+      self.index_only_ = 0
+
+  def has_index_only(self): return self.has_index_only_
+
+  def small_ops(self): return self.small_ops_
+
+  def set_small_ops(self, x):
+    self.has_small_ops_ = 1
+    self.small_ops_ = x
+
+  def clear_small_ops(self):
+    if self.has_small_ops_:
+      self.has_small_ops_ = 0
+      self.small_ops_ = 0
+
+  def has_small_ops(self): return self.has_small_ops_
+
+  def compiled_query(self):
+    if self.compiled_query_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.compiled_query_ is None: self.compiled_query_ = CompiledQuery()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.compiled_query_
+
+  def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query()
+
+  def clear_compiled_query(self):
+
+    if self.has_compiled_query_:
+      self.has_compiled_query_ = 0;
+      if self.compiled_query_ is not None: self.compiled_query_.Clear()
+
+  def has_compiled_query(self): return self.has_compiled_query_
+
+  def compiled_cursor(self):
+    if self.compiled_cursor_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.compiled_cursor_
+
+  def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
+
+  def clear_compiled_cursor(self):
+
+    if self.has_compiled_cursor_:
+      self.has_compiled_cursor_ = 0;
+      if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
+
+  def has_compiled_cursor(self): return self.has_compiled_cursor_
+
+  def index_size(self): return len(self.index_)
+  def index_list(self): return self.index_
+
+  def index(self, i):
+    return self.index_[i]
+
+  def mutable_index(self, i):
+    return self.index_[i]
+
+  def add_index(self):
+    x = CompositeIndex()
+    self.index_.append(x)
+    return x
+
+  def clear_index(self):
+    self.index_ = []
+  def version_size(self): return len(self.version_)
+  def version_list(self): return self.version_
+
+  def version(self, i):
+    return self.version_[i]
+
+  def set_version(self, i, x):
+    self.version_[i] = x
+
+  def add_version(self, x):
+    self.version_.append(x)
+
+  def clear_version(self):
+    self.version_ = []
+
+
+  def MergeFrom(self, x):
+    """Merge all set fields of x into self; repeated fields are appended (deep copies for messages)."""
+    assert x is not self
+    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
+    for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
+    if (x.has_skipped_results()): self.set_skipped_results(x.skipped_results())
+    if (x.has_more_results()): self.set_more_results(x.more_results())
+    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
+    if (x.has_index_only()): self.set_index_only(x.index_only())
+    if (x.has_small_ops()): self.set_small_ops(x.small_ops())
+    if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
+    if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
+    for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
+    for i in xrange(x.version_size()): self.add_version(x.version(i))
+
+  def Equals(self, x):
+    """Field-by-field equality; presence flags must match as well as values."""
+    if x is self: return 1
+    if self.has_cursor_ != x.has_cursor_: return 0
+    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
+    if len(self.result_) != len(x.result_): return 0
+    for e1, e2 in zip(self.result_, x.result_):
+      if e1 != e2: return 0
+    if self.has_skipped_results_ != x.has_skipped_results_: return 0
+    if self.has_skipped_results_ and self.skipped_results_ != x.skipped_results_: return 0
+    if self.has_more_results_ != x.has_more_results_: return 0
+    if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
+    if self.has_keys_only_ != x.has_keys_only_: return 0
+    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
+    if self.has_index_only_ != x.has_index_only_: return 0
+    if self.has_index_only_ and self.index_only_ != x.index_only_: return 0
+    if self.has_small_ops_ != x.has_small_ops_: return 0
+    if self.has_small_ops_ and self.small_ops_ != x.small_ops_: return 0
+    if self.has_compiled_query_ != x.has_compiled_query_: return 0
+    if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
+    if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
+    if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
+    if len(self.index_) != len(x.index_): return 0
+    for e1, e2 in zip(self.index_, x.index_):
+      if e1 != e2: return 0
+    if len(self.version_) != len(x.version_): return 0
+    for e1, e2 in zip(self.version_, x.version_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    """Return 1 iff required `more_results` is set and every present/repeated submessage is initialized."""
+    initialized = 1
+    if (self.has_cursor_ and not self.cursor_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.result_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (not self.has_more_results_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: more_results not set.')
+    if (self.has_compiled_query_ and not self.compiled_query_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.index_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    return initialized
+
+  def ByteSize(self):
+    """Exact encoded size; assumes required `more_results` is set (its tag+bool are the trailing +2)."""
+    n = 0
+    if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
+    n += 1 * len(self.result_)
+    for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
+    if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
+    if (self.has_keys_only_): n += 2
+    if (self.has_index_only_): n += 2
+    if (self.has_small_ops_): n += 2
+    if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSize())
+    if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSize())
+    n += 1 * len(self.index_)
+    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
+    n += 1 * len(self.version_)
+    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
+    return n + 2
+
+  def ByteSizePartial(self):
+    """Encoded size counting only fields that are set (safe when required `more_results` is missing)."""
+    n = 0
+    if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSizePartial())
+    n += 1 * len(self.result_)
+    for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSizePartial())
+    if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
+    if (self.has_more_results_):
+      n += 2
+    if (self.has_keys_only_): n += 2
+    if (self.has_index_only_): n += 2
+    if (self.has_small_ops_): n += 2
+    if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSizePartial())
+    if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
+    n += 1 * len(self.index_)
+    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
+    n += 1 * len(self.version_)
+    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
+    return n
+
+  def Clear(self):
+    """Reset every field to its unset/default state."""
+    self.clear_cursor()
+    self.clear_result()
+    self.clear_skipped_results()
+    self.clear_more_results()
+    self.clear_keys_only()
+    self.clear_index_only()
+    self.clear_small_ops()
+    self.clear_compiled_query()
+    self.clear_compiled_cursor()
+    self.clear_index()
+    self.clear_version()
+
+  def OutputUnchecked(self, out):
+    """Serialize without checking IsInitialized; required `more_results` (tag 3) is written unconditionally."""
+    if (self.has_cursor_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.cursor_.ByteSize())
+      self.cursor_.OutputUnchecked(out)
+    for i in xrange(len(self.result_)):
+      out.putVarInt32(18)
+      out.putVarInt32(self.result_[i].ByteSize())
+      self.result_[i].OutputUnchecked(out)
+    out.putVarInt32(24)
+    out.putBoolean(self.more_results_)
+    if (self.has_keys_only_):
+      out.putVarInt32(32)
+      out.putBoolean(self.keys_only_)
+    if (self.has_compiled_query_):
+      out.putVarInt32(42)
+      out.putVarInt32(self.compiled_query_.ByteSize())
+      self.compiled_query_.OutputUnchecked(out)
+    if (self.has_compiled_cursor_):
+      out.putVarInt32(50)
+      out.putVarInt32(self.compiled_cursor_.ByteSize())
+      self.compiled_cursor_.OutputUnchecked(out)
+    if (self.has_skipped_results_):
+      out.putVarInt32(56)
+      out.putVarInt32(self.skipped_results_)
+    for i in xrange(len(self.index_)):
+      out.putVarInt32(66)
+      out.putVarInt32(self.index_[i].ByteSize())
+      self.index_[i].OutputUnchecked(out)
+    if (self.has_index_only_):
+      out.putVarInt32(72)
+      out.putBoolean(self.index_only_)
+    if (self.has_small_ops_):
+      out.putVarInt32(80)
+      out.putBoolean(self.small_ops_)
+    for i in xrange(len(self.version_)):
+      out.putVarInt32(88)
+      out.putVarInt64(self.version_[i])
+
+  def OutputPartial(self, out):
+    """Serialize only the fields that are set (tolerates missing required `more_results`)."""
+    if (self.has_cursor_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.cursor_.ByteSizePartial())
+      self.cursor_.OutputPartial(out)
+    for i in xrange(len(self.result_)):
+      out.putVarInt32(18)
+      out.putVarInt32(self.result_[i].ByteSizePartial())
+      self.result_[i].OutputPartial(out)
+    if (self.has_more_results_):
+      out.putVarInt32(24)
+      out.putBoolean(self.more_results_)
+    if (self.has_keys_only_):
+      out.putVarInt32(32)
+      out.putBoolean(self.keys_only_)
+    if (self.has_compiled_query_):
+      out.putVarInt32(42)
+      out.putVarInt32(self.compiled_query_.ByteSizePartial())
+      self.compiled_query_.OutputPartial(out)
+    if (self.has_compiled_cursor_):
+      out.putVarInt32(50)
+      out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
+      self.compiled_cursor_.OutputPartial(out)
+    if (self.has_skipped_results_):
+      out.putVarInt32(56)
+      out.putVarInt32(self.skipped_results_)
+    for i in xrange(len(self.index_)):
+      out.putVarInt32(66)
+      out.putVarInt32(self.index_[i].ByteSizePartial())
+      self.index_[i].OutputPartial(out)
+    if (self.has_index_only_):
+      out.putVarInt32(72)
+      out.putBoolean(self.index_only_)
+    if (self.has_small_ops_):
+      out.putVarInt32(80)
+      out.putBoolean(self.small_ops_)
+    for i in xrange(len(self.version_)):
+      out.putVarInt32(88)
+      out.putVarInt64(self.version_[i])
+
+  def TryMerge(self, d):
+    """Decode fields from Decoder `d` and merge into self; unknown tags are skipped, tag 0 raises."""
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_cursor().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_result().TryMerge(tmp)
+        continue
+      if tt == 24:
+        self.set_more_results(d.getBoolean())
+        continue
+      if tt == 32:
+        self.set_keys_only(d.getBoolean())
+        continue
+      if tt == 42:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_compiled_query().TryMerge(tmp)
+        continue
+      if tt == 50:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_compiled_cursor().TryMerge(tmp)
+        continue
+      if tt == 56:
+        self.set_skipped_results(d.getVarInt32())
+        continue
+      if tt == 66:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_index().TryMerge(tmp)
+        continue
+      if tt == 72:
+        self.set_index_only(d.getBoolean())
+        continue
+      if tt == 80:
+        self.set_small_ops(d.getBoolean())
+        continue
+      if tt == 88:
+        self.add_version(d.getVarInt64())
+        continue
+
+
+      # Tag 0 is never valid: buffer is corrupt or truncated.
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Render set fields as indented protobuf text format."""
+    res=""
+    if self.has_cursor_:
+      res+=prefix+"cursor <\n"
+      res+=self.cursor_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.result_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("result%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    if self.has_skipped_results_: res+=prefix+("skipped_results: %s\n" % self.DebugFormatInt32(self.skipped_results_))
+    if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
+    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
+    if self.has_index_only_: res+=prefix+("index_only: %s\n" % self.DebugFormatBool(self.index_only_))
+    if self.has_small_ops_: res+=prefix+("small_ops: %s\n" % self.DebugFormatBool(self.small_ops_))
+    if self.has_compiled_query_:
+      res+=prefix+"compiled_query <\n"
+      res+=self.compiled_query_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_compiled_cursor_:
+      res+=prefix+"compiled_cursor <\n"
+      res+=self.compiled_cursor_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.index_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("index%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    cnt=0
+    for e in self.version_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
+      cnt+=1
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    # Densify a sparse {tag: value} map into a tuple indexed by tag number 0..maxtag.
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  # Proto field-number constants (k<field> = tag number on the wire).
+  kcursor = 1
+  kresult = 2
+  kskipped_results = 7
+  kmore_results = 3
+  kkeys_only = 4
+  kindex_only = 9
+  ksmall_ops = 10
+  kcompiled_query = 5
+  kcompiled_cursor = 6
+  kindex = 8
+  kversion = 11
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cursor",
+    2: "result",
+    3: "more_results",
+    4: "keys_only",
+    5: "compiled_query",
+    6: "compiled_cursor",
+    7: "skipped_results",
+    8: "index",
+    9: "index_only",
+    10: "small_ops",
+    11: "version",
+  }, 11)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.STRING,
+    7: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.NUMERIC,
+    10: ProtocolBuffer.Encoder.NUMERIC,
+    11: ProtocolBuffer.Encoder.NUMERIC,
+  }, 11, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.QueryResult'
+class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage):
+  """Auto-generated wire codec for apphosting_datastore_v3.AllocateIdsRequest; do not hand-edit the logic (regenerated from the .proto)."""
+  # Presence flags and scalar defaults; message-typed fields default to None and are created lazily.
+  has_header_ = 0
+  header_ = None
+  has_model_key_ = 0
+  model_key_ = None
+  has_size_ = 0
+  size_ = 0
+  has_max_ = 0
+  max_ = 0
+
+  def __init__(self, contents=None):
+    # Repeated `reserve` lives per-instance; lazy_init_lock_ guards lazy creation of optional submessages.
+    self.reserve_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def header(self):
+    # Double-checked lazy init under lazy_init_lock_.
+    if self.header_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.header_ is None: self.header_ = InternalHeader()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.header_
+
+  def mutable_header(self): self.has_header_ = 1; return self.header()
+
+  def clear_header(self):
+
+    if self.has_header_:
+      self.has_header_ = 0;
+      if self.header_ is not None: self.header_.Clear()
+
+  def has_header(self): return self.has_header_
+
+  def model_key(self):
+    if self.model_key_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.model_key_ is None: self.model_key_ = Reference()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.model_key_
+
+  def mutable_model_key(self): self.has_model_key_ = 1; return self.model_key()
+
+  def clear_model_key(self):
+
+    if self.has_model_key_:
+      self.has_model_key_ = 0;
+      if self.model_key_ is not None: self.model_key_.Clear()
+
+  def has_model_key(self): return self.has_model_key_
+
+  def size(self): return self.size_
+
+  def set_size(self, x):
+    self.has_size_ = 1
+    self.size_ = x
+
+  def clear_size(self):
+    if self.has_size_:
+      self.has_size_ = 0
+      self.size_ = 0
+
+  def has_size(self): return self.has_size_
+
+  def max(self): return self.max_
+
+  def set_max(self, x):
+    self.has_max_ = 1
+    self.max_ = x
+
+  def clear_max(self):
+    if self.has_max_:
+      self.has_max_ = 0
+      self.max_ = 0
+
+  def has_max(self): return self.has_max_
+
+  def reserve_size(self): return len(self.reserve_)
+  def reserve_list(self): return self.reserve_
+
+  def reserve(self, i):
+    return self.reserve_[i]
+
+  def mutable_reserve(self, i):
+    return self.reserve_[i]
+
+  def add_reserve(self):
+    x = Reference()
+    self.reserve_.append(x)
+    return x
+
+  def clear_reserve(self):
+    self.reserve_ = []
+
+  def MergeFrom(self, x):
+    """Merge all set fields of x into self; repeated `reserve` entries are appended as deep copies."""
+    assert x is not self
+    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
+    if (x.has_model_key()): self.mutable_model_key().MergeFrom(x.model_key())
+    if (x.has_size()): self.set_size(x.size())
+    if (x.has_max()): self.set_max(x.max())
+    for i in xrange(x.reserve_size()): self.add_reserve().CopyFrom(x.reserve(i))
+
+  def Equals(self, x):
+    """Field-by-field equality; presence flags must match as well as values."""
+    if x is self: return 1
+    if self.has_header_ != x.has_header_: return 0
+    if self.has_header_ and self.header_ != x.header_: return 0
+    if self.has_model_key_ != x.has_model_key_: return 0
+    if self.has_model_key_ and self.model_key_ != x.model_key_: return 0
+    if self.has_size_ != x.has_size_: return 0
+    if self.has_size_ and self.size_ != x.size_: return 0
+    if self.has_max_ != x.has_max_: return 0
+    if self.has_max_ and self.max_ != x.max_: return 0
+    if len(self.reserve_) != len(x.reserve_): return 0
+    for e1, e2 in zip(self.reserve_, x.reserve_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    """Return 1 iff every present/repeated submessage is initialized (no required fields here)."""
+    initialized = 1
+    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_model_key_ and not self.model_key_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.reserve_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    return initialized
+
+  def ByteSize(self):
+    """Exact encoded size in bytes; all fields are optional/repeated so no constant term."""
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
+    if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSize())
+    if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
+    if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
+    n += 1 * len(self.reserve_)
+    for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSize())
+    return n
+
+  def ByteSizePartial(self):
+    """Encoded size using partial sizes of submessages (mirrors ByteSize for this message)."""
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
+    if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSizePartial())
+    if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
+    if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
+    n += 1 * len(self.reserve_)
+    for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSizePartial())
+    return n
+
+  def Clear(self):
+    """Reset every field to its unset/default state."""
+    self.clear_header()
+    self.clear_model_key()
+    self.clear_size()
+    self.clear_max()
+    self.clear_reserve()
+
+  def OutputUnchecked(self, out):
+    """Serialize set fields to `out`: model_key=1, size=2, max=3, header=4, reserve=5 (repeated)."""
+    if (self.has_model_key_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.model_key_.ByteSize())
+      self.model_key_.OutputUnchecked(out)
+    if (self.has_size_):
+      out.putVarInt32(16)
+      out.putVarInt64(self.size_)
+    if (self.has_max_):
+      out.putVarInt32(24)
+      out.putVarInt64(self.max_)
+    if (self.has_header_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.header_.ByteSize())
+      self.header_.OutputUnchecked(out)
+    for i in xrange(len(self.reserve_)):
+      out.putVarInt32(42)
+      out.putVarInt32(self.reserve_[i].ByteSize())
+      self.reserve_[i].OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    """Serialize set fields using partial sizes (tolerates uninitialized submessages)."""
+    if (self.has_model_key_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.model_key_.ByteSizePartial())
+      self.model_key_.OutputPartial(out)
+    if (self.has_size_):
+      out.putVarInt32(16)
+      out.putVarInt64(self.size_)
+    if (self.has_max_):
+      out.putVarInt32(24)
+      out.putVarInt64(self.max_)
+    if (self.has_header_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.header_.ByteSizePartial())
+      self.header_.OutputPartial(out)
+    for i in xrange(len(self.reserve_)):
+      out.putVarInt32(42)
+      out.putVarInt32(self.reserve_[i].ByteSizePartial())
+      self.reserve_[i].OutputPartial(out)
+
+  def TryMerge(self, d):
+    """Decode fields from Decoder `d` and merge into self; unknown tags are skipped, tag 0 raises."""
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_model_key().TryMerge(tmp)
+        continue
+      if tt == 16:
+        self.set_size(d.getVarInt64())
+        continue
+      if tt == 24:
+        self.set_max(d.getVarInt64())
+        continue
+      if tt == 34:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_header().TryMerge(tmp)
+        continue
+      if tt == 42:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_reserve().TryMerge(tmp)
+        continue
+
+
+      # Tag 0 is never valid: buffer is corrupt or truncated.
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    """Render set fields as indented protobuf text format."""
+    res=""
+    if self.has_header_:
+      res+=prefix+"header <\n"
+      res+=self.header_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_model_key_:
+      res+=prefix+"model_key <\n"
+      res+=self.model_key_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_size_: res+=prefix+("size: %s\n" % self.DebugFormatInt64(self.size_))
+    if self.has_max_: res+=prefix+("max: %s\n" % self.DebugFormatInt64(self.max_))
+    cnt=0
+    for e in self.reserve_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("reserve%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    # Densify a sparse {tag: value} map into a tuple indexed by tag number 0..maxtag.
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  # Proto field-number constants (k<field> = tag number on the wire).
+  kheader = 4
+  kmodel_key = 1
+  ksize = 2
+  kmax = 3
+  kreserve = 5
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "model_key",
+    2: "size",
+    3: "max",
+    4: "header",
+    5: "reserve",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsRequest'
+class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage):
+  # Presence flags and defaults: start/end are required varints; cost is a lazily-created optional submessage.
+  has_start_ = 0
+  start_ = 0
+  has_end_ = 0
+  end_ = 0
+  has_cost_ = 0
+  cost_ = None
+
+  def __init__(self, contents=None):
+    """Optionally parse from serialized bytes; the lock guards lazy creation of the cost submessage."""
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  # Standard generated accessors for the required varint fields start and end.
+  def start(self): return self.start_
+
+  def set_start(self, x):
+    self.has_start_ = 1
+    self.start_ = x
+
+  def clear_start(self):
+    if self.has_start_:
+      self.has_start_ = 0
+      self.start_ = 0
+
+  def has_start(self): return self.has_start_
+
+  def end(self): return self.end_
+
+  def set_end(self, x):
+    self.has_end_ = 1
+    self.end_ = x
+
+  def clear_end(self):
+    if self.has_end_:
+      self.has_end_ = 0
+      self.end_ = 0
+
+  def has_end(self): return self.has_end_
+
+  def cost(self):
+    # Double-checked lazy init under lazy_init_lock_.
+    if self.cost_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.cost_ is None: self.cost_ = Cost()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.cost_
+
+  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
+
+  def clear_cost(self):
+
+    if self.has_cost_:
+      self.has_cost_ = 0;
+      if self.cost_ is not None: self.cost_.Clear()
+
+  def has_cost(self): return self.has_cost_
+
+
+  def MergeFrom(self, x):
+    """Merge all set fields of x into self."""
+    assert x is not self
+    if (x.has_start()): self.set_start(x.start())
+    if (x.has_end()): self.set_end(x.end())
+    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
+
+  def Equals(self, x):
+    """Field-by-field equality; presence flags must match as well as values."""
+    if x is self: return 1
+    if self.has_start_ != x.has_start_: return 0
+    if self.has_start_ and self.start_ != x.start_: return 0
+    if self.has_end_ != x.has_end_: return 0
+    if self.has_end_ and self.end_ != x.end_: return 0
+    if self.has_cost_ != x.has_cost_: return 0
+    if self.has_cost_ and self.cost_ != x.cost_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    """Return 1 iff required `start` and `end` are set and `cost` (if present) is initialized."""
+    initialized = 1
+    if (not self.has_start_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: start not set.')
+    if (not self.has_end_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: end not set.')
+    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthVarInt64(self.start_)
+    n += self.lengthVarInt64(self.end_)
+    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
+    return n + 2
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_start_):
+      n += 1
+      n += self.lengthVarInt64(self.start_)
+    if (self.has_end_):
+      n += 1
+      n += self.lengthVarInt64(self.end_)
+    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_start()
+    self.clear_end()
+    self.clear_cost()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(8)
+    out.putVarInt64(self.start_)
+    out.putVarInt32(16)
+    out.putVarInt64(self.end_)
+    if (self.has_cost_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.cost_.ByteSize())
+      self.cost_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_start_):
+      out.putVarInt32(8)
+      out.putVarInt64(self.start_)
+    if (self.has_end_):
+      out.putVarInt32(16)
+      out.putVarInt64(self.end_)
+    if (self.has_cost_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.cost_.ByteSizePartial())
+      self.cost_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_start(d.getVarInt64())
+        continue
+      if tt == 16:
+        self.set_end(d.getVarInt64())
+        continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_cost().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_start_: res+=prefix+("start: %s\n" % self.DebugFormatInt64(self.start_))
+    if self.has_end_: res+=prefix+("end: %s\n" % self.DebugFormatInt64(self.end_))
+    if self.has_cost_:
+      res+=prefix+"cost <\n"
+      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kstart = 1
+  kend = 2
+  kcost = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "start",
+    2: "end",
+    3: "cost",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsResponse'
+class CompositeIndices(ProtocolBuffer.ProtocolMessage):
+  """Generated message: apphosting_datastore_v3.CompositeIndices.
+
+  Single repeated field:
+    index (1): repeated CompositeIndex submessage.
+
+  NOTE(review): machine-generated wire-format code -- regenerate from the
+  .proto instead of hand-editing.
+  """
+
+  def __init__(self, contents=None):
+    self.index_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def index_size(self): return len(self.index_)
+  def index_list(self): return self.index_
+
+  def index(self, i):
+    return self.index_[i]
+
+  def mutable_index(self, i):
+    return self.index_[i]
+
+  def add_index(self):
+    # Appends a fresh CompositeIndex and returns it for the caller to fill.
+    x = CompositeIndex()
+    self.index_.append(x)
+    return x
+
+  def clear_index(self):
+    self.index_ = []
+
+  def MergeFrom(self, x):
+    assert x is not self
+    for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
+
+  def Equals(self, x):
+    if x is self: return 1
+    if len(self.index_) != len(x.index_): return 0
+    for e1, e2 in zip(self.index_, x.index_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # Initialized iff every contained index is itself initialized.
+    initialized = 1
+    for p in self.index_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    # One tag byte per repeated element, plus each length-prefixed payload.
+    n += 1 * len(self.index_)
+    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    n += 1 * len(self.index_)
+    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_index()
+
+  def OutputUnchecked(self, out):
+    # Wire tag 10 = (field 1 << 3)|length-delimited.
+    for i in xrange(len(self.index_)):
+      out.putVarInt32(10)
+      out.putVarInt32(self.index_[i].ByteSize())
+      self.index_[i].OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    for i in xrange(len(self.index_)):
+      out.putVarInt32(10)
+      out.putVarInt32(self.index_[i].ByteSizePartial())
+      self.index_[i].OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        # Length-delimited element: decode via a bounded sub-decoder.
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_index().TryMerge(tmp)
+        continue
+
+
+      # Tag 0 is invalid; any other unknown tag is skipped.
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    cnt=0
+    for e in self.index_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("index%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    return res
+
+
+  # Plain function invoked during class-body execution to build the
+  # dense tag->value tuples below.
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kindex = 1
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompositeIndices'
<parameter name="new_str">+class AddActionsRequest(ProtocolBuffer.ProtocolMessage):
+  """Generated message: apphosting_datastore_v3.AddActionsRequest.
+
+  Fields (tag numbers per the k* constants near the bottom):
+    header      (3): optional InternalHeader, constructed lazily.
+    transaction (1): required Transaction (eagerly created in __init__).
+    action      (2): repeated Action submessage.
+
+  NOTE(review): machine-generated wire-format code -- regenerate from the
+  .proto instead of hand-editing.
+  """
+
+  # Presence flags; header_ is lazily created, transaction_ in __init__.
+  has_header_ = 0
+  header_ = None
+  has_transaction_ = 0
+
+  def __init__(self, contents=None):
+    self.transaction_ = Transaction()
+    self.action_ = []
+    # Lock guarding the lazy creation of header_ in header().
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def header(self):
+    # Double-checked lazy construction of the InternalHeader submessage.
+    if self.header_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.header_ is None: self.header_ = InternalHeader()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.header_
+
+  def mutable_header(self): self.has_header_ = 1; return self.header()
+
+  def clear_header(self):
+
+    if self.has_header_:
+      self.has_header_ = 0;
+      if self.header_ is not None: self.header_.Clear()
+
+  def has_header(self): return self.has_header_
+
+  def transaction(self): return self.transaction_
+
+  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction_
+
+  def clear_transaction(self):self.has_transaction_ = 0; self.transaction_.Clear()
+
+  def has_transaction(self): return self.has_transaction_
+
+  def action_size(self): return len(self.action_)
+  def action_list(self): return self.action_
+
+  def action(self, i):
+    return self.action_[i]
+
+  def mutable_action(self, i):
+    return self.action_[i]
+
+  def add_action(self):
+    # Appends a fresh Action and returns it for the caller to fill.
+    x = Action()
+    self.action_.append(x)
+    return x
+
+  def clear_action(self):
+    self.action_ = []
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
+    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+    for i in xrange(x.action_size()): self.add_action().CopyFrom(x.action(i))
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_header_ != x.has_header_: return 0
+    if self.has_header_ and self.header_ != x.header_: return 0
+    if self.has_transaction_ != x.has_transaction_: return 0
+    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+    if len(self.action_) != len(x.action_): return 0
+    for e1, e2 in zip(self.action_, x.action_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # transaction is required; header and each action must themselves be
+    # initialized when present.
+    initialized = 1
+    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
+    if (not self.has_transaction_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: transaction not set.')
+    elif not self.transaction_.IsInitialized(debug_strs): initialized = 0
+    for p in self.action_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
+    n += self.lengthString(self.transaction_.ByteSize())
+    n += 1 * len(self.action_)
+    for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSize())
+    # + 1: the one-byte tag of the required transaction field.
+    return n + 1
+
+  def ByteSizePartial(self):
+    # Like ByteSize, but only counts fields that are actually set.
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
+    if (self.has_transaction_):
+      n += 1
+      n += self.lengthString(self.transaction_.ByteSizePartial())
+    n += 1 * len(self.action_)
+    for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_header()
+    self.clear_transaction()
+    self.clear_action()
+
+  def OutputUnchecked(self, out):
+    # Wire tags (all length-delimited): 10 = field 1 (transaction),
+    # 18 = field 2 (action), 26 = field 3 (header).
+    out.putVarInt32(10)
+    out.putVarInt32(self.transaction_.ByteSize())
+    self.transaction_.OutputUnchecked(out)
+    for i in xrange(len(self.action_)):
+      out.putVarInt32(18)
+      out.putVarInt32(self.action_[i].ByteSize())
+      self.action_[i].OutputUnchecked(out)
+    if (self.has_header_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.header_.ByteSize())
+      self.header_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_transaction_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.transaction_.ByteSizePartial())
+      self.transaction_.OutputPartial(out)
+    for i in xrange(len(self.action_)):
+      out.putVarInt32(18)
+      out.putVarInt32(self.action_[i].ByteSizePartial())
+      self.action_[i].OutputPartial(out)
+    if (self.has_header_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.header_.ByteSizePartial())
+      self.header_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_transaction().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_action().TryMerge(tmp)
+        continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_header().TryMerge(tmp)
+        continue
+
+
+      # Tag 0 is invalid; any other unknown tag is skipped.
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_header_:
+      res+=prefix+"header <\n"
+      res+=self.header_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_transaction_:
+      res+=prefix+"transaction <\n"
+      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.action_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("action%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    return res
+
+
+  # Plain function invoked during class-body execution to build the
+  # dense tag->value tuples below.
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kheader = 3
+  ktransaction = 1
+  kaction = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "transaction",
+    2: "action",
+    3: "header",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsRequest'
+class AddActionsResponse(ProtocolBuffer.ProtocolMessage):
+  """Generated message: apphosting_datastore_v3.AddActionsResponse.
+
+  Empty message -- no fields; all methods are the generator's no-op
+  stubs for a fieldless ProtocolMessage.
+
+  NOTE(review): machine-generated code -- regenerate rather than edit.
+  """
+
+  def __init__(self, contents=None):
+    pass
+    if contents is not None: self.MergeFromString(contents)
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+
+  def Equals(self, x):
+    if x is self: return 1
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    return n
+
+  def Clear(self):
+    pass
+
+  def OutputUnchecked(self, out):
+    pass
+
+  def OutputPartial(self, out):
+    pass
+
+  def TryMerge(self, d):
+    # No known fields: every tag is either invalid (0) or skipped.
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    return res
+
+
+  # Plain function invoked during class-body execution to build the
+  # dense tag->value tuples below.
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsResponse'
+class BeginTransactionRequest(ProtocolBuffer.ProtocolMessage):
+  """Generated message: apphosting_datastore_v3.BeginTransactionRequest.
+
+  Fields (tag numbers per the k* constants near the bottom):
+    header            (3): optional InternalHeader, constructed lazily.
+    app               (1): required string.
+    allow_multiple_eg (2): optional boolean.
+
+  NOTE(review): machine-generated wire-format code -- regenerate from the
+  .proto instead of hand-editing.
+  """
+
+  # Presence flags and default values.
+  has_header_ = 0
+  header_ = None
+  has_app_ = 0
+  app_ = ""
+  has_allow_multiple_eg_ = 0
+  allow_multiple_eg_ = 0
+
+  def __init__(self, contents=None):
+    # Lock guarding the lazy creation of header_ in header().
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def header(self):
+    # Double-checked lazy construction of the InternalHeader submessage.
+    if self.header_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.header_ is None: self.header_ = InternalHeader()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.header_
+
+  def mutable_header(self): self.has_header_ = 1; return self.header()
+
+  def clear_header(self):
+
+    if self.has_header_:
+      self.has_header_ = 0;
+      if self.header_ is not None: self.header_.Clear()
+
+  def has_header(self): return self.has_header_
+
+  def app(self): return self.app_
+
+  def set_app(self, x):
+    self.has_app_ = 1
+    self.app_ = x
+
+  def clear_app(self):
+    if self.has_app_:
+      self.has_app_ = 0
+      self.app_ = ""
+
+  def has_app(self): return self.has_app_
+
+  def allow_multiple_eg(self): return self.allow_multiple_eg_
+
+  def set_allow_multiple_eg(self, x):
+    self.has_allow_multiple_eg_ = 1
+    self.allow_multiple_eg_ = x
+
+  def clear_allow_multiple_eg(self):
+    if self.has_allow_multiple_eg_:
+      self.has_allow_multiple_eg_ = 0
+      self.allow_multiple_eg_ = 0
+
+  def has_allow_multiple_eg(self): return self.has_allow_multiple_eg_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
+    if (x.has_app()): self.set_app(x.app())
+    if (x.has_allow_multiple_eg()): self.set_allow_multiple_eg(x.allow_multiple_eg())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_header_ != x.has_header_: return 0
+    if self.has_header_ and self.header_ != x.header_: return 0
+    if self.has_app_ != x.has_app_: return 0
+    if self.has_app_ and self.app_ != x.app_: return 0
+    if self.has_allow_multiple_eg_ != x.has_allow_multiple_eg_: return 0
+    if self.has_allow_multiple_eg_ and self.allow_multiple_eg_ != x.allow_multiple_eg_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # app is required; header must itself be initialized when present.
+    initialized = 1
+    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
+    if (not self.has_app_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: app not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
+    n += self.lengthString(len(self.app_))
+    # allow_multiple_eg: 1 tag byte + 1 boolean byte when set.
+    if (self.has_allow_multiple_eg_): n += 2
+    # + 1: the one-byte tag of the required app field.
+    return n + 1
+
+  def ByteSizePartial(self):
+    # Like ByteSize, but only counts fields that are actually set.
+    n = 0
+    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
+    if (self.has_app_):
+      n += 1
+      n += self.lengthString(len(self.app_))
+    if (self.has_allow_multiple_eg_): n += 2
+    return n
+
+  def Clear(self):
+    self.clear_header()
+    self.clear_app()
+    self.clear_allow_multiple_eg()
+
+  def OutputUnchecked(self, out):
+    # Wire tags: 10 = field 1 length-delimited (app), 16 = field 2
+    # varint (allow_multiple_eg), 26 = field 3 length-delimited (header).
+    out.putVarInt32(10)
+    out.putPrefixedString(self.app_)
+    if (self.has_allow_multiple_eg_):
+      out.putVarInt32(16)
+      out.putBoolean(self.allow_multiple_eg_)
+    if (self.has_header_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.header_.ByteSize())
+      self.header_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_app_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.app_)
+    if (self.has_allow_multiple_eg_):
+      out.putVarInt32(16)
+      out.putBoolean(self.allow_multiple_eg_)
+    if (self.has_header_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.header_.ByteSizePartial())
+      self.header_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_app(d.getPrefixedString())
+        continue
+      if tt == 16:
+        self.set_allow_multiple_eg(d.getBoolean())
+        continue
+      if tt == 26:
+        # Length-delimited submessage: decode via a bounded sub-decoder.
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_header().TryMerge(tmp)
+        continue
+
+
+      # Tag 0 is invalid; any other unknown tag is skipped.
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_header_:
+      res+=prefix+"header <\n"
+      res+=self.header_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+    if self.has_allow_multiple_eg_: res+=prefix+("allow_multiple_eg: %s\n" % self.DebugFormatBool(self.allow_multiple_eg_))
+    return res
+
+
+  # Plain function invoked during class-body execution to build the
+  # dense tag->value tuples below.
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kheader = 3
+  kapp = 1
+  kallow_multiple_eg = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app",
+    2: "allow_multiple_eg",
+    3: "header",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.BeginTransactionRequest'
+class CommitResponse_Version(ProtocolBuffer.ProtocolMessage):
+  """Generated group member: the `Version` group inside CommitResponse.
+
+  Encoded as a protobuf *group* (group field 3 of CommitResponse), so
+  its fields use the enclosing message's tag space:
+    root_entity_key (4): required Reference submessage.
+    version         (5): required varint (int64 per DebugFormatInt64).
+
+  TryMerge terminates on the end-group tag rather than on buffer
+  exhaustion; the enclosing CommitResponse writes the delimiting tags.
+
+  NOTE(review): machine-generated wire-format code -- regenerate from the
+  .proto instead of hand-editing.
+  """
+
+  # Presence flags; root_entity_key_ is eagerly created in __init__.
+  has_root_entity_key_ = 0
+  has_version_ = 0
+  version_ = 0
+
+  def __init__(self, contents=None):
+    self.root_entity_key_ = Reference()
+    if contents is not None: self.MergeFromString(contents)
+
+  def root_entity_key(self): return self.root_entity_key_
+
+  def mutable_root_entity_key(self): self.has_root_entity_key_ = 1; return self.root_entity_key_
+
+  def clear_root_entity_key(self):self.has_root_entity_key_ = 0; self.root_entity_key_.Clear()
+
+  def has_root_entity_key(self): return self.has_root_entity_key_
+
+  def version(self): return self.version_
+
+  def set_version(self, x):
+    self.has_version_ = 1
+    self.version_ = x
+
+  def clear_version(self):
+    if self.has_version_:
+      self.has_version_ = 0
+      self.version_ = 0
+
+  def has_version(self): return self.has_version_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_root_entity_key()): self.mutable_root_entity_key().MergeFrom(x.root_entity_key())
+    if (x.has_version()): self.set_version(x.version())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_root_entity_key_ != x.has_root_entity_key_: return 0
+    if self.has_root_entity_key_ and self.root_entity_key_ != x.root_entity_key_: return 0
+    if self.has_version_ != x.has_version_: return 0
+    if self.has_version_ and self.version_ != x.version_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # Both fields are required.
+    initialized = 1
+    if (not self.has_root_entity_key_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: root_entity_key not set.')
+    elif not self.root_entity_key_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_version_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: version not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.root_entity_key_.ByteSize())
+    n += self.lengthVarInt64(self.version_)
+    # + 2: the two one-byte tags of the required fields (34 and 40).
+    return n + 2
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_root_entity_key_):
+      n += 1
+      n += self.lengthString(self.root_entity_key_.ByteSizePartial())
+    if (self.has_version_):
+      n += 1
+      n += self.lengthVarInt64(self.version_)
+    return n
+
+  def Clear(self):
+    self.clear_root_entity_key()
+    self.clear_version()
+
+  def OutputUnchecked(self, out):
+    # Wire tags: 34 = (field 4 << 3)|length-delimited,
+    # 40 = (field 5 << 3)|varint.
+    out.putVarInt32(34)
+    out.putVarInt32(self.root_entity_key_.ByteSize())
+    self.root_entity_key_.OutputUnchecked(out)
+    out.putVarInt32(40)
+    out.putVarInt64(self.version_)
+
+  def OutputPartial(self, out):
+    if (self.has_root_entity_key_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.root_entity_key_.ByteSizePartial())
+      self.root_entity_key_.OutputPartial(out)
+    if (self.has_version_):
+      out.putVarInt32(40)
+      out.putVarInt64(self.version_)
+
+  def TryMerge(self, d):
+    while 1:
+      tt = d.getVarInt32()
+      # 28 = (field 3 << 3)|end-group: the enclosing group is closed.
+      if tt == 28: break
+      if tt == 34:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_root_entity_key().TryMerge(tmp)
+        continue
+      if tt == 40:
+        self.set_version(d.getVarInt64())
+        continue
+
+
+      # Tag 0 is invalid; any other unknown tag is skipped.
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_root_entity_key_:
+      res+=prefix+"root_entity_key <\n"
+      res+=self.root_entity_key_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
+    return res
+
+class CommitResponse(ProtocolBuffer.ProtocolMessage):
+  """Generated message: apphosting_datastore_v3.CommitResponse.
+
+  Fields (tag numbers per the k* constants near the bottom):
+    cost    (1): optional Cost submessage, constructed lazily.
+    Version (3): repeated *group* of CommitResponse_Version elements,
+                 delimited on the wire by start-group/end-group tags
+                 (27/28) rather than a length prefix.
+
+  NOTE(review): machine-generated wire-format code -- regenerate from the
+  .proto instead of hand-editing.
+  """
+
+  has_cost_ = 0
+  cost_ = None
+
+  def __init__(self, contents=None):
+    self.version_ = []
+    # Lock guarding the lazy creation of cost_ in cost().
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def cost(self):
+    # Double-checked lazy construction of the Cost submessage.
+    if self.cost_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.cost_ is None: self.cost_ = Cost()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.cost_
+
+  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
+
+  def clear_cost(self):
+
+    if self.has_cost_:
+      self.has_cost_ = 0;
+      if self.cost_ is not None: self.cost_.Clear()
+
+  def has_cost(self): return self.has_cost_
+
+  def version_size(self): return len(self.version_)
+  def version_list(self): return self.version_
+
+  def version(self, i):
+    return self.version_[i]
+
+  def mutable_version(self, i):
+    return self.version_[i]
+
+  def add_version(self):
+    # Appends a fresh group element and returns it for the caller to fill.
+    x = CommitResponse_Version()
+    self.version_.append(x)
+    return x
+
+  def clear_version(self):
+    self.version_ = []
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
+    for i in xrange(x.version_size()): self.add_version().CopyFrom(x.version(i))
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_cost_ != x.has_cost_: return 0
+    if self.has_cost_ and self.cost_ != x.cost_: return 0
+    if len(self.version_) != len(x.version_): return 0
+    for e1, e2 in zip(self.version_, x.version_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.version_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
+    # Two one-byte group delimiter tags (start 27 / end 28) per element.
+    n += 2 * len(self.version_)
+    for i in xrange(len(self.version_)): n += self.version_[i].ByteSize()
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
+    n += 2 * len(self.version_)
+    for i in xrange(len(self.version_)): n += self.version_[i].ByteSizePartial()
+    return n
+
+  def Clear(self):
+    self.clear_cost()
+    self.clear_version()
+
+  def OutputUnchecked(self, out):
+    if (self.has_cost_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.cost_.ByteSize())
+      self.cost_.OutputUnchecked(out)
+    # Each Version element is bracketed by start-group (27) and
+    # end-group (28) tags for field 3.
+    for i in xrange(len(self.version_)):
+      out.putVarInt32(27)
+      self.version_[i].OutputUnchecked(out)
+      out.putVarInt32(28)
+
+  def OutputPartial(self, out):
+    if (self.has_cost_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.cost_.ByteSizePartial())
+      self.cost_.OutputPartial(out)
+    for i in xrange(len(self.version_)):
+      out.putVarInt32(27)
+      self.version_[i].OutputPartial(out)
+      out.putVarInt32(28)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_cost().TryMerge(tmp)
+        continue
+      if tt == 27:
+        # Start-group tag: the element's TryMerge consumes up to and
+        # including the matching end-group tag (28).
+        self.add_version().TryMerge(d)
+        continue
+
+
+      # Tag 0 is invalid; any other unknown tag is skipped.
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_cost_:
+      res+=prefix+"cost <\n"
+      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    cnt=0
+    for e in self.version_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("Version%s {\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+      cnt+=1
+    return res
+
+
+  # Plain function invoked during class-body execution to build the
+  # dense tag->value tuples below.
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kcost = 1
+  kVersionGroup = 3
+  kVersionroot_entity_key = 4
+  kVersionversion = 5
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cost",
+    3: "Version",
+    4: "root_entity_key",
+    5: "version",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STARTGROUP,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CommitResponse'
+# Generator-emitted placeholder: no accelerated implementations are
+# registered here even when the C extension runtime is available.
+if _extension_runtime:
+  pass
+
+# Explicit public API of this generated module.
+__all__ = ['InternalHeader','Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_PositionIndexValue','CompiledCursor_Position','Cursor','Error','Cost','Cost_CommitCost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','TouchRequest','TouchResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse','CommitResponse_Version']
diff --git a/google/appengine/datastore/datastore_v4_pb.py b/google/appengine/datastore/datastore_v4_pb.py
index cd70a37..3dfcc9c 100644
--- a/google/appengine/datastore/datastore_v4_pb.py
+++ b/google/appengine/datastore/datastore_v4_pb.py
@@ -177,7 +177,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Error'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9yc3oJRXJyb3JDb2RliwGSAQtCQURfUkVRVUVTVJgBAYwBiwGSARZDT05DVVJSRU5UX1RSQU5TQUNUSU9OmAECjAGLAZIBDklOVEVSTkFMX0VSUk9SmAEDjAGLAZIBCk5FRURfSU5ERViYAQSMAYsBkgEHVElNRU9VVJgBBYwBiwGSARFQRVJNSVNTSU9OX0RFTklFRJgBBowBiwGSAQ5CSUdUQUJMRV9FUlJPUpgBB4wBiwGSARxDT01NSVRURURfQlVUX1NUSUxMX0FQUExZSU5HmAEIjAGLAZIBE0NBUEFCSUxJVFlfRElTQUJMRUSYAQmMAYsBkgEVVFJZX0FMVEVSTkFURV9CQUNLRU5EmAEKjAGLAZIBEVNBRkVfVElNRV9UT09fT0xEmAELjAF0ugH4LQonYXBwaG9zdGluZy9kYXRhc3RvcmUvZGF0YXN0b3JlX3Y0LnByb3RvEhdhcHBob3N0aW5nLmRhdGFzdG9yZS52NBokYXBwaG9zdGluZy9kYXRhc3RvcmUvZW50aXR5X3Y0LnByb3RvIosCCgVFcnJvciKBAgoJRXJyb3JDb2RlEg8KC0JBRF9SRVFVRVNUEAESGgoWQ09OQ1VSUkVOVF9UUkFOU0FDVElPThACEhIKDklOVEVSTkFMX0VSUk9SEAMSDgoKTkVFRF9JTkRFWBAEEgsKB1RJTUVPVVQQBRIVChFQRVJNSVNTSU9OX0RFTklFRBAGEhIKDkJJR1RBQkxFX0VSUk9SEAcSIAocQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlORxAIEhcKE0NBUEFCSUxJVFlfRElTQUJMRUQQCRIZChVUUllfQUxURVJOQVRFX0JBQ0tFTkQQChIVChFTQUZFX1RJTUVfVE9PX09MRBALIoYBCgxFbnRpdHlSZXN1bHQSLwoGZW50aXR5GAEgAigLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Eg8KB3ZlcnNpb24YAiABKAMiNAoKUmVzdWx0VHlwZRIICgRGVUxMEAESDgoKUFJPSkVDVElPThACEgwKCEtFWV9PTkxZEAMi8QIKBVF1ZXJ5Ej8KCnByb2plY3Rpb24YAiADKAsyKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUV4cHJlc3Npb24SNQoEa2luZBgDIAMoCzInLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktpbmRFeHByZXNzaW9uEi8KBmZpbHRlchgEIAEoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlchI1CgVvcmRlchgFIAMoCzImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXISPAoIZ3JvdXBfYnkYBiADKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRIUCgxzdGFydF9jdXJzb3IYByABKAwSEgoKZW5kX2N1cnNvchgIIAEoDBIRCgZvZmZzZXQYCiABKAU6ATASDQoFbGltaXQYCyABKAUiHgoOS2luZEV4cHJlc3Npb24SDAoEbmFtZRgBIAIoCSIhChFQcm9wZXJ0eVJlZmVyZW5jZRIMCgRuYW1lGAIgAigJItMBChJQcm9wZXJ0eUV4cHJlc3Npb24SPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJdChRhZ2dyZWdhdGlvbl9mdW5jdGlvbhgCIAEoDjI/LmFwcGhvc3Rpbm
cuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbi5BZ2dyZWdhdGlvbkZ1bmN0aW9uIiAKE0FnZ3JlZ2F0aW9uRnVuY3Rpb24SCQoFRklSU1QQASLJAQoNUHJvcGVydHlPcmRlchI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEk4KCWRpcmVjdGlvbhgCIAEoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXIuRGlyZWN0aW9uOglBU0NFTkRJTkciKgoJRGlyZWN0aW9uEg0KCUFTQ0VORElORxABEg4KCkRFU0NFTkRJTkcQAiKOAQoGRmlsdGVyEkIKEGNvbXBvc2l0ZV9maWx0ZXIYASABKAsyKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21wb3NpdGVGaWx0ZXISQAoPcHJvcGVydHlfZmlsdGVyGAIgASgLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlGaWx0ZXIinAEKD0NvbXBvc2l0ZUZpbHRlchJDCghvcGVyYXRvchgBIAIoDjIxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlci5PcGVyYXRvchIvCgZmaWx0ZXIYAiADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5GaWx0ZXIiEwoIT3BlcmF0b3ISBwoDQU5EEAEivgIKDlByb3BlcnR5RmlsdGVyEjwKCHByb3BlcnR5GAEgAigLMiouYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2USQgoIb3BlcmF0b3IYAiACKA4yMC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlci5PcGVyYXRvchItCgV2YWx1ZRgDIAIoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlInsKCE9wZXJhdG9yEg0KCUxFU1NfVEhBThABEhYKEkxFU1NfVEhBTl9PUl9FUVVBTBACEhAKDEdSRUFURVJfVEhBThADEhkKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBAEEgkKBUVRVUFMEAUSEAoMSEFTX0FOQ0VTVE9SEAsisAEKCEdxbFF1ZXJ5EhQKDHF1ZXJ5X3N0cmluZxgBIAIoCRIcCg1hbGxvd19saXRlcmFsGAIgASgIOgVmYWxzZRI2CghuYW1lX2FyZxgDIAMoCzIkLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnEjgKCm51bWJlcl9hcmcYBCADKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeUFyZyJaCgtHcWxRdWVyeUFyZxIMCgRuYW1lGAEgASgJEi0KBXZhbHVlGAIgASgLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUSDgoGY3Vyc29yGAMgASgMIvcCChBRdWVyeVJlc3VsdEJhdGNoEkwKEmVudGl0eV9yZXN1bHRfdHlwZRgBIAIoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdC5SZXN1bHRUeXBlEjwKDWVudGl0eV9yZXN1bHQYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSEgoKZW5kX2N1cnNvchgEIAEoDBJPCgxtb3JlX3Jlc3VsdHMYBSACKA4yOS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoLk1vcmVSZXN1bHRzVHlwZRIaCg9za2lwcGVkX3Jlc3VsdHMYBiABKAU6ATAiVgoPTW9yZVJlc3VsdHNUeXBlEhAKDE
5PVF9GSU5JU0hFRBABEhwKGE1PUkVfUkVTVUxUU19BRlRFUl9MSU1JVBACEhMKD05PX01PUkVfUkVTVUxUUxADIpMCCghNdXRhdGlvbhIvCgZ1cHNlcnQYASADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSLwoGdXBkYXRlGAIgAygLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Ei8KBmluc2VydBgDIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRI3Cg5pbnNlcnRfYXV0b19pZBgEIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRIsCgZkZWxldGUYBSADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSDQoFZm9yY2UYBiABKAgi4QEKDk11dGF0aW9uUmVzdWx0EhUKDWluZGV4X3VwZGF0ZXMYASACKAUSOAoSaW5zZXJ0X2F1dG9faWRfa2V5GAIgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5EhYKDnVwc2VydF92ZXJzaW9uGAMgAygDEhYKDnVwZGF0ZV92ZXJzaW9uGAQgAygDEhYKDmluc2VydF92ZXJzaW9uGAUgAygDEh4KFmluc2VydF9hdXRvX2lkX3ZlcnNpb24YBiADKAMSFgoOZGVsZXRlX3ZlcnNpb24YByADKAMitQEKC1JlYWRPcHRpb25zElcKEHJlYWRfY29uc2lzdGVuY3kYASABKA4yNC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucy5SZWFkQ29uc2lzdGVuY3k6B0RFRkFVTFQSEwoLdHJhbnNhY3Rpb24YAiABKAwiOAoPUmVhZENvbnNpc3RlbmN5EgsKB0RFRkFVTFQQABIKCgZTVFJPTkcQARIMCghFVkVOVFVBTBACInYKDUxvb2t1cFJlcXVlc3QSOgoMcmVhZF9vcHRpb25zGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnMSKQoDa2V5GAMgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5Iq4BCg5Mb29rdXBSZXNwb25zZRI0CgVmb3VuZBgBIAMoCzIlLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdBI2CgdtaXNzaW5nGAIgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0Ei4KCGRlZmVycmVkGAMgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IqsCCg9SdW5RdWVyeVJlcXVlc3QSOgoMcmVhZF9vcHRpb25zGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnMSOgoMcGFydGl0aW9uX2lkGAIgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQSLQoFcXVlcnkYAyABKAsyHi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeRI0CglncWxfcXVlcnkYByABKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeRIdChVtaW5fc2FmZV90aW1lX3NlY29uZHMYBCABKAMSHAoUc3VnZ2VzdGVkX2JhdGNoX3NpemUYBSABKAUiYgoQUnVuUXVlcnlSZXNwb25zZRI4CgViYXRjaBgBIAIoCzIpLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2gSFAoMcXVlcnlfaGFuZGxlGAIgASgMIiwKFENvbnRpbnVlUXVlcnlSZXF1ZXN0EhQKDHF1ZXJ5X2hhbmRsZRgBIAIoDC
JRChVDb250aW51ZVF1ZXJ5UmVzcG9uc2USOAoFYmF0Y2gYASACKAsyKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoIlMKF0JlZ2luVHJhbnNhY3Rpb25SZXF1ZXN0EhoKC2Nyb3NzX2dyb3VwGAEgASgIOgVmYWxzZRIcCg1jcm9zc19yZXF1ZXN0GAIgASgIOgVmYWxzZSIvChhCZWdpblRyYW5zYWN0aW9uUmVzcG9uc2USEwoLdHJhbnNhY3Rpb24YASACKAwiJgoPUm9sbGJhY2tSZXF1ZXN0EhMKC3RyYW5zYWN0aW9uGAEgAigMIhIKEFJvbGxiYWNrUmVzcG9uc2Ui1QEKDUNvbW1pdFJlcXVlc3QSEwoLdHJhbnNhY3Rpb24YASABKAwSMwoIbXV0YXRpb24YAiABKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbhJICgRtb2RlGAQgASgOMisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVxdWVzdC5Nb2RlOg1UUkFOU0FDVElPTkFMIjAKBE1vZGUSEQoNVFJBTlNBQ1RJT05BTBABEhUKEU5PTl9UUkFOU0FDVElPTkFMEAIiUgoOQ29tbWl0UmVzcG9uc2USQAoPbXV0YXRpb25fcmVzdWx0GAEgASgLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTXV0YXRpb25SZXN1bHQicwoSQWxsb2NhdGVJZHNSZXF1ZXN0Ei4KCGFsbG9jYXRlGAEgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5Ei0KB3Jlc2VydmUYAiADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkiRgoTQWxsb2NhdGVJZHNSZXNwb25zZRIvCglhbGxvY2F0ZWQYASADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkiQwoMV3JpdGVSZXF1ZXN0EjMKCG11dGF0aW9uGAEgAigLMiEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTXV0YXRpb24yogcKEkRhdGFzdG9yZVY0U2VydmljZRJ5ChBCZWdpblRyYW5zYWN0aW9uEjAuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQmVnaW5UcmFuc2FjdGlvblJlcXVlc3QaMS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5CZWdpblRyYW5zYWN0aW9uUmVzcG9uc2UiABJhCghSb2xsYmFjaxIoLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJvbGxiYWNrUmVxdWVzdBopLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJvbGxiYWNrUmVzcG9uc2UiABJbCgZDb21taXQSJi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXF1ZXN0GicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVzcG9uc2UiABJhCghSdW5RdWVyeRIoLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJ1blF1ZXJ5UmVxdWVzdBopLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJ1blF1ZXJ5UmVzcG9uc2UiABJwCg1Db250aW51ZVF1ZXJ5Ei0uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29udGludWVRdWVyeVJlcXVlc3QaLi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db250aW51ZVF1ZXJ5UmVzcG9uc2UiABJbCgZMb29rdXASJi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Mb29rdXBSZXF1ZXN0GicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTG9va3VwUmVzcG9uc2UiABJqCgtBbGxvY2F0ZUlkcxIrLmFwcG
hvc3RpbmcuZGF0YXN0b3JlLnY0LkFsbG9jYXRlSWRzUmVxdWVzdBosLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkFsbG9jYXRlSWRzUmVzcG9uc2UiABJYCgNHZXQSJi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Mb29rdXBSZXF1ZXN0GicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTG9va3VwUmVzcG9uc2UiABJZCgVXcml0ZRIlLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LldyaXRlUmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlIgBCIQofY29tLmdvb2dsZS5hcHBob3N0aW5nLmRhdGFzdG9yZQ=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9yc3oJRXJyb3JDb2RliwGSAQtCQURfUkVRVUVTVJgBAYwBiwGSARZDT05DVVJSRU5UX1RSQU5TQUNUSU9OmAECjAGLAZIBDklOVEVSTkFMX0VSUk9SmAEDjAGLAZIBCk5FRURfSU5ERViYAQSMAYsBkgEHVElNRU9VVJgBBYwBiwGSARFQRVJNSVNTSU9OX0RFTklFRJgBBowBiwGSAQ5CSUdUQUJMRV9FUlJPUpgBB4wBiwGSARxDT01NSVRURURfQlVUX1NUSUxMX0FQUExZSU5HmAEIjAGLAZIBE0NBUEFCSUxJVFlfRElTQUJMRUSYAQmMAYsBkgEVVFJZX0FMVEVSTkFURV9CQUNLRU5EmAEKjAGLAZIBEVNBRkVfVElNRV9UT09fT0xEmAELjAF0ugHPMgonYXBwaG9zdGluZy9kYXRhc3RvcmUvZGF0YXN0b3JlX3Y0LnByb3RvEhdhcHBob3N0aW5nLmRhdGFzdG9yZS52NBokYXBwaG9zdGluZy9kYXRhc3RvcmUvZW50aXR5X3Y0LnByb3RvIosCCgVFcnJvciKBAgoJRXJyb3JDb2RlEg8KC0JBRF9SRVFVRVNUEAESGgoWQ09OQ1VSUkVOVF9UUkFOU0FDVElPThACEhIKDklOVEVSTkFMX0VSUk9SEAMSDgoKTkVFRF9JTkRFWBAEEgsKB1RJTUVPVVQQBRIVChFQRVJNSVNTSU9OX0RFTklFRBAGEhIKDkJJR1RBQkxFX0VSUk9SEAcSIAocQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlORxAIEhcKE0NBUEFCSUxJVFlfRElTQUJMRUQQCRIZChVUUllfQUxURVJOQVRFX0JBQ0tFTkQQChIVChFTQUZFX1RJTUVfVE9PX09MRBALIoYBCgxFbnRpdHlSZXN1bHQSLwoGZW50aXR5GAEgAigLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Eg8KB3ZlcnNpb24YAiABKAMiNAoKUmVzdWx0VHlwZRIICgRGVUxMEAESDgoKUFJPSkVDVElPThACEgwKCEtFWV9PTkxZEAMi8QIKBVF1ZXJ5Ej8KCnByb2plY3Rpb24YAiADKAsyKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUV4cHJlc3Npb24SNQoEa2luZBgDIAMoCzInLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktpbmRFeHByZXNzaW9uEi8KBmZpbHRlchgEIAEoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlchI1CgVvcmRlchgFIAMoCzImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXISPAoIZ3JvdXBfYnkYBiADKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRIUCgxzdGFydF9jdXJzb3IYByABKAwSEgoKZW5kX2N1cnNvchgIIAEoDBIRCgZvZmZzZXQYCiABKAU6ATASDQoFbGltaXQYCyABKAUiHgoOS2luZEV4cHJlc3Npb24SDAoEbmFtZRgBIAIoCSIhChFQcm9wZXJ0eVJlZmVyZW5jZRIMCgRuYW1lGAIgAigJItMBChJQcm9wZXJ0eUV4cHJlc3Npb24SPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJdChRhZ2dyZWdhdGlvbl9mdW5jdGlvbhgCIAEoDjI/LmFwcGhvc3Rpbm
cuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbi5BZ2dyZWdhdGlvbkZ1bmN0aW9uIiAKE0FnZ3JlZ2F0aW9uRnVuY3Rpb24SCQoFRklSU1QQASLJAQoNUHJvcGVydHlPcmRlchI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEk4KCWRpcmVjdGlvbhgCIAEoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXIuRGlyZWN0aW9uOglBU0NFTkRJTkciKgoJRGlyZWN0aW9uEg0KCUFTQ0VORElORxABEg4KCkRFU0NFTkRJTkcQAiKOAQoGRmlsdGVyEkIKEGNvbXBvc2l0ZV9maWx0ZXIYASABKAsyKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21wb3NpdGVGaWx0ZXISQAoPcHJvcGVydHlfZmlsdGVyGAIgASgLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlGaWx0ZXIinAEKD0NvbXBvc2l0ZUZpbHRlchJDCghvcGVyYXRvchgBIAIoDjIxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlci5PcGVyYXRvchIvCgZmaWx0ZXIYAiADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5GaWx0ZXIiEwoIT3BlcmF0b3ISBwoDQU5EEAEivgIKDlByb3BlcnR5RmlsdGVyEjwKCHByb3BlcnR5GAEgAigLMiouYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2USQgoIb3BlcmF0b3IYAiACKA4yMC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlci5PcGVyYXRvchItCgV2YWx1ZRgDIAIoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlInsKCE9wZXJhdG9yEg0KCUxFU1NfVEhBThABEhYKEkxFU1NfVEhBTl9PUl9FUVVBTBACEhAKDEdSRUFURVJfVEhBThADEhkKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBAEEgkKBUVRVUFMEAUSEAoMSEFTX0FOQ0VTVE9SEAsisAEKCEdxbFF1ZXJ5EhQKDHF1ZXJ5X3N0cmluZxgBIAIoCRIcCg1hbGxvd19saXRlcmFsGAIgASgIOgVmYWxzZRI2CghuYW1lX2FyZxgDIAMoCzIkLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnEjgKCm51bWJlcl9hcmcYBCADKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeUFyZyJaCgtHcWxRdWVyeUFyZxIMCgRuYW1lGAEgASgJEi0KBXZhbHVlGAIgASgLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUSDgoGY3Vyc29yGAMgASgMIvcCChBRdWVyeVJlc3VsdEJhdGNoEkwKEmVudGl0eV9yZXN1bHRfdHlwZRgBIAIoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdC5SZXN1bHRUeXBlEjwKDWVudGl0eV9yZXN1bHQYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSEgoKZW5kX2N1cnNvchgEIAEoDBJPCgxtb3JlX3Jlc3VsdHMYBSACKA4yOS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoLk1vcmVSZXN1bHRzVHlwZRIaCg9za2lwcGVkX3Jlc3VsdHMYBiABKAU6ATAiVgoPTW9yZVJlc3VsdHNUeXBlEhAKDE
5PVF9GSU5JU0hFRBABEhwKGE1PUkVfUkVTVUxUU19BRlRFUl9MSU1JVBACEhMKD05PX01PUkVfUkVTVUxUUxADIvUBCghNdXRhdGlvbhI3CgJvcBgBIAIoDjIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uLk9wZXJhdGlvbhIpCgNrZXkYAiABKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSLwoGZW50aXR5GAMgASgLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5IlQKCU9wZXJhdGlvbhIKCgZJTlNFUlQQARIKCgZVUERBVEUQAhIKCgZVUFNFUlQQAxIKCgZERUxFVEUQBBIXChNJTlNFUlRfV0lUSF9BVVRPX0lEEGMiUwoOTXV0YXRpb25SZXN1bHQSKQoDa2V5GAMgASgLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5EhYKC25ld192ZXJzaW9uGAQgASgDOgEwIqQCChJEZXByZWNhdGVkTXV0YXRpb24SLwoGdXBzZXJ0GAEgAygLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Ei8KBnVwZGF0ZRgCIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRIvCgZpbnNlcnQYAyADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSNwoOaW5zZXJ0X2F1dG9faWQYBCADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSLAoGZGVsZXRlGAUgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5EhQKBWZvcmNlGAYgASgIOgVmYWxzZSLrAQoYRGVwcmVjYXRlZE11dGF0aW9uUmVzdWx0EhUKDWluZGV4X3VwZGF0ZXMYASACKAUSOAoSaW5zZXJ0X2F1dG9faWRfa2V5GAIgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5EhYKDnVwc2VydF92ZXJzaW9uGAMgAygDEhYKDnVwZGF0ZV92ZXJzaW9uGAQgAygDEhYKDmluc2VydF92ZXJzaW9uGAUgAygDEh4KFmluc2VydF9hdXRvX2lkX3ZlcnNpb24YBiADKAMSFgoOZGVsZXRlX3ZlcnNpb24YByADKAMitQEKC1JlYWRPcHRpb25zElcKEHJlYWRfY29uc2lzdGVuY3kYASABKA4yNC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucy5SZWFkQ29uc2lzdGVuY3k6B0RFRkFVTFQSEwoLdHJhbnNhY3Rpb24YAiABKAwiOAoPUmVhZENvbnNpc3RlbmN5EgsKB0RFRkFVTFQQABIKCgZTVFJPTkcQARIMCghFVkVOVFVBTBACInYKDUxvb2t1cFJlcXVlc3QSOgoMcmVhZF9vcHRpb25zGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnMSKQoDa2V5GAMgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5Iq4BCg5Mb29rdXBSZXNwb25zZRI0CgVmb3VuZBgBIAMoCzIlLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdBI2CgdtaXNzaW5nGAIgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0Ei4KCGRlZmVycmVkGAMgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IqsCCg9SdW5RdWVyeVJlcXVlc3QSOgoMcmVhZF9vcHRpb25zGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnMSOgoMcGFydG
l0aW9uX2lkGAIgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQSLQoFcXVlcnkYAyABKAsyHi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeRI0CglncWxfcXVlcnkYByABKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeRIdChVtaW5fc2FmZV90aW1lX3NlY29uZHMYBCABKAMSHAoUc3VnZ2VzdGVkX2JhdGNoX3NpemUYBSABKAUiYgoQUnVuUXVlcnlSZXNwb25zZRI4CgViYXRjaBgBIAIoCzIpLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2gSFAoMcXVlcnlfaGFuZGxlGAIgASgMIiwKFENvbnRpbnVlUXVlcnlSZXF1ZXN0EhQKDHF1ZXJ5X2hhbmRsZRgBIAIoDCJRChVDb250aW51ZVF1ZXJ5UmVzcG9uc2USOAoFYmF0Y2gYASACKAsyKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoIlMKF0JlZ2luVHJhbnNhY3Rpb25SZXF1ZXN0EhoKC2Nyb3NzX2dyb3VwGAEgASgIOgVmYWxzZRIcCg1jcm9zc19yZXF1ZXN0GAIgASgIOgVmYWxzZSIvChhCZWdpblRyYW5zYWN0aW9uUmVzcG9uc2USEwoLdHJhbnNhY3Rpb24YASACKAwiJgoPUm9sbGJhY2tSZXF1ZXN0EhMKC3RyYW5zYWN0aW9uGAEgAigMIhIKEFJvbGxiYWNrUmVzcG9uc2UiwAIKDUNvbW1pdFJlcXVlc3QSEwoLdHJhbnNhY3Rpb24YASABKAwSMwoIbXV0YXRpb24YBSADKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbhJIChNkZXByZWNhdGVkX211dGF0aW9uGAIgASgLMisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRGVwcmVjYXRlZE11dGF0aW9uEkgKBG1vZGUYBCABKA4yKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXF1ZXN0Lk1vZGU6DVRSQU5TQUNUSU9OQUwSHwoQaWdub3JlX3JlYWRfb25seRgGIAEoCDoFZmFsc2UiMAoETW9kZRIRCg1UUkFOU0FDVElPTkFMEAESFQoRTk9OX1RSQU5TQUNUSU9OQUwQAiLAAQoOQ29tbWl0UmVzcG9uc2USQAoPbXV0YXRpb25fcmVzdWx0GAMgAygLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTXV0YXRpb25SZXN1bHQSVQoaZGVwcmVjYXRlZF9tdXRhdGlvbl9yZXN1bHQYASABKAsyMS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5EZXByZWNhdGVkTXV0YXRpb25SZXN1bHQSFQoNaW5kZXhfdXBkYXRlcxgEIAEoBSJzChJBbGxvY2F0ZUlkc1JlcXVlc3QSLgoIYWxsb2NhdGUYASADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSLQoHcmVzZXJ2ZRgCIAMoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleSJGChNBbGxvY2F0ZUlkc1Jlc3BvbnNlEi8KCWFsbG9jYXRlZBgBIAMoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleSJYCgxXcml0ZVJlcXVlc3QSSAoTZGVwcmVjYXRlZF9tdXRhdGlvbhgBIAIoCzIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkRlcHJlY2F0ZWRNdXRhdGlvbjKiBwoSRGF0YXN0b3JlVjRTZXJ2aWNlEnkKEEJlZ2luVHJhbnNhY3Rpb24SMC5hcHBob3N0aW5nLmRhdGFzdG9yZS
52NC5CZWdpblRyYW5zYWN0aW9uUmVxdWVzdBoxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJlZ2luVHJhbnNhY3Rpb25SZXNwb25zZSIAEmEKCFJvbGxiYWNrEiguYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUm9sbGJhY2tSZXF1ZXN0GikuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUm9sbGJhY2tSZXNwb25zZSIAElsKBkNvbW1pdBImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlcXVlc3QaJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXNwb25zZSIAEmEKCFJ1blF1ZXJ5EiguYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUnVuUXVlcnlSZXF1ZXN0GikuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUnVuUXVlcnlSZXNwb25zZSIAEnAKDUNvbnRpbnVlUXVlcnkSLS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db250aW51ZVF1ZXJ5UmVxdWVzdBouLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbnRpbnVlUXVlcnlSZXNwb25zZSIAElsKBkxvb2t1cBImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlcXVlc3QaJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Mb29rdXBSZXNwb25zZSIAEmoKC0FsbG9jYXRlSWRzEisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQWxsb2NhdGVJZHNSZXF1ZXN0GiwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQWxsb2NhdGVJZHNSZXNwb25zZSIAElgKA0dldBImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlcXVlc3QaJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Mb29rdXBSZXNwb25zZSIAElkKBVdyaXRlEiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuV3JpdGVSZXF1ZXN0GicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVzcG9uc2UiAEIhCh9jb20uZ29vZ2xlLmFwcGhvc3RpbmcuZGF0YXN0b3Jl"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2916,6 +2916,435 @@
         _SERIALIZED_DESCRIPTOR.tostring())
 
 class Mutation(ProtocolBuffer.ProtocolMessage):
+
+
+  INSERT       =    1
+  UPDATE       =    2
+  UPSERT       =    3
+  DELETE       =    4
+  INSERT_WITH_AUTO_ID =   99
+
+  _Operation_NAMES = {
+    1: "INSERT",
+    2: "UPDATE",
+    3: "UPSERT",
+    4: "DELETE",
+    99: "INSERT_WITH_AUTO_ID",
+  }
+
+  def Operation_Name(cls, x): return cls._Operation_NAMES.get(x, "")
+  Operation_Name = classmethod(Operation_Name)
+
+  has_op_ = 0
+  op_ = 0
+  has_key_ = 0
+  key_ = None
+  has_entity_ = 0
+  entity_ = None
+
+  def __init__(self, contents=None):
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def op(self): return self.op_
+
+  def set_op(self, x):
+    self.has_op_ = 1
+    self.op_ = x
+
+  def clear_op(self):
+    if self.has_op_:
+      self.has_op_ = 0
+      self.op_ = 0
+
+  def has_op(self): return self.has_op_
+
+  def key(self):
+    if self.key_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.key_ is None: self.key_ = google.appengine.datastore.entity_v4_pb.Key()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.key_
+
+  def mutable_key(self): self.has_key_ = 1; return self.key()
+
+  def clear_key(self):
+
+    if self.has_key_:
+      self.has_key_ = 0;
+      if self.key_ is not None: self.key_.Clear()
+
+  def has_key(self): return self.has_key_
+
+  def entity(self):
+    if self.entity_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.entity_ is None: self.entity_ = google.appengine.datastore.entity_v4_pb.Entity()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.entity_
+
+  def mutable_entity(self): self.has_entity_ = 1; return self.entity()
+
+  def clear_entity(self):
+
+    if self.has_entity_:
+      self.has_entity_ = 0;
+      if self.entity_ is not None: self.entity_.Clear()
+
+  def has_entity(self): return self.has_entity_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_op()): self.set_op(x.op())
+    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
+    if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity())
+
+  if _net_proto___parse__python is not None:
+    def _CMergeFromString(self, s):
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Mutation', s)
+
+  if _net_proto___parse__python is not None:
+    def _CEncode(self):
+      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Mutation')
+
+  if _net_proto___parse__python is not None:
+    def _CEncodePartial(self):
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Mutation')
+
+  if _net_proto___parse__python is not None:
+    def _CToASCII(self, output_format):
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Mutation', output_format)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCII(self, s):
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Mutation', s)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCIIIgnoreUnknown(self, s):
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Mutation', s)
+
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_op_ != x.has_op_: return 0
+    if self.has_op_ and self.op_ != x.op_: return 0
+    if self.has_key_ != x.has_key_: return 0
+    if self.has_key_ and self.key_ != x.key_: return 0
+    if self.has_entity_ != x.has_entity_: return 0
+    if self.has_entity_ and self.entity_ != x.entity_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_op_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: op not set.')
+    if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthVarInt64(self.op_)
+    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
+    if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
+    return n + 1
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_op_):
+      n += 1
+      n += self.lengthVarInt64(self.op_)
+    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
+    if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_op()
+    self.clear_key()
+    self.clear_entity()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(8)
+    out.putVarInt32(self.op_)
+    if (self.has_key_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.key_.ByteSize())
+      self.key_.OutputUnchecked(out)
+    if (self.has_entity_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.entity_.ByteSize())
+      self.entity_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_op_):
+      out.putVarInt32(8)
+      out.putVarInt32(self.op_)
+    if (self.has_key_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.key_.ByteSizePartial())
+      self.key_.OutputPartial(out)
+    if (self.has_entity_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.entity_.ByteSizePartial())
+      self.entity_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_op(d.getVarInt32())
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_key().TryMerge(tmp)
+        continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_entity().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_))
+    if self.has_key_:
+      res+=prefix+"key <\n"
+      res+=self.key_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_entity_:
+      res+=prefix+"entity <\n"
+      res+=self.entity_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kop = 1
+  kkey = 2
+  kentity = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "op",
+    2: "key",
+    3: "entity",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Mutation'
+  _SERIALIZED_DESCRIPTOR = array.array('B')
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uExoCb3AgASgAMAU4AmgAFBMaA2tleSACKAIwCzgBShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoGZW50aXR5IAMoAjALOAFKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARRzeglPcGVyYXRpb26LAZIBBklOU0VSVJgBAYwBiwGSAQZVUERBVEWYAQKMAYsBkgEGVVBTRVJUmAEDjAGLAZIBBkRFTEVURZgBBIwBiwGSARNJTlNFUlRfV0lUSF9BVVRPX0lEmAFjjAF0wgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  if _net_proto___parse__python is not None:
+    _net_proto___parse__python.RegisterType(
+        _SERIALIZED_DESCRIPTOR.tostring())
+
+class MutationResult(ProtocolBuffer.ProtocolMessage):
+  has_key_ = 0
+  key_ = None
+  has_new_version_ = 0
+  new_version_ = 0
+
+  def __init__(self, contents=None):
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def key(self):
+    if self.key_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.key_ is None: self.key_ = google.appengine.datastore.entity_v4_pb.Key()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.key_
+
+  def mutable_key(self): self.has_key_ = 1; return self.key()
+
+  def clear_key(self):
+
+    if self.has_key_:
+      self.has_key_ = 0;
+      if self.key_ is not None: self.key_.Clear()
+
+  def has_key(self): return self.has_key_
+
+  def new_version(self): return self.new_version_
+
+  def set_new_version(self, x):
+    self.has_new_version_ = 1
+    self.new_version_ = x
+
+  def clear_new_version(self):
+    if self.has_new_version_:
+      self.has_new_version_ = 0
+      self.new_version_ = 0
+
+  def has_new_version(self): return self.has_new_version_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
+    if (x.has_new_version()): self.set_new_version(x.new_version())
+
+  if _net_proto___parse__python is not None:
+    def _CMergeFromString(self, s):
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.MutationResult', s)
+
+  if _net_proto___parse__python is not None:
+    def _CEncode(self):
+      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.MutationResult')
+
+  if _net_proto___parse__python is not None:
+    def _CEncodePartial(self):
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.MutationResult')
+
+  if _net_proto___parse__python is not None:
+    def _CToASCII(self, output_format):
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.MutationResult', output_format)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCII(self, s):
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.MutationResult', s)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCIIIgnoreUnknown(self, s):
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.MutationResult', s)
+
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_key_ != x.has_key_: return 0
+    if self.has_key_ and self.key_ != x.key_: return 0
+    if self.has_new_version_ != x.has_new_version_: return 0
+    if self.has_new_version_ and self.new_version_ != x.new_version_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
+    if (self.has_new_version_): n += 1 + self.lengthVarInt64(self.new_version_)
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
+    if (self.has_new_version_): n += 1 + self.lengthVarInt64(self.new_version_)
+    return n
+
+  def Clear(self):
+    self.clear_key()
+    self.clear_new_version()
+
+  def OutputUnchecked(self, out):
+    if (self.has_key_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.key_.ByteSize())
+      self.key_.OutputUnchecked(out)
+    if (self.has_new_version_):
+      out.putVarInt32(32)
+      out.putVarInt64(self.new_version_)
+
+  def OutputPartial(self, out):
+    if (self.has_key_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.key_.ByteSizePartial())
+      self.key_.OutputPartial(out)
+    if (self.has_new_version_):
+      out.putVarInt32(32)
+      out.putVarInt64(self.new_version_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_key().TryMerge(tmp)
+        continue
+      if tt == 32:
+        self.set_new_version(d.getVarInt64())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_key_:
+      res+=prefix+"key <\n"
+      res+=self.key_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_new_version_: res+=prefix+("new_version: %s\n" % self.DebugFormatInt64(self.new_version_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kkey = 3
+  knew_version = 4
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    3: "key",
+    4: "new_version",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.MutationResult'
+  _SERIALIZED_DESCRIPTOR = array.array('B')
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0ExoDa2V5IAMoAjALOAFKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgtuZXdfdmVyc2lvbiAEKAAwAzgBQgEwowGqAQdkZWZhdWx0sgEBMKQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  if _net_proto___parse__python is not None:
+    _net_proto___parse__python.RegisterType(
+        _SERIALIZED_DESCRIPTOR.tostring())
+
+class DeprecatedMutation(ProtocolBuffer.ProtocolMessage):
   has_force_ = 0
   force_ = 0
 
@@ -3032,29 +3461,29 @@
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
-      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.Mutation', s)
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.DeprecatedMutation', s)
 
   if _net_proto___parse__python is not None:
     def _CEncode(self):
-      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.Mutation')
+      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.DeprecatedMutation')
 
   if _net_proto___parse__python is not None:
     def _CEncodePartial(self):
-      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.Mutation')
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.DeprecatedMutation')
 
   if _net_proto___parse__python is not None:
     def _CToASCII(self, output_format):
-      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.Mutation', output_format)
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.DeprecatedMutation', output_format)
 
 
   if _net_proto___parse__python is not None:
     def ParseASCII(self, s):
-      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.Mutation', s)
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.DeprecatedMutation', s)
 
 
   if _net_proto___parse__python is not None:
     def ParseASCIIIgnoreUnknown(self, s):
-      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.Mutation', s)
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.DeprecatedMutation', s)
 
 
   def Equals(self, x):
@@ -3301,14 +3730,14 @@
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Mutation'
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.DeprecatedMutation'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uExoGdXBzZXJ0IAEoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgZ1cGRhdGUgAigCMAs4A0oeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBmluc2VydCADKAIwCzgDSh5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoOaW5zZXJ0X2F1dG9faWQgBCgCMAs4A0oeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBmRlbGV0ZSAFKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoFZm9yY2UgBigAMAg4ARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkRlcHJlY2F0ZWRNdXRhdGlvbhMaBnVwc2VydCABKAIwCzgDSh5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoGdXBkYXRlIAIoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgZpbnNlcnQgAygCMAs4A0oeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaDmluc2VydF9hdXRvX2lkIAQoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgZkZWxldGUgBSgCMAs4A0obYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBWZvcmNlIAYoADAIOAFCBWZhbHNlowGqAQdkZWZhdWx0sgEFZmFsc2WkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
 
-class MutationResult(ProtocolBuffer.ProtocolMessage):
+class DeprecatedMutationResult(ProtocolBuffer.ProtocolMessage):
   has_index_updates_ = 0
   index_updates_ = 0
 
@@ -3438,29 +3867,29 @@
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
-      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.MutationResult', s)
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.DeprecatedMutationResult', s)
 
   if _net_proto___parse__python is not None:
     def _CEncode(self):
-      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.MutationResult')
+      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.DeprecatedMutationResult')
 
   if _net_proto___parse__python is not None:
     def _CEncodePartial(self):
-      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.MutationResult')
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.DeprecatedMutationResult')
 
   if _net_proto___parse__python is not None:
     def _CToASCII(self, output_format):
-      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.MutationResult', output_format)
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.DeprecatedMutationResult', output_format)
 
 
   if _net_proto___parse__python is not None:
     def ParseASCII(self, s):
-      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.MutationResult', s)
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.DeprecatedMutationResult', s)
 
 
   if _net_proto___parse__python is not None:
     def ParseASCIIIgnoreUnknown(self, s):
-      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.MutationResult', s)
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.DeprecatedMutationResult', s)
 
 
   def Equals(self, x):
@@ -3702,9 +4131,9 @@
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.MutationResult'
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.DeprecatedMutationResult'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0ExoNaW5kZXhfdXBkYXRlcyABKAAwBTgCFBMaEmluc2VydF9hdXRvX2lkX2tleSACKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoOdXBzZXJ0X3ZlcnNpb24gAygAMAM4AxQTGg51cGRhdGVfdmVyc2lvbiAEKAAwAzgDFBMaDmluc2VydF92ZXJzaW9uIAUoADADOAMUExoWaW5zZXJ0X2F1dG9faWRfdmVyc2lvbiAGKAAwAzgDFBMaDmRlbGV0ZV92ZXJzaW9uIAcoADADOAMUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KMGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkRlcHJlY2F0ZWRNdXRhdGlvblJlc3VsdBMaDWluZGV4X3VwZGF0ZXMgASgAMAU4AhQTGhJpbnNlcnRfYXV0b19pZF9rZXkgAigCMAs4A0obYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaDnVwc2VydF92ZXJzaW9uIAMoADADOAMUExoOdXBkYXRlX3ZlcnNpb24gBCgAMAM4AxQTGg5pbnNlcnRfdmVyc2lvbiAFKAAwAzgDFBMaFmluc2VydF9hdXRvX2lkX3ZlcnNpb24gBigAMAM4AxQTGg5kZWxldGVfdmVyc2lvbiAHKAAwAzgDFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -5678,12 +6107,15 @@
 
   has_transaction_ = 0
   transaction_ = ""
-  has_mutation_ = 0
-  mutation_ = None
+  has_deprecated_mutation_ = 0
+  deprecated_mutation_ = None
   has_mode_ = 0
   mode_ = 1
+  has_ignore_read_only_ = 0
+  ignore_read_only_ = 0
 
   def __init__(self, contents=None):
+    self.mutation_ = []
     self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
@@ -5700,24 +6132,40 @@
 
   def has_transaction(self): return self.has_transaction_
 
-  def mutation(self):
-    if self.mutation_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.mutation_ is None: self.mutation_ = Mutation()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.mutation_
+  def mutation_size(self): return len(self.mutation_)
+  def mutation_list(self): return self.mutation_
 
-  def mutable_mutation(self): self.has_mutation_ = 1; return self.mutation()
+  def mutation(self, i):
+    return self.mutation_[i]
+
+  def mutable_mutation(self, i):
+    return self.mutation_[i]
+
+  def add_mutation(self):
+    x = Mutation()
+    self.mutation_.append(x)
+    return x
 
   def clear_mutation(self):
+    self.mutation_ = []
+  def deprecated_mutation(self):
+    if self.deprecated_mutation_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.deprecated_mutation_ is None: self.deprecated_mutation_ = DeprecatedMutation()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.deprecated_mutation_
 
-    if self.has_mutation_:
-      self.has_mutation_ = 0;
-      if self.mutation_ is not None: self.mutation_.Clear()
+  def mutable_deprecated_mutation(self): self.has_deprecated_mutation_ = 1; return self.deprecated_mutation()
 
-  def has_mutation(self): return self.has_mutation_
+  def clear_deprecated_mutation(self):
+
+    if self.has_deprecated_mutation_:
+      self.has_deprecated_mutation_ = 0;
+      if self.deprecated_mutation_ is not None: self.deprecated_mutation_.Clear()
+
+  def has_deprecated_mutation(self): return self.has_deprecated_mutation_
 
   def mode(self): return self.mode_
 
@@ -5732,12 +6180,27 @@
 
   def has_mode(self): return self.has_mode_
 
+  def ignore_read_only(self): return self.ignore_read_only_
+
+  def set_ignore_read_only(self, x):
+    self.has_ignore_read_only_ = 1
+    self.ignore_read_only_ = x
+
+  def clear_ignore_read_only(self):
+    if self.has_ignore_read_only_:
+      self.has_ignore_read_only_ = 0
+      self.ignore_read_only_ = 0
+
+  def has_ignore_read_only(self): return self.has_ignore_read_only_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_transaction()): self.set_transaction(x.transaction())
-    if (x.has_mutation()): self.mutable_mutation().MergeFrom(x.mutation())
+    for i in xrange(x.mutation_size()): self.add_mutation().CopyFrom(x.mutation(i))
+    if (x.has_deprecated_mutation()): self.mutable_deprecated_mutation().MergeFrom(x.deprecated_mutation())
     if (x.has_mode()): self.set_mode(x.mode())
+    if (x.has_ignore_read_only()): self.set_ignore_read_only(x.ignore_read_only())
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
@@ -5770,59 +6233,88 @@
     if x is self: return 1
     if self.has_transaction_ != x.has_transaction_: return 0
     if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
-    if self.has_mutation_ != x.has_mutation_: return 0
-    if self.has_mutation_ and self.mutation_ != x.mutation_: return 0
+    if len(self.mutation_) != len(x.mutation_): return 0
+    for e1, e2 in zip(self.mutation_, x.mutation_):
+      if e1 != e2: return 0
+    if self.has_deprecated_mutation_ != x.has_deprecated_mutation_: return 0
+    if self.has_deprecated_mutation_ and self.deprecated_mutation_ != x.deprecated_mutation_: return 0
     if self.has_mode_ != x.has_mode_: return 0
     if self.has_mode_ and self.mode_ != x.mode_: return 0
+    if self.has_ignore_read_only_ != x.has_ignore_read_only_: return 0
+    if self.has_ignore_read_only_ and self.ignore_read_only_ != x.ignore_read_only_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
     initialized = 1
-    if (self.has_mutation_ and not self.mutation_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.mutation_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_deprecated_mutation_ and not self.deprecated_mutation_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
     if (self.has_transaction_): n += 1 + self.lengthString(len(self.transaction_))
-    if (self.has_mutation_): n += 1 + self.lengthString(self.mutation_.ByteSize())
+    n += 1 * len(self.mutation_)
+    for i in xrange(len(self.mutation_)): n += self.lengthString(self.mutation_[i].ByteSize())
+    if (self.has_deprecated_mutation_): n += 1 + self.lengthString(self.deprecated_mutation_.ByteSize())
     if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
+    if (self.has_ignore_read_only_): n += 2
     return n
 
   def ByteSizePartial(self):
     n = 0
     if (self.has_transaction_): n += 1 + self.lengthString(len(self.transaction_))
-    if (self.has_mutation_): n += 1 + self.lengthString(self.mutation_.ByteSizePartial())
+    n += 1 * len(self.mutation_)
+    for i in xrange(len(self.mutation_)): n += self.lengthString(self.mutation_[i].ByteSizePartial())
+    if (self.has_deprecated_mutation_): n += 1 + self.lengthString(self.deprecated_mutation_.ByteSizePartial())
     if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
+    if (self.has_ignore_read_only_): n += 2
     return n
 
   def Clear(self):
     self.clear_transaction()
     self.clear_mutation()
+    self.clear_deprecated_mutation()
     self.clear_mode()
+    self.clear_ignore_read_only()
 
   def OutputUnchecked(self, out):
     if (self.has_transaction_):
       out.putVarInt32(10)
       out.putPrefixedString(self.transaction_)
-    if (self.has_mutation_):
+    if (self.has_deprecated_mutation_):
       out.putVarInt32(18)
-      out.putVarInt32(self.mutation_.ByteSize())
-      self.mutation_.OutputUnchecked(out)
+      out.putVarInt32(self.deprecated_mutation_.ByteSize())
+      self.deprecated_mutation_.OutputUnchecked(out)
     if (self.has_mode_):
       out.putVarInt32(32)
       out.putVarInt32(self.mode_)
+    for i in xrange(len(self.mutation_)):
+      out.putVarInt32(42)
+      out.putVarInt32(self.mutation_[i].ByteSize())
+      self.mutation_[i].OutputUnchecked(out)
+    if (self.has_ignore_read_only_):
+      out.putVarInt32(48)
+      out.putBoolean(self.ignore_read_only_)
 
   def OutputPartial(self, out):
     if (self.has_transaction_):
       out.putVarInt32(10)
       out.putPrefixedString(self.transaction_)
-    if (self.has_mutation_):
+    if (self.has_deprecated_mutation_):
       out.putVarInt32(18)
-      out.putVarInt32(self.mutation_.ByteSizePartial())
-      self.mutation_.OutputPartial(out)
+      out.putVarInt32(self.deprecated_mutation_.ByteSizePartial())
+      self.deprecated_mutation_.OutputPartial(out)
     if (self.has_mode_):
       out.putVarInt32(32)
       out.putVarInt32(self.mode_)
+    for i in xrange(len(self.mutation_)):
+      out.putVarInt32(42)
+      out.putVarInt32(self.mutation_[i].ByteSizePartial())
+      self.mutation_[i].OutputPartial(out)
+    if (self.has_ignore_read_only_):
+      out.putVarInt32(48)
+      out.putBoolean(self.ignore_read_only_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -5834,11 +6326,20 @@
         length = d.getVarInt32()
         tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
         d.skip(length)
-        self.mutable_mutation().TryMerge(tmp)
+        self.mutable_deprecated_mutation().TryMerge(tmp)
         continue
       if tt == 32:
         self.set_mode(d.getVarInt32())
         continue
+      if tt == 42:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_mutation().TryMerge(tmp)
+        continue
+      if tt == 48:
+        self.set_ignore_read_only(d.getBoolean())
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -5848,11 +6349,20 @@
   def __str__(self, prefix="", printElemNumber=0):
     res=""
     if self.has_transaction_: res+=prefix+("transaction: %s\n" % self.DebugFormatString(self.transaction_))
-    if self.has_mutation_:
-      res+=prefix+"mutation <\n"
-      res+=self.mutation_.__str__(prefix + "  ", printElemNumber)
+    cnt=0
+    for e in self.mutation_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("mutation%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
+    if self.has_deprecated_mutation_:
+      res+=prefix+"deprecated_mutation <\n"
+      res+=self.deprecated_mutation_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
     if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
+    if self.has_ignore_read_only_: res+=prefix+("ignore_read_only: %s\n" % self.DebugFormatBool(self.ignore_read_only_))
     return res
 
 
@@ -5860,64 +6370,104 @@
     return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
 
   ktransaction = 1
-  kmutation = 2
+  kmutation = 5
+  kdeprecated_mutation = 2
   kmode = 4
+  kignore_read_only = 6
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
     1: "transaction",
-    2: "mutation",
+    2: "deprecated_mutation",
     4: "mode",
-  }, 4)
+    5: "mutation",
+    6: "ignore_read_only",
+  }, 6)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
     1: ProtocolBuffer.Encoder.STRING,
     2: ProtocolBuffer.Encoder.STRING,
     4: ProtocolBuffer.Encoder.NUMERIC,
-  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.CommitRequest'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlcXVlc3QTGgt0cmFuc2FjdGlvbiABKAIwCTgBFBMaCG11dGF0aW9uIAIoAjALOAFKIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBG1vZGUgBCgAMAU4AUIBMWgAowGqAQdkZWZhdWx0sgENVFJBTlNBQ1RJT05BTKQBFHN6BE1vZGWLAZIBDVRSQU5TQUNUSU9OQUyYAQGMAYsBkgERTk9OX1RSQU5TQUNUSU9OQUyYAQKMAXTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlcXVlc3QTGgt0cmFuc2FjdGlvbiABKAIwCTgBFBMaCG11dGF0aW9uIAUoAjALOANKIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaE2RlcHJlY2F0ZWRfbXV0YXRpb24gAigCMAs4AUoqYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRGVwcmVjYXRlZE11dGF0aW9uowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBG1vZGUgBCgAMAU4AUIBMWgAowGqAQdkZWZhdWx0sgENVFJBTlNBQ1RJT05BTKQBFBMaEGlnbm9yZV9yZWFkX29ubHkgBigAMAg4AUIFZmFsc2WjAaoBB2RlZmF1bHSyAQVmYWxzZaQBFHN6BE1vZGWLAZIBDVRSQU5TQUNUSU9OQUyYAQGMAYsBkgERTk9OX1RSQU5TQUNUSU9OQUyYAQKMAXTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
 
 class CommitResponse(ProtocolBuffer.ProtocolMessage):
-  has_mutation_result_ = 0
-  mutation_result_ = None
+  has_deprecated_mutation_result_ = 0
+  deprecated_mutation_result_ = None
+  has_index_updates_ = 0
+  index_updates_ = 0
 
   def __init__(self, contents=None):
+    self.mutation_result_ = []
     self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
-  def mutation_result(self):
-    if self.mutation_result_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.mutation_result_ is None: self.mutation_result_ = MutationResult()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.mutation_result_
+  def mutation_result_size(self): return len(self.mutation_result_)
+  def mutation_result_list(self): return self.mutation_result_
 
-  def mutable_mutation_result(self): self.has_mutation_result_ = 1; return self.mutation_result()
+  def mutation_result(self, i):
+    return self.mutation_result_[i]
+
+  def mutable_mutation_result(self, i):
+    return self.mutation_result_[i]
+
+  def add_mutation_result(self):
+    x = MutationResult()
+    self.mutation_result_.append(x)
+    return x
 
   def clear_mutation_result(self):
+    self.mutation_result_ = []
+  def deprecated_mutation_result(self):
+    if self.deprecated_mutation_result_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.deprecated_mutation_result_ is None: self.deprecated_mutation_result_ = DeprecatedMutationResult()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.deprecated_mutation_result_
 
-    if self.has_mutation_result_:
-      self.has_mutation_result_ = 0;
-      if self.mutation_result_ is not None: self.mutation_result_.Clear()
+  def mutable_deprecated_mutation_result(self): self.has_deprecated_mutation_result_ = 1; return self.deprecated_mutation_result()
 
-  def has_mutation_result(self): return self.has_mutation_result_
+  def clear_deprecated_mutation_result(self):
+
+    if self.has_deprecated_mutation_result_:
+      self.has_deprecated_mutation_result_ = 0;
+      if self.deprecated_mutation_result_ is not None: self.deprecated_mutation_result_.Clear()
+
+  def has_deprecated_mutation_result(self): return self.has_deprecated_mutation_result_
+
+  def index_updates(self): return self.index_updates_
+
+  def set_index_updates(self, x):
+    self.has_index_updates_ = 1
+    self.index_updates_ = x
+
+  def clear_index_updates(self):
+    if self.has_index_updates_:
+      self.has_index_updates_ = 0
+      self.index_updates_ = 0
+
+  def has_index_updates(self): return self.has_index_updates_
 
 
   def MergeFrom(self, x):
     assert x is not self
-    if (x.has_mutation_result()): self.mutable_mutation_result().MergeFrom(x.mutation_result())
+    for i in xrange(x.mutation_result_size()): self.add_mutation_result().CopyFrom(x.mutation_result(i))
+    if (x.has_deprecated_mutation_result()): self.mutable_deprecated_mutation_result().MergeFrom(x.deprecated_mutation_result())
+    if (x.has_index_updates()): self.set_index_updates(x.index_updates())
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
@@ -5948,39 +6498,68 @@
 
   def Equals(self, x):
     if x is self: return 1
-    if self.has_mutation_result_ != x.has_mutation_result_: return 0
-    if self.has_mutation_result_ and self.mutation_result_ != x.mutation_result_: return 0
+    if len(self.mutation_result_) != len(x.mutation_result_): return 0
+    for e1, e2 in zip(self.mutation_result_, x.mutation_result_):
+      if e1 != e2: return 0
+    if self.has_deprecated_mutation_result_ != x.has_deprecated_mutation_result_: return 0
+    if self.has_deprecated_mutation_result_ and self.deprecated_mutation_result_ != x.deprecated_mutation_result_: return 0
+    if self.has_index_updates_ != x.has_index_updates_: return 0
+    if self.has_index_updates_ and self.index_updates_ != x.index_updates_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
     initialized = 1
-    if (self.has_mutation_result_ and not self.mutation_result_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.mutation_result_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_deprecated_mutation_result_ and not self.deprecated_mutation_result_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
-    if (self.has_mutation_result_): n += 1 + self.lengthString(self.mutation_result_.ByteSize())
+    n += 1 * len(self.mutation_result_)
+    for i in xrange(len(self.mutation_result_)): n += self.lengthString(self.mutation_result_[i].ByteSize())
+    if (self.has_deprecated_mutation_result_): n += 1 + self.lengthString(self.deprecated_mutation_result_.ByteSize())
+    if (self.has_index_updates_): n += 1 + self.lengthVarInt64(self.index_updates_)
     return n
 
   def ByteSizePartial(self):
     n = 0
-    if (self.has_mutation_result_): n += 1 + self.lengthString(self.mutation_result_.ByteSizePartial())
+    n += 1 * len(self.mutation_result_)
+    for i in xrange(len(self.mutation_result_)): n += self.lengthString(self.mutation_result_[i].ByteSizePartial())
+    if (self.has_deprecated_mutation_result_): n += 1 + self.lengthString(self.deprecated_mutation_result_.ByteSizePartial())
+    if (self.has_index_updates_): n += 1 + self.lengthVarInt64(self.index_updates_)
     return n
 
   def Clear(self):
     self.clear_mutation_result()
+    self.clear_deprecated_mutation_result()
+    self.clear_index_updates()
 
   def OutputUnchecked(self, out):
-    if (self.has_mutation_result_):
+    if (self.has_deprecated_mutation_result_):
       out.putVarInt32(10)
-      out.putVarInt32(self.mutation_result_.ByteSize())
-      self.mutation_result_.OutputUnchecked(out)
+      out.putVarInt32(self.deprecated_mutation_result_.ByteSize())
+      self.deprecated_mutation_result_.OutputUnchecked(out)
+    for i in xrange(len(self.mutation_result_)):
+      out.putVarInt32(26)
+      out.putVarInt32(self.mutation_result_[i].ByteSize())
+      self.mutation_result_[i].OutputUnchecked(out)
+    if (self.has_index_updates_):
+      out.putVarInt32(32)
+      out.putVarInt32(self.index_updates_)
 
   def OutputPartial(self, out):
-    if (self.has_mutation_result_):
+    if (self.has_deprecated_mutation_result_):
       out.putVarInt32(10)
-      out.putVarInt32(self.mutation_result_.ByteSizePartial())
-      self.mutation_result_.OutputPartial(out)
+      out.putVarInt32(self.deprecated_mutation_result_.ByteSizePartial())
+      self.deprecated_mutation_result_.OutputPartial(out)
+    for i in xrange(len(self.mutation_result_)):
+      out.putVarInt32(26)
+      out.putVarInt32(self.mutation_result_[i].ByteSizePartial())
+      self.mutation_result_[i].OutputPartial(out)
+    if (self.has_index_updates_):
+      out.putVarInt32(32)
+      out.putVarInt32(self.index_updates_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -5989,7 +6568,16 @@
         length = d.getVarInt32()
         tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
         d.skip(length)
-        self.mutable_mutation_result().TryMerge(tmp)
+        self.mutable_deprecated_mutation_result().TryMerge(tmp)
+        continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_mutation_result().TryMerge(tmp)
+        continue
+      if tt == 32:
+        self.set_index_updates(d.getVarInt32())
         continue
 
 
@@ -5999,34 +6587,49 @@
 
   def __str__(self, prefix="", printElemNumber=0):
     res=""
-    if self.has_mutation_result_:
-      res+=prefix+"mutation_result <\n"
-      res+=self.mutation_result_.__str__(prefix + "  ", printElemNumber)
+    cnt=0
+    for e in self.mutation_result_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("mutation_result%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+      cnt+=1
+    if self.has_deprecated_mutation_result_:
+      res+=prefix+"deprecated_mutation_result <\n"
+      res+=self.deprecated_mutation_result_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_index_updates_: res+=prefix+("index_updates: %s\n" % self.DebugFormatInt32(self.index_updates_))
     return res
 
 
   def _BuildTagLookupTable(sparse, maxtag, default=None):
     return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
 
-  kmutation_result = 1
+  kmutation_result = 3
+  kdeprecated_mutation_result = 1
+  kindex_updates = 4
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
-    1: "mutation_result",
-  }, 1)
+    1: "deprecated_mutation_result",
+    3: "mutation_result",
+    4: "index_updates",
+  }, 4)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
     1: ProtocolBuffer.Encoder.STRING,
-  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.CommitResponse'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlExoPbXV0YXRpb25fcmVzdWx0IAEoAjALOAFKJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0owGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlExoPbXV0YXRpb25fcmVzdWx0IAMoAjALOANKJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaGmRlcHJlY2F0ZWRfbXV0YXRpb25fcmVzdWx0IAEoAjALOAFKMGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkRlcHJlY2F0ZWRNdXRhdGlvblJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGg1pbmRleF91cGRhdGVzIAQoADAFOAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -6379,24 +6982,24 @@
         _SERIALIZED_DESCRIPTOR.tostring())
 
 class WriteRequest(ProtocolBuffer.ProtocolMessage):
-  has_mutation_ = 0
+  has_deprecated_mutation_ = 0
 
   def __init__(self, contents=None):
-    self.mutation_ = Mutation()
+    self.deprecated_mutation_ = DeprecatedMutation()
     if contents is not None: self.MergeFromString(contents)
 
-  def mutation(self): return self.mutation_
+  def deprecated_mutation(self): return self.deprecated_mutation_
 
-  def mutable_mutation(self): self.has_mutation_ = 1; return self.mutation_
+  def mutable_deprecated_mutation(self): self.has_deprecated_mutation_ = 1; return self.deprecated_mutation_
 
-  def clear_mutation(self):self.has_mutation_ = 0; self.mutation_.Clear()
+  def clear_deprecated_mutation(self):self.has_deprecated_mutation_ = 0; self.deprecated_mutation_.Clear()
 
-  def has_mutation(self): return self.has_mutation_
+  def has_deprecated_mutation(self): return self.has_deprecated_mutation_
 
 
   def MergeFrom(self, x):
     assert x is not self
-    if (x.has_mutation()): self.mutable_mutation().MergeFrom(x.mutation())
+    if (x.has_deprecated_mutation()): self.mutable_deprecated_mutation().MergeFrom(x.deprecated_mutation())
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
@@ -6427,44 +7030,44 @@
 
   def Equals(self, x):
     if x is self: return 1
-    if self.has_mutation_ != x.has_mutation_: return 0
-    if self.has_mutation_ and self.mutation_ != x.mutation_: return 0
+    if self.has_deprecated_mutation_ != x.has_deprecated_mutation_: return 0
+    if self.has_deprecated_mutation_ and self.deprecated_mutation_ != x.deprecated_mutation_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
     initialized = 1
-    if (not self.has_mutation_):
+    if (not self.has_deprecated_mutation_):
       initialized = 0
       if debug_strs is not None:
-        debug_strs.append('Required field: mutation not set.')
-    elif not self.mutation_.IsInitialized(debug_strs): initialized = 0
+        debug_strs.append('Required field: deprecated_mutation not set.')
+    elif not self.deprecated_mutation_.IsInitialized(debug_strs): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
-    n += self.lengthString(self.mutation_.ByteSize())
+    n += self.lengthString(self.deprecated_mutation_.ByteSize())
     return n + 1
 
   def ByteSizePartial(self):
     n = 0
-    if (self.has_mutation_):
+    if (self.has_deprecated_mutation_):
       n += 1
-      n += self.lengthString(self.mutation_.ByteSizePartial())
+      n += self.lengthString(self.deprecated_mutation_.ByteSizePartial())
     return n
 
   def Clear(self):
-    self.clear_mutation()
+    self.clear_deprecated_mutation()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
-    out.putVarInt32(self.mutation_.ByteSize())
-    self.mutation_.OutputUnchecked(out)
+    out.putVarInt32(self.deprecated_mutation_.ByteSize())
+    self.deprecated_mutation_.OutputUnchecked(out)
 
   def OutputPartial(self, out):
-    if (self.has_mutation_):
+    if (self.has_deprecated_mutation_):
       out.putVarInt32(10)
-      out.putVarInt32(self.mutation_.ByteSizePartial())
-      self.mutation_.OutputPartial(out)
+      out.putVarInt32(self.deprecated_mutation_.ByteSizePartial())
+      self.deprecated_mutation_.OutputPartial(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -6473,7 +7076,7 @@
         length = d.getVarInt32()
         tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
         d.skip(length)
-        self.mutable_mutation().TryMerge(tmp)
+        self.mutable_deprecated_mutation().TryMerge(tmp)
         continue
 
 
@@ -6483,9 +7086,9 @@
 
   def __str__(self, prefix="", printElemNumber=0):
     res=""
-    if self.has_mutation_:
-      res+=prefix+"mutation <\n"
-      res+=self.mutation_.__str__(prefix + "  ", printElemNumber)
+    if self.has_deprecated_mutation_:
+      res+=prefix+"deprecated_mutation <\n"
+      res+=self.deprecated_mutation_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
     return res
 
@@ -6493,11 +7096,11 @@
   def _BuildTagLookupTable(sparse, maxtag, default=None):
     return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
 
-  kmutation = 1
+  kdeprecated_mutation = 1
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
-    1: "mutation",
+    1: "deprecated_mutation",
   }, 1)
 
   _TYPES = _BuildTagLookupTable({
@@ -6510,7 +7113,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.WriteRequest'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LldyaXRlUmVxdWVzdBMaCG11dGF0aW9uIAEoAjALOAJKIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uowGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LldyaXRlUmVxdWVzdBMaE2RlcHJlY2F0ZWRfbXV0YXRpb24gASgCMAs4AkoqYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRGVwcmVjYXRlZE11dGF0aW9uowGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -6991,57 +7594,57 @@
         BeginTransactionRequest,
         BeginTransactionResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.Rollback.im_func,
         RollbackRequest,
         RollbackResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.Commit.im_func,
         CommitRequest,
         CommitResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.RunQuery.im_func,
         RunQueryRequest,
         RunQueryResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.ContinueQuery.im_func,
         ContinueQueryRequest,
         ContinueQueryResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.Lookup.im_func,
         LookupRequest,
         LookupResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.AllocateIds.im_func,
         AllocateIdsRequest,
         AllocateIdsResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.Get.im_func,
         LookupRequest,
         LookupResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.Write.im_func,
         WriteRequest,
         CommitResponse,
         None,
-        'none')
+        'INTEGRITY')
 
 if _extension_runtime:
   pass
 
-__all__ = ['Error','EntityResult','Query','KindExpression','PropertyReference','PropertyExpression','PropertyOrder','Filter','CompositeFilter','PropertyFilter','GqlQuery','GqlQueryArg','QueryResultBatch','Mutation','MutationResult','ReadOptions','LookupRequest','LookupResponse','RunQueryRequest','RunQueryResponse','ContinueQueryRequest','ContinueQueryResponse','BeginTransactionRequest','BeginTransactionResponse','RollbackRequest','RollbackResponse','CommitRequest','CommitResponse','AllocateIdsRequest','AllocateIdsResponse','WriteRequest','DatastoreV4Service']
+__all__ = ['Error','EntityResult','Query','KindExpression','PropertyReference','PropertyExpression','PropertyOrder','Filter','CompositeFilter','PropertyFilter','GqlQuery','GqlQueryArg','QueryResultBatch','Mutation','MutationResult','DeprecatedMutation','DeprecatedMutationResult','ReadOptions','LookupRequest','LookupResponse','RunQueryRequest','RunQueryResponse','ContinueQueryRequest','ContinueQueryResponse','BeginTransactionRequest','BeginTransactionResponse','RollbackRequest','RollbackResponse','CommitRequest','CommitResponse','AllocateIdsRequest','AllocateIdsResponse','WriteRequest','DatastoreV4Service']
diff --git a/google/appengine/datastore/datastore_v4_stub.py b/google/appengine/datastore/datastore_v4_stub.py
index 5e77289..2d27f02 100644
--- a/google/appengine/datastore/datastore_v4_stub.py
+++ b/google/appengine/datastore/datastore_v4_stub.py
@@ -98,18 +98,22 @@
     try:
       self.__service_validator.validate_commit_req(req)
       if req.has_transaction():
-        resp.mutable_mutation_result()
-        resp.mutable_mutation_result().CopyFrom(
-            self.__apply_v4_mutation(req.mutation(), req.transaction()))
+        resp.mutable_deprecated_mutation_result()
+        resp.mutable_deprecated_mutation_result().CopyFrom(
+            self.__apply_v4_deprecated_mutation(req.deprecated_mutation(),
+                                                req.transaction()))
         v3_req = self.__service_converter.v4_commit_req_to_v3_txn(req)
         v3_resp = datastore_pb.CommitResponse()
         self.__make_v3_call('Commit', v3_req, v3_resp)
-        total_index_updates = (resp.mutable_mutation_result().index_updates()
-                               + v3_resp.cost().index_writes())
-        resp.mutable_mutation_result().set_index_updates(total_index_updates)
+        total_index_updates = (
+            resp.mutable_deprecated_mutation_result().index_updates()
+            + v3_resp.cost().index_writes())
+        resp.mutable_deprecated_mutation_result().set_index_updates(
+            total_index_updates)
       else:
-        resp.mutable_mutation_result().CopyFrom(
-            self.__apply_v4_mutation(req.mutation(), None))
+        resp.mutable_deprecated_mutation_result().CopyFrom(
+            self.__apply_v4_deprecated_mutation(req.deprecated_mutation(),
+                                                None))
     except datastore_pbs.InvalidConversionError, e:
       raise apiproxy_errors.ApplicationError(
           datastore_v4_pb.Error.BAD_REQUEST, str(e))
@@ -280,15 +284,15 @@
     self.__make_v3_call('Put', v3_put_req, v3_put_resp)
     return v3_put_resp.cost().index_writes()
 
-  def __apply_v4_mutation(self, v4_mutation, v4_txn):
-    """Applies a v4 Mutation.
+  def __apply_v4_deprecated_mutation(self, v4_deprecated_mutation, v4_txn):
+    """Applies a v4 DeprecatedMutation.
 
     Args:
-      v4_mutation: a datastore_v4_pb.Mutation
+      v4_deprecated_mutation: a datastore_v4_pb.DeprecatedMutation
       v4_txn: an optional v4 transaction handle or None
 
     Returns:
-      a datastore_v4_pb.MutationResult
+      a datastore_v4_pb.DeprecatedMutationResult
     """
     index_writes = 0
     v3_txn = None
@@ -297,13 +301,13 @@
       self.__service_converter.v4_to_v3_txn(v4_txn, v3_txn)
 
 
-    for v4_entity in v4_mutation.insert_list():
+    for v4_entity in v4_deprecated_mutation.insert_list():
       v3_entity = entity_pb.EntityProto()
       self.__entity_converter.v4_to_v3_entity(v4_entity, v3_entity)
       index_writes += self.__insert_v3_entity(v3_entity, v3_txn)
 
 
-    for v4_entity in v4_mutation.update_list():
+    for v4_entity in v4_deprecated_mutation.update_list():
       v3_entity = entity_pb.EntityProto()
       self.__entity_converter.v4_to_v3_entity(v4_entity, v3_entity)
       index_writes += self.__update_v3_entity(v3_entity, v3_txn)
@@ -312,7 +316,7 @@
     v3_insert_auto_req = datastore_pb.PutRequest()
     if v3_txn:
       v3_insert_auto_req.mutable_transaction().CopyFrom(v3_txn)
-    for v4_entity in v4_mutation.insert_auto_id_list():
+    for v4_entity in v4_deprecated_mutation.insert_auto_id_list():
       v3_entity = entity_pb.EntityProto()
       self.__entity_converter.v4_to_v3_entity(v4_entity, v3_entity)
       v3_insert_auto_req.entity_list().append(v3_entity)
@@ -324,7 +328,7 @@
     v3_upsert_req = datastore_pb.PutRequest()
     if v3_txn:
       v3_upsert_req.mutable_transaction().CopyFrom(v3_txn)
-    for v4_entity in v4_mutation.upsert_list():
+    for v4_entity in v4_deprecated_mutation.upsert_list():
       v3_entity = entity_pb.EntityProto()
       self.__entity_converter.v4_to_v3_entity(v4_entity, v3_entity)
       v3_upsert_req.entity_list().append(v3_entity)
@@ -336,20 +340,20 @@
     v3_delete_req = datastore_pb.DeleteRequest()
     if v3_txn:
       v3_delete_req.mutable_transaction().CopyFrom(v3_txn)
-    for v4_key in v4_mutation.delete_list():
+    for v4_key in v4_deprecated_mutation.delete_list():
       self.__entity_converter.v4_to_v3_reference(v4_key,
                                                  v3_delete_req.add_key())
     v3_delete_resp = datastore_pb.DeleteResponse()
     self.__make_v3_call('Delete', v3_delete_req, v3_delete_resp)
     index_writes += v3_delete_resp.cost().index_writes()
 
-    v4_mutation_result = datastore_v4_pb.MutationResult()
+    v4_deprecated_mutation_result = datastore_v4_pb.DeprecatedMutationResult()
     for v3_ref in v3_insert_auto_id_resp.key_list():
       self.__entity_converter.v3_to_v4_key(
-          v3_ref, v4_mutation_result.add_insert_auto_id_key())
-    v4_mutation_result.set_index_updates(index_writes)
+          v3_ref, v4_deprecated_mutation_result.add_insert_auto_id_key())
+    v4_deprecated_mutation_result.set_index_updates(index_writes)
 
-    return v4_mutation_result
+    return v4_deprecated_mutation_result
 
   def __normalize_v4_run_query_request(self, v4_req):
 
diff --git a/google/appengine/datastore/datastore_v4_validator.py b/google/appengine/datastore/datastore_v4_validator.py
index 5d958c3..94069b5 100644
--- a/google/appengine/datastore/datastore_v4_validator.py
+++ b/google/appengine/datastore/datastore_v4_validator.py
@@ -1043,7 +1043,7 @@
     else:
       _assert_condition(False,
                         'Unknown commit mode: %d.' % req.mode())
-    self.__validate_mutation(req.mutation())
+    self.__validate_deprecated_mutation(req.deprecated_mutation())
 
   def validate_run_query_req(self, req):
     """Validates a normalized RunQueryRequest.
@@ -1109,17 +1109,18 @@
                       ('Cannot specify both a read consistency and'
                        ' a transaction.'))
 
-  def __validate_mutation(self, mutation):
+  def __validate_deprecated_mutation(self, deprecated_mutation):
     self.__entity_validator.validate_entities(WRITE,
-                                              mutation.upsert_list())
+                                              deprecated_mutation.upsert_list())
     self.__entity_validator.validate_entities(WRITE,
-                                              mutation.update_list())
+                                              deprecated_mutation.update_list())
     self.__entity_validator.validate_entities(WRITE,
-                                              mutation.insert_list())
-    self.__entity_validator.validate_entities(WRITE_AUTO_ID,
-                                              mutation.insert_auto_id_list())
+                                              deprecated_mutation.insert_list())
+    self.__entity_validator.validate_entities(
+        WRITE_AUTO_ID,
+        deprecated_mutation.insert_auto_id_list())
     self.__entity_validator.validate_keys(WRITE,
-                                          mutation.delete_list())
+                                          deprecated_mutation.delete_list())
 
 
 
diff --git a/google/appengine/datastore/document_pb.py b/google/appengine/datastore/document_pb.py
index c2b34a1..a5756d3 100644
--- a/google/appengine/datastore/document_pb.py
+++ b/google/appengine/datastore/document_pb.py
@@ -669,6 +669,293 @@
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.FieldTypes'
+class FacetValue(ProtocolBuffer.ProtocolMessage):
+
+
+  ATOM         =    2
+  DATE         =    3
+  NUMBER       =    4
+
+  _ContentType_NAMES = {
+    2: "ATOM",
+    3: "DATE",
+    4: "NUMBER",
+  }
+
+  def ContentType_Name(cls, x): return cls._ContentType_NAMES.get(x, "")
+  ContentType_Name = classmethod(ContentType_Name)
+
+  has_type_ = 0
+  type_ = 2
+  has_string_value_ = 0
+  string_value_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def type(self): return self.type_
+
+  def set_type(self, x):
+    self.has_type_ = 1
+    self.type_ = x
+
+  def clear_type(self):
+    if self.has_type_:
+      self.has_type_ = 0
+      self.type_ = 2
+
+  def has_type(self): return self.has_type_
+
+  def string_value(self): return self.string_value_
+
+  def set_string_value(self, x):
+    self.has_string_value_ = 1
+    self.string_value_ = x
+
+  def clear_string_value(self):
+    if self.has_string_value_:
+      self.has_string_value_ = 0
+      self.string_value_ = ""
+
+  def has_string_value(self): return self.has_string_value_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_type()): self.set_type(x.type())
+    if (x.has_string_value()): self.set_string_value(x.string_value())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_type_ != x.has_type_: return 0
+    if self.has_type_ and self.type_ != x.type_: return 0
+    if self.has_string_value_ != x.has_string_value_: return 0
+    if self.has_string_value_ and self.string_value_ != x.string_value_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_type_): n += 1 + self.lengthVarInt64(self.type_)
+    if (self.has_string_value_): n += 1 + self.lengthString(len(self.string_value_))
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_type_): n += 1 + self.lengthVarInt64(self.type_)
+    if (self.has_string_value_): n += 1 + self.lengthString(len(self.string_value_))
+    return n
+
+  def Clear(self):
+    self.clear_type()
+    self.clear_string_value()
+
+  def OutputUnchecked(self, out):
+    if (self.has_type_):
+      out.putVarInt32(8)
+      out.putVarInt32(self.type_)
+    if (self.has_string_value_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.string_value_)
+
+  def OutputPartial(self, out):
+    if (self.has_type_):
+      out.putVarInt32(8)
+      out.putVarInt32(self.type_)
+    if (self.has_string_value_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.string_value_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_type(d.getVarInt32())
+        continue
+      if tt == 26:
+        self.set_string_value(d.getPrefixedString())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatInt32(self.type_))
+    if self.has_string_value_: res+=prefix+("string_value: %s\n" % self.DebugFormatString(self.string_value_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  ktype = 1
+  kstring_value = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "type",
+    3: "string_value",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.FacetValue'
+class Facet(ProtocolBuffer.ProtocolMessage):
+  has_name_ = 0
+  name_ = ""
+  has_value_ = 0
+
+  def __init__(self, contents=None):
+    self.value_ = FacetValue()
+    if contents is not None: self.MergeFromString(contents)
+
+  def name(self): return self.name_
+
+  def set_name(self, x):
+    self.has_name_ = 1
+    self.name_ = x
+
+  def clear_name(self):
+    if self.has_name_:
+      self.has_name_ = 0
+      self.name_ = ""
+
+  def has_name(self): return self.has_name_
+
+  def value(self): return self.value_
+
+  def mutable_value(self): self.has_value_ = 1; return self.value_
+
+  def clear_value(self):self.has_value_ = 0; self.value_.Clear()
+
+  def has_value(self): return self.has_value_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_name()): self.set_name(x.name())
+    if (x.has_value()): self.mutable_value().MergeFrom(x.value())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_name_ != x.has_name_: return 0
+    if self.has_name_ and self.name_ != x.name_: return 0
+    if self.has_value_ != x.has_value_: return 0
+    if self.has_value_ and self.value_ != x.value_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_name_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: name not set.')
+    if (not self.has_value_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: value not set.')
+    elif not self.value_.IsInitialized(debug_strs): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.name_))
+    n += self.lengthString(self.value_.ByteSize())
+    return n + 2
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_name_):
+      n += 1
+      n += self.lengthString(len(self.name_))
+    if (self.has_value_):
+      n += 1
+      n += self.lengthString(self.value_.ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_name()
+    self.clear_value()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.name_)
+    out.putVarInt32(18)
+    out.putVarInt32(self.value_.ByteSize())
+    self.value_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_name_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.name_)
+    if (self.has_value_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.value_.ByteSizePartial())
+      self.value_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_name(d.getPrefixedString())
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_value().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
+    if self.has_value_:
+      res+=prefix+"value <\n"
+      res+=self.value_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kname = 1
+  kvalue = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "name",
+    2: "value",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Facet'
 class Document(ProtocolBuffer.ProtocolMessage):
 
 
@@ -696,6 +983,7 @@
 
   def __init__(self, contents=None):
     self.field_ = []
+    self.facet_ = []
     self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
@@ -799,6 +1087,22 @@
 
   def has_version(self): return self.has_version_
 
+  def facet_size(self): return len(self.facet_)
+  def facet_list(self): return self.facet_
+
+  def facet(self, i):
+    return self.facet_[i]
+
+  def mutable_facet(self, i):
+    return self.facet_[i]
+
+  def add_facet(self):
+    x = Facet()
+    self.facet_.append(x)
+    return x
+
+  def clear_facet(self):
+    self.facet_ = []
 
   def MergeFrom(self, x):
     assert x is not self
@@ -809,6 +1113,7 @@
     if (x.has_storage()): self.set_storage(x.storage())
     if (x.has_acl()): self.mutable_acl().MergeFrom(x.acl())
     if (x.has_version()): self.set_version(x.version())
+    for i in xrange(x.facet_size()): self.add_facet().CopyFrom(x.facet(i))
 
   def Equals(self, x):
     if x is self: return 1
@@ -827,6 +1132,9 @@
     if self.has_acl_ and self.acl_ != x.acl_: return 0
     if self.has_version_ != x.has_version_: return 0
     if self.has_version_ and self.version_ != x.version_: return 0
+    if len(self.facet_) != len(x.facet_): return 0
+    for e1, e2 in zip(self.facet_, x.facet_):
+      if e1 != e2: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -834,6 +1142,8 @@
     for p in self.field_:
       if not p.IsInitialized(debug_strs): initialized=0
     if (self.has_acl_ and not self.acl_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.facet_:
+      if not p.IsInitialized(debug_strs): initialized=0
     return initialized
 
   def ByteSize(self):
@@ -846,6 +1156,8 @@
     if (self.has_storage_): n += 1 + self.lengthVarInt64(self.storage_)
     if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSize())
     if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
+    n += 1 * len(self.facet_)
+    for i in xrange(len(self.facet_)): n += self.lengthString(self.facet_[i].ByteSize())
     return n
 
   def ByteSizePartial(self):
@@ -858,6 +1170,8 @@
     if (self.has_storage_): n += 1 + self.lengthVarInt64(self.storage_)
     if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSizePartial())
     if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
+    n += 1 * len(self.facet_)
+    for i in xrange(len(self.facet_)): n += self.lengthString(self.facet_[i].ByteSizePartial())
     return n
 
   def Clear(self):
@@ -868,6 +1182,7 @@
     self.clear_storage()
     self.clear_acl()
     self.clear_version()
+    self.clear_facet()
 
   def OutputUnchecked(self, out):
     if (self.has_id_):
@@ -893,6 +1208,10 @@
     if (self.has_version_):
       out.putVarInt32(56)
       out.putVarInt64(self.version_)
+    for i in xrange(len(self.facet_)):
+      out.putVarInt32(66)
+      out.putVarInt32(self.facet_[i].ByteSize())
+      self.facet_[i].OutputUnchecked(out)
 
   def OutputPartial(self, out):
     if (self.has_id_):
@@ -918,6 +1237,10 @@
     if (self.has_version_):
       out.putVarInt32(56)
       out.putVarInt64(self.version_)
+    for i in xrange(len(self.facet_)):
+      out.putVarInt32(66)
+      out.putVarInt32(self.facet_[i].ByteSizePartial())
+      self.facet_[i].OutputPartial(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -949,6 +1272,12 @@
       if tt == 56:
         self.set_version(d.getVarInt64())
         continue
+      if tt == 66:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_facet().TryMerge(tmp)
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -974,6 +1303,14 @@
       res+=self.acl_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
     if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
+    cnt=0
+    for e in self.facet_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("facet%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
     return res
 
 
@@ -987,6 +1324,7 @@
   kstorage = 5
   kacl = 6
   kversion = 7
+  kfacet = 8
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
@@ -997,7 +1335,8 @@
     5: "storage",
     6: "acl",
     7: "version",
-  }, 7)
+    8: "facet",
+  }, 8)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
@@ -1008,7 +1347,8 @@
     5: ProtocolBuffer.Encoder.NUMERIC,
     6: ProtocolBuffer.Encoder.STRING,
     7: ProtocolBuffer.Encoder.NUMERIC,
-  }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
+    8: ProtocolBuffer.Encoder.STRING,
+  }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
@@ -1017,4 +1357,4 @@
 if _extension_runtime:
   pass
 
-__all__ = ['FieldValue','FieldValue_Geo','Field','FieldTypes','Document']
+__all__ = ['FieldValue','FieldValue_Geo','Field','FieldTypes','FacetValue','Facet','Document']
diff --git a/google/appengine/dist/httplib.py b/google/appengine/dist/httplib.py
index ac6b513..8f3f145 100644
--- a/google/appengine/dist/httplib.py
+++ b/google/appengine/dist/httplib.py
@@ -20,6 +20,7 @@
 
 """Copyright 2008 Python Software Foundation, Ian Bicking, and Google."""
 
+import inspect
 import mimetools
 import StringIO
 import sys
@@ -202,6 +203,16 @@
   def send(self, data):
     self._body += data
 
+  @staticmethod
+  def _getargspec(callable_object):
+    assert callable(callable_object)
+    try:
+
+      return inspect.getargspec(callable_object)
+    except TypeError:
+
+      return inspect.getargspec(callable_object.__call__)
+
   def getresponse(self):
     if self.port and self.port != self.default_port:
         host = '%s:%s' % (self.host, self.port)
@@ -218,9 +229,18 @@
     except KeyError:
       raise ValueError("%r is an unrecognized HTTP method" % self._method)
 
+
+
+
+
+    args, _, keywords, _ = self._getargspec(self._fetch)
+    extra_kwargs = (
+        {'validate_certificate': False}
+        if keywords or 'validate_certificate' in args
+        else {})
     response = self._fetch(url, self._body, method, headers,
                            self._allow_truncated, self._follow_redirects,
-                           deadline=self.timeout)
+                           deadline=self.timeout, **extra_kwargs)
     return HTTPResponse(response)
 
   def close(self):
diff --git a/google/appengine/dist27/gae_override/httplib.py b/google/appengine/dist27/gae_override/httplib.py
index 6b66e69..1edee85 100644
--- a/google/appengine/dist27/gae_override/httplib.py
+++ b/google/appengine/dist27/gae_override/httplib.py
@@ -5,6 +5,7 @@
 """Copyright 2008 Python Software Foundation, Ian Bicking, and Google."""
 
 import cStringIO
+import inspect
 import mimetools
 
 HTTP_PORT = 80
@@ -458,6 +459,16 @@
       headers = headers.items()
     self.headers = headers
 
+  @staticmethod
+  def _getargspec(callable_object):
+    assert callable(callable_object)
+    try:
+      # Methods and lambdas.
+      return inspect.getargspec(callable_object)
+    except TypeError:
+      # Class instances with __call__.
+      return inspect.getargspec(callable_object.__call__)
+
   def getresponse(self, buffering=False):
     """Get the response from the server.
 
@@ -491,18 +502,31 @@
       raise ValueError('%r is an unrecognized HTTP method' % self._method)
 
     try:
+      # The Python Standard Library doesn't validate certificates so don't
+      # validate them here either.  But some libraries (httplib2, possibly
+      # others) use an alternate technique where the fetch function does not
+      # have a validate_certificate argument so only provide it when supported.
+      argspec = self._getargspec(self._fetch)
+      extra_kwargs = (
+          {'validate_certificate': False}
+          if argspec.keywords or 'validate_certificate' in argspec.args
+          else {})
       fetch_response = self._fetch(url,
                                    self._body,
                                    method, headers,
                                    self._allow_truncated,
                                    self._follow_redirects,
-                                   deadline)
+                                   deadline,
+                                   **extra_kwargs)
     except urlfetch.InvalidURLError, e:
       raise InvalidURL(str(e))
     except (urlfetch.ResponseTooLargeError, urlfetch.DeadlineExceededError), e:
       raise HTTPException(str(e))
     except urlfetch.SSLCertificateError, e:
       # Should be ssl.SSLError but the ssl module isn't available.
+      # Continue to support this exception for versions of _fetch that do not
+      # support validate_certificates. Also, in production App Engine defers
+      # specific semantics so leaving this in just in case.
       raise HTTPException(str(e))
     except urlfetch.DownloadError, e:
       # One of the following occured: UNSPECIFIED_ERROR, FETCH_ERROR
diff --git a/google/appengine/ext/analytics/static/analytics_js.js b/google/appengine/ext/analytics/static/analytics_js.js
index cf04198..fc4c21a 100644
--- a/google/appengine/ext/analytics/static/analytics_js.js
+++ b/google/appengine/ext/analytics/static/analytics_js.js
@@ -9,17 +9,17 @@
 a.metaKey;this.state=a.state;this.j=a;a.defaultPrevented&&this.preventDefault()}};t(U,T);U.prototype.preventDefault=function(){U.n.preventDefault.call(this);var a=this.j;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,ua)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var va="closure_listenable_"+(1E6*Math.random()|0),wa=function(a){try{return!(!a||!a[va])}catch(b){return!1}},xa=0;var ya=function(a,b,c,e,d){this.c=a;this.e=null;this.src=b;this.type=c;this.capture=!!e;this.f=d;this.key=++xa;this.d=this.g=!1},za=function(a){a.d=!0;a.c=null;a.e=null;a.src=null;a.f=null};var V=function(a){this.src=a;this.b={};this.h=0};V.prototype.add=function(a,b,c,e,d){var f=this.b[a];f||(f=this.b[a]=[],this.h++);var g;t:{for(g=0;g<f.length;++g){var h=f[g];if(!h.d&&h.c==b&&h.capture==!!e&&h.f==d)break t}g=-1}-1<g?(a=f[g],c||(a.g=!1)):(a=new ya(b,this.src,a,!!e,d),a.g=c,f.push(a));return a};var Aa=function(a,b){var c=b.type;if(c in a.b){var e=a.b[c],d=B(e,b),f;if(f=0<=d)z(null!=e.length),A.splice.call(e,d,1);f&&(za(b),0==a.b[c].length&&(delete a.b[c],a.h--))}};var W="closure_lm_"+(1E6*Math.random()|0),X={},Ba=0,Da=function(){var a=Ca,b=ta?function(c){return a.call(b.src,b.c,c)}:function(c){c=a.call(b.src,b.c,c);if(!c)return c};return b},Ea=function(a,b,c,e,d){if("array"==n(b))for(var f=0;f<b.length;f++)Ea(a,b[f],c,e,d);else if(c=Fa(c),wa(a))a.k.add(String(b),c,!0,e,d);else{if(!b)throw Error("Invalid event type");var f=!!e,g=Y(a);g||(a[W]=g=new V(a));c=g.add(b,c,!0,e,d);c.e||(e=Da(),c.e=e,e.src=a,e.c=c,a.addEventListener?a.addEventListener(b,e,f):a.attachEvent(b in
 X?X[b]:X[b]="on"+b,e),Ba++)}},Ha=function(a,b,c,e){var d=1;if(a=Y(a))if(b=a.b[b])for(b=D(b),a=0;a<b.length;a++){var f=b[a];f&&f.capture==c&&!f.d&&(d&=!1!==Ga(f,e))}return Boolean(d)},Ga=function(a,b){var c=a.c,e=a.f||a.src;if(a.g&&"number"!=typeof a&&a&&!a.d){var d=a.src;if(wa(d))Aa(d.k,a);else{var f=a.type,g=a.e;d.removeEventListener?d.removeEventListener(f,g,a.capture):d.detachEvent&&d.detachEvent(f in X?X[f]:X[f]="on"+f,g);Ba--;(f=Y(d))?(Aa(f,a),0==f.h&&(f.src=null,d[W]=null)):za(a)}}return c.call(e,
 b)},Ca=function(a,b){if(a.d)return!0;if(!ta){var c;if(!(c=b))t:{c=["window","event"];for(var e=m,d;d=c.shift();)if(null!=e[d])e=e[d];else{c=null;break t}c=e}d=c;c=new U(d,this);e=!0;if(!(0>d.keyCode||void 0!=d.returnValue)){t:{var f=!1;if(0==d.keyCode)try{d.keyCode=-1;break t}catch(g){f=!0}if(f||void 0==d.returnValue)d.returnValue=!0}d=[];for(f=c.currentTarget;f;f=f.parentNode)d.push(f);for(var f=a.type,h=d.length-1;!c.i&&0<=h;h--)c.currentTarget=d[h],e&=Ha(d[h],f,!0,c);for(h=0;!c.i&&h<d.length;h++)c.currentTarget=
-d[h],e&=Ha(d[h],f,!1,c)}return e}return Ga(a,new U(b,this))},Y=function(a){a=a[W];return a instanceof V?a:null},Ia="__closure_events_fn_"+(1E9*Math.random()>>>0),Fa=function(a){z(a,"Listener can not be null.");if("function"==n(a))return a;z(a.handleEvent,"An object listener must have handleEvent method.");return a[Ia]||(a[Ia]=function(b){return a.handleEvent(b)})};var $=function(a,b,c){"number"==typeof a?(this.a=new Date(a,b||0,c||1),Z(this,c||1)):(b=typeof a,"object"==b&&null!=a||"function"==b?(this.a=new Date(a.getFullYear(),a.getMonth(),a.getDate()),Z(this,a.getDate())):(this.a=new Date(aa()),this.a.setHours(0),this.a.setMinutes(0),this.a.setSeconds(0),this.a.setMilliseconds(0)))};k=$.prototype;k.getFullYear=function(){return this.a.getFullYear()};k.getYear=function(){return this.getFullYear()};k.getMonth=function(){return this.a.getMonth()};k.getDate=function(){return this.a.getDate()};
-k.getTime=function(){return this.a.getTime()};k.getUTCHours=function(){return this.a.getUTCHours()};k.setFullYear=function(a){this.a.setFullYear(a)};k.setMonth=function(a){this.a.setMonth(a)};k.setDate=function(a){this.a.setDate(a)};
+d[h],e&=Ha(d[h],f,!1,c)}return e}return Ga(a,new U(b,this))},Y=function(a){a=a[W];return a instanceof V?a:null},Ia="__closure_events_fn_"+(1E9*Math.random()>>>0),Fa=function(a){z(a,"Listener can not be null.");if("function"==n(a))return a;z(a.handleEvent,"An object listener must have handleEvent method.");return a[Ia]||(a[Ia]=function(b){return a.handleEvent(b)})};var $=function(a,b,c){"number"==typeof a?(this.a=Ja(a,b||0,c||1),Z(this,c||1)):(b=typeof a,"object"==b&&null!=a||"function"==b?(this.a=Ja(a.getFullYear(),a.getMonth(),a.getDate()),Z(this,a.getDate())):(this.a=new Date(aa()),this.a.setHours(0),this.a.setMinutes(0),this.a.setSeconds(0),this.a.setMilliseconds(0)))},Ja=function(a,b,c){b=new Date(a,b,c);0<=a&&100>a&&b.setFullYear(b.getFullYear()-1900);return b};k=$.prototype;k.getFullYear=function(){return this.a.getFullYear()};k.getYear=function(){return this.getFullYear()};
+k.getMonth=function(){return this.a.getMonth()};k.getDate=function(){return this.a.getDate()};k.getTime=function(){return this.a.getTime()};k.getUTCHours=function(){return this.a.getUTCHours()};k.setFullYear=function(a){this.a.setFullYear(a)};k.setMonth=function(a){this.a.setMonth(a)};k.setDate=function(a){this.a.setDate(a)};
 k.add=function(a){if(a.o||a.m){var b=this.getMonth()+a.m+12*a.o,c=this.getYear()+Math.floor(b/12),b=b%12;0>b&&(b+=12);var e;t:{switch(b){case 1:e=0!=c%4||0==c%100&&0!=c%400?28:29;break t;case 5:case 8:case 10:case 3:e=30;break t}e=31}e=Math.min(e,this.getDate());this.setDate(1);this.setFullYear(c);this.setMonth(b);this.setDate(e)}a.l&&(b=new Date(this.getYear(),this.getMonth(),this.getDate(),12),a=new Date(b.getTime()+864E5*a.l),this.setDate(1),this.setFullYear(a.getFullYear()),this.setMonth(a.getMonth()),
-this.setDate(a.getDate()),Z(this,a.getDate()))};k.p=function(){return[this.getFullYear(),w(this.getMonth()+1),w(this.getDate())].join("")+""};k.toString=function(){return this.p()};var Z=function(a,b){if(a.getDate()!=b){var c=a.getDate()<b?1:-1;a.a.setUTCHours(a.a.getUTCHours()+c)}};$.prototype.valueOf=function(){return this.a.valueOf()};new $(0,0,1);new $(9999,11,31);J||L&&Q("525");s("ae.init",function(){Ja();Ka();Ea(window,"load",function(){});La()});
-var Ja=function(){var a;a=document;if(a=q("ae-content")?a.getElementById("ae-content"):"ae-content"){a=R("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=R("tbody",null,c);for(var e=0,d;d=c[e];e++){d=R("tr",null,d);for(var f=0,g;g=d[f];f++)f%2&&qa(g,"ae-even")}}}},Ka=function(){var a=R(null,"ae-noscript",void 0);ca(D(a),function(a){sa(a,"ae-noscript")})},La=function(){m._gaq=m._gaq||[];m._gaq.push(function(){m._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=
-document.createElement("script");a.src=("https:"==document.location.protocol?"https://ssl":"http://www")+".google-analytics.com/ga.js";a.setAttribute("async","true");document.documentElement.firstChild.appendChild(a)})()};s("ae.trackPageView",function(){m._gaq&&m._gaq._getAsyncTracker("ae")._trackPageview()});var Na=function(a){if(void 0==a||null==a||0==a.length)return 0;a=Math.max.apply(Math,a);return Ma(a)},Ma=function(a){var b=5;2>b&&(b=2);b-=1;return Math.ceil(a/b)*b},Oa=function(a,b,c){a=a.getSelection();1==a.length&&(a=a[0],null!=a.row&&(null!=b.starttime&&(c+="&starttime="+b.starttime),null!=b.endtime&&(c+="&endtime="+b.endtime),null!=b.latency_lower&&(c+="&latency_lower="+b.latency_lower),null!=b.latency_upper&&(c+="&latency_upper="+b.latency_upper),b=c+"&detail="+a.row,window.location.href=b))},
-Pa=function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("string","");f.addColumn("number","");f.addColumn({type:"string",role:"tooltip"});for(var g=0;g<b.length;g++)f.addRow(["",b[g],c[g]]);c=Math.max(10*b.length,200);b=Na(b);a=new google.visualization.ColumnChart(document.getElementById("rpctime-"+a));a.draw(f,{height:100,width:c,legend:"none",chartArea:{left:40},fontSize:11,vAxis:{minValue:0,maxValue:b,gridlines:{count:5}}});google.visualization.events.addListener(a,"select",
-r(Oa,a,e,d))};s("ae.Charts.latencyHistogram",function(a,b,c){var e=new google.visualization.DataTable;e.addColumn("string","");e.addColumn("number","");for(var d=0;d<b.length;d++)e.addRow([""+a[d],b[d]]);for(d=b.length;d<a.length;d++)e.addRow([""+a[d],0]);b=Na(b);(new google.visualization.ColumnChart(document.getElementById("latency-"+c))).draw(e,{legend:"none",width:20*a.length,height:200,vAxis:{maxValue:b,gridlines:{count:5}}})});
+this.setDate(a.getDate()),Z(this,a.getDate()))};k.p=function(){return[this.getFullYear(),w(this.getMonth()+1),w(this.getDate())].join("")+""};k.toString=function(){return this.p()};var Z=function(a,b){if(a.getDate()!=b){var c=a.getDate()<b?1:-1;a.a.setUTCHours(a.a.getUTCHours()+c)}};$.prototype.valueOf=function(){return this.a.valueOf()};new $(0,0,1);new $(9999,11,31);J||L&&Q("525");s("ae.init",function(){Ka();La();Ea(window,"load",function(){});Ma()});
+var Ka=function(){var a;a=document;if(a=q("ae-content")?a.getElementById("ae-content"):"ae-content"){a=R("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=R("tbody",null,c);for(var e=0,d;d=c[e];e++){d=R("tr",null,d);for(var f=0,g;g=d[f];f++)f%2&&qa(g,"ae-even")}}}},La=function(){var a=R(null,"ae-noscript",void 0);ca(D(a),function(a){sa(a,"ae-noscript")})},Ma=function(){m._gaq=m._gaq||[];m._gaq.push(function(){m._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=
+document.createElement("script");a.src=("https:"==document.location.protocol?"https://ssl":"http://www")+".google-analytics.com/ga.js";a.setAttribute("async","true");document.documentElement.firstChild.appendChild(a)})()};s("ae.trackPageView",function(){m._gaq&&m._gaq._getAsyncTracker("ae")._trackPageview()});var Oa=function(a){if(void 0==a||null==a||0==a.length)return 0;a=Math.max.apply(Math,a);return Na(a)},Na=function(a){var b=5;2>b&&(b=2);b-=1;return Math.ceil(a/b)*b},Pa=function(a,b,c){a=a.getSelection();1==a.length&&(a=a[0],null!=a.row&&(null!=b.starttime&&(c+="&starttime="+b.starttime),null!=b.endtime&&(c+="&endtime="+b.endtime),null!=b.latency_lower&&(c+="&latency_lower="+b.latency_lower),null!=b.latency_upper&&(c+="&latency_upper="+b.latency_upper),b=c+"&detail="+a.row,window.location.href=b))},
+Qa=function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("string","");f.addColumn("number","");f.addColumn({type:"string",role:"tooltip"});for(var g=0;g<b.length;g++)f.addRow(["",b[g],c[g]]);c=Math.max(10*b.length,200);b=Oa(b);a=new google.visualization.ColumnChart(document.getElementById("rpctime-"+a));a.draw(f,{height:100,width:c,legend:"none",chartArea:{left:40},fontSize:11,vAxis:{minValue:0,maxValue:b,gridlines:{count:5}}});google.visualization.events.addListener(a,"select",
+r(Pa,a,e,d))};s("ae.Charts.latencyHistogram",function(a,b,c){var e=new google.visualization.DataTable;e.addColumn("string","");e.addColumn("number","");for(var d=0;d<b.length;d++)e.addRow([""+a[d],b[d]]);for(d=b.length;d<a.length;d++)e.addRow([""+a[d],0]);b=Oa(b);(new google.visualization.ColumnChart(document.getElementById("latency-"+c))).draw(e,{legend:"none",width:20*a.length,height:200,vAxis:{maxValue:b,gridlines:{count:5}}})});
 s("ae.Charts.latencyTimestampScatter",function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("number","Time (seconds from start)");f.addColumn("number","Latency");for(var g=0;g<a.length;g++){var h=Math.round(a[g]-c);f.addRow([h,b[g]])}a=e.starttime?e.starttime:0;b=new google.visualization.ScatterChart(document.getElementById("LatencyVsTimestamp"));b.draw(f,{hAxis:{title:"Time (seconds from start of recording)",minValue:a},vAxis:{title:"Request Latency (milliseconds)",minValue:0},
-tooltip:{trigger:"none"},legend:"none"});google.visualization.events.addListener(b,"select",r(Oa,b,e,d))});
+tooltip:{trigger:"none"},legend:"none"});google.visualization.events.addListener(b,"select",r(Pa,b,e,d))});
 s("ae.Charts.entityCountBarChart",function(a,b,c,e){var d=new google.visualization.DataTable;d.addColumn("string","");d.addColumn("number","Reads");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Misses");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Writes");d.addColumn({type:"string",role:"tooltip"});var f=50;f>b.length&&(f=b.length);for(var g=0;g<f;g++)d.addRow(["",b[g][1]-b[g][3],b[g][0],b[g][3],b[g][0],b[g][2],b[g][0]]);b=20*f;f=b+130;a=new google.visualization.ColumnChart(document.getElementById(e+
-"-"+a));c=Ma(c);a.draw(d,{height:100,width:f,chartArea:{width:b},fontSize:10,isStacked:!0,vAxis:{minValue:0,maxValue:c,gridlines:{count:5}}})});
+"-"+a));c=Na(c);a.draw(d,{height:100,width:f,chartArea:{width:b},fontSize:10,isStacked:!0,vAxis:{minValue:0,maxValue:c,gridlines:{count:5}}})});
 s("ae.Charts.rpcVariationCandlestick",function(a){var b=new google.visualization.DataTable;b.addColumn("string","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addRows(a);(new google.visualization.CandlestickChart(document.getElementById("rpcvariation"))).draw(b,{vAxis:{title:"RPC Latency variation (milliseconds)"},hAxis:{textPosition:"out",slantedText:!0,slantedTextAngle:45,textStyle:{fontSize:13}},height:250,chartArea:{top:10,height:100},
-legend:"none",tooltip:{trigger:"none"}})});s("ae.Charts.totalTimeBarChart",function(a,b,c,e){for(var d=[],f=0;f<b.length;f++)d[f]=b[f]+" milliseconds";Pa(a,b,d,c,e)});s("ae.Charts.rpcTimeBarChart",function(a,b,c,e,d){var f=[],g=[],h=c.indices,v=c.times;c=c.stats;for(var p=0;p<b;p++)f[p]=0,g[p]=null;for(p=0;p<h.length;p++){f[h[p]]=v[p];b=c[p];var l="Calls: "+b[0];if(0<b[1]||0<b[2]||0<b[3])l+=" Entities";0<b[1]&&(l+=" R:"+b[1]);0<b[2]&&(l+=" W:"+b[2]);0<b[3]&&(l+=" M:"+b[3]);g[h[p]]=l}Pa(a,f,g,e,d)});})();
+legend:"none",tooltip:{trigger:"none"}})});s("ae.Charts.totalTimeBarChart",function(a,b,c,e){for(var d=[],f=0;f<b.length;f++)d[f]=b[f]+" milliseconds";Qa(a,b,d,c,e)});s("ae.Charts.rpcTimeBarChart",function(a,b,c,e,d){var f=[],g=[],h=c.indices,v=c.times;c=c.stats;for(var p=0;p<b;p++)f[p]=0,g[p]=null;for(p=0;p<h.length;p++){f[h[p]]=v[p];b=c[p];var l="Calls: "+b[0];if(0<b[1]||0<b[2]||0<b[3])l+=" Entities";0<b[1]&&(l+=" R:"+b[1]);0<b[2]&&(l+=" W:"+b[2]);0<b[3]&&(l+=" M:"+b[3]);g[h[p]]=l}Qa(a,f,g,e,d)});})();
diff --git a/google/appengine/ext/appstats/recording.py b/google/appengine/ext/appstats/recording.py
index ec160c0..c365bce 100644
--- a/google/appengine/ext/appstats/recording.py
+++ b/google/appengine/ext/appstats/recording.py
@@ -345,8 +345,8 @@
     if config.DATASTORE_DETAILS:
       details = trace.mutable_datastore_details()
       for key in response.key_list():
-        newent = details.add_keys_written()
-        newent.CopyFrom(key)
+        detail = details.add_keys_written()
+        detail.CopyFrom(key)
     if config.CALC_RPC_COSTS:
       writes = response.cost().entity_writes() + response.cost().index_writes()
       trace.set_call_cost_microdollars(writes * config.DATASTORE_WRITE_OP_COST)
@@ -396,8 +396,8 @@
     if config.DATASTORE_DETAILS:
       details = trace.mutable_datastore_details()
       for key in request.key_list():
-        newent = details.add_keys_read()
-        newent.CopyFrom(key)
+        detail = details.add_keys_read()
+        detail.CopyFrom(key)
       for entity_present in response.entity_list():
         details.add_get_successful_fetch(entity_present.has_entity())
     if config.CALC_RPC_COSTS:
@@ -433,12 +433,12 @@
       trace: IndividualStatsProto where information must be recorded.
     """
     details = trace.mutable_datastore_details()
-    if not response.keys_only():
+    if not response.small_ops():
 
 
       for entity in response.result_list():
-        newent = details.add_keys_read()
-        newent.CopyFrom(entity.key())
+        detail = details.add_keys_read()
+        detail.CopyFrom(entity.key())
     if call == 'RunQuery':
 
       if config.DATASTORE_DETAILS:
@@ -466,7 +466,7 @@
     if config.CALC_RPC_COSTS:
       num_results = len(response.result_list()) + response.skipped_results()
       cost_micropennies = config.DATASTORE_READ_OP_COST * baseline_reads
-      if response.keys_only():
+      if response.small_ops():
 
         cost_micropennies += config.DATASTORE_SMALL_OP_COST * num_results
         trace.set_call_cost_microdollars(cost_micropennies)
diff --git a/google/appengine/ext/appstats/static/appstats_js.js b/google/appengine/ext/appstats/static/appstats_js.js
index f79c25b..4f28240 100644
--- a/google/appengine/ext/appstats/static/appstats_js.js
+++ b/google/appengine/ext/appstats/static/appstats_js.js
@@ -14,71 +14,71 @@
 2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(n(g)){h="function"==typeof g.item;break t}}h=!1}Da(h?Ha(g):g,d)}}},yb=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},pb=function(a){return 9==a.nodeType?
 a:a.ownerDocument||a.document},zb=function(a,b){r(null!=a,"goog.dom.setTextContent expects a non-null value for node");if("textContent"in a)a.textContent=b;else if(3==a.nodeType)a.data=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);a.appendChild(pb(a).createTextNode(String(b)))}},Ab={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},Bb={IMG:" ",BR:"\n"},Cb=function(a){a=a.getAttributeNode("tabindex");
 return null!=a&&a.specified},Db=function(a){a=a.tabIndex;return"number"==typeof a&&0<=a&&32768>a},Eb=function(a,b,c){if(!(a.nodeName in Ab))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in Bb)b.push(Bb[a.nodeName]);else for(a=a.firstChild;a;)Eb(a,b,c),a=a.nextSibling},ob=function(a){this.Q=a||l.document||document};f=ob.prototype;f.kb=qb;f.a=function(a){return rb(this.Q,a)};f.o=function(a,b,c){return vb(this.Q,arguments)};
-f.createElement=function(a){return this.Q.createElement(a)};f.createTextNode=function(a){return this.Q.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=yb;f.I=function(a){var b;(b="A"==a.tagName||"INPUT"==a.tagName||"TEXTAREA"==a.tagName||"SELECT"==a.tagName||"BUTTON"==a.tagName?!a.disabled&&(!Cb(a)||Db(a)):Cb(a)&&Db(a))&&v?(a=n(a.getBoundingClientRect)?a.getBoundingClientRect():{height:a.offsetHeight,width:a.offsetWidth},a=null!=a&&0<a.height&&0<a.width):a=b;return a};var Fb=function(a){Fb[" "](a);return a};Fb[" "]=aa;var Gb=!v||v&&9<=hb,Hb=!v||v&&9<=hb,Ib=v&&!z("9");!x||z("528");w&&z("1.9b")||v&&z("8")||Wa&&z("9.5")||x&&z("528");w&&!z("8")||v&&z("9");var Jb=function(){};Jb.prototype.Sb=!1;var A=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.aa=!1;this.xb=!0};A.prototype.stopPropagation=function(){this.aa=!0};A.prototype.preventDefault=function(){this.defaultPrevented=!0;this.xb=!1};var B=function(a,b){A.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.state=null;this.hb=!1;this.O=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(w){var e;t:{try{Fb(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
+f.createElement=function(a){return this.Q.createElement(a)};f.createTextNode=function(a){return this.Q.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=yb;f.I=function(a){var b;(b="A"==a.tagName||"INPUT"==a.tagName||"TEXTAREA"==a.tagName||"SELECT"==a.tagName||"BUTTON"==a.tagName?!a.disabled&&(!Cb(a)||Db(a)):Cb(a)&&Db(a))&&v?(a=n(a.getBoundingClientRect)?a.getBoundingClientRect():{height:a.offsetHeight,width:a.offsetWidth},a=null!=a&&0<a.height&&0<a.width):a=b;return a};var Fb=function(a){Fb[" "](a);return a};Fb[" "]=aa;var Gb=!v||v&&9<=hb,Hb=!v||v&&9<=hb,Ib=v&&!z("9");!x||z("528");w&&z("1.9b")||v&&z("8")||Wa&&z("9.5")||x&&z("528");w&&!z("8")||v&&z("9");var Jb=function(){};Jb.prototype.Sb=!1;var A=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.aa=!1;this.xb=!0};A.prototype.stopPropagation=function(){this.aa=!0};A.prototype.preventDefault=function(){this.defaultPrevented=!0;this.xb=!1};var B=function(a,b){A.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.state=null;this.ib=!1;this.O=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(w){var e;t:{try{Fb(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
 c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=x||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=x||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=
-a.metaKey;this.hb=y?a.metaKey:a.ctrlKey;this.state=a.state;this.O=a;a.defaultPrevented&&this.preventDefault()}};p(B,A);var Kb=[1,4,2],Lb=function(a){return Gb?0==a.O.button:"click"==a.type?!0:!!(a.O.button&Kb[0])};B.prototype.stopPropagation=function(){B.e.stopPropagation.call(this);this.O.stopPropagation?this.O.stopPropagation():this.O.cancelBubble=!0};
+a.metaKey;this.ib=y?a.metaKey:a.ctrlKey;this.state=a.state;this.O=a;a.defaultPrevented&&this.preventDefault()}};p(B,A);var Kb=[1,4,2],Lb=function(a){return Gb?0==a.O.button:"click"==a.type?!0:!!(a.O.button&Kb[0])};B.prototype.stopPropagation=function(){B.e.stopPropagation.call(this);this.O.stopPropagation?this.O.stopPropagation():this.O.cancelBubble=!0};
 B.prototype.preventDefault=function(){B.e.preventDefault.call(this);var a=this.O;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,Ib)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Mb="closure_listenable_"+(1E6*Math.random()|0),Nb=function(a){try{return!(!a||!a[Mb])}catch(b){return!1}},Ob=0;var Pb=function(a,b,c,d,e){this.X=a;this.Da=null;this.src=b;this.type=c;this.capture=!!d;this.Ia=e;this.key=++Ob;this.ga=this.Ga=!1},Qb=function(a){a.ga=!0;a.X=null;a.Da=null;a.src=null;a.Ia=null};var C=function(a){this.src=a;this.m={};this.va=0};C.prototype.add=function(a,b,c,d,e){var g=this.m[a];g||(g=this.m[a]=[],this.va++);var h=Rb(g,b,d,e);-1<h?(a=g[h],c||(a.Ga=!1)):(a=new Pb(b,this.src,a,!!d,e),a.Ga=c,g.push(a));return a};C.prototype.remove=function(a,b,c,d){if(!(a in this.m))return!1;var e=this.m[a];b=Rb(e,b,c,d);return-1<b?(Qb(e[b]),r(null!=e.length),s.splice.call(e,b,1),0==e.length&&(delete this.m[a],this.va--),!0):!1};
-var Sb=function(a,b){var c=b.type;if(!(c in a.m))return!1;var d=Ga(a.m[c],b);d&&(Qb(b),0==a.m[c].length&&(delete a.m[c],a.va--));return d};C.prototype.Za=function(a){var b=0,c;for(c in this.m)if(!a||c==a){for(var d=this.m[c],e=0;e<d.length;e++)++b,Qb(d[e]);delete this.m[c];this.va--}return b};C.prototype.wa=function(a,b,c,d){a=this.m[a];var e=-1;a&&(e=Rb(a,b,c,d));return-1<e?a[e]:null};var Rb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.ga&&g.X==b&&g.capture==!!c&&g.Ia==d)return e}return-1};var Tb="closure_lm_"+(1E6*Math.random()|0),D={},Ub=0,E=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)E(a,b[g],c,d,e);return null}c=Vb(c);if(Nb(a))a=a.c(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,h=Wb(a);h||(a[Tb]=h=new C(a));c=h.add(b,c,!1,d,e);c.Da||(d=Xb(),c.Da=d,d.src=a,d.X=c,a.addEventListener?a.addEventListener(b,d,g):a.attachEvent(b in D?D[b]:D[b]="on"+b,d),Ub++);a=c}return a},Xb=function(){var a=Yb,b=Hb?function(c){return a.call(b.src,b.X,c)}:function(c){c=a.call(b.src,
+var Sb=function(a,b){var c=b.type;if(!(c in a.m))return!1;var d=Ga(a.m[c],b);d&&(Qb(b),0==a.m[c].length&&(delete a.m[c],a.va--));return d};C.prototype.$a=function(a){var b=0,c;for(c in this.m)if(!a||c==a){for(var d=this.m[c],e=0;e<d.length;e++)++b,Qb(d[e]);delete this.m[c];this.va--}return b};C.prototype.wa=function(a,b,c,d){a=this.m[a];var e=-1;a&&(e=Rb(a,b,c,d));return-1<e?a[e]:null};var Rb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.ga&&g.X==b&&g.capture==!!c&&g.Ia==d)return e}return-1};var Tb="closure_lm_"+(1E6*Math.random()|0),D={},Ub=0,E=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)E(a,b[g],c,d,e);return null}c=Vb(c);if(Nb(a))a=a.c(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,h=Wb(a);h||(a[Tb]=h=new C(a));c=h.add(b,c,!1,d,e);c.Da||(d=Xb(),c.Da=d,d.src=a,d.X=c,a.addEventListener?a.addEventListener(b,d,g):a.attachEvent(b in D?D[b]:D[b]="on"+b,d),Ub++);a=c}return a},Xb=function(){var a=Yb,b=Hb?function(c){return a.call(b.src,b.X,c)}:function(c){c=a.call(b.src,
 b.X,c);if(!c)return c};return b},Zb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Zb(a,b[g],c,d,e);else c=Vb(c),Nb(a)?a.u(b,c,d,e):a&&(a=Wb(a))&&(b=a.wa(b,c,!!d,e))&&F(b)},F=function(a){if("number"==typeof a||!a||a.ga)return!1;var b=a.src;if(Nb(b))return Sb(b.Z,a);var c=a.type,d=a.Da;b.removeEventListener?b.removeEventListener(c,d,a.capture):b.detachEvent&&b.detachEvent(c in D?D[c]:D[c]="on"+c,d);Ub--;(c=Wb(b))?(Sb(c,a),0==c.va&&(c.src=null,b[Tb]=null)):Qb(a);return!0},ac=function(a,b,c,
 d){var e=1;if(a=Wb(a))if(b=a.m[b])for(b=Ha(b),a=0;a<b.length;a++){var g=b[a];g&&g.capture==c&&!g.ga&&(e&=!1!==$b(g,d))}return Boolean(e)},$b=function(a,b){var c=a.X,d=a.Ia||a.src;a.Ga&&F(a);return c.call(d,b)},Yb=function(a,b){if(a.ga)return!0;if(!Hb){var c;if(!(c=b))t:{c=["window","event"];for(var d=l,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new B(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==e.keyCode)try{e.keyCode=-1;break t}catch(h){g=!0}if(g||
 void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,k=e.length-1;!c.aa&&0<=k;k--)c.currentTarget=e[k],d&=ac(e[k],g,!0,c);for(k=0;!c.aa&&k<e.length;k++)c.currentTarget=e[k],d&=ac(e[k],g,!1,c)}return d}return $b(a,new B(b,this))},Wb=function(a){a=a[Tb];return a instanceof C?a:null},bc="__closure_events_fn_"+(1E9*Math.random()>>>0),Vb=function(a){r(a,"Listener can not be null.");if(n(a))return a;r(a.handleEvent,"An object listener must have handleEvent method.");
-return a[bc]||(a[bc]=function(b){return a.handleEvent(b)})};var G=function(a){this.Db=a;this.La={}};p(G,Jb);var cc=[];G.prototype.c=function(a,b,c,d){da(b)||(cc[0]=b,b=cc);for(var e=0;e<b.length;e++){var g=E(a,b[e],c||this.handleEvent,d||!1,this.Db||this);if(!g)break;this.La[g.key]=g}return this};G.prototype.u=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.u(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.Db||this,c=Vb(c),d=!!d,b=Nb(a)?a.wa(b,c,d,e):a?(a=Wb(a))?a.wa(b,c,d,e):null:null,b&&(F(b),delete this.La[b.key]);return this};
-G.prototype.Za=function(){Ka(this.La,F);this.La={}};G.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var H=function(){this.Z=new C(this);this.bc=this};p(H,Jb);H.prototype[Mb]=!0;f=H.prototype;f.mb=null;f.eb=function(a){this.mb=a};f.addEventListener=function(a,b,c,d){E(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Zb(this,a,b,c,d)};
+return a[bc]||(a[bc]=function(b){return a.handleEvent(b)})};var G=function(a){this.Db=a;this.Ma={}};p(G,Jb);var cc=[];G.prototype.c=function(a,b,c,d){da(b)||(cc[0]=b,b=cc);for(var e=0;e<b.length;e++){var g=E(a,b[e],c||this.handleEvent,d||!1,this.Db||this);if(!g)break;this.Ma[g.key]=g}return this};G.prototype.u=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.u(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.Db||this,c=Vb(c),d=!!d,b=Nb(a)?a.wa(b,c,d,e):a?(a=Wb(a))?a.wa(b,c,d,e):null:null,b&&(F(b),delete this.Ma[b.key]);return this};
+G.prototype.$a=function(){Ka(this.Ma,F);this.Ma={}};G.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var H=function(){this.Z=new C(this);this.bc=this};p(H,Jb);H.prototype[Mb]=!0;f=H.prototype;f.mb=null;f.fb=function(a){this.mb=a};f.addEventListener=function(a,b,c,d){E(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Zb(this,a,b,c,d)};
 f.dispatchEvent=function(a){dc(this);var b,c=this.mb;if(c){b=[];for(var d=1;c;c=c.mb)b.push(c),r(1E3>++d,"infinite loop")}c=this.bc;d=a.type||a;if(m(a))a=new A(a,c);else if(a instanceof A)a.target=a.target||c;else{var e=a;a=new A(d,c);Pa(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.aa&&0<=h;h--)g=a.currentTarget=b[h],e=ec(g,d,!0,a)&&e;a.aa||(g=a.currentTarget=c,e=ec(g,d,!0,a)&&e,a.aa||(e=ec(g,d,!1,a)&&e));if(b)for(h=0;!a.aa&&h<b.length;h++)g=a.currentTarget=b[h],e=ec(g,d,!1,a)&&e;return e};
 f.c=function(a,b,c,d){dc(this);return this.Z.add(String(a),b,!1,c,d)};f.u=function(a,b,c,d){return this.Z.remove(String(a),b,c,d)};var ec=function(a,b,c,d){b=a.Z.m[String(b)];if(!b)return!0;b=Ha(b);for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.ga&&h.capture==c){var k=h.X,q=h.Ia||h.src;h.Ga&&Sb(a.Z,h);e=!1!==k.call(q,d)&&e}}return e&&!1!=d.xb};H.prototype.wa=function(a,b,c,d){return this.Z.wa(String(a),b,c,d)};var dc=function(a){r(a.Z,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var I=function(a,b){a.style.display=b?"":"none"},fc=w?"MozUserSelect":x?"WebkitUserSelect":null,gc=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(fc){if(b=b?"none":"",a.style[fc]=b,c){a=0;for(var d;d=c[a];a++)d.style[fc]=b}}else if(v||Wa)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var hc=function(){};ba(hc);hc.prototype.ec=0;var J=function(a){H.call(this);this.A=a||qb();this.sa=ic};p(J,H);J.prototype.dc=hc.fa();var ic=null,jc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");};f=J.prototype;f.ha=null;f.f=!1;f.d=null;f.sa=null;f.p=null;f.q=null;f.F=null;
-var kc=function(a){return a.ha||(a.ha=":"+(a.dc.ec++).toString(36))},lc=function(a,b){if(a.p&&a.p.F){var c=a.p.F,d=a.ha;d in c&&delete c[d];Ma(a.p.F,b,a)}a.ha=b};J.prototype.a=function(){return this.d};var mc=function(a){return a.jb||(a.jb=new G(a))},oc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.p&&a.ha&&nc(a.p,a.ha)&&a.p!=b)throw Error("Unable to set parent component");a.p=b;J.e.eb.call(a,b)};f=J.prototype;f.getParent=function(){return this.p};
-f.eb=function(a){if(this.p&&this.p!=a)throw Error("Method not supported");J.e.eb.call(this,a)};f.kb=function(){return this.A};f.o=function(){this.d=this.A.createElement("div")};f.K=function(a){if(this.f)throw Error("Component already rendered");if(a&&this.Y(a)){var b=pb(a);this.A&&this.A.Q==b||(this.A=qb(a));this.Xa(a);this.D()}else throw Error("Invalid element to decorate");};f.Y=function(){return!0};f.Xa=function(a){this.d=a};f.D=function(){this.f=!0;pc(this,function(a){!a.f&&a.a()&&a.D()})};
-f.ca=function(){pc(this,function(a){a.f&&a.ca()});this.jb&&this.jb.Za();this.f=!1};f.Ca=function(a,b){this.Ta(a,qc(this),b)};
-f.Ta=function(a,b,c){r(!!a,"Provided element must not be null.");if(a.f&&(c||!this.f))throw Error("Component already rendered");if(0>b||b>qc(this))throw Error("Child component index out of bounds");this.F&&this.q||(this.F={},this.q=[]);if(a.getParent()==this){var d=kc(a);this.F[d]=a;Ga(this.q,a)}else Ma(this.F,kc(a),a);oc(a,this);Ja(this.q,b,0,a);if(a.f&&this.f&&a.getParent()==this)c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.d||this.o();c=K(this,b+1);b=this.B();c=c?c.d:
+var kc=function(a){return a.ha||(a.ha=":"+(a.dc.ec++).toString(36))},lc=function(a,b){if(a.p&&a.p.F){var c=a.p.F,d=a.ha;d in c&&delete c[d];Ma(a.p.F,b,a)}a.ha=b};J.prototype.a=function(){return this.d};var mc=function(a){a.La||(a.La=new G(a));return a.La},oc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.p&&a.ha&&nc(a.p,a.ha)&&a.p!=b)throw Error("Unable to set parent component");a.p=b;J.e.fb.call(a,b)};f=J.prototype;f.getParent=function(){return this.p};
+f.fb=function(a){if(this.p&&this.p!=a)throw Error("Method not supported");J.e.fb.call(this,a)};f.kb=function(){return this.A};f.o=function(){this.d=this.A.createElement("div")};f.K=function(a){if(this.f)throw Error("Component already rendered");if(a&&this.Y(a)){var b=pb(a);this.A&&this.A.Q==b||(this.A=qb(a));this.Ya(a);this.D()}else throw Error("Invalid element to decorate");};f.Y=function(){return!0};f.Ya=function(a){this.d=a};f.D=function(){this.f=!0;pc(this,function(a){!a.f&&a.a()&&a.D()})};
+f.ca=function(){pc(this,function(a){a.f&&a.ca()});this.La&&this.La.$a();this.f=!1};f.Ca=function(a,b){this.Ua(a,qc(this),b)};
+f.Ua=function(a,b,c){r(!!a,"Provided element must not be null.");if(a.f&&(c||!this.f))throw Error("Component already rendered");if(0>b||b>qc(this))throw Error("Child component index out of bounds");this.F&&this.q||(this.F={},this.q=[]);if(a.getParent()==this){var d=kc(a);this.F[d]=a;Ga(this.q,a)}else Ma(this.F,kc(a),a);oc(a,this);Ja(this.q,b,0,a);if(a.f&&this.f&&a.getParent()==this)c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.d||this.o();c=K(this,b+1);b=this.B();c=c?c.d:
 null;if(a.f)throw Error("Component already rendered");a.d||a.o();b?b.insertBefore(a.d,c||null):a.A.Q.body.appendChild(a.d);a.p&&!a.p.f||a.D()}else this.f&&!a.f&&a.d&&a.d.parentNode&&1==a.d.parentNode.nodeType&&a.D()};f.B=function(){return this.d};
 var rc=function(a){if(null==a.sa){var b=a.f?a.d:a.A.Q.body,c;t:{c=pb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(b,null))){c=c.direction||c.getPropertyValue("direction")||"";break t}c=""}a.sa="rtl"==(c||(b.currentStyle?b.currentStyle.direction:null)||b.style&&b.style.direction)}return a.sa};J.prototype.pa=function(a){if(this.f)throw Error("Component already rendered");this.sa=a};
 var qc=function(a){return a.q?a.q.length:0},nc=function(a,b){var c;a.F&&b?(c=a.F,c=(b in c?c[b]:void 0)||null):c=null;return c},K=function(a,b){return a.q?a.q[b]||null:null},pc=function(a,b,c){a.q&&Da(a.q,b,c)},sc=function(a,b){return a.q&&b?Ca(a.q,b):-1};
 J.prototype.removeChild=function(a,b){if(a){var c=m(a)?a:kc(a);a=nc(this,c);if(c&&a){var d=this.F;c in d&&delete d[c];Ga(this.q,a);b&&(a.ca(),a.d&&(c=a.d)&&c.parentNode&&c.parentNode.removeChild(c));oc(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var tc,uc={lc:"activedescendant",qc:"atomic",rc:"autocomplete",tc:"busy",wc:"checked",Bc:"controls",Dc:"describedby",Gc:"disabled",Ic:"dropeffect",Jc:"expanded",Kc:"flowto",Mc:"grabbed",Qc:"haspopup",Sc:"hidden",Uc:"invalid",Vc:"label",Wc:"labelledby",Xc:"level",bd:"live",md:"multiline",nd:"multiselectable",rd:"orientation",sd:"owns",td:"posinset",vd:"pressed",zd:"readonly",Bd:"relevant",Cd:"required",Id:"selected",Kd:"setsize",Md:"sort",Zd:"valuemax",$d:"valuemin",ae:"valuenow",be:"valuetext"};var vc={mc:"alert",nc:"alertdialog",oc:"application",pc:"article",sc:"banner",uc:"button",vc:"checkbox",xc:"columnheader",yc:"combobox",zc:"complementary",Ac:"contentinfo",Cc:"definition",Ec:"dialog",Fc:"directory",Hc:"document",Lc:"form",Nc:"grid",Oc:"gridcell",Pc:"group",Rc:"heading",Tc:"img",Yc:"link",Zc:"list",$c:"listbox",ad:"listitem",cd:"log",dd:"main",ed:"marquee",fd:"math",gd:"menu",hd:"menubar",jd:"menuitem",kd:"menuitemcheckbox",ld:"menuitemradio",od:"navigation",pd:"note",qd:"option",
 ud:"presentation",wd:"progressbar",xd:"radio",yd:"radiogroup",Ad:"region",Dd:"row",Ed:"rowgroup",Fd:"rowheader",Gd:"scrollbar",Hd:"search",Jd:"separator",Ld:"slider",Nd:"spinbutton",Od:"status",Pd:"tab",Qd:"tablist",Rd:"tabpanel",Sd:"textbox",Td:"timer",Ud:"toolbar",Vd:"tooltip",Wd:"tree",Xd:"treegrid",Yd:"treeitem"};var wc=function(a,b){b?(r(La(vc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},yc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=xc(b);""===c||void 0==c?(tc||(tc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=tc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
 c)},xc=function(a){r(a,"ARIA attribute cannot be empty.");r(La(uc,a),"No such ARIA attribute "+a);return"aria-"+a};var Bc=function(a,b,c,d,e){if(!(v||x&&z("525")))return!0;if(y&&e)return zc(a);if(e&&!d)return!1;"number"==typeof b&&(b=Ac(b));if(!c&&(17==b||18==b||y&&91==b))return!1;if(x&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(v&&d&&b==a)return!1;switch(a){case 13:return!(v&&v&&9<=hb);case 27:return!x}return zc(a)},zc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||x&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
-default:return!1}},Ac=function(a){if(w)a=Cc(a);else if(y&&x)t:switch(a){case 93:a=91;break t}return a},Cc=function(a){switch(a){case 61:return 187;case 59:return 186;case 173:return 189;case 224:return 91;case 0:return 224;default:return a}};var L=function(a,b){H.call(this);a&&Dc(this,a,b)};p(L,H);f=L.prototype;f.d=null;f.Ea=null;f.Wa=null;f.Fa=null;f.r=-1;f.N=-1;f.ib=!1;
+default:return!1}},Ac=function(a){if(w)a=Cc(a);else if(y&&x)t:switch(a){case 93:a=91;break t}return a},Cc=function(a){switch(a){case 61:return 187;case 59:return 186;case 173:return 189;case 224:return 91;case 0:return 224;default:return a}};var L=function(a,b){H.call(this);a&&Dc(this,a,b)};p(L,H);f=L.prototype;f.d=null;f.Ea=null;f.Xa=null;f.Fa=null;f.r=-1;f.N=-1;f.jb=!1;
 var Ec={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},Fc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},Gc=v||x&&z("525"),Hc=y&&w;
-L.prototype.Qb=function(a){x&&(17==this.r&&!a.ctrlKey||18==this.r&&!a.altKey||y&&91==this.r&&!a.metaKey)&&(this.N=this.r=-1);-1==this.r&&(a.ctrlKey&&17!=a.keyCode?this.r=17:a.altKey&&18!=a.keyCode?this.r=18:a.metaKey&&91!=a.keyCode&&(this.r=91));Gc&&!Bc(a.keyCode,this.r,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.N=Ac(a.keyCode),Hc&&(this.ib=a.altKey))};L.prototype.Rb=function(a){this.N=this.r=-1;this.ib=a.altKey};
-L.prototype.handleEvent=function(a){var b=a.O,c,d,e=b.altKey;v&&"keypress"==a.type?(c=this.N,d=13!=c&&27!=c?b.keyCode:0):x&&"keypress"==a.type?(c=this.N,d=0<=b.charCode&&63232>b.charCode&&zc(c)?b.charCode:0):Wa?(c=this.N,d=zc(c)?b.keyCode:0):(c=b.keyCode||this.N,d=b.charCode||0,Hc&&(e=this.ib),y&&63==d&&224==c&&(c=191));var g=c=Ac(c),h=b.keyIdentifier;c?63232<=c&&c in Ec?g=Ec[c]:25==c&&a.shiftKey&&(g=9):h&&h in Fc&&(g=Fc[h]);a=g==this.r;this.r=g;b=new Ic(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
-L.prototype.a=function(){return this.d};var Dc=function(a,b,c){a.Fa&&a.detach();a.d=b;a.Ea=E(a.d,"keypress",a,c);a.Wa=E(a.d,"keydown",a.Qb,c,a);a.Fa=E(a.d,"keyup",a.Rb,c,a)};L.prototype.detach=function(){this.Ea&&(F(this.Ea),F(this.Wa),F(this.Fa),this.Fa=this.Wa=this.Ea=null);this.d=null;this.N=this.r=-1};var Ic=function(a,b,c,d){B.call(this,d);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};p(Ic,B);var Jc=!!l.DOMTokenList,Kc=Jc?function(a){return a.classList}:function(a){a=a.className;return m(a)&&a.match(/\S+/g)||[]},Lc=Jc?function(a,b){r(!!a.classList);return a.classList.contains(b)}:function(a,b){return t(Kc(a),b)},Mc=Jc?function(a,b){a.classList.add(b)}:function(a,b){Lc(a,b)||(a.className+=0<a.className.length?" "+b:b)},Nc=Jc?function(a,b){a.classList.remove(b)}:function(a,b){Lc(a,b)&&(a.className=Ea(Kc(a),function(a){return a!=b}).join(" "))};var Pc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!n(b))throw Error("Invalid decorator function "+b);Oc[a]=b},Qc={},Oc={};var N=function(){};ba(N);N.prototype.T=function(){};var Rc=function(a,b){a&&(a.tabIndex=b?0:-1)};f=N.prototype;f.o=function(a){return a.kb().o("div",this.ta(a).join(" "))};f.B=function(a){return a};f.Y=function(a){return"DIV"==a.tagName};f.K=function(a,b){b.id&&lc(a,b.id);var c=this.v(),d=!1,e=Kc(b);e&&Da(e,function(b){b==c?d=!0:b&&this.$a(a,b,c)},this);d||Mc(b,c);Sc(a,this.B(b));return b};
-f.$a=function(a,b,c){b==c+"-disabled"?a.qa(!1):b==c+"-horizontal"?Tc(a,"horizontal"):b==c+"-vertical"&&Tc(a,"vertical")};var Sc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{var g=void 0;e=Kc(c);for(var h=0,k=e.length;h<k;h++)if(g=e[h],g=g in Oc?Oc[g]():null){e=g;break t}e=null}e&&(e.d=c,a.isEnabled()||e.qa(!1),a.Ca(e),e.K(c))}else c.nodeValue&&""!=qa(c.nodeValue)||b.removeChild(c);c=d}};
-N.prototype.Ma=function(a){a=a.a();r(a,"The container DOM element cannot be null.");gc(a,!0,w);v&&(a.hideFocus=!0);var b=this.T();b&&wc(a,b)};N.prototype.j=function(a){return a.a()};N.prototype.v=function(){return"goog-container"};N.prototype.ta=function(a){var b=this.v(),c=[b,"horizontal"==a.L?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var O=function(){},Uc;ba(O);f=O.prototype;f.T=function(){};f.o=function(a){var b=a.kb().o("div",this.ta(a).join(" "),a.Ba);Vc(a,b);return b};f.B=function(a){return a};f.ra=function(a,b,c){if(a=a.a?a.a():a)if(v&&!z("7")){var d=Wc(kb(a),b);d.push(b);la(c?lb:nb,a).apply(null,d)}else c?lb(a,b):nb(a,b)};f.Y=function(){return!0};
-f.K=function(a,b){b.id&&lc(a,b.id);var c=this.B(b);c&&c.firstChild?Xc(a,c.firstChild.nextSibling?Ha(c.childNodes):c.firstChild):a.Ba=null;var d=0,e=this.v(),g=this.v(),h=!1,k=!1,c=!1,q=kb(b);Da(q,function(a){if(h||a!=e)if(k||a!=g){var b=d;this.tb||(this.Ha||Yc(this),this.tb=Na(this.Ha));a=parseInt(this.tb[a],10);d=b|(isNaN(a)?0:a)}else k=!0;else h=!0,g==e&&(k=!0)},this);a.g=d;h||(q.push(e),g==e&&(k=!0));k||q.push(g);var ha=a.G;ha&&q.push.apply(q,ha);if(v&&!z("7")){var M=Wc(q);0<M.length&&(q.push.apply(q,
-M),c=!0)}if(!h||!k||ha||c)b.className=q.join(" ");Vc(a,b);return b};f.Ma=function(a){rc(a)&&this.pa(a.a(),!0);a.isEnabled()&&this.na(a,a.s())};var Zc=function(a,b,c){if(a=c||a.T())r(b,"The element passed as a first parameter cannot be null."),wc(b,a)},Vc=function(a,b){r(a);r(b);a.s()||yc(b,"hidden",!a.s());a.isEnabled()||$c(b,1,!a.isEnabled());a.l&8&&$c(b,8,!!(a.g&8));a.l&16&&$c(b,16,!!(a.g&16));a.l&64&&$c(b,64,!!(a.g&64))};f=O.prototype;f.za=function(a,b){gc(a,!b,!v&&!Wa)};
-f.pa=function(a,b){this.ra(a,this.v()+"-rtl",b)};f.I=function(a){var b;return a.l&32&&(b=a.j())?Cb(b)&&Db(b):!1};f.na=function(a,b){var c;if(a.l&32&&(c=a.j())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.la(null)}(Cb(c)&&Db(c))!=b&&(b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex")))}};f.ja=function(a,b){I(a,b);a&&yc(a,"hidden",!b)};f.t=function(a,b,c){var d=a.a();if(d){var e=ad(this,b);e&&this.ra(a,e,c);$c(d,b,c)}};
-var $c=function(a,b,c){Uc||(Uc={1:"disabled",8:"selected",16:"checked",64:"expanded"});if(b=Uc[b])r(a,"The element passed as a first parameter cannot be null."),yc(a,b,c)};O.prototype.j=function(a){return a.a()};O.prototype.v=function(){return"goog-control"};O.prototype.ta=function(a){var b=this.v(),c=[b],d=this.v();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push(ad(this,e));b&=~e}c.push.apply(c,d);(a=a.G)&&c.push.apply(c,a);v&&!z("7")&&c.push.apply(c,Wc(c));return c};
-var Wc=function(a,b){var c=[];b&&(a=a.concat([b]));Da([],function(d){!Fa(d,la(t,a))||b&&!t(d,b)||c.push(d.join("_"))});return c},ad=function(a,b){a.Ha||Yc(a);return a.Ha[b]},Yc=function(a){var b=a.v();a.Ha={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var P=function(a,b,c){J.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=ka(b);if(d=Qc[d])break;b=b.e?b.e.constructor:null}b=d?n(d.fa)?d.fa():new d:null}this.b=b;this.Ba=void 0!==a?a:null};p(P,J);f=P.prototype;f.Ba=null;f.g=0;f.l=39;f.cc=255;f.U=0;f.n=!0;f.G=null;f.$=!0;f.xa=!1;f.qb=null;f.ob=function(){return this.$};f.Na=function(a){this.f&&a!=this.$&&bd(this,a);this.$=a};f.j=function(){return this.b.j(this)};f.ya=function(){return this.ea||(this.ea=new L)};f.zb=function(){return this.b};
-f.ra=function(a,b){b?a&&(this.G?t(this.G,a)||this.G.push(a):this.G=[a],this.b.ra(this,a,!0)):a&&this.G&&Ga(this.G,a)&&(0==this.G.length&&(this.G=null),this.b.ra(this,a,!1))};f.o=function(){var a=this.b.o(this);this.d=a;Zc(this.b,a,this.qb);this.xa||this.b.za(a,!1);this.s()||this.b.ja(a,!1)};f.B=function(){return this.b.B(this.a())};f.Y=function(a){return this.b.Y(a)};f.Xa=function(a){this.d=a=this.b.K(this,a);Zc(this.b,a,this.qb);this.xa||this.b.za(a,!1);this.n="none"!=a.style.display};
-f.D=function(){P.e.D.call(this);this.b.Ma(this);if(this.l&-2&&(this.ob()&&bd(this,!0),this.l&32)){var a=this.j();if(a){var b=this.ya();Dc(b,a);mc(this).c(b,"key",this.J).c(a,"focus",this.ma).c(a,"blur",this.la)}}};
-var bd=function(a,b){var c=mc(a),d=a.a();b?(c.c(d,"mouseover",a.Qa).c(d,"mousedown",a.ka).c(d,"mouseup",a.Ra).c(d,"mouseout",a.Pa),a.oa!=aa&&c.c(d,"contextmenu",a.oa),v&&c.c(d,"dblclick",a.sb)):(c.u(d,"mouseover",a.Qa).u(d,"mousedown",a.ka).u(d,"mouseup",a.Ra).u(d,"mouseout",a.Pa),a.oa!=aa&&c.u(d,"contextmenu",a.oa),v&&c.u(d,"dblclick",a.sb))};P.prototype.ca=function(){P.e.ca.call(this);this.ea&&this.ea.detach();this.s()&&this.isEnabled()&&this.b.na(this,!1)};var Xc=function(a,b){a.Ba=b};f=P.prototype;
+L.prototype.Qb=function(a){x&&(17==this.r&&!a.ctrlKey||18==this.r&&!a.altKey||y&&91==this.r&&!a.metaKey)&&(this.N=this.r=-1);-1==this.r&&(a.ctrlKey&&17!=a.keyCode?this.r=17:a.altKey&&18!=a.keyCode?this.r=18:a.metaKey&&91!=a.keyCode&&(this.r=91));Gc&&!Bc(a.keyCode,this.r,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.N=Ac(a.keyCode),Hc&&(this.jb=a.altKey))};L.prototype.Rb=function(a){this.N=this.r=-1;this.jb=a.altKey};
+L.prototype.handleEvent=function(a){var b=a.O,c,d,e=b.altKey;v&&"keypress"==a.type?(c=this.N,d=13!=c&&27!=c?b.keyCode:0):x&&"keypress"==a.type?(c=this.N,d=0<=b.charCode&&63232>b.charCode&&zc(c)?b.charCode:0):Wa?(c=this.N,d=zc(c)?b.keyCode:0):(c=b.keyCode||this.N,d=b.charCode||0,Hc&&(e=this.jb),y&&63==d&&224==c&&(c=191));var g=c=Ac(c),h=b.keyIdentifier;c?63232<=c&&c in Ec?g=Ec[c]:25==c&&a.shiftKey&&(g=9):h&&h in Fc&&(g=Fc[h]);a=g==this.r;this.r=g;b=new Ic(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
+L.prototype.a=function(){return this.d};var Dc=function(a,b,c){a.Fa&&a.detach();a.d=b;a.Ea=E(a.d,"keypress",a,c);a.Xa=E(a.d,"keydown",a.Qb,c,a);a.Fa=E(a.d,"keyup",a.Rb,c,a)};L.prototype.detach=function(){this.Ea&&(F(this.Ea),F(this.Xa),F(this.Fa),this.Fa=this.Xa=this.Ea=null);this.d=null;this.N=this.r=-1};var Ic=function(a,b,c,d){B.call(this,d);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};p(Ic,B);var Jc=function(a){if(a.classList)return a.classList;a=a.className;return m(a)&&a.match(/\S+/g)||[]},Kc=function(a,b){return a.classList?a.classList.contains(b):t(Jc(a),b)},Lc=function(a,b){a.classList?a.classList.add(b):Kc(a,b)||(a.className+=0<a.className.length?" "+b:b)},Mc=function(a,b){a.classList?a.classList.remove(b):Kc(a,b)&&(a.className=Ea(Jc(a),function(a){return a!=b}).join(" "))};var Oc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!n(b))throw Error("Invalid decorator function "+b);Nc[a]=b},Pc={},Nc={};var N=function(){};ba(N);N.prototype.T=function(){};var Qc=function(a,b){a&&(a.tabIndex=b?0:-1)};f=N.prototype;f.o=function(a){return a.kb().o("div",this.ta(a).join(" "))};f.B=function(a){return a};f.Y=function(a){return"DIV"==a.tagName};f.K=function(a,b){b.id&&lc(a,b.id);var c=this.v(),d=!1,e=Jc(b);e&&Da(e,function(b){b==c?d=!0:b&&this.ab(a,b,c)},this);d||Lc(b,c);Rc(a,this.B(b));return b};
+f.ab=function(a,b,c){b==c+"-disabled"?a.qa(!1):b==c+"-horizontal"?Sc(a,"horizontal"):b==c+"-vertical"&&Sc(a,"vertical")};var Rc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{var g=void 0;e=Jc(c);for(var h=0,k=e.length;h<k;h++)if(g=e[h],g=g in Nc?Nc[g]():null){e=g;break t}e=null}e&&(e.d=c,a.isEnabled()||e.qa(!1),a.Ca(e),e.K(c))}else c.nodeValue&&""!=qa(c.nodeValue)||b.removeChild(c);c=d}};
+N.prototype.Na=function(a){a=a.a();r(a,"The container DOM element cannot be null.");gc(a,!0,w);v&&(a.hideFocus=!0);var b=this.T();b&&wc(a,b)};N.prototype.j=function(a){return a.a()};N.prototype.v=function(){return"goog-container"};N.prototype.ta=function(a){var b=this.v(),c=[b,"horizontal"==a.L?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var O=function(){},Tc;ba(O);f=O.prototype;f.T=function(){};f.o=function(a){var b=a.kb().o("div",this.ta(a).join(" "),a.Ba);Uc(a,b);return b};f.B=function(a){return a};f.ra=function(a,b,c){if(a=a.a?a.a():a)if(v&&!z("7")){var d=Vc(kb(a),b);d.push(b);la(c?lb:nb,a).apply(null,d)}else c?lb(a,b):nb(a,b)};f.Y=function(){return!0};
+f.K=function(a,b){b.id&&lc(a,b.id);var c=this.B(b);c&&c.firstChild?Wc(a,c.firstChild.nextSibling?Ha(c.childNodes):c.firstChild):a.Ba=null;var d=0,e=this.v(),g=this.v(),h=!1,k=!1,c=!1,q=kb(b);Da(q,function(a){if(h||a!=e)if(k||a!=g){var b=d;this.tb||(this.Ha||Xc(this),this.tb=Na(this.Ha));a=parseInt(this.tb[a],10);d=b|(isNaN(a)?0:a)}else k=!0;else h=!0,g==e&&(k=!0)},this);a.g=d;h||(q.push(e),g==e&&(k=!0));k||q.push(g);var ha=a.G;ha&&q.push.apply(q,ha);if(v&&!z("7")){var M=Vc(q);0<M.length&&(q.push.apply(q,
+M),c=!0)}if(!h||!k||ha||c)b.className=q.join(" ");Uc(a,b);return b};f.Na=function(a){rc(a)&&this.pa(a.a(),!0);a.isEnabled()&&this.na(a,a.s())};var Yc=function(a,b,c){if(a=c||a.T())r(b,"The element passed as a first parameter cannot be null."),wc(b,a)},Uc=function(a,b){r(a);r(b);a.s()||yc(b,"hidden",!a.s());a.isEnabled()||Zc(b,1,!a.isEnabled());a.l&8&&Zc(b,8,!!(a.g&8));a.l&16&&Zc(b,16,!!(a.g&16));a.l&64&&Zc(b,64,!!(a.g&64))};f=O.prototype;f.za=function(a,b){gc(a,!b,!v&&!Wa)};
+f.pa=function(a,b){this.ra(a,this.v()+"-rtl",b)};f.I=function(a){var b;return a.l&32&&(b=a.j())?Cb(b)&&Db(b):!1};f.na=function(a,b){var c;if(a.l&32&&(c=a.j())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.la(null)}(Cb(c)&&Db(c))!=b&&(b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex")))}};f.ja=function(a,b){I(a,b);a&&yc(a,"hidden",!b)};f.t=function(a,b,c){var d=a.a();if(d){var e=$c(this,b);e&&this.ra(a,e,c);Zc(d,b,c)}};
+var Zc=function(a,b,c){Tc||(Tc={1:"disabled",8:"selected",16:"checked",64:"expanded"});if(b=Tc[b])r(a,"The element passed as a first parameter cannot be null."),yc(a,b,c)};O.prototype.j=function(a){return a.a()};O.prototype.v=function(){return"goog-control"};O.prototype.ta=function(a){var b=this.v(),c=[b],d=this.v();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push($c(this,e));b&=~e}c.push.apply(c,d);(a=a.G)&&c.push.apply(c,a);v&&!z("7")&&c.push.apply(c,Vc(c));return c};
+var Vc=function(a,b){var c=[];b&&(a=a.concat([b]));Da([],function(d){!Fa(d,la(t,a))||b&&!t(d,b)||c.push(d.join("_"))});return c},$c=function(a,b){a.Ha||Xc(a);return a.Ha[b]},Xc=function(a){var b=a.v();a.Ha={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var P=function(a,b,c){J.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=ka(b);if(d=Pc[d])break;b=b.e?b.e.constructor:null}b=d?n(d.fa)?d.fa():new d:null}this.b=b;this.Ba=void 0!==a?a:null};p(P,J);f=P.prototype;f.Ba=null;f.g=0;f.l=39;f.cc=255;f.U=0;f.n=!0;f.G=null;f.$=!0;f.xa=!1;f.qb=null;f.ob=function(){return this.$};f.Oa=function(a){this.f&&a!=this.$&&ad(this,a);this.$=a};f.j=function(){return this.b.j(this)};f.ya=function(){return this.ea||(this.ea=new L)};f.zb=function(){return this.b};
+f.ra=function(a,b){b?a&&(this.G?t(this.G,a)||this.G.push(a):this.G=[a],this.b.ra(this,a,!0)):a&&this.G&&Ga(this.G,a)&&(0==this.G.length&&(this.G=null),this.b.ra(this,a,!1))};f.o=function(){var a=this.b.o(this);this.d=a;Yc(this.b,a,this.qb);this.xa||this.b.za(a,!1);this.s()||this.b.ja(a,!1)};f.B=function(){return this.b.B(this.a())};f.Y=function(a){return this.b.Y(a)};f.Ya=function(a){this.d=a=this.b.K(this,a);Yc(this.b,a,this.qb);this.xa||this.b.za(a,!1);this.n="none"!=a.style.display};
+f.D=function(){P.e.D.call(this);this.b.Na(this);if(this.l&-2&&(this.ob()&&ad(this,!0),this.l&32)){var a=this.j();if(a){var b=this.ya();Dc(b,a);mc(this).c(b,"key",this.J).c(a,"focus",this.ma).c(a,"blur",this.la)}}};
+var ad=function(a,b){var c=mc(a),d=a.a();b?(c.c(d,"mouseover",a.Ra).c(d,"mousedown",a.ka).c(d,"mouseup",a.Sa).c(d,"mouseout",a.Qa),a.oa!=aa&&c.c(d,"contextmenu",a.oa),v&&c.c(d,"dblclick",a.sb)):(c.u(d,"mouseover",a.Ra).u(d,"mousedown",a.ka).u(d,"mouseup",a.Sa).u(d,"mouseout",a.Qa),a.oa!=aa&&c.u(d,"contextmenu",a.oa),v&&c.u(d,"dblclick",a.sb))};P.prototype.ca=function(){P.e.ca.call(this);this.ea&&this.ea.detach();this.s()&&this.isEnabled()&&this.b.na(this,!1)};var Wc=function(a,b){a.Ba=b};f=P.prototype;
 f.pa=function(a){P.e.pa.call(this,a);var b=this.a();b&&this.b.pa(b,a)};f.za=function(a){this.xa=a;var b=this.a();b&&this.b.za(b,a)};f.s=function(){return this.n};f.ja=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ja(c,a);this.isEnabled()&&this.b.na(this,a);this.n=a;return!0}return!1};f.isEnabled=function(){return!(this.g&1)};
-f.qa=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!Q(this,1,!a)||(a||(this.setActive(!1),this.C(!1)),this.s()&&this.b.na(this,a),this.t(1,!a))};f.C=function(a){Q(this,2,a)&&this.t(2,a)};f.setActive=function(a){Q(this,4,a)&&this.t(4,a)};var cd=function(a,b){Q(a,8,b)&&a.t(8,b)},R=function(a,b){Q(a,64,b)&&a.t(64,b)};P.prototype.t=function(a,b){this.l&a&&b!=!!(this.g&a)&&(this.b.t(this,a,b),this.g=b?this.g|a:this.g&~a)};
-var dd=function(a,b,c){if(a.f&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.t(b,!1);a.l=c?a.l|b:a.l&~b},S=function(a,b){return!!(a.cc&b)&&!!(a.l&b)},Q=function(a,b,c){return!!(a.l&b)&&!!(a.g&b)!=c&&(!(a.U&b)||a.dispatchEvent(jc(b,c)))&&!a.Sb};f=P.prototype;f.Qa=function(a){(!a.relatedTarget||!yb(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&S(this,2)&&this.C(!0)};
-f.Pa=function(a){a.relatedTarget&&yb(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(S(this,4)&&this.setActive(!1),S(this,2)&&this.C(!1))};f.oa=aa;f.ka=function(a){this.isEnabled()&&(S(this,2)&&this.C(!0),!Lb(a)||x&&y&&a.ctrlKey||(S(this,4)&&this.setActive(!0),this.b.I(this)&&this.j().focus()));this.xa||!Lb(a)||x&&y&&a.ctrlKey||a.preventDefault()};f.Ra=function(a){this.isEnabled()&&(S(this,2)&&this.C(!0),this.g&4&&ed(this,a)&&S(this,4)&&this.setActive(!1))};
-f.sb=function(a){this.isEnabled()&&ed(this,a)};var ed=function(a,b){if(S(a,16)){var c=!(a.g&16);Q(a,16,c)&&a.t(16,c)}S(a,8)&&cd(a,!0);S(a,64)&&R(a,!(a.g&64));c=new A("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.hb=b.hb);return a.dispatchEvent(c)};P.prototype.ma=function(){S(this,32)&&Q(this,32,!0)&&this.t(32,!0)};P.prototype.la=function(){S(this,4)&&this.setActive(!1);S(this,32)&&Q(this,32,!1)&&this.t(32,!1)};
-P.prototype.J=function(a){return this.s()&&this.isEnabled()&&this.lb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};P.prototype.lb=function(a){return 13==a.keyCode&&ed(this,a)};if(!n(P))throw Error("Invalid component class "+P);if(!n(O))throw Error("Invalid renderer class "+O);var fd=ka(P);Qc[fd]=O;Pc("goog-control",function(){return new P(null)});var T=function(a,b,c){J.call(this,c);this.b=b||N.fa();this.L=a||"vertical"};p(T,J);f=T.prototype;f.ub=null;f.ea=null;f.b=null;f.L=null;f.n=!0;f.V=!0;f.Ya=!0;f.h=-1;f.i=null;f.ba=!1;f.Pb=!1;f.Ob=!0;f.M=null;f.j=function(){return this.ub||this.b.j(this)};f.ya=function(){return this.ea||(this.ea=new L(this.j()))};f.zb=function(){return this.b};f.o=function(){this.d=this.b.o(this)};f.B=function(){return this.b.B(this.a())};f.Y=function(a){return this.b.Y(a)};
-f.Xa=function(a){this.d=this.b.K(this,a);"none"==a.style.display&&(this.n=!1)};f.D=function(){T.e.D.call(this);pc(this,function(a){a.f&&gd(this,a)},this);var a=this.a();this.b.Ma(this);this.ja(this.n,!0);mc(this).c(this,"enter",this.Ib).c(this,"highlight",this.Jb).c(this,"unhighlight",this.Lb).c(this,"open",this.Kb).c(this,"close",this.Gb).c(a,"mousedown",this.ka).c(pb(a),"mouseup",this.Hb).c(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Fb);this.I()&&hd(this,!0)};
-var hd=function(a,b){var c=mc(a),d=a.j();b?c.c(d,"focus",a.ma).c(d,"blur",a.la).c(a.ya(),"key",a.J):c.u(d,"focus",a.ma).u(d,"blur",a.la).u(a.ya(),"key",a.J)};f=T.prototype;f.ca=function(){id(this,-1);this.i&&R(this.i,!1);this.ba=!1;T.e.ca.call(this)};f.Ib=function(){return!0};
+f.qa=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!Q(this,1,!a)||(a||(this.setActive(!1),this.C(!1)),this.s()&&this.b.na(this,a),this.t(1,!a))};f.C=function(a){Q(this,2,a)&&this.t(2,a)};f.setActive=function(a){Q(this,4,a)&&this.t(4,a)};var bd=function(a,b){Q(a,8,b)&&a.t(8,b)},R=function(a,b){Q(a,64,b)&&a.t(64,b)};P.prototype.t=function(a,b){this.l&a&&b!=!!(this.g&a)&&(this.b.t(this,a,b),this.g=b?this.g|a:this.g&~a)};
+var cd=function(a,b,c){if(a.f&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.t(b,!1);a.l=c?a.l|b:a.l&~b},S=function(a,b){return!!(a.cc&b)&&!!(a.l&b)},Q=function(a,b,c){return!!(a.l&b)&&!!(a.g&b)!=c&&(!(a.U&b)||a.dispatchEvent(jc(b,c)))&&!a.Sb};f=P.prototype;f.Ra=function(a){(!a.relatedTarget||!yb(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&S(this,2)&&this.C(!0)};
+f.Qa=function(a){a.relatedTarget&&yb(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(S(this,4)&&this.setActive(!1),S(this,2)&&this.C(!1))};f.oa=aa;f.ka=function(a){this.isEnabled()&&(S(this,2)&&this.C(!0),!Lb(a)||x&&y&&a.ctrlKey||(S(this,4)&&this.setActive(!0),this.b.I(this)&&this.j().focus()));this.xa||!Lb(a)||x&&y&&a.ctrlKey||a.preventDefault()};f.Sa=function(a){this.isEnabled()&&(S(this,2)&&this.C(!0),this.g&4&&dd(this,a)&&S(this,4)&&this.setActive(!1))};
+f.sb=function(a){this.isEnabled()&&dd(this,a)};var dd=function(a,b){if(S(a,16)){var c=!(a.g&16);Q(a,16,c)&&a.t(16,c)}S(a,8)&&bd(a,!0);S(a,64)&&R(a,!(a.g&64));c=new A("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.ib=b.ib);return a.dispatchEvent(c)};P.prototype.ma=function(){S(this,32)&&Q(this,32,!0)&&this.t(32,!0)};P.prototype.la=function(){S(this,4)&&this.setActive(!1);S(this,32)&&Q(this,32,!1)&&this.t(32,!1)};
+P.prototype.J=function(a){return this.s()&&this.isEnabled()&&this.lb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};P.prototype.lb=function(a){return 13==a.keyCode&&dd(this,a)};if(!n(P))throw Error("Invalid component class "+P);if(!n(O))throw Error("Invalid renderer class "+O);var ed=ka(P);Pc[ed]=O;Oc("goog-control",function(){return new P(null)});var T=function(a,b,c){J.call(this,c);this.b=b||N.fa();this.L=a||"vertical"};p(T,J);f=T.prototype;f.ub=null;f.ea=null;f.b=null;f.L=null;f.n=!0;f.V=!0;f.Za=!0;f.h=-1;f.i=null;f.ba=!1;f.Pb=!1;f.Ob=!0;f.M=null;f.j=function(){return this.ub||this.b.j(this)};f.ya=function(){return this.ea||(this.ea=new L(this.j()))};f.zb=function(){return this.b};f.o=function(){this.d=this.b.o(this)};f.B=function(){return this.b.B(this.a())};f.Y=function(a){return this.b.Y(a)};
+f.Ya=function(a){this.d=this.b.K(this,a);"none"==a.style.display&&(this.n=!1)};f.D=function(){T.e.D.call(this);pc(this,function(a){a.f&&fd(this,a)},this);var a=this.a();this.b.Na(this);this.ja(this.n,!0);mc(this).c(this,"enter",this.Ib).c(this,"highlight",this.Jb).c(this,"unhighlight",this.Lb).c(this,"open",this.Kb).c(this,"close",this.Gb).c(a,"mousedown",this.ka).c(pb(a),"mouseup",this.Hb).c(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Fb);this.I()&&gd(this,!0)};
+var gd=function(a,b){var c=mc(a),d=a.j();b?c.c(d,"focus",a.ma).c(d,"blur",a.la).c(a.ya(),"key",a.J):c.u(d,"focus",a.ma).u(d,"blur",a.la).u(a.ya(),"key",a.J)};f=T.prototype;f.ca=function(){hd(this,-1);this.i&&R(this.i,!1);this.ba=!1;T.e.ca.call(this)};f.Ib=function(){return!0};
 f.Jb=function(a){var b=sc(this,a.target);if(-1<b&&b!=this.h){var c=K(this,this.h);c&&c.C(!1);this.h=b;c=K(this,this.h);this.ba&&c.setActive(!0);this.Ob&&this.i&&c!=this.i&&(c.l&64?R(c,!0):R(this.i,!1))}b=this.a();r(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&yc(b,"activedescendant",a.target.a().id)};f.Lb=function(a){a.target==K(this,this.h)&&(this.h=-1);a=this.a();r(a,"The DOM element for the container cannot be null.");a.removeAttribute(xc("activedescendant"))};
 f.Kb=function(a){(a=a.target)&&a!=this.i&&a.getParent()==this&&(this.i&&R(this.i,!1),this.i=a)};f.Gb=function(a){a.target==this.i&&(this.i=null)};f.ka=function(a){this.V&&(this.ba=!0);var b=this.j();b&&Cb(b)&&Db(b)?b.focus():a.preventDefault()};f.Hb=function(){this.ba=!1};
-f.Fb=function(a){var b;t:{b=a.target;if(this.M)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.M){b=this.M[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.ka(a);break;case "mouseup":b.Ra(a);break;case "mouseover":b.Qa(a);break;case "mouseout":b.Pa(a);break;case "contextmenu":b.oa(a)}};f.ma=function(){};f.la=function(){id(this,-1);this.ba=!1;this.i&&R(this.i,!1)};
+f.Fb=function(a){var b;t:{b=a.target;if(this.M)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.M){b=this.M[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.ka(a);break;case "mouseup":b.Sa(a);break;case "mouseover":b.Ra(a);break;case "mouseout":b.Qa(a);break;case "contextmenu":b.oa(a)}};f.ma=function(){};f.la=function(){hd(this,-1);this.ba=!1;this.i&&R(this.i,!1)};
 f.J=function(a){return this.isEnabled()&&this.s()&&(0!=qc(this)||this.ub)&&this.lb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
-f.lb=function(a){var b=K(this,this.h);if(b&&"function"==typeof b.J&&b.J(a)||this.i&&this.i!=b&&"function"==typeof this.i.J&&this.i.J(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.I())this.j().blur();else return!1;break;case 36:jd(this);break;case 35:kd(this);break;case 38:if("vertical"==this.L)ld(this);else return!1;break;case 37:if("horizontal"==this.L)rc(this)?md(this):ld(this);else return!1;break;case 40:if("vertical"==this.L)md(this);else return!1;
-break;case 39:if("horizontal"==this.L)rc(this)?ld(this):md(this);else return!1;break;default:return!1}return!0};var gd=function(a,b){var c=b.a(),c=c.id||(c.id=kc(b));a.M||(a.M={});a.M[c]=b};T.prototype.Ca=function(a,b){Ba(a,P,"The child of a container must be a control");T.e.Ca.call(this,a,b)};T.prototype.Ta=function(a,b,c){a.U|=2;a.U|=64;!this.I()&&this.Pb||dd(a,32,!1);a.Na(!1);T.e.Ta.call(this,a,b,c);a.f&&this.f&&gd(this,a);b<=this.h&&this.h++};
-T.prototype.removeChild=function(a,b){if(a=m(a)?nc(this,a):a){var c=sc(this,a);-1!=c&&(c==this.h?(a.C(!1),this.h=-1):c<this.h&&this.h--);var d=a.a();d&&d.id&&this.M&&(c=this.M,d=d.id,d in c&&delete c[d])}a=T.e.removeChild.call(this,a,b);a.Na(!0);return a};var Tc=function(a,b){if(a.a())throw Error("Component already rendered");a.L=b};f=T.prototype;f.s=function(){return this.n};
-f.ja=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){this.n=a;var c=this.a();c&&(I(c,a),this.I()&&Rc(this.j(),this.V&&this.n),b||this.dispatchEvent(this.n?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.V};f.qa=function(a){this.V!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.V=!0,pc(this,function(a){a.vb?delete a.vb:a.qa(!0)})):(pc(this,function(a){a.isEnabled()?a.qa(!1):a.vb=!0}),this.ba=this.V=!1),this.I()&&Rc(this.j(),a&&this.n))};
-f.I=function(){return this.Ya};f.na=function(a){a!=this.Ya&&this.f&&hd(this,a);this.Ya=a;this.V&&this.n&&Rc(this.j(),a)};var id=function(a,b){var c=K(a,b);c?c.C(!0):-1<a.h&&K(a,a.h).C(!1)};T.prototype.C=function(a){id(this,sc(this,a))};
-var jd=function(a){nd(a,function(a,c){return(a+1)%c},qc(a)-1)},kd=function(a){nd(a,function(a,c){a--;return 0>a?c-1:a},0)},md=function(a){nd(a,function(a,c){return(a+1)%c},a.h)},ld=function(a){nd(a,function(a,c){a--;return 0>a?c-1:a},a.h)},nd=function(a,b,c){c=0>c?sc(a,a.i):c;var d=qc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=K(a,c);if(g&&g.s()&&g.isEnabled()&&g.l&2){a.Ua(c);break}e++;c=b.call(a,c,d)}};T.prototype.Ua=function(a){id(this,a)};var U=function(){};p(U,O);ba(U);f=U.prototype;f.v=function(){return"goog-tab"};f.T=function(){return"tab"};f.o=function(a){var b=U.e.o.call(this,a);(a=a.Sa())&&this.Va(b,a);return b};f.K=function(a,b){b=U.e.K.call(this,a,b);var c=this.Sa(b);c&&(a.rb=c);a.g&8&&(c=a.getParent())&&n(c.W)&&(a.t(8,!1),c.W(a));return b};f.Sa=function(a){return a.title||""};f.Va=function(a,b){a&&(a.title=b||"")};var od=function(a,b,c){P.call(this,a,b||U.fa(),c);dd(this,8,!0);this.U|=9};p(od,P);od.prototype.Sa=function(){return this.rb};od.prototype.Va=function(a){this.zb().Va(this.a(),a);this.rb=a};Pc("goog-tab",function(){return new od(null)});var V=function(){};p(V,N);ba(V);V.prototype.v=function(){return"goog-tab-bar"};V.prototype.T=function(){return"tablist"};V.prototype.$a=function(a,b,c){this.Ab||(this.Ja||pd(this),this.Ab=Na(this.Ja));var d=this.Ab[b];d?(Tc(a,qd(d)),a.wb=d):V.e.$a.call(this,a,b,c)};V.prototype.ta=function(a){var b=V.e.ta.call(this,a);this.Ja||pd(this);b.push(this.Ja[a.wb]);return b};var pd=function(a){var b=a.v();a.Ja={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var W=function(a,b,c){a=a||"top";Tc(this,qd(a));this.wb=a;T.call(this,this.L,b||V.fa(),c);rd(this)};p(W,T);f=W.prototype;f.Zb=!0;f.H=null;f.D=function(){W.e.D.call(this);rd(this)};f.removeChild=function(a,b){sd(this,a);return W.e.removeChild.call(this,a,b)};f.Ua=function(a){W.e.Ua.call(this,a);this.Zb&&this.W(K(this,a))};f.W=function(a){a?cd(a,!0):this.H&&cd(this.H,!1)};
-var sd=function(a,b){if(b&&b==a.H){for(var c=sc(a,b),d=c-1;b=K(a,d);d--)if(b.s()&&b.isEnabled()){a.W(b);return}for(c+=1;b=K(a,c);c++)if(b.s()&&b.isEnabled()){a.W(b);return}a.W(null)}};f=W.prototype;f.Xb=function(a){this.H&&this.H!=a.target&&cd(this.H,!1);this.H=a.target};f.Yb=function(a){a.target==this.H&&(this.H=null)};f.Vb=function(a){sd(this,a.target)};f.Wb=function(a){sd(this,a.target)};f.ma=function(){K(this,this.h)||this.C(this.H||K(this,0))};
-var rd=function(a){mc(a).c(a,"select",a.Xb).c(a,"unselect",a.Yb).c(a,"disable",a.Vb).c(a,"hide",a.Wb)},qd=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Pc("goog-tab-bar",function(){return new W});var X=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,wc(a,h.T()),Mc(a,"goog-zippy-header"),td(h,a),a&&h.Mb.c(a,"keydown",h.Nb))}H.call(this);this.A=e||qb();this.R=this.A.a(a)||null;this.Aa=this.A.a(d||null);this.da=(this.Oa=n(b)?b:null)||!b?null:this.A.a(b);this.k=!0==c;this.Mb=new G(this);this.pb=new G(this);var h=this;g(this.R);g(this.Aa);this.S(this.k)};p(X,H);f=X.prototype;f.$=!0;f.T=function(){return"tab"};f.B=function(){return this.da};f.toggle=function(){this.S(!this.k)};
-f.S=function(a){this.da?I(this.da,a):a&&this.Oa&&(this.da=this.Oa());this.da&&Mc(this.da,"goog-zippy-content");if(this.Aa)I(this.R,!a),I(this.Aa,a);else if(this.R){var b=this.R;a?Mc(b,"goog-zippy-expanded"):Nc(b,"goog-zippy-expanded");b=this.R;a?Nc(b,"goog-zippy-collapsed"):Mc(b,"goog-zippy-collapsed");yc(this.R,"expanded",a)}this.k=a;this.dispatchEvent(new ud("toggle",this))};f.ob=function(){return this.$};f.Na=function(a){this.$!=a&&((this.$=a)?(td(this,this.R),td(this,this.Aa)):this.pb.Za())};
-var td=function(a,b){b&&a.pb.c(b,"click",a.$b)};X.prototype.Nb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new A("action",this)),a.preventDefault(),a.stopPropagation()};X.prototype.$b=function(){this.toggle();this.dispatchEvent(new A("action",this))};var ud=function(a,b){A.call(this,a,b)};p(ud,A);var Z=function(a,b){this.nb=[];for(var c=sb("span","ae-zippy",rb(document,a)),d=0,e;e=c[d];d++){var g=e.parentNode.parentNode.parentNode;if(void 0!=g.nextElementSibling)g=g.nextElementSibling;else for(g=g.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new X(e,g,!1);this.nb.push(e)}this.fc=new Y(this.nb,rb(document,b))};Z.prototype.ic=function(){return this.fc};Z.prototype.jc=function(){return this.nb};
+f.lb=function(a){var b=K(this,this.h);if(b&&"function"==typeof b.J&&b.J(a)||this.i&&this.i!=b&&"function"==typeof this.i.J&&this.i.J(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.I())this.j().blur();else return!1;break;case 36:id(this);break;case 35:jd(this);break;case 38:if("vertical"==this.L)kd(this);else return!1;break;case 37:if("horizontal"==this.L)rc(this)?ld(this):kd(this);else return!1;break;case 40:if("vertical"==this.L)ld(this);else return!1;
+break;case 39:if("horizontal"==this.L)rc(this)?kd(this):ld(this);else return!1;break;default:return!1}return!0};var fd=function(a,b){var c=b.a(),c=c.id||(c.id=kc(b));a.M||(a.M={});a.M[c]=b};T.prototype.Ca=function(a,b){Ba(a,P,"The child of a container must be a control");T.e.Ca.call(this,a,b)};T.prototype.Ua=function(a,b,c){a.U|=2;a.U|=64;!this.I()&&this.Pb||cd(a,32,!1);a.Oa(!1);T.e.Ua.call(this,a,b,c);a.f&&this.f&&fd(this,a);b<=this.h&&this.h++};
+T.prototype.removeChild=function(a,b){if(a=m(a)?nc(this,a):a){var c=sc(this,a);-1!=c&&(c==this.h?(a.C(!1),this.h=-1):c<this.h&&this.h--);var d=a.a();d&&d.id&&this.M&&(c=this.M,d=d.id,d in c&&delete c[d])}a=T.e.removeChild.call(this,a,b);a.Oa(!0);return a};var Sc=function(a,b){if(a.a())throw Error("Component already rendered");a.L=b};f=T.prototype;f.s=function(){return this.n};
+f.ja=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){this.n=a;var c=this.a();c&&(I(c,a),this.I()&&Qc(this.j(),this.V&&this.n),b||this.dispatchEvent(this.n?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.V};f.qa=function(a){this.V!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.V=!0,pc(this,function(a){a.vb?delete a.vb:a.qa(!0)})):(pc(this,function(a){a.isEnabled()?a.qa(!1):a.vb=!0}),this.ba=this.V=!1),this.I()&&Qc(this.j(),a&&this.n))};
+f.I=function(){return this.Za};f.na=function(a){a!=this.Za&&this.f&&gd(this,a);this.Za=a;this.V&&this.n&&Qc(this.j(),a)};var hd=function(a,b){var c=K(a,b);c?c.C(!0):-1<a.h&&K(a,a.h).C(!1)};T.prototype.C=function(a){hd(this,sc(this,a))};
+var id=function(a){md(a,function(a,c){return(a+1)%c},qc(a)-1)},jd=function(a){md(a,function(a,c){a--;return 0>a?c-1:a},0)},ld=function(a){md(a,function(a,c){return(a+1)%c},a.h)},kd=function(a){md(a,function(a,c){a--;return 0>a?c-1:a},a.h)},md=function(a,b,c){c=0>c?sc(a,a.i):c;var d=qc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=K(a,c);if(g&&g.s()&&g.isEnabled()&&g.l&2){a.Va(c);break}e++;c=b.call(a,c,d)}};T.prototype.Va=function(a){hd(this,a)};var U=function(){};p(U,O);ba(U);f=U.prototype;f.v=function(){return"goog-tab"};f.T=function(){return"tab"};f.o=function(a){var b=U.e.o.call(this,a);(a=a.Ta())&&this.Wa(b,a);return b};f.K=function(a,b){b=U.e.K.call(this,a,b);var c=this.Ta(b);c&&(a.rb=c);a.g&8&&(c=a.getParent())&&n(c.W)&&(a.t(8,!1),c.W(a));return b};f.Ta=function(a){return a.title||""};f.Wa=function(a,b){a&&(a.title=b||"")};var nd=function(a,b,c){P.call(this,a,b||U.fa(),c);cd(this,8,!0);this.U|=9};p(nd,P);nd.prototype.Ta=function(){return this.rb};nd.prototype.Wa=function(a){this.zb().Wa(this.a(),a);this.rb=a};Oc("goog-tab",function(){return new nd(null)});var V=function(){};p(V,N);ba(V);V.prototype.v=function(){return"goog-tab-bar"};V.prototype.T=function(){return"tablist"};V.prototype.ab=function(a,b,c){this.Ab||(this.Ja||od(this),this.Ab=Na(this.Ja));var d=this.Ab[b];d?(Sc(a,pd(d)),a.wb=d):V.e.ab.call(this,a,b,c)};V.prototype.ta=function(a){var b=V.e.ta.call(this,a);this.Ja||od(this);b.push(this.Ja[a.wb]);return b};var od=function(a){var b=a.v();a.Ja={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var W=function(a,b,c){a=a||"top";Sc(this,pd(a));this.wb=a;T.call(this,this.L,b||V.fa(),c);qd(this)};p(W,T);f=W.prototype;f.Zb=!0;f.H=null;f.D=function(){W.e.D.call(this);qd(this)};f.removeChild=function(a,b){rd(this,a);return W.e.removeChild.call(this,a,b)};f.Va=function(a){W.e.Va.call(this,a);this.Zb&&this.W(K(this,a))};f.W=function(a){a?bd(a,!0):this.H&&bd(this.H,!1)};
+var rd=function(a,b){if(b&&b==a.H){for(var c=sc(a,b),d=c-1;b=K(a,d);d--)if(b.s()&&b.isEnabled()){a.W(b);return}for(c+=1;b=K(a,c);c++)if(b.s()&&b.isEnabled()){a.W(b);return}a.W(null)}};f=W.prototype;f.Xb=function(a){this.H&&this.H!=a.target&&bd(this.H,!1);this.H=a.target};f.Yb=function(a){a.target==this.H&&(this.H=null)};f.Vb=function(a){rd(this,a.target)};f.Wb=function(a){rd(this,a.target)};f.ma=function(){K(this,this.h)||this.C(this.H||K(this,0))};
+var qd=function(a){mc(a).c(a,"select",a.Xb).c(a,"unselect",a.Yb).c(a,"disable",a.Vb).c(a,"hide",a.Wb)},pd=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Oc("goog-tab-bar",function(){return new W});var X=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,wc(a,h.T()),Lc(a,"goog-zippy-header"),sd(h,a),a&&h.Mb.c(a,"keydown",h.Nb))}H.call(this);this.A=e||qb();this.R=this.A.a(a)||null;this.Aa=this.A.a(d||null);this.da=(this.Pa=n(b)?b:null)||!b?null:this.A.a(b);this.k=!0==c;this.Mb=new G(this);this.pb=new G(this);var h=this;g(this.R);g(this.Aa);this.S(this.k)};p(X,H);f=X.prototype;f.$=!0;f.T=function(){return"tab"};f.B=function(){return this.da};f.toggle=function(){this.S(!this.k)};
+f.S=function(a){this.da?I(this.da,a):a&&this.Pa&&(this.da=this.Pa());this.da&&Lc(this.da,"goog-zippy-content");if(this.Aa)I(this.R,!a),I(this.Aa,a);else if(this.R){var b=this.R;a?Lc(b,"goog-zippy-expanded"):Mc(b,"goog-zippy-expanded");b=this.R;a?Mc(b,"goog-zippy-collapsed"):Lc(b,"goog-zippy-collapsed");yc(this.R,"expanded",a)}this.k=a;this.dispatchEvent(new td("toggle",this))};f.ob=function(){return this.$};f.Oa=function(a){this.$!=a&&((this.$=a)?(sd(this,this.R),sd(this,this.Aa)):this.pb.$a())};
+var sd=function(a,b){b&&a.pb.c(b,"click",a.$b)};X.prototype.Nb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new A("action",this)),a.preventDefault(),a.stopPropagation()};X.prototype.$b=function(){this.toggle();this.dispatchEvent(new A("action",this))};var td=function(a,b){A.call(this,a,b)};p(td,A);var Z=function(a,b){this.nb=[];for(var c=sb("span","ae-zippy",rb(document,a)),d=0,e;e=c[d];d++){var g=e.parentNode.parentNode.parentNode;if(void 0!=g.nextElementSibling)g=g.nextElementSibling;else for(g=g.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new X(e,g,!1);this.nb.push(e)}this.fc=new Y(this.nb,rb(document,b))};Z.prototype.ic=function(){return this.fc};Z.prototype.jc=function(){return this.nb};
 var Y=function(a,b){this.ua=a;if(this.ua.length)for(var c=0,d;d=this.ua[c];c++)E(d,"toggle",this.Ub,!1,this);this.Ka=0;this.k=!1;c="ae-toggle ae-plus ae-action";this.ua.length||(c+=" ae-disabled");this.P=wb("span",{className:c},"Expand All");E(this.P,"click",this.Tb,!1,this);b&&b.appendChild(this.P)};Y.prototype.Tb=function(){this.ua.length&&this.S(!this.k)};
-Y.prototype.Ub=function(a){a=a.currentTarget;this.Ka=a.k?this.Ka+1:this.Ka-1;a.k!=this.k&&(a.k?(this.k=!0,vd(this,!0)):0==this.Ka&&(this.k=!1,vd(this,!1)))};Y.prototype.S=function(a){this.k=a;a=0;for(var b;b=this.ua[a];a++)b.k!=this.k&&b.S(this.k);vd(this)};
-var vd=function(a,b){(void 0!==b?b:a.k)?(nb(a.P,"ae-plus"),lb(a.P,"ae-minus"),zb(a.P,"Collapse All")):(nb(a.P,"ae-minus"),lb(a.P,"ae-plus"),zb(a.P,"Expand All"))},wd=function(a){this.ac=a;this.Cb={};var b,c=wb("div",{},b=wb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),wb("div",{className:"goog-tab-bar-clear"}),a=wb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new W;d.K(b);E(d,"select",this.Bb,!1,this);E(d,"unselect",this.Bb,!1,this);
-b=0;for(var e;e=this.ac[b];b++)if(e=rb(document,"ae-stats-details-"+e)){var g=sb("h2",null,e)[0],h;h=g;var k=void 0;jb&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],Eb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");jb||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new od(h);this.Cb[ka(g)]=e;d.Ca(g,!0);a.appendChild(e);0==b?d.W(g):I(e,!1)}rb(document,"bd").appendChild(c)};
-wd.prototype.Bb=function(a){var b=this.Cb[ka(a.target)];I(b,"select"==a.type)};ma("ae.Stats.Details.Tabs",wd);ma("goog.ui.Zippy",X);X.prototype.setExpanded=X.prototype.S;ma("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.ic;Z.prototype.getZippys=Z.prototype.jc;Y.prototype.setExpanded=Y.prototype.S;var $=function(){this.ab=[];this.gb=[]},xd=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],yd=function(a){if(0>=a)return[2,0.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<xd.length;c++)if(a<=xd[c][2])return[xd[c][0],xd[c][1]*b,xd[c][2]*b];return[5,2*b,10*b]};$.prototype.fb="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.cb=0;$.prototype.write=function(a){this.gb.push(a)};
-var zd=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.w+'tick" src="'+a.fb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
-$.prototype.hc=function(){this.gb=[];var a=yd(this.cb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');zd(this,b,c,a);for(var d=0;d<this.ab.length;d++){var e=this.ab[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');0<e.label.length&&(0<e.ia.length&&this.write('<a class="'+this.w+'link" href="'+e.ia+'">'),this.write(e.label),0<e.ia.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');0<e.ia.length&&this.write('<a class="'+
-this.w+'link" href="'+e.ia+'"\n>');this.write('<img class="'+this.w+'bar" src="'+this.fb+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.bb&&(this.write('<img class="'+this.w+'extra" src="'+this.fb+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.bb*a+'%"\n>'));0<e.yb.length&&(this.write('<span class="'+this.w+'inline" style="left:'+(e.start+Math.max(e.duration,e.bb))*a+'%">&nbsp;'),this.write(e.yb),this.write("</span>"));0<e.ia.length&&
-this.write("</a>");this.write("</div></td></tr>\n")}zd(this,b,c,a);this.write("</table>\n");return this.gb.join("")};$.prototype.gc=function(a,b,c,d,e,g){this.cb=Math.max(this.cb,Math.max(b+c,b+d));this.ab.push({label:a,start:b,duration:c,bb:d,yb:e,ia:g})};ma("Gantt",$);$.prototype.add_bar=$.prototype.gc;$.prototype.draw=$.prototype.hc;})();
+Y.prototype.Ub=function(a){a=a.currentTarget;this.Ka=a.k?this.Ka+1:this.Ka-1;a.k!=this.k&&(a.k?(this.k=!0,ud(this,!0)):0==this.Ka&&(this.k=!1,ud(this,!1)))};Y.prototype.S=function(a){this.k=a;a=0;for(var b;b=this.ua[a];a++)b.k!=this.k&&b.S(this.k);ud(this)};
+var ud=function(a,b){(void 0!==b?b:a.k)?(nb(a.P,"ae-plus"),lb(a.P,"ae-minus"),zb(a.P,"Collapse All")):(nb(a.P,"ae-minus"),lb(a.P,"ae-plus"),zb(a.P,"Expand All"))},vd=function(a){this.ac=a;this.Cb={};var b,c=wb("div",{},b=wb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),wb("div",{className:"goog-tab-bar-clear"}),a=wb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new W;d.K(b);E(d,"select",this.Bb,!1,this);E(d,"unselect",this.Bb,!1,this);
+b=0;for(var e;e=this.ac[b];b++)if(e=rb(document,"ae-stats-details-"+e)){var g=sb("h2",null,e)[0],h;h=g;var k=void 0;jb&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],Eb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");jb||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new nd(h);this.Cb[ka(g)]=e;d.Ca(g,!0);a.appendChild(e);0==b?d.W(g):I(e,!1)}rb(document,"bd").appendChild(c)};
+vd.prototype.Bb=function(a){var b=this.Cb[ka(a.target)];I(b,"select"==a.type)};ma("ae.Stats.Details.Tabs",vd);ma("goog.ui.Zippy",X);X.prototype.setExpanded=X.prototype.S;ma("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.ic;Z.prototype.getZippys=Z.prototype.jc;Y.prototype.setExpanded=Y.prototype.S;var $=function(){this.bb=[];this.hb=[]},wd=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],xd=function(a){if(0>=a)return[2,0.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<wd.length;c++)if(a<=wd[c][2])return[wd[c][0],wd[c][1]*b,wd[c][2]*b];return[5,2*b,10*b]};$.prototype.gb="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.eb=0;$.prototype.write=function(a){this.hb.push(a)};
+var yd=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.w+'tick" src="'+a.gb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
+$.prototype.hc=function(){this.hb=[];var a=xd(this.eb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');yd(this,b,c,a);for(var d=0;d<this.bb.length;d++){var e=this.bb[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');0<e.label.length&&(0<e.ia.length&&this.write('<a class="'+this.w+'link" href="'+e.ia+'">'),this.write(e.label),0<e.ia.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');0<e.ia.length&&this.write('<a class="'+
+this.w+'link" href="'+e.ia+'"\n>');this.write('<img class="'+this.w+'bar" src="'+this.gb+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.cb&&(this.write('<img class="'+this.w+'extra" src="'+this.gb+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.cb*a+'%"\n>'));0<e.yb.length&&(this.write('<span class="'+this.w+'inline" style="left:'+(e.start+Math.max(e.duration,e.cb))*a+'%">&nbsp;'),this.write(e.yb),this.write("</span>"));0<e.ia.length&&
+this.write("</a>");this.write("</div></td></tr>\n")}yd(this,b,c,a);this.write("</table>\n");return this.hb.join("")};$.prototype.gc=function(a,b,c,d,e,g){this.eb=Math.max(this.eb,Math.max(b+c,b+d));this.bb.push({label:a,start:b,duration:c,cb:d,yb:e,ia:g})};ma("Gantt",$);$.prototype.add_bar=$.prototype.gc;$.prototype.draw=$.prototype.hc;})();
diff --git a/google/appengine/ext/datastore_admin/backup_handler.py b/google/appengine/ext/datastore_admin/backup_handler.py
index 8c23f76..8ba477e 100644
--- a/google/appengine/ext/datastore_admin/backup_handler.py
+++ b/google/appengine/ext/datastore_admin/backup_handler.py
@@ -67,18 +67,29 @@
 from google.appengine.ext.datastore_admin import backup_pb2
 from google.appengine.ext.datastore_admin import config
 from google.appengine.ext.datastore_admin import utils
-from google.appengine.ext.mapreduce import context
-from google.appengine.ext.mapreduce import datastore_range_iterators as db_iters
-from google.appengine.ext.mapreduce import input_readers
-from google.appengine.ext.mapreduce import json_util
-from google.appengine.ext.mapreduce import operation as op
-from google.appengine.ext.mapreduce import output_writers
 from google.appengine.runtime import apiproxy_errors
 
+
+try:
+
+  from google.appengine.ext.mapreduce import context
+  from google.appengine.ext.mapreduce import datastore_range_iterators as db_iters
+  from google.appengine.ext.mapreduce import input_readers
+  from google.appengine.ext.mapreduce import json_util
+  from google.appengine.ext.mapreduce import operation as op
+  from google.appengine.ext.mapreduce import output_writers
+except ImportError:
+
+  from google.appengine._internal.mapreduce import context
+  from google.appengine._internal.mapreduce import datastore_range_iterators as db_iters
+  from google.appengine._internal.mapreduce import input_readers
+  from google.appengine._internal.mapreduce import json_util
+  from google.appengine._internal.mapreduce import operation as op
+  from google.appengine._internal.mapreduce import output_writers
+
 try:
 
   from google.appengine.ext.datastore_admin import services_client
-
 except ImportError:
 
   pass
@@ -1627,17 +1638,20 @@
   def __init__(self):
     self.initialized = False
     self.kind_filter = None
+
     self.app_id = None
 
   def initialize(self):
+    """Initialize a restore mapper instance."""
     if self.initialized:
       return
-    mapper_params = context.get().mapreduce_spec.mapper.params
+    mapper_params = get_mapper_params_from_context()
     kind_filter = mapper_params.get('kind_filter')
     self.kind_filter = set(kind_filter) if kind_filter else None
     original_app = mapper_params.get('original_app')
-    if original_app and os.getenv('APPLICATION_ID') != original_app:
-      self.app_id = os.getenv('APPLICATION_ID')
+    target_app = os.getenv('APPLICATION_ID')
+    if original_app and target_app != original_app:
+      self.app_id = target_app
     self.initialized = True
 
   def map(self, record):
@@ -1653,11 +1667,20 @@
     pb = entity_pb.EntityProto(contents=record)
     if self.app_id:
       utils.FixKeys(pb, self.app_id)
-    entity = datastore.Entity.FromPb(pb)
-    if not self.kind_filter or entity.kind() in self.kind_filter:
-      yield op.db.Put(entity)
+
+
+
+    if not self.kind_filter or (
+        utils.get_kind_from_entity_pb(pb) in self.kind_filter):
+      yield utils.Put(pb)
       if self.app_id:
-        yield utils.ReserveKey(entity.key())
+
+        yield utils.ReserveKey(datastore_types.Key._FromPb(pb.key()))
+
+
+def get_mapper_params_from_context():
+  """Get mapper params from MR context. Split out for ease of testing."""
+  return context.get().mapreduce_spec.mapper.params
 
 
 def validate_gs_bucket_name(bucket_name):
diff --git a/google/appengine/ext/datastore_admin/backup_pb2.py b/google/appengine/ext/datastore_admin/backup_pb2.py
index 5d4b8d5..25002eb 100644
--- a/google/appengine/ext/datastore_admin/backup_pb2.py
+++ b/google/appengine/ext/datastore_admin/backup_pb2.py
@@ -31,7 +31,7 @@
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='apphosting/ext/datastore_admin/backup.proto',
   package='apphosting.ext.datastore_admin',
-  serialized_pb=_b('\n+apphosting/ext/datastore_admin/backup.proto\x12\x1e\x61pphosting.ext.datastore_admin\"\x8c\x01\n\x06\x42\x61\x63kup\x12?\n\x0b\x62\x61\x63kup_info\x18\x01 \x01(\x0b\x32*.apphosting.ext.datastore_admin.BackupInfo\x12\x41\n\tkind_info\x18\x02 \x03(\x0b\x32..apphosting.ext.datastore_admin.KindBackupInfo\"Q\n\nBackupInfo\x12\x13\n\x0b\x62\x61\x63kup_name\x18\x01 \x01(\t\x12\x17\n\x0fstart_timestamp\x18\x02 \x01(\x03\x12\x15\n\rend_timestamp\x18\x03 \x01(\x03\"\x8c\x01\n\x0eKindBackupInfo\x12\x0c\n\x04kind\x18\x01 \x02(\t\x12\x0c\n\x04\x66ile\x18\x02 \x03(\t\x12\x43\n\rentity_schema\x18\x03 \x01(\x0b\x32,.apphosting.ext.datastore_admin.EntitySchema\x12\x19\n\nis_partial\x18\x04 \x01(\x08:\x05\x66\x61lse\"\xfc\x04\n\x0c\x45ntitySchema\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x41\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x32.apphosting.ext.datastore_admin.EntitySchema.Field\x1a\xb2\x01\n\x04Type\x12\x0f\n\x07is_list\x18\x01 \x01(\x08\x12R\n\x0eprimitive_type\x18\x02 \x03(\x0e\x32:.apphosting.ext.datastore_admin.EntitySchema.PrimitiveType\x12\x45\n\x0f\x65mbedded_schema\x18\x03 \x03(\x0b\x32,.apphosting.ext.datastore_admin.EntitySchema\x1aV\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x02(\t\x12?\n\x04type\x18\x02 \x03(\x0b\x32\x31.apphosting.ext.datastore_admin.EntitySchema.Type\"\x8d\x02\n\rPrimitiveType\x12\t\n\x05\x46LOAT\x10\x00\x12\x0b\n\x07INTEGER\x10\x01\x12\x0b\n\x07\x42OOLEAN\x10\x02\x12\n\n\x06STRING\x10\x03\x12\r\n\tDATE_TIME\x10\x04\x12\n\n\x06RATING\x10\x05\x12\x08\n\x04LINK\x10\x06\x12\x0c\n\x08\x43\x41TEGORY\x10\x07\x12\x10\n\x0cPHONE_NUMBER\x10\x08\x12\x12\n\x0ePOSTAL_ADDRESS\x10\t\x12\t\n\x05\x45MAIL\x10\n\x12\r\n\tIM_HANDLE\x10\x0b\x12\x0c\n\x08\x42LOB_KEY\x10\x0c\x12\x08\n\x04TEXT\x10\r\x12\x08\n\x04\x42LOB\x10\x0e\x12\x0e\n\nSHORT_BLOB\x10\x0f\x12\x08\n\x04USER\x10\x10\x12\r\n\tGEO_POINT\x10\x11\x12\r\n\tREFERENCE\x10\x12\x42\x14\x10\x02 \x02(\x02\x42\x0c\x42\x61\x63kupProtos')
+  serialized_pb=_b('\n+apphosting/ext/datastore_admin/backup.proto\x12\x1e\x61pphosting.ext.datastore_admin\"\x8c\x01\n\x06\x42\x61\x63kup\x12?\n\x0b\x62\x61\x63kup_info\x18\x01 \x01(\x0b\x32*.apphosting.ext.datastore_admin.BackupInfo\x12\x41\n\tkind_info\x18\x02 \x03(\x0b\x32..apphosting.ext.datastore_admin.KindBackupInfo\"Q\n\nBackupInfo\x12\x13\n\x0b\x62\x61\x63kup_name\x18\x01 \x01(\t\x12\x17\n\x0fstart_timestamp\x18\x02 \x01(\x03\x12\x15\n\rend_timestamp\x18\x03 \x01(\x03\"\x8c\x01\n\x0eKindBackupInfo\x12\x0c\n\x04kind\x18\x01 \x02(\t\x12\x0c\n\x04\x66ile\x18\x02 \x03(\t\x12\x43\n\rentity_schema\x18\x03 \x01(\x0b\x32,.apphosting.ext.datastore_admin.EntitySchema\x12\x19\n\nis_partial\x18\x04 \x01(\x08:\x05\x66\x61lse\"\x90\x05\n\x0c\x45ntitySchema\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x41\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x32.apphosting.ext.datastore_admin.EntitySchema.Field\x1a\xb2\x01\n\x04Type\x12\x0f\n\x07is_list\x18\x01 \x01(\x08\x12R\n\x0eprimitive_type\x18\x02 \x03(\x0e\x32:.apphosting.ext.datastore_admin.EntitySchema.PrimitiveType\x12\x45\n\x0f\x65mbedded_schema\x18\x03 \x03(\x0b\x32,.apphosting.ext.datastore_admin.EntitySchema\x1aj\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x02(\t\x12?\n\x04type\x18\x02 \x03(\x0b\x32\x31.apphosting.ext.datastore_admin.EntitySchema.Type\x12\x12\n\nfield_name\x18\x03 \x01(\t\"\x8d\x02\n\rPrimitiveType\x12\t\n\x05\x46LOAT\x10\x00\x12\x0b\n\x07INTEGER\x10\x01\x12\x0b\n\x07\x42OOLEAN\x10\x02\x12\n\n\x06STRING\x10\x03\x12\r\n\tDATE_TIME\x10\x04\x12\n\n\x06RATING\x10\x05\x12\x08\n\x04LINK\x10\x06\x12\x0c\n\x08\x43\x41TEGORY\x10\x07\x12\x10\n\x0cPHONE_NUMBER\x10\x08\x12\x12\n\x0ePOSTAL_ADDRESS\x10\t\x12\t\n\x05\x45MAIL\x10\n\x12\r\n\tIM_HANDLE\x10\x0b\x12\x0c\n\x08\x42LOB_KEY\x10\x0c\x12\x08\n\x04TEXT\x10\r\x12\x08\n\x04\x42LOB\x10\x0e\x12\x0e\n\nSHORT_BLOB\x10\x0f\x12\x08\n\x04USER\x10\x10\x12\r\n\tGEO_POINT\x10\x11\x12\r\n\tREFERENCE\x10\x12\x42\x14\x10\x02 \x02(\x02\x42\x0c\x42\x61\x63kupProtos')
 )
 
 
@@ -121,8 +121,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=816,
-  serialized_end=1085,
+  serialized_start=836,
+  serialized_end=1105,
 )
 
 
@@ -314,6 +314,13 @@
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
+    _descriptor.FieldDescriptor(
+      name='field_name', full_name='apphosting.ext.datastore_admin.EntitySchema.Field.field_name', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
   ],
   extensions=[
   ],
@@ -324,7 +331,7 @@
   is_extendable=False,
   extension_ranges=[],
   serialized_start=727,
-  serialized_end=813,
+  serialized_end=833,
 )
 
 _ENTITYSCHEMA = _descriptor.Descriptor(
@@ -359,7 +366,7 @@
   is_extendable=False,
   extension_ranges=[],
   serialized_start=449,
-  serialized_end=1085,
+  serialized_end=1105,
 )
 
 _BACKUP.fields_by_name['backup_info'].message_type = _BACKUPINFO
diff --git a/google/appengine/ext/datastore_admin/copy_handler.py b/google/appengine/ext/datastore_admin/copy_handler.py
index fb23736..4512830 100644
--- a/google/appengine/ext/datastore_admin/copy_handler.py
+++ b/google/appengine/ext/datastore_admin/copy_handler.py
@@ -42,8 +42,18 @@
 from google.appengine.ext.datastore_admin import config
 from google.appengine.ext.datastore_admin import remote_api_put_stub
 from google.appengine.ext.datastore_admin import utils
-from google.appengine.ext.mapreduce import context
-from google.appengine.ext.mapreduce import operation
+
+
+try:
+
+  from google.appengine.ext.mapreduce import context
+  from google.appengine.ext.mapreduce import input_readers
+  from google.appengine.ext.mapreduce import operation
+except ImportError:
+
+  from google.appengine._internal.mapreduce import context
+  from google.appengine._internal.mapreduce import input_readers
+  from google.appengine._internal.mapreduce import operation
 
 
 XSRF_ACTION = 'copy'
@@ -100,8 +110,7 @@
 
   COPY_HANDLER = ('google.appengine.ext.datastore_admin.copy_handler.'
                   'RemoteCopyEntity.map')
-  INPUT_READER = ('google.appengine.ext.mapreduce.input_readers.'
-                  'DatastoreKeyInputReader')
+  INPUT_READER = input_readers.__name__ + '.DatastoreKeyInputReader'
   MAPREDUCE_DETAIL = config.MAPREDUCE_PATH + '/detail?mapreduce_id='
 
   def get(self):
diff --git a/google/appengine/ext/datastore_admin/delete_handler.py b/google/appengine/ext/datastore_admin/delete_handler.py
index 97ad029..e8a108c 100644
--- a/google/appengine/ext/datastore_admin/delete_handler.py
+++ b/google/appengine/ext/datastore_admin/delete_handler.py
@@ -33,8 +33,19 @@
 from google.appengine.ext import webapp
 from google.appengine.ext.datastore_admin import config
 from google.appengine.ext.datastore_admin import utils
-from google.appengine.ext.mapreduce import model
-from google.appengine.ext.mapreduce import operation
+
+
+try:
+
+  from google.appengine.ext.mapreduce import model
+  from google.appengine.ext.mapreduce import input_readers
+  from google.appengine.ext.mapreduce import operation
+except ImportError:
+
+  from google.appengine._internal.mapreduce import model
+  from google.appengine._internal.mapreduce import input_readers
+  from google.appengine._internal.mapreduce import operation
+
 
 MAPREDUCE_OBJECTS = [model.MapreduceState.kind(),
                      model.ShardState.kind()]
@@ -116,8 +127,7 @@
   SUFFIX = 'delete.do'
   DELETE_HANDLER = (
       'google.appengine.ext.datastore_admin.delete_handler.DeleteEntity')
-  INPUT_READER = (
-      'google.appengine.ext.mapreduce.input_readers.DatastoreKeyInputReader')
+  INPUT_READER = input_readers.__name__ + '.DatastoreKeyInputReader'
   MAPREDUCE_DETAIL = config.MAPREDUCE_PATH + '/detail?mapreduce_id='
 
   def get(self):
diff --git a/google/appengine/ext/datastore_admin/templates/do_backup.html b/google/appengine/ext/datastore_admin/templates/do_backup.html
index 57ebb47..731ca21 100644
--- a/google/appengine/ext/datastore_admin/templates/do_backup.html
+++ b/google/appengine/ext/datastore_admin/templates/do_backup.html
@@ -40,7 +40,7 @@
             <li>{{ task_name }}</li>
           {% endfor %}
         </ul>
-        <p>Once job's are created their status will only be available for as
+        <p>Once jobs are created their status will only be available for as
         long as the job takes to complete. Once the job is complete, it will
         remove the status objects from your datastore.
         Click a job's id for status information.
diff --git a/google/appengine/ext/datastore_admin/templates/list_actions.html b/google/appengine/ext/datastore_admin/templates/list_actions.html
index 44adfc4..8210992 100644
--- a/google/appengine/ext/datastore_admin/templates/list_actions.html
+++ b/google/appengine/ext/datastore_admin/templates/list_actions.html
@@ -34,7 +34,7 @@
             Entity statistics unavailable.
           {% endif %}
           <a target="_blank"
-             href="http://developers.google.com/appengine/kb/adminconsole.html#datastore_stats">
+             href="https://developers.google.com/appengine/docs/adminconsole/datastorestats">
             <img class="ae-help-icon" src="{{ base_path }}/static/img/help.gif"
               height="14" width="14" alt="help">
           </a>
diff --git a/google/appengine/ext/datastore_admin/utils.py b/google/appengine/ext/datastore_admin/utils.py
index 00e4f59..1a7d55e 100644
--- a/google/appengine/ext/datastore_admin/utils.py
+++ b/google/appengine/ext/datastore_admin/utils.py
@@ -36,12 +36,25 @@
 from google.appengine.ext import webapp
 from google.appengine.ext.datastore_admin import config
 from google.appengine.ext.db import stats
-from google.appengine.ext.mapreduce import control
-from google.appengine.ext.mapreduce import model
-from google.appengine.ext.mapreduce import operation as mr_operation
-from google.appengine.ext.mapreduce import util
 from google.appengine.ext.webapp import _template
 
+
+try:
+
+  from google.appengine.ext.mapreduce import context
+  from google.appengine.ext.mapreduce import control
+  from google.appengine.ext.mapreduce import model
+  from google.appengine.ext.mapreduce import operation as mr_operation
+  from google.appengine.ext.mapreduce import util
+except ImportError:
+
+  from google.appengine._internal.mapreduce import context
+  from google.appengine._internal.mapreduce import control
+  from google.appengine._internal.mapreduce import model
+  from google.appengine._internal.mapreduce import operation as mr_operation
+  from google.appengine._internal.mapreduce import util
+
+
 MEMCACHE_NAMESPACE = '_ah-datastore_admin'
 XSRF_VALIDITY_TIME = 600
 KINDS_AND_SIZES_VAR = 'kinds_and_sizes'
@@ -696,3 +709,59 @@
       pool = ReserveKeyPool()
       ctx.register_pool(self.pool_id, pool)
     pool.reserve_key(self.key)
+
+
+class PutPool(context.Pool):
+  """A trimmed copy of the MutationPool class.
+
+  Properties:
+    puts: a list of entities to put to datastore.
+    max_entity_count: maximum number of entities before flushing it to db.
+  """
+  POOL_NAME = 'put_pool'
+
+  def __init__(self, max_entity_count=context.MAX_ENTITY_COUNT):
+    """Constructor.
+
+    Args:
+      max_entity_count: maximum number of entities before flushing it to db.
+    """
+    self.max_entity_count = max_entity_count
+    self.puts = []
+
+  def Put(self, entity):
+    """Registers entity to put to datastore.
+
+    Args:
+      entity: The EntityProto for the entity to be put.
+    """
+    if len(self.puts) >= self.max_entity_count:
+      self.flush()
+    self.puts.append(entity)
+
+  def flush(self):
+    """Flush all puts to datastore."""
+    if self.puts:
+      datastore_rpc.Connection(
+          config=datastore_rpc.Configuration(deadline=60)).put(self.puts)
+    self.puts = []
+
+
+class Put(mr_operation.Operation):
+  """Mapper operation to batch puts."""
+
+  def __init__(self, entity):
+    """Constructor.
+
+    Args:
+      entity: The EntityProto of the entity to put.
+    """
+    self.entity = entity
+
+  def __call__(self, ctx):
+    pool = ctx.get_pool(PutPool.POOL_NAME)
+    if not pool:
+      pool = PutPool(
+          max_entity_count=(context.MAX_ENTITY_COUNT/(2**ctx.task_retry_count)))
+      ctx.register_pool(PutPool.POOL_NAME, pool)
+    pool.Put(self.entity)
diff --git a/google/appengine/ext/mapreduce/input_readers.py b/google/appengine/ext/mapreduce/input_readers.py
index a624d77..a3e37fb 100644
--- a/google/appengine/ext/mapreduce/input_readers.py
+++ b/google/appengine/ext/mapreduce/input_readers.py
@@ -2728,6 +2728,7 @@
         self._record_reader = None
 
 
+
 class _ReducerReader(RecordsReader):
   """Reader to read KeyValues records files from Files API."""
 
@@ -2751,13 +2752,15 @@
       proto = file_service_pb.KeyValues()
       proto.ParseFromString(binary_record)
 
+      to_yield = None
+      if self.current_key is not None and self.current_key != proto.key():
+        to_yield = (self.current_key, self.current_values)
+        self.current_key = None
+        self.current_values = None
+
       if self.current_key is None:
         self.current_key = proto.key()
         self.current_values = []
-      else:
-        assert proto.key() == self.current_key, (
-            "inconsistent key sequence. Expected %s but got %s" %
-            (self.current_key, proto.key()))
 
       if combiner:
         combiner_result = combiner(
@@ -2775,27 +2778,35 @@
           else:
 
             self.current_values.append(value)
+
+
+
+
+        if not to_yield:
+          yield ALLOW_CHECKPOINT
       else:
 
         self.current_values.extend(proto.value_list())
 
-      if not proto.partial():
-        key = self.current_key
-        values = self.current_values
+      if to_yield:
+        yield to_yield
 
-        self.current_key = None
-        self.current_values = None
-        yield (key, values)
-      else:
         yield ALLOW_CHECKPOINT
 
+
+
+    if self.current_key is not None:
+      to_yield = (self.current_key, self.current_values)
+      self.current_key = None
+      self.current_values = None
+      yield to_yield
+
   @staticmethod
   def encode_data(data):
     """Encodes the given data, which may have include raw bytes.
 
     Works around limitations in JSON encoding, which cannot handle raw bytes.
     """
-
     return base64.b64encode(pickle.dumps(data))
 
   @staticmethod
@@ -2810,8 +2821,8 @@
       A json-izable version of the remaining InputReader.
     """
     result = super(_ReducerReader, self).to_json()
-    result["current_key"] = _ReducerReader.encode_data(self.current_key)
-    result["current_values"] = _ReducerReader.encode_data(self.current_values)
+    result["current_key"] = self.encode_data(self.current_key)
+    result["current_values"] = self.encode_data(self.current_values)
     return result
 
   @classmethod
diff --git a/google/appengine/ext/mapreduce/main.py b/google/appengine/ext/mapreduce/main.py
index 118b4d1..223b41a 100644
--- a/google/appengine/ext/mapreduce/main.py
+++ b/google/appengine/ext/mapreduce/main.py
@@ -86,10 +86,10 @@
 
 
 
-      (r".*/worker_callback/.*", handlers.MapperWorkerCallbackHandler),
-      (r".*/controller_callback/.*", handlers.ControllerCallbackHandler),
-      (r".*/kickoffjob_callback/.*", handlers.KickOffJobHandler),
-      (r".*/finalizejob_callback/.*", handlers.FinalizeJobHandler),
+      (r".*/worker_callback.*", handlers.MapperWorkerCallbackHandler),
+      (r".*/controller_callback.*", handlers.ControllerCallbackHandler),
+      (r".*/kickoffjob_callback.*", handlers.KickOffJobHandler),
+      (r".*/finalizejob_callback.*", handlers.FinalizeJobHandler),
 
 
 
diff --git a/google/appengine/ext/mapreduce/namespace_range.py b/google/appengine/ext/mapreduce/namespace_range.py
index e9be9ba..b4be917 100644
--- a/google/appengine/ext/mapreduce/namespace_range.py
+++ b/google/appengine/ext/mapreduce/namespace_range.py
@@ -40,6 +40,7 @@
     'MAX_NAMESPACE_LENGTH',
     'MAX_NAMESPACE',
     'MIN_NAMESPACE',
+    'NAMESPACE_BATCH_SIZE',
     'NamespaceRange',
     'get_namespace_keys',
 ]
@@ -57,20 +58,24 @@
                                       '._-'))
 MAX_NAMESPACE_LENGTH = 100
 MIN_NAMESPACE = ''
+NAMESPACE_BATCH_SIZE = 50
 
 
 def _setup_constants(alphabet=NAMESPACE_CHARACTERS,
-                     max_length=MAX_NAMESPACE_LENGTH):
+                     max_length=MAX_NAMESPACE_LENGTH,
+                     batch_size=NAMESPACE_BATCH_SIZE):
   """Calculate derived constant values. Only useful for testing."""
 
   global NAMESPACE_CHARACTERS
   global MAX_NAMESPACE_LENGTH
   global MAX_NAMESPACE
   global _LEX_DISTANCE
+  global NAMESPACE_BATCH_SIZE
 
   NAMESPACE_CHARACTERS = alphabet
   MAX_NAMESPACE_LENGTH = max_length
   MAX_NAMESPACE = NAMESPACE_CHARACTERS[-1] * MAX_NAMESPACE_LENGTH
+  NAMESPACE_BATCH_SIZE = batch_size
 
 
 
@@ -288,9 +293,12 @@
     namespace_start = _ord_to_namespace(_namespace_to_ord(after_namespace) + 1)
     return NamespaceRange(namespace_start, self.namespace_end, _app=self.app)
 
-  def make_datastore_query(self):
+  def make_datastore_query(self, cursor=None):
     """Returns a datastore.Query that generates all namespaces in the range.
 
+    Args:
+      cursor: start cursor for the query.
+
     Returns:
       A datastore.Query instance that generates db.Keys for each namespace in
       the NamespaceRange.
@@ -304,18 +312,19 @@
     return datastore.Query('__namespace__',
                            filters=filters,
                            keys_only=True,
+                           cursor=cursor,
                            _app=self.app)
 
   def normalized_start(self):
     """Returns a NamespaceRange with leading non-existant namespaces removed.
 
     Returns:
-      A copy of this NamespaceRange whose namespace_start is adjusted to exlcude
+      A copy of this NamespaceRange whose namespace_start is adjusted to exclude
       the portion of the range that contains no actual namespaces in the
       datastore. None is returned if the NamespaceRange contains no actual
       namespaces in the datastore.
     """
-    namespaces_after_key = self.make_datastore_query().Get(1)
+    namespaces_after_key = list(self.make_datastore_query().Run(limit=1))
 
     if not namespaces_after_key:
       return None
@@ -428,12 +437,19 @@
 
   def __iter__(self):
     """Iterate over all the namespaces within this range."""
-    query = self.make_datastore_query()
-    for ns_key in query.Run():
-      yield ns_key.name() or ''
+    cursor = None
+    while True:
+      query = self.make_datastore_query(cursor=cursor)
+      count = 0
+      for ns_key in query.Run(limit=NAMESPACE_BATCH_SIZE):
+        count += 1
+        yield ns_key.name() or ''
+      if count < NAMESPACE_BATCH_SIZE:
+        break
+      cursor = query.GetCursor()
 
 
 def get_namespace_keys(app, limit):
   """Get namespace keys."""
   ns_query = datastore.Query('__namespace__', keys_only=True, _app=app)
-  return ns_query.Get(limit=limit)
+  return list(ns_query.Run(limit=limit))
diff --git a/google/appengine/ext/mapreduce/output_writers.py b/google/appengine/ext/mapreduce/output_writers.py
index 708e424..447f7a0 100644
--- a/google/appengine/ext/mapreduce/output_writers.py
+++ b/google/appengine/ext/mapreduce/output_writers.py
@@ -1093,6 +1093,8 @@
     self._streaming_buffer.close()
 
     if self._no_dup:
+
+
       cloudstorage_api._copy2(
           self._streaming_buffer.name,
           self._streaming_buffer.name,
@@ -1198,3 +1200,26 @@
       data: string containing the data to be written.
     """
     self._record_writer.write(data)
+
+
+
+class _GoogleCloudStorageKeyValueOutputWriter(
+    _GoogleCloudStorageRecordOutputWriter):
+  """Write key/values to Google Cloud Storage files in LevelDB format."""
+
+  def write(self, data):
+    if len(data) != 2:
+      logging.error("Got bad tuple of length %d (2-tuple expected): %s",
+                    len(data), data)
+
+    try:
+      key = str(data[0])
+      value = str(data[1])
+    except TypeError:
+      logging.error("Expecting a tuple, but got %s: %s",
+                    data.__class__.__name__, data)
+
+    proto = file_service_pb.KeyValue()
+    proto.set_key(key)
+    proto.set_value(value)
+    _GoogleCloudStorageRecordOutputWriter.write(self, proto.Encode())
diff --git a/google/appengine/ext/mapreduce/test_support.py b/google/appengine/ext/mapreduce/test_support.py
index 9429431..a411a81 100644
--- a/google/appengine/ext/mapreduce/test_support.py
+++ b/google/appengine/ext/mapreduce/test_support.py
@@ -190,8 +190,6 @@
   taskqueue.FlushQueue(queue)
   task_run_counts = collections.defaultdict(lambda: 0)
   for task in tasks:
-    import logging
-    logging.error(task)
     retries = 0
     while True:
       try:
@@ -210,7 +208,6 @@
             "Task %s is being retried for the %s time",
             task["name"],
             retries)
-        logging.debug(traceback.format_exc())
 
   return task_run_counts
 
diff --git a/google/appengine/ext/testbed/__init__.py b/google/appengine/ext/testbed/__init__.py
index 00d8d28..d66ad0b 100644
--- a/google/appengine/ext/testbed/__init__.py
+++ b/google/appengine/ext/testbed/__init__.py
@@ -548,7 +548,7 @@
     stub = file_service_stub.FileServiceStub(self._get_blob_storage())
     self._register_stub(FILES_SERVICE_NAME, stub)
 
-  def init_images_stub(self, enable=True):
+  def init_images_stub(self, enable=True, **stub_kwargs):
     """Enable the images stub.
 
     The images service stub is only available in dev_appserver because
@@ -557,6 +557,7 @@
     Args:
       enable: True, if the fake service should be enabled, False if real
               service should be disabled.
+      stub_kwargs: Keyword arguments passed on to the service stub.
     """
     if not enable:
       self._disable_stub(IMAGES_SERVICE_NAME)
@@ -565,7 +566,7 @@
       msg = ('Could not initialize images API; you are likely '
              'missing the Python "PIL" module.')
       raise StubNotSupportedError(msg)
-    stub = images_stub.ImagesServiceStub()
+    stub = images_stub.ImagesServiceStub(**stub_kwargs)
     self._register_stub(IMAGES_SERVICE_NAME, stub)
 
   def init_logservice_stub(self, enable=True):
diff --git a/google/appengine/tools/appcfg.py b/google/appengine/tools/appcfg.py
index 29718b9..79e522e 100644
--- a/google/appengine/tools/appcfg.py
+++ b/google/appengine/tools/appcfg.py
@@ -3389,7 +3389,8 @@
       self.UpdateUsingSpecificFiles()
       return
 
-    if appcfg_java.IsWarFileWithoutYaml(self.basepath):
+    if (self._JavaSupported() and
+        appcfg_java.IsWarFileWithoutYaml(self.basepath)):
       java_app_update = appcfg_java.JavaAppUpdate(self.basepath, self.options)
       sdk_root = os.path.dirname(appcfg_java.__file__)
       self.options.compile_jsps = True
diff --git a/google/appengine/tools/backends_conversion.py b/google/appengine/tools/backends_conversion.py
index ed860b0..6d4179d 100644
--- a/google/appengine/tools/backends_conversion.py
+++ b/google/appengine/tools/backends_conversion.py
@@ -22,6 +22,7 @@
 <module-name>.yaml file for each module defined in backends.yaml.
 """
 
+from __future__ import with_statement
 
 import os
 import sys
diff --git a/google/appengine/tools/dev-channel-js.js b/google/appengine/tools/dev-channel-js.js
index c76dfca..b6f1c03 100644
--- a/google/appengine/tools/dev-channel-js.js
+++ b/google/appengine/tools/dev-channel-js.js
@@ -1914,11 +1914,11 @@
 };
 goog.dom.getElementsByClass = function(className, opt_el) {
   var parent = opt_el || document;
-  return goog.dom.canUseQuerySelector_(parent) ? parent.querySelectorAll("." + className) : parent.getElementsByClassName ? parent.getElementsByClassName(className) : goog.dom.getElementsByTagNameAndClass_(document, "*", className, opt_el);
+  return goog.dom.canUseQuerySelector_(parent) ? parent.querySelectorAll("." + className) : goog.dom.getElementsByTagNameAndClass_(document, "*", className, opt_el);
 };
 goog.dom.getElementByClass = function(className, opt_el) {
   var parent = opt_el || document, retVal = null;
-  return(retVal = goog.dom.canUseQuerySelector_(parent) ? parent.querySelector("." + className) : goog.dom.getElementsByClass(className, opt_el)[0]) || null;
+  return(retVal = goog.dom.canUseQuerySelector_(parent) ? parent.querySelector("." + className) : goog.dom.getElementsByTagNameAndClass_(document, "*", className, opt_el)[0]) || null;
 };
 goog.dom.getRequiredElementByClass = function(className, opt_root) {
   var retValue = goog.dom.getElementByClass(className, opt_root);
@@ -5154,7 +5154,7 @@
   this.xhrOptions_ = this.xmlHttpFactory_ ? this.xmlHttpFactory_.getOptions() : goog.net.XmlHttp.getOptions();
   this.xhr_.onreadystatechange = goog.bind(this.onReadyStateChange_, this);
   try {
-    goog.log.fine(this.logger_, this.formatMsg_("Opening Xhr")), this.inOpen_ = !0, this.xhr_.open(method, url, !0), this.inOpen_ = !1;
+    goog.log.fine(this.logger_, this.formatMsg_("Opening Xhr")), this.inOpen_ = !0, this.xhr_.open(method, String(url), !0), this.inOpen_ = !1;
   } catch (err) {
     goog.log.fine(this.logger_, this.formatMsg_("Error opening Xhr: " + err.message));
     this.error_(goog.net.ErrorCode.EXCEPTION, err);
diff --git a/google/appengine/tools/devappserver2/application_configuration.py b/google/appengine/tools/devappserver2/application_configuration.py
index 8403d3a..48079df 100644
--- a/google/appengine/tools/devappserver2/application_configuration.py
+++ b/google/appengine/tools/devappserver2/application_configuration.py
@@ -115,7 +115,7 @@
 
   @property
   def module_name(self):
-    return self._module_name or 'default'
+    return self._module_name or appinfo.DEFAULT_MODULE
 
   @property
   def major_version(self):
@@ -123,7 +123,7 @@
 
   @property
   def version_id(self):
-    if self.module_name == 'default':
+    if self.module_name == appinfo.DEFAULT_MODULE:
       return '%s.%s' % (
           self.major_version,
           self._minor_version_id)
@@ -570,7 +570,7 @@
                                            module.module_name)
       module_names.add(module.module_name)
     if self.dispatch:
-      if 'default' not in module_names:
+      if appinfo.DEFAULT_MODULE not in module_names:
         raise errors.InvalidAppConfigError(
             'A default module must be specified.')
       missing_modules = (
diff --git a/google/appengine/tools/devappserver2/blob_image.py b/google/appengine/tools/devappserver2/blob_image.py
index bc8aedd..34463cf 100644
--- a/google/appengine/tools/devappserver2/blob_image.py
+++ b/google/appengine/tools/devappserver2/blob_image.py
@@ -24,6 +24,9 @@
 from google.appengine.api import datastore
 from google.appengine.api import datastore_errors
 from google.appengine.api.images import images_service_pb
+from google.appengine.ext import blobstore
+from google.appengine.tools.devappserver2 import blob_download
+from google.appengine.tools.devappserver2 import request_rewriter
 
 BLOBIMAGE_URL_PATTERN = '_ah/img(?:/.*)?'
 _BLOB_SERVING_URL_KIND = '__BlobServingUrl__'
@@ -35,6 +38,14 @@
                   images_service_pb.OutputSettings.PNG: 'image/png',
                   images_service_pb.OutputSettings.WEBP: 'image/webp'}
 
+# Check there's a working images stub.
+try:
+  # pylint: disable=g-import-not-at-top, unused-import
+  from google.appengine.api.images import images_stub
+  _HAS_WORKING_IMAGES_STUB = True
+except ImportError:
+  _HAS_WORKING_IMAGES_STUB = False
+
 
 def _get_images_stub():
   return apiproxy_stub_map.apiproxy.GetStub('images')
@@ -51,18 +62,17 @@
 class Application(object):
   """A WSGI application that handles image serving requests."""
 
-  def _transform_image(self, blob_key, options):
+  def _transform_image(self, blob_key, resize=None, crop=False):
     """Construct and execute a transform request using the images stub.
 
     Args:
       blob_key: A str containing the blob_key of the image to transform.
-      options: A str containing the resize and crop options to apply to the
-          image.
+      resize: An integer for the size of the resulting image.
+      crop: A boolean determining if the image should be cropped or resized.
 
     Returns:
       A str containing the tranformed (if necessary) image.
     """
-    resize, crop = self._parse_options(options)
     image_data = images_service_pb.ImageData()
     image_data.set_blob_key(blob_key)
     image = _get_images_stub()._OpenImageData(image_data)
@@ -167,6 +177,14 @@
       blobkey += match.group(2)
     return (blobkey, options)
 
+  def serve_unresized_image(self, blobkey, environ, start_response):
+    """Use blob_download to rewrite and serve unresized image directly."""
+    state = request_rewriter.RewriterState(environ, '200 OK', [
+        (blobstore.BLOB_KEY_HEADER, blobkey)], [])
+    blob_download.blobstore_download_rewriter(state)
+    start_response(state.status, state.headers.items())
+    return state.body
+
   def serve_image(self, environ, start_response):
     """Dynamically serve an image from blobstore."""
     blobkey, options = self._parse_path(environ['PATH_INFO'])
@@ -181,11 +199,22 @@
                     'called before attempting to serve blobs.', blobkey)
       start_response('404 %s' % httplib.responses[404], [])
       return []
-    image, mime_type = self._transform_image(blobkey, options)
-    start_response('200 OK', [
-        ('Content-Type', mime_type),
-        ('Cache-Control', 'public, max-age=600, no-transform')])
-    return [image]
+
+    resize, crop = self._parse_options(options)
+
+    if resize is None and not crop:
+      return self.serve_unresized_image(blobkey, environ, start_response)
+    elif not _HAS_WORKING_IMAGES_STUB:
+      logging.warning('Serving resized images requires a working Python "PIL" '
+                      'module. The image is served without resizing.')
+      return self.serve_unresized_image(blobkey, environ, start_response)
+    else:
+      # Use Images service to transform blob.
+      image, mime_type = self._transform_image(blobkey, resize, crop)
+      start_response('200 OK', [
+          ('Content-Type', mime_type),
+          ('Cache-Control', 'public, max-age=600, no-transform')])
+      return [image]
 
   def __call__(self, environ, start_response):
     if environ['REQUEST_METHOD'] != 'GET':
diff --git a/google/appengine/tools/devappserver2/blob_image_test.py b/google/appengine/tools/devappserver2/blob_image_test.py
index 1382880..0e6855c 100644
--- a/google/appengine/tools/devappserver2/blob_image_test.py
+++ b/google/appengine/tools/devappserver2/blob_image_test.py
@@ -23,23 +23,25 @@
 import google
 import mox
 
-try:
-  from PIL import Image
-except ImportError:
-  try:
-    import Image
-  except ImportError:
-    raise unittest.SkipTest('blob_image_test could not import PIL')
-
 from google.appengine.api import datastore
 from google.appengine.api import datastore_errors
 from google.appengine.api.images import images_service_pb
 from google.appengine.api.images import images_stub
+from google.appengine.ext import blobstore
 from google.appengine.runtime import apiproxy_errors
+from google.appengine.tools.devappserver2 import blob_download
 from google.appengine.tools.devappserver2 import blob_image
 from google.appengine.tools.devappserver2 import wsgi_test_utils
 
 
+class MockImage(object):
+  """A mock PIL Image object."""
+
+  def __init__(self):
+    self.format = None
+    self.size = None
+
+
 class BlobImageTest(wsgi_test_utils.WSGITestCase):
   """Tests image url handler."""
 
@@ -47,15 +49,22 @@
     self.mox = mox.Mox()
     self._environ = {'PATH_INFO': 'http://test.com/_ah/img/SomeBlobKey',
                      'REQUEST_METHOD': 'GET'}
+    self._has_working_images_stub = blob_image._HAS_WORKING_IMAGES_STUB
+    blob_image._HAS_WORKING_IMAGES_STUB = True
     self._images_stub = self.mox.CreateMock(images_stub.ImagesServiceStub)
-    self._image = Image.Image()
+    self._mock_rewriter = self.mox.CreateMockAnything()
+    self._image = MockImage()
     self.app = blob_image.Application()
     os.environ['APPLICATION_ID'] = 'testapp'
     self._get_images_stub = blob_image._get_images_stub
     blob_image._get_images_stub = lambda: self._images_stub
+    self._blobstore_rewriter = blob_download.blobstore_download_rewriter
+    blob_download.blobstore_download_rewriter = self._mock_rewriter
 
   def tearDown(self):
+    blob_image._HAS_WORKING_IMAGES_STUB = self._has_working_images_stub
     blob_image._get_images_stub = self._get_images_stub
+    blob_download.blobstore_download_rewriter = self._blobstore_rewriter
     self.mox.UnsetStubs()
 
   def expect_open_image(self, blob_key, dimensions=None, throw_exception=None,
@@ -89,7 +98,7 @@
       if not isinstance(bottom_y, float):
         raise self.failureException('Crop argument must be a float.')
       crop_xform.set_crop_bottom_y(bottom_y)
-    self._images_stub._Crop(mox.IsA(Image.Image), crop_xform).AndReturn(
+    self._images_stub._Crop(mox.IsA(MockImage), crop_xform).AndReturn(
         self._image)
 
   def expect_resize(self, resize):
@@ -97,7 +106,7 @@
     resize_xform = images_service_pb.Transform()
     resize_xform.set_width(resize)
     resize_xform.set_height(resize)
-    self._images_stub._Resize(mox.IsA(Image.Image),
+    self._images_stub._Resize(mox.IsA(MockImage),
                               resize_xform).AndReturn(self._image)
 
   def expect_encode_image(self, data,
@@ -105,7 +114,7 @@
     """Setup a mox expectation to images_stub._EncodeImage."""
     output_settings = images_service_pb.OutputSettings()
     output_settings.set_mime_type(mime_type)
-    self._images_stub._EncodeImage(mox.IsA(Image.Image),
+    self._images_stub._EncodeImage(mox.IsA(MockImage),
                                    output_settings).AndReturn(data)
 
   def expect_datatore_lookup(self, blob_key, expected_result):
@@ -130,6 +139,26 @@
         self._environ)
     self.mox.VerifyAll()
 
+  def run_blobstore_serving_request(self, blobkey):
+    def _Validate(state):
+      return state.headers.get(blobstore.BLOB_KEY_HEADER) == blobkey
+
+    def _Rewrite(state):
+      del state.headers[blobstore.BLOB_KEY_HEADER]
+      state.headers['Content-Type'] = 'image/some-type'
+      state.body = ['SomeBlobImage']
+
+    self._mock_rewriter.__call__(mox.Func(_Validate)).WithSideEffects(_Rewrite)
+
+    self.mox.ReplayAll()
+    self.assertResponse(
+        '200 OK',
+        [('Content-Type', 'image/some-type')],
+        'SomeBlobImage',
+        self.app,
+        self._environ)
+    self.mox.VerifyAll()
+
   def test_parse_path(self):
     """Tests URL parsing."""
     self.assertEquals(('SomeBlobKey', ''),
@@ -184,7 +213,7 @@
             images_service_pb.ImagesServiceError.INVALID_BLOB_KEY))
     self.mox.ReplayAll()
     try:
-      self.app._transform_image('SomeBlobKey', '')
+      self.app._transform_image('SomeBlobKey')
       raise self.failureException('Should have thrown ApplicationError')
     except apiproxy_errors.ApplicationError:
       pass
@@ -197,7 +226,7 @@
     self.expect_encode_image('SomeImageInJpeg')
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),
-                      self.app._transform_image('SomeBlobKey', ''))
+                      self.app._transform_image('SomeBlobKey'))
     self.mox.VerifyAll()
 
   def test_transform_image_not_upscaled(self):
@@ -206,7 +235,7 @@
     self.expect_encode_image('SomeImageInJpeg')
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),
-                      self.app._transform_image('SomeBlobKey', ''))
+                      self.app._transform_image('SomeBlobKey'))
     self.mox.VerifyAll()
 
   def test_transform_image_no_resize_png(self):
@@ -217,7 +246,7 @@
                              images_service_pb.OutputSettings.PNG)
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageInPng', 'image/png'),
-                      self.app._transform_image('SomeBlobKey', ''))
+                      self.app._transform_image('SomeBlobKey'))
     self.mox.VerifyAll()
 
   def test_transform_image_no_resize_tiff(self):
@@ -228,7 +257,7 @@
     self.expect_encode_image('SomeImageInJpeg')
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),
-                      self.app._transform_image('SomeBlobKey', ''))
+                      self.app._transform_image('SomeBlobKey'))
     self.mox.VerifyAll()
 
   def test_transform_image_no_resize_gif(self):
@@ -240,7 +269,7 @@
                              images_service_pb.OutputSettings.PNG)
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageInPng', 'image/png'),
-                      self.app._transform_image('SomeBlobKey', ''))
+                      self.app._transform_image('SomeBlobKey'))
     self.mox.VerifyAll()
 
   def test_transform_image_resize(self):
@@ -250,7 +279,7 @@
     self.expect_encode_image('SomeImageSize32')
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageSize32', 'image/jpeg'),
-                      self.app._transform_image('SomeBlobKey', 's32'))
+                      self.app._transform_image('SomeBlobKey', 32))
     self.mox.VerifyAll()
 
   def test_transform_image_original_size(self):
@@ -259,7 +288,7 @@
     self.expect_encode_image('SomeImageInJpeg')
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),
-                      self.app._transform_image('SomeBlobKey', 's0'))
+                      self.app._transform_image('SomeBlobKey', 0))
     self.mox.VerifyAll()
 
   def test_transform_image_resize_png(self):
@@ -270,7 +299,7 @@
                              images_service_pb.OutputSettings.PNG)
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageSize32', 'image/png'),
-                      self.app._transform_image('SomeBlobKey', 's32'))
+                      self.app._transform_image('SomeBlobKey', 32))
     self.mox.VerifyAll()
 
   def test_transform_image_resize_and_crop_portrait(self):
@@ -281,7 +310,7 @@
     self.expect_encode_image('SomeImageSize32-c')
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageSize32-c', 'image/jpeg'),
-                      self.app._transform_image('SomeBlobKey', 's32-c'))
+                      self.app._transform_image('SomeBlobKey', 32, True))
     self.mox.VerifyAll()
 
   def test_transform_image_resize_and_crop_portrait_png(self):
@@ -293,7 +322,7 @@
                              images_service_pb.OutputSettings.PNG)
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageSize32-c', 'image/png'),
-                      self.app._transform_image('SomeBlobKey', 's32-c'))
+                      self.app._transform_image('SomeBlobKey', 32, True))
     self.mox.VerifyAll()
 
   def test_transform_image_resize_and_crop_landscape(self):
@@ -304,36 +333,24 @@
     self.expect_encode_image('SomeImageSize32-c')
     self.mox.ReplayAll()
     self.assertEquals(('SomeImageSize32-c', 'image/jpeg'),
-                      self.app._transform_image('SomeBlobKey', 's32-c'))
+                      self.app._transform_image('SomeBlobKey', 32, True))
     self.mox.VerifyAll()
 
-  def test_basic_run(self):
-    """Tests an image request."""
+  def test_run_no_resize_no_crop(self):
+    """Tests an image request without resizing or cropping."""
     self.expect_datatore_lookup('SomeBlobKey', True)
-    self.expect_open_image('SomeBlobKey', (1600, 1200))
-    self.expect_resize(blob_image._DEFAULT_SERVING_SIZE)
-    self.expect_encode_image('SomeImageInJpeg')
-    self.run_request('image/jpeg', 'SomeImageInJpeg')
 
-  def test_basic_run_png(self):
-    """Tests an image request for a PNG image."""
+    # Should result in serving from blobstore directly.
+    self.run_blobstore_serving_request('SomeBlobKey')
+
+  def test_run_resize_without_working_images_stub(self):
+    """Tests requesting a resized image without working images stub."""
+    blob_image._HAS_WORKING_IMAGES_STUB = False
     self.expect_datatore_lookup('SomeBlobKey', True)
-    self.expect_open_image('SomeBlobKey', (1600, 1200), mime_type='PNG')
-    self.expect_resize(blob_image._DEFAULT_SERVING_SIZE)
-    self.expect_encode_image('SomeImageInPng',
-                             images_service_pb.OutputSettings.PNG)
-    self.run_request('image/png', 'SomeImageInPng')
+    self._environ['PATH_INFO'] += '=s32'
 
-  def test_basic_run_with_padded_blobkey(self):
-    """Tests an image request with a padded blobkey."""
-    padded_blobkey = 'SomeBlobKey====================='
-    self.expect_datatore_lookup(padded_blobkey, True)
-    self.expect_open_image(padded_blobkey, (1600, 1200))
-    self.expect_resize(blob_image._DEFAULT_SERVING_SIZE)
-    self.expect_encode_image('SomeImageInJpeg')
-    self.mox.ReplayAll()
-    self._environ['PATH_INFO'] += '====================='
-    self.run_request('image/jpeg', 'SomeImageInJpeg')
+    # Should result in serving from blobstore directly.
+    self.run_blobstore_serving_request('SomeBlobKey')
 
   def test_run_resize(self):
     """Tests an image request with resizing."""
diff --git a/google/appengine/tools/devappserver2/devappserver2.py b/google/appengine/tools/devappserver2/devappserver2.py
index e8e9f79..86af330 100644
--- a/google/appengine/tools/devappserver2/devappserver2.py
+++ b/google/appengine/tools/devappserver2/devappserver2.py
@@ -27,6 +27,7 @@
 import tempfile
 import time
 
+from google.appengine.api import appinfo
 from google.appengine.datastore import datastore_stub_util
 from google.appengine.tools import boolean_action
 from google.appengine.tools.devappserver2.admin import admin_server
@@ -66,6 +67,9 @@
     'critical': logging.CRITICAL,
 }
 
+# The default encoding used by the production interpreter.
+_PROD_DEFAULT_ENCODING = 'ascii'
+
 
 def _generate_storage_paths(app_id):
   """Yield an infinite sequence of possible storage paths."""
@@ -218,7 +222,7 @@
       else:
         module_name = module_name.strip()
         if not module_name:
-          module_name = 'default'
+          module_name = appinfo.DEFAULT_MODULE
         if module_name in module_to_value:
           raise argparse.ArgumentTypeError(
               multiple_duplicate_module_error % module_name)
@@ -672,6 +676,17 @@
     else:
       update_checker.check_for_updates(configuration)
 
+    # There is no good way to set the default encoding from application code
+    # (it needs to be done during interpreter initialization in site.py or
+    # sitecustomize.py) so just warn developers if they have a different
+    # encoding than production.
+    if sys.getdefaultencoding() != _PROD_DEFAULT_ENCODING:
+      logging.warning(
+          'The default encoding of your local Python interpreter is set to %r '
+          'while App Engine\'s production environment uses %r; as a result '
+          'your code may behave differently when deployed.',
+          sys.getdefaultencoding(), _PROD_DEFAULT_ENCODING)
+
     if options.port == 0:
       logging.warn('DEFAULT_VERSION_HOSTNAME will not be set correctly with '
                    '--port=0')
diff --git a/google/appengine/tools/devappserver2/dispatcher.py b/google/appengine/tools/devappserver2/dispatcher.py
index ba32594..f1fa28c 100644
--- a/google/appengine/tools/devappserver2/dispatcher.py
+++ b/google/appengine/tools/devappserver2/dispatcher.py
@@ -22,6 +22,7 @@
 import urlparse
 import wsgiref.headers
 
+from google.appengine.api import appinfo
 from google.appengine.api import request_info
 from google.appengine.tools.devappserver2 import constants
 from google.appengine.tools.devappserver2 import instance
@@ -39,6 +40,10 @@
 ResponseTuple = collections.namedtuple('ResponseTuple',
                                        ['status', 'headers', 'content'])
 
+# This must be kept in sync with dispatch_ah_url_path_prefix_whitelist in
+# google/production/borg/apphosting/templates/frontend.borg.
+DISPATCH_AH_URL_PATH_PREFIX_WHITELIST = ('/_ah/queue/deferred',)
+
 
 class PortRegistry(object):
   def __init__(self):
@@ -370,7 +375,7 @@
       request_info.VersionDoesNotExistError: The version doesn't exist.
     """
     if not module_name:
-      module_name = 'default'
+      module_name = appinfo.DEFAULT_MODULE
     if module_name not in self._module_name_to_module:
       raise request_info.ModuleDoesNotExistError()
     if (version is not None and
@@ -398,8 +403,8 @@
       request_info.VersionDoesNotExistError: The version doesn't exist.
     """
     if not module_name or module_name not in self._module_name_to_module:
-      if 'default' in self._module_name_to_module:
-        module_name = 'default'
+      if appinfo.DEFAULT_MODULE in self._module_name_to_module:
+        module_name = appinfo.DEFAULT_MODULE
       elif self._module_name_to_module:
         # If there is no default module, but there are other modules, take any.
         # This is somewhat of a hack, and can be removed if we ever enforce the
@@ -688,9 +693,26 @@
     return self._handle_request(
         environ, start_response, self._module_for_request(environ['PATH_INFO']))
 
+  def _should_use_dispatch_config(self, path):
+    """Determines whether or not to use the dispatch config.
+
+    Args:
+      path: The request path.
+    Returns:
+      A Boolean indicating whether or not to use the rules in dispatch config.
+    """
+    if (not path.startswith('/_ah/') or
+        any(path.startswith(wl) for wl
+            in DISPATCH_AH_URL_PATH_PREFIX_WHITELIST)):
+      return True
+    else:
+      logging.warning('Skipping dispatch.yaml rules because %s is not a '
+                      'dispatchable path.', path)
+      return False
+
   def _module_for_request(self, path):
     dispatch = self._configuration.dispatch
-    if dispatch:
+    if dispatch and self._should_use_dispatch_config(path):
       for url, module_name in dispatch.dispatch:
         if (url.path_exact and path == url.path or
             not url.path_exact and path.startswith(url.path)):
diff --git a/google/appengine/tools/devappserver2/dispatcher_test.py b/google/appengine/tools/devappserver2/dispatcher_test.py
index a261f2b..d54fce2 100644
--- a/google/appengine/tools/devappserver2/dispatcher_test.py
+++ b/google/appengine/tools/devappserver2/dispatcher_test.py
@@ -438,6 +438,22 @@
     self.assertEqual('default',
                      self.dispatcher._module_for_request('/undispatched'))
 
+  def test_should_use_dispatch_config(self):
+    """Tests the _should_use_dispatch_config method."""
+    self.assertTrue(self.dispatcher._should_use_dispatch_config('/'))
+    self.assertTrue(self.dispatcher._should_use_dispatch_config('/foo/'))
+    self.assertTrue(self.dispatcher._should_use_dispatch_config(
+        '/_ah/queue/deferred'))
+    self.assertTrue(self.dispatcher._should_use_dispatch_config(
+        '/_ah/queue/deferred/blah'))
+
+    self.assertFalse(self.dispatcher._should_use_dispatch_config('/_ah/'))
+    self.assertFalse(self.dispatcher._should_use_dispatch_config('/_ah/foo/'))
+    self.assertFalse(self.dispatcher._should_use_dispatch_config(
+        '/_ah/foo/bar/'))
+    self.assertFalse(self.dispatcher._should_use_dispatch_config(
+        '/_ah/queue/'))
+
   def test_resolve_target(self):
     servr = object()
     inst = object()
diff --git a/google/appengine/tools/devappserver2/endpoints/endpoints_server.py b/google/appengine/tools/devappserver2/endpoints/endpoints_server.py
index 015712c..4edeb3b 100644
--- a/google/appengine/tools/devappserver2/endpoints/endpoints_server.py
+++ b/google/appengine/tools/devappserver2/endpoints/endpoints_server.py
@@ -152,7 +152,8 @@
     """Dispatch this request if this is a request to a reserved URL.
 
     If the request matches one of our reserved URLs, this calls
-    start_response and returns the response body.
+    start_response and returns the response body.  This also handles OPTIONS
+    CORS requests.
 
     Args:
       request: An ApiRequest, the request from the user.
@@ -165,6 +166,14 @@
     for path_regex, dispatch_function in self._dispatchers:
       if path_regex.match(request.relative_url):
         return dispatch_function(request, start_response)
+
+    if request.http_method == 'OPTIONS':
+      cors_handler = EndpointsDispatcher.__CheckCorsHeaders(request)
+      if cors_handler.allow_cors_request:
+        # The server returns 200 rather than 204, for some reason.
+        return util.send_wsgi_response('200', [], '', start_response,
+                                       cors_handler)
+
     return None
 
   def handle_api_explorer_request(self, request, start_response):
diff --git a/google/appengine/tools/devappserver2/gcs_application.py b/google/appengine/tools/devappserver2/gcs_server.py
similarity index 82%
rename from google/appengine/tools/devappserver2/gcs_application.py
rename to google/appengine/tools/devappserver2/gcs_server.py
index 578546e..e444628 100644
--- a/google/appengine/tools/devappserver2/gcs_application.py
+++ b/google/appengine/tools/devappserver2/gcs_server.py
@@ -22,9 +22,11 @@
 
 
 import httplib
+import logging
 import webob
 
 from google.appengine.ext.cloudstorage import stub_dispatcher
+from google.appengine.tools.devappserver2 import wsgi_server
 
 # Regex for all requests routed through this module.
 GCS_URL_PATTERN = '_ah/gcs/(.+)'
@@ -70,3 +72,17 @@
     start_response('%d %s' % (status_code, status_message), headers)
 
     return [result.content]
+
+
+class GCSServer(Application, wsgi_server.WsgiServer):
+  """Serves API calls over HTTP."""
+
+  def __init__(self, host, port):
+    self._host = host
+    super(GCSServer, self).__init__((host, port), self)
+
+  def start(self):
+    """Start the API Server."""
+    super(GCSServer, self).start()
+    logging.info('Starting Google Cloud Storage server at: http://%s:%d',
+                 self._host, self.port)
diff --git a/google/appengine/tools/devappserver2/gcs_application_test.py b/google/appengine/tools/devappserver2/gcs_server_test.py
similarity index 95%
rename from google/appengine/tools/devappserver2/gcs_application_test.py
rename to google/appengine/tools/devappserver2/gcs_server_test.py
index c9abac1..e666ad9 100644
--- a/google/appengine/tools/devappserver2/gcs_application_test.py
+++ b/google/appengine/tools/devappserver2/gcs_server_test.py
@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Tests for google.appengine.tools.devappserver2.gcs_application."""
+"""Tests for google.appengine.tools.devappserver2.gcs_server."""
 
 import cStringIO
 import httplib
@@ -24,7 +24,7 @@
 import mox
 
 from google.appengine.ext.cloudstorage import stub_dispatcher
-from google.appengine.tools.devappserver2 import gcs_application
+from google.appengine.tools.devappserver2 import gcs_server
 from google.appengine.tools.devappserver2 import wsgi_test_utils
 
 
@@ -40,7 +40,7 @@
 
   def setUp(self):
     self.mox = mox.Mox()
-    self.app = gcs_application.Application()
+    self.app = gcs_server.Application()
     self.mox.StubOutWithMock(stub_dispatcher, 'dispatch')
     self._host = 'localhost'
 
diff --git a/google/appengine/tools/devappserver2/module.py b/google/appengine/tools/devappserver2/module.py
index 9a9589e..75d148c 100644
--- a/google/appengine/tools/devappserver2/module.py
+++ b/google/appengine/tools/devappserver2/module.py
@@ -46,7 +46,7 @@
 from google.appengine.tools.devappserver2 import endpoints
 from google.appengine.tools.devappserver2 import errors
 from google.appengine.tools.devappserver2 import file_watcher
-from google.appengine.tools.devappserver2 import gcs_application
+from google.appengine.tools.devappserver2 import gcs_server
 from google.appengine.tools.devappserver2 import go_runtime
 from google.appengine.tools.devappserver2 import http_runtime_constants
 from google.appengine.tools.devappserver2 import instance
@@ -208,9 +208,9 @@
     handlers.append(
         wsgi_handler.WSGIHandler(channel.application, url_pattern))
 
-    url_pattern = '/%s' % gcs_application.GCS_URL_PATTERN
+    url_pattern = '/%s' % gcs_server.GCS_URL_PATTERN
     handlers.append(
-        wsgi_handler.WSGIHandler(gcs_application.Application(), url_pattern))
+        wsgi_handler.WSGIHandler(gcs_server.Application(), url_pattern))
 
     url_pattern = '/%s' % endpoints.API_SERVING_PATTERN
     handlers.append(
diff --git a/google/appengine/tools/devappserver2/php/setup.php b/google/appengine/tools/devappserver2/php/setup.php
index ddf8abd..6155a7a 100644
--- a/google/appengine/tools/devappserver2/php/setup.php
+++ b/google/appengine/tools/devappserver2/php/setup.php
@@ -22,9 +22,6 @@
            // All values are considered true except the empty string.
            $allowed_buckets ? 1 : 0);
     define('GAE_INCLUDE_GS_BUCKETS', $allowed_buckets);
-
-    unset($_ENV['APPLICATION_ROOT']);
-    unset($_SERVER['APPLICATION_ROOT']);
   };
 
   $configureDefaults = function() {
@@ -49,8 +46,20 @@
     unset($_ENV['REAL_SCRIPT_FILENAME']);
     unset($_SERVER['REAL_SCRIPT_FILENAME']);
 
-    // SCRIPT_NAME == PHP_SELF on app engine.
-    $_SERVER['SCRIPT_NAME'] = $_SERVER['PHP_SELF'];
+    // Replicate the SCRIPT_NAME and PHP_SELF setup used in production.
+    // Set SCRIPT_NAME to SCRIPT_FILENAME made relative to APPLICATION_ROOT and
+    // PHP_SELF to SCRIPT_NAME except when the script is included in PATH_INFO
+    // (REQUEST_URI without the query string), which matches Apache behavior.
+    $_SERVER['SCRIPT_NAME'] = substr(
+      $_SERVER['SCRIPT_FILENAME'], strlen($_SERVER['APPLICATION_ROOT']));
+    if (strpos($_SERVER['PATH_INFO'], $_SERVER['SCRIPT_NAME']) === 0) {
+      $_SERVER['PHP_SELF'] = $_SERVER['PATH_INFO'];
+    } else {
+      $_SERVER['PHP_SELF'] = $_SERVER['SCRIPT_NAME'];
+    }
+
+    unset($_ENV['APPLICATION_ROOT']);
+    unset($_SERVER['APPLICATION_ROOT']);
   };
 
   $setupApiProxy = function() {
diff --git a/google/appengine/tools/devappserver2/python/sandbox.py b/google/appengine/tools/devappserver2/python/sandbox.py
index d89dc90..6b87e47 100644
--- a/google/appengine/tools/devappserver2/python/sandbox.py
+++ b/google/appengine/tools/devappserver2/python/sandbox.py
@@ -775,6 +775,7 @@
     'exceptions',
     '_fileio',
     '_functools',
+    'future_builtins',
     'gc',
     '_hashlib',
     '_heapq',
diff --git a/google/appengine/tools/php_cli.py b/google/appengine/tools/php_cli.py
new file mode 100644
index 0000000..5afdca6
--- /dev/null
+++ b/google/appengine/tools/php_cli.py
@@ -0,0 +1,241 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Invoke PHP after setting up the App Engine environment."""
+
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import wsgiref.util
+
+import google
+from google.appengine.api import request_info
+from google.appengine.datastore import datastore_stub_util
+from google.appengine.tools.devappserver2 import api_server
+from google.appengine.tools.devappserver2 import gcs_server
+from google.appengine.tools.devappserver2.php import runtime
+
+
+def _get_gcs_server():
+  server = gcs_server.GCSServer('localhost', 0)
+  server.start()
+  return server
+
+
+class APIRequestInfo(request_info.RequestInfo):
+  """Allows stubs to lookup state linked to the request making the API call."""
+
+  def __init__(self):
+    self._environ = {}
+    wsgiref.util.setup_testing_defaults(self._environ)
+
+  def get_request_url(self, request_id):
+    """Returns the URL the request e.g. 'http://localhost:8080/foo?bar=baz'.
+
+    Args:
+      request_id: The string id of the request making the API call.
+
+    Returns:
+      The URL of the request as a string.
+    """
+    return wsgiref.util.request_uri(self._environ)
+
+  def get_request_environ(self, request_id):
+    """Returns a dict containing the WSGI environ for the request."""
+    return self._environ
+
+  def get_module(self, request_id):
+    """Returns the name of the module serving this request.
+
+    Args:
+      request_id: The string id of the request making the API call.
+
+    Returns:
+      A str containing the module name.
+    """
+    return 'default'
+
+  def get_version(self, request_id):
+    """Returns the version of the module serving this request.
+
+    Args:
+      request_id: The string id of the request making the API call.
+
+    Returns:
+      A str containing the version.
+    """
+    return '1'
+
+  def get_instance(self, request_id):
+    """Returns the instance serving this request.
+
+    Args:
+      request_id: The string id of the request making the API call.
+
+    Returns:
+      An opaque representation of the instance serving this request. It should
+      only be passed to dispatcher methods expecting an instance.
+    """
+    return object()
+
+  def get_dispatcher(self):
+    """Returns the Dispatcher.
+
+    Returns:
+      The Dispatcher instance.
+    """
+    return request_info._LocalFakeDispatcher()
+
+
+def _get_api_server(app_id):
+  """Return a configured and started api_server.APIServer."""
+  tmp_dir = tempfile.mkdtemp()
+  os.environ['APPLICATION_ID'] = app_id
+  api_server.setup_stubs(
+      request_data=APIRequestInfo(),
+      app_id=app_id,
+      application_root=os.getcwd(),
+      trusted=False,
+      appidentity_email_address=None,
+      appidentity_private_key_path=None,
+      blobstore_path=tmp_dir,
+      datastore_consistency=
+      datastore_stub_util.PseudoRandomHRConsistencyPolicy(),
+      datastore_path=':memory:',
+      datastore_require_indexes=False,
+      datastore_auto_id_policy=datastore_stub_util.SCATTERED,
+      images_host_prefix='http://localhost:8080',
+      logs_path=':memory:',
+      mail_smtp_host='',
+      mail_smtp_port=25,
+      mail_smtp_user='',
+      mail_smtp_password='',
+      mail_enable_sendmail=False,
+      mail_show_mail_body=False,
+      matcher_prospective_search_path=tmp_dir,
+      search_index_path=None,
+      taskqueue_auto_run_tasks=False,
+      taskqueue_default_http_server='http://localhost:8080',
+      user_login_url='/_ah/login?continue=%s',
+      user_logout_url='/_ah/login?continue=%s',
+      default_gcs_bucket_name=None)
+
+  server = api_server.APIServer('localhost', 0, app_id)
+  server.start()
+  return server
+
+
+def _get_default_php_cli_path():
+  """Returns the path to the siloed php cli binary or None if not present."""
+  default_php_executable_path = None
+  google_package_directory = os.path.dirname(google.__file__)
+  sdk_directory = os.path.dirname(google_package_directory)
+
+  if sys.platform == 'win32':
+    default_php_executable_path = os.path.abspath(
+        os.path.join(sdk_directory, 'php/php-5.4-Win32-VC9-x86/php.exe'))
+  elif sys.platform == 'darwin':
+    default_php_executable_path = os.path.abspath(
+        os.path.join(os.path.dirname(sdk_directory), 'php'))
+
+  if (default_php_executable_path and
+      os.path.exists(default_php_executable_path)):
+    return default_php_executable_path
+  return None
+
+
+def _parse_path(value):
+  """Returns the given path with ~ and environment variables expanded."""
+  return os.path.expanduser(os.path.expandvars(value))
+
+
+def _create_command_line_parser():
+  """Returns an argparse.ArgumentParser to parse command line arguments."""
+  parser = argparse.ArgumentParser(
+      usage='usage: %(prog)s [execution options] <script> [script_args]',
+      formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+  parser.add_argument(
+      'script',
+      help='the path to the PHP script that should be executed')
+  parser.add_argument(
+      'script_args',
+      help='the command arguments that will be passed to the script',
+      nargs=argparse.REMAINDER)
+
+  execution_group = parser.add_argument_group('Execution Options')
+
+  php_cli_path = _get_default_php_cli_path()
+  execution_group.add_argument('--php_executable_path', metavar='PATH',
+                               type=_parse_path,
+                               default=php_cli_path,
+                               required=php_cli_path is None,
+                               help='path to the PHP executable')
+  return parser
+
+
+def main():
+  parser = _create_command_line_parser()
+  options = parser.parse_args()
+
+  if not options.php_executable_path:
+    parser.error('--php_executable_path must be set')
+  elif not os.path.exists(options.php_executable_path):
+    parser.error('--php_executable_path=%s, %s does not exist' % (
+        options.php_executable_path, options.php_executable_path))
+
+  php_script = os.path.abspath(_parse_path(options.script))
+  if not os.path.exists(php_script):
+    parser.error('%s does not exist' % php_script)
+
+  api_srver = _get_api_server(app_id='dummy_app_id')
+  gcs_srver = _get_gcs_server()
+
+  include_paths = [runtime.SDK_PATH]
+  if sys.platform == 'win32':
+
+    include_path = 'include_path="%s"' % ';'.join(include_paths)
+  else:
+    include_path = 'include_path=%s' % ':'.join(include_paths)
+
+  php_args = [options.php_executable_path,
+              '-d', include_path,
+              '-f', runtime.SETUP_PHP_PATH,]
+  php_args.extend(options.script_args)
+
+
+  env = dict(HTTP_HOST='localhost:%d' % gcs_srver.port,
+             SERVER_SOFTWARE='Development/CLI',
+             REAL_SCRIPT_FILENAME=php_script,
+             REMOTE_API_HOST='localhost',
+             REMOTE_API_PORT=str(api_srver.port),
+             REMOTE_REQUEST_ID='51',
+             APPLICATION_ROOT=os.path.dirname(php_script))
+  if 'SYSTEMROOT' in os.environ:
+    env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
+
+  php_process = subprocess.Popen(php_args, env=env)
+  script_return = php_process.wait()
+
+  api_srver.quit()
+  gcs_srver.quit()
+  sys.exit(script_return)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/google/net/proto/ProtocolBuffer.py b/google/net/proto/ProtocolBuffer.py
index 5db550c..ca2a0ad 100644
--- a/google/net/proto/ProtocolBuffer.py
+++ b/google/net/proto/ProtocolBuffer.py
@@ -954,7 +954,7 @@
         extension.number, extension)
     if actual_handle is not extension:
       raise AssertionError(
-          'Extensions "%s" and "%s" both try to extend message type "%s" with'
+          'Extensions "%s" and "%s" both try to extend message type "%s" with '
           'field number %d.' %
           (extension.full_name, actual_handle.full_name,
            cls.__name__, extension.number))
diff --git a/google/net/proto2/proto/descriptor_pb2.py b/google/net/proto2/proto/descriptor_pb2.py
index eed2c6a..d571464 100644
--- a/google/net/proto2/proto/descriptor_pb2.py
+++ b/google/net/proto2/proto/descriptor_pb2.py
@@ -17,6 +17,8 @@
 
 
 
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
 from google.net.proto2.python.public import descriptor as _descriptor
 from google.net.proto2.python.public import message as _message
 from google.net.proto2.python.public import reflection as _reflection
@@ -28,7 +30,8 @@
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='net/proto2/proto/descriptor.proto',
   package='proto2',
-  serialized_pb='\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\x95\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12%\n\x07options\x18\x08 
\x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.StreamOptions\"\xdc\t\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 
\x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x34\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10java_mutable_api\x18\x1c \x01(\x08:\x05\x66\x61lse\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 
\n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd1\x02\n\x0eMessageOptions\x12+\n#experimental_java_message_interface\x18\x04 \x03(\t\x12+\n#experimental_java_builder_interface\x18\x05 \x03(\t\x12+\n#experimental_java_interface_extends\x18\x06 \x03(\t\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x05\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12%\n\x16\x64\x65precated_raw_message\x18\x0c \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"<\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\x1c\n\x18\x45XPERIMENTAL_BYTE_BUFFER\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd1\x07\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x1a\n\x12legacy_stream_type\x18\x16 \x01(\t\x12\x1a\n\x12legacy_result_type\x18\x17 \x01(\t\x12(\n\x1clegacy_client_initial_tokens\x18\x18 \x01(\x03:\x02-1\x12(\n\x1clegacy_server_initial_tokens\x18\x19 \x01(\x03:\x02-1\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x87\x04\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\n \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB,\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01\xe0\x01\x01')
+  serialized_pb=_b('\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\x95\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12%\n\x07options\x18\x08 
\x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.StreamOptions\"\xdc\t\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 
\x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x34\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10java_mutable_api\x18\x1c \x01(\x08:\x05\x66\x61lse\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 
\n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd1\x02\n\x0eMessageOptions\x12+\n#experimental_java_message_interface\x18\x04 \x03(\t\x12+\n#experimental_java_builder_interface\x18\x05 \x03(\t\x12+\n#experimental_java_interface_extends\x18\x06 \x03(\t\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x05\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12%\n\x16\x64\x65precated_raw_message\x18\x0c \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"<\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\x1c\n\x18\x45XPERIMENTAL_BYTE_BUFFER\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd3\t\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x1a\n\x12legacy_stream_type\x18\x16 \x01(\t\x12\x1a\n\x12legacy_result_type\x18\x17 \x01(\t\x12(\n\x1clegacy_client_initial_tokens\x18\x18 \x01(\x03:\x02-1\x12(\n\x1clegacy_server_initial_tokens\x18\x19 \x01(\x03:\x02-1\x12^\n\tlog_level\x18\x1b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01\"\x9f\x01\n\x08LogLevel\x12\x0c\n\x08LOG_NONE\x10\x00\x12\x13\n\x0fLOG_HEADER_ONLY\x10\x01\x12/\n+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x10\x02\x12#\n\x1fLOG_HEADER_AND_FILTERED_PAYLOAD\x10\x03\x12\x1a\n\x16LOG_HEADER_AND_PAYLOAD\x10\x04*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe7\x04\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\n \x01(\x08:\x05\x66\x61lse\x12^\n\tlog_level\x18\x0b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB,\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01\xe0\x01\x01')
+)
 
 
 
@@ -284,8 +287,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5802,
-  serialized_end=5830,
+  serialized_start=5898,
+  serialized_end=5926,
 )
 
 _METHODOPTIONS_SECURITYLEVEL = _descriptor.EnumDescriptor(
@@ -313,8 +316,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5832,
-  serialized_end=5933,
+  serialized_start=5928,
+  serialized_end=6029,
 )
 
 _METHODOPTIONS_FORMAT = _descriptor.EnumDescriptor(
@@ -334,8 +337,41 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5935,
-  serialized_end=5983,
+  serialized_start=6031,
+  serialized_end=6079,
+)
+
+_METHODOPTIONS_LOGLEVEL = _descriptor.EnumDescriptor(
+  name='LogLevel',
+  full_name='proto2.MethodOptions.LogLevel',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='LOG_NONE', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LOG_HEADER_ONLY', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LOG_HEADER_AND_FILTERED_PAYLOAD', index=3, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LOG_HEADER_AND_PAYLOAD', index=4, number=4,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=6082,
+  serialized_end=6241,
 )
 
 _STREAMOPTIONS_TOKENUNIT = _descriptor.EnumDescriptor(
@@ -355,8 +391,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=6471,
-  serialized_end=6505,
+  serialized_start=6825,
+  serialized_end=6859,
 )
 
 
@@ -398,14 +434,14 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.FileDescriptorProto.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='package', full_name='proto2.FileDescriptorProto.package', index=1,
       number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -530,7 +566,7 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.DescriptorProto.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -607,7 +643,7 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.FieldDescriptorProto.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -635,21 +671,21 @@
     _descriptor.FieldDescriptor(
       name='type_name', full_name='proto2.FieldDescriptorProto.type_name', index=4,
       number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='extendee', full_name='proto2.FieldDescriptorProto.extendee', index=5,
       number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='default_value', full_name='proto2.FieldDescriptorProto.default_value', index=6,
       number=7, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -693,7 +729,7 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.OneofDescriptorProto.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -721,7 +757,7 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.EnumDescriptorProto.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -763,7 +799,7 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.EnumValueDescriptorProto.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -805,7 +841,7 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.ServiceDescriptorProto.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -854,21 +890,21 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.MethodDescriptorProto.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='input_type', full_name='proto2.MethodDescriptorProto.input_type', index=1,
       number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='output_type', full_name='proto2.MethodDescriptorProto.output_type', index=2,
       number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -903,21 +939,21 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.StreamDescriptorProto.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='client_message_type', full_name='proto2.StreamDescriptorProto.client_message_type', index=1,
       number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='server_message_type', full_name='proto2.StreamDescriptorProto.server_message_type', index=2,
       number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -987,7 +1023,7 @@
     _descriptor.FieldDescriptor(
       name='java_package', full_name='proto2.FileOptions.java_package', index=5,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1036,7 +1072,7 @@
     _descriptor.FieldDescriptor(
       name='java_alt_api_package', full_name='proto2.FileOptions.java_alt_api_package', index=12,
       number=19, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1050,7 +1086,7 @@
     _descriptor.FieldDescriptor(
       name='java_outer_classname', full_name='proto2.FileOptions.java_outer_classname', index=14,
       number=8, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1092,14 +1128,14 @@
     _descriptor.FieldDescriptor(
       name='go_package', full_name='proto2.FileOptions.go_package', index=20,
       number=11, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='javascript_package', full_name='proto2.FileOptions.javascript_package', index=21,
       number=12, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1241,14 +1277,14 @@
     _descriptor.FieldDescriptor(
       name='name', full_name='proto2.FieldOptions.UpgradedOption.name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='value', full_name='proto2.FieldOptions.UpgradedOption.value', index=1,
       number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1317,7 +1353,7 @@
     _descriptor.FieldDescriptor(
       name='experimental_map_key', full_name='proto2.FieldOptions.experimental_map_key', index=6,
       number=9, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1376,7 +1412,7 @@
     _descriptor.FieldDescriptor(
       name='proto1_name', full_name='proto2.EnumOptions.proto1_name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1579,14 +1615,14 @@
     _descriptor.FieldDescriptor(
       name='stream_type', full_name='proto2.MethodOptions.stream_type', index=10,
       number=18, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='security_label', full_name='proto2.MethodOptions.security_label', index=11,
       number=19, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1607,14 +1643,14 @@
     _descriptor.FieldDescriptor(
       name='legacy_stream_type', full_name='proto2.MethodOptions.legacy_stream_type', index=14,
       number=22, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='legacy_result_type', full_name='proto2.MethodOptions.legacy_result_type', index=15,
       number=23, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1633,14 +1669,21 @@
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
-      name='deprecated', full_name='proto2.MethodOptions.deprecated', index=18,
+      name='log_level', full_name='proto2.MethodOptions.log_level', index=18,
+      number=27, type=14, cpp_type=8, label=1,
+      has_default_value=True, default_value=2,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='proto2.MethodOptions.deprecated', index=19,
       number=33, type=8, cpp_type=7, label=1,
       has_default_value=True, default_value=False,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='proto2.MethodOptions.uninterpreted_option', index=19,
+      name='uninterpreted_option', full_name='proto2.MethodOptions.uninterpreted_option', index=20,
       number=999, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
@@ -1654,12 +1697,13 @@
     _METHODOPTIONS_PROTOCOL,
     _METHODOPTIONS_SECURITYLEVEL,
     _METHODOPTIONS_FORMAT,
+    _METHODOPTIONS_LOGLEVEL,
   ],
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
   serialized_start=5017,
-  serialized_end=5994,
+  serialized_end=6252,
 )
 
 
@@ -1701,7 +1745,7 @@
     _descriptor.FieldDescriptor(
       name='security_label', full_name='proto2.StreamOptions.security_label', index=4,
       number=5, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1741,14 +1785,21 @@
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
-      name='deprecated', full_name='proto2.StreamOptions.deprecated', index=10,
+      name='log_level', full_name='proto2.StreamOptions.log_level', index=10,
+      number=11, type=14, cpp_type=8, label=1,
+      has_default_value=True, default_value=2,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='proto2.StreamOptions.deprecated', index=11,
       number=33, type=8, cpp_type=7, label=1,
       has_default_value=True, default_value=False,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='proto2.StreamOptions.uninterpreted_option', index=11,
+      name='uninterpreted_option', full_name='proto2.StreamOptions.uninterpreted_option', index=12,
       number=999, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
@@ -1764,8 +1815,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=5997,
-  serialized_end=6516,
+  serialized_start=6255,
+  serialized_end=6870,
 )
 
 
@@ -1779,7 +1830,7 @@
     _descriptor.FieldDescriptor(
       name='name_part', full_name='proto2.UninterpretedOption.NamePart.name_part', index=0,
       number=1, type=9, cpp_type=9, label=2,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1799,8 +1850,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6745,
-  serialized_end=6796,
+  serialized_start=7099,
+  serialized_end=7150,
 )
 
 _UNINTERPRETEDOPTION = _descriptor.Descriptor(
@@ -1820,7 +1871,7 @@
     _descriptor.FieldDescriptor(
       name='identifier_value', full_name='proto2.UninterpretedOption.identifier_value', index=1,
       number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1848,14 +1899,14 @@
     _descriptor.FieldDescriptor(
       name='string_value', full_name='proto2.UninterpretedOption.string_value', index=5,
       number=7, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value="",
+      has_default_value=False, default_value=_b(""),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='aggregate_value', full_name='proto2.UninterpretedOption.aggregate_value', index=6,
       number=8, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1868,8 +1919,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6519,
-  serialized_end=6796,
+  serialized_start=6873,
+  serialized_end=7150,
 )
 
 
@@ -1897,14 +1948,14 @@
     _descriptor.FieldDescriptor(
       name='leading_comments', full_name='proto2.SourceCodeInfo.Location.leading_comments', index=2,
       number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
       name='trailing_comments', full_name='proto2.SourceCodeInfo.Location.trailing_comments', index=3,
       number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -1917,8 +1968,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6868,
-  serialized_end=6967,
+  serialized_start=7222,
+  serialized_end=7321,
 )
 
 _SOURCECODEINFO = _descriptor.Descriptor(
@@ -1944,8 +1995,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6799,
-  serialized_end=6967,
+  serialized_start=7153,
+  serialized_end=7321,
 )
 
 _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
@@ -1955,7 +2006,7 @@
 _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
 _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
 _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
-_DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO;
+_DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
 _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
 _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
 _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
@@ -1966,8 +2017,8 @@
 _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
 _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
 _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
-_FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO;
-_FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO;
+_FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
+_FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
 _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
 _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
 _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
@@ -1979,18 +2030,18 @@
 _FILEOPTIONS.fields_by_name['cc_api_compatibility'].enum_type = _FILEOPTIONS_COMPATIBILITYLEVEL
 _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
 _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_FILEOPTIONS_COMPATIBILITYLEVEL.containing_type = _FILEOPTIONS;
-_FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS;
+_FILEOPTIONS_COMPATIBILITYLEVEL.containing_type = _FILEOPTIONS
+_FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
 _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_FIELDOPTIONS_UPGRADEDOPTION.containing_type = _FIELDOPTIONS;
+_FIELDOPTIONS_UPGRADEDOPTION.containing_type = _FIELDOPTIONS
 _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
 _FIELDOPTIONS.fields_by_name['jtype'].enum_type = _FIELDOPTIONS_JTYPE
 _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE
 _FIELDOPTIONS.fields_by_name['upgraded_option'].message_type = _FIELDOPTIONS_UPGRADEDOPTION
 _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS;
-_FIELDOPTIONS_JTYPE.containing_type = _FIELDOPTIONS;
-_FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS;
+_FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
+_FIELDOPTIONS_JTYPE.containing_type = _FIELDOPTIONS
+_FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS
 _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
 _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
 _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
@@ -1998,17 +2049,20 @@
 _METHODOPTIONS.fields_by_name['security_level'].enum_type = _METHODOPTIONS_SECURITYLEVEL
 _METHODOPTIONS.fields_by_name['response_format'].enum_type = _METHODOPTIONS_FORMAT
 _METHODOPTIONS.fields_by_name['request_format'].enum_type = _METHODOPTIONS_FORMAT
+_METHODOPTIONS.fields_by_name['log_level'].enum_type = _METHODOPTIONS_LOGLEVEL
 _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_METHODOPTIONS_PROTOCOL.containing_type = _METHODOPTIONS;
-_METHODOPTIONS_SECURITYLEVEL.containing_type = _METHODOPTIONS;
-_METHODOPTIONS_FORMAT.containing_type = _METHODOPTIONS;
+_METHODOPTIONS_PROTOCOL.containing_type = _METHODOPTIONS
+_METHODOPTIONS_SECURITYLEVEL.containing_type = _METHODOPTIONS
+_METHODOPTIONS_FORMAT.containing_type = _METHODOPTIONS
+_METHODOPTIONS_LOGLEVEL.containing_type = _METHODOPTIONS
 _STREAMOPTIONS.fields_by_name['token_unit'].enum_type = _STREAMOPTIONS_TOKENUNIT
 _STREAMOPTIONS.fields_by_name['security_level'].enum_type = _METHODOPTIONS_SECURITYLEVEL
+_STREAMOPTIONS.fields_by_name['log_level'].enum_type = _METHODOPTIONS_LOGLEVEL
 _STREAMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_STREAMOPTIONS_TOKENUNIT.containing_type = _STREAMOPTIONS;
-_UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION;
+_STREAMOPTIONS_TOKENUNIT.containing_type = _STREAMOPTIONS
+_UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
 _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
-_SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO;
+_SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
 _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
 DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
 DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
@@ -2031,149 +2085,153 @@
 DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
 DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
 
-class FileDescriptorSet(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _FILEDESCRIPTORSET
+FileDescriptorSet = _reflection.GeneratedProtocolMessageType('FileDescriptorSet', (_message.Message,), dict(
+  DESCRIPTOR = _FILEDESCRIPTORSET,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
+FileDescriptorProto = _reflection.GeneratedProtocolMessageType('FileDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _FILEDESCRIPTORPROTO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-class FileDescriptorProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _FILEDESCRIPTORPROTO
+  ))
 
+DescriptorProto = _reflection.GeneratedProtocolMessageType('DescriptorProto', (_message.Message,), dict(
 
+  ExtensionRange = _reflection.GeneratedProtocolMessageType('ExtensionRange', (_message.Message,), dict(
+    DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE,
+    __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-class DescriptorProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
+    ))
+  ,
+  DESCRIPTOR = _DESCRIPTORPROTO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-  class ExtensionRange(_message.Message):
-    __metaclass__ = _reflection.GeneratedProtocolMessageType
-    DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE
+  ))
 
+FieldDescriptorProto = _reflection.GeneratedProtocolMessageType('FieldDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _FIELDDESCRIPTORPROTO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-  DESCRIPTOR = _DESCRIPTORPROTO
+  ))
 
+OneofDescriptorProto = _reflection.GeneratedProtocolMessageType('OneofDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _ONEOFDESCRIPTORPROTO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
-class FieldDescriptorProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _FIELDDESCRIPTORPROTO
+EnumDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMDESCRIPTORPROTO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
+EnumValueDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumValueDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-class OneofDescriptorProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _ONEOFDESCRIPTORPROTO
+  ))
 
+ServiceDescriptorProto = _reflection.GeneratedProtocolMessageType('ServiceDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _SERVICEDESCRIPTORPROTO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
-class EnumDescriptorProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _ENUMDESCRIPTORPROTO
+MethodDescriptorProto = _reflection.GeneratedProtocolMessageType('MethodDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _METHODDESCRIPTORPROTO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
+StreamDescriptorProto = _reflection.GeneratedProtocolMessageType('StreamDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _STREAMDESCRIPTORPROTO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-class EnumValueDescriptorProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO
+  ))
 
+FileOptions = _reflection.GeneratedProtocolMessageType('FileOptions', (_message.Message,), dict(
+  DESCRIPTOR = _FILEOPTIONS,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
-class ServiceDescriptorProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _SERVICEDESCRIPTORPROTO
+MessageOptions = _reflection.GeneratedProtocolMessageType('MessageOptions', (_message.Message,), dict(
+  DESCRIPTOR = _MESSAGEOPTIONS,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
+FieldOptions = _reflection.GeneratedProtocolMessageType('FieldOptions', (_message.Message,), dict(
 
-class MethodDescriptorProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _METHODDESCRIPTORPROTO
+  UpgradedOption = _reflection.GeneratedProtocolMessageType('UpgradedOption', (_message.Message,), dict(
+    DESCRIPTOR = _FIELDOPTIONS_UPGRADEDOPTION,
+    __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+    ))
+  ,
+  DESCRIPTOR = _FIELDOPTIONS,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
-class StreamDescriptorProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _STREAMDESCRIPTORPROTO
-
-
-
-class FileOptions(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _FILEOPTIONS
-
-
-
-class MessageOptions(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _MESSAGEOPTIONS
-
-
-
-class FieldOptions(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-
-  class UpgradedOption(_message.Message):
-    __metaclass__ = _reflection.GeneratedProtocolMessageType
-    DESCRIPTOR = _FIELDOPTIONS_UPGRADEDOPTION
-
-
-  DESCRIPTOR = _FIELDOPTIONS
-
-
-
-class EnumOptions(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _ENUMOPTIONS
-
-
-
-class EnumValueOptions(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _ENUMVALUEOPTIONS
-
-
-
-class ServiceOptions(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _SERVICEOPTIONS
-
-
-
-class MethodOptions(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _METHODOPTIONS
+EnumOptions = _reflection.GeneratedProtocolMessageType('EnumOptions', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMOPTIONS,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
+EnumValueOptions = _reflection.GeneratedProtocolMessageType('EnumValueOptions', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMVALUEOPTIONS,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-class StreamOptions(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _STREAMOPTIONS
+  ))
 
+ServiceOptions = _reflection.GeneratedProtocolMessageType('ServiceOptions', (_message.Message,), dict(
+  DESCRIPTOR = _SERVICEOPTIONS,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
-class UninterpretedOption(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
+MethodOptions = _reflection.GeneratedProtocolMessageType('MethodOptions', (_message.Message,), dict(
+  DESCRIPTOR = _METHODOPTIONS,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-  class NamePart(_message.Message):
-    __metaclass__ = _reflection.GeneratedProtocolMessageType
-    DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART
+  ))
 
+StreamOptions = _reflection.GeneratedProtocolMessageType('StreamOptions', (_message.Message,), dict(
+  DESCRIPTOR = _STREAMOPTIONS,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-  DESCRIPTOR = _UNINTERPRETEDOPTION
+  ))
 
+UninterpretedOption = _reflection.GeneratedProtocolMessageType('UninterpretedOption', (_message.Message,), dict(
 
+  NamePart = _reflection.GeneratedProtocolMessageType('NamePart', (_message.Message,), dict(
+    DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART,
+    __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-class SourceCodeInfo(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
+    ))
+  ,
+  DESCRIPTOR = _UNINTERPRETEDOPTION,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
-  class Location(_message.Message):
-    __metaclass__ = _reflection.GeneratedProtocolMessageType
-    DESCRIPTOR = _SOURCECODEINFO_LOCATION
+  ))
 
+SourceCodeInfo = _reflection.GeneratedProtocolMessageType('SourceCodeInfo', (_message.Message,), dict(
 
-  DESCRIPTOR = _SOURCECODEINFO
+  Location = _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), dict(
+    DESCRIPTOR = _SOURCECODEINFO_LOCATION,
+    __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+    ))
+  ,
+  DESCRIPTOR = _SOURCECODEINFO,
+  __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
+  ))
 
 
 
diff --git a/google/storage/speckle/proto/sql_pb2.py b/google/storage/speckle/proto/sql_pb2.py
index 404cfb3..0ffb96d 100644
--- a/google/storage/speckle/proto/sql_pb2.py
+++ b/google/storage/speckle/proto/sql_pb2.py
@@ -972,30 +972,30 @@
         ExecRequest,
         ExecResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.ExecOp.im_func,
         ExecOpRequest,
         ExecOpResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.GetMetadata.im_func,
         MetadataRequest,
         MetadataResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.OpenConnection.im_func,
         OpenConnectionRequest,
         OpenConnectionResponse,
         None,
-        'none')
+        'INTEGRITY')
     rpcserver._GetHandlerDecorator(
         self.CloseConnection.im_func,
         CloseConnectionRequest,
         CloseConnectionResponse,
         None,
-        'none')
+        'INTEGRITY')
 
 
diff --git a/google_sql.py b/google_sql.py
index 2af168b..de775b6 100644
--- a/google_sql.py
+++ b/google_sql.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/lib/cacerts/urlfetch_cacerts.txt b/lib/cacerts/urlfetch_cacerts.txt
index c575060..54186d4 100644
--- a/lib/cacerts/urlfetch_cacerts.txt
+++ b/lib/cacerts/urlfetch_cacerts.txt
@@ -4165,30 +4165,6 @@
 YR1UJj4UWWSwmNXzp8OurVA=
 -----END CERTIFICATE-----
 
-subject= /C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA
-serial=020000000000D678B79405
------BEGIN CERTIFICATE-----
-MIIDdTCCAl2gAwIBAgILAgAAAAAA1ni3lAUwDQYJKoZIhvcNAQEEBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
-MDBaFw0xNDAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
-YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
-aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
-jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
-xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
-1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
-snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
-U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
-9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIABjAdBgNVHQ4EFgQU
-YHtmGkUNl8qJUC99BM00qP/8/UswDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0B
-AQQFAAOCAQEArqqf/LfSyx9fOSkoGJ40yWxPbxrwZKJwSk8ThptgKJ7ogUmYfQq7
-5bCdPTbbjwVR/wkxKh/diXeeDy5slQTthsu0AD+EAk2AaioteAuubyuig0SDH81Q
-gkwkr733pbTIWg/050deSY43lv6aiAU62cDbKYfmGZZHpzqmjIs8d/5GY6dT2iHR
-rH5Jokvmw2dZL7OKDrssvamqQnw1wdh/1acxOk5jQzmvCLBhNIzTmKlDNPYPhyk7
-ncJWWJh3w/cbrPad+D6qp1RF8PX51TFl/mtYnHGzHtdS6jIX/EBgHcl5JLL2bP2o
-Zg6C3ZjL2sJETy6ge/L3ayx2EYRGinij4w==
------END CERTIFICATE-----
-
 subject= /C=US/O=GeoTrust Inc./CN=GeoTrust Global CA
 serial=12BBE6
 -----BEGIN CERTIFICATE-----
@@ -4698,32 +4674,6 @@
 -----END CERTIFICATE-----
 
 subject= /CN=GlobalSign RootSign Partners CA/OU=RootSign Partners CA/O=GlobalSign nv-sa/C=BE
-serial=040000000000F97FAA2E1E
------BEGIN CERTIFICATE-----
-MIID5zCCAs+gAwIBAgILBAAAAAAA+X+qLh4wDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw0wMzEyMTYxMzAw
-MDBaFw0xNDAxMjcxMTAwMDBaMHExKDAmBgNVBAMTH0dsb2JhbFNpZ24gUm9vdFNp
-Z24gUGFydG5lcnMgQ0ExHTAbBgNVBAsTFFJvb3RTaWduIFBhcnRuZXJzIENBMRkw
-FwYDVQQKExBHbG9iYWxTaWduIG52LXNhMQswCQYDVQQGEwJCRTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAL3vMPEw8TSpiWV3TUanjZD9rk+OyigXulnj
-qJIKRQMqio/lCVBVUoHwo5Gx2RIqgfbCAxw8gsByzfGnANf1VJwKR+6alUGSjqCt
-CT3T66J0rZ8ZIAm2faZeNZ9POWoDtYqtH5Ziaxe5q4dg1V1t2ZLJ0BOu1IjZUKhE
-kQSw6kfqX7LtBMHXAXwh+MRxI/xrTGVEM8ONHebSZhxSKUbEBucLNfBZAWYAic+c
-43t4qlPi7qw1lef9XddClJXTGm4xVUfX663HTJ9UcYMaF8j5585YAfQ2v64/WZ9l
-fEAHXHMgNKISw0n0aEBpHonghek6t5dju0ewOWtBAH71S7h/4yECAwEAAaOBmTCB
-ljAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVoTs
-tXGl52PY21EE1vrm8EhSSc4wMwYDVR0fBCwwKjAooCagJIYiaHR0cDovL2NybC5n
-bG9iYWxzaWduLm5ldC9Sb290LmNybDAfBgNVHSMEGDAWgBRge2YaRQ2XyolQL30E
-zTSo//z9SzANBgkqhkiG9w0BAQUFAAOCAQEAXC8uZ0oms+e1PzU83aAD7Vaa+UQ3
-UhYwZcfRTqIPjbe2tmeO50zsjZW+5s6nInh0rNf4dJmz986LEzjVlsyNdsUvOLI6
-rmG+C4eZ4yFiZCM5jYT2hY33d/+wOAbwfsFIX7XuWCYGZgUidJKDp9u1+ZLj6MMZ
-LC5j77sf3/n3B0dmDQeJl374Myyey64UPfEc36Pxea/Iko+UccTRRMVU2x61Cwqp
-QqOv1kM5He6Pk5hYW75unAv1Y+xemcL5VPoBB0baDbBkJM+O0QYdTzyiY3dFW6S8
-X7CAuzHgC1QBXBYdck7VKmlH0Rtmfl8BbvE1kWvgLv6wRdgWJ7XFi8LaUw==
------END CERTIFICATE-----
-
-subject= /CN=GlobalSign RootSign Partners CA/OU=RootSign Partners CA/O=GlobalSign nv-sa/C=BE
 serial=040000000001154B5AC5A7
 -----BEGIN CERTIFICATE-----
 MIID5zCCAs+gAwIBAgILBAAAAAABFUtaxacwDQYJKoZIhvcNAQEFBQAwVzELMAkG
@@ -4828,33 +4778,6 @@
 uQ==
 -----END CERTIFICATE-----
 
-subject= /C=BE/O=GlobalSign nv-sa/OU=Primary Secure Server CA/CN=GlobalSign Primary Secure Server CA
-serial=04000000000108D9611E1E
------BEGIN CERTIFICATE-----
-MIID7zCCAtegAwIBAgILBAAAAAABCNlhHh4wDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05OTAxMjgxMjAw
-MDBaFw0xNDAxMjcxMTAwMDBaMHkxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
-YWxTaWduIG52LXNhMSEwHwYDVQQLExhQcmltYXJ5IFNlY3VyZSBTZXJ2ZXIgQ0Ex
-LDAqBgNVBAMTI0dsb2JhbFNpZ24gUHJpbWFyeSBTZWN1cmUgU2VydmVyIENBMIIB
-IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA9mrtqGswoy2s6UKeGDXAHvdv
-dMu3QiRTrTHL76XJxT0DXqWdds0Z4uEWLaQtRCDxGh/3fWDNpscVqauKoslmbN0Q
-o9ibdynupkDNLzQ2f6MXBQvLWKUipnw15o1doVPCmsXaXf7YDn07IpdSLN2yPAuQ
-3AX9suUKVR5dnmL7f+O4lvSfJqyiXITZgrrg6PWVbgQKlmRJow+eg6lj58khmWug
-FpElyBTZvdzsPHdTR1ZDhH7WY+XjKK88T8B9tBj2175XC4nb1sGDkpLjnDDRWUyl
-cZBfhgdw6E6UFMnyTqOAwloRqejo4rwCnL84TXraPFFj7rz4fFF+oLjgSKmvrQID
-AQABo4GZMIGWMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
-DgQWBBTRZoP1iV7Qvn9h0t2oyvorekp/MTAzBgNVHR8ELDAqMCigJqAkhiJodHRw
-Oi8vY3JsLmdsb2JhbHNpZ24ubmV0L1Jvb3QuY3JsMB8GA1UdIwQYMBaAFGB7ZhpF
-DZfKiVAvfQTNNKj//P1LMA0GCSqGSIb3DQEBBQUAA4IBAQCc/xTUC7yfoztsuSwh
-yDfDRU0w+0uSkOBYBb30bknZlLUJiGhuU9zOLHajg0ooxYo/RdDxLwBloK4XBzgA
-1Cjiw+JJbLcC4Sk9lNzUx9pjAU88q+HnhGFE5v4tfP/iXBm8TFnMvpq0cUgizi2b
-tH7B2ue+jWeIdmXqkSUDVuoEjiVlaFQjGZEm37JGdjFYqwr9qDXioMQlr3owBIcd
-/8B7a4AeSnd70iwU0giZBlwCjdVJo1sAFKoznEmRbCoBi+IXncx0w2oK5uuBOKX8
-J6mFIJZtXOCLMhhh+cO801XohixLfQk38vjRkXZrvPqQcLo4WiQpff/N0n41bdnu
-mvjF
------END CERTIFICATE-----
-
 subject= /C=JP/O=SECOM Trust.net/OU=Security Communication RootCA1
 serial=2712
 -----BEGIN CERTIFICATE-----
@@ -5071,33 +4994,6 @@
 IIi0tWeUz12OYjf+xLQ=
 -----END CERTIFICATE-----
 
-subject= /C=BE/CN=Belgium Root CA2
-serial=040000000001156AB1AA7E
------BEGIN CERTIFICATE-----
-MIID/TCCAuWgAwIBAgILBAAAAAABFWqxqn4wDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw0wNzEwMDQxMjAw
-MDBaFw0xNDAxMjYyMzAwMDBaMCgxCzAJBgNVBAYTAkJFMRkwFwYDVQQDExBCZWxn
-aXVtIFJvb3QgQ0EyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxnNC
-HpL/dQ+Lv3SGpz/tshgtLZf5qfuYSiPf1Y3gjMYyHBYtB0LWLbZuL6f1/MaFgl2V
-3rUiAMyoU0Cfrwo1onrH4cr3YBBnDqdQcxdTlZ8inwxdb7ZBvIzr2h1GvaeUv/Ma
-y9T7jQ4eM8iW1+yMU96THjQeilBxJli0XcKIidpg0okhP97XARg2buEscAMEZe+Y
-BitdHmLcVWv+ZmQhX/gv4debKa9vzZ+qDEbRiMWdopWfrD8VrvJh3+/Da5oi2Cxx
-/Vgd7ACkOCCVWsfVN2O6T5uq/lZGLmPZCyPVivq1I/CJG6EUDSbaQfA4jzDtBSZ5
-wUtOobh+VVI6aUaEdQIDAQABo4H4MIH1MA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
-Af8EBTADAQH/MB0GA1UdDgQWBBSFiuv0xbu+DlkDlN7WgAEV4xCcOTBDBgNVHSAE
-PDA6MDgGBWA4CQEBMC8wLQYIKwYBBQUHAgEWIWh0dHA6Ly9yZXBvc2l0b3J5LmVp
-ZC5iZWxnaXVtLmJlIDA6BgNVHR8EMzAxMC+gLaArhilodHRwOi8vc2VjdXJlLmds
-b2JhbHNpZ24ubmV0L2NybC9yb290LmNybDARBglghkgBhvhCAQEEBAMCAAcwHwYD
-VR0jBBgwFoAUYHtmGkUNl8qJUC99BM00qP/8/UswDQYJKoZIhvcNAQEFBQADggEB
-AH1t5NWhYEwrNe6NfOyI0orfIiEoy13BB5w214IoqfGSTivFMZBI2FQeBOquBXko
-B253FXQq+mmZMlIl5qn0qprUQKQlicA2cSm0UgBe7SlIQkkxFusl1AgVdjk6oeNk
-HqxZs+J1SLy0NofzDA+F8BWy4AVSPujQ6x1GK70FdGmea/h9anxodOyPLAvWEckP
-FxavtvTuxwAjBTfdGB6Z6DvQBq0LtljcrLyojA9uwVDSvcwOTZK5lcTV54aE6KZW
-X2DapbDi2KY/oL6HfhOiDh+OPqa3YXzvCesY/h5v0RerHFFk49+ItSJryzwRcvYu
-zk1zYQL5ZykZc/PkVRV3HWE=
------END CERTIFICATE-----
-
 subject= /C=US/O=Volusion, Inc./OU=Domain Validated SSL/CN=Volusion, Inc. DV SSL CA
 serial=023A5C
 -----BEGIN CERTIFICATE-----
@@ -5422,33 +5318,6 @@
 50FxBsiYOB6e1zFqDr0OPLoSjOHq1gwXwFI8Z5jZ
 -----END CERTIFICATE-----
 
-subject= /OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign
-serial=040000000001100B8C9E8B
------BEGIN CERTIFICATE-----
-MIIECjCCAvKgAwIBAgILBAAAAAABEAuMnoswDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw0wNzAxMTAwOTAw
-MDBaFw0xNDAxMjgxMjAwMDBaMEwxIDAeBgNVBAsTF0dsb2JhbFNpZ24gUm9vdCBD
-QSAtIFIyMRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAps8kDr4ubyiZRULEqz4h
-VJsL03+EcPoSs8u/h1/Gf4bTsjBc1v2t8Xvc5fhglgmSEPXQU977e35ziKxSiHtK
-pspJpl6op4xaEbx6guu+jOmzrJYlB5dKmSoHL7Qed7+KD7UCfBuWuMW5Oiy81hK5
-61l94tAGhl9eSWq1OV6INOy8eAwImIRsqM1LtKB9DHlN8LgtyyHK1WxbfeGgKYSh
-+dOUScskYpEgvN0L1dnM+eonCitzkcadG6zIy+jgoPQvkItN+7A2G/YZeoXgbfJh
-E4hcn+CTClGXilrOr6vV96oJqmC93Nlf33KpYBNeAAHJSvo/pOoHAyECjoLKA8Kb
-jwIDAQABo4HhMIHeMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0G
-A1UdDgQWBBSb4gdXZxwewGoG3lm0mi3f3BmGLjBGBgNVHSAEPzA9MDsGBFUdIAAw
-MzAxBggrBgEFBQcCARYlaHR0cDovL3d3dy5nbG9iYWxzaWduLm5ldC9yZXBvc2l0
-b3J5LzAzBgNVHR8ELDAqMCigJqAkhiJodHRwOi8vY3JsLmdsb2JhbHNpZ24ubmV0
-L3Jvb3QuY3JsMB8GA1UdIwQYMBaAFGB7ZhpFDZfKiVAvfQTNNKj//P1LMA0GCSqG
-SIb3DQEBBQUAA4IBAQCEmeuzQ3pcTg9cnzsuGJMExGbQjMWUmeremTFDNhx3MBb5
-GevG3dwQjvKl3ZciW9r9MeKs4qljEdKkFaCgmW0JXh2Ylgt73rQFZtzx2jdQWDfW
-nQSCtgrmPRpyOHEWTcStQT3uJC5l/SY7SMUK1FSGuBLBtCfLKgdYzhrZQuWcUWHy
-AcPdwPUWQcSQZ5rBcx+0tNB6eR7+Ua30DBykB86CBdXtI0H1T5e2XmMp8rRsviTG
-NZPzn3/dA3Uk8NLxaYupKQWPikxy8JC/B7KPh9bWl8DW1ZV4j9tNjYodO5g85wDR
-1K8jW2iz/0GhC+SlPaKG59ilq0oEO7KLAxfXOWyw
------END CERTIFICATE-----
-
 subject= /OU=Alpha CA/O=Alpha/CN=Alpha CA
 serial=0400000000011E44A5F365
 -----BEGIN CERTIFICATE-----
@@ -5692,33 +5561,6 @@
 pN19+kSElK7XCQQidg9kUTWpJA/5C9sy2sL+wbkqXHonE8qxSDpx0EM=
 -----END CERTIFICATE-----
 
-subject= /C=BE/O=GlobalSign nv-sa/OU=ServerSign CA/CN=GlobalSign ServerSign CA
-serial=04000000000108D96125CF
------BEGIN CERTIFICATE-----
-MIIEFzCCAv+gAwIBAgILBAAAAAABCNlhJc8wDQYJKoZIhvcNAQEFBQAweTELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExITAfBgNVBAsTGFBy
-aW1hcnkgU2VjdXJlIFNlcnZlciBDQTEsMCoGA1UEAxMjR2xvYmFsU2lnbiBQcmlt
-YXJ5IFNlY3VyZSBTZXJ2ZXIgQ0EwHhcNMDQwMTIyMDkwMDAwWhcNMTQwMTI3MTAw
-MDAwWjBjMQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTEW
-MBQGA1UECxMNU2VydmVyU2lnbiBDQTEhMB8GA1UEAxMYR2xvYmFsU2lnbiBTZXJ2
-ZXJTaWduIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0dkNqei/
-yW/6JO5Wc7YT0C4NnfG/7TxWnxjFjMkdi2adIiSYRJmYkKyY7VKVYAjSIdpOelaO
-cjfkWiLKYUVa+Bznik0EASFwGcWh12YgscLaFwaPINsO0EbU77Akx+ObdvXJwtIE
-CSL8sGX/bPbWKZdjJQjtkNaK/IkHvazrAxZjeuqj+IKGdFT3dabWXLKkqqx/s3C8
-TiXHW8YDXaqxteXa+CzXsTI9dPm6C9PQ5wMaZFc7bMOxi3jiA9CVt4Scb/VRWRWB
-86UxBpD5Aw0Wg9g4RrfG3vJbec5q6NU4gGINHDQJOj3ZoZmbWk6EVm22jH3sHa5I
-aW84jH70y+nX7QIDAQABo4G1MIGyMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8E
-CDAGAQH/AgEAMB0GA1UdDgQWBBRIu6i/W4TSV0jkYemRIJHYHSXffzA5BgNVHR8E
-MjAwMC6gLKAqhihodHRwOi8vY3JsLmdsb2JhbHNpZ24ubmV0L3ByaW1zZXJ2ZXIu
-Y3JsMBEGCWCGSAGG+EIBAQQEAwICBDAfBgNVHSMEGDAWgBTRZoP1iV7Qvn9h0t2o
-yvorekp/MTANBgkqhkiG9w0BAQUFAAOCAQEAP/iVafKvW0zupGPQ/4Kc2OqZSCLo
-lwkE0+tsZzH7q/oIL5o1HKzEA4acHOHMoxZ7PrUL24sZz+9KoGxsZaz3+pj7JSih
-/k0kFaLkJ+jcDqGz441uhRWRfP0Nv8o/GvMsGxAwZHUa3cW1ytEKHUvzgSnKqr0o
-rGbJSoAb59oezXnB6YWkel+4840tciE/DwwYfBFL7Kq6SFdSM8ioDsOfz7bsDbnt
-nk67vMUgBiiiFFoQymb++pOpIMBYV5Bvqtc5vUGla6fOL53iU4GiwPnUEMqVwpra
-HBtFP+DIpNCr0Iai5G1wY5rfdmdz9TaJPi3YtwXLdNfZAf9KfJv5q547rQ==
------END CERTIFICATE-----
-
 subject= /C=pt/O=MULTICERT-CA/CN=MULTICERT-CA 02
 serial=07278EF8
 -----BEGIN CERTIFICATE-----
@@ -6358,34 +6200,6 @@
 -----END CERTIFICATE-----
 
 subject= /OU=Alpha CA/O=Alpha/CN=Alpha CA
-serial=04000000000111DFE869C6
------BEGIN CERTIFICATE-----
-MIIEMjCCAxqgAwIBAgILBAAAAAABEd/oacYwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw0wNzA0MTExMjAw
-MDBaFw0xNDAxMjcxMTAwMDBaMDYxETAPBgNVBAsTCEFscGhhIENBMQ4wDAYDVQQK
-EwVBbHBoYTERMA8GA1UEAxMIQWxwaGEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IB
-DwAwggEKAoIBAQC7Mi4rE/30JIpb+mvNq5yytguJauYfQc6KJEL/XK9fkD31kLlz
-x3C0wso41q0GFRqAOB15K6VDIOe5+o0GIlcNZLXRTe0kOEm0agfUM9s7djg/r3dp
-73oTMyl7QISQNXhajyMpV2+wV6s3mZQoz9PHV6WWsYqBLnOAvWjsGxEXih7YlHdL
-dpHqtMwWMwNiuAYaZWm+rNaXG6exJ6HAJVIvSbzaBAa6uLWmqOHLJYe2KNSJazQB
-dxq27N5Z3Jm7XdyPhMK5YgMTYwIJnuEJyL7xGHlxbcnQtUKXyvg0TZKHwDn6XCE9
-lFIEWoOp1KuDBSjYFyMkg2SbIS/4Oyt4ZIeTAgMBAAGjggEeMIIBGjAOBgNVHQ8B
-Af8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUCin6ra9N/f1d
-fXYmh6uqWqp0IhUwSgYDVR0gBEMwQTA/BgorBgEEAaAyAQoKMDEwLwYIKwYBBQUH
-AgEWI2h0dHA6Ly93d3cuYWxwaGFzc2wuY29tL3JlcG9zaXRvcnkvMDMGA1UdHwQs
-MCowKKAmoCSGImh0dHA6Ly9jcmwuZ2xvYmFsc2lnbi5uZXQvcm9vdC5jcmwwEQYJ
-YIZIAYb4QgEBBAQDAgIEMCAGA1UdJQQZMBcGCisGAQQBgjcKAwMGCWCGSAGG+EIE
-ATAfBgNVHSMEGDAWgBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0BAQUF
-AAOCAQEAYVsyCaDG341UXqvUm13oG0OsOEr+TQXMMXQYFzeKpo7yr3yMYmU08bYl
-YeOQE0Yj//Jk6Ukk5c8b6yEIdZGC7MSfpkvBSHeVFx3EckWt/pPkA0sFIIGSvZCA
-jYJaJYvZg2td3dbFGMhwOoDX0AY7/gKWG71yAIT6pUX6cFEq39Ayx1KZcUlcLF9I
-Xxm90y9SqWsR75b9tErUnITJI7eILTmAmEmS0KOBJOQEfIhWugYoEe0xd60lk/x8
-MVchbuC4YXvXCyg1RUEKE4QTvdX7yynEtYz551HVVbCTFnpR9jx/4H2k+ule1cUc
-8+etx3iT40MdIKmATxlRmcT0N8zmhw==
------END CERTIFICATE-----
-
-subject= /OU=Alpha CA/O=Alpha/CN=Alpha CA
 serial=0400000000011E44A5F171
 -----BEGIN CERTIFICATE-----
 MIIEMjCCAxqgAwIBAgILBAAAAAABHkSl8XEwDQYJKoZIhvcNAQEFBQAwVzELMAkG
@@ -6889,34 +6703,6 @@
 pG1FJseIVqDwavfY5/wnfmcI0L36tsNhAgFlubgvz1o=
 -----END CERTIFICATE-----
 
-subject= /DC=ru/DC=yandex/DC=ld/CN=YandexExternalCA
-serial=072713F5
------BEGIN CERTIFICATE-----
-MIIEPjCCA6egAwIBAgIEBycT9TANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MB4XDTA3MDExNzE1MTYyMFoXDTE0MDExNzE1MTU0NlowWzESMBAG
-CgmSJomT8ixkARkWAnJ1MRYwFAYKCZImiZPyLGQBGRYGeWFuZGV4MRIwEAYKCZIm
-iZPyLGQBGRYCbGQxGTAXBgNVBAMTEFlhbmRleEV4dGVybmFsQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDOf3icODyZl7NcIpHKuJQ5ol5X/roVHJiG
-0s2wu3vXYmyEYTFQLGM1qr/yjOAKnNtwpQMr98+quO7SWs8TvtxT+mf75lptRuD2
-JawD2Vrkqq/gv92L0lyg6vfmWgovWhGftKjy6S8LPTGhsypfPEvCjBzG3IcyIlUP
-S/4VIvk5hXLNFlvR9iPjMZ6Pfs1MfU+GwudBWkG4HefSTcrsJV4j/l/eORIkCc36
-yWWTJrCUTTigx50qeRjiH6Aq8UxEhaNNU6GROgEQyarDT0n78Zu4v8/S6bRBhL+q
-yDMTUDuXzLseDNr5i1w8g6NZ9XbvmMF4fl5SGAKKNtLFxfeDqsoXAgMBAAGjggFv
-MIIBazASBgNVHRMBAf8ECDAGAQH/AgEBMFMGA1UdIARMMEowSAYJKwYBBAGxPgEA
-MDswOQYIKwYBBQUHAgEWLWh0dHA6Ly93d3cucHVibGljLXRydXN0LmNvbS9DUFMv
-T21uaVJvb3QuaHRtbDAOBgNVHQ8BAf8EBAMCAYYwgYkGA1UdIwSBgTB/oXmkdzB1
-MQswCQYDVQQGEwJVUzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQL
-Ex5HVEUgQ3liZXJUcnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBD
-eWJlclRydXN0IEdsb2JhbCBSb290ggIBpTBFBgNVHR8EPjA8MDqgOKA2hjRodHRw
-Oi8vd3d3LnB1YmxpYy10cnVzdC5jb20vY2dpLWJpbi9DUkwvMjAxOC9jZHAuY3Js
-MB0GA1UdDgQWBBTbQScwTxr1Wz6EVsjshZizUSwtJzANBgkqhkiG9w0BAQUFAAOB
-gQAZuNLEObDlHdW3QJbokq5ANrTp9/WLLdRONjFK0tPkHq5FjeyX4GgPVvAUTuQa
-ydC35nz7H+1SGZBp9F+pT9YnaNH6lKl7o8mXPOCznQYeIvGCgI4L1uv37QtBvbri
-B/I8h+FY/43FMjAnk9ciR1xgbARK4bUKZaPd9MdU+/TY7w==
------END CERTIFICATE-----
-
 subject= /C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Class 4 L1 CA/CN=TC TrustCenter Class 4 Extended Validation CA II
 serial=04000000000126943401B0
 -----BEGIN CERTIFICATE-----
@@ -7506,34 +7292,6 @@
 -----END CERTIFICATE-----
 
 subject= /C=BE/OU=Domain Validation CA/O=GlobalSign nv-sa/CN=GlobalSign Domain Validation CA
-serial=040000000001100B8CA35E
------BEGIN CERTIFICATE-----
-MIIESjCCAzKgAwIBAgILBAAAAAABEAuMo14wDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw0wNzAxMTAxMDAw
-MDBaFw0xNDAxMjcxMTAwMDBaMHExCzAJBgNVBAYTAkJFMR0wGwYDVQQLExREb21h
-aW4gVmFsaWRhdGlvbiBDQTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTEoMCYG
-A1UEAxMfR2xvYmFsU2lnbiBEb21haW4gVmFsaWRhdGlvbiBDQTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBALSfSeaznwFVNtA2lWzLFlpscrXineL6OekK
-3HNcoDt2bQUokw2lQvPoy+7TMxoTJwrfXNFUYmqaFzbWPFiHHrJmH1VpK4lWR7TC
-UAzlXcH9KRtmc0P0b9EUTyptSFI69eSQP96y9BDV+fqslg0QMiPS01GnlYVQ+g8p
-naeITg0xm0RBjkEvbpoatLalWfFJWQl+fknTaTNLAJLFG0Igafhk39inRNGQXv05
-rWt9/tWLpAFk9qe0IITMBS8n7h7VJJauhEOkPkPzO5nX+fLePRnt0GXxScpI0jh9
-xkjXcmG4xsJnCthlWv1b88X9voxpz5kgtursOYDpZqjuPZ1Ge4cCAwEAAaOB/DCB
-+TAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADBLBgNVHSAERDBC
-MEAGCSsGAQQBoDIBCjAzMDEGCCsGAQUFBwIBFiVodHRwOi8vd3d3Lmdsb2JhbHNp
-Z24ubmV0L3JlcG9zaXRvcnkvMB0GA1UdDgQWBBQ2Ek6eccQmQfH68SlMvxekUyi2
-6zAzBgNVHR8ELDAqMCigJqAkhiJodHRwOi8vY3JsLmdsb2JhbHNpZ24ubmV0L3Jv
-b3QuY3JsMBEGCWCGSAGG+EIBAQQEAwICBDAfBgNVHSMEGDAWgBRge2YaRQ2XyolQ
-L30EzTSo//z9SzANBgkqhkiG9w0BAQUFAAOCAQEAU4qY64SKL/9h4+uqiFEka36A
-nvej8ivbFnwdYQqa6j8GWtMGOZb/W/AoaIsb4dj21hfQtxMmS28uz4m8lIn1xvLK
-bGKexmVzVcqN5ZJ6S1fSKHZT1LjvmHdtQrjuqGakYmUyVcsB5EpmPO6q8506Xok4
-t0Syi+Q5fAlSnKjzL+kBJNJcirFm7Im5PFwif8Gl2rE1jGhBAZbsZBQPcsHaLydr
-/JtekuEqKBEVfYLG38I6Jv4U6WcIgLUO+SkIeoqyc2Fn5AVWL8s/ADgF/ldEkGVb
-mOWdpQj3KeqTrT+P29ABlQvVjrB+J+wo9pa7jqUeXYOAoCYkifQovnzuwn+ohQ==
------END CERTIFICATE-----
-
-subject= /C=BE/OU=Domain Validation CA/O=GlobalSign nv-sa/CN=GlobalSign Domain Validation CA
 serial=0400000000011E44A5FA2C
 -----BEGIN CERTIFICATE-----
 MIIESjCCAzKgAwIBAgILBAAAAAABHkSl+iwwDQYJKoZIhvcNAQEFBQAwVzELMAkG
@@ -8109,35 +7867,6 @@
 znVg9mCJZCVa
 -----END CERTIFICATE-----
 
-subject= /OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign
-serial=040000000001220D3C0B8D
------BEGIN CERTIFICATE-----
-MIIEVzCCAz+gAwIBAgILBAAAAAABIg08C40wDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw0wOTA2MjMxMjAw
-MDBaFw0xNDAxMjgxMjAwMDBaMEwxIDAeBgNVBAsTF0dsb2JhbFNpZ24gUm9vdCBD
-QSAtIFIyMRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAps8kDr4ubyiZRULEqz4h
-VJsL03+EcPoSs8u/h1/Gf4bTsjBc1v2t8Xvc5fhglgmSEPXQU977e35ziKxSiHtK
-pspJpl6op4xaEbx6guu+jOmzrJYlB5dKmSoHL7Qed7+KD7UCfBuWuMW5Oiy81hK5
-61l94tAGhl9eSWq1OV6INOy8eAwImIRsqM1LtKB9DHlN8LgtyyHK1WxbfeGgKYSh
-+dOUScskYpEgvN0L1dnM+eonCitzkcadG6zIy+jgoPQvkItN+7A2G/YZeoXgbfJh
-E4hcn+CTClGXilrOr6vV96oJqmC93Nlf33KpYBNeAAHJSvo/pOoHAyECjoLKA8Kb
-jwIDAQABo4IBLTCCASkwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8w
-HQYDVR0OBBYEFJviB1dnHB7AagbeWbSaLd/cGYYuMEYGA1UdIAQ/MD0wOwYEVR0g
-ADAzMDEGCCsGAQUFBwIBFiVodHRwOi8vd3d3Lmdsb2JhbHNpZ24ubmV0L3JlcG9z
-aXRvcnkvMDMGA1UdHwQsMCowKKAmoCSGImh0dHA6Ly9jcmwuZ2xvYmFsc2lnbi5u
-ZXQvcm9vdC5jcmwwSQYIKwYBBQUHAQEEPTA7MDkGCCsGAQUFBzABhi1odHRwOi8v
-b2NzcC5nbG9iYWxzaWduLmNvbS9FeHRlbmRlZFNTTENBQ3Jvc3MwHwYDVR0jBBgw
-FoAUYHtmGkUNl8qJUC99BM00qP/8/UswDQYJKoZIhvcNAQEFBQADggEBAFCxoKeS
-Lf9y7AZDmXVgFuCRo8YQbVXxTAwzAoB/SVGum+qY9XBh2n/C3QlVvvrg3zT270eR
-CiSlB3j9JQSsXpjlc7AsLPtHZ2hQYmNrjv+zFvioLL6+pIBNvtOJxYSX2KJJ6pgf
-kDGiGDt940TlEsJt2ANsFRU8bulU4ekJxssb5Pk90CBbbg3igqqSmV9uIjipPo3d
-FimnrVhx4c8VtRWjqVoTj5Ru53MhPX09v/LQMYqPWUgZ92mY93yBYPTNUb2lOE+t
-kmOUnKT+D5FW5Y8D+6TiU5Y8fLDnHbofNb7HNC9LzdpknBSqRlh28DVu/XSjghaH
-SWcKD4Yx4Bgb9EQ=
------END CERTIFICATE-----
-
 subject= /C=US/O=WoSign, Inc./CN=WoSign Premium Server Authority
 serial=64D1B8FD4F3656C28DC536B5D5AEA2E4
 -----BEGIN CERTIFICATE-----
@@ -8893,35 +8622,6 @@
 -----END CERTIFICATE-----
 
 subject= /OU=Organization Validation CA/O=GlobalSign/CN=GlobalSign Organization Validation CA
-serial=04000000000111DFE86C66
------BEGIN CERTIFICATE-----
-MIIEZzCCA0+gAwIBAgILBAAAAAABEd/obGYwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw0wNzA0MTExMjAw
-MDBaFw0xNDAxMjcxMTAwMDBaMGoxIzAhBgNVBAsTGk9yZ2FuaXphdGlvbiBWYWxp
-ZGF0aW9uIENBMRMwEQYDVQQKEwpHbG9iYWxTaWduMS4wLAYDVQQDEyVHbG9iYWxT
-aWduIE9yZ2FuaXphdGlvbiBWYWxpZGF0aW9uIENBMIIBIjANBgkqhkiG9w0BAQEF
-AAOCAQ8AMIIBCgKCAQEAoS/EvM6HA+lnwYnI5ZP8fbStnvZjTmronCxziaIB9I8h
-+P0lnVgWbYb27klXdX516iIRfj37x0JB3PzFDJFVgHvrZDMdm/nKOOmrxiVDUSVA
-9OR+GFVqqY8QOkAe1leD738vNC8t0vZTwhkNt+3JgfVGLLQjQl6dEwN17Opq/Fd8
-yTaXO5jcExPs7EH6XTTquZPnEBZlzJyS/fXFnT5KuQn85F8eaV9N9FZyRLEdIwPI
-NvZliMi/ORZFjh4mbFEWxSoAOMWkE2mVfasBO6jEFLSA2qwaRCDV/qkGexQnr+Aw
-Id2Q9KnVIxkuHgPmwd+VKeTBlEPdPpCqy0vJvorTOQIDAQABo4IBHzCCARswDgYD
-VR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFH1tKuxm
-q6dRNqsCafFwj8RZC5ofMEsGA1UdIAREMEIwQAYJKwYBBAGgMgEUMDMwMQYIKwYB
-BQUHAgEWJWh0dHA6Ly93d3cuZ2xvYmFsc2lnbi5uZXQvcmVwb3NpdG9yeS8wMwYD
-VR0fBCwwKjAooCagJIYiaHR0cDovL2NybC5nbG9iYWxzaWduLm5ldC9yb290LmNy
-bDARBglghkgBhvhCAQEEBAMCAgQwIAYDVR0lBBkwFwYKKwYBBAGCNwoDAwYJYIZI
-AYb4QgQBMB8GA1UdIwQYMBaAFGB7ZhpFDZfKiVAvfQTNNKj//P1LMA0GCSqGSIb3
-DQEBBQUAA4IBAQA3qI82eQA8GOgaxfJ7IihrvxmPF5rtpsTWodZjLXv7BFso2sz5
-tu4CVBnebJHyYQ39fyggzI820WGHoFlJqgeW3vmzLPm17hUpM820E53HkM5NfPJa
-EYd7+tSN0S9VmRpf7xYIsT3SPR7LtfBXl1I6EmNitvK8zeKmnBfOKODGD1rsv3C9
-WudUvvHPxj2fX3raty5l6sLT6ce6vk3L2jOuVZ2uFPYyCGLhieQ0KnU8KgWpK1A4
-u1mGpoRahMO9Q7qfHxUFzrV3DdTdL0nI/liVS7xOlhMAHpy4J3dxHcRhy/QejDOz
-AGcNt7KsjD063DgvZC0AgYk12OK5MRf+Ol/R
------END CERTIFICATE-----
-
-subject= /OU=Organization Validation CA/O=GlobalSign/CN=GlobalSign Organization Validation CA
 serial=0400000000011E44A5F52A
 -----BEGIN CERTIFICATE-----
 MIIEZzCCA0+gAwIBAgILBAAAAAABHkSl9SowDQYJKoZIhvcNAQEFBQAwVzELMAkG
@@ -9125,35 +8825,6 @@
 -----END CERTIFICATE-----
 
 subject= /C=BE/OU=Domain Validation CA/O=GlobalSign nv-sa/CN=GlobalSign Domain Validation CA
-serial=0400000000011256AD6204
------BEGIN CERTIFICATE-----
-MIIEbjCCA1agAwIBAgILBAAAAAABElatYgQwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw0wNzA1MDQxMDAw
-MDBaFw0xNDAxMjcxMTAwMDBaMHExCzAJBgNVBAYTAkJFMR0wGwYDVQQLExREb21h
-aW4gVmFsaWRhdGlvbiBDQTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTEoMCYG
-A1UEAxMfR2xvYmFsU2lnbiBEb21haW4gVmFsaWRhdGlvbiBDQTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBALSfSeaznwFVNtA2lWzLFlpscrXineL6OekK
-3HNcoDt2bQUokw2lQvPoy+7TMxoTJwrfXNFUYmqaFzbWPFiHHrJmH1VpK4lWR7TC
-UAzlXcH9KRtmc0P0b9EUTyptSFI69eSQP96y9BDV+fqslg0QMiPS01GnlYVQ+g8p
-naeITg0xm0RBjkEvbpoatLalWfFJWQl+fknTaTNLAJLFG0Igafhk39inRNGQXv05
-rWt9/tWLpAFk9qe0IITMBS8n7h7VJJauhEOkPkPzO5nX+fLePRnt0GXxScpI0jh9
-xkjXcmG4xsJnCthlWv1b88X9voxpz5kgtursOYDpZqjuPZ1Ge4cCAwEAAaOCAR8w
-ggEbMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMEsGA1UdIARE
-MEIwQAYJKwYBBAGgMgEKMDMwMQYIKwYBBQUHAgEWJWh0dHA6Ly93d3cuZ2xvYmFs
-c2lnbi5uZXQvcmVwb3NpdG9yeS8wHQYDVR0OBBYEFDYSTp5xxCZB8frxKUy/F6RT
-KLbrMDMGA1UdHwQsMCowKKAmoCSGImh0dHA6Ly9jcmwuZ2xvYmFsc2lnbi5uZXQv
-cm9vdC5jcmwwEQYJYIZIAYb4QgEBBAQDAgIEMCAGA1UdJQQZMBcGCisGAQQBgjcK
-AwMGCWCGSAGG+EIEATAfBgNVHSMEGDAWgBRge2YaRQ2XyolQL30EzTSo//z9SzAN
-BgkqhkiG9w0BAQUFAAOCAQEAwyV5SvrNJm9AariUG+ag4WvSMZo/ifvFCiJ1ev27
-QDScvJ6FSVl6lcwNLUQLx8sVEB7S3ON46l/1NVGmyD85kBWeXxXJt49da2OZaOFu
-XPydNzMKrodwSqLjqyZ9cwfk9wqqdY+m7psE0QVDdBq61MKdf7egbO0WmmdAVquD
-n0yc6yg0H43mWg9pQNmpnmr5iX/Q+IyzSC4LT/H5z/UOEQIMiRZzU10s0+/eXwsl
-utVOj4WQP4iTeaUgrP+wisvLpVV4gzlBMqpdJZOn6u4YcrXiHX8IdBG2ASdOls+o
-8Cr5UqwkGhmen2xSfIs6plTewcchfTrTvBqobfK/33bKAw==
------END CERTIFICATE-----
-
-subject= /C=BE/OU=Domain Validation CA/O=GlobalSign nv-sa/CN=GlobalSign Domain Validation CA
 serial=0400000000011E44A5F895
 -----BEGIN CERTIFICATE-----
 MIIEbjCCA1agAwIBAgILBAAAAAABHkSl+JUwDQYJKoZIhvcNAQEFBQAwVzELMAkG
@@ -9443,35 +9114,6 @@
 nxvyqMJTRbJlhAPtuwsSQwXA4vYGRONOE1WK2MwRzS03zNF9RPDz8ifpHdc=
 -----END CERTIFICATE-----
 
-subject= /OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign
-serial=040000000001220D3C1263
------BEGIN CERTIFICATE-----
-MIIEeTCCA2GgAwIBAgILBAAAAAABIg08EmMwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw0wOTA2MjMxMjAw
-MDBaFw0xNDAxMjgxMjAwMDBaMEwxIDAeBgNVBAsTF0dsb2JhbFNpZ24gUm9vdCBD
-QSAtIFIyMRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAps8kDr4ubyiZRULEqz4h
-VJsL03+EcPoSs8u/h1/Gf4bTsjBc1v2t8Xvc5fhglgmSEPXQU977e35ziKxSiHtK
-pspJpl6op4xaEbx6guu+jOmzrJYlB5dKmSoHL7Qed7+KD7UCfBuWuMW5Oiy81hK5
-61l94tAGhl9eSWq1OV6INOy8eAwImIRsqM1LtKB9DHlN8LgtyyHK1WxbfeGgKYSh
-+dOUScskYpEgvN0L1dnM+eonCitzkcadG6zIy+jgoPQvkItN+7A2G/YZeoXgbfJh
-E4hcn+CTClGXilrOr6vV96oJqmC93Nlf33KpYBNeAAHJSvo/pOoHAyECjoLKA8Kb
-jwIDAQABo4IBTzCCAUswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8w
-HQYDVR0OBBYEFJviB1dnHB7AagbeWbSaLd/cGYYuMEYGA1UdIAQ/MD0wOwYEVR0g
-ADAzMDEGCCsGAQUFBwIBFiVodHRwOi8vd3d3Lmdsb2JhbHNpZ24ubmV0L3JlcG9z
-aXRvcnkvMDMGA1UdHwQsMCowKKAmoCSGImh0dHA6Ly9jcmwuZ2xvYmFsc2lnbi5u
-ZXQvcm9vdC5jcmwwSQYIKwYBBQUHAQEEPTA7MDkGCCsGAQUFBzABhi1odHRwOi8v
-b2NzcC5nbG9iYWxzaWduLmNvbS9FeHRlbmRlZFNTTENBQ3Jvc3MwIAYDVR0lBBkw
-FwYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMB8GA1UdIwQYMBaAFGB7ZhpFDZfKiVAv
-fQTNNKj//P1LMA0GCSqGSIb3DQEBBQUAA4IBAQCXTMERzOLPKMnyiDIRQJ8KOZIr
-4DvwdfvH3+ObuSf2x8WbwHIToi+6e7ByzAH/lUwaTHeYsDeflHps1nrEyz1RflKX
-m80XJ87nbG2lqGTtptaVeL+vxoa5oVhman6sof/W08Uge1321P8RJORKdnGkTesA
-4t6K5EFU4YqPFWVzHBooLh+/je/bTG7riXIpIGI6FuHTP6Yqz78wseA3BJxLD/vB
-FfX8gnrZdqL0bar14yjtKiobnAJiItcRCl2rma5VQVXMXsy69QswKa9J+TeP3wsS
-26Cx70sn+PTD3+lbyNF7A0gvQkJ7iWVHm6PH8QQ1AnU9N+6uV0dgb9boQwpP
------END CERTIFICATE-----
-
 subject= /C=JP/O=SECOM Trust Systems CO.,LTD./CN=SECOM Passport for Web EV CA
 serial=1033C4D7
 -----BEGIN CERTIFICATE-----
@@ -9679,36 +9321,6 @@
 Il5s0yo=
 -----END CERTIFICATE-----
 
-subject= /C=US/O=Entrust, Inc./OU=www.entrust.net/rpa is incorporated by reference/OU=(c) 2009 Entrust, Inc./CN=Entrust Certification Authority - L1C
-serial=4C183721
------BEGIN CERTIFICATE-----
-MIIEgTCCA+qgAwIBAgIETBg3ITANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
-VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
-ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
-KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
-ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xMDA4
-MjMxODU1NTJaFw0xNDAxMjMxOTI1NTJaMIGxMQswCQYDVQQGEwJVUzEWMBQGA1UE
-ChMNRW50cnVzdCwgSW5jLjE5MDcGA1UECxMwd3d3LmVudHJ1c3QubmV0L3JwYSBp
-cyBpbmNvcnBvcmF0ZWQgYnkgcmVmZXJlbmNlMR8wHQYDVQQLExYoYykgMjAwOSBF
-bnRydXN0LCBJbmMuMS4wLAYDVQQDEyVFbnRydXN0IENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5IC0gTDFDMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAl6Mt
-PJ7eBdoTwhGNnY7jf8dLflqfs/9iq3PIKGu6EGSChxPNVxj/KM7A5g4GkVApg9Hy
-wyrb2NtOBMwA64u2lty8qvpSdwTB2xnkrpz9PIsD7028GgNl+cGxP3KG8jiqGa4Q
-iHgo2nXDPQKCApy5wWV3diRMmPdtMTj72/7bNwJ2oRiXpszeIAlJNiRpQvbkN2Lx
-WW2pPO00nKOO29w61/cKb+8u2NWTWnrtCElo4kHjWpDBhlX8UUOd4LLEZ7TLMjEl
-8FSfS9Fv29Td/K9ebHiQld7KOki5eTybGdZ1BaD5iNfB6KUJ5BoV3IcjqrJ1jGMl
-h9j4PabCzGb/pWZoVQIDAQABo4IBDDCCAQgwDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
-EwEB/wQFMAMBAf8wMwYIKwYBBQUHAQEEJzAlMCMGCCsGAQUFBzABhhdodHRwOi8v
-b2NzcC5lbnRydXN0Lm5ldDAzBgNVHR8ELDAqMCigJqAkhiJodHRwOi8vY3JsLmVu
-dHJ1c3QubmV0L3NlcnZlcjEuY3JsMDsGA1UdIAQ0MDIwMAYEVR0gADAoMCYGCCsG
-AQUFBwIBFhpodHRwOi8vd3d3LmVudHJ1c3QubmV0L0NQUzAdBgNVHQ4EFgQUHvGr
-iQb4SQ8BM3fuFHruGXyTKE0wHwYDVR0jBBgwFoAU8BdiE1U9s/8KAGv7UISX8+1i
-0BowDQYJKoZIhvcNAQEFBQADgYEAKJ7MZtfG/o+ZChn+OEd3R8MvkOUlYWqOFjLJ
-9e02GzByZQ3uBPE9xqYmCmIA3WOUJCr3PTq1b6xBt06+BfFlPKyBV3D9GtWiljPZ
-cvKsBkVq0fi1rpCe/KME2euOXXcD/WIpsXXpcGxeHoWHHH6A2Xzmcq2pPdcUnFS8
-nvco65s=
------END CERTIFICATE-----
-
 subject= /C=NL/O=Staat der Nederlanden/CN=Staat der Nederlanden Overheid CA
 serial=989A79
 -----BEGIN CERTIFICATE-----
@@ -34985,30 +34597,6 @@
 JWhyg2e9VrCNAb0q98xLvYeluocgTEIRQa0QFzuM
 -----END CERTIFICATE-----
 
-subject= /C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA
-serial=-3B44273F3500A95AEE2CA9699E66DDD0
------BEGIN CERTIFICATE-----
-MIIDezCCAmOgAwIBAgIQxLvYwMr/VqUR01aWYZkiMDANBgkqhkiG9w0BAQQFADAd
-MRswGQYDVQQDExJSb290IFNHQyBBdXRob3JpdHkwHhcNOTkwODIwMDAzMDAxWhcN
-MTQwMTI4MDcwMDAwWjBXMQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2ln
-biBudi1zYTEQMA4GA1UECxMHUm9vdCBDQTEbMBkGA1UEAxMSR2xvYmFsU2lnbiBS
-b290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2g7mmY3Oo+NP
-in778YuDJWvqSB/xKrC5lREEvfBj0eJnZs8c3c8bSCvujYmOmq8pgGWr6cctEsur
-HExwB6E9CjDNFY1P+N3UjFAVHO9Q7sQu9/zpUvKRfeBt1TUwjl5Dc/JB6dVq47KJ
-OlY5OG8GPIhpWypNxadUuGyJzJv5PMrl/Yn1EjySeJbW3HRuk0Rh0Y3HRrJ1Dobo
-GYrVbWzVeBaVounICjjr8iQTT3NUkxOFOhu8HjS1iwWMuXeLsdsfIJGrCVNukM57
-N3S5cEeRIlFjFnmusa5BJgjIGSvRRqpI1mQq14M0/ywqwWwZQ0oHhefTfPYhaO/q
-8lKff5OQzwIDAQABo30wezANBgNVHQoEBjAEAwIHgDAgBgNVHSUEGTAXBgorBgEE
-AYI3CgMDBglghkgBhvhCBAEwSAYDVR0BBEEwP4AQDScp5AUql7R3WDVHky0GuKEf
-MB0xGzAZBgNVBAMTElJvb3QgU0dDIEF1dGhvcml0eYIKIJ0R0Q5/e4V0gDANBgkq
-hkiG9w0BAQQFAAOCAQEA0oLuVTYlV0K5y6hwnEKORqfXmZHSzKLb8qDGv8bbRfF6
-jtwDY0qblJmmD71Mym3kMWFqCBBNHkfUEVkzAmVprhPb8WV5ciV5IcS0JcJs/4x+
-lt9pwEUkoWlLpqYE54HeytuIo6Z8kc+GR3aX5pf3Gi7XA/A3O912lW0mdFFJRNY+
-hLcDdG1mZ6I2i4Tz7fmoneSoGgnc0gGSTx89WEG76awDm+jwlsDNfgHb4qk+ZuAk
-5ux/bRhTOZ3Aib9geL7LBzd3nX2OjRcK128X2uWK4ecIxBPleitcbfeeIMSNT+0G
-KQeveZLyX/mqIRXLZjl30y0ZJGiEX6lIRlrbHbRBHw==
------END CERTIFICATE-----
-
 subject= /OU=Extended Validation CA/O=GlobalSign/CN=GlobalSign Extended Validation CA
 serial=040000000001220D3C0F75
 -----BEGIN CERTIFICATE-----
diff --git a/lib/endpoints-1.0/endpoints/api_config.py b/lib/endpoints-1.0/endpoints/api_config.py
index f751e6b..4015c22 100644
--- a/lib/endpoints-1.0/endpoints/api_config.py
+++ b/lib/endpoints-1.0/endpoints/api_config.py
@@ -36,6 +36,14 @@
 
 
 
+
+
+
+
+
+
+
+
 try:
   import json
 except ImportError:
diff --git a/lib/endpoints-1.0/endpoints/protojson.py b/lib/endpoints-1.0/endpoints/protojson.py
index 554cfac..ed988be 100644
--- a/lib/endpoints-1.0/endpoints/protojson.py
+++ b/lib/endpoints-1.0/endpoints/protojson.py
@@ -66,6 +66,25 @@
 
     return super(EndpointsProtoJson, self).encode_field(field, value)
 
+  @staticmethod
+  def __pad_value(value, pad_len_multiple, pad_char):
+    """Add padding characters to the value if needed.
+
+    Args:
+      value: The string value to be padded.
+      pad_len_multiple: Pad the result so its length is a multiple
+          of pad_len_multiple.
+      pad_char: The character to use for padding.
+
+    Returns:
+      The string value with padding characters added.
+    """
+    assert pad_len_multiple > 0
+    assert len(pad_char) == 1
+    padding_length = (pad_len_multiple -
+                      (len(value) % pad_len_multiple)) % pad_len_multiple
+    return value + pad_char * padding_length
+
   def decode_field(self, field, value):
     """Decode a JSON value to a python value.
 
@@ -83,7 +102,8 @@
       try:
 
 
-        return base64.urlsafe_b64decode(str(value))
+        padded_value = self.__pad_value(str(value), 4, '=')
+        return base64.urlsafe_b64decode(padded_value)
       except (TypeError, UnicodeEncodeError), err:
         raise messages.DecodeError('Base64 decoding error: %s' % err)
 
diff --git a/old_dev_appserver.py b/old_dev_appserver.py
index 2af168b..de775b6 100644
--- a/old_dev_appserver.py
+++ b/old_dev_appserver.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/php/sdk/google/appengine/api/app_identity/AppIdentityService.php b/php/sdk/google/appengine/api/app_identity/AppIdentityService.php
index 0bcdbcc..b2bc716 100644
--- a/php/sdk/google/appengine/api/app_identity/AppIdentityService.php
+++ b/php/sdk/google/appengine/api/app_identity/AppIdentityService.php
@@ -31,12 +31,6 @@
 use google\appengine\runtime\ApiProxy;
 use google\appengine\runtime\ApplicationError;
 
-require_once 'google/appengine/api/app_identity/app_identity_service_pb.php';
-require_once 'google/appengine/api/app_identity/AppIdentityException.php';
-require_once 'google/appengine/api/app_identity/PublicCertificate.php';
-require_once 'google/appengine/runtime/ApiProxy.php';
-require_once 'google/appengine/runtime/ApplicationError.php';
-
 /**
  * The AppIdentityService allows you to sign arbitrary byte
  * array using per app private key maintained by App Engine. You can also
@@ -175,7 +169,8 @@
     } else if (is_array($scopes)) {
       $memcache_key .= implode(self::DOMAIN_SEPARATOR, $scopes);
     } else {
-      throw new \InvalidArgumentException('Invalid scope ' . $scopes);
+      throw new \InvalidArgumentException(
+          'Invalid scope ' . htmlspecialchars($scopes));
     }
 
     $memcache = new \Memcache();
@@ -222,11 +217,12 @@
           $req->addScope($scope);
         } else {
           throw new \InvalidArgumentException(
-            'Invalid scope ' . $scope);
+            'Invalid scope ' . htmlspecialchars($scope));
         }
       }
     } else {
-      throw new \InvalidArgumentException('Invalid scope ' . $scopes);
+      throw new \InvalidArgumentException(
+          'Invalid scope ' . htmlspecialchars($scopes));
     }
 
     try {
diff --git a/php/sdk/google/appengine/api/app_identity/AppIdentityServiceTest.php b/php/sdk/google/appengine/api/app_identity/AppIdentityServiceTest.php
index 11e17c7..61c3b31 100644
--- a/php/sdk/google/appengine/api/app_identity/AppIdentityServiceTest.php
+++ b/php/sdk/google/appengine/api/app_identity/AppIdentityServiceTest.php
@@ -19,11 +19,6 @@
  *
  */
 
-require_once 'google/appengine/api/app_identity/app_identity_service_pb.php';
-require_once 'google/appengine/api/app_identity/AppIdentityService.php';
-require_once 'google/appengine/runtime/Memcache.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\AppIdentityServiceError\ErrorCode;
 use google\appengine\api\app_identity\AppIdentityService;
 use google\appengine\testing\ApiProxyTestBase;
diff --git a/php/sdk/google/appengine/api/cloud_storage/CloudStorageTools.php b/php/sdk/google/appengine/api/cloud_storage/CloudStorageTools.php
index 456f35f..a97e2d1 100644
--- a/php/sdk/google/appengine/api/cloud_storage/CloudStorageTools.php
+++ b/php/sdk/google/appengine/api/cloud_storage/CloudStorageTools.php
@@ -29,6 +29,7 @@
 use google\appengine\ImagesGetUrlBaseResponse;
 use google\appengine\ImagesServiceError;
 use google\appengine\ext\cloud_storage_streams\CloudStorageClient;
+use google\appengine\ext\cloud_storage_streams\CloudStorageStreamWrapper;
 use google\appengine\files\GetDefaultGsBucketNameRequest;
 use google\appengine\files\GetDefaultGsBucketNameResponse;
 use google\appengine\runtime\ApiProxy;
@@ -57,7 +58,7 @@
   // - The path format is less secure and should only be used for the specific
   //   case when the subdomain format fails.
   const PRODUCTION_HOST_SUBDOMAIN_FORMAT = "%s.storage.googleapis.com";
-  const PRODUCTION_HOST_PATH_FORMAT = "storage.googleapis.com/%s";
+  const PRODUCTION_HOST_PATH_FORMAT = "storage.googleapis.com";
 
   // The GCS filename format (bucket, object).
   const GS_FILENAME_FORMAT = "gs://%s/%s";
@@ -190,7 +191,7 @@
 
     if (!empty($extra_options)) {
       throw new \InvalidArgumentException('Invalid options supplied: ' .
-          implode(',', $extra_options));
+          htmlspecialchars(implode(',', $extra_options)));
     }
 
     try {
@@ -236,7 +237,7 @@
          self::$get_image_serving_url_default_options));
     if (!empty($extra_options)) {
       throw new \InvalidArgumentException('Invalid options supplied: ' .
-          implode(',', $extra_options));
+          htmlspecialchars(implode(',', $extra_options)));
     }
     $options = array_merge(self::$get_image_serving_url_default_options,
                            $options);
@@ -342,7 +343,8 @@
 
     if (!self::parseFilename($gs_filename, $bucket, $object)) {
       throw new \InvalidArgumentException(
-          sprintf('Invalid Google Cloud Storage filename: %s', $gs_filename));
+          sprintf('Invalid Google Cloud Storage filename: %s',
+                  htmlspecialchars($gs_filename)));
     }
 
     if (self::isDevelServer()) {
@@ -353,14 +355,14 @@
       // Use path format for HTTPS URL when the bucket name contains "." to
       // avoid SSL certificate validation issue.
       if ($use_https && strpos($bucket, '.') !== false) {
-        $format = self::PRODUCTION_HOST_PATH_FORMAT;
+        $host = self::PRODUCTION_HOST_PATH_FORMAT;
+        $path = sprintf('/%s%s', $bucket, $object);
       } else {
-        $format = self::PRODUCTION_HOST_SUBDOMAIN_FORMAT;
+        $host = sprintf(self::PRODUCTION_HOST_SUBDOMAIN_FORMAT, $bucket);
+        $path = strlen($object) > 0 ? $object : '/';
       }
 
       $scheme = $use_https ? 'https' : 'http';
-      $host = sprintf($format, $bucket);
-      $path = $object;
     }
 
     return sprintf('%s://%s%s',
@@ -382,12 +384,14 @@
   public static function getFilename($bucket, $object) {
     if (self::validateBucketName($bucket) === false) {
       throw new \InvalidArgumentException(
-          sprintf('Invalid cloud storage bucket name \'%s\'', $bucket));
+          sprintf('Invalid cloud storage bucket name \'%s\'',
+                  htmlspecialchars($bucket)));
     }
 
     if (self::validateObjectName($object) === false) {
       throw new \InvalidArgumentException(
-          sprintf('Invalid cloud storage object name \'%s\'', $object));
+          sprintf('Invalid cloud storage object name \'%s\'',
+                  htmlspecialchars($object)));
     }
 
     return sprintf(self::GS_FILENAME_FORMAT, $bucket, $object);
@@ -547,7 +551,7 @@
 
     if (!empty($extra_options)) {
       throw new \InvalidArgumentException('Invalid options supplied: ' .
-          implode(',', $extra_options));
+          htmlspecialchars(implode(',', $extra_options)));
     }
 
     // Determine the range to send
@@ -615,6 +619,48 @@
   }
 
   /**
+   * Get metadata from a Google Cloud Storage file pointer resource.
+   *
+   * @param resource $handle A Google Cloud Storage file pointer resource that
+   * is typically created using fopen().
+   *
+   * @return array An array that maps metadata keys to values.
+   *
+   * @throws \InvalidArgumentException If $handler is not a Google Cloud Storage
+   * file pointer resource.
+   */
+  public static function getMetaData($handle) {
+    $wrapper = self::getStreamWrapperFromFileHandle($handle);
+    return $wrapper->getMetaData();
+  }
+
+  /**
+   * Get content type from a Google Cloud Storage file pointer resource.
+   *
+   * @param resource $handle A Google Cloud Storage file pointer resource that
+   * is typically created using fopen().
+   *
+   * @return string The content type of the Google Cloud Storage object.
+   *
+   * @throws \InvalidArgumentException If $handler is not a Google Cloud Storage
+   * file pointer resource.
+   */
+  public static function getContentType($handle) {
+    $wrapper = self::getStreamWrapperFromFileHandle($handle);
+    return $wrapper->getContentType();
+  }
+
+  private static function getStreamWrapperFromFileHandle($handle) {
+    $wrapper = stream_get_meta_data($handle)['wrapper_data'];
+    if (!$wrapper instanceof CloudStorageStreamWrapper) {
+      throw new \InvalidArgumentException(
+          '$handle must be a Google Cloud Storage file pointer resource');
+    }
+
+    return $wrapper;
+  }
+
+  /**
    * Validates the format of a GCS filename and strips the gs:// prefix.
    *
    * @param string $filename The google cloud storage filename, in the format
diff --git a/php/sdk/google/appengine/api/cloud_storage/CloudStorageToolsTest.php b/php/sdk/google/appengine/api/cloud_storage/CloudStorageToolsTest.php
index 46778fd..0ed5546 100644
--- a/php/sdk/google/appengine/api/cloud_storage/CloudStorageToolsTest.php
+++ b/php/sdk/google/appengine/api/cloud_storage/CloudStorageToolsTest.php
@@ -20,9 +20,6 @@
  */
 namespace google\appengine\api\cloud_storage;
 
-require_once 'google/appengine/api/cloud_storage/CloudStorageTools.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\testing\ApiProxyTestBase;
 use google\appengine\BlobstoreServiceError;
 use google\appengine\ImagesServiceError;
@@ -661,34 +658,39 @@
   }
 
   public function testGetPublicUrlInProduction() {
-    $object = "object";
     putenv("SERVER_SOFTWARE=Google App Engine/1.8.6");
 
-    $bucket_with_a_dot = "bucket.name";
-    $gs_filename = sprintf("gs://%s/%s", $bucket_with_a_dot, $object);
-
     // Get HTTPS URL for bucket containing "." - should use the path format to
     // avoid SSL certificate validation issue.
+    $expected = "https://storage.googleapis.com/bucket.name";
+    $actual = CloudStorageTools::getPublicUrl("gs://bucket.name", true);
+    $this->assertEquals($expected, $actual);
     $expected = "https://storage.googleapis.com/bucket.name/object";
-    $actual = CloudStorageTools::getPublicUrl($gs_filename, true);
+    $actual = CloudStorageTools::getPublicUrl("gs://bucket.name/object", true);
     $this->assertEquals($expected, $actual);
 
     // Get HTTP URL for bucket contain "." - should use the subdomain format.
+    $expected = "http://bucket.name.storage.googleapis.com/";
+    $actual = CloudStorageTools::getPublicUrl("gs://bucket.name/", false);
+    $this->assertEquals($expected, $actual);
     $expected = "http://bucket.name.storage.googleapis.com/object";
-    $actual = CloudStorageTools::getPublicUrl($gs_filename, false);
+    $actual = CloudStorageTools::getPublicUrl("gs://bucket.name/object", false);
     $this->assertEquals($expected, $actual);
 
-    $bucket_without_dot = "bucket";
-    $gs_filename = sprintf("gs://%s/%s", $bucket_without_dot, $object);
-
     // Get HTTPS URL for bucket without "." - should use the subdomain format.
+    $expected = "https://bucket.storage.googleapis.com/";
+    $actual = CloudStorageTools::getPublicUrl("gs://bucket", true);
+    $this->assertEquals($expected, $actual);
     $expected = "https://bucket.storage.googleapis.com/object";
-    $actual = CloudStorageTools::getPublicUrl($gs_filename, true);
+    $actual = CloudStorageTools::getPublicUrl("gs://bucket/object", true);
     $this->assertEquals($expected, $actual);
 
     // Get HTTP URL for bucket without "." - should use the subdomain format.
+    $expected = "http://bucket.storage.googleapis.com/";
+    $actual = CloudStorageTools::getPublicUrl("gs://bucket", false);
+    $this->assertEquals($expected, $actual);
     $expected = "http://bucket.storage.googleapis.com/object";
-    $actual = CloudStorageTools::getPublicUrl($gs_filename, false);
+    $actual = CloudStorageTools::getPublicUrl("gs://bucket/object", false);
     $this->assertEquals($expected, $actual);
   }
 
diff --git a/php/sdk/google/appengine/api/log/LogService.php b/php/sdk/google/appengine/api/log/LogService.php
index 367edec..c1d2c7e 100644
--- a/php/sdk/google/appengine/api/log/LogService.php
+++ b/php/sdk/google/appengine/api/log/LogService.php
@@ -52,17 +52,6 @@
 final class LogService {
    use ApiProxyAccess;
 
-   // Map syslog priority levels to appengine severity levels.
-   private static $syslog_priority_map = array(
-       LOG_EMERG => self::LEVEL_CRITICAL,
-       LOG_ALERT => self::LEVEL_CRITICAL,
-       LOG_CRIT => self::LEVEL_CRITICAL,
-       LOG_ERR => self::LEVEL_ERROR,
-       LOG_WARNING => self::LEVEL_WARNING,
-       LOG_NOTICE => self::LEVEL_INFO,
-       LOG_INFO => self::LEVEL_INFO,
-       LOG_DEBUG => self::LEVEL_DEBUG);
-
   /**
    * Constants for application log levels.
    */
@@ -77,7 +66,19 @@
    */
   const MAX_BATCH_SIZE = 1000;
 
-  # Validation patterns copied from google/appengine/api/logservice/logservice.py
+  // Map syslog priority levels to appengine severity levels.
+  private static $syslog_priority_map = array(
+      LOG_EMERG => self::LEVEL_CRITICAL,
+      LOG_ALERT => self::LEVEL_CRITICAL,
+      LOG_CRIT => self::LEVEL_CRITICAL,
+      LOG_ERR => self::LEVEL_ERROR,
+      LOG_WARNING => self::LEVEL_WARNING,
+      LOG_NOTICE => self::LEVEL_INFO,
+      LOG_INFO => self::LEVEL_INFO,
+      LOG_DEBUG => self::LEVEL_DEBUG);
+
+
+  // Validation patterns copied from google/appengine/api/logservice/logservice.py
   private static $MAJOR_VERSION_ID_REGEX =
       '/^(?:(?:((?!-)[a-z\d\-]{1,63}):)?)((?!-)[a-z\d\-]{1,100})$/';
   private static $REQUEST_ID_REGEX = '/^[\da-fA-F]+$/';
@@ -240,7 +241,7 @@
             }
             if (!preg_match(self::$MAJOR_VERSION_ID_REGEX, $version)) {
               throw new \InvalidArgumentException(
-                  "Invalid version id $version");
+                  "Invalid version id " . htmlspecialchars($version));
             }
             $request->addModuleVersion()->setVersionId($version);
           }
@@ -256,7 +257,8 @@
           $batch_size = $value;
           break;
         default:
-          throw new \InvalidArgumentException("Invalid option $key");
+          throw new \InvalidArgumentException(
+              "Invalid option " . htmlspecialchars($key));
       }
     }
 
@@ -312,7 +314,8 @@
 
     if (is_string($request_ids)) {
       if (!preg_match(self::$REQUEST_ID_REGEX, $request_ids)) {
-        throw new \InvalidArgumentException("Invalid request id $request_ids");
+        throw new \InvalidArgumentException(
+            "Invalid request id " . htmlspecialchars($request_ids));
       }
       $request->addRequestId($request_ids);
     } else if (is_array($request_ids)) {
@@ -324,7 +327,7 @@
         }
         if (!preg_match(self::$REQUEST_ID_REGEX, $id)) {
           throw new \InvalidArgumentException(
-              "Invalid request id $id");
+              "Invalid request id " . htmlspecialchars($id));
         }
         $request->addRequestId($id);
       }
@@ -358,7 +361,7 @@
 
   private static function optionTypeException($key, $value, $expected) {
     throw new \InvalidArgumentException(
-        "Option $key must be type $expected but was " .
+        htmlspecialchars("Option $key must be type $expected but was ") .
         self::typeOrClass($value));
   }
 
@@ -378,6 +381,17 @@
     // The time stamp may only represent a date up to 2038 due to 32 bit ints.
     return (double) $datetime->getTimeStamp() * 1e6;
   }
+
+  /**
+   * The GAE PECL extension calls this directly instead of the built-in syslog.
+   */
+  private static function syslog($priority, $message) {
+    $log_level = self::getAppEngineLogLevel($priority);
+    self::log($log_level, $message);
+    if (function_exists('_gae_syslog')) {
+      _gae_syslog($log_level);
+    }
+  }
 }
 
 /**
diff --git a/php/sdk/google/appengine/api/log/LogServiceTest.php b/php/sdk/google/appengine/api/log/LogServiceTest.php
index 34df7b0..e64a63f 100644
--- a/php/sdk/google/appengine/api/log/LogServiceTest.php
+++ b/php/sdk/google/appengine/api/log/LogServiceTest.php
@@ -21,11 +21,6 @@
 
 namespace google\appengine\api\log;
 
-require_once 'google/appengine/api/logservice/log_service_pb.php';
-require_once 'google/appengine/api/log/LogService.php';
-require_once 'google/appengine/api/log/RequestLog.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\LogOffset;
 use google\appengine\LogReadRequest;
 use google\appengine\LogReadResponse;
diff --git a/php/sdk/google/appengine/api/log/RequestLogIterator.php b/php/sdk/google/appengine/api/log/RequestLogIterator.php
index 9365895..68b4664 100644
--- a/php/sdk/google/appengine/api/log/RequestLogIterator.php
+++ b/php/sdk/google/appengine/api/log/RequestLogIterator.php
@@ -19,9 +19,6 @@
 
 namespace google\appengine\api\log;
 
-require_once 'google/appengine/api/log/LogService.php';
-require_once 'google/appengine/api/log/RequestLog.php';
-
 use google\appengine\LogReadRequest;
 use google\appengine\LogReadResponse;
 use google\appengine\runtime\ApiProxy;
diff --git a/php/sdk/google/appengine/api/mail/AdminMessage.php b/php/sdk/google/appengine/api/mail/AdminMessage.php
index 929f6c8..6ea928d 100644
--- a/php/sdk/google/appengine/api/mail/AdminMessage.php
+++ b/php/sdk/google/appengine/api/mail/AdminMessage.php
@@ -19,8 +19,6 @@
 
 namespace google\appengine\api\mail;
 
-require_once 'google/appengine/api/mail/BaseMessage.php';
-
 use google\appengine\base\VoidProto;
 use google\appengine\runtime\ApiProxy;
 use google\appengine\runtime\ApplicationError;
diff --git a/php/sdk/google/appengine/api/mail/AdminMessageTest.php b/php/sdk/google/appengine/api/mail/AdminMessageTest.php
index a396589..96eb02c 100644
--- a/php/sdk/google/appengine/api/mail/AdminMessageTest.php
+++ b/php/sdk/google/appengine/api/mail/AdminMessageTest.php
@@ -19,11 +19,6 @@
  *
  */
 
-require_once 'google/appengine/api/mail_service_pb.php';
-require_once 'google/appengine/api/mail/AdminMessage.php';
-require_once 'google/appengine/runtime/ApplicationError.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\api\mail\AdminMessage;
 use google\appengine\base\VoidProto;
 use google\appengine\MailMessage;
diff --git a/php/sdk/google/appengine/api/mail/BaseMessage.php b/php/sdk/google/appengine/api/mail/BaseMessage.php
index 8aaa01c..a16fe2b 100644
--- a/php/sdk/google/appengine/api/mail/BaseMessage.php
+++ b/php/sdk/google/appengine/api/mail/BaseMessage.php
@@ -19,10 +19,6 @@
 
 namespace google\appengine\api\mail;
 
-require_once 'google/appengine/api/mail_service_pb.php';
-require_once 'google/appengine/runtime/ApiProxy.php';
-require_once 'google/appengine/runtime/ApplicationError.php';
-
 use google\appengine\MailAttachment;
 use google\appengine\MailHeader;
 use google\appengine\MailMessage;
@@ -89,7 +85,8 @@
             $func_name = $allowed_functions[$key];
             call_user_func(array($this, $func_name), $value);
           } else {
-            $error = sprintf("Message received an invalid option: %s", $key);
+            $error = sprintf("Message received an invalid option: %s",
+                             htmlspecialchars($key));
             throw new \InvalidArgumentException($error);
           }
         }
@@ -244,7 +241,9 @@
     } else if (!in_array(strtolower($key), self::$allowed_headers)) {
       // Array keys don't have consistent case.
       $error = sprintf("Input header '%s: %s' is not whitelisted for use with" .
-                       " the Google App Engine Mail Service.", $key, $value);
+                       " the Google App Engine Mail Service.",
+                       htmlspecialchars($key),
+                       htmlspecialchars($value));
       return false;
     }
     return true;
@@ -282,7 +281,7 @@
       case ErrorCode::UNAUTHORIZED_SENDER:
         $error = sprintf("Mail Service Error: Sender (%s) is not an " .
                          "authorized email address.",
-                         $this->message->getSender());
+                         htmlspecialchars($this->message->getSender()));
         throw new \InvalidArgumentException($error);
       case ErrorCode::INVALID_ATTACHMENT_TYPE:
         throw new \InvalidArgumentException(
@@ -319,7 +318,8 @@
    */
   public function setReplyTo($email) {
     if (!$this->checkValidEmail($email)) {
-      throw new \InvalidArgumentException("Invalid reply-to: ". $email);
+      throw new \InvalidArgumentException(
+          "Invalid reply-to: ". htmlspecialchars($email));
     }
     $this->message->setReplyto($email);
   }
@@ -333,7 +333,8 @@
    */
   public function setSender($email) {
     if (!$this->checkValidEmail($email)) {
-      throw new \InvalidArgumentException("Invalid sender: ". $email);
+      throw new \InvalidArgumentException(
+          "Invalid sender: ". htmlspecialchars($email));
     }
     $this->message->setSender($email);
   }
diff --git a/php/sdk/google/appengine/api/mail/Message.php b/php/sdk/google/appengine/api/mail/Message.php
index 60a4249..fe6d71a 100644
--- a/php/sdk/google/appengine/api/mail/Message.php
+++ b/php/sdk/google/appengine/api/mail/Message.php
@@ -19,8 +19,6 @@
 
 namespace google\appengine\api\mail;
 
-require_once 'google/appengine/api/mail/BaseMessage.php';
-
 use google\appengine\base\VoidProto;
 use google\appengine\runtime\ApiProxy;
 use google\appengine\runtime\ApplicationError;
@@ -167,12 +165,12 @@
       return array($email_list);
     } else if (!is_array($email_list)) {
       $error = sprintf("Invalid %s: %s", $err_str, $email_list);
-      throw new \InvalidArgumentException($error);
+      throw new \InvalidArgumentException(htmlspecialchars($error));
     } else {
       foreach($email_list as $email) {
         if (!$this->checkValidEmail($email)) {
           $error = sprintf("Invalid %s: %s", $err_str, $email);
-          throw new \InvalidArgumentException($error);
+          throw new \InvalidArgumentException(htmlspecialchars($error));
         }
       }
       return $email_list;
diff --git a/php/sdk/google/appengine/api/mail/MessageTest.php b/php/sdk/google/appengine/api/mail/MessageTest.php
index e51863a..2f2e55f 100644
--- a/php/sdk/google/appengine/api/mail/MessageTest.php
+++ b/php/sdk/google/appengine/api/mail/MessageTest.php
@@ -19,11 +19,6 @@
  *
  */
 
-require_once 'google/appengine/api/mail_service_pb.php';
-require_once 'google/appengine/api/mail/Message.php';
-require_once 'google/appengine/runtime/ApplicationError.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\base\VoidProto;
 use google\appengine\api\mail\Message;
 use google\appengine\MailMessage;
@@ -101,12 +96,17 @@
   }
 
   public function testCheckValidEmails() {
-    $this->assertEquals($this->setupValidEmailTest("invalid.email"),
-                        "Invalid 'to' recipient: invalid.email");
+    $invalid_email = "invalid.email <script>alert('wello')</script>";
+    $converted_invalid_email = htmlspecialchars($invalid_email);
 
-    $array_emails = array("test@test.com", "invalid.email");
+    $this->assertEquals(
+        $this->setupValidEmailTest(
+            $invalid_email),
+        "Invalid 'to' recipient: " .$converted_invalid_email);
+
+    $array_emails = array("test@test.com", $invalid_email);
     $this->assertEquals($this->setupValidEmailTest($array_emails),
-                        "Invalid 'to' recipient: invalid.email");
+                        "Invalid 'to' recipient: " . $converted_invalid_email);
   }
 
   public function testAddHeaderNonWhitelisted() {
diff --git a/php/sdk/google/appengine/api/modules/InvalidModuleStateException.php b/php/sdk/google/appengine/api/modules/InvalidModuleStateException.php
index d3fdda8..e1327dd 100644
--- a/php/sdk/google/appengine/api/modules/InvalidModuleStateException.php
+++ b/php/sdk/google/appengine/api/modules/InvalidModuleStateException.php
@@ -19,8 +19,6 @@
 
 namespace google\appengine\api\modules;
 
-require_once "google/appengine/api/modules/ModulesException.php";
-
 /**
  * Thrown when the current state was unexpected when starting/stopping a
  * module.
diff --git a/php/sdk/google/appengine/api/modules/ModulesService.php b/php/sdk/google/appengine/api/modules/ModulesService.php
index 85532b4..074b63f 100644
--- a/php/sdk/google/appengine/api/modules/ModulesService.php
+++ b/php/sdk/google/appengine/api/modules/ModulesService.php
@@ -21,13 +21,6 @@
 
 namespace google\appengine\api\modules;
 
-require_once 'google/appengine/api/modules/modules_service_pb.php';
-require_once "google/appengine/api/modules/InvalidModuleStateException.php";
-require_once "google/appengine/api/modules/ModulesException.php";
-require_once "google/appengine/api/modules/TransientModulesException.php";
-require_once 'google/appengine/runtime/ApiProxy.php';
-require_once 'google/appengine/runtime/ApplicationError.php';
-
 use google\appengine\runtime\ApiProxy;
 use google\appengine\runtime\ApplicationError;
 use google\appengine\GetDefaultVersionRequest;
diff --git a/php/sdk/google/appengine/api/modules/ModulesServiceTest.php b/php/sdk/google/appengine/api/modules/ModulesServiceTest.php
index daf055b..bc535fc 100644
--- a/php/sdk/google/appengine/api/modules/ModulesServiceTest.php
+++ b/php/sdk/google/appengine/api/modules/ModulesServiceTest.php
@@ -21,11 +21,6 @@
 
 namespace google\appengine\api\modules;
 
-require_once 'google/appengine/api/modules/modules_service_pb.php';
-require_once 'google/appengine/api/modules/ModulesService.php';
-require_once 'google/appengine/runtime/ApplicationError.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\runtime\ApplicationError;
 use google\appengine\testing\ApiProxyTestBase;
 use google\appengine\GetDefaultVersionRequest;
diff --git a/php/sdk/google/appengine/api/modules/TransientModulesException.php b/php/sdk/google/appengine/api/modules/TransientModulesException.php
index 222feff..c3ab72e 100644
--- a/php/sdk/google/appengine/api/modules/TransientModulesException.php
+++ b/php/sdk/google/appengine/api/modules/TransientModulesException.php
@@ -19,8 +19,6 @@
 
 namespace google\appengine\api\modules;
 
-require_once "google/appengine/api/modules/ModulesException.php";
-
 /**
  * Thrown when a temporary error occurred. Retry the operation.
  */
diff --git a/php/sdk/google/appengine/api/taskqueue/PushQueue.php b/php/sdk/google/appengine/api/taskqueue/PushQueue.php
index ac33c0f..607814d 100644
--- a/php/sdk/google/appengine/api/taskqueue/PushQueue.php
+++ b/php/sdk/google/appengine/api/taskqueue/PushQueue.php
@@ -21,14 +21,6 @@
 
 namespace google\appengine\api\taskqueue;
 
-require_once 'google/appengine/api/taskqueue/PushTask.php';
-require_once 'google/appengine/api/taskqueue/taskqueue_service_pb.php';
-require_once 'google/appengine/api/taskqueue/TaskAlreadyExistsException.php';
-require_once 'google/appengine/api/taskqueue/TaskQueueException.php';
-require_once 'google/appengine/api/taskqueue/TransientTaskQueueException.php';
-require_once 'google/appengine/runtime/ApiProxy.php';
-require_once 'google/appengine/runtime/ApplicationError.php';
-
 use google\appengine\runtime\ApiProxy;
 use google\appengine\runtime\ApplicationError;
 use google\appengine\TaskQueueAddRequest;
diff --git a/php/sdk/google/appengine/api/taskqueue/PushQueueTest.php b/php/sdk/google/appengine/api/taskqueue/PushQueueTest.php
index 38bacb7..8339b7c 100644
--- a/php/sdk/google/appengine/api/taskqueue/PushQueueTest.php
+++ b/php/sdk/google/appengine/api/taskqueue/PushQueueTest.php
@@ -21,10 +21,6 @@
 
 namespace google\appengine\api\taskqueue;
 
-require_once 'google/appengine/api/taskqueue/PushQueue.php';
-require_once 'google/appengine/api/taskqueue/PushTask.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\api\taskqueue\PushTask;
 use google\appengine\testing\ApiProxyTestBase;
 use google\appengine\TaskQueueAddRequest\RequestMethod;
diff --git a/php/sdk/google/appengine/api/taskqueue/PushTask.php b/php/sdk/google/appengine/api/taskqueue/PushTask.php
index 1877496..2cedd14 100644
--- a/php/sdk/google/appengine/api/taskqueue/PushTask.php
+++ b/php/sdk/google/appengine/api/taskqueue/PushTask.php
@@ -32,9 +32,6 @@
 
 namespace google\appengine\api\taskqueue;
 
-require_once 'google/appengine/api/taskqueue/PushQueue.php';
-require_once 'google/appengine/api/taskqueue/taskqueue_service_pb.php';
-
 use google\appengine\TaskQueueAddRequest\RequestMethod;
 
 /**
@@ -127,8 +124,9 @@
     $extra_options = array_diff(array_keys($options),
                                 array_keys(self::$default_options));
     if (!empty($extra_options)) {
-      throw new \InvalidArgumentException('Invalid options supplied: ' .
-                                          implode(',', $extra_options));
+      throw new \InvalidArgumentException(
+          'Invalid options supplied: ' .
+          htmlspecialchars(implode(',', $extra_options)));
     }
 
     $this->options = array_merge(self::$default_options, $options);
@@ -147,11 +145,11 @@
         $display_len = 1000;
         throw new \InvalidArgumentException('name exceeds maximum length of ' .
             self::MAX_NAME_LENGTH . ". First $display_len characters of name: "
-            . substr($name, 0, $display_len));
+            . htmlspecialchars(substr($name, 0, $display_len)));
       }
       if (!preg_match(self::NAME_PATTERN, $name)) {
         throw new \InvalidArgumentException('name must match pattern: ' .
-            self::NAME_PATTERN . '. name: ' . $name);
+            self::NAME_PATTERN . '. name: ' . htmlspecialchars($name));
       }
     }
     $delay = $this->options['delay_seconds'];
@@ -196,7 +194,8 @@
       }
       if (strpos($h, ':') === false) {
         throw new \InvalidArgumentException(
-            'Each header must contain a colon. Header: ' . $h);
+            'Each header must contain a colon. Header: ' .
+            htmlspecialchars($h));
       }
       if ($has_content_type &&
           strncasecmp('content-type', $h, strlen('content-type')) == 0) {
diff --git a/php/sdk/google/appengine/api/taskqueue/PushTaskTest.php b/php/sdk/google/appengine/api/taskqueue/PushTaskTest.php
index 2f1795a..e29a225 100644
--- a/php/sdk/google/appengine/api/taskqueue/PushTaskTest.php
+++ b/php/sdk/google/appengine/api/taskqueue/PushTaskTest.php
@@ -21,9 +21,6 @@
 
 namespace google\appengine\api\taskqueue;
 
-require_once 'google/appengine/api/taskqueue/PushTask.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\api\taskqueue\PushTask;
 use google\appengine\testing\ApiProxyTestBase;
 use google\appengine\TaskQueueAddRequest\RequestMethod;
diff --git a/php/sdk/google/appengine/api/taskqueue/TaskAlreadyExistsException.php b/php/sdk/google/appengine/api/taskqueue/TaskAlreadyExistsException.php
index a17cf06..67ac881 100644
--- a/php/sdk/google/appengine/api/taskqueue/TaskAlreadyExistsException.php
+++ b/php/sdk/google/appengine/api/taskqueue/TaskAlreadyExistsException.php
@@ -19,8 +19,6 @@
 
 namespace google\appengine\api\taskqueue;
 
-require_once 'google/appengine/api/taskqueue/TaskQueueException.php';
-
 /**
  * Thrown when adding a task, but a task with the same name already exists in
  * the queue.
diff --git a/php/sdk/google/appengine/api/taskqueue/TransientTaskQueueException.php b/php/sdk/google/appengine/api/taskqueue/TransientTaskQueueException.php
index 973db24..e2e8bac 100644
--- a/php/sdk/google/appengine/api/taskqueue/TransientTaskQueueException.php
+++ b/php/sdk/google/appengine/api/taskqueue/TransientTaskQueueException.php
@@ -19,8 +19,6 @@
 
 namespace google\appengine\api\taskqueue;
 
-require_once 'google/appengine/api/taskqueue/TaskQueueException.php';
-
 /**
  * Thrown when there is a transient failure using Task Queue service, i.e.
  * retrying may result in success.
diff --git a/php/sdk/google/appengine/api/users/UserService.php b/php/sdk/google/appengine/api/users/UserService.php
index d951c28..8e14623 100644
--- a/php/sdk/google/appengine/api/users/UserService.php
+++ b/php/sdk/google/appengine/api/users/UserService.php
@@ -27,12 +27,6 @@
 use google\appengine\runtime\ApplicationError;
 use google\appengine\UserServiceError\ErrorCode;
 
-require_once 'google/appengine/api/user_service_pb.php';
-require_once 'google/appengine/api/users/User.php';
-require_once 'google/appengine/api/users/UsersException.php';
-require_once 'google/appengine/runtime/ApiProxy.php';
-require_once 'google/appengine/runtime/ApplicationError.php';
-
 final class UserService {
   /**
    * Computes the login URL for redirection.
@@ -68,7 +62,8 @@
     try {
       ApiProxy::makeSyncCall('user', 'CreateLoginURL', $req, $resp);
     } catch (ApplicationError $e) {
-      throw self::applicationErrorToException($e, $destination_url);
+      throw self::applicationErrorToException(
+          $e, htmlspecialchars($destination_url));
     }
     return $resp->getLoginUrl();
   }
@@ -94,7 +89,8 @@
     try {
       ApiProxy::makeSyncCall('user', 'CreateLogoutURL', $req, $resp);
     } catch (ApplicationError $e) {
-      throw self::applicationErrorToException($e, $destination_url);
+      throw self::applicationErrorToException(
+        $e, htmlspecialchars($destination_url));
     }
     return $resp->getLogoutUrl();
   }
diff --git a/php/sdk/google/appengine/api/users/UserServiceTest.php b/php/sdk/google/appengine/api/users/UserServiceTest.php
index de390da..7ac2897 100644
--- a/php/sdk/google/appengine/api/users/UserServiceTest.php
+++ b/php/sdk/google/appengine/api/users/UserServiceTest.php
@@ -17,10 +17,6 @@
 /**
  */
 
-require_once 'google/appengine/api/users/User.php';
-require_once 'google/appengine/api/users/UserService.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\api\users\User;
 use google\appengine\api\users\UserService;
 use google\appengine\testing\ApiProxyTestBase;
diff --git a/php/sdk/google/appengine/api/users/UserTest.php b/php/sdk/google/appengine/api/users/UserTest.php
index ba8f75b..df6df7e 100644
--- a/php/sdk/google/appengine/api/users/UserTest.php
+++ b/php/sdk/google/appengine/api/users/UserTest.php
@@ -17,10 +17,6 @@
 /**
  */
 
-require_once 'google/appengine/api/users/User.php';
-require_once 'google/appengine/api/users/UserService.php';
-require_once 'google/appengine/testing/ApiProxyTestBase.php';
-
 use google\appengine\api\users\User;
 use google\appengine\api\users\UserService;
 use google\appengine\testing\ApiProxyTestBase;
diff --git a/php/sdk/google/appengine/datastore/datastore_v3_pb.php b/php/sdk/google/appengine/datastore/datastore_v3_pb.php
index 5ed31c5..7b8c494 100644
--- a/php/sdk/google/appengine/datastore/datastore_v3_pb.php
+++ b/php/sdk/google/appengine/datastore/datastore_v3_pb.php
@@ -25,6 +25,83 @@
   require_once 'google/appengine/datastore/snapshot_pb.php';
 }
 namespace google\appengine_datastore_v3 {
+  class InternalHeader extends \google\net\ProtocolMessage {
+    public function getQos() {
+      if (!isset($this->qos)) {
+        return '';
+      }
+      return $this->qos;
+    }
+    public function setQos($val) {
+      $this->qos = $val;
+      return $this;
+    }
+    public function clearQos() {
+      unset($this->qos);
+      return $this;
+    }
+    public function hasQos() {
+      return isset($this->qos);
+    }
+    public function clear() {
+      $this->clearQos();
+    }
+    public function byteSizePartial() {
+      $res = 0;
+      if (isset($this->qos)) {
+        $res += 1;
+        $res += $this->lengthString(strlen($this->qos));
+      }
+      return $res;
+    }
+    public function outputPartial($out) {
+      if (isset($this->qos)) {
+        $out->putVarInt32(10);
+        $out->putPrefixedString($this->qos);
+      }
+    }
+    public function tryMerge($d) {
+      while($d->avail() > 0) {
+        $tt = $d->getVarInt32();
+        switch ($tt) {
+          case 10:
+            $length = $d->getVarInt32();
+            $this->setQos(substr($d->buffer(), $d->pos(), $length));
+            $d->skip($length);
+            break;
+          case 0:
+            throw new \google\net\ProtocolBufferDecodeError();
+            break;
+          default:
+            $d->skipData($tt);
+        }
+      };
+    }
+    public function checkInitialized() {
+      return null;
+    }
+    public function mergeFrom($x) {
+      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
+      if ($x->hasQos()) {
+        $this->setQos($x->getQos());
+      }
+    }
+    public function equals($x) {
+      if ($x === $this) { return true; }
+      if (isset($this->qos) !== isset($x->qos)) return false;
+      if (isset($this->qos) && $this->qos !== $x->qos) return false;
+      return true;
+    }
+    public function shortDebugString($prefix = "") {
+      $res = '';
+      if (isset($this->qos)) {
+        $res .= $prefix . "qos: " . $this->debugFormatString($this->qos) . "\n";
+      }
+      return $res;
+    }
+  }
+}
+namespace google\appengine_datastore_v3 {
   class Transaction extends \google\net\ProtocolMessage {
     public function getHandle() {
       if (!isset($this->handle)) {
@@ -81,10 +158,33 @@
     public function hasMarkChanges() {
       return isset($this->mark_changes);
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
     public function clear() {
       $this->clearHandle();
       $this->clearApp();
       $this->clearMarkChanges();
+      $this->clearHeader();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -98,6 +198,10 @@
       if (isset($this->mark_changes)) {
         $res += 2;
       }
+      if (isset($this->header)) {
+        $res += 1;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -113,6 +217,11 @@
         $out->putVarInt32(24);
         $out->putBoolean($this->mark_changes);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(34);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -129,6 +238,12 @@
           case 24:
             $this->setMarkChanges($d->getBoolean());
             break;
+          case 34:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -140,6 +255,7 @@
     public function checkInitialized() {
       if (!isset($this->handle)) return 'handle';
       if (!isset($this->app)) return 'app';
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
       return null;
     }
     public function mergeFrom($x) {
@@ -153,6 +269,9 @@
       if ($x->hasMarkChanges()) {
         $this->setMarkChanges($x->getMarkChanges());
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -162,6 +281,8 @@
       if (isset($this->app) && $this->app !== $x->app) return false;
       if (isset($this->mark_changes) !== isset($x->mark_changes)) return false;
       if (isset($this->mark_changes) && $this->mark_changes !== $x->mark_changes) return false;
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -175,6 +296,9 @@
       if (isset($this->mark_changes)) {
         $res .= $prefix . "mark_changes: " . $this->debugFormatBool($this->mark_changes) . "\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -988,6 +1112,28 @@
     public function hasPersistOffset() {
       return isset($this->persist_offset);
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
     public function clear() {
       $this->clearApp();
       $this->clearKind();
@@ -1015,6 +1161,7 @@
       $this->clearMinSafeTimeSeconds();
       $this->clearSafeReplicaName();
       $this->clearPersistOffset();
+      $this->clearHeader();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -1122,6 +1269,10 @@
       if (isset($this->persist_offset)) {
         $res += 3;
       }
+      if (isset($this->header)) {
+        $res += 2;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -1242,6 +1393,11 @@
         $out->putVarInt32(296);
         $out->putBoolean($this->persist_offset);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(314);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -1354,6 +1510,12 @@
           case 296:
             $this->setPersistOffset($d->getBoolean());
             break;
+          case 314:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -1377,6 +1539,7 @@
       if (isset($this->transaction) && (!$this->transaction->isInitialized())) return 'transaction';
       if (isset($this->compiled_cursor) && (!$this->compiled_cursor->isInitialized())) return 'compiled_cursor';
       if (isset($this->end_compiled_cursor) && (!$this->end_compiled_cursor->isInitialized())) return 'end_compiled_cursor';
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
       return null;
     }
     public function mergeFrom($x) {
@@ -1459,6 +1622,9 @@
       if ($x->hasPersistOffset()) {
         $this->setPersistOffset($x->getPersistOffset());
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -1526,6 +1692,8 @@
       }
       if (isset($this->persist_offset) !== isset($x->persist_offset)) return false;
       if (isset($this->persist_offset) && $this->persist_offset !== $x->persist_offset) return false;
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -1608,6 +1776,9 @@
       if (isset($this->persist_offset)) {
         $res .= $prefix . "persist_offset: " . $this->debugFormatBool($this->persist_offset) . "\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -3074,81 +3245,43 @@
 }
 namespace google\appengine_datastore_v3 {
   class CompiledCursor extends \google\net\ProtocolMessage {
-    private $position = array();
-    public function getMultiqueryIndex() {
-      if (!isset($this->multiquery_index)) {
-        return 0;
+    public function getPosition() {
+      if (!isset($this->position)) {
+        return new \google\appengine_datastore_v3\CompiledCursor\Position();
       }
-      return $this->multiquery_index;
-    }
-    public function setMultiqueryIndex($val) {
-      $this->multiquery_index = $val;
-      return $this;
-    }
-    public function clearMultiqueryIndex() {
-      unset($this->multiquery_index);
-      return $this;
-    }
-    public function hasMultiqueryIndex() {
-      return isset($this->multiquery_index);
-    }
-    public function getPositionSize() {
-      return sizeof($this->position);
-    }
-    public function getPositionList() {
       return $this->position;
     }
-    public function mutablePosition($idx) {
-      if (!isset($this->position[$idx])) {
-        $val = new \google\appengine_datastore_v3\CompiledCursor\Position();
-        $this->position[$idx] = $val;
-        return $val;
+    public function mutablePosition() {
+      if (!isset($this->position)) {
+        $res = new \google\appengine_datastore_v3\CompiledCursor\Position();
+        $this->position = $res;
+        return $res;
       }
-      return $this->position[$idx];
-    }
-    public function getPosition($idx) {
-      if (isset($this->position[$idx])) {
-        return $this->position[$idx];
-      }
-      if ($idx >= end(array_keys($this->position))) {
-        throw new \OutOfRangeException('index out of range: ' + $idx);
-      }
-      return new \google\appengine_datastore_v3\CompiledCursor\Position();
-    }
-    public function addPosition() {
-      $val = new \google\appengine_datastore_v3\CompiledCursor\Position();
-      $this->position[] = $val;
-      return $val;
+      return $this->position;
     }
     public function clearPosition() {
-      $this->position = array();
+      if (isset($this->position)) {
+        unset($this->position);
+      }
+    }
+    public function hasPosition() {
+      return isset($this->position);
     }
     public function clear() {
-      $this->clearMultiqueryIndex();
       $this->clearPosition();
     }
     public function byteSizePartial() {
       $res = 0;
-      if (isset($this->multiquery_index)) {
-        $res += 1;
-        $res += $this->lengthVarInt64($this->multiquery_index);
-      }
-      $this->checkProtoArray($this->position);
-      $res += 2 * sizeof($this->position);
-      foreach ($this->position as $value) {
-        $res += $value->byteSizePartial();
+      if (isset($this->position)) {
+        $res += 2;
+        $res += $this->position->byteSizePartial();
       }
       return $res;
     }
     public function outputPartial($out) {
-      if (isset($this->multiquery_index)) {
-        $out->putVarInt32(8);
-        $out->putVarInt32($this->multiquery_index);
-      }
-      $this->checkProtoArray($this->position);
-      foreach ($this->position as $value) {
+      if (isset($this->position)) {
         $out->putVarInt32(19);
-        $value->outputPartial($out);
+        $this->position->outputPartial($out);
         $out->putVarInt32(20);
       }
     }
@@ -3156,11 +3289,8 @@
       while($d->avail() > 0) {
         $tt = $d->getVarInt32();
         switch ($tt) {
-          case 8:
-            $this->setMultiqueryIndex($d->getVarInt32());
-            break;
           case 19:
-            $this->addPosition()->tryMerge($d);
+            $this->mutablePosition()->tryMerge($d);
             break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
@@ -3171,37 +3301,25 @@
       };
     }
     public function checkInitialized() {
-      foreach ($this->position as $value) {
-        if (!$value->isInitialized()) return 'position';
-      }
+      if (isset($this->position) && (!$this->position->isInitialized())) return 'position';
       return null;
     }
     public function mergeFrom($x) {
       if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
-      if ($x->hasMultiqueryIndex()) {
-        $this->setMultiqueryIndex($x->getMultiqueryIndex());
-      }
-      foreach ($x->getPositionList() as $v) {
-        $this->addPosition()->copyFrom($v);
+      if ($x->hasPosition()) {
+        $this->mutablePosition()->mergeFrom($x->getPosition());
       }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
-      if (isset($this->multiquery_index) !== isset($x->multiquery_index)) return false;
-      if (isset($this->multiquery_index) && !$this->integerEquals($this->multiquery_index, $x->multiquery_index)) return false;
-      if (sizeof($this->position) !== sizeof($x->position)) return false;
-      foreach (array_map(null, $this->position, $x->position) as $v) {
-        if (!$v[0]->equals($v[1])) return false;
-      }
+      if (isset($this->position) !== isset($x->position)) return false;
+      if (isset($this->position) && !$this->position->equals($x->position)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
       $res = '';
-      if (isset($this->multiquery_index)) {
-        $res .= $prefix . "multiquery_index: " . $this->debugFormatInt32($this->multiquery_index) . "\n";
-      }
-      foreach ($this->position as $value) {
-        $res .= $prefix . "Position {\n" . $value->shortDebugString($prefix . "  ") . $prefix . "}\n";
+      if (isset($this->position)) {
+        $res .= $prefix . "Position {\n" . $this->position->shortDebugString($prefix . "  ") . $prefix . "}\n";
       }
       return $res;
     }
@@ -3906,12 +4024,35 @@
     public function hasAllowDeferred() {
       return isset($this->allow_deferred);
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
     public function clear() {
       $this->clearKey();
       $this->clearTransaction();
       $this->clearFailoverMs();
       $this->clearStrong();
       $this->clearAllowDeferred();
+      $this->clearHeader();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -3934,6 +4075,10 @@
       if (isset($this->allow_deferred)) {
         $res += 2;
       }
+      if (isset($this->header)) {
+        $res += 1;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -3960,6 +4105,11 @@
         $out->putVarInt32(40);
         $out->putBoolean($this->allow_deferred);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(50);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -3986,6 +4136,12 @@
           case 40:
             $this->setAllowDeferred($d->getBoolean());
             break;
+          case 50:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -3999,6 +4155,7 @@
         if (!$value->isInitialized()) return 'key';
       }
       if (isset($this->transaction) && (!$this->transaction->isInitialized())) return 'transaction';
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
       return null;
     }
     public function mergeFrom($x) {
@@ -4018,6 +4175,9 @@
       if ($x->hasAllowDeferred()) {
         $this->setAllowDeferred($x->getAllowDeferred());
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -4033,6 +4193,8 @@
       if (isset($this->strong) && $this->strong !== $x->strong) return false;
       if (isset($this->allow_deferred) !== isset($x->allow_deferred)) return false;
       if (isset($this->allow_deferred) && $this->allow_deferred !== $x->allow_deferred) return false;
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -4052,6 +4214,9 @@
       if (isset($this->allow_deferred)) {
         $res .= $prefix . "allow_deferred: " . $this->debugFormatBool($this->allow_deferred) . "\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -4621,6 +4786,28 @@
     public function hasAutoIdPolicy() {
       return isset($this->auto_id_policy);
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
     public function clear() {
       $this->clearEntity();
       $this->clearTransaction();
@@ -4630,6 +4817,7 @@
       $this->clearMarkChanges();
       $this->clearSnapshot();
       $this->clearAutoIdPolicy();
+      $this->clearHeader();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -4665,6 +4853,10 @@
         $res += 1;
         $res += $this->lengthVarInt64($this->auto_id_policy);
       }
+      if (isset($this->header)) {
+        $res += 1;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -4707,6 +4899,11 @@
         $out->putVarInt32(80);
         $out->putVarInt32($this->auto_id_policy);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(90);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -4748,6 +4945,12 @@
           case 80:
             $this->setAutoIdPolicy($d->getVarInt32());
             break;
+          case 90:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -4767,6 +4970,7 @@
       foreach ($this->snapshot as $value) {
         if (!$value->isInitialized()) return 'snapshot';
       }
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
       return null;
     }
     public function mergeFrom($x) {
@@ -4795,6 +4999,9 @@
       if ($x->hasAutoIdPolicy()) {
         $this->setAutoIdPolicy($x->getAutoIdPolicy());
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -4820,6 +5027,8 @@
       }
       if (isset($this->auto_id_policy) !== isset($x->auto_id_policy)) return false;
       if (isset($this->auto_id_policy) && $this->auto_id_policy !== $x->auto_id_policy) return false;
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -4848,6 +5057,9 @@
       if (isset($this->auto_id_policy)) {
         $res .= $prefix . "auto_id_policy: " . ($this->auto_id_policy) . "\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -5168,11 +5380,34 @@
     public function clearSnapshot() {
       $this->snapshot = array();
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
     public function clear() {
       $this->clearKey();
       $this->clearCompositeIndex();
       $this->clearForce();
       $this->clearSnapshot();
+      $this->clearHeader();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -5194,6 +5429,10 @@
       foreach ($this->snapshot as $value) {
         $res += $this->lengthString($value->byteSizePartial());
       }
+      if (isset($this->header)) {
+        $res += 1;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -5219,6 +5458,11 @@
         $out->putVarInt32($value->byteSizePartial());
         $value->outputPartial($out);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(82);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -5245,6 +5489,12 @@
             $d->skip($length);
             $this->addSnapshot()->tryMerge($tmp);
             break;
+          case 82:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -5263,6 +5513,7 @@
       foreach ($this->snapshot as $value) {
         if (!$value->isInitialized()) return 'snapshot';
       }
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
       return null;
     }
     public function mergeFrom($x) {
@@ -5279,6 +5530,9 @@
       foreach ($x->getSnapshotList() as $v) {
         $this->addSnapshot()->copyFrom($v);
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -5296,6 +5550,8 @@
       foreach (array_map(null, $this->snapshot, $x->snapshot) as $v) {
         if (!$v[0]->equals($v[1])) return false;
       }
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -5312,6 +5568,9 @@
       foreach ($this->snapshot as $value) {
         $res .= $prefix . "snapshot <\n" . $value->shortDebugString($prefix . "  ") . $prefix . ">\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -5540,6 +5799,28 @@
     public function clearSnapshot() {
       $this->snapshot = array();
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
     public function clear() {
       $this->clearTrusted();
       $this->clearTransaction();
@@ -5547,6 +5828,7 @@
       $this->clearForce();
       $this->clearMarkChanges();
       $this->clearSnapshot();
+      $this->clearHeader();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -5573,6 +5855,10 @@
       foreach ($this->snapshot as $value) {
         $res += $this->lengthString($value->byteSizePartial());
       }
+      if (isset($this->header)) {
+        $res += 1;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -5605,6 +5891,11 @@
         $out->putVarInt32($value->byteSizePartial());
         $value->outputPartial($out);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(82);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -5637,6 +5928,12 @@
             $d->skip($length);
             $this->addSnapshot()->tryMerge($tmp);
             break;
+          case 82:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -5653,6 +5950,7 @@
       foreach ($this->snapshot as $value) {
         if (!$value->isInitialized()) return 'snapshot';
       }
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
       return null;
     }
     public function mergeFrom($x) {
@@ -5675,6 +5973,9 @@
       foreach ($x->getSnapshotList() as $v) {
         $this->addSnapshot()->copyFrom($v);
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -5694,6 +5995,8 @@
       foreach (array_map(null, $this->snapshot, $x->snapshot) as $v) {
         if (!$v[0]->equals($v[1])) return false;
       }
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -5716,6 +6019,9 @@
       foreach ($this->snapshot as $value) {
         $res .= $prefix . "snapshot <\n" . $value->shortDebugString($prefix . "  ") . $prefix . ">\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -5933,11 +6239,34 @@
     public function hasOffset() {
       return isset($this->offset);
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
     public function clear() {
       $this->clearCursor();
       $this->clearCount();
       $this->clearCompile();
       $this->clearOffset();
+      $this->clearHeader();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -5956,6 +6285,10 @@
         $res += 1;
         $res += $this->lengthVarInt64($this->offset);
       }
+      if (isset($this->header)) {
+        $res += 1;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -5976,6 +6309,11 @@
         $out->putVarInt32(32);
         $out->putVarInt32($this->offset);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(42);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -5996,6 +6334,12 @@
           case 32:
             $this->setOffset($d->getVarInt32());
             break;
+          case 42:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -6006,6 +6350,7 @@
     }
     public function checkInitialized() {
       if ((!isset($this->cursor)) || (!$this->cursor->isInitialized())) return 'cursor';
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
       return null;
     }
     public function mergeFrom($x) {
@@ -6022,6 +6367,9 @@
       if ($x->hasOffset()) {
         $this->setOffset($x->getOffset());
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -6033,6 +6381,8 @@
       if (isset($this->compile) && $this->compile !== $x->compile) return false;
       if (isset($this->offset) !== isset($x->offset)) return false;
       if (isset($this->offset) && !$this->integerEquals($this->offset, $x->offset)) return false;
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -6049,6 +6399,9 @@
       if (isset($this->offset)) {
         $res .= $prefix . "offset: " . $this->debugFormatInt32($this->offset) . "\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -6595,6 +6948,7 @@
 }
 namespace google\appengine_datastore_v3 {
   class AllocateIdsRequest extends \google\net\ProtocolMessage {
+    private $reserve = array();
     public function getModelKey() {
       if (!isset($this->model_key)) {
         return new \storage_onestore_v3\Reference();
@@ -6659,10 +7013,65 @@
     public function hasMax() {
       return isset($this->max);
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
+    public function getReserveSize() {
+      return sizeof($this->reserve);
+    }
+    public function getReserveList() {
+      return $this->reserve;
+    }
+    public function mutableReserve($idx) {
+      if (!isset($this->reserve[$idx])) {
+        $val = new \storage_onestore_v3\Reference();
+        $this->reserve[$idx] = $val;
+        return $val;
+      }
+      return $this->reserve[$idx];
+    }
+    public function getReserve($idx) {  // Read-only accessor for reserve[$idx]; never mutates the list.
+      if (isset($this->reserve[$idx])) {
+        return $this->reserve[$idx];
+      }
+      if ($idx >= end(array_keys($this->reserve))) {  // past the last key; end([]) is false, so any $idx >= 0 throws on an empty list. NOTE(review): end() on a function result raises an E_STRICT notice — left as-is to keep this patch minimal.
+        throw new \OutOfRangeException('index out of range: ' . $idx);  // '.' concatenates; the previous '+' was PHP arithmetic and numerically coerced the message away
+      }
+      return new \storage_onestore_v3\Reference();  // in range but unset: default instance, matching the other generated getters
+    }
+    public function addReserve() {
+      $val = new \storage_onestore_v3\Reference();
+      $this->reserve[] = $val;
+      return $val;
+    }
+    public function clearReserve() {
+      $this->reserve = array();
+    }
     public function clear() {
       $this->clearModelKey();
       $this->clearSize();
       $this->clearMax();
+      $this->clearHeader();
+      $this->clearReserve();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -6678,6 +7087,15 @@
         $res += 1;
         $res += $this->lengthVarInt64($this->max);
       }
+      if (isset($this->header)) {
+        $res += 1;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
+      $this->checkProtoArray($this->reserve);
+      $res += 1 * sizeof($this->reserve);
+      foreach ($this->reserve as $value) {
+        $res += $this->lengthString($value->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -6694,6 +7112,17 @@
         $out->putVarInt32(24);
         $out->putVarInt64($this->max);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(34);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
+      $this->checkProtoArray($this->reserve);
+      foreach ($this->reserve as $value) {
+        $out->putVarInt32(42);
+        $out->putVarInt32($value->byteSizePartial());
+        $value->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -6711,6 +7140,18 @@
           case 24:
             $this->setMax($d->getVarInt64());
             break;
+          case 34:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
+          case 42:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->addReserve()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -6720,7 +7161,11 @@
       };
     }
     public function checkInitialized() {
-      if ((!isset($this->model_key)) || (!$this->model_key->isInitialized())) return 'model_key';
+      if (isset($this->model_key) && (!$this->model_key->isInitialized())) return 'model_key';
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
+      foreach ($this->reserve as $value) {
+        if (!$value->isInitialized()) return 'reserve';
+      }
       return null;
     }
     public function mergeFrom($x) {
@@ -6734,6 +7179,12 @@
       if ($x->hasMax()) {
         $this->setMax($x->getMax());
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
+      foreach ($x->getReserveList() as $v) {
+        $this->addReserve()->copyFrom($v);
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -6743,6 +7194,12 @@
       if (isset($this->size) && !$this->integerEquals($this->size, $x->size)) return false;
       if (isset($this->max) !== isset($x->max)) return false;
       if (isset($this->max) && !$this->integerEquals($this->max, $x->max)) return false;
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
+      if (sizeof($this->reserve) !== sizeof($x->reserve)) return false;
+      foreach (array_map(null, $this->reserve, $x->reserve) as $v) {
+        if (!$v[0]->equals($v[1])) return false;
+      }
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -6756,6 +7213,12 @@
       if (isset($this->max)) {
         $res .= $prefix . "max: " . $this->debugFormatInt64($this->max) . "\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      foreach ($this->reserve as $value) {
+        $res .= $prefix . "reserve <\n" . $value->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -7086,9 +7549,32 @@
     public function clearAction() {
       $this->action = array();
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
     public function clear() {
       $this->clearTransaction();
       $this->clearAction();
+      $this->clearHeader();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -7101,6 +7587,10 @@
       foreach ($this->action as $value) {
         $res += $this->lengthString($value->byteSizePartial());
       }
+      if (isset($this->header)) {
+        $res += 1;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -7115,6 +7605,11 @@
         $out->putVarInt32($value->byteSizePartial());
         $value->outputPartial($out);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(26);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -7132,6 +7627,12 @@
             $d->skip($length);
             $this->addAction()->tryMerge($tmp);
             break;
+          case 26:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -7145,6 +7646,7 @@
       foreach ($this->action as $value) {
         if (!$value->isInitialized()) return 'action';
       }
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
       return null;
     }
     public function mergeFrom($x) {
@@ -7155,6 +7657,9 @@
       foreach ($x->getActionList() as $v) {
         $this->addAction()->copyFrom($v);
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -7164,6 +7669,8 @@
       foreach (array_map(null, $this->action, $x->action) as $v) {
         if (!$v[0]->equals($v[1])) return false;
       }
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -7174,6 +7681,9 @@
       foreach ($this->action as $value) {
         $res .= $prefix . "action <\n" . $value->shortDebugString($prefix . "  ") . $prefix . ">\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -7252,9 +7762,32 @@
     public function hasAllowMultipleEg() {
       return isset($this->allow_multiple_eg);
     }
+    public function getHeader() {
+      if (!isset($this->header)) {
+        return new \google\appengine_datastore_v3\InternalHeader();
+      }
+      return $this->header;
+    }
+    public function mutableHeader() {
+      if (!isset($this->header)) {
+        $res = new \google\appengine_datastore_v3\InternalHeader();
+        $this->header = $res;
+        return $res;
+      }
+      return $this->header;
+    }
+    public function clearHeader() {
+      if (isset($this->header)) {
+        unset($this->header);
+      }
+    }
+    public function hasHeader() {
+      return isset($this->header);
+    }
     public function clear() {
       $this->clearApp();
       $this->clearAllowMultipleEg();
+      $this->clearHeader();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -7265,6 +7798,10 @@
       if (isset($this->allow_multiple_eg)) {
         $res += 2;
       }
+      if (isset($this->header)) {
+        $res += 1;
+        $res += $this->lengthString($this->header->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -7276,6 +7813,11 @@
         $out->putVarInt32(16);
         $out->putBoolean($this->allow_multiple_eg);
       }
+      if (isset($this->header)) {
+        $out->putVarInt32(26);
+        $out->putVarInt32($this->header->byteSizePartial());
+        $this->header->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -7289,6 +7831,12 @@
           case 16:
             $this->setAllowMultipleEg($d->getBoolean());
             break;
+          case 26:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableHeader()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -7299,6 +7847,7 @@
     }
     public function checkInitialized() {
       if (!isset($this->app)) return 'app';
+      if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
       return null;
     }
     public function mergeFrom($x) {
@@ -7309,6 +7858,9 @@
       if ($x->hasAllowMultipleEg()) {
         $this->setAllowMultipleEg($x->getAllowMultipleEg());
       }
+      if ($x->hasHeader()) {
+        $this->mutableHeader()->mergeFrom($x->getHeader());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -7316,6 +7868,8 @@
       if (isset($this->app) && $this->app !== $x->app) return false;
       if (isset($this->allow_multiple_eg) !== isset($x->allow_multiple_eg)) return false;
       if (isset($this->allow_multiple_eg) && $this->allow_multiple_eg !== $x->allow_multiple_eg) return false;
+      if (isset($this->header) !== isset($x->header)) return false;
+      if (isset($this->header) && !$this->header->equals($x->header)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -7326,6 +7880,9 @@
       if (isset($this->allow_multiple_eg)) {
         $res .= $prefix . "allow_multiple_eg: " . $this->debugFormatBool($this->allow_multiple_eg) . "\n";
       }
+      if (isset($this->header)) {
+        $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
diff --git a/php/sdk/google/appengine/datastore/datastore_v4_pb.php b/php/sdk/google/appengine/datastore/datastore_v4_pb.php
index d5c7ab2..303f391 100644
--- a/php/sdk/google/appengine/datastore/datastore_v4_pb.php
+++ b/php/sdk/google/appengine/datastore/datastore_v4_pb.php
@@ -2226,8 +2226,313 @@
     }
   }
 }
+namespace google\appengine\datastore\v4\Mutation {
+  class Operation {  // generated stand-in for the proto enum Mutation.Operation (constants-only class; PHP 5.x has no native enum)
+    const INSERT = 1;
+    const UPDATE = 2;
+    const UPSERT = 3;
+    const DELETE = 4;
+    const INSERT_WITH_AUTO_ID = 99;  // insert letting the datastore assign the id; value 99 comes from the .proto, deliberately far from the CRUD values
+  }
+}
 namespace google\appengine\datastore\v4 {
   class Mutation extends \google\net\ProtocolMessage {
+    public function getOp() {
+      if (!isset($this->op)) {
+        return 1;
+      }
+      return $this->op;
+    }
+    public function setOp($val) {
+      $this->op = $val;
+      return $this;
+    }
+    public function clearOp() {
+      unset($this->op);
+      return $this;
+    }
+    public function hasOp() {
+      return isset($this->op);
+    }
+    public function getKey() {
+      if (!isset($this->key)) {
+        return new \google\appengine\datastore\v4\Key();
+      }
+      return $this->key;
+    }
+    public function mutableKey() {
+      if (!isset($this->key)) {
+        $res = new \google\appengine\datastore\v4\Key();
+        $this->key = $res;
+        return $res;
+      }
+      return $this->key;
+    }
+    public function clearKey() {
+      if (isset($this->key)) {
+        unset($this->key);
+      }
+    }
+    public function hasKey() {
+      return isset($this->key);
+    }
+    public function getEntity() {
+      if (!isset($this->entity)) {
+        return new \google\appengine\datastore\v4\Entity();
+      }
+      return $this->entity;
+    }
+    public function mutableEntity() {
+      if (!isset($this->entity)) {
+        $res = new \google\appengine\datastore\v4\Entity();
+        $this->entity = $res;
+        return $res;
+      }
+      return $this->entity;
+    }
+    public function clearEntity() {
+      if (isset($this->entity)) {
+        unset($this->entity);
+      }
+    }
+    public function hasEntity() {
+      return isset($this->entity);
+    }
+    public function clear() {
+      $this->clearOp();
+      $this->clearKey();
+      $this->clearEntity();
+    }
+    public function byteSizePartial() {
+      $res = 0;
+      if (isset($this->op)) {
+        $res += 1;
+        $res += $this->lengthVarInt64($this->op);
+      }
+      if (isset($this->key)) {
+        $res += 1;
+        $res += $this->lengthString($this->key->byteSizePartial());
+      }
+      if (isset($this->entity)) {
+        $res += 1;
+        $res += $this->lengthString($this->entity->byteSizePartial());
+      }
+      return $res;
+    }
+    public function outputPartial($out) {
+      if (isset($this->op)) {
+        $out->putVarInt32(8);
+        $out->putVarInt32($this->op);
+      }
+      if (isset($this->key)) {
+        $out->putVarInt32(18);
+        $out->putVarInt32($this->key->byteSizePartial());
+        $this->key->outputPartial($out);
+      }
+      if (isset($this->entity)) {
+        $out->putVarInt32(26);
+        $out->putVarInt32($this->entity->byteSizePartial());
+        $this->entity->outputPartial($out);
+      }
+    }
+    public function tryMerge($d) {  // decode wire-format bytes from Decoder $d into this message, merging into existing fields
+      while($d->avail() > 0) {
+        $tt = $d->getVarInt32();  // protobuf tag: (field_number << 3) | wire_type
+        switch ($tt) {
+          case 8:  // field 1 (op), wire type 0: varint
+            $this->setOp($d->getVarInt32());
+            break;
+          case 18:  // field 2 (key), wire type 2: length-delimited submessage
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);  // bounded view over the submessage bytes
+            $d->skip($length);
+            $this->mutableKey()->tryMerge($tmp);
+            break;
+          case 26:  // field 3 (entity), wire type 2: length-delimited submessage
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableEntity()->tryMerge($tmp);
+            break;
+          case 0:  // tag 0 is never valid: corrupt or truncated input
+            throw new \google\net\ProtocolBufferDecodeError();
+            break;
+          default:  // unknown field: skip its payload for forward compatibility
+            $d->skipData($tt);
+        }
+      };
+    }
+    public function checkInitialized() {  // returns the name of the first missing/uninitialized required field, or null if fully initialized
+      if (!isset($this->op)) return 'op';  // op is the only required scalar on Mutation
+      if (isset($this->key) && (!$this->key->isInitialized())) return 'key';  // optional, but must be internally complete when present
+      if (isset($this->entity) && (!$this->entity->isInitialized())) return 'entity';
+      return null;
+    }
+    public function mergeFrom($x) {
+      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
+      if ($x->hasOp()) {
+        $this->setOp($x->getOp());
+      }
+      if ($x->hasKey()) {
+        $this->mutableKey()->mergeFrom($x->getKey());
+      }
+      if ($x->hasEntity()) {
+        $this->mutableEntity()->mergeFrom($x->getEntity());
+      }
+    }
+    public function equals($x) {
+      if ($x === $this) { return true; }
+      if (isset($this->op) !== isset($x->op)) return false;
+      if (isset($this->op) && $this->op !== $x->op) return false;
+      if (isset($this->key) !== isset($x->key)) return false;
+      if (isset($this->key) && !$this->key->equals($x->key)) return false;
+      if (isset($this->entity) !== isset($x->entity)) return false;
+      if (isset($this->entity) && !$this->entity->equals($x->entity)) return false;
+      return true;
+    }
+    public function shortDebugString($prefix = "") {
+      $res = '';
+      if (isset($this->op)) {
+        $res .= $prefix . "op: " . ($this->op) . "\n";
+      }
+      if (isset($this->key)) {
+        $res .= $prefix . "key <\n" . $this->key->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      if (isset($this->entity)) {
+        $res .= $prefix . "entity <\n" . $this->entity->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      return $res;
+    }
+  }
+}
+namespace google\appengine\datastore\v4 {
+  class MutationResult extends \google\net\ProtocolMessage {
+    public function getKey() {
+      if (!isset($this->key)) {
+        return new \google\appengine\datastore\v4\Key();
+      }
+      return $this->key;
+    }
+    public function mutableKey() {
+      if (!isset($this->key)) {
+        $res = new \google\appengine\datastore\v4\Key();
+        $this->key = $res;
+        return $res;
+      }
+      return $this->key;
+    }
+    public function clearKey() {
+      if (isset($this->key)) {
+        unset($this->key);
+      }
+    }
+    public function hasKey() {
+      return isset($this->key);
+    }
+    public function getNewVersion() {
+      if (!isset($this->new_version)) {
+        return '0';
+      }
+      return $this->new_version;
+    }
+    public function setNewVersion($val) {
+      if (is_double($val)) {
+        $this->new_version = sprintf('%0.0F', $val);
+      } else {
+        $this->new_version = $val;
+      }
+      return $this;
+    }
+    public function clearNewVersion() {
+      unset($this->new_version);
+      return $this;
+    }
+    public function hasNewVersion() {
+      return isset($this->new_version);
+    }
+    public function clear() {
+      $this->clearKey();
+      $this->clearNewVersion();
+    }
+    public function byteSizePartial() {
+      $res = 0;
+      if (isset($this->key)) {
+        $res += 1;
+        $res += $this->lengthString($this->key->byteSizePartial());
+      }
+      if (isset($this->new_version)) {
+        $res += 1;
+        $res += $this->lengthVarInt64($this->new_version);
+      }
+      return $res;
+    }
+    public function outputPartial($out) {
+      if (isset($this->key)) {
+        $out->putVarInt32(26);
+        $out->putVarInt32($this->key->byteSizePartial());
+        $this->key->outputPartial($out);
+      }
+      if (isset($this->new_version)) {
+        $out->putVarInt32(32);
+        $out->putVarInt64($this->new_version);
+      }
+    }
+    public function tryMerge($d) {
+      while($d->avail() > 0) {
+        $tt = $d->getVarInt32();
+        switch ($tt) {
+          case 26:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableKey()->tryMerge($tmp);
+            break;
+          case 32:
+            $this->setNewVersion($d->getVarInt64());
+            break;
+          case 0:
+            throw new \google\net\ProtocolBufferDecodeError();
+            break;
+          default:
+            $d->skipData($tt);
+        }
+      };
+    }
+    public function checkInitialized() {
+      if (isset($this->key) && (!$this->key->isInitialized())) return 'key';
+      return null;
+    }
+    public function mergeFrom($x) {
+      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
+      if ($x->hasKey()) {
+        $this->mutableKey()->mergeFrom($x->getKey());
+      }
+      if ($x->hasNewVersion()) {
+        $this->setNewVersion($x->getNewVersion());
+      }
+    }
+    public function equals($x) {
+      if ($x === $this) { return true; }
+      if (isset($this->key) !== isset($x->key)) return false;
+      if (isset($this->key) && !$this->key->equals($x->key)) return false;
+      if (isset($this->new_version) !== isset($x->new_version)) return false;
+      if (isset($this->new_version) && !$this->integerEquals($this->new_version, $x->new_version)) return false;
+      return true;
+    }
+    public function shortDebugString($prefix = "") {
+      $res = '';
+      if (isset($this->key)) {
+        $res .= $prefix . "key <\n" . $this->key->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      if (isset($this->new_version)) {
+        $res .= $prefix . "new_version: " . $this->debugFormatInt64($this->new_version) . "\n";
+      }
+      return $res;
+    }
+  }
+}
+namespace google\appengine\datastore\v4 {
+  class DeprecatedMutation extends \google\net\ProtocolMessage {
     private $upsert = array();
     private $update = array();
     private $insert = array();
@@ -2616,7 +2921,7 @@
   }
 }
 namespace google\appengine\datastore\v4 {
-  class MutationResult extends \google\net\ProtocolMessage {
+  class DeprecatedMutationResult extends \google\net\ProtocolMessage {
     private $insert_auto_id_key = array();
     private $upsert_version = array();
     private $update_version = array();
@@ -4418,6 +4723,7 @@
 }
 namespace google\appengine\datastore\v4 {
   class CommitRequest extends \google\net\ProtocolMessage {
+    private $mutation = array();
     public function getTransaction() {
       if (!isset($this->transaction)) {
         return '';
@@ -4435,27 +4741,27 @@
     public function hasTransaction() {
       return isset($this->transaction);
     }
-    public function getMutation() {
-      if (!isset($this->mutation)) {
-        return new \google\appengine\datastore\v4\Mutation();
+    public function getDeprecatedMutation() {
+      if (!isset($this->deprecated_mutation)) {
+        return new \google\appengine\datastore\v4\DeprecatedMutation();
       }
-      return $this->mutation;
+      return $this->deprecated_mutation;
     }
-    public function mutableMutation() {
-      if (!isset($this->mutation)) {
-        $res = new \google\appengine\datastore\v4\Mutation();
-        $this->mutation = $res;
+    public function mutableDeprecatedMutation() {
+      if (!isset($this->deprecated_mutation)) {
+        $res = new \google\appengine\datastore\v4\DeprecatedMutation();
+        $this->deprecated_mutation = $res;
         return $res;
       }
-      return $this->mutation;
+      return $this->deprecated_mutation;
     }
-    public function clearMutation() {
-      if (isset($this->mutation)) {
-        unset($this->mutation);
+    public function clearDeprecatedMutation() {
+      if (isset($this->deprecated_mutation)) {
+        unset($this->deprecated_mutation);
       }
     }
-    public function hasMutation() {
-      return isset($this->mutation);
+    public function hasDeprecatedMutation() {
+      return isset($this->deprecated_mutation);
     }
     public function getMode() {
       if (!isset($this->mode)) {
@@ -4474,10 +4780,60 @@
     public function hasMode() {
       return isset($this->mode);
     }
+    public function getMutationSize() {
+      return sizeof($this->mutation);
+    }
+    public function getMutationList() {
+      return $this->mutation;
+    }
+    public function mutableMutation($idx) {
+      if (!isset($this->mutation[$idx])) {
+        $val = new \google\appengine\datastore\v4\Mutation();
+        $this->mutation[$idx] = $val;
+        return $val;
+      }
+      return $this->mutation[$idx];
+    }
+    public function getMutation($idx) {
+      if (isset($this->mutation[$idx])) {
+        return $this->mutation[$idx];
+      }
+      if ($idx >= end(array_keys($this->mutation))) {
+        throw new \OutOfRangeException('index out of range: ' + $idx);
+      }
+      return new \google\appengine\datastore\v4\Mutation();
+    }
+    public function addMutation() {
+      $val = new \google\appengine\datastore\v4\Mutation();
+      $this->mutation[] = $val;
+      return $val;
+    }
+    public function clearMutation() {
+      $this->mutation = array();
+    }
+    public function getIgnoreReadOnly() {
+      if (!isset($this->ignore_read_only)) {
+        return false;
+      }
+      return $this->ignore_read_only;
+    }
+    public function setIgnoreReadOnly($val) {
+      $this->ignore_read_only = $val;
+      return $this;
+    }
+    public function clearIgnoreReadOnly() {
+      unset($this->ignore_read_only);
+      return $this;
+    }
+    public function hasIgnoreReadOnly() {
+      return isset($this->ignore_read_only);
+    }
     public function clear() {
       $this->clearTransaction();
-      $this->clearMutation();
+      $this->clearDeprecatedMutation();
       $this->clearMode();
+      $this->clearMutation();
+      $this->clearIgnoreReadOnly();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -4485,14 +4841,22 @@
         $res += 1;
         $res += $this->lengthString(strlen($this->transaction));
       }
-      if (isset($this->mutation)) {
+      if (isset($this->deprecated_mutation)) {
         $res += 1;
-        $res += $this->lengthString($this->mutation->byteSizePartial());
+        $res += $this->lengthString($this->deprecated_mutation->byteSizePartial());
       }
       if (isset($this->mode)) {
         $res += 1;
         $res += $this->lengthVarInt64($this->mode);
       }
+      $this->checkProtoArray($this->mutation);
+      $res += 1 * sizeof($this->mutation);
+      foreach ($this->mutation as $value) {
+        $res += $this->lengthString($value->byteSizePartial());
+      }
+      if (isset($this->ignore_read_only)) {
+        $res += 2;
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -4500,15 +4864,25 @@
         $out->putVarInt32(10);
         $out->putPrefixedString($this->transaction);
       }
-      if (isset($this->mutation)) {
+      if (isset($this->deprecated_mutation)) {
         $out->putVarInt32(18);
-        $out->putVarInt32($this->mutation->byteSizePartial());
-        $this->mutation->outputPartial($out);
+        $out->putVarInt32($this->deprecated_mutation->byteSizePartial());
+        $this->deprecated_mutation->outputPartial($out);
       }
       if (isset($this->mode)) {
         $out->putVarInt32(32);
         $out->putVarInt32($this->mode);
       }
+      $this->checkProtoArray($this->mutation);
+      foreach ($this->mutation as $value) {
+        $out->putVarInt32(42);
+        $out->putVarInt32($value->byteSizePartial());
+        $value->outputPartial($out);
+      }
+      if (isset($this->ignore_read_only)) {
+        $out->putVarInt32(48);
+        $out->putBoolean($this->ignore_read_only);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -4523,11 +4897,20 @@
             $length = $d->getVarInt32();
             $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
             $d->skip($length);
-            $this->mutableMutation()->tryMerge($tmp);
+            $this->mutableDeprecatedMutation()->tryMerge($tmp);
             break;
           case 32:
             $this->setMode($d->getVarInt32());
             break;
+          case 42:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->addMutation()->tryMerge($tmp);
+            break;
+          case 48:
+            $this->setIgnoreReadOnly($d->getBoolean());
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -4537,7 +4920,10 @@
       };
     }
     public function checkInitialized() {
-      if (isset($this->mutation) && (!$this->mutation->isInitialized())) return 'mutation';
+      if (isset($this->deprecated_mutation) && (!$this->deprecated_mutation->isInitialized())) return 'deprecated_mutation';
+      foreach ($this->mutation as $value) {
+        if (!$value->isInitialized()) return 'mutation';
+      }
       return null;
     }
     public function mergeFrom($x) {
@@ -4545,21 +4931,33 @@
       if ($x->hasTransaction()) {
         $this->setTransaction($x->getTransaction());
       }
-      if ($x->hasMutation()) {
-        $this->mutableMutation()->mergeFrom($x->getMutation());
+      if ($x->hasDeprecatedMutation()) {
+        $this->mutableDeprecatedMutation()->mergeFrom($x->getDeprecatedMutation());
       }
       if ($x->hasMode()) {
         $this->setMode($x->getMode());
       }
+      foreach ($x->getMutationList() as $v) {
+        $this->addMutation()->copyFrom($v);
+      }
+      if ($x->hasIgnoreReadOnly()) {
+        $this->setIgnoreReadOnly($x->getIgnoreReadOnly());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
       if (isset($this->transaction) !== isset($x->transaction)) return false;
       if (isset($this->transaction) && $this->transaction !== $x->transaction) return false;
-      if (isset($this->mutation) !== isset($x->mutation)) return false;
-      if (isset($this->mutation) && !$this->mutation->equals($x->mutation)) return false;
+      if (isset($this->deprecated_mutation) !== isset($x->deprecated_mutation)) return false;
+      if (isset($this->deprecated_mutation) && !$this->deprecated_mutation->equals($x->deprecated_mutation)) return false;
       if (isset($this->mode) !== isset($x->mode)) return false;
       if (isset($this->mode) && $this->mode !== $x->mode) return false;
+      if (sizeof($this->mutation) !== sizeof($x->mutation)) return false;
+      foreach (array_map(null, $this->mutation, $x->mutation) as $v) {
+        if (!$v[0]->equals($v[1])) return false;
+      }
+      if (isset($this->ignore_read_only) !== isset($x->ignore_read_only)) return false;
+      if (isset($this->ignore_read_only) && $this->ignore_read_only !== $x->ignore_read_only) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -4567,56 +4965,132 @@
       if (isset($this->transaction)) {
         $res .= $prefix . "transaction: " . $this->debugFormatString($this->transaction) . "\n";
       }
-      if (isset($this->mutation)) {
-        $res .= $prefix . "mutation <\n" . $this->mutation->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      if (isset($this->deprecated_mutation)) {
+        $res .= $prefix . "deprecated_mutation <\n" . $this->deprecated_mutation->shortDebugString($prefix . "  ") . $prefix . ">\n";
       }
       if (isset($this->mode)) {
         $res .= $prefix . "mode: " . ($this->mode) . "\n";
       }
+      foreach ($this->mutation as $value) {
+        $res .= $prefix . "mutation <\n" . $value->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      if (isset($this->ignore_read_only)) {
+        $res .= $prefix . "ignore_read_only: " . $this->debugFormatBool($this->ignore_read_only) . "\n";
+      }
       return $res;
     }
   }
 }
 namespace google\appengine\datastore\v4 {
   class CommitResponse extends \google\net\ProtocolMessage {
-    public function getMutationResult() {
-      if (!isset($this->mutation_result)) {
-        return new \google\appengine\datastore\v4\MutationResult();
+    private $mutation_result = array();
+    public function getDeprecatedMutationResult() {
+      if (!isset($this->deprecated_mutation_result)) {
+        return new \google\appengine\datastore\v4\DeprecatedMutationResult();
       }
-      return $this->mutation_result;
+      return $this->deprecated_mutation_result;
     }
-    public function mutableMutationResult() {
-      if (!isset($this->mutation_result)) {
-        $res = new \google\appengine\datastore\v4\MutationResult();
-        $this->mutation_result = $res;
+    public function mutableDeprecatedMutationResult() {
+      if (!isset($this->deprecated_mutation_result)) {
+        $res = new \google\appengine\datastore\v4\DeprecatedMutationResult();
+        $this->deprecated_mutation_result = $res;
         return $res;
       }
-      return $this->mutation_result;
+      return $this->deprecated_mutation_result;
     }
-    public function clearMutationResult() {
-      if (isset($this->mutation_result)) {
-        unset($this->mutation_result);
+    public function clearDeprecatedMutationResult() {
+      if (isset($this->deprecated_mutation_result)) {
+        unset($this->deprecated_mutation_result);
       }
     }
-    public function hasMutationResult() {
-      return isset($this->mutation_result);
+    public function hasDeprecatedMutationResult() {
+      return isset($this->deprecated_mutation_result);
+    }
+    public function getMutationResultSize() {
+      return sizeof($this->mutation_result);
+    }
+    public function getMutationResultList() {
+      return $this->mutation_result;
+    }
+    public function mutableMutationResult($idx) {
+      if (!isset($this->mutation_result[$idx])) {
+        $val = new \google\appengine\datastore\v4\MutationResult();
+        $this->mutation_result[$idx] = $val;
+        return $val;
+      }
+      return $this->mutation_result[$idx];
+    }
+    public function getMutationResult($idx) {
+      if (isset($this->mutation_result[$idx])) {
+        return $this->mutation_result[$idx];
+      }
+      if ($idx >= end(array_keys($this->mutation_result))) {
+        throw new \OutOfRangeException('index out of range: ' + $idx);
+      }
+      return new \google\appengine\datastore\v4\MutationResult();
+    }
+    public function addMutationResult() {
+      $val = new \google\appengine\datastore\v4\MutationResult();
+      $this->mutation_result[] = $val;
+      return $val;
+    }
+    public function clearMutationResult() {
+      $this->mutation_result = array();
+    }
+    public function getIndexUpdates() {
+      if (!isset($this->index_updates)) {
+        return 0;
+      }
+      return $this->index_updates;
+    }
+    public function setIndexUpdates($val) {
+      $this->index_updates = $val;
+      return $this;
+    }
+    public function clearIndexUpdates() {
+      unset($this->index_updates);
+      return $this;
+    }
+    public function hasIndexUpdates() {
+      return isset($this->index_updates);
     }
     public function clear() {
+      $this->clearDeprecatedMutationResult();
       $this->clearMutationResult();
+      $this->clearIndexUpdates();
     }
     public function byteSizePartial() {
       $res = 0;
-      if (isset($this->mutation_result)) {
+      if (isset($this->deprecated_mutation_result)) {
         $res += 1;
-        $res += $this->lengthString($this->mutation_result->byteSizePartial());
+        $res += $this->lengthString($this->deprecated_mutation_result->byteSizePartial());
+      }
+      $this->checkProtoArray($this->mutation_result);
+      $res += 1 * sizeof($this->mutation_result);
+      foreach ($this->mutation_result as $value) {
+        $res += $this->lengthString($value->byteSizePartial());
+      }
+      if (isset($this->index_updates)) {
+        $res += 1;
+        $res += $this->lengthVarInt64($this->index_updates);
       }
       return $res;
     }
     public function outputPartial($out) {
-      if (isset($this->mutation_result)) {
+      if (isset($this->deprecated_mutation_result)) {
         $out->putVarInt32(10);
-        $out->putVarInt32($this->mutation_result->byteSizePartial());
-        $this->mutation_result->outputPartial($out);
+        $out->putVarInt32($this->deprecated_mutation_result->byteSizePartial());
+        $this->deprecated_mutation_result->outputPartial($out);
+      }
+      $this->checkProtoArray($this->mutation_result);
+      foreach ($this->mutation_result as $value) {
+        $out->putVarInt32(26);
+        $out->putVarInt32($value->byteSizePartial());
+        $value->outputPartial($out);
+      }
+      if (isset($this->index_updates)) {
+        $out->putVarInt32(32);
+        $out->putVarInt32($this->index_updates);
       }
     }
     public function tryMerge($d) {
@@ -4627,7 +5101,16 @@
             $length = $d->getVarInt32();
             $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
             $d->skip($length);
-            $this->mutableMutationResult()->tryMerge($tmp);
+            $this->mutableDeprecatedMutationResult()->tryMerge($tmp);
+            break;
+          case 26:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->addMutationResult()->tryMerge($tmp);
+            break;
+          case 32:
+            $this->setIndexUpdates($d->getVarInt32());
             break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
@@ -4638,25 +5121,46 @@
       };
     }
     public function checkInitialized() {
-      if (isset($this->mutation_result) && (!$this->mutation_result->isInitialized())) return 'mutation_result';
+      if (isset($this->deprecated_mutation_result) && (!$this->deprecated_mutation_result->isInitialized())) return 'deprecated_mutation_result';
+      foreach ($this->mutation_result as $value) {
+        if (!$value->isInitialized()) return 'mutation_result';
+      }
       return null;
     }
     public function mergeFrom($x) {
       if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
-      if ($x->hasMutationResult()) {
-        $this->mutableMutationResult()->mergeFrom($x->getMutationResult());
+      if ($x->hasDeprecatedMutationResult()) {
+        $this->mutableDeprecatedMutationResult()->mergeFrom($x->getDeprecatedMutationResult());
+      }
+      foreach ($x->getMutationResultList() as $v) {
+        $this->addMutationResult()->copyFrom($v);
+      }
+      if ($x->hasIndexUpdates()) {
+        $this->setIndexUpdates($x->getIndexUpdates());
       }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
-      if (isset($this->mutation_result) !== isset($x->mutation_result)) return false;
-      if (isset($this->mutation_result) && !$this->mutation_result->equals($x->mutation_result)) return false;
+      if (isset($this->deprecated_mutation_result) !== isset($x->deprecated_mutation_result)) return false;
+      if (isset($this->deprecated_mutation_result) && !$this->deprecated_mutation_result->equals($x->deprecated_mutation_result)) return false;
+      if (sizeof($this->mutation_result) !== sizeof($x->mutation_result)) return false;
+      foreach (array_map(null, $this->mutation_result, $x->mutation_result) as $v) {
+        if (!$v[0]->equals($v[1])) return false;
+      }
+      if (isset($this->index_updates) !== isset($x->index_updates)) return false;
+      if (isset($this->index_updates) && !$this->integerEquals($this->index_updates, $x->index_updates)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
       $res = '';
-      if (isset($this->mutation_result)) {
-        $res .= $prefix . "mutation_result <\n" . $this->mutation_result->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      if (isset($this->deprecated_mutation_result)) {
+        $res .= $prefix . "deprecated_mutation_result <\n" . $this->deprecated_mutation_result->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      foreach ($this->mutation_result as $value) {
+        $res .= $prefix . "mutation_result <\n" . $value->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      if (isset($this->index_updates)) {
+        $res .= $prefix . "index_updates: " . $this->debugFormatInt32($this->index_updates) . "\n";
       }
       return $res;
     }
@@ -4929,44 +5433,44 @@
 }
 namespace google\appengine\datastore\v4 {
   class WriteRequest extends \google\net\ProtocolMessage {
-    public function getMutation() {
-      if (!isset($this->mutation)) {
-        return new \google\appengine\datastore\v4\Mutation();
+    public function getDeprecatedMutation() {
+      if (!isset($this->deprecated_mutation)) {
+        return new \google\appengine\datastore\v4\DeprecatedMutation();
       }
-      return $this->mutation;
+      return $this->deprecated_mutation;
     }
-    public function mutableMutation() {
-      if (!isset($this->mutation)) {
-        $res = new \google\appengine\datastore\v4\Mutation();
-        $this->mutation = $res;
+    public function mutableDeprecatedMutation() {
+      if (!isset($this->deprecated_mutation)) {
+        $res = new \google\appengine\datastore\v4\DeprecatedMutation();
+        $this->deprecated_mutation = $res;
         return $res;
       }
-      return $this->mutation;
+      return $this->deprecated_mutation;
     }
-    public function clearMutation() {
-      if (isset($this->mutation)) {
-        unset($this->mutation);
+    public function clearDeprecatedMutation() {
+      if (isset($this->deprecated_mutation)) {
+        unset($this->deprecated_mutation);
       }
     }
-    public function hasMutation() {
-      return isset($this->mutation);
+    public function hasDeprecatedMutation() {
+      return isset($this->deprecated_mutation);
     }
     public function clear() {
-      $this->clearMutation();
+      $this->clearDeprecatedMutation();
     }
     public function byteSizePartial() {
       $res = 0;
-      if (isset($this->mutation)) {
+      if (isset($this->deprecated_mutation)) {
         $res += 1;
-        $res += $this->lengthString($this->mutation->byteSizePartial());
+        $res += $this->lengthString($this->deprecated_mutation->byteSizePartial());
       }
       return $res;
     }
     public function outputPartial($out) {
-      if (isset($this->mutation)) {
+      if (isset($this->deprecated_mutation)) {
         $out->putVarInt32(10);
-        $out->putVarInt32($this->mutation->byteSizePartial());
-        $this->mutation->outputPartial($out);
+        $out->putVarInt32($this->deprecated_mutation->byteSizePartial());
+        $this->deprecated_mutation->outputPartial($out);
       }
     }
     public function tryMerge($d) {
@@ -4977,7 +5481,7 @@
             $length = $d->getVarInt32();
             $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
             $d->skip($length);
-            $this->mutableMutation()->tryMerge($tmp);
+            $this->mutableDeprecatedMutation()->tryMerge($tmp);
             break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
@@ -4988,25 +5492,25 @@
       };
     }
     public function checkInitialized() {
-      if ((!isset($this->mutation)) || (!$this->mutation->isInitialized())) return 'mutation';
+      if ((!isset($this->deprecated_mutation)) || (!$this->deprecated_mutation->isInitialized())) return 'deprecated_mutation';
       return null;
     }
     public function mergeFrom($x) {
       if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
-      if ($x->hasMutation()) {
-        $this->mutableMutation()->mergeFrom($x->getMutation());
+      if ($x->hasDeprecatedMutation()) {
+        $this->mutableDeprecatedMutation()->mergeFrom($x->getDeprecatedMutation());
       }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
-      if (isset($this->mutation) !== isset($x->mutation)) return false;
-      if (isset($this->mutation) && !$this->mutation->equals($x->mutation)) return false;
+      if (isset($this->deprecated_mutation) !== isset($x->deprecated_mutation)) return false;
+      if (isset($this->deprecated_mutation) && !$this->deprecated_mutation->equals($x->deprecated_mutation)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
       $res = '';
-      if (isset($this->mutation)) {
-        $res .= $prefix . "mutation <\n" . $this->mutation->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      if (isset($this->deprecated_mutation)) {
+        $res .= $prefix . "deprecated_mutation <\n" . $this->deprecated_mutation->shortDebugString($prefix . "  ") . $prefix . ">\n";
       }
       return $res;
     }
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php
index 59b58fb..d566f2e 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php
@@ -254,6 +254,24 @@
   }
 
   /**
+   * Subclass can override this method to return the metadata of the underlying
+   * GCS object.
+   */
+  public function getMetaData() {
+    trigger_error(sprintf("%s does not have metadata", get_class($this)));
+    return false;
+  }
+
+  /**
+   * Subclass can override this method to return the MIME content type of the
+   * underlying GCS object.
+   */
+  public function getContentType() {
+    trigger_error(sprintf("%s does not have content type", get_class($this)));
+    return false;
+  }
+
+  /**
    * Get the OAuth Token HTTP header for the supplied scope.
    *
    * @param $scopes mixed The scopes to acquire the token for.
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageDirectoryClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageDirectoryClient.php
index 0c78034..576011a 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageDirectoryClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageDirectoryClient.php
@@ -43,15 +43,18 @@
   // The current list of files we're enumerating through
   private $current_file_list = null;
 
-  public function __construct($bucket_name, $object_prefix, $context) {
-    parent::__construct($bucket_name, $object_prefix, $context);
-    // Ignore the leading slash
-    if (isset($object_prefix)) {
-      if (!StringUtil::endsWith($object_prefix, '/')) {
-        $object_prefix .= '/';
-      }
-      $this->prefix = substr($object_prefix, 1);
+  public function __construct($bucket_name, $object_name, $context) {
+    // $object_name should end with a trailing slash.
+    if (!StringUtil::endsWith($object_name, parent::DELIMITER)) {
+      $object_name = $object_name . parent::DELIMITER;
     }
+
+    // $prefix is the $object_name without leading slash.
+    if (strlen($object_name) > 1) {
+      $this->prefix = substr($object_name, 1);
+    }
+
+    parent::__construct($bucket_name, $object_name, $context);
   }
 
   /**
@@ -124,7 +127,6 @@
    */
   public function mkdir($options) {
     $report_errors = ($options | STREAM_REPORT_ERRORS) != 0;
-    $path = $this->getCorrectPathForDirectoryName();
     $headers = $this->getOAuthTokenHeader(parent::WRITE_SCOPE);
     if ($headers === false) {
       if ($report_errors) {
@@ -136,7 +138,8 @@
     // Use x-goog-if-generation-match so we only create a new object.
     $headers['x-goog-if-generation-match'] = 0;
     $headers['Content-Range'] = sprintf(parent::FINAL_CONTENT_RANGE_NO_DATA, 0);
-    $url = $this->createObjectUrl($this->bucket_name, $path);
+
+    $url = $this->createObjectUrl($this->bucket_name, $this->object_name);
     $http_response = $this->makeHttpRequest($url, "PUT", $headers);
 
     if (false === $http_response) {
@@ -190,8 +193,7 @@
       return false;
     }
 
-    $path = $this->getCorrectPathForDirectoryName();
-    $url = $this->createObjectUrl($this->bucket_name, $path);
+    $url = $this->createObjectUrl($this->bucket_name, $this->object_name);
     $http_response = $this->makeHttpRequest($url, "DELETE", $headers);
 
     if (false === $http_response) {
@@ -262,32 +264,33 @@
     $prefix_len = isset($this->prefix) ? strlen($this->prefix) : 0;
     foreach($xml->Contents as $content) {
       $key = (string) $content->Key;
+
+      // Skip objects end with "_$folder$" or "/" as they exist solely for
+      // the purpose of representing empty directories. Since we create
+      // empty direcotires using the delimiter ("/"), they will always be
+      // captured in the <CommonPrefixies> section.
+      if (StringUtil::endsWith($key, parent::FOLDER_SUFFIX) ||
+          StringUtil::endsWith($key, parent::DELIMITER)) {
+        continue;
+      }
+
       if ($prefix_len != 0) {
         $key = substr($key, $prefix_len);
       }
-      // If the key ends with FOLDER_SUFFIX then replace that value with a '/'
-      // to be consistent with the folder behaviour of Google Cloud Storage
-      // Manager, which supports the creating of 'folders' in the UI. See
-      // https://developers.google.com/storage/docs/gsmanager
-      if (StringUtil::endsWith($key, self::FOLDER_SUFFIX)) {
-        $key = substr_replace($key,
-                              parent::DELIMITER,
-                              -strlen(parent::FOLDER_SUFFIX));
+
+      array_push($this->current_file_list, $key);
+    }
+
+    // All "Subdirectories" are listed as <CommonPrefixes>. See
+    // https://developers.google.com/storage/docs/reference-methods#getbucket
+    foreach($xml->CommonPrefixes as $common_prefixes) {
+      $key = (string) $common_prefixes->Prefix;
+      if ($prefix_len != 0) {
+        $key = substr($key, $prefix_len);
       }
       array_push($this->current_file_list, $key);
     }
-    return true;
-  }
 
-  private function getCorrectPathForDirectoryName() {
-    // Replace the trailing MARKER from the prefix and replace it with the
-    // FOLDER_SUFFIX.
-    if (StringUtil::endsWith($this->object_name, parent::DELIMITER)) {
-      return substr_replace($this->object_name,
-                            parent::FOLDER_SUFFIX,
-                            -strlen(parent::DELIMITER));
-    } else {
-      return $this->object_name . parent::FOLDER_SUFFIX;
-    }
+    return true;
   }
 }
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
index b2596e1..4535987 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
@@ -29,6 +29,8 @@
  * Google Cloud Storage Client for reading objects.
  */
 final class CloudStorageReadClient extends CloudStorageClient {
+  const METADATA_HEADER_PREFIX = 'x-goog-meta-';
+
   // Buffer for storing data.
   private $read_buffer;
 
@@ -54,6 +56,12 @@
   // we then return in calls to stat()
   private $stat_result = [];
 
+  // Metadata for the object as it was first read.
+  private $metadata = [];
+
+  // Content-Type for the object as it was first read.
+  private $content_type;
+
   // HTTP status codes that indicate that there is an object to read, and we
   // need to process the response.
   private static $valid_status_codes = [HttpResponse::OK,
@@ -181,6 +189,14 @@
     return $this->buffer_read_position + $this->object_block_start_position;
   }
 
+  public function getMetaData() {
+    return $this->metadata;
+  }
+
+  public function getContentType() {
+    return $this->content_type;
+  }
+
   /**
    * Override the makeHttpRequest function so we can implement caching.
    * If caching is enabled then we try and retrieve a matching request for the
@@ -315,6 +331,11 @@
       }
     }
 
+    $this->metadata = self::extractMetaData($http_response['headers']);
+
+    $this->content_type = $this->getHeaderValue('Content-Type',
+                                                $http_response['headers']);
+
     $this->object_etag =
         $this->getHeaderValue('ETag', $http_response['headers']);
 
@@ -335,5 +356,21 @@
 
     return true;
   }
+
+  /**
+   * Extract metadata from HTTP response headers.
+   */
+  private static function extractMetaData($headers) {
+    $metadata = [];
+    foreach($headers as $key => $value) {
+      if (StringUtil::startsWith(strtolower($key),
+                                 self::METADATA_HEADER_PREFIX)) {
+        $metadata_key = substr($key, strlen(self::METADATA_HEADER_PREFIX));
+        $metadata[$metadata_key] = $value;
+      }
+    }
+
+    return $metadata;
+  }
 }
 
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapper.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapper.php
index 14dbe8a..d6a1303 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapper.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapper.php
@@ -77,6 +77,11 @@
       return false;
     }
 
+    // Assume opening root directory if no object name is specified in path.
+    if (!isset($object)) {
+      $object = "/";
+    }
+
     $this->client = new CloudStorageDirectoryClient($bucket,
                                                     $object,
                                                     $this->context);
@@ -152,6 +157,26 @@
                     E_USER_ERROR);
       return false;
     }
+
+    // If the file being renamed is an uploaded file being moved to an allowed
+    // include bucket trigger a warning.
+    $allowed_buckets = $this->getAllowedBuckets();
+    foreach ($_FILES as $file) {
+      if ($file['tmp_name'] == $from) {
+        foreach ($allowed_buckets as $allowed_bucket) {
+          // 5th character indicates start of bucket since it ignores 'gs://'.
+          if (strpos($to, $allowed_bucket) === 5) {
+            trigger_error(sprintf('Moving uploaded file (%s) to an allowed ' .
+                                  'include bucket (%s) which may be ' .
+                                  'vulnerable to local file inclusion (LFI).',
+                                  $from, $allowed_bucket),
+                          E_USER_WARNING);
+            break 2;
+          }
+        }
+      }
+    }
+
     $client = new CloudStorageRenameClient($from_bucket,
                                            $from_object,
                                            $to_bucket,
@@ -219,12 +244,19 @@
     }
 
     if (($options & self::STREAM_OPEN_FOR_INCLUDE) != 0) {
-      $allowed_buckets = explode(",", GAE_INCLUDE_GS_BUCKETS);
+      $allowed_buckets = $this->getAllowedBuckets();
       $include_allowed = false;
       foreach ($allowed_buckets as $bucket_name) {
-        $bucket_name = trim($bucket_name);
+        // Check if the allowed bucket includes a path restriction and if so
+        // separate the path from the bucket name.
+        if (strpos($bucket_name, '/') !== false) {
+          list($bucket_name, $object_path) = explode('/', $bucket_name, 2);
+        }
         if ($bucket_name === $bucket) {
-          $include_allowed = true;
+          // If a path restriction is set then ensure that the object either
+          // starts with or is equal to the path.
+          $include_allowed = !isset($object_path) ||
+              (isset($object) && strpos($object, $object_path) === 1);
           break;
         }
       }
@@ -326,4 +358,22 @@
     return $client->stat();
   }
 
+  private function getAllowedBuckets() {
+    static $allowed_buckets;
+
+    if (!isset($allowed_buckets)) {
+      $allowed_buckets = explode(',', GAE_INCLUDE_GS_BUCKETS);
+      $allowed_buckets = array_map('trim', $allowed_buckets);
+    }
+
+    return $allowed_buckets;
+  }
+
+  public function getMetaData() {
+    return $this->client->getMetaData();
+  }
+
+  public function getContentType() {
+    return $this->client->getContentType();
+  }
 }
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
index 2a7d9c1..1ba6cc9 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
@@ -82,7 +82,7 @@
     $this->_SERVER = $_SERVER;
 
     if (!defined("GAE_INCLUDE_GS_BUCKETS")) {
-      define("GAE_INCLUDE_GS_BUCKETS", "foo, bucket, bar");
+      define("GAE_INCLUDE_GS_BUCKETS", "foo, bucket/object_name.png, bar, to_bucket");
     }
 
     stream_wrapper_register("gs",
@@ -685,14 +685,17 @@
     $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
     $request_headers = $this->getStandardRequestHeaders();
     $last_modified = 'Mon, 01 Jul 2013 10:02:46 GMT';
-    $file_results = [
-        ['key' => 'a/b_$folder$', 'size' => '0', 'mtime' => $last_modified],
-    ];
+    $file_results = [];
+    $common_prefixes_results = ['name' => 'a/b/'];
     $response = [
         'status_code' => 200,
         'headers' => [
         ],
-        'body' => $this->makeGetBucketXmlResponse('a/b', $file_results),
+        'body' => $this->makeGetBucketXmlResponse(
+            'a/b',
+            $file_results,
+            null,
+            $common_prefixes_results),
     ];
     $expected_url = $this->makeCloudStorageObjectUrl('bucket', null);
     $expected_query = http_build_query([
@@ -856,8 +859,21 @@
     $from = "gs://bucket/object.png";
     $to = "gs://to_bucket/to.png";
 
+    // Simulate the rename is acting on a uploaded file which is then being
+    // moved into the allowed include bucket which will trigger a warning.
+    $_FILES['foo']['tmp_name'] = $from;
+
     $this->assertTrue(rename($from, $to));
     $this->apiProxyMock->verify();
+
+    $this->assertEquals(
+      [['errno' => E_USER_WARNING,
+        'errstr' => sprintf('Moving uploaded file (%s) to an allowed include ' .
+                            'bucket (%s) which may be vulnerable to local ' .
+                            'file inclusion (LFI).', $from, 'to_bucket')]],
+      $this->triggered_errors);
+
+    $_FILES = [];
   }
 
   public function testRenameObjectWithContextSuccess() {
@@ -1150,6 +1166,77 @@
         $this->triggered_errors[0]);
   }
 
+  /**
+   * @dataProvider supportedStreamReadModes
+   */
+  public function testReadMetaDataAndContentTypeInReadMode($mode) {
+    $metadata = ["foo" => "far", "bar" => "boo"];
+    $this->expectFileReadRequest("Test data",
+                                 0,
+                                 CloudStorageReadClient::DEFAULT_READ_SIZE,
+                                 null,
+                                 null,
+                                 $metadata,
+                                 "image/png");
+
+    $stream = new CloudStorageStreamWrapper();
+    $this->assertTrue($stream->stream_open("gs://bucket/object_name.png",
+                                           $mode,
+                                           0,
+                                           $unused));
+
+    $this->assertEquals($metadata, $stream->getMetaData());
+    $this->assertEquals("image/png", $stream->getContentType());
+  }
+
+  /**
+   * @dataProvider supportedStreamWriteModes
+   */
+  public function testReadMetaDataAndContentTypeInWriteMode($mode) {
+    $metadata = ["foo" => "far", "bar" => "boo"];
+
+    $expected_url = $this->makeCloudStorageObjectUrl();
+    $this->expectFileWriteStartRequest("image/png",
+                                       "public-read",
+                                       "foo_upload_id",
+                                       $expected_url,
+                                       $metadata);
+
+    $context = [
+        "gs" => [
+            "acl" => "public-read",
+            "Content-Type" => "image/png",
+            "metadata" => $metadata
+        ],
+    ];
+    stream_context_set_default($context);
+
+    $stream = new CloudStorageStreamWrapper();
+    $this->assertTrue($stream->stream_open("gs://bucket/object.png",
+                                           $mode,
+                                           0,
+                                           $unused));
+
+    $this->assertEquals($metadata, $stream->getMetaData());
+    $this->assertEquals("image/png", $stream->getContentType());
+  }
+
+  /**
+   * DataProvider for
+   * - testReadMetaDataAndContentTypeInReadMode
+   */
+  public function supportedStreamReadModes() {
+    return [["r"], ["rt"], ["rb"]];
+  }
+
+  /**
+   * DataProvider for
+   * - testReadMetaDataAndContentTypeInWriteMode
+   */
+  public function supportedStreamWriteModes() {
+    return [["w"], ["wt"], ["wb"]];
+  }
+
   public function testWriteLargeObjectSuccess() {
     $data_to_write = str_repeat("1234567890", 100000);
     $data_len = strlen($data_to_write);
@@ -1285,12 +1372,24 @@
     $this->apiProxyMock->verify();
   }
 
-  public function testOpenDirEmptyBucket() {
-    $this->assertFalse(opendir("gs:///"));
-    $this->assertEquals(
-        ["errno" => E_USER_ERROR,
-         "errstr" => "Invalid Google Cloud Storage path: gs:///"],
-        $this->triggered_errors[0]);
+  public function testInvalidDirectoryForInclude() {
+    // Uses GAE_INCLUDE_GS_BUCKETS, which is not defined.
+    stream_wrapper_unregister('gs');
+    stream_wrapper_register('gs',
+        '\\google\\appengine\\ext\\cloud_storage_streams\\' .
+        'CloudStorageStreamWrapper',
+        0);
+
+    include 'gs://baz/foo/object.php';
+
+    $this->assertEquals(E_WARNING, $this->triggered_errors[0]["errno"]);
+    $this->assertStringStartsWith(
+        'include(gs://baz/foo/object.php): failed to open stream:',
+        $this->triggered_errors[0]["errstr"]);
+    $this->assertEquals(E_WARNING, $this->triggered_errors[1]["errno"]);
+    $this->assertStringStartsWith(
+        "include(): Failed opening 'gs://baz/foo/object.php'",
+        $this->triggered_errors[1]["errstr"]);
   }
 
   public function testOpenDirNoBucket() {
@@ -1301,16 +1400,77 @@
         $this->triggered_errors[0]);
   }
 
-  public function testReaddirSuccess() {
+  public function testOpenDirEmptyBucket() {
+    $this->assertFalse(opendir("gs:///"));
+    $this->assertEquals(
+        ["errno" => E_USER_ERROR,
+         "errstr" => "Invalid Google Cloud Storage path: gs:///"],
+        $this->triggered_errors[0]);
+  }
+  /**
+   * DataProvider for
+   * - testReadRootDirSuccess
+   */
+  public function rootDirPath() {
+    return [["gs://bucket"], ["gs://bucket/"]];
+  }
+
+  /**
+   * @dataProvider rootDirPath
+   */
+  public function testReadRootDirSuccess($path) {
     $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
 
     $request_headers = $this->getStandardRequestHeaders();
-    $file_results = ['f/file1.txt', 'f/file2.txt', 'f/sub_$folder$'];
+    $file_results = ['file1.txt', 'file2.txt', 'file3.txt' ];
+    $common_prefixes_results = ['dir/'];
     $response = [
         'status_code' => 200,
         'headers' => [
         ],
-        'body' => $this->makeGetBucketXmlResponse("f/", $file_results),
+        'body' => $this->makeGetBucketXmlResponse(
+            "",
+            $file_results,
+            null,
+            $common_prefixes_results),
+    ];
+    $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
+    $expected_query = http_build_query([
+        "delimiter" => CloudStorageDirectoryClient::DELIMITER,
+        "max-keys" => CloudStorageDirectoryClient::MAX_KEYS,
+    ]);
+
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $res = opendir($path);
+    $this->assertEquals("file1.txt", readdir($res));
+    $this->assertEquals("file2.txt", readdir($res));
+    $this->assertEquals("file3.txt", readdir($res));
+    $this->assertEquals("dir/", readdir($res));
+    $this->assertFalse(readdir($res));
+    closedir($res);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testReadADirSuccess() {
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+
+    $request_headers = $this->getStandardRequestHeaders();
+    $file_results = ['f/file1.txt', 'f/file2.txt', 'f/', 'f_$folder$'];
+    $common_prefixes_results = ['f/sub/'];
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+        'body' => $this->makeGetBucketXmlResponse(
+            "f/",
+            $file_results,
+            null,
+            $common_prefixes_results),
     ];
     $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
     $expected_query = http_build_query([
@@ -1459,7 +1619,23 @@
     $this->apiProxyMock->verify();
   }
 
+  public function testMkDirNoBucket() {
+    $this->assertFalse(mkdir("gs://"));
+    $this->assertEquals(
+        [["errno" => E_USER_ERROR,
+          "errstr" => "Invalid Google Cloud Storage path: gs://"]],
+        $this->triggered_errors);
+  }
+
   public function testMkDirBucketWithoutObject() {
+    $this->assertFalse(mkdir("gs://bucket"));
+    $this->assertEquals(
+        [["errno" => E_USER_ERROR,
+          "errstr" => "Invalid Google Cloud Storage path: gs://bucket"]],
+        $this->triggered_errors);
+  }
+
+  public function testMkDirRootObject() {
     $this->assertFalse(mkdir("gs://bucket_without_object/"));
     $this->assertEquals(
         [["errno" => E_USER_ERROR,
@@ -1468,15 +1644,7 @@
         $this->triggered_errors);
   }
 
-  public function testMkDirBucketWithoutBucket() {
-    $this->assertFalse(mkdir("gs://"));
-    $this->assertEquals(
-        [["errno" => E_USER_ERROR,
-          "errstr" => "Invalid Google Cloud Storage path: gs://"]],
-        $this->triggered_errors);
-  }
-
-  public function testMkDirSuccess() {
+  public function testMkDirWithTrailingDelimieterSuccess() {
     $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
     $request_headers = [
         "Authorization" => "OAuth foo token",
@@ -1492,7 +1660,35 @@
     ];
 
     $expected_url = $this->makeCloudStorageObjectUrl('bucket',
-                                                     '/dira/dirb_$folder$');
+                                                     '/dira/dirb/');
+    $this->expectHttpRequest($expected_url,
+                             RequestMethod::PUT,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $this->assertTrue(mkdir("gs://bucket/dira/dirb/"));
+    $this->apiProxyMock->verify();
+  }
+
+
+  public function testMkDirWithoutTrailingDelimiterSuccess() {
+    $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
+    $request_headers = [
+        "Authorization" => "OAuth foo token",
+        "x-goog-if-generation-match" => 0,
+        "Content-Range" => "bytes */0",
+        "x-goog-api-version" => 2,
+    ];
+
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+    ];
+
+    $expected_url = $this->makeCloudStorageObjectUrl('bucket',
+                                                     '/dira/dirb/');
     $this->expectHttpRequest($expected_url,
                              RequestMethod::PUT,
                              $request_headers,
@@ -1503,15 +1699,6 @@
     $this->apiProxyMock->verify();
   }
 
-  public function testRmDirNoFile() {
-    $this->assertFalse(rmdir("gs://bucket_without_object/"));
-    $this->assertEquals(
-        [["errno" => E_USER_ERROR,
-          "errstr" => "Invalid Google Cloud Storage path: " .
-                      "gs://bucket_without_object/"]],
-        $this->triggered_errors);
-  }
-
   public function testRmDirNoBucket() {
     $this->assertFalse(rmdir("gs://"));
     $this->assertEquals(
@@ -1520,6 +1707,22 @@
         $this->triggered_errors);
   }
 
+  public function testRmDirBucketWithoutObject() {
+    $this->assertFalse(rmdir("gs://bucket"));
+    $this->assertEquals(
+        [["errno" => E_USER_ERROR,
+          "errstr" => "Invalid Google Cloud Storage path: gs://bucket"]],
+        $this->triggered_errors);
+  }
+
+  public function testRmDirRootObject() {
+    $this->assertFalse(rmdir("gs://bucket/"));
+    $this->assertEquals(
+        [["errno" => E_USER_ERROR,
+          "errstr" => "Invalid Google Cloud Storage path: gs://bucket/"]],
+        $this->triggered_errors);
+  }
+
   public function testRmDirSuccess() {
     // Expect a request to list the contents of the bucket to ensure that it is
     // empty.
@@ -1554,8 +1757,7 @@
         ],
     ];
 
-    $expected_url = $this->makeCloudStorageObjectUrl('bucket',
-                                                     '/dira/dirb_$folder$');
+    $expected_url = $this->makeCloudStorageObjectUrl('bucket', '/dira/dirb/');
     $this->expectHttpRequest($expected_url,
                              RequestMethod::DELETE,
                              $request_headers,
@@ -1623,7 +1825,9 @@
                                          $start_byte,
                                          $length,
                                          $etag = null,
-                                         $paritial_content = null) {
+                                         $paritial_content = null,
+                                         $metadata = null,
+                                         $content_type = null) {
     $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
 
     assert($length > 0);
@@ -1641,10 +1845,21 @@
 
     $response_headers = [
         "ETag" => "deadbeef",
-        "Content-Type" => "text/plain",
         "Last-Modified" => "Mon, 02 Jul 2012 01:41:01 GMT",
     ];
 
+    if (isset($content_type)) {
+      $response_headers["Content-Type"] = $content_type;
+    } else {
+      $response_headers["Content-Type"] = "binary/octet-stream";
+    }
+
+    if (isset($metadata)) {
+      foreach ($metadata as $key => $value) {
+        $response_headers["x-goog-meta-" . $key] = $value;
+      }
+    }
+
     $response = $this->createSuccessfulGetHttpResponse($response_headers,
                                                        $body,
                                                        $start_byte,
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php
index 52693c5..aee98b0 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php
@@ -48,9 +48,10 @@
    * the object is a 'file' or a 'directory', by listing the contents of the
    * bucket and then matching the results against the supplied object name.
    *
-   * If a file ends with "_$folder$" then Google Cloud Storage Manager will
-   * show it as a 'folder' in the UI tool, so we consider an object that ends
-   * in "_$folder$" as a directory as well.
+   * If a file ends with "/ then Google Cloud Console will show it as a 'folder'
+   * in the UI tool, so we consider an object that ends in "/" as a directory
+   * as well. For backward compatibility, we also treat files with the
+   * "_$folder$" suffix as folders.
    */
   public function stat() {
     $prefix = $this->prefix;
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php
index 61673ba..2269c28 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php
@@ -70,6 +70,7 @@
 
     if (array_key_exists("Content-Type", $this->context_options)) {
       $headers["Content-Type"] = $this->context_options["Content-Type"];
+      $this->content_type = $this->context_options["Content-Type"];
     }
 
     if (array_key_exists("acl", $this->context_options)) {
@@ -96,6 +97,7 @@
           return false;
         }
         $headers['x-goog-meta-' . $name] = $value;
+        $this->metadata[$name] = $value;
       }
     }
 
@@ -181,6 +183,22 @@
     $this->writeBufferToGS(true);
   }
 
+  public function getMetaData() {
+    if (array_key_exists("metadata", $this->context_options)) {
+      return $this->context_options["metadata"];
+    }
+
+    return [];
+  }
+
+  public function getContentType() {
+    if (array_key_exists("Content-Type", $this->context_options)) {
+      return $this->context_options["Content-Type"];
+    }
+
+    return null;
+  }
+
   private function writeBufferToGS($complete = false) {
     $headers = $this->getOAuthTokenHeader(parent::WRITE_SCOPE);
     if ($headers === false) {
diff --git a/php/sdk/google/appengine/runtime/Memcache.php b/php/sdk/google/appengine/runtime/Memcache.php
index f0715e6..f5e4f02 100644
--- a/php/sdk/google/appengine/runtime/Memcache.php
+++ b/php/sdk/google/appengine/runtime/Memcache.php
@@ -399,8 +399,12 @@
 
     $return_value = array();
     foreach ($response->getItemList() as $item) {
-      $return_value[$item->getKey()] = MemcacheUtils::deserializeValue(
-          $item->getValue(), $item->getFlags());
+      try {
+        $return_value[$item->getKey()] = MemcacheUtils::deserializeValue(
+            $item->getValue(), $item->getFlags());
+      } catch (\UnexpectedValueException $e) {
+        // Skip entries that cannot be deserialized.
+      }
     }
     return $return_value;
   }
diff --git a/php/sdk/google/appengine/runtime/MemcacheTest.php b/php/sdk/google/appengine/runtime/MemcacheTest.php
index 6c8e6a1..8427234 100644
--- a/php/sdk/google/appengine/runtime/MemcacheTest.php
+++ b/php/sdk/google/appengine/runtime/MemcacheTest.php
@@ -200,6 +200,26 @@
     $this->apiProxyMock->verify();
   }
 
+  public function testGetUnexpectedValue() {
+    $memcache = new Memcache();
+
+    $request = new MemcacheGetRequest();
+    $request->addKey("key");
+
+    $response = new MemcacheGetResponse();
+    $item = $response->addItem();
+    $item->setKey("key");
+    $item->setValue("value");
+    $item->setFlags(2);  // Python's picked type.
+
+    $this->apiProxyMock->expectCall('memcache',
+                                    'Get',
+                                    $request,
+                                    $response);
+    $this->assertFalse(memcache_get($memcache, "key"));
+    $this->apiProxyMock->verify();
+  }
+
   public function testGetMany() {
     $memcache = new Memcache();
 
diff --git a/php/sdk/google/appengine/runtime/MemcacheUtils.php b/php/sdk/google/appengine/runtime/MemcacheUtils.php
index e74fa9e..89a37f1 100644
--- a/php/sdk/google/appengine/runtime/MemcacheUtils.php
+++ b/php/sdk/google/appengine/runtime/MemcacheUtils.php
@@ -85,8 +85,8 @@
       case self::TYPE_PHP_SERIALIZED:
         return unserialize($value);
       default:
-        throw new UnexpectedValueException("unexpected type flag: " .
-                                           $type_flag);
+        throw new \UnexpectedValueException("unexpected type flag: " .
+                                            $type_flag);
     }
   }
 
diff --git a/php/sdk/google/appengine/runtime/Memcached.php b/php/sdk/google/appengine/runtime/Memcached.php
index 72dbf07..93d3c34 100644
--- a/php/sdk/google/appengine/runtime/Memcached.php
+++ b/php/sdk/google/appengine/runtime/Memcached.php
@@ -497,8 +497,13 @@
         $cas_token = $item->getCasId();
       }
       $this->result_code = self::RES_SUCCESS;
-      return MemcacheUtils::deserializeValue($item->getValue(),
-                                             $item->getFlags());
+      try {
+        return MemcacheUtils::deserializeValue($item->getValue(),
+                                               $item->getFlags());
+      } catch (\UnexpectedValueException $e) {
+        $this->result_code = self::RES_NOTFOUND;
+        return false;
+      }
     } else {
       $this->result_code = self::RES_NOTFOUND;
       return false;
@@ -625,8 +630,13 @@
 
     $return_value = array();
     foreach ($response->getItemList() as $item) {
-      $return_value[$item->getKey()] = MemcacheUtils::deserializeValue(
-          $item->getValue(), $item->getFlags());
+      try {
+        $return_value[$item->getKey()] = MemcacheUtils::deserializeValue(
+            $item->getValue(), $item->getFlags());
+      } catch (\UnexpectedValueException $e) {
+        // Skip entries that cannot be deserialized.
+        continue;
+      }
       if ($item->hasCasId()) {
         $cas_tokens[$item->getKey()] = $item->getCasId();
       }
diff --git a/php/sdk/google/appengine/runtime/MemcachedTest.php b/php/sdk/google/appengine/runtime/MemcachedTest.php
index 3e424a2..a9aaae7 100644
--- a/php/sdk/google/appengine/runtime/MemcachedTest.php
+++ b/php/sdk/google/appengine/runtime/MemcachedTest.php
@@ -266,6 +266,43 @@
     $this->apiProxyMock->verify();
   }
 
+  public function testGetMultiUnexpectedValue() {
+    $request = new MemcacheGetRequest();
+    $request->addKey("key");
+    $request->addKey("key1");
+    $request->setForCas(true);
+
+    $response = new MemcacheGetResponse();
+    $item = $response->addItem();
+    $item->setKey("key");
+    $item->setValue("value");
+    $item->setFlags(2);  // Python's picked type.
+    $item->setCasId(123456);
+    $item = $response->addItem();
+    $item->setKey("key1");
+    $item->setValue("value1");
+    $item->setFlags(0);  // String.
+    $item->setCasId(789);
+
+    $this->apiProxyMock->expectCall('memcache',
+                                    'Get',
+                                    $request,
+                                    $response);
+    $memcached = new Memcached();
+    $keys = ["key", "key1"];
+    $result = $memcached->getMulti($keys,
+                                   $cas_tokens,
+                                   Memcached::GET_PRESERVE_ORDER);
+    $this->assertTrue(array_key_exists("key", $result));
+    $this->assertNull($result["key"]);
+    $this->assertTrue(array_key_exists("key", $cas_tokens));
+    $this->assertNull($cas_tokens["key"]);
+    $this->assertEquals("value1", $result["key1"]);
+    $this->assertEquals(789, $cas_tokens["key1"]);
+    $this->assertEquals($memcached->getResultCode(), Memcached::RES_SUCCESS);
+    $this->apiProxyMock->verify();
+  }
+
   public function testGetCasSuccess() {
     $request = new MemcacheGetRequest();
     $request->addKey("widgets_key");
@@ -347,6 +384,26 @@
     $this->apiProxyMock->verify();
   }
 
+  public function testGetUnexpectedValue() {
+    $request = new MemcacheGetRequest();
+    $request->addKey("key");
+
+    $response = new MemcacheGetResponse();
+    $item = $response->addItem();
+    $item->setKey("key");
+    $item->setValue("value");
+    $item->setFlags(2);  // Python's picked type.
+
+    $this->apiProxyMock->expectCall('memcache',
+                                    'Get',
+                                    $request,
+                                    $response);
+    $memcached = new Memcached();
+    $this->assertFalse($memcached->get("key"));
+    $this->assertEquals($memcached->getResultCode(), Memcached::RES_NOTFOUND);
+    $this->apiProxyMock->verify();
+  }
+
   public function testSetSuccess() {
     $request = new MemcacheSetRequest();
     $item = $request->addItem();
diff --git a/php/sdk/google/appengine/runtime/autoloader.php b/php/sdk/google/appengine/runtime/autoloader.php
index ccf8d3f..281ec97 100644
--- a/php/sdk/google/appengine/runtime/autoloader.php
+++ b/php/sdk/google/appengine/runtime/autoloader.php
@@ -111,8 +111,11 @@
         'google\appengine\datastore\v4\gqlqueryarg' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\queryresultbatch\moreresultstype' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\queryresultbatch' => 'google/appengine/datastore/datastore_v4_pb.php',
+        'google\appengine\datastore\v4\mutation\operation' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\mutation' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\mutationresult' => 'google/appengine/datastore/datastore_v4_pb.php',
+        'google\appengine\datastore\v4\deprecatedmutation' => 'google/appengine/datastore/datastore_v4_pb.php',
+        'google\appengine\datastore\v4\deprecatedmutationresult' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\readoptions\readconsistency' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\readoptions' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\lookuprequest' => 'google/appengine/datastore/datastore_v4_pb.php',
@@ -131,6 +134,7 @@
         'google\appengine\datastore\v4\allocateidsrequest' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\allocateidsresponse' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\writerequest' => 'google/appengine/datastore/datastore_v4_pb.php',
+        'google\appengine_datastore_v3\internalheader' => 'google/appengine/datastore/datastore_v3_pb.php',
         'google\appengine_datastore_v3\transaction' => 'google/appengine/datastore/datastore_v3_pb.php',
         'google\appengine_datastore_v3\query\hint' => 'google/appengine/datastore/datastore_v3_pb.php',
         'google\appengine_datastore_v3\query\filter\operator' => 'google/appengine/datastore/datastore_v3_pb.php',
diff --git a/php/sdk/google/appengine/runtime/proto/Encoder.php b/php/sdk/google/appengine/runtime/proto/Encoder.php
index b20d71a..1f57029 100644
--- a/php/sdk/google/appengine/runtime/proto/Encoder.php
+++ b/php/sdk/google/appengine/runtime/proto/Encoder.php
@@ -53,7 +53,7 @@
     }
 
     // Cheaper approximate check, then more expensive precise check.
-    // On ia32 we cannot distinguish MAX_INT32 and values larges than
+    // On ia32 we cannot distinguish MAX_INT32 and values larger than
     // MAX_INT32 without bcmath.
     if ($val >= Decoder::MAX_INT32 && bccomp($val, Decoder::MAX_INT32) > 0) {
       throw new ProtocolBufferEncodeError(
diff --git a/php_cli.py b/php_cli.py
new file mode 100644
index 0000000..c47660d
--- /dev/null
+++ b/php_cli.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Convenience wrapper for starting an appengine tool."""
+
+
+import os
+import sys
+
+
+
+
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
+
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 7))
+
+_DIR_PATH = wrapper_util.get_dir_path(__file__, os.path.join('lib', 'ipaddr'))
+_PATHS = wrapper_util.Paths(_DIR_PATH)
+
+
+
+
+
+EXTRA_PATHS = _PATHS.v2_extra_paths
+
+
+def fix_sys_path(extra_extra_paths=()):
+  """Fix the sys.path to include our extra paths.
+
+  fix_sys_path should be called before running testbed-based unit tests so that
+  third-party modules are correctly added to sys.path.
+  """
+  sys.path[1:1] = EXTRA_PATHS
+
+
+def _run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
+  script_name = os.path.basename(file_path)
+
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
+
+
+
+
+
+
+
+  if 'google' in sys.modules:
+    del sys.modules['google']
+
+  execfile(_PATHS.script_file(script_name), globals_)
+
+
+if __name__ == '__main__':
+  _run_file(__file__, globals())
diff --git a/remote_api_shell.py b/remote_api_shell.py
index 2af168b..de775b6 100644
--- a/remote_api_shell.py
+++ b/remote_api_shell.py
@@ -21,19 +21,20 @@
 
 
 import os
-import re
 import sys
 
+sys_path = sys.path
+try:
+  sys.path = [os.path.dirname(__file__)] + sys.path
 
-if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Very old versions of Python are not supported. Please '
-                   'use version 2.5 or greater.\n')
-  sys.exit(1)
-version_tuple = tuple(sys.version_info[:2])
-if version_tuple < (2, 5):
-  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5 or greater.\n' % version_tuple)
-  sys.exit(1)
+  import wrapper_util
+
+finally:
+  sys.path = sys_path
+
+wrapper_util.reject_old_python_versions((2, 5))
+
+
 
 
 def get_dir_path(sibling):
@@ -47,7 +48,7 @@
   where it points).
 
   Args:
-    sibling: Relative path to a sibiling of this module file. Choose a sibling
+    sibling: Relative path to a sibling of this module file. Choose a sibling
     that is potentially symlinked into the parent directory.
 
   Returns:
@@ -56,28 +57,11 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  if 'GAE_SDK_ROOT' in os.environ:
-    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+  return wrapper_util.get_dir_path(__file__, sibling)
 
 
 
-    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
-    for dir_path in [gae_sdk_root,
-                     os.path.join(gae_sdk_root, 'google_appengine')]:
-      if os.path.exists(os.path.join(dir_path, sibling)):
-        return dir_path
-    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
-                     'directory' % gae_sdk_root)
-  else:
-    py_file = __file__.replace('.pyc', '.py')
-    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-                 os.path.abspath(os.path.dirname(py_file))]
-    for dir_path in dir_paths:
-      sibling_path = os.path.join(dir_path, sibling)
-      if os.path.exists(sibling_path):
-        return dir_path
-    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
-                     'environment variable.')
+
 
 
 
@@ -88,97 +72,43 @@
 
 
 DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
-SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
-GOOGLE_SQL_DIR = os.path.join(
-    DIR_PATH, 'google', 'storage', 'speckle', 'python', 'tool')
+_PATHS = wrapper_util.Paths(DIR_PATH)
 
-EXTRA_PATHS = [
-  DIR_PATH,
-  os.path.join(DIR_PATH, 'lib', 'antlr3'),
-  os.path.join(DIR_PATH, 'lib', 'django-0.96'),
-  os.path.join(DIR_PATH, 'lib', 'fancy_urllib'),
-  os.path.join(DIR_PATH, 'lib', 'ipaddr'),
-  os.path.join(DIR_PATH, 'lib', 'jinja2-2.6'),
-  os.path.join(DIR_PATH, 'lib', 'protorpc-1.0'),
-  os.path.join(DIR_PATH, 'lib', 'PyAMF'),
-  os.path.join(DIR_PATH, 'lib', 'markupsafe'),
-  os.path.join(DIR_PATH, 'lib', 'webob_0_9'),
-  os.path.join(DIR_PATH, 'lib', 'webapp2-2.5.2'),
-  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
-  os.path.join(DIR_PATH, 'lib', 'simplejson'),
-  os.path.join(DIR_PATH, 'lib', 'rsa'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1'),
-  os.path.join(DIR_PATH, 'lib', 'pyasn1_modules'),
-]
+SCRIPT_DIR = _PATHS.default_script_dir
+GOOGLE_SQL_DIR = _PATHS.google_sql_dir
 
-API_SERVER_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'argparse'),
-]
-API_SERVER_EXTRA_PATH_SCRIPTS = 'api_server'
+EXTRA_PATHS = _PATHS.v1_extra_paths
+
+API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
+
+ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
 
 
+OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
 
 
-ENDPOINTSCFG_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'cherrypy'),
-  os.path.join(DIR_PATH, 'lib', 'concurrent'),
-  os.path.join(DIR_PATH, 'lib', 'endpoints-1.0'),
-]
-ENDPOINTSCFG_EXTRA_PATH_SCRIPTS = 'endpointscfg'
+GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
 
 
-OAUTH_CLIENT_EXTRA_PATHS = [
-  os.path.join(DIR_PATH, 'lib', 'google-api-python-client'),
-  os.path.join(DIR_PATH, 'lib', 'httplib2'),
-  os.path.join(DIR_PATH, 'lib', 'python-gflags'),
-]
-
-OAUTH_CLIENT_EXTRA_PATH_SCRIPTS = '(appcfg|bulkloader)'
-
-
-GOOGLE_SQL_EXTRA_PATHS = OAUTH_CLIENT_EXTRA_PATHS + [
-  os.path.join(DIR_PATH, 'lib', 'enum'),
-  os.path.join(DIR_PATH, 'lib', 'grizzled'),
-  os.path.join(DIR_PATH, 'lib', 'oauth2'),
-  os.path.join(DIR_PATH, 'lib', 'prettytable'),
-  os.path.join(DIR_PATH, 'lib', 'sqlcmd'),
-]
-
-GOOGLE_SQL_EXTRA_PATH_SCRIPTS = 'google_sql'
-
-
-
-SCRIPT_EXCEPTIONS = {
-  "old_dev_appserver.py" : "dev_appserver_main.py"
-}
-
-SCRIPT_DIR_EXCEPTIONS = {
-  'google_sql.py': GOOGLE_SQL_DIR,
-}
 
 
 def fix_sys_path(extra_extra_paths=()):
   """Fix the sys.path to include our extra paths."""
-  extra_paths = EXTRA_PATHS[:]
-  extra_paths.extend(extra_extra_paths)
-  sys.path = extra_paths + sys.path
+  sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
 
 
-def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
-  """Execute the file at the specified path with the passed-in globals."""
+def run_file(file_path, globals_):
+  """Execute the given script with the passed-in globals.
+
+  Args:
+    file_path: the path to the wrapper for the given script. This will usually
+      be a copy of this file.
+    globals_: the global bindings to be used while executing the wrapped script.
+  """
   script_name = os.path.basename(file_path)
 
-  if re.match(OAUTH_CLIENT_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = OAUTH_CLIENT_EXTRA_PATHS
-  elif re.match(GOOGLE_SQL_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = GOOGLE_SQL_EXTRA_PATHS
-  elif re.match(API_SERVER_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = API_SERVER_EXTRA_PATHS
-  elif re.match(ENDPOINTSCFG_EXTRA_PATH_SCRIPTS, script_name):
-    extra_extra_paths = ENDPOINTSCFG_EXTRA_PATHS
-  else:
-    extra_extra_paths = []
-  fix_sys_path(extra_extra_paths)
+  sys.path = (_PATHS.script_paths(script_name) +
+              _PATHS.scrub_path(script_name, sys.path))
 
 
 
@@ -189,10 +119,7 @@
   if 'google' in sys.modules:
     del sys.modules['google']
 
-  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
-  script_dir = SCRIPT_DIR_EXCEPTIONS.get(script_name, script_dir)
-  script_path = os.path.join(script_dir, script_name)
-  execfile(script_path, globals_)
+  execfile(_PATHS.script_file(script_name), globals_)
 
 
 if __name__ == '__main__':
diff --git a/wrapper_util.py b/wrapper_util.py
new file mode 100644
index 0000000..b7afd34
--- /dev/null
+++ b/wrapper_util.py
@@ -0,0 +1,316 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Finds the directory and target script name for App Engine SDK scripts."""
+
+import os
+import sys
+
+
+def reject_old_python_versions(minimum_version):
+  """Guard against old python versions.
+
+  Args:
+    minimum_version: a tuple that indicates the minimum Python version.
+  """
+  minimum_version_string = '.'.join(str(x) for x in minimum_version)
+  if not hasattr(sys, 'version_info'):
+    sys.stderr.write('Very old versions of Python are not supported. Please '
+                     'use version %s.\n' % minimum_version_string)
+    sys.exit(1)
+  version_tuple = tuple(sys.version_info[:2])
+  if version_tuple < minimum_version:
+    sys.stderr.write('Error: Python %d.%d is not supported. Please use '
+                     'version %s.\n' % (version_tuple[0], version_tuple[1],
+                                        minimum_version_string))
+    sys.exit(1)
+
+
+def get_dir_path(script_file, sibling):
+  """Get a path to the directory of the script script_file.
+
+  By default, the canonical path (symlinks resolved) will be returned. In some
+  environments the canonical directory is not sufficient because different
+  parts of the SDK are referenced by symlinks, including script_file.
+  In this case, the non-canonical path to script_file's directory will be
+  returned (i.e., the directory where the symlink lives, not the directory
+  where it points).
+
+  Args:
+    script_file: The script file whose directory is wanted.
+    sibling: Relative path to a sibling of script_file. Choose a sibling
+    that is potentially symlinked into the parent directory.
+
+  Returns:
+    A directory name.
+
+  Raises:
+    ValueError: If no proper path could be determined.
+  """
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = script_file.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
+
+
+
+
+
+
+
+class Paths(object):
+  """Encapsulates the path and unwrapped script details for a wrapper script.
+
+  Most of the attributes of this object are there so that wrapper_script_v1
+  can continue to export the same global variables it historically has, in case
+  any end-users are referencing those.
+
+  Attributes:
+    default_script_dir: the path where the corresponding unwrapped script will
+      be found, apart from a few exceptional scripts.
+  """
+
+  def __init__(self, dir_path):
+    """Make a new Paths object.
+
+    Args:
+      dir_path: the directory path where the calling script is to be found.
+        This directory should have a lib subdirectory.
+    """
+
+
+    self.v1_extra_paths = [
+        dir_path,
+        os.path.join(dir_path, 'lib', 'antlr3'),
+        os.path.join(dir_path, 'lib', 'django-0.96'),
+        os.path.join(dir_path, 'lib', 'fancy_urllib'),
+        os.path.join(dir_path, 'lib', 'ipaddr'),
+        os.path.join(dir_path, 'lib', 'jinja2-2.6'),
+        os.path.join(dir_path, 'lib', 'protorpc-1.0'),
+        os.path.join(dir_path, 'lib', 'PyAMF'),
+        os.path.join(dir_path, 'lib', 'markupsafe'),
+        os.path.join(dir_path, 'lib', 'webob_0_9'),
+        os.path.join(dir_path, 'lib', 'webapp2-2.5.2'),
+        os.path.join(dir_path, 'lib', 'yaml', 'lib'),
+        os.path.join(dir_path, 'lib', 'simplejson'),
+
+        os.path.join(dir_path, 'lib', 'rsa'),
+        os.path.join(dir_path, 'lib', 'pyasn1'),
+        os.path.join(dir_path, 'lib', 'pyasn1_modules'),
+    ]
+
+    self.api_server_extra_paths = [
+        os.path.join(dir_path, 'lib', 'argparse'),
+    ]
+
+
+
+
+    self.endpointscfg_extra_paths = [
+        os.path.join(dir_path, 'lib', 'cherrypy'),
+        os.path.join(dir_path, 'lib', 'concurrent'),
+        os.path.join(dir_path, 'lib', 'endpoints-1.0'),
+    ]
+
+
+    self.oauth_client_extra_paths = [
+        os.path.join(dir_path, 'lib', 'google-api-python-client'),
+        os.path.join(dir_path, 'lib', 'httplib2'),
+        os.path.join(dir_path, 'lib', 'python-gflags'),
+    ]
+
+
+    self.google_sql_extra_paths = self.oauth_client_extra_paths + [
+        os.path.join(dir_path, 'lib', 'enum'),
+        os.path.join(dir_path, 'lib', 'grizzled'),
+        os.path.join(dir_path, 'lib', 'oauth2'),
+        os.path.join(dir_path, 'lib', 'prettytable'),
+        os.path.join(dir_path, 'lib', 'sqlcmd'),
+    ]
+
+    devappserver2_dir = os.path.join(
+        dir_path, 'google', 'appengine', 'tools', 'devappserver2')
+    php_runtime_dir = os.path.join(devappserver2_dir, 'php')
+    python_runtime_dir = os.path.join(devappserver2_dir, 'python')
+
+    stub_paths = [
+        os.path.join(dir_path, 'lib', 'antlr3'),
+        os.path.join(dir_path, 'lib', 'fancy_urllib'),
+        os.path.join(dir_path, 'lib', 'ipaddr'),
+        os.path.join(dir_path, 'lib', 'yaml-3.10'),
+
+        os.path.join(dir_path, 'lib', 'rsa'),
+        os.path.join(dir_path, 'lib', 'pyasn1'),
+        os.path.join(dir_path, 'lib', 'pyasn1_modules'),
+    ]
+
+
+
+
+
+    self.v2_extra_paths = stub_paths + [
+        dir_path,
+
+        os.path.join(dir_path, 'lib', 'simplejson'),
+
+
+
+        os.path.join(dir_path, 'lib', 'django-1.4'),
+        os.path.join(dir_path, 'lib', 'endpoints-1.0'),
+        os.path.join(dir_path, 'lib', 'jinja2-2.6'),
+        os.path.join(dir_path, 'lib', 'protorpc-1.0'),
+        os.path.join(dir_path, 'lib', 'PyAMF-0.6.1'),
+        os.path.join(dir_path, 'lib', 'markupsafe-0.15'),
+        os.path.join(dir_path, 'lib', 'webob-1.2.3'),
+        os.path.join(dir_path, 'lib', 'webapp2-2.5.2'),
+    ]
+
+    devappserver2_paths = stub_paths + [
+        dir_path,
+        os.path.join(dir_path, 'lib', 'concurrent'),
+        os.path.join(dir_path, 'lib', 'cherrypy'),
+        os.path.join(dir_path, 'lib', 'jinja2-2.6'),
+        os.path.join(dir_path, 'lib', 'webob-1.2.3'),
+        os.path.join(dir_path, 'lib', 'webapp2-2.5.1'),
+    ]
+
+    php_runtime_paths = [
+        dir_path,
+        os.path.join(dir_path, 'lib', 'concurrent'),
+        os.path.join(dir_path, 'lib', 'cherrypy'),
+        os.path.join(dir_path, 'lib', 'yaml-3.10'),
+    ]
+
+    python_runtime_paths = [
+        dir_path,
+        os.path.join(dir_path, 'lib', 'concurrent'),
+        os.path.join(dir_path, 'lib', 'cherrypy'),
+        os.path.join(dir_path, 'lib', 'fancy_urllib'),
+        os.path.join(dir_path, 'lib', 'protorpc-1.0'),
+        os.path.join(dir_path, 'lib', 'yaml-3.10'),
+    ]
+
+    self._script_to_paths = {
+        'api_server.py': self.v1_extra_paths + self.api_server_extra_paths,
+        'appcfg.py': self.v1_extra_paths + self.oauth_client_extra_paths,
+        'backends_conversion.py': self.v1_extra_paths,
+        'bulkload_client.py': self.v1_extra_paths,
+        'bulkloader.py': self.v1_extra_paths + self.oauth_client_extra_paths,
+        'dev_appserver.py': devappserver2_paths,
+        'download_appstats.py': self.v1_extra_paths,
+        'endpointscfg.py': self.v1_extra_paths + self.endpointscfg_extra_paths,
+        'gen_protorpc.py': self.v1_extra_paths,
+        'google_sql.py': self.v1_extra_paths + self.google_sql_extra_paths,
+        'old_dev_appserver.py': self.v1_extra_paths,
+        'php_cli.py': devappserver2_paths,
+        'remote_api_shell.py': self.v1_extra_paths,
+        'vmboot.py': self.v1_extra_paths,
+        '_php_runtime.py': php_runtime_paths,
+        '_python_runtime.py': python_runtime_paths,
+    }
+
+    self._wrapper_name_to_real_name = {
+        'old_dev_appserver.py': 'dev_appserver_main.py',
+        'dev_appserver.py': 'devappserver2.py',
+        '_php_runtime.py': 'runtime.py',
+        '_python_runtime.py': 'runtime.py',
+    }
+
+    self.default_script_dir = os.path.join(
+        dir_path, 'google', 'appengine', 'tools')
+
+    self.google_sql_dir = os.path.join(
+        dir_path, 'google', 'storage', 'speckle', 'python', 'tool')
+
+    self._script_to_dir = {
+        'google_sql.py': self.google_sql_dir,
+        'dev_appserver.py': devappserver2_dir,
+        '_php_runtime.py': php_runtime_dir,
+        '_python_runtime.py': python_runtime_dir,
+    }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    self._sys_paths_to_scrub = {
+        'dev_appserver.py':
+            [os.path.normcase(os.path.join(dir_path, 'launcher'))],
+    }
+
+  def script_paths(self, script_name):
+    """Returns the sys.path prefix appropriate for this script.
+
+    Args:
+      script_name: the basename of the script, for example 'appcfg.py'.
+    """
+    try:
+      return self._script_to_paths[script_name]
+    except KeyError:
+      raise KeyError('Script name %s not recognized' % script_name)
+
+  def script_file(self, script_name):
+    """Returns the absolute name of the wrapped script.
+
+    Args:
+      script_name: the basename of the script, for example 'appcfg.py'.
+    """
+    script_dir = self._script_to_dir.get(script_name, self.default_script_dir)
+    script_name = self._wrapper_name_to_real_name.get(script_name, script_name)
+    return os.path.join(script_dir, script_name)
+
+  def scrub_path(self, script_name, paths):
+    """Removes bad paths from a list of paths.
+
+    Args:
+      script_name: the basename of the script, for example 'appcfg.py'.
+      paths: a list of paths.
+
+    Returns:
+      The list of paths with any bad paths removed.
+    """
+    sys_paths_to_scrub = self._sys_paths_to_scrub.get(script_name, [])
+
+
+    return [path for path in paths
+            if os.path.normcase(path) not in sys_paths_to_scrub]