App Engine Python SDK version 1.8.4

git-svn-id: http://googleappengine.googlecode.com/svn/trunk/python@381 80f5ef21-4148-0410-bacc-cfb02402ada8
diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index 38d5184..223eb00 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -3,6 +3,60 @@
 
 App Engine SDK - Release Notes
 
+Version 1.8.4
+
+All
+==============================
+- A Datastore Admin fix in this release improves security by ensuring that
+  scheduled backups can now only be started by a cron or task queue task.
+  Administrators can still start a backup by going to the Datastore Admin
+  in the Admin Console.
+
+Python
+==============================
+- Better support is now provided for the _$folder$ magic cloud storage keyword,
+  and the implementation of basic mkdir()/rmdir() functionality via GCS
+  streams.
+- This release adds the wrappers @transactional_async and @transactional_tasklet
+  to correct (a) @transactional @tasklet being synchronous despite the
+  decorator, and (b) @tasklet @transactional not running the generator in a
+  transaction.
+    https://code.google.com/p/appengine-ndb-experiment/issues/detail?id=195
+- Fixed a unicode issue associated with expressions in the Search API. A search
+  with snippeted fields was failing on documents containing unicode characters.
+- Fixed an issue with the dev_appserver not auto-detecting the lib/ directory
+  in the SDK.
+    https://code.google.com/p/googleappengine/issues/detail?id=8459
+    https://code.google.com/p/googleappengine/issues/detail?id=9847
+- Fixed an issue where Blobstore usage was being charged even though the
+  actual data was being stored in Cloud Storage. All affected developers have
+  been informed and reimbursements are underway.
+    https://code.google.com/p/googleappengine/issues/detail?id=9659
+- Fixed an issue that ensures all .py files in google.appengine.api that are
+  available in the SDK are also available in the runtime.
+    https://code.google.com/p/googleappengine/issues/detail?id=9755
+- Fixed an issue of warning messages being displayed when running appcfg
+  update (e.g. appcfg.py update . --oauth2) from the 1.8.3 Python SDK. The
+  appengine_rpc_httplib2 will work without warnings regardless of which
+  oauth2client library is being used.
+    https://code.google.com/p/googleappengine/issues/detail?id=9807
+
+
+PHP
+==============================
+- The PHP interpreter was upgraded from PHP 5.4.8 to PHP 5.4.17.
+- The is_writable() method now supports Google Cloud Storage files and buckets.
+- You no longer need to specify the PHP runtime on the command line when
+  deploying applications via appcfg.py
+- Backends are disabled for PHP and are no longer supported with the PHP
+  runtime.
+- Upload of PHP apps now ignores any pre-compilation errors. This allows
+  precompilation to be disabled on a per-runtime basis, and defaults to
+  disabled for PHP.
+- Fixed an issue where SYSTEMROOT was not passed to the environment checking
+  script, thereby not recognizing the PHP interpreter for Windows in the SDK.
+    https://code.google.com/p/googleappengine/issues/detail?id=9382
+
 Version 1.8.3
 
 Python
diff --git a/VERSION b/VERSION
index be8ffd7..8effc62 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
-release: "1.8.3"
-timestamp: 1374030712
+release: "1.8.4"
+timestamp: 1375825037
 api_versions: ['1']
 supported_api_versions:
   python:
diff --git a/_php_runtime.py b/_php_runtime.py
index 1c48e39..b615636 100644
--- a/_php_runtime.py
+++ b/_php_runtime.py
@@ -52,15 +52,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/_python_runtime.py b/_python_runtime.py
index 1c48e39..b615636 100644
--- a/_python_runtime.py
+++ b/_python_runtime.py
@@ -52,15 +52,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/api_server.py b/api_server.py
index af18e1c..661f26e 100644
--- a/api_server.py
+++ b/api_server.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/apiclient/__init__.py b/apiclient/__init__.py
deleted file mode 100644
index 4802e90..0000000
--- a/apiclient/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "1.0"
diff --git a/apiclient/discovery.py b/apiclient/discovery.py
deleted file mode 100644
index 19af41a..0000000
--- a/apiclient/discovery.py
+++ /dev/null
@@ -1,953 +0,0 @@
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Client for discovery based APIs.
-
-A client library for Google's discovery based APIs.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-__all__ = [
-    'build',
-    'build_from_document',
-    'fix_method_name',
-    'key2param',
-    ]
-
-import copy
-import httplib2
-import keyword
-import logging
-import os
-import re
-import uritemplate
-import urllib
-import urlparse
-import mimeparse
-import mimetypes
-
-try:
-  from urlparse import parse_qsl
-except ImportError:
-  from cgi import parse_qsl
-
-from apiclient.errors import HttpError
-from apiclient.errors import InvalidJsonError
-from apiclient.errors import MediaUploadSizeError
-from apiclient.errors import UnacceptableMimeTypeError
-from apiclient.errors import UnknownApiNameOrVersion
-from apiclient.errors import UnknownFileType
-from apiclient.http import HttpRequest
-from apiclient.http import MediaFileUpload
-from apiclient.http import MediaUpload
-from apiclient.model import JsonModel
-from apiclient.model import MediaModel
-from apiclient.model import RawModel
-from apiclient.schema import Schemas
-from email.mime.multipart import MIMEMultipart
-from email.mime.nonmultipart import MIMENonMultipart
-from oauth2client.util import positional
-from oauth2client.util import _add_query_parameter
-from oauth2client.anyjson import simplejson
-
-# The client library requires a version of httplib2 that supports RETRIES.
-httplib2.RETRIES = 1
-
-logger = logging.getLogger(__name__)
-
-URITEMPLATE = re.compile('{[^}]*}')
-VARNAME = re.compile('[a-zA-Z0-9_-]+')
-DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
-                 '{api}/{apiVersion}/rest')
-DEFAULT_METHOD_DOC = 'A description of how to use this function'
-HTTP_PAYLOAD_METHODS = frozenset(['PUT', 'POST', 'PATCH'])
-_MEDIA_SIZE_BIT_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
-BODY_PARAMETER_DEFAULT_VALUE = {
-    'description': 'The request body.',
-    'type': 'object',
-    'required': True,
-}
-MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
-  'description': ('The filename of the media request body, or an instance '
-                  'of a MediaUpload object.'),
-  'type': 'string',
-  'required': False,
-}
-
-# Parameters accepted by the stack, but not visible via discovery.
-# TODO(dhermes): Remove 'userip' in 'v2'.
-STACK_QUERY_PARAMETERS = frozenset(['trace', 'pp', 'userip', 'strict'])
-STACK_QUERY_PARAMETER_DEFAULT_VALUE = {'type': 'string', 'location': 'query'}
-
-# Library-specific reserved words beyond Python keywords.
-RESERVED_WORDS = frozenset(['body'])
-
-
-def fix_method_name(name):
-  """Fix method names to avoid reserved word conflicts.
-
-  Args:
-    name: string, method name.
-
-  Returns:
-    The name with a '_' prefixed if the name is a reserved word.
-  """
-  if keyword.iskeyword(name) or name in RESERVED_WORDS:
-    return name + '_'
-  else:
-    return name
-
-
-def key2param(key):
-  """Converts key names into parameter names.
-
-  For example, converting "max-results" -> "max_results"
-
-  Args:
-    key: string, the method key name.
-
-  Returns:
-    A safe method name based on the key name.
-  """
-  result = []
-  key = list(key)
-  if not key[0].isalpha():
-    result.append('x')
-  for c in key:
-    if c.isalnum():
-      result.append(c)
-    else:
-      result.append('_')
-
-  return ''.join(result)
-
-
-@positional(2)
-def build(serviceName,
-          version,
-          http=None,
-          discoveryServiceUrl=DISCOVERY_URI,
-          developerKey=None,
-          model=None,
-          requestBuilder=HttpRequest):
-  """Construct a Resource for interacting with an API.
-
-  Construct a Resource object for interacting with an API. The serviceName and
-  version are the names from the Discovery service.
-
-  Args:
-    serviceName: string, name of the service.
-    version: string, the version of the service.
-    http: httplib2.Http, An instance of httplib2.Http or something that acts
-      like it that HTTP requests will be made through.
-    discoveryServiceUrl: string, a URI Template that points to the location of
-      the discovery service. It should have two parameters {api} and
-      {apiVersion} that when filled in produce an absolute URI to the discovery
-      document for that service.
-    developerKey: string, key obtained from
-      https://code.google.com/apis/console.
-    model: apiclient.Model, converts to and from the wire format.
-    requestBuilder: apiclient.http.HttpRequest, encapsulator for an HTTP
-      request.
-
-  Returns:
-    A Resource object with methods for interacting with the service.
-  """
-  params = {
-      'api': serviceName,
-      'apiVersion': version
-      }
-
-  if http is None:
-    http = httplib2.Http()
-
-  requested_url = uritemplate.expand(discoveryServiceUrl, params)
-
-  # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
-  # variable that contains the network address of the client sending the
-  # request. If it exists then add that to the request for the discovery
-  # document to avoid exceeding the quota on discovery requests.
-  if 'REMOTE_ADDR' in os.environ:
-    requested_url = _add_query_parameter(requested_url, 'userIp',
-                                         os.environ['REMOTE_ADDR'])
-  logger.info('URL being requested: %s' % requested_url)
-
-  resp, content = http.request(requested_url)
-
-  if resp.status == 404:
-    raise UnknownApiNameOrVersion("name: %s  version: %s" % (serviceName,
-                                                            version))
-  if resp.status >= 400:
-    raise HttpError(resp, content, uri=requested_url)
-
-  try:
-    service = simplejson.loads(content)
-  except ValueError, e:
-    logger.error('Failed to parse as JSON: ' + content)
-    raise InvalidJsonError()
-
-  return build_from_document(content, base=discoveryServiceUrl, http=http,
-      developerKey=developerKey, model=model, requestBuilder=requestBuilder)
-
-
-@positional(1)
-def build_from_document(
-    service,
-    base=None,
-    future=None,
-    http=None,
-    developerKey=None,
-    model=None,
-    requestBuilder=HttpRequest):
-  """Create a Resource for interacting with an API.
-
-  Same as `build()`, but constructs the Resource object from a discovery
-  document that is it given, as opposed to retrieving one over HTTP.
-
-  Args:
-    service: string or object, the JSON discovery document describing the API.
-      The value passed in may either be the JSON string or the deserialized
-      JSON.
-    base: string, base URI for all HTTP requests, usually the discovery URI.
-      This parameter is no longer used as rootUrl and servicePath are included
-      within the discovery document. (deprecated)
-    future: string, discovery document with future capabilities (deprecated).
-    http: httplib2.Http, An instance of httplib2.Http or something that acts
-      like it that HTTP requests will be made through.
-    developerKey: string, Key for controlling API usage, generated
-      from the API Console.
-    model: Model class instance that serializes and de-serializes requests and
-      responses.
-    requestBuilder: Takes an http request and packages it up to be executed.
-
-  Returns:
-    A Resource object with methods for interacting with the service.
-  """
-
-  # future is no longer used.
-  future = {}
-
-  if isinstance(service, basestring):
-    service = simplejson.loads(service)
-  base = urlparse.urljoin(service['rootUrl'], service['servicePath'])
-  schema = Schemas(service)
-
-  if model is None:
-    features = service.get('features', [])
-    model = JsonModel('dataWrapper' in features)
-  return Resource(http=http, baseUrl=base, model=model,
-                  developerKey=developerKey, requestBuilder=requestBuilder,
-                  resourceDesc=service, rootDesc=service, schema=schema)
-
-
-def _cast(value, schema_type):
-  """Convert value to a string based on JSON Schema type.
-
-  See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
-  JSON Schema.
-
-  Args:
-    value: any, the value to convert
-    schema_type: string, the type that value should be interpreted as
-
-  Returns:
-    A string representation of 'value' based on the schema_type.
-  """
-  if schema_type == 'string':
-    if type(value) == type('') or type(value) == type(u''):
-      return value
-    else:
-      return str(value)
-  elif schema_type == 'integer':
-    return str(int(value))
-  elif schema_type == 'number':
-    return str(float(value))
-  elif schema_type == 'boolean':
-    return str(bool(value)).lower()
-  else:
-    if type(value) == type('') or type(value) == type(u''):
-      return value
-    else:
-      return str(value)
-
-
-def _media_size_to_long(maxSize):
-  """Convert a string media size, such as 10GB or 3TB into an integer.
-
-  Args:
-    maxSize: string, size as a string, such as 2MB or 7GB.
-
-  Returns:
-    The size as an integer value.
-  """
-  if len(maxSize) < 2:
-    return 0L
-  units = maxSize[-2:].upper()
-  bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
-  if bit_shift is not None:
-    return long(maxSize[:-2]) << bit_shift
-  else:
-    return long(maxSize)
-
-
-def _media_path_url_from_info(root_desc, path_url):
-  """Creates an absolute media path URL.
-
-  Constructed using the API root URI and service path from the discovery
-  document and the relative path for the API method.
-
-  Args:
-    root_desc: Dictionary; the entire original deserialized discovery document.
-    path_url: String; the relative URL for the API method. Relative to the API
-        root, which is specified in the discovery document.
-
-  Returns:
-    String; the absolute URI for media upload for the API method.
-  """
-  return '%(root)supload/%(service_path)s%(path)s' % {
-      'root': root_desc['rootUrl'],
-      'service_path': root_desc['servicePath'],
-      'path': path_url,
-  }
-
-
-def _fix_up_parameters(method_desc, root_desc, http_method):
-  """Updates parameters of an API method with values specific to this library.
-
-  Specifically, adds whatever global parameters are specified by the API to the
-  parameters for the individual method. Also adds parameters which don't
-  appear in the discovery document, but are available to all discovery based
-  APIs (these are listed in STACK_QUERY_PARAMETERS).
-
-  SIDE EFFECTS: This updates the parameters dictionary object in the method
-  description.
-
-  Args:
-    method_desc: Dictionary with metadata describing an API method. Value comes
-        from the dictionary of methods stored in the 'methods' key in the
-        deserialized discovery document.
-    root_desc: Dictionary; the entire original deserialized discovery document.
-    http_method: String; the HTTP method used to call the API method described
-        in method_desc.
-
-  Returns:
-    The updated Dictionary stored in the 'parameters' key of the method
-        description dictionary.
-  """
-  parameters = method_desc.setdefault('parameters', {})
-
-  # Add in the parameters common to all methods.
-  for name, description in root_desc.get('parameters', {}).iteritems():
-    parameters[name] = description
-
-  # Add in undocumented query parameters.
-  for name in STACK_QUERY_PARAMETERS:
-    parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
-
-  # Add 'body' (our own reserved word) to parameters if the method supports
-  # a request payload.
-  if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc:
-    body = BODY_PARAMETER_DEFAULT_VALUE.copy()
-    body.update(method_desc['request'])
-    parameters['body'] = body
-
-  return parameters
-
-
-def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
-  """Updates parameters of API by adding 'media_body' if supported by method.
-
-  SIDE EFFECTS: If the method supports media upload and has a required body,
-  sets body to be optional (required=False) instead. Also, if there is a
-  'mediaUpload' in the method description, adds 'media_upload' key to
-  parameters.
-
-  Args:
-    method_desc: Dictionary with metadata describing an API method. Value comes
-        from the dictionary of methods stored in the 'methods' key in the
-        deserialized discovery document.
-    root_desc: Dictionary; the entire original deserialized discovery document.
-    path_url: String; the relative URL for the API method. Relative to the API
-        root, which is specified in the discovery document.
-    parameters: A dictionary describing method parameters for method described
-        in method_desc.
-
-  Returns:
-    Triple (accept, max_size, media_path_url) where:
-      - accept is a list of strings representing what content types are
-        accepted for media upload. Defaults to empty list if not in the
-        discovery document.
-      - max_size is a long representing the max size in bytes allowed for a
-        media upload. Defaults to 0L if not in the discovery document.
-      - media_path_url is a String; the absolute URI for media upload for the
-        API method. Constructed using the API root URI and service path from
-        the discovery document and the relative path for the API method. If
-        media upload is not supported, this is None.
-  """
-  media_upload = method_desc.get('mediaUpload', {})
-  accept = media_upload.get('accept', [])
-  max_size = _media_size_to_long(media_upload.get('maxSize', ''))
-  media_path_url = None
-
-  if media_upload:
-    media_path_url = _media_path_url_from_info(root_desc, path_url)
-    parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
-    if 'body' in parameters:
-      parameters['body']['required'] = False
-
-  return accept, max_size, media_path_url
-
-
-def _fix_up_method_description(method_desc, root_desc):
-  """Updates a method description in a discovery document.
-
-  SIDE EFFECTS: Changes the parameters dictionary in the method description with
-  extra parameters which are used locally.
-
-  Args:
-    method_desc: Dictionary with metadata describing an API method. Value comes
-        from the dictionary of methods stored in the 'methods' key in the
-        deserialized discovery document.
-    root_desc: Dictionary; the entire original deserialized discovery document.
-
-  Returns:
-    Tuple (path_url, http_method, method_id, accept, max_size, media_path_url)
-    where:
-      - path_url is a String; the relative URL for the API method. Relative to
-        the API root, which is specified in the discovery document.
-      - http_method is a String; the HTTP method used to call the API method
-        described in the method description.
-      - method_id is a String; the name of the RPC method associated with the
-        API method, and is in the method description in the 'id' key.
-      - accept is a list of strings representing what content types are
-        accepted for media upload. Defaults to empty list if not in the
-        discovery document.
-      - max_size is a long representing the max size in bytes allowed for a
-        media upload. Defaults to 0L if not in the discovery document.
-      - media_path_url is a String; the absolute URI for media upload for the
-        API method. Constructed using the API root URI and service path from
-        the discovery document and the relative path for the API method. If
-        media upload is not supported, this is None.
-  """
-  path_url = method_desc['path']
-  http_method = method_desc['httpMethod']
-  method_id = method_desc['id']
-
-  parameters = _fix_up_parameters(method_desc, root_desc, http_method)
-  # Order is important. `_fix_up_media_upload` needs `method_desc` to have a
-  # 'parameters' key and needs to know if there is a 'body' parameter because it
-  # also sets a 'media_body' parameter.
-  accept, max_size, media_path_url = _fix_up_media_upload(
-      method_desc, root_desc, path_url, parameters)
-
-  return path_url, http_method, method_id, accept, max_size, media_path_url
-
-
-# TODO(dhermes): Convert this class to ResourceMethod and make it callable
-class ResourceMethodParameters(object):
-  """Represents the parameters associated with a method.
-
-  Attributes:
-    argmap: Map from method parameter name (string) to query parameter name
-        (string).
-    required_params: List of required parameters (represented by parameter
-        name as string).
-    repeated_params: List of repeated parameters (represented by parameter
-        name as string).
-    pattern_params: Map from method parameter name (string) to regular
-        expression (as a string). If the pattern is set for a parameter, the
-        value for that parameter must match the regular expression.
-    query_params: List of parameters (represented by parameter name as string)
-        that will be used in the query string.
-    path_params: Set of parameters (represented by parameter name as string)
-        that will be used in the base URL path.
-    param_types: Map from method parameter name (string) to parameter type. Type
-        can be any valid JSON schema type; valid values are 'any', 'array',
-        'boolean', 'integer', 'number', 'object', or 'string'. Reference:
-        http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
-    enum_params: Map from method parameter name (string) to list of strings,
-       where each list of strings is the list of acceptable enum values.
-  """
-
-  def __init__(self, method_desc):
-    """Constructor for ResourceMethodParameters.
-
-    Sets default values and defers to set_parameters to populate.
-
-    Args:
-      method_desc: Dictionary with metadata describing an API method. Value
-          comes from the dictionary of methods stored in the 'methods' key in
-          the deserialized discovery document.
-    """
-    self.argmap = {}
-    self.required_params = []
-    self.repeated_params = []
-    self.pattern_params = {}
-    self.query_params = []
-    # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
-    #                parsing is gotten rid of.
-    self.path_params = set()
-    self.param_types = {}
-    self.enum_params = {}
-
-    self.set_parameters(method_desc)
-
-  def set_parameters(self, method_desc):
-    """Populates maps and lists based on method description.
-
-    Iterates through each parameter for the method and parses the values from
-    the parameter dictionary.
-
-    Args:
-      method_desc: Dictionary with metadata describing an API method. Value
-          comes from the dictionary of methods stored in the 'methods' key in
-          the deserialized discovery document.
-    """
-    for arg, desc in method_desc.get('parameters', {}).iteritems():
-      param = key2param(arg)
-      self.argmap[param] = arg
-
-      if desc.get('pattern'):
-        self.pattern_params[param] = desc['pattern']
-      if desc.get('enum'):
-        self.enum_params[param] = desc['enum']
-      if desc.get('required'):
-        self.required_params.append(param)
-      if desc.get('repeated'):
-        self.repeated_params.append(param)
-      if desc.get('location') == 'query':
-        self.query_params.append(param)
-      if desc.get('location') == 'path':
-        self.path_params.add(param)
-      self.param_types[param] = desc.get('type', 'string')
-
-    # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
-    #                should have all path parameters already marked with
-    #                'location: path'.
-    for match in URITEMPLATE.finditer(method_desc['path']):
-      for namematch in VARNAME.finditer(match.group(0)):
-        name = key2param(namematch.group(0))
-        self.path_params.add(name)
-        if name in self.query_params:
-          self.query_params.remove(name)
-
-
-def createMethod(methodName, methodDesc, rootDesc, schema):
-  """Creates a method for attaching to a Resource.
-
-  Args:
-    methodName: string, name of the method to use.
-    methodDesc: object, fragment of deserialized discovery document that
-      describes the method.
-    rootDesc: object, the entire deserialized discovery document.
-    schema: object, mapping of schema names to schema descriptions.
-  """
-  methodName = fix_method_name(methodName)
-  (pathUrl, httpMethod, methodId, accept,
-   maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc)
-
-  parameters = ResourceMethodParameters(methodDesc)
-
-  def method(self, **kwargs):
-    # Don't bother with doc string, it will be over-written by createMethod.
-
-    for name in kwargs.iterkeys():
-      if name not in parameters.argmap:
-        raise TypeError('Got an unexpected keyword argument "%s"' % name)
-
-    # Remove args that have a value of None.
-    keys = kwargs.keys()
-    for name in keys:
-      if kwargs[name] is None:
-        del kwargs[name]
-
-    for name in parameters.required_params:
-      if name not in kwargs:
-        raise TypeError('Missing required parameter "%s"' % name)
-
-    for name, regex in parameters.pattern_params.iteritems():
-      if name in kwargs:
-        if isinstance(kwargs[name], basestring):
-          pvalues = [kwargs[name]]
-        else:
-          pvalues = kwargs[name]
-        for pvalue in pvalues:
-          if re.match(regex, pvalue) is None:
-            raise TypeError(
-                'Parameter "%s" value "%s" does not match the pattern "%s"' %
-                (name, pvalue, regex))
-
-    for name, enums in parameters.enum_params.iteritems():
-      if name in kwargs:
-        # We need to handle the case of a repeated enum
-        # name differently, since we want to handle both
-        # arg='value' and arg=['value1', 'value2']
-        if (name in parameters.repeated_params and
-            not isinstance(kwargs[name], basestring)):
-          values = kwargs[name]
-        else:
-          values = [kwargs[name]]
-        for value in values:
-          if value not in enums:
-            raise TypeError(
-                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
-                (name, value, str(enums)))
-
-    actual_query_params = {}
-    actual_path_params = {}
-    for key, value in kwargs.iteritems():
-      to_type = parameters.param_types.get(key, 'string')
-      # For repeated parameters we cast each member of the list.
-      if key in parameters.repeated_params and type(value) == type([]):
-        cast_value = [_cast(x, to_type) for x in value]
-      else:
-        cast_value = _cast(value, to_type)
-      if key in parameters.query_params:
-        actual_query_params[parameters.argmap[key]] = cast_value
-      if key in parameters.path_params:
-        actual_path_params[parameters.argmap[key]] = cast_value
-    body_value = kwargs.get('body', None)
-    media_filename = kwargs.get('media_body', None)
-
-    if self._developerKey:
-      actual_query_params['key'] = self._developerKey
-
-    model = self._model
-    if methodName.endswith('_media'):
-      model = MediaModel()
-    elif 'response' not in methodDesc:
-      model = RawModel()
-
-    headers = {}
-    headers, params, query, body = model.request(headers,
-        actual_path_params, actual_query_params, body_value)
-
-    expanded_url = uritemplate.expand(pathUrl, params)
-    url = urlparse.urljoin(self._baseUrl, expanded_url + query)
-
-    resumable = None
-    multipart_boundary = ''
-
-    if media_filename:
-      # Ensure we end up with a valid MediaUpload object.
-      if isinstance(media_filename, basestring):
-        (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
-        if media_mime_type is None:
-          raise UnknownFileType(media_filename)
-        if not mimeparse.best_match([media_mime_type], ','.join(accept)):
-          raise UnacceptableMimeTypeError(media_mime_type)
-        media_upload = MediaFileUpload(media_filename,
-                                       mimetype=media_mime_type)
-      elif isinstance(media_filename, MediaUpload):
-        media_upload = media_filename
-      else:
-        raise TypeError('media_filename must be str or MediaUpload.')
-
-      # Check the maxSize
-      if maxSize > 0 and media_upload.size() > maxSize:
-        raise MediaUploadSizeError("Media larger than: %s" % maxSize)
-
-      # Use the media path uri for media uploads
-      expanded_url = uritemplate.expand(mediaPathUrl, params)
-      url = urlparse.urljoin(self._baseUrl, expanded_url + query)
-      if media_upload.resumable():
-        url = _add_query_parameter(url, 'uploadType', 'resumable')
-
-      if media_upload.resumable():
-        # This is all we need to do for resumable, if the body exists it gets
-        # sent in the first request, otherwise an empty body is sent.
-        resumable = media_upload
-      else:
-        # A non-resumable upload
-        if body is None:
-          # This is a simple media upload
-          headers['content-type'] = media_upload.mimetype()
-          body = media_upload.getbytes(0, media_upload.size())
-          url = _add_query_parameter(url, 'uploadType', 'media')
-        else:
-          # This is a multipart/related upload.
-          msgRoot = MIMEMultipart('related')
-          # msgRoot should not write out it's own headers
-          setattr(msgRoot, '_write_headers', lambda self: None)
-
-          # attach the body as one part
-          msg = MIMENonMultipart(*headers['content-type'].split('/'))
-          msg.set_payload(body)
-          msgRoot.attach(msg)
-
-          # attach the media as the second part
-          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
-          msg['Content-Transfer-Encoding'] = 'binary'
-
-          payload = media_upload.getbytes(0, media_upload.size())
-          msg.set_payload(payload)
-          msgRoot.attach(msg)
-          body = msgRoot.as_string()
-
-          multipart_boundary = msgRoot.get_boundary()
-          headers['content-type'] = ('multipart/related; '
-                                     'boundary="%s"') % multipart_boundary
-          url = _add_query_parameter(url, 'uploadType', 'multipart')
-
-    logger.info('URL being requested: %s' % url)
-    return self._requestBuilder(self._http,
-                                model.response,
-                                url,
-                                method=httpMethod,
-                                body=body,
-                                headers=headers,
-                                methodId=methodId,
-                                resumable=resumable)
-
-  docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
-  if len(parameters.argmap) > 0:
-    docs.append('Args:\n')
-
-  # Skip undocumented params and params common to all methods.
-  skip_parameters = rootDesc.get('parameters', {}).keys()
-  skip_parameters.extend(STACK_QUERY_PARAMETERS)
-
-  all_args = parameters.argmap.keys()
-  args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]
-
-  # Move body to the front of the line.
-  if 'body' in all_args:
-    args_ordered.append('body')
-
-  for name in all_args:
-    if name not in args_ordered:
-      args_ordered.append(name)
-
-  for arg in args_ordered:
-    if arg in skip_parameters:
-      continue
-
-    repeated = ''
-    if arg in parameters.repeated_params:
-      repeated = ' (repeated)'
-    required = ''
-    if arg in parameters.required_params:
-      required = ' (required)'
-    paramdesc = methodDesc['parameters'][parameters.argmap[arg]]
-    paramdoc = paramdesc.get('description', 'A parameter')
-    if '$ref' in paramdesc:
-      docs.append(
-          ('  %s: object, %s%s%s\n    The object takes the'
-          ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
-            schema.prettyPrintByName(paramdesc['$ref'])))
-    else:
-      paramtype = paramdesc.get('type', 'string')
-      docs.append('  %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
-                                          repeated))
-    enum = paramdesc.get('enum', [])
-    enumDesc = paramdesc.get('enumDescriptions', [])
-    if enum and enumDesc:
-      docs.append('    Allowed values\n')
-      for (name, desc) in zip(enum, enumDesc):
-        docs.append('      %s - %s\n' % (name, desc))
-  if 'response' in methodDesc:
-    if methodName.endswith('_media'):
-      docs.append('\nReturns:\n  The media object as a string.\n\n    ')
-    else:
-      docs.append('\nReturns:\n  An object of the form:\n\n    ')
-      docs.append(schema.prettyPrintSchema(methodDesc['response']))
-
-  setattr(method, '__doc__', ''.join(docs))
-  return (methodName, method)
-
-
def createNextMethod(methodName):
  """Creates any _next methods for attaching to a Resource.

  The _next methods allow for easy iteration through list() responses.

  Args:
    methodName: string, name of the method to use.
  """
  methodName = fix_method_name(methodName)

  def methodNext(self, previous_request, previous_response):
    """Retrieves the next page of results.

Args:
  previous_request: The request for the previous page. (required)
  previous_response: The response from the request for the previous page. (required)

Returns:
  A request object that you can call 'execute()' on to request the next
  page. Returns None if there are no more items in the collection.
    """
    # The next page, if any, is identified by the nextPageToken value in the
    # previous response; absence of the key means the collection is exhausted.
    try:
      next_token = previous_response['nextPageToken']
    except KeyError:
      return None

    # Clone the previous request and rewrite its query string so that
    # pageToken carries the new token.
    next_request = copy.copy(previous_request)

    url_parts = list(urlparse.urlparse(next_request.uri))
    query_pairs = [pair for pair in parse_qsl(url_parts[4])
                   if pair[0] != 'pageToken']
    query_pairs.append(('pageToken', next_token))
    url_parts[4] = urllib.urlencode(query_pairs)
    next_request.uri = urlparse.urlunparse(url_parts)

    logger.info('URL being requested: %s' % next_request.uri)

    return next_request

  return (methodName, methodNext)
-
-
class Resource(object):
  """A class for interacting with a resource."""

  def __init__(self, http, baseUrl, model, requestBuilder, developerKey,
               resourceDesc, rootDesc, schema):
    """Build a Resource from the API description.

    Args:
      http: httplib2.Http, Object to make http requests with.
      baseUrl: string, base URL for the API. All requests are relative to this
          URI.
      model: apiclient.Model, converts to and from the wire format.
      requestBuilder: class or callable that instantiates an
          apiclient.HttpRequest object.
      developerKey: string, key obtained from
          https://code.google.com/apis/console
      resourceDesc: object, section of deserialized discovery document that
          describes a resource. Note that the top level discovery document
          is considered a resource.
      rootDesc: object, the entire deserialized discovery document.
      schema: object, mapping of schema names to schema descriptions.
    """
    # Names of attributes attached at runtime from the discovery document;
    # tracked so pickling can drop them and __setstate__ can rebuild them.
    self._dynamic_attrs = []

    self._http = http
    self._baseUrl = baseUrl
    self._model = model
    self._developerKey = developerKey
    self._requestBuilder = requestBuilder
    self._resourceDesc = resourceDesc
    self._rootDesc = rootDesc
    self._schema = schema

    self._set_service_methods()

  def _set_dynamic_attr(self, attr_name, value):
    """Set an instance attribute and remember it as dynamically created.

    Args:
      attr_name: string; The name of the attribute to be set
      value: The value being set on the object and tracked in the dynamic
        attribute list.
    """
    self._dynamic_attrs.append(attr_name)
    self.__dict__[attr_name] = value

  def __getstate__(self):
    """Return a picklable state dict with the dynamic attributes removed.

    The dynamic attributes are bound methods and are regenerated from the
    discovery document on unpickle, so they must not be serialized.
    """
    state = copy.copy(self.__dict__)
    for name in self._dynamic_attrs + ['_dynamic_attrs']:
      del state[name]
    return state

  def __setstate__(self, state):
    """Reconstitute from a pickle and regenerate the dynamic attributes."""
    self.__dict__.update(state)
    self._dynamic_attrs = []
    self._set_service_methods()

  def _set_service_methods(self):
    # Attach every kind of dynamic attribute derived from the discovery doc.
    self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
    self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
    self._add_next_methods(self._resourceDesc, self._schema)

  def _add_basic_methods(self, resourceDesc, rootDesc, schema):
    """Attach one callable per method in the resource description."""
    if 'methods' not in resourceDesc:
      return
    for methodName, methodDesc in resourceDesc['methods'].iteritems():
      fixedMethodName, method = createMethod(
          methodName, methodDesc, rootDesc, schema)
      self._set_dynamic_attr(fixedMethodName,
                             method.__get__(self, self.__class__))
      # Methods that support media download also get a *_media variant. The
      # generated method changes behavior when its name ends in _media.
      if methodDesc.get('supportsMediaDownload', False):
        fixedMethodName, method = createMethod(
            methodName + '_media', methodDesc, rootDesc, schema)
        self._set_dynamic_attr(fixedMethodName,
                               method.__get__(self, self.__class__))

  def _add_nested_resources(self, resourceDesc, rootDesc, schema):
    """Attach accessor methods for any nested resources."""
    if 'resources' not in resourceDesc:
      return

    def createResourceMethod(methodName, methodDesc):
      """Create a method on the Resource to access a nested Resource.

      Args:
        methodName: string, name of the method to use.
        methodDesc: object, fragment of deserialized discovery document that
          describes the method.
      """
      methodName = fix_method_name(methodName)

      def methodResource(self):
        return Resource(http=self._http, baseUrl=self._baseUrl,
                        model=self._model, developerKey=self._developerKey,
                        requestBuilder=self._requestBuilder,
                        resourceDesc=methodDesc, rootDesc=rootDesc,
                        schema=schema)

      setattr(methodResource, '__doc__', 'A collection resource.')
      setattr(methodResource, '__is_resource__', True)

      return (methodName, methodResource)

    for methodName, methodDesc in resourceDesc['resources'].iteritems():
      fixedMethodName, method = createResourceMethod(methodName, methodDesc)
      self._set_dynamic_attr(fixedMethodName,
                             method.__get__(self, self.__class__))

  def _add_next_methods(self, resourceDesc, schema):
    """Attach *_next methods for pageable list()-style methods.

    A method is pageable when its response schema contains nextPageToken and
    it accepts a pageToken parameter.
    """
    if 'methods' not in resourceDesc:
      return
    for methodName, methodDesc in resourceDesc['methods'].iteritems():
      if 'response' not in methodDesc:
        continue
      responseSchema = methodDesc['response']
      if '$ref' in responseSchema:
        responseSchema = schema.get(responseSchema['$ref'])
      if 'nextPageToken' not in responseSchema.get('properties', {}):
        continue
      if 'pageToken' not in methodDesc.get('parameters', {}):
        continue
      fixedMethodName, method = createNextMethod(methodName + '_next')
      self._set_dynamic_attr(fixedMethodName,
                             method.__get__(self, self.__class__))
diff --git a/apiclient/errors.py b/apiclient/errors.py
deleted file mode 100644
index 2bf9149..0000000
--- a/apiclient/errors.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/python2.4
-#
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Errors for the library.
-
-All exceptions defined by the library
-should be defined in this file.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-
-from oauth2client import util
-from oauth2client.anyjson import simplejson
-
-
class Error(Exception):
  """Base error for this module."""
-
-
class HttpError(Error):
  """HTTP data was invalid or unexpected."""

  @util.positional(3)
  def __init__(self, resp, content, uri=None):
    self.resp = resp
    self.content = content
    self.uri = uri

  def _get_reason(self):
    """Calculate the reason for the error from the response content."""
    reason = self.resp.reason
    try:
      # Prefer the server-supplied error message when the body is JSON.
      data = simplejson.loads(self.content)
      reason = data['error']['message']
    except (ValueError, KeyError):
      pass
    return '' if reason is None else reason

  def __repr__(self):
    if not self.uri:
      return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
    return '<HttpError %s when requesting %s returned "%s">' % (
        self.resp.status, self.uri, self._get_reason().strip())

  __str__ = __repr__
-
-
class InvalidJsonError(Error):
  """The JSON returned could not be parsed."""
-
-
class UnknownFileType(Error):
  """File type unknown or unexpected."""
-
-
class UnknownLinkType(Error):
  """Link type unknown or unexpected."""
-
-
class UnknownApiNameOrVersion(Error):
  """No API with that name and version exists."""
-
-
class UnacceptableMimeTypeError(Error):
  """That is an unacceptable mimetype for this operation."""
-
-
class MediaUploadSizeError(Error):
  """Media is larger than the method can accept."""
-
-
class ResumableUploadError(HttpError):
  """Error occurred during resumable upload."""
  pass
-
-
class InvalidChunkSizeError(Error):
  """The given chunksize is not valid."""
-
-
class BatchError(HttpError):
  """Error occurred during batch operations."""

  @util.positional(2)
  def __init__(self, reason, resp=None, content=None):
    """Constructor.

    Args:
      reason: string, short human readable explanation of the error.
      resp: httplib2.Response, the HTTP response of the failed request, if any.
      content: string, body of the failed response, if any.
    """
    self.resp = resp
    self.content = content
    self.reason = reason

  def __repr__(self):
    # resp defaults to None (batch-level errors have no HTTP response), so
    # guard the status access: the old unconditional self.resp.status made
    # repr()/str() raise AttributeError for exactly those errors.
    if getattr(self.resp, 'status', None) is None:
      return '<BatchError "%s">' % (self.reason)
    else:
      return '<BatchError %s "%s">' % (self.resp.status, self.reason)

  __str__ = __repr__
-
-
class UnexpectedMethodError(Error):
  """Exception raised by RequestMockBuilder on unexpected calls."""

  @util.positional(1)
  def __init__(self, methodId=None):
    """Constructor for an UnexpectedMethodError.

    Args:
      methodId: string, the id of the method that was unexpectedly called.
    """
    super(UnexpectedMethodError, self).__init__(
        'Received unexpected call %s' % methodId)
-
-
class UnexpectedBodyError(Error):
  """Exception raised by RequestMockBuilder on unexpected bodies."""

  def __init__(self, expected, provided):
    """Constructor for an UnexpectedBodyError.

    Args:
      expected: the request body the mock was configured to expect.
      provided: the request body that was actually supplied.
    """
    super(UnexpectedBodyError, self).__init__(
        'Expected: [%s] - Provided: [%s]' % (expected, provided))
diff --git a/apiclient/http.py b/apiclient/http.py
deleted file mode 100644
index a956477..0000000
--- a/apiclient/http.py
+++ /dev/null
@@ -1,1536 +0,0 @@
-# Copyright (C) 2012 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Classes to encapsulate a single HTTP request.
-
-The classes implement a command pattern, with every
-object supporting an execute() method that does the
-actual HTTP request.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import StringIO
-import base64
-import copy
-import gzip
-import httplib2
-import mimeparse
-import mimetypes
-import os
-import sys
-import urllib
-import urlparse
-import uuid
-
-from email.generator import Generator
-from email.mime.multipart import MIMEMultipart
-from email.mime.nonmultipart import MIMENonMultipart
-from email.parser import FeedParser
-from errors import BatchError
-from errors import HttpError
-from errors import InvalidChunkSizeError
-from errors import ResumableUploadError
-from errors import UnexpectedBodyError
-from errors import UnexpectedMethodError
-from model import JsonModel
-from oauth2client import util
-from oauth2client.anyjson import simplejson
-
-
-DEFAULT_CHUNK_SIZE = 512*1024
-
-MAX_URI_LENGTH = 2048
-
-
class MediaUploadProgress(object):
  """Status of a resumable upload."""

  def __init__(self, resumable_progress, total_size):
    """Constructor.

    Args:
      resumable_progress: int, bytes sent so far.
      total_size: int, total bytes in complete upload, or None if the total
        upload size isn't known ahead of time.
    """
    self.resumable_progress = resumable_progress
    self.total_size = total_size

  def progress(self):
    """Percent of upload completed, as a float.

    Returns:
      the percentage complete as a float, returning 0.0 if the total size of
      the upload is unknown or zero.
    """
    # Guard against total_size == 0 (a zero-byte upload) so progress() never
    # raises ZeroDivisionError; previously only None was handled.
    if self.total_size is not None and self.total_size != 0:
      return float(self.resumable_progress) / float(self.total_size)
    else:
      return 0.0
-
-
class MediaDownloadProgress(object):
  """Status of a resumable download."""

  def __init__(self, resumable_progress, total_size):
    """Constructor.

    Args:
      resumable_progress: int, bytes received so far.
      total_size: int, total bytes in complete download.
    """
    self.resumable_progress = resumable_progress
    self.total_size = total_size

  def progress(self):
    """Percent of download completed, as a float.

    Returns:
      the percentage complete as a float, returning 0.0 if the total size of
      the download is unknown or zero.
    """
    # Guard against total_size == 0 (a zero-byte download) so progress()
    # never raises ZeroDivisionError; previously only None was handled.
    if self.total_size is not None and self.total_size != 0:
      return float(self.resumable_progress) / float(self.total_size)
    else:
      return 0.0
-
-
class MediaUpload(object):
  """Describes a media object to upload.

  Base class that defines the interface of MediaUpload subclasses.

  Note that subclasses of MediaUpload may allow you to control the chunksize
  when uploading a media object. It is important to keep the size of the chunk
  as large as possible to keep the upload efficient. Other factors may influence
  the size of the chunk you use, particularly if you are working in an
  environment where individual HTTP requests may have a hardcoded time limit,
  such as under certain classes of requests under Google App Engine.

  Streams are io.Base compatible objects that support seek(). Some MediaUpload
  subclasses support using streams directly to upload data. Support for
  streaming may be indicated by a MediaUpload sub-class and if appropriate for a
  platform that stream will be used for uploading the media object. The support
  for streaming is indicated by has_stream() returning True. The stream() method
  should return an io.Base object that supports seek(). On platforms where the
  underlying httplib module supports streaming, for example Python 2.6 and
  later, the stream will be passed into the http library which will result in
  less memory being used and possibly faster uploads.

  If you need to upload media that can't be uploaded using any of the existing
  MediaUpload sub-class then you can sub-class MediaUpload for your particular
  needs.
  """

  def chunksize(self):
    """Chunk size for resumable uploads.

    Returns:
      Chunk size in bytes.
    """
    raise NotImplementedError()

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return 'application/octet-stream'

  def size(self):
    """Size of upload.

    Returns:
      Size of the body, or None if the size is unknown.
    """
    return None

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return False

  def getbytes(self, begin, end):
    """Get bytes from the media.

    Args:
      begin: int, offset from beginning of file.
      end: int, number of bytes to read, starting at begin.
        NOTE(review): the parameter is named 'end' here, but this docstring
        and the concrete implementations (e.g. MediaIoBaseUpload.getbytes)
        treat it as a byte count, not an absolute offset -- confirm intent
        before relying on either meaning.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    raise NotImplementedError()

  def has_stream(self):
    """Does the underlying upload support a streaming interface.

    Streaming means it is an io.IOBase subclass that supports seek, i.e.
    seekable() returns True.

    Returns:
      True if the call to stream() will return an instance of a seekable io.Base
      subclass.
    """
    return False

  def stream(self):
    """A stream interface to the data being uploaded.

    Returns:
      The returned value is an io.IOBase subclass that supports seek, i.e.
      seekable() returns True.
    """
    raise NotImplementedError()

  @util.positional(1)
  def _to_json(self, strip=None):
    """Utility function for creating a JSON representation of a MediaUpload.

    Args:
      strip: array, An array of names of members to not include in the JSON.

    Returns:
       string, a JSON representation of this instance, suitable to pass to
       from_json().
    """
    t = type(self)
    d = copy.copy(self.__dict__)
    if strip is not None:
      for member in strip:
        del d[member]
    # Record the concrete class and module so new_from_json() can locate the
    # right from_json() factory when deserializing.
    d['_class'] = t.__name__
    d['_module'] = t.__module__
    return simplejson.dumps(d)

  def to_json(self):
    """Create a JSON representation of an instance of MediaUpload.

    Returns:
       string, a JSON representation of this instance, suitable to pass to
       from_json().
    """
    return self._to_json()

  @classmethod
  def new_from_json(cls, s):
    """Utility class method to instantiate a MediaUpload subclass from a JSON
    representation produced by to_json().

    Args:
      s: string, JSON from to_json().

    Returns:
      An instance of the subclass of MediaUpload that was serialized with
      to_json().
    """
    data = simplejson.loads(s)
    # Find and call the right classmethod from_json() to restore the object.
    # The module path recorded by _to_json() is imported dynamically so that
    # subclasses defined outside this module can be restored too.
    module = data['_module']
    m = __import__(module, fromlist=module.split('.')[:-1])
    kls = getattr(m, data['_class'])
    from_json = getattr(kls, 'from_json')
    return from_json(s)
-
-
class MediaIoBaseUpload(MediaUpload):
  """A MediaUpload for a io.Base objects.

  Note that the Python file object is compatible with io.Base and can be used
  with this class also.

    fh = io.BytesIO('...Some data to upload...')
    media = MediaIoBaseUpload(fh, mimetype='image/png',
      chunksize=1024*1024, resumable=True)
    farm.animals().insert(
        id='cow',
        name='cow.png',
        media_body=media).execute()

  Depending on the platform you are working on, you may pass -1 as the
  chunksize, which indicates that the entire file should be uploaded in a single
  request. If the underlying platform supports streams, such as Python 2.6 or
  later, then this can be very efficient as it avoids multiple connections, and
  also avoids loading the entire file into memory before sending it. Note that
  Google App Engine has a 5MB limit on request size, so you should never set
  your chunksize larger than 5MB, or to -1.
  """

  @util.positional(3)
  def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE,
      resumable=False):
    """Constructor.

    Args:
      fd: io.Base or file object, The source of the bytes to upload. MUST be
        opened in blocking mode, do not use streams opened in non-blocking mode.
        The given stream must be seekable, that is, it must be able to call
        seek() on fd.
      mimetype: string, Mime-type of the file.
      chunksize: int, File will be uploaded in chunks of this many bytes. Only
        used if resumable=True. Pass in a value of -1 if the file is to be
        uploaded as a single chunk. Note that Google App Engine has a 5MB limit
        on request size, so you should never set your chunksize larger than 5MB,
        or to -1.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.

    Raises:
      InvalidChunkSizeError: if chunksize is neither -1 nor a positive number.
    """
    super(MediaIoBaseUpload, self).__init__()
    self._fd = fd
    self._mimetype = mimetype
    # Equivalent to: not (chunksize == -1 or chunksize > 0).
    if chunksize != -1 and chunksize <= 0:
      raise InvalidChunkSizeError()
    self._chunksize = chunksize
    self._resumable = resumable

    # The total size is discovered by seeking to the end of the stream.
    self._fd.seek(0, os.SEEK_END)
    self._size = self._fd.tell()

  def chunksize(self):
    """Chunk size for resumable uploads.

    Returns:
      Chunk size in bytes.
    """
    return self._chunksize

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return self._mimetype

  def size(self):
    """Size of upload.

    Returns:
      Size of the body in bytes, or None if the size is unknown.
    """
    return self._size

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return self._resumable

  def getbytes(self, begin, length):
    """Get bytes from the media.

    Args:
      begin: int, offset from beginning of file.
      length: int, number of bytes to read, starting at begin.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    self._fd.seek(begin)
    return self._fd.read(length)

  def has_stream(self):
    """Does the underlying upload support a streaming interface.

    Returns:
      True: the stream() method returns the seekable source stream.
    """
    return True

  def stream(self):
    """A stream interface to the data being uploaded.

    Returns:
      The seekable io.IOBase subclass holding the upload data.
    """
    return self._fd

  def to_json(self):
    """This upload type is not serializable."""
    raise NotImplementedError('MediaIoBaseUpload is not serializable.')
-
-
class MediaFileUpload(MediaIoBaseUpload):
  """A MediaUpload for a file.

  Construct a MediaFileUpload and pass as the media_body parameter of the
  method. For example, if we had a service that allowed uploading images:


    media = MediaFileUpload('cow.png', mimetype='image/png',
      chunksize=1024*1024, resumable=True)
    farm.animals().insert(
        id='cow',
        name='cow.png',
        media_body=media).execute()

  Depending on the platform you are working on, you may pass -1 as the
  chunksize, which indicates that the entire file should be uploaded in a single
  request. If the underlying platform supports streams, such as Python 2.6 or
  later, then this can be very efficient as it avoids multiple connections, and
  also avoids loading the entire file into memory before sending it. Note that
  Google App Engine has a 5MB limit on request size, so you should never set
  your chunksize larger than 5MB, or to -1.
  """

  @util.positional(2)
  def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
               resumable=False):
    """Constructor.

    Args:
      filename: string, Name of the file.
      mimetype: string, Mime-type of the file. If None then a mime-type will be
        guessed from the file extension.
      chunksize: int, File will be uploaded in chunks of this many bytes. Only
        used if resumable=True. Pass in a value of -1 if the file is to be
        uploaded in a single chunk. Note that Google App Engine has a 5MB limit
        on request size, so you should never set your chunksize larger than 5MB,
        or to -1.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    self._filename = filename
    stream = open(self._filename, 'rb')
    if mimetype is None:
      # Fall back to guessing the mime type from the file extension.
      (mimetype, encoding) = mimetypes.guess_type(filename)
    super(MediaFileUpload, self).__init__(stream, mimetype,
                                          chunksize=chunksize,
                                          resumable=resumable)

  def to_json(self):
    """Creating a JSON representation of an instance of MediaFileUpload.

    The open file object is stripped; from_json() reopens the file by name.

    Returns:
       string, a JSON representation of this instance, suitable to pass to
       from_json().
    """
    return self._to_json(strip=['_fd'])

  @staticmethod
  def from_json(s):
    """Rebuild a MediaFileUpload from its to_json() representation."""
    descr = simplejson.loads(s)
    return MediaFileUpload(
        descr['_filename'], mimetype=descr['_mimetype'],
        chunksize=descr['_chunksize'], resumable=descr['_resumable'])
-
-
class MediaInMemoryUpload(MediaIoBaseUpload):
  """MediaUpload for a chunk of bytes.

  DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
  the stream.
  """

  @util.positional(2)
  def __init__(self, body, mimetype='application/octet-stream',
               chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
    """Create a new MediaInMemoryUpload.

  DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
  the stream.

  Args:
    body: string, Bytes of body content.
    mimetype: string, Mime-type of the file or default of
      'application/octet-stream'.
    chunksize: int, File will be uploaded in chunks of this many bytes. Only
      used if resumable=True.
    resumable: bool, True if this is a resumable upload. False means upload
      in a single request.
    """
    # Wrap the raw bytes in an in-memory stream so the io.Base-oriented base
    # class can handle sizing, chunking, and reads.
    stream = StringIO.StringIO(body)
    super(MediaInMemoryUpload, self).__init__(stream, mimetype,
                                              chunksize=chunksize,
                                              resumable=resumable)
-
-
-class MediaIoBaseDownload(object):
-  """"Download media resources.
-
-  Note that the Python file object is compatible with io.Base and can be used
-  with this class also.
-
-
-  Example:
-    request = farms.animals().get_media(id='cow')
-    fh = io.FileIO('cow.png', mode='wb')
-    downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)
-
-    done = False
-    while done is False:
-      status, done = downloader.next_chunk()
-      if status:
-        print "Download %d%%." % int(status.progress() * 100)
-    print "Download Complete!"
-  """
-
-  @util.positional(3)
-  def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
-    """Constructor.
-
-    Args:
-      fd: io.Base or file object, The stream in which to write the downloaded
-        bytes.
-      request: apiclient.http.HttpRequest, the media request to perform in
-        chunks.
-      chunksize: int, File will be downloaded in chunks of this many bytes.
-    """
-    self._fd = fd
-    self._request = request
-    self._uri = request.uri
-    self._chunksize = chunksize
-    self._progress = 0
-    self._total_size = None
-    self._done = False
-
-  def next_chunk(self):
-    """Get the next chunk of the download.
-
-    Returns:
-      (status, done): (MediaDownloadStatus, boolean)
-         The value of 'done' will be True when the media has been fully
-         downloaded.
-
-    Raises:
-      apiclient.errors.HttpError if the response was not a 2xx.
-      httplib2.HttpLib2Error if a transport error has occured.
-    """
-    headers = {
-        'range': 'bytes=%d-%d' % (
-            self._progress, self._progress + self._chunksize)
-        }
-    http = self._request.http
-    http.follow_redirects = False
-
-    resp, content = http.request(self._uri, headers=headers)
-    if resp.status in [301, 302, 303, 307, 308] and 'location' in resp:
-        self._uri = resp['location']
-        resp, content = http.request(self._uri, headers=headers)
-    if resp.status in [200, 206]:
-      self._progress += len(content)
-      self._fd.write(content)
-
-      if 'content-range' in resp:
-        content_range = resp['content-range']
-        length = content_range.rsplit('/', 1)[1]
-        self._total_size = int(length)
-
-      if self._progress == self._total_size:
-        self._done = True
-      return MediaDownloadProgress(self._progress, self._total_size), self._done
-    else:
-      raise HttpError(resp, content, uri=self._uri)
-
-
-class _StreamSlice(object):
-  """Truncated stream.
-
-  Takes a stream and presents a stream that is a slice of the original stream.
-  This is used when uploading media in chunks. In later versions of Python a
-  stream can be passed to httplib in place of the string of data to send. The
-  problem is that httplib just blindly reads to the end of the stream. This
-  wrapper presents a virtual stream that only reads to the end of the chunk.
-  """
-
-  def __init__(self, stream, begin, chunksize):
-    """Constructor.
-
-    Args:
-      stream: (io.Base, file object), the stream to wrap.
-      begin: int, the seek position the chunk begins at.
-      chunksize: int, the size of the chunk.
-    """
-    self._stream = stream
-    self._begin = begin
-    self._chunksize = chunksize
-    self._stream.seek(begin)
-
-  def read(self, n=-1):
-    """Read n bytes.
-
-    Args:
-      n, int, the number of bytes to read.
-
-    Returns:
-      A string of length 'n', or less if EOF is reached.
-    """
-    # The data left available to read sits in [cur, end)
-    cur = self._stream.tell()
-    end = self._begin + self._chunksize
-    if n == -1 or cur + n > end:
-      n = end - cur
-    return self._stream.read(n)
-
-
-class HttpRequest(object):
-  """Encapsulates a single HTTP request."""
-
-  @util.positional(4)
-  def __init__(self, http, postproc, uri,
-               method='GET',
-               body=None,
-               headers=None,
-               methodId=None,
-               resumable=None):
-    """Constructor for an HttpRequest.
-
-    Args:
-      http: httplib2.Http, the transport object to use to make a request
-      postproc: callable, called on the HTTP response and content to transform
-                it into a data object before returning, or raising an exception
-                on an error.
-      uri: string, the absolute URI to send the request to
-      method: string, the HTTP method to use
-      body: string, the request body of the HTTP request,
-      headers: dict, the HTTP request headers
-      methodId: string, a unique identifier for the API method being called.
-      resumable: MediaUpload, None if this is not a resumbale request.
-    """
-    self.uri = uri
-    self.method = method
-    self.body = body
-    self.headers = headers or {}
-    self.methodId = methodId
-    self.http = http
-    self.postproc = postproc
-    self.resumable = resumable
-    self.response_callbacks = []
-    self._in_error_state = False
-
-    # Pull the multipart boundary out of the content-type header.
-    major, minor, params = mimeparse.parse_mime_type(
-        headers.get('content-type', 'application/json'))
-
-    # The size of the non-media part of the request.
-    self.body_size = len(self.body or '')
-
-    # The resumable URI to send chunks to.
-    self.resumable_uri = None
-
-    # The bytes that have been uploaded.
-    self.resumable_progress = 0
-
-  @util.positional(1)
-  def execute(self, http=None):
-    """Execute the request.
-
-    Args:
-      http: httplib2.Http, an http object to be used in place of the
-            one the HttpRequest request object was constructed with.
-
-    Returns:
-      A deserialized object model of the response body as determined
-      by the postproc.
-
-    Raises:
-      apiclient.errors.HttpError if the response was not a 2xx.
-      httplib2.HttpLib2Error if a transport error has occured.
-    """
-    if http is None:
-      http = self.http
-    if self.resumable:
-      body = None
-      while body is None:
-        _, body = self.next_chunk(http=http)
-      return body
-    else:
-      if 'content-length' not in self.headers:
-        self.headers['content-length'] = str(self.body_size)
-      # If the request URI is too long then turn it into a POST request.
-      if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET':
-        self.method = 'POST'
-        self.headers['x-http-method-override'] = 'GET'
-        self.headers['content-type'] = 'application/x-www-form-urlencoded'
-        parsed = urlparse.urlparse(self.uri)
-        self.uri = urlparse.urlunparse(
-            (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
-             None)
-            )
-        self.body = parsed.query
-        self.headers['content-length'] = str(len(self.body))
-
-      resp, content = http.request(str(self.uri), method=str(self.method),
-                                   body=self.body, headers=self.headers)
-      for callback in self.response_callbacks:
-        callback(resp)
-      if resp.status >= 300:
-        raise HttpError(resp, content, uri=self.uri)
-    return self.postproc(resp, content)
-
-  @util.positional(2)
-  def add_response_callback(self, cb):
-    """add_response_headers_callback
-
-    Args:
-      cb: Callback to be called on receiving the response headers, of signature:
-
-      def cb(resp):
-        # Where resp is an instance of httplib2.Response
-    """
-    self.response_callbacks.append(cb)
-
-  @util.positional(1)
-  def next_chunk(self, http=None):
-    """Execute the next step of a resumable upload.
-
-    Can only be used if the method being executed supports media uploads and
-    the MediaUpload object passed in was flagged as using resumable upload.
-
-    Example:
-
-      media = MediaFileUpload('cow.png', mimetype='image/png',
-                              chunksize=1000, resumable=True)
-      request = farm.animals().insert(
-          id='cow',
-          name='cow.png',
-          media_body=media)
-
-      response = None
-      while response is None:
-        status, response = request.next_chunk()
-        if status:
-          print "Upload %d%% complete." % int(status.progress() * 100)
-
-
-    Returns:
-      (status, body): (ResumableMediaStatus, object)
-         The body will be None until the resumable media is fully uploaded.
-
-    Raises:
-      apiclient.errors.HttpError if the response was not a 2xx.
-      httplib2.HttpLib2Error if a transport error has occured.
-    """
-    if http is None:
-      http = self.http
-
-    if self.resumable.size() is None:
-      size = '*'
-    else:
-      size = str(self.resumable.size())
-
-    if self.resumable_uri is None:
-      start_headers = copy.copy(self.headers)
-      start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
-      if size != '*':
-        start_headers['X-Upload-Content-Length'] = size
-      start_headers['content-length'] = str(self.body_size)
-
-      resp, content = http.request(self.uri, self.method,
-                                   body=self.body,
-                                   headers=start_headers)
-      if resp.status == 200 and 'location' in resp:
-        self.resumable_uri = resp['location']
-      else:
-        raise ResumableUploadError(resp, content)
-    elif self._in_error_state:
-      # If we are in an error state then query the server for current state of
-      # the upload by sending an empty PUT and reading the 'range' header in
-      # the response.
-      headers = {
-          'Content-Range': 'bytes */%s' % size,
-          'content-length': '0'
-          }
-      resp, content = http.request(self.resumable_uri, 'PUT',
-                                   headers=headers)
-      status, body = self._process_response(resp, content)
-      if body:
-        # The upload was complete.
-        return (status, body)
-
-    # The httplib.request method can take streams for the body parameter, but
-    # only in Python 2.6 or later. If a stream is available under those
-    # conditions then use it as the body argument.
-    if self.resumable.has_stream() and sys.version_info[1] >= 6:
-      data = self.resumable.stream()
-      if self.resumable.chunksize() == -1:
-        data.seek(self.resumable_progress)
-        chunk_end = self.resumable.size() - self.resumable_progress - 1
-      else:
-        # Doing chunking with a stream, so wrap a slice of the stream.
-        data = _StreamSlice(data, self.resumable_progress,
-                            self.resumable.chunksize())
-        chunk_end = min(
-            self.resumable_progress + self.resumable.chunksize() - 1,
-            self.resumable.size() - 1)
-    else:
-      data = self.resumable.getbytes(
-          self.resumable_progress, self.resumable.chunksize())
-
-      # A short read implies that we are at EOF, so finish the upload.
-      if len(data) < self.resumable.chunksize():
-        size = str(self.resumable_progress + len(data))
-
-      chunk_end = self.resumable_progress + len(data) - 1
-
-    headers = {
-        'Content-Range': 'bytes %d-%d/%s' % (
-            self.resumable_progress, chunk_end, size),
-        # Must set the content-length header here because httplib can't
-        # calculate the size when working with _StreamSlice.
-        'Content-Length': str(chunk_end - self.resumable_progress + 1)
-        }
-    try:
-      resp, content = http.request(self.resumable_uri, 'PUT',
-                                   body=data,
-                                   headers=headers)
-    except:
-      self._in_error_state = True
-      raise
-
-    return self._process_response(resp, content)
-
-  def _process_response(self, resp, content):
-    """Process the response from a single chunk upload.
-
-    Args:
-      resp: httplib2.Response, the response object.
-      content: string, the content of the response.
-
-    Returns:
-      (status, body): (ResumableMediaStatus, object)
-         The body will be None until the resumable media is fully uploaded.
-
-    Raises:
-      apiclient.errors.HttpError if the response was not a 2xx or a 308.
-    """
-    if resp.status in [200, 201]:
-      self._in_error_state = False
-      return None, self.postproc(resp, content)
-    elif resp.status == 308:
-      self._in_error_state = False
-      # A "308 Resume Incomplete" indicates we are not done.
-      self.resumable_progress = int(resp['range'].split('-')[1]) + 1
-      if 'location' in resp:
-        self.resumable_uri = resp['location']
-    else:
-      self._in_error_state = True
-      raise HttpError(resp, content, uri=self.uri)
-
-    return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
-            None)
-
-  def to_json(self):
-    """Returns a JSON representation of the HttpRequest."""
-    d = copy.copy(self.__dict__)
-    if d['resumable'] is not None:
-      d['resumable'] = self.resumable.to_json()
-    del d['http']
-    del d['postproc']
-
-    return simplejson.dumps(d)
-
-  @staticmethod
-  def from_json(s, http, postproc):
-    """Returns an HttpRequest populated with info from a JSON object."""
-    d = simplejson.loads(s)
-    if d['resumable'] is not None:
-      d['resumable'] = MediaUpload.new_from_json(d['resumable'])
-    return HttpRequest(
-        http,
-        postproc,
-        uri=d['uri'],
-        method=d['method'],
-        body=d['body'],
-        headers=d['headers'],
-        methodId=d['methodId'],
-        resumable=d['resumable'])
-
-
-class BatchHttpRequest(object):
-  """Batches multiple HttpRequest objects into a single HTTP request.
-
-  Example:
-    from apiclient.http import BatchHttpRequest
-
-    def list_animals(request_id, response, exception):
-      \"\"\"Do something with the animals list response.\"\"\"
-      if exception is not None:
-        # Do something with the exception.
-        pass
-      else:
-        # Do something with the response.
-        pass
-
-    def list_farmers(request_id, response, exception):
-      \"\"\"Do something with the farmers list response.\"\"\"
-      if exception is not None:
-        # Do something with the exception.
-        pass
-      else:
-        # Do something with the response.
-        pass
-
-    service = build('farm', 'v2')
-
-    batch = BatchHttpRequest()
-
-    batch.add(service.animals().list(), list_animals)
-    batch.add(service.farmers().list(), list_farmers)
-    batch.execute(http=http)
-  """
-
-  @util.positional(1)
-  def __init__(self, callback=None, batch_uri=None):
-    """Constructor for a BatchHttpRequest.
-
-    Args:
-      callback: callable, A callback to be called for each response, of the
-        form callback(id, response, exception). The first parameter is the
-        request id, and the second is the deserialized response object. The
-        third is an apiclient.errors.HttpError exception object if an HTTP error
-        occurred while processing the request, or None if no error occurred.
-      batch_uri: string, URI to send batch requests to.
-    """
-    if batch_uri is None:
-      batch_uri = 'https://www.googleapis.com/batch'
-    self._batch_uri = batch_uri
-
-    # Global callback to be called for each individual response in the batch.
-    self._callback = callback
-
-    # A map from id to request.
-    self._requests = {}
-
-    # A map from id to callback.
-    self._callbacks = {}
-
-    # List of request ids, in the order in which they were added.
-    self._order = []
-
-    # The last auto generated id.
-    self._last_auto_id = 0
-
-    # Unique ID on which to base the Content-ID headers.
-    self._base_id = None
-
-    # A map from request id to (httplib2.Response, content) response pairs
-    self._responses = {}
-
-    # A map of id(Credentials) that have been refreshed.
-    self._refreshed_credentials = {}
-
-  def _refresh_and_apply_credentials(self, request, http):
-    """Refresh the credentials and apply to the request.
-
-    Args:
-      request: HttpRequest, the request.
-      http: httplib2.Http, the global http object for the batch.
-    """
-    # For the credentials to refresh, but only once per refresh_token
-    # If there is no http per the request then refresh the http passed in
-    # via execute()
-    creds = None
-    if request.http is not None and hasattr(request.http.request,
-        'credentials'):
-      creds = request.http.request.credentials
-    elif http is not None and hasattr(http.request, 'credentials'):
-      creds = http.request.credentials
-    if creds is not None:
-      if id(creds) not in self._refreshed_credentials:
-        creds.refresh(http)
-        self._refreshed_credentials[id(creds)] = 1
-
-    # Only apply the credentials if we are using the http object passed in,
-    # otherwise apply() will get called during _serialize_request().
-    if request.http is None or not hasattr(request.http.request,
-        'credentials'):
-      creds.apply(request.headers)
-
-  def _id_to_header(self, id_):
-    """Convert an id to a Content-ID header value.
-
-    Args:
-      id_: string, identifier of individual request.
-
-    Returns:
-      A Content-ID header with the id_ encoded into it. A UUID is prepended to
-      the value because Content-ID headers are supposed to be universally
-      unique.
-    """
-    if self._base_id is None:
-      self._base_id = uuid.uuid4()
-
-    return '<%s+%s>' % (self._base_id, urllib.quote(id_))
-
-  def _header_to_id(self, header):
-    """Convert a Content-ID header value to an id.
-
-    Presumes the Content-ID header conforms to the format that _id_to_header()
-    returns.
-
-    Args:
-      header: string, Content-ID header value.
-
-    Returns:
-      The extracted id value.
-
-    Raises:
-      BatchError if the header is not in the expected format.
-    """
-    if header[0] != '<' or header[-1] != '>':
-      raise BatchError("Invalid value for Content-ID: %s" % header)
-    if '+' not in header:
-      raise BatchError("Invalid value for Content-ID: %s" % header)
-    base, id_ = header[1:-1].rsplit('+', 1)
-
-    return urllib.unquote(id_)
-
-  def _serialize_request(self, request):
-    """Convert an HttpRequest object into a string.
-
-    Args:
-      request: HttpRequest, the request to serialize.
-
-    Returns:
-      The request as a string in application/http format.
-    """
-    # Construct status line
-    parsed = urlparse.urlparse(request.uri)
-    request_line = urlparse.urlunparse(
-        (None, None, parsed.path, parsed.params, parsed.query, None)
-        )
-    status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
-    major, minor = request.headers.get('content-type', 'application/json').split('/')
-    msg = MIMENonMultipart(major, minor)
-    headers = request.headers.copy()
-
-    if request.http is not None and hasattr(request.http.request,
-        'credentials'):
-      request.http.request.credentials.apply(headers)
-
-    # MIMENonMultipart adds its own Content-Type header.
-    if 'content-type' in headers:
-      del headers['content-type']
-
-    for key, value in headers.iteritems():
-      msg[key] = value
-    msg['Host'] = parsed.netloc
-    msg.set_unixfrom(None)
-
-    if request.body is not None:
-      msg.set_payload(request.body)
-      msg['content-length'] = str(len(request.body))
-
-    # Serialize the mime message.
-    fp = StringIO.StringIO()
-    # maxheaderlen=0 means don't line wrap headers.
-    g = Generator(fp, maxheaderlen=0)
-    g.flatten(msg, unixfrom=False)
-    body = fp.getvalue()
-
-    # Strip off the \n\n that the MIME lib tacks onto the end of the payload.
-    if request.body is None:
-      body = body[:-2]
-
-    return status_line.encode('utf-8') + body
-
-  def _deserialize_response(self, payload):
-    """Convert string into httplib2 response and content.
-
-    Args:
-      payload: string, headers and body as a string.
-
-    Returns:
-      A pair (resp, content), such as would be returned from httplib2.request.
-    """
-    # Strip off the status line
-    status_line, payload = payload.split('\n', 1)
-    protocol, status, reason = status_line.split(' ', 2)
-
-    # Parse the rest of the response
-    parser = FeedParser()
-    parser.feed(payload)
-    msg = parser.close()
-    msg['status'] = status
-
-    # Create httplib2.Response from the parsed headers.
-    resp = httplib2.Response(msg)
-    resp.reason = reason
-    resp.version = int(protocol.split('/', 1)[1].replace('.', ''))
-
-    content = payload.split('\r\n\r\n', 1)[1]
-
-    return resp, content
-
-  def _new_id(self):
-    """Create a new id.
-
-    Auto incrementing number that avoids conflicts with ids already used.
-
-    Returns:
-       string, a new unique id.
-    """
-    self._last_auto_id += 1
-    while str(self._last_auto_id) in self._requests:
-      self._last_auto_id += 1
-    return str(self._last_auto_id)
-
-  @util.positional(2)
-  def add(self, request, callback=None, request_id=None):
-    """Add a new request.
-
-    Every callback added will be paired with a unique id, the request_id. That
-    unique id will be passed back to the callback when the response comes back
-    from the server. The default behavior is to have the library generate it's
-    own unique id. If the caller passes in a request_id then they must ensure
-    uniqueness for each request_id, and if they are not an exception is
-    raised. Callers should either supply all request_ids or nevery supply a
-    request id, to avoid such an error.
-
-    Args:
-      request: HttpRequest, Request to add to the batch.
-      callback: callable, A callback to be called for this response, of the
-        form callback(id, response, exception). The first parameter is the
-        request id, and the second is the deserialized response object. The
-        third is an apiclient.errors.HttpError exception object if an HTTP error
-        occurred while processing the request, or None if no errors occurred.
-      request_id: string, A unique id for the request. The id will be passed to
-        the callback with the response.
-
-    Returns:
-      None
-
-    Raises:
-      BatchError if a media request is added to a batch.
-      KeyError is the request_id is not unique.
-    """
-    if request_id is None:
-      request_id = self._new_id()
-    if request.resumable is not None:
-      raise BatchError("Media requests cannot be used in a batch request.")
-    if request_id in self._requests:
-      raise KeyError("A request with this ID already exists: %s" % request_id)
-    self._requests[request_id] = request
-    self._callbacks[request_id] = callback
-    self._order.append(request_id)
-
-  def _execute(self, http, order, requests):
-    """Serialize batch request, send to server, process response.
-
-    Args:
-      http: httplib2.Http, an http object to be used to make the request with.
-      order: list, list of request ids in the order they were added to the
-        batch.
-      request: list, list of request objects to send.
-
-    Raises:
-      httplib2.HttpLib2Error if a transport error has occured.
-      apiclient.errors.BatchError if the response is the wrong format.
-    """
-    message = MIMEMultipart('mixed')
-    # Message should not write out it's own headers.
-    setattr(message, '_write_headers', lambda self: None)
-
-    # Add all the individual requests.
-    for request_id in order:
-      request = requests[request_id]
-
-      msg = MIMENonMultipart('application', 'http')
-      msg['Content-Transfer-Encoding'] = 'binary'
-      msg['Content-ID'] = self._id_to_header(request_id)
-
-      body = self._serialize_request(request)
-      msg.set_payload(body)
-      message.attach(msg)
-
-    body = message.as_string()
-
-    headers = {}
-    headers['content-type'] = ('multipart/mixed; '
-                               'boundary="%s"') % message.get_boundary()
-
-    resp, content = http.request(self._batch_uri, 'POST', body=body,
-                                 headers=headers)
-
-    if resp.status >= 300:
-      raise HttpError(resp, content, uri=self._batch_uri)
-
-    # Now break out the individual responses and store each one.
-    boundary, _ = content.split(None, 1)
-
-    # Prepend with a content-type header so FeedParser can handle it.
-    header = 'content-type: %s\r\n\r\n' % resp['content-type']
-    for_parser = header + content
-
-    parser = FeedParser()
-    parser.feed(for_parser)
-    mime_response = parser.close()
-
-    if not mime_response.is_multipart():
-      raise BatchError("Response not in multipart/mixed format.", resp=resp,
-                       content=content)
-
-    for part in mime_response.get_payload():
-      request_id = self._header_to_id(part['Content-ID'])
-      response, content = self._deserialize_response(part.get_payload())
-      self._responses[request_id] = (response, content)
-
-  @util.positional(1)
-  def execute(self, http=None):
-    """Execute all the requests as a single batched HTTP request.
-
-    Args:
-      http: httplib2.Http, an http object to be used in place of the one the
-        HttpRequest request object was constructed with. If one isn't supplied
-        then use a http object from the requests in this batch.
-
-    Returns:
-      None
-
-    Raises:
-      httplib2.HttpLib2Error if a transport error has occured.
-      apiclient.errors.BatchError if the response is the wrong format.
-    """
-
-    # If http is not supplied use the first valid one given in the requests.
-    if http is None:
-      for request_id in self._order:
-        request = self._requests[request_id]
-        if request is not None:
-          http = request.http
-          break
-
-    if http is None:
-      raise ValueError("Missing a valid http object.")
-
-    self._execute(http, self._order, self._requests)
-
-    # Loop over all the requests and check for 401s. For each 401 request the
-    # credentials should be refreshed and then sent again in a separate batch.
-    redo_requests = {}
-    redo_order = []
-
-    for request_id in self._order:
-      resp, content = self._responses[request_id]
-      if resp['status'] == '401':
-        redo_order.append(request_id)
-        request = self._requests[request_id]
-        self._refresh_and_apply_credentials(request, http)
-        redo_requests[request_id] = request
-
-    if redo_requests:
-      self._execute(http, redo_order, redo_requests)
-
-    # Now process all callbacks that are erroring, and raise an exception for
-    # ones that return a non-2xx response? Or add extra parameter to callback
-    # that contains an HttpError?
-
-    for request_id in self._order:
-      resp, content = self._responses[request_id]
-
-      request = self._requests[request_id]
-      callback = self._callbacks[request_id]
-
-      response = None
-      exception = None
-      try:
-        if resp.status >= 300:
-          raise HttpError(resp, content, uri=request.uri)
-        response = request.postproc(resp, content)
-      except HttpError, e:
-        exception = e
-
-      if callback is not None:
-        callback(request_id, response, exception)
-      if self._callback is not None:
-        self._callback(request_id, response, exception)
-
-
-class HttpRequestMock(object):
-  """Mock of HttpRequest.
-
-  Do not construct directly, instead use RequestMockBuilder.
-  """
-
-  def __init__(self, resp, content, postproc):
-    """Constructor for HttpRequestMock
-
-    Args:
-      resp: httplib2.Response, the response to emulate coming from the request
-      content: string, the response body
-      postproc: callable, the post processing function usually supplied by
-                the model class. See model.JsonModel.response() as an example.
-    """
-    self.resp = resp
-    self.content = content
-    self.postproc = postproc
-    if resp is None:
-      self.resp = httplib2.Response({'status': 200, 'reason': 'OK'})
-    if 'reason' in self.resp:
-      self.resp.reason = self.resp['reason']
-
-  def execute(self, http=None):
-    """Execute the request.
-
-    Same behavior as HttpRequest.execute(), but the response is
-    mocked and not really from an HTTP request/response.
-    """
-    return self.postproc(self.resp, self.content)
-
-
-class RequestMockBuilder(object):
-  """A simple mock of HttpRequest
-
-    Pass in a dictionary to the constructor that maps request methodIds to
-    tuples of (httplib2.Response, content, opt_expected_body) that should be
-    returned when that method is called. None may also be passed in for the
-    httplib2.Response, in which case a 200 OK response will be generated.
-    If an opt_expected_body (str or dict) is provided, it will be compared to
-    the body and UnexpectedBodyError will be raised on inequality.
-
-    Example:
-      response = '{"data": {"id": "tag:google.c...'
-      requestBuilder = RequestMockBuilder(
-        {
-          'plus.activities.get': (None, response),
-        }
-      )
-      apiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)
-
-    Methods that you do not supply a response for will return a
-    200 OK with an empty string as the response content or raise an excpetion
-    if check_unexpected is set to True. The methodId is taken from the rpcName
-    in the discovery document.
-
-    For more details see the project wiki.
-  """
-
-  def __init__(self, responses, check_unexpected=False):
-    """Constructor for RequestMockBuilder
-
-    The constructed object should be a callable object
-    that can replace the class HttpResponse.
-
-    responses - A dictionary that maps methodIds into tuples
-                of (httplib2.Response, content). The methodId
-                comes from the 'rpcName' field in the discovery
-                document.
-    check_unexpected - A boolean setting whether or not UnexpectedMethodError
-                       should be raised on unsupplied method.
-    """
-    self.responses = responses
-    self.check_unexpected = check_unexpected
-
-  def __call__(self, http, postproc, uri, method='GET', body=None,
-               headers=None, methodId=None, resumable=None):
-    """Implements the callable interface that discovery.build() expects
-    of requestBuilder, which is to build an object compatible with
-    HttpRequest.execute(). See that method for the description of the
-    parameters and the expected response.
-    """
-    if methodId in self.responses:
-      response = self.responses[methodId]
-      resp, content = response[:2]
-      if len(response) > 2:
-        # Test the body against the supplied expected_body.
-        expected_body = response[2]
-        if bool(expected_body) != bool(body):
-          # Not expecting a body and provided one
-          # or expecting a body and not provided one.
-          raise UnexpectedBodyError(expected_body, body)
-        if isinstance(expected_body, str):
-          expected_body = simplejson.loads(expected_body)
-        body = simplejson.loads(body)
-        if body != expected_body:
-          raise UnexpectedBodyError(expected_body, body)
-      return HttpRequestMock(resp, content, postproc)
-    elif self.check_unexpected:
-      raise UnexpectedMethodError(methodId=methodId)
-    else:
-      model = JsonModel(False)
-      return HttpRequestMock(None, '{}', model.response)
-
-
-class HttpMock(object):
-  """Mock of httplib2.Http"""
-
-  def __init__(self, filename=None, headers=None):
-    """
-    Args:
-      filename: string, absolute filename to read response from
-      headers: dict, header to return with response
-    """
-    if headers is None:
-      headers = {'status': '200 OK'}
-    if filename:
-      f = file(filename, 'r')
-      self.data = f.read()
-      f.close()
-    else:
-      self.data = None
-    self.response_headers = headers
-    self.headers = None
-    self.uri = None
-    self.method = None
-    self.body = None
-    self.headers = None
-
-
-  def request(self, uri,
-              method='GET',
-              body=None,
-              headers=None,
-              redirections=1,
-              connection_type=None):
-    self.uri = uri
-    self.method = method
-    self.body = body
-    self.headers = headers
-    return httplib2.Response(self.response_headers), self.data
-
-
-class HttpMockSequence(object):
-  """Mock of httplib2.Http
-
-  Mocks a sequence of calls to request returning different responses for each
-  call. Create an instance initialized with the desired response headers
-  and content and then use as if an httplib2.Http instance.
-
-    http = HttpMockSequence([
-      ({'status': '401'}, ''),
-      ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
-      ({'status': '200'}, 'echo_request_headers'),
-      ])
-    resp, content = http.request("http://examples.com")
-
-  There are special values you can pass in for content to trigger
-  behavours that are helpful in testing.
-
-  'echo_request_headers' means return the request headers in the response body
-  'echo_request_headers_as_json' means return the request headers in
-     the response body
-  'echo_request_body' means return the request body in the response body
-  'echo_request_uri' means return the request uri in the response body
-  """
-
-  def __init__(self, iterable):
-    """
-    Args:
-      iterable: iterable, a sequence of pairs of (headers, body)
-    """
-    self._iterable = iterable
-    self.follow_redirects = True
-
-  def request(self, uri,
-              method='GET',
-              body=None,
-              headers=None,
-              redirections=1,
-              connection_type=None):
-    resp, content = self._iterable.pop(0)
-    if content == 'echo_request_headers':
-      content = headers
-    elif content == 'echo_request_headers_as_json':
-      content = simplejson.dumps(headers)
-    elif content == 'echo_request_body':
-      if hasattr(body, 'read'):
-        content = body.read()
-      else:
-        content = body
-    elif content == 'echo_request_uri':
-      content = uri
-    return httplib2.Response(resp), content
-
-
-def set_user_agent(http, user_agent):
-  """Set the user-agent on every request.
-
-  Args:
-     http - An instance of httplib2.Http
-         or something that acts like it.
-     user_agent: string, the value for the user-agent header.
-
-  Returns:
-     A modified instance of http that was passed in.
-
-  Example:
-
-    h = httplib2.Http()
-    h = set_user_agent(h, "my-app-name/6.0")
-
-  Most of the time the user-agent will be set doing auth, this is for the rare
-  cases where you are accessing an unauthenticated endpoint.
-  """
-  request_orig = http.request
-
-  # The closure that will replace 'httplib2.Http.request'.
-  def new_request(uri, method='GET', body=None, headers=None,
-                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                  connection_type=None):
-    """Modify the request headers to add the user-agent."""
-    if headers is None:
-      headers = {}
-    if 'user-agent' in headers:
-      headers['user-agent'] = user_agent + ' ' + headers['user-agent']
-    else:
-      headers['user-agent'] = user_agent
-    resp, content = request_orig(uri, method, body, headers,
-                        redirections, connection_type)
-    return resp, content
-
-  http.request = new_request
-  return http
-
-
-def tunnel_patch(http):
-  """Tunnel PATCH requests over POST.
-  Args:
-     http - An instance of httplib2.Http
-         or something that acts like it.
-
-  Returns:
-     A modified instance of http that was passed in.
-
-  Example:
-
-    h = httplib2.Http()
-    h = tunnel_patch(h, "my-app-name/6.0")
-
-  Useful if you are running on a platform that doesn't support PATCH.
-  Apply this last if you are using OAuth 1.0, as changing the method
-  will result in a different signature.
-  """
-  request_orig = http.request
-
-  # The closure that will replace 'httplib2.Http.request'.
-  def new_request(uri, method='GET', body=None, headers=None,
-                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                  connection_type=None):
-    """Modify the request headers to add the user-agent."""
-    if headers is None:
-      headers = {}
-    if method == 'PATCH':
-      if 'oauth_token' in headers.get('authorization', ''):
-        logging.warning(
-            'OAuth 1.0 request made with Credentials after tunnel_patch.')
-      headers['x-http-method-override'] = "PATCH"
-      method = 'POST'
-    resp, content = request_orig(uri, method, body, headers,
-                        redirections, connection_type)
-    return resp, content
-
-  http.request = new_request
-  return http
diff --git a/apiclient/mimeparse.py b/apiclient/mimeparse.py
deleted file mode 100644
index cbb9d07..0000000
--- a/apiclient/mimeparse.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Copyright (C) 2007 Joe Gregorio
-#
-# Licensed under the MIT License
-
-"""MIME-Type Parser
-
-This module provides basic functions for handling mime-types. It can handle
-matching mime-types against a list of media-ranges. See section 14.1 of the
-HTTP specification [RFC 2616] for a complete explanation.
-
-   http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
-
-Contents:
- - parse_mime_type():   Parses a mime-type into its component parts.
- - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q'
-                          quality parameter.
- - quality():           Determines the quality ('q') of a mime-type when
-                          compared against a list of media-ranges.
- - quality_parsed():    Just like quality() except the second parameter must be
-                          pre-parsed.
- - best_match():        Choose the mime-type with the highest quality ('q')
-                          from a list of candidates.
-"""
-
-__version__ = '0.1.3'
-__author__ = 'Joe Gregorio'
-__email__ = 'joe@bitworking.org'
-__license__ = 'MIT License'
-__credits__ = ''
-
-
-def parse_mime_type(mime_type):
-    """Parses a mime-type into its component parts.
-
-    Carves up a mime-type and returns a tuple of the (type, subtype, params)
-    where 'params' is a dictionary of all the parameters for the media range.
-    For example, the media range 'application/xhtml;q=0.5' would get parsed
-    into:
-
-       ('application', 'xhtml', {'q', '0.5'})
-       """
-    parts = mime_type.split(';')
-    params = dict([tuple([s.strip() for s in param.split('=', 1)])\
-            for param in parts[1:]
-                  ])
-    full_type = parts[0].strip()
-    # Java URLConnection class sends an Accept header that includes a
-    # single '*'. Turn it into a legal wildcard.
-    if full_type == '*':
-        full_type = '*/*'
-    (type, subtype) = full_type.split('/')
-
-    return (type.strip(), subtype.strip(), params)
-
-
-def parse_media_range(range):
-    """Parse a media-range into its component parts.
-
-    Carves up a media range and returns a tuple of the (type, subtype,
-    params) where 'params' is a dictionary of all the parameters for the media
-    range.  For example, the media range 'application/*;q=0.5' would get parsed
-    into:
-
-       ('application', '*', {'q', '0.5'})
-
-    In addition this function also guarantees that there is a value for 'q'
-    in the params dictionary, filling it in with a proper default if
-    necessary.
-    """
-    (type, subtype, params) = parse_mime_type(range)
-    if not params.has_key('q') or not params['q'] or \
-            not float(params['q']) or float(params['q']) > 1\
-            or float(params['q']) < 0:
-        params['q'] = '1'
-
-    return (type, subtype, params)
-
-
-def fitness_and_quality_parsed(mime_type, parsed_ranges):
-    """Find the best match for a mime-type amongst parsed media-ranges.
-
-    Find the best match for a given mime-type against a list of media_ranges
-    that have already been parsed by parse_media_range(). Returns a tuple of
-    the fitness value and the value of the 'q' quality parameter of the best
-    match, or (-1, 0) if no match was found. Just as for quality_parsed(),
-    'parsed_ranges' must be a list of parsed media ranges.
-    """
-    best_fitness = -1
-    best_fit_q = 0
-    (target_type, target_subtype, target_params) =\
-            parse_media_range(mime_type)
-    for (type, subtype, params) in parsed_ranges:
-        type_match = (type == target_type or\
-                      type == '*' or\
-                      target_type == '*')
-        subtype_match = (subtype == target_subtype or\
-                         subtype == '*' or\
-                         target_subtype == '*')
-        if type_match and subtype_match:
-            param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in \
-                    target_params.iteritems() if key != 'q' and \
-                    params.has_key(key) and value == params[key]], 0)
-            fitness = (type == target_type) and 100 or 0
-            fitness += (subtype == target_subtype) and 10 or 0
-            fitness += param_matches
-            if fitness > best_fitness:
-                best_fitness = fitness
-                best_fit_q = params['q']
-
-    return best_fitness, float(best_fit_q)
-
-
-def quality_parsed(mime_type, parsed_ranges):
-    """Find the best match for a mime-type amongst parsed media-ranges.
-
-    Find the best match for a given mime-type against a list of media_ranges
-    that have already been parsed by parse_media_range(). Returns the 'q'
-    quality parameter of the best match, 0 if no match was found. This function
-    bahaves the same as quality() except that 'parsed_ranges' must be a list of
-    parsed media ranges.
-    """
-
-    return fitness_and_quality_parsed(mime_type, parsed_ranges)[1]
-
-
-def quality(mime_type, ranges):
-    """Return the quality ('q') of a mime-type against a list of media-ranges.
-
-    Returns the quality 'q' of a mime-type when compared against the
-    media-ranges in ranges. For example:
-
-    >>> quality('text/html','text/*;q=0.3, text/html;q=0.7,
-                  text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
-    0.7
-
-    """
-    parsed_ranges = [parse_media_range(r) for r in ranges.split(',')]
-
-    return quality_parsed(mime_type, parsed_ranges)
-
-
-def best_match(supported, header):
-    """Return mime-type with the highest quality ('q') from list of candidates.
-
-    Takes a list of supported mime-types and finds the best match for all the
-    media-ranges listed in header. The value of header must be a string that
-    conforms to the format of the HTTP Accept: header. The value of 'supported'
-    is a list of mime-types. The list of supported mime-types should be sorted
-    in order of increasing desirability, in case of a situation where there is
-    a tie.
-
-    >>> best_match(['application/xbel+xml', 'text/xml'],
-                   'text/*;q=0.5,*/*; q=0.1')
-    'text/xml'
-    """
-    split_header = _filter_blank(header.split(','))
-    parsed_header = [parse_media_range(r) for r in split_header]
-    weighted_matches = []
-    pos = 0
-    for mime_type in supported:
-        weighted_matches.append((fitness_and_quality_parsed(mime_type,
-                                 parsed_header), pos, mime_type))
-        pos += 1
-    weighted_matches.sort()
-
-    return weighted_matches[-1][0][1] and weighted_matches[-1][2] or ''
-
-
-def _filter_blank(i):
-    for s in i:
-        if s.strip():
-            yield s
diff --git a/apiclient/model.py b/apiclient/model.py
deleted file mode 100644
index 12fcab6..0000000
--- a/apiclient/model.py
+++ /dev/null
@@ -1,385 +0,0 @@
-#!/usr/bin/python2.4
-#
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Model objects for requests and responses.
-
-Each API may support one or more serializations, such
-as JSON, Atom, etc. The model classes are responsible
-for converting between the wire format and the Python
-object representation.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import gflags
-import logging
-import urllib
-
-from errors import HttpError
-from oauth2client.anyjson import simplejson
-
-FLAGS = gflags.FLAGS
-
-gflags.DEFINE_boolean('dump_request_response', False,
-                      'Dump all http server requests and responses. '
-                     )
-
-
-def _abstract():
-  raise NotImplementedError('You need to override this function')
-
-
-class Model(object):
-  """Model base class.
-
-  All Model classes should implement this interface.
-  The Model serializes and de-serializes between a wire
-  format such as JSON and a Python object representation.
-  """
-
-  def request(self, headers, path_params, query_params, body_value):
-    """Updates outgoing requests with a serialized body.
-
-    Args:
-      headers: dict, request headers
-      path_params: dict, parameters that appear in the request path
-      query_params: dict, parameters that appear in the query
-      body_value: object, the request body as a Python object, which must be
-                  serializable.
-    Returns:
-      A tuple of (headers, path_params, query, body)
-
-      headers: dict, request headers
-      path_params: dict, parameters that appear in the request path
-      query: string, query part of the request URI
-      body: string, the body serialized in the desired wire format.
-    """
-    _abstract()
-
-  def response(self, resp, content):
-    """Convert the response wire format into a Python object.
-
-    Args:
-      resp: httplib2.Response, the HTTP response headers and status
-      content: string, the body of the HTTP response
-
-    Returns:
-      The body de-serialized as a Python object.
-
-    Raises:
-      apiclient.errors.HttpError if a non 2xx response is received.
-    """
-    _abstract()
-
-
-class BaseModel(Model):
-  """Base model class.
-
-  Subclasses should provide implementations for the "serialize" and
-  "deserialize" methods, as well as values for the following class attributes.
-
-  Attributes:
-    accept: The value to use for the HTTP Accept header.
-    content_type: The value to use for the HTTP Content-type header.
-    no_content_response: The value to return when deserializing a 204 "No
-        Content" response.
-    alt_param: The value to supply as the "alt" query parameter for requests.
-  """
-
-  accept = None
-  content_type = None
-  no_content_response = None
-  alt_param = None
-
-  def _log_request(self, headers, path_params, query, body):
-    """Logs debugging information about the request if requested."""
-    if FLAGS.dump_request_response:
-      logging.info('--request-start--')
-      logging.info('-headers-start-')
-      for h, v in headers.iteritems():
-        logging.info('%s: %s', h, v)
-      logging.info('-headers-end-')
-      logging.info('-path-parameters-start-')
-      for h, v in path_params.iteritems():
-        logging.info('%s: %s', h, v)
-      logging.info('-path-parameters-end-')
-      logging.info('body: %s', body)
-      logging.info('query: %s', query)
-      logging.info('--request-end--')
-
-  def request(self, headers, path_params, query_params, body_value):
-    """Updates outgoing requests with a serialized body.
-
-    Args:
-      headers: dict, request headers
-      path_params: dict, parameters that appear in the request path
-      query_params: dict, parameters that appear in the query
-      body_value: object, the request body as a Python object, which must be
-                  serializable by simplejson.
-    Returns:
-      A tuple of (headers, path_params, query, body)
-
-      headers: dict, request headers
-      path_params: dict, parameters that appear in the request path
-      query: string, query part of the request URI
-      body: string, the body serialized as JSON
-    """
-    query = self._build_query(query_params)
-    headers['accept'] = self.accept
-    headers['accept-encoding'] = 'gzip, deflate'
-    if 'user-agent' in headers:
-      headers['user-agent'] += ' '
-    else:
-      headers['user-agent'] = ''
-    headers['user-agent'] += 'google-api-python-client/1.0'
-
-    if body_value is not None:
-      headers['content-type'] = self.content_type
-      body_value = self.serialize(body_value)
-    self._log_request(headers, path_params, query, body_value)
-    return (headers, path_params, query, body_value)
-
-  def _build_query(self, params):
-    """Builds a query string.
-
-    Args:
-      params: dict, the query parameters
-
-    Returns:
-      The query parameters properly encoded into an HTTP URI query string.
-    """
-    if self.alt_param is not None:
-      params.update({'alt': self.alt_param})
-    astuples = []
-    for key, value in params.iteritems():
-      if type(value) == type([]):
-        for x in value:
-          x = x.encode('utf-8')
-          astuples.append((key, x))
-      else:
-        if getattr(value, 'encode', False) and callable(value.encode):
-          value = value.encode('utf-8')
-        astuples.append((key, value))
-    return '?' + urllib.urlencode(astuples)
-
-  def _log_response(self, resp, content):
-    """Logs debugging information about the response if requested."""
-    if FLAGS.dump_request_response:
-      logging.info('--response-start--')
-      for h, v in resp.iteritems():
-        logging.info('%s: %s', h, v)
-      if content:
-        logging.info(content)
-      logging.info('--response-end--')
-
-  def response(self, resp, content):
-    """Convert the response wire format into a Python object.
-
-    Args:
-      resp: httplib2.Response, the HTTP response headers and status
-      content: string, the body of the HTTP response
-
-    Returns:
-      The body de-serialized as a Python object.
-
-    Raises:
-      apiclient.errors.HttpError if a non 2xx response is received.
-    """
-    self._log_response(resp, content)
-    # Error handling is TBD, for example, do we retry
-    # for some operation/error combinations?
-    if resp.status < 300:
-      if resp.status == 204:
-        # A 204: No Content response should be treated differently
-        # to all the other success states
-        return self.no_content_response
-      return self.deserialize(content)
-    else:
-      logging.debug('Content from bad request was: %s' % content)
-      raise HttpError(resp, content)
-
-  def serialize(self, body_value):
-    """Perform the actual Python object serialization.
-
-    Args:
-      body_value: object, the request body as a Python object.
-
-    Returns:
-      string, the body in serialized form.
-    """
-    _abstract()
-
-  def deserialize(self, content):
-    """Perform the actual deserialization from response string to Python
-    object.
-
-    Args:
-      content: string, the body of the HTTP response
-
-    Returns:
-      The body de-serialized as a Python object.
-    """
-    _abstract()
-
-
-class JsonModel(BaseModel):
-  """Model class for JSON.
-
-  Serializes and de-serializes between JSON and the Python
-  object representation of HTTP request and response bodies.
-  """
-  accept = 'application/json'
-  content_type = 'application/json'
-  alt_param = 'json'
-
-  def __init__(self, data_wrapper=False):
-    """Construct a JsonModel.
-
-    Args:
-      data_wrapper: boolean, wrap requests and responses in a data wrapper
-    """
-    self._data_wrapper = data_wrapper
-
-  def serialize(self, body_value):
-    if (isinstance(body_value, dict) and 'data' not in body_value and
-        self._data_wrapper):
-      body_value = {'data': body_value}
-    return simplejson.dumps(body_value)
-
-  def deserialize(self, content):
-    body = simplejson.loads(content)
-    if self._data_wrapper and isinstance(body, dict) and 'data' in body:
-      body = body['data']
-    return body
-
-  @property
-  def no_content_response(self):
-    return {}
-
-
-class RawModel(JsonModel):
-  """Model class for requests that don't return JSON.
-
-  Serializes and de-serializes between JSON and the Python
-  object representation of HTTP request, and returns the raw bytes
-  of the response body.
-  """
-  accept = '*/*'
-  content_type = 'application/json'
-  alt_param = None
-
-  def deserialize(self, content):
-    return content
-
-  @property
-  def no_content_response(self):
-    return ''
-
-
-class MediaModel(JsonModel):
-  """Model class for requests that return Media.
-
-  Serializes and de-serializes between JSON and the Python
-  object representation of HTTP request, and returns the raw bytes
-  of the response body.
-  """
-  accept = '*/*'
-  content_type = 'application/json'
-  alt_param = 'media'
-
-  def deserialize(self, content):
-    return content
-
-  @property
-  def no_content_response(self):
-    return ''
-
-
-class ProtocolBufferModel(BaseModel):
-  """Model class for protocol buffers.
-
-  Serializes and de-serializes the binary protocol buffer sent in the HTTP
-  request and response bodies.
-  """
-  accept = 'application/x-protobuf'
-  content_type = 'application/x-protobuf'
-  alt_param = 'proto'
-
-  def __init__(self, protocol_buffer):
-    """Constructs a ProtocolBufferModel.
-
-    The serialzed protocol buffer returned in an HTTP response will be
-    de-serialized using the given protocol buffer class.
-
-    Args:
-      protocol_buffer: The protocol buffer class used to de-serialize a
-      response from the API.
-    """
-    self._protocol_buffer = protocol_buffer
-
-  def serialize(self, body_value):
-    return body_value.SerializeToString()
-
-  def deserialize(self, content):
-    return self._protocol_buffer.FromString(content)
-
-  @property
-  def no_content_response(self):
-    return self._protocol_buffer()
-
-
-def makepatch(original, modified):
-  """Create a patch object.
-
-  Some methods support PATCH, an efficient way to send updates to a resource.
-  This method allows the easy construction of patch bodies by looking at the
-  differences between a resource before and after it was modified.
-
-  Args:
-    original: object, the original deserialized resource
-    modified: object, the modified deserialized resource
-  Returns:
-    An object that contains only the changes from original to modified, in a
-    form suitable to pass to a PATCH method.
-
-  Example usage:
-    item = service.activities().get(postid=postid, userid=userid).execute()
-    original = copy.deepcopy(item)
-    item['object']['content'] = 'This is updated.'
-    service.activities.patch(postid=postid, userid=userid,
-      body=makepatch(original, item)).execute()
-  """
-  patch = {}
-  for key, original_value in original.iteritems():
-    modified_value = modified.get(key, None)
-    if modified_value is None:
-      # Use None to signal that the element is deleted
-      patch[key] = None
-    elif original_value != modified_value:
-      if type(original_value) == type({}):
-        # Recursively descend objects
-        patch[key] = makepatch(original_value, modified_value)
-      else:
-        # In the case of simple types or arrays we just replace
-        patch[key] = modified_value
-    else:
-      # Don't add anything to patch if there's no change
-      pass
-  for key in modified:
-    if key not in original:
-      patch[key] = modified[key]
-
-  return patch
diff --git a/apiclient/oauth.py b/apiclient/oauth.py
deleted file mode 100644
index 136adcd..0000000
--- a/apiclient/oauth.py
+++ /dev/null
@@ -1,443 +0,0 @@
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for OAuth.
-
-Utilities for making it easier to work with OAuth.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-
-import copy
-import httplib2
-import logging
-import oauth2 as oauth
-import urllib
-import urlparse
-
-from oauth2client.anyjson import simplejson
-from oauth2client.client import Credentials
-from oauth2client.client import Flow
-from oauth2client.client import Storage
-
-try:
-  from urlparse import parse_qsl
-except ImportError:
-  from cgi import parse_qsl
-
-
-class Error(Exception):
-  """Base error for this module."""
-  pass
-
-
-class RequestError(Error):
-  """Error occurred during request."""
-  pass
-
-
-class MissingParameter(Error):
-  pass
-
-
-class CredentialsInvalidError(Error):
-  pass
-
-
-def _abstract():
-  raise NotImplementedError('You need to override this function')
-
-
-def _oauth_uri(name, discovery, params):
-  """Look up the OAuth URI from the discovery
-  document and add query parameters based on
-  params.
-
-  name      - The name of the OAuth URI to lookup, one
-              of 'request', 'access', or 'authorize'.
-  discovery - Portion of discovery document the describes
-              the OAuth endpoints.
-  params    - Dictionary that is used to form the query parameters
-              for the specified URI.
-  """
-  if name not in ['request', 'access', 'authorize']:
-    raise KeyError(name)
-  keys = discovery[name]['parameters'].keys()
-  query = {}
-  for key in keys:
-    if key in params:
-      query[key] = params[key]
-  return discovery[name]['url'] + '?' + urllib.urlencode(query)
-
-
-
-class OAuthCredentials(Credentials):
-  """Credentials object for OAuth 1.0a
-  """
-
-  def __init__(self, consumer, token, user_agent):
-    """
-    consumer   - An instance of oauth.Consumer.
-    token      - An instance of oauth.Token constructed with
-                 the access token and secret.
-    user_agent - The HTTP User-Agent to provide for this application.
-    """
-    self.consumer = consumer
-    self.token = token
-    self.user_agent = user_agent
-    self.store = None
-
-    # True if the credentials have been revoked
-    self._invalid = False
-
-  @property
-  def invalid(self):
-    """True if the credentials are invalid, such as being revoked."""
-    return getattr(self, "_invalid", False)
-
-  def set_store(self, store):
-    """Set the storage for the credential.
-
-    Args:
-      store: callable, a callable that when passed a Credential
-        will store the credential back to where it came from.
-        This is needed to store the latest access_token if it
-        has been revoked.
-    """
-    self.store = store
-
-  def __getstate__(self):
-    """Trim the state down to something that can be pickled."""
-    d = copy.copy(self.__dict__)
-    del d['store']
-    return d
-
-  def __setstate__(self, state):
-    """Reconstitute the state of the object from being pickled."""
-    self.__dict__.update(state)
-    self.store = None
-
-  def authorize(self, http):
-    """Authorize an httplib2.Http instance with these Credentials
-
-    Args:
-       http - An instance of httplib2.Http
-           or something that acts like it.
-
-    Returns:
-       A modified instance of http that was passed in.
-
-    Example:
-
-      h = httplib2.Http()
-      h = credentials.authorize(h)
-
-    You can't create a new OAuth
-    subclass of httplib2.Authenication because
-    it never gets passed the absolute URI, which is
-    needed for signing. So instead we have to overload
-    'request' with a closure that adds in the
-    Authorization header and then calls the original version
-    of 'request()'.
-    """
-    request_orig = http.request
-    signer = oauth.SignatureMethod_HMAC_SHA1()
-
-    # The closure that will replace 'httplib2.Http.request'.
-    def new_request(uri, method='GET', body=None, headers=None,
-                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                    connection_type=None):
-      """Modify the request headers to add the appropriate
-      Authorization header."""
-      response_code = 302
-      http.follow_redirects = False
-      while response_code in [301, 302]:
-        req = oauth.Request.from_consumer_and_token(
-            self.consumer, self.token, http_method=method, http_url=uri)
-        req.sign_request(signer, self.consumer, self.token)
-        if headers is None:
-          headers = {}
-        headers.update(req.to_header())
-        if 'user-agent' in headers:
-          headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
-        else:
-          headers['user-agent'] = self.user_agent
-
-        resp, content = request_orig(uri, method, body, headers,
-                            redirections, connection_type)
-        response_code = resp.status
-        if response_code in [301, 302]:
-          uri = resp['location']
-
-      # Update the stored credential if it becomes invalid.
-      if response_code == 401:
-        logging.info('Access token no longer valid: %s' % content)
-        self._invalid = True
-        if self.store is not None:
-          self.store(self)
-        raise CredentialsInvalidError("Credentials are no longer valid.")
-
-      return resp, content
-
-    http.request = new_request
-    return http
-
-
-class TwoLeggedOAuthCredentials(Credentials):
-  """Two Legged Credentials object for OAuth 1.0a.
-
-  The Two Legged object is created directly, not from a flow.  Once you
-  authorize and httplib2.Http instance you can change the requestor and that
-  change will propogate to the authorized httplib2.Http instance. For example:
-
-    http = httplib2.Http()
-    http = credentials.authorize(http)
-
-    credentials.requestor = 'foo@example.info'
-    http.request(...)
-    credentials.requestor = 'bar@example.info'
-    http.request(...)
-  """
-
-  def __init__(self, consumer_key, consumer_secret, user_agent):
-    """
-    Args:
-      consumer_key: string, An OAuth 1.0 consumer key
-      consumer_secret: string, An OAuth 1.0 consumer secret
-      user_agent: string, The HTTP User-Agent to provide for this application.
-    """
-    self.consumer = oauth.Consumer(consumer_key, consumer_secret)
-    self.user_agent = user_agent
-    self.store = None
-
-    # email address of the user to act on the behalf of.
-    self._requestor = None
-
-  @property
-  def invalid(self):
-    """True if the credentials are invalid, such as being revoked.
-
-    Always returns False for Two Legged Credentials.
-    """
-    return False
-
-  def getrequestor(self):
-    return self._requestor
-
-  def setrequestor(self, email):
-    self._requestor = email
-
-  requestor = property(getrequestor, setrequestor, None,
-      'The email address of the user to act on behalf of')
-
-  def set_store(self, store):
-    """Set the storage for the credential.
-
-    Args:
-      store: callable, a callable that when passed a Credential
-        will store the credential back to where it came from.
-        This is needed to store the latest access_token if it
-        has been revoked.
-    """
-    self.store = store
-
-  def __getstate__(self):
-    """Trim the state down to something that can be pickled."""
-    d = copy.copy(self.__dict__)
-    del d['store']
-    return d
-
-  def __setstate__(self, state):
-    """Reconstitute the state of the object from being pickled."""
-    self.__dict__.update(state)
-    self.store = None
-
-  def authorize(self, http):
-    """Authorize an httplib2.Http instance with these Credentials
-
-    Args:
-       http - An instance of httplib2.Http
-           or something that acts like it.
-
-    Returns:
-       A modified instance of http that was passed in.
-
-    Example:
-
-      h = httplib2.Http()
-      h = credentials.authorize(h)
-
-    You can't create a new OAuth
-    subclass of httplib2.Authenication because
-    it never gets passed the absolute URI, which is
-    needed for signing. So instead we have to overload
-    'request' with a closure that adds in the
-    Authorization header and then calls the original version
-    of 'request()'.
-    """
-    request_orig = http.request
-    signer = oauth.SignatureMethod_HMAC_SHA1()
-
-    # The closure that will replace 'httplib2.Http.request'.
-    def new_request(uri, method='GET', body=None, headers=None,
-                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                    connection_type=None):
-      """Modify the request headers to add the appropriate
-      Authorization header."""
-      response_code = 302
-      http.follow_redirects = False
-      while response_code in [301, 302]:
-        # add in xoauth_requestor_id=self._requestor to the uri
-        if self._requestor is None:
-          raise MissingParameter(
-              'Requestor must be set before using TwoLeggedOAuthCredentials')
-        parsed = list(urlparse.urlparse(uri))
-        q = parse_qsl(parsed[4])
-        q.append(('xoauth_requestor_id', self._requestor))
-        parsed[4] = urllib.urlencode(q)
-        uri = urlparse.urlunparse(parsed)
-
-        req = oauth.Request.from_consumer_and_token(
-            self.consumer, None, http_method=method, http_url=uri)
-        req.sign_request(signer, self.consumer, None)
-        if headers is None:
-          headers = {}
-        headers.update(req.to_header())
-        if 'user-agent' in headers:
-          headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
-        else:
-          headers['user-agent'] = self.user_agent
-        resp, content = request_orig(uri, method, body, headers,
-                            redirections, connection_type)
-        response_code = resp.status
-        if response_code in [301, 302]:
-          uri = resp['location']
-
-      if response_code == 401:
-        logging.info('Access token no longer valid: %s' % content)
-        # Do not store the invalid state of the Credentials because
-        # being 2LO they could be reinstated in the future.
-        raise CredentialsInvalidError("Credentials are invalid.")
-
-      return resp, content
-
-    http.request = new_request
-    return http
-
-
-class FlowThreeLegged(Flow):
-  """Does the Three Legged Dance for OAuth 1.0a.
-  """
-
-  def __init__(self, discovery, consumer_key, consumer_secret, user_agent,
-               **kwargs):
-    """
-    discovery       - Section of the API discovery document that describes
-                      the OAuth endpoints.
-    consumer_key    - OAuth consumer key
-    consumer_secret - OAuth consumer secret
-    user_agent      - The HTTP User-Agent that identifies the application.
-    **kwargs        - The keyword arguments are all optional and required
-                      parameters for the OAuth calls.
-    """
-    self.discovery = discovery
-    self.consumer_key = consumer_key
-    self.consumer_secret = consumer_secret
-    self.user_agent = user_agent
-    self.params = kwargs
-    self.request_token = {}
-    required = {}
-    for uriinfo in discovery.itervalues():
-      for name, value in uriinfo['parameters'].iteritems():
-        if value['required'] and not name.startswith('oauth_'):
-          required[name] = 1
-    for key in required.iterkeys():
-      if key not in self.params:
-        raise MissingParameter('Required parameter %s not supplied' % key)
-
-  def step1_get_authorize_url(self, oauth_callback='oob'):
-    """Returns a URI to redirect to the provider.
-
-    oauth_callback - Either the string 'oob' for a non-web-based application,
-                     or a URI that handles the callback from the authorization
-                     server.
-
-    If oauth_callback is 'oob' then pass in the
-    generated verification code to step2_exchange,
-    otherwise pass in the query parameters received
-    at the callback uri to step2_exchange.
-    """
-    consumer = oauth.Consumer(self.consumer_key, self.consumer_secret)
-    client = oauth.Client(consumer)
-
-    headers = {
-        'user-agent': self.user_agent,
-        'content-type': 'application/x-www-form-urlencoded'
-    }
-    body = urllib.urlencode({'oauth_callback': oauth_callback})
-    uri = _oauth_uri('request', self.discovery, self.params)
-
-    resp, content = client.request(uri, 'POST', headers=headers,
-                                   body=body)
-    if resp['status'] != '200':
-      logging.error('Failed to retrieve temporary authorization: %s', content)
-      raise RequestError('Invalid response %s.' % resp['status'])
-
-    self.request_token = dict(parse_qsl(content))
-
-    auth_params = copy.copy(self.params)
-    auth_params['oauth_token'] = self.request_token['oauth_token']
-
-    return _oauth_uri('authorize', self.discovery, auth_params)
-
-  def step2_exchange(self, verifier):
-    """Exhanges an authorized request token
-    for OAuthCredentials.
-
-    Args:
-      verifier: string, dict - either the verifier token, or a dictionary
-        of the query parameters to the callback, which contains
-        the oauth_verifier.
-    Returns:
-       The Credentials object.
-    """
-
-    if not (isinstance(verifier, str) or isinstance(verifier, unicode)):
-      verifier = verifier['oauth_verifier']
-
-    token = oauth.Token(
-        self.request_token['oauth_token'],
-        self.request_token['oauth_token_secret'])
-    token.set_verifier(verifier)
-    consumer = oauth.Consumer(self.consumer_key, self.consumer_secret)
-    client = oauth.Client(consumer, token)
-
-    headers = {
-        'user-agent': self.user_agent,
-        'content-type': 'application/x-www-form-urlencoded'
-    }
-
-    uri = _oauth_uri('access', self.discovery, self.params)
-    resp, content = client.request(uri, 'POST', headers=headers)
-    if resp['status'] != '200':
-      logging.error('Failed to retrieve access token: %s', content)
-      raise RequestError('Invalid response %s.' % resp['status'])
-
-    oauth_params = dict(parse_qsl(content))
-    token = oauth.Token(
-        oauth_params['oauth_token'],
-        oauth_params['oauth_token_secret'])
-
-    return OAuthCredentials(consumer, token, self.user_agent)
diff --git a/apiclient/push.py b/apiclient/push.py
deleted file mode 100644
index c520faf..0000000
--- a/apiclient/push.py
+++ /dev/null
@@ -1,274 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Push notifications support.
-
-This code is based on experimental APIs and is subject to change.
-"""
-
-__author__ = 'afshar@google.com (Ali Afshar)'
-
-import binascii
-import collections
-import os
-import urllib
-
-SUBSCRIBE = 'X-GOOG-SUBSCRIBE'
-SUBSCRIPTION_ID = 'X-GOOG-SUBSCRIPTION-ID'
-TOPIC_ID = 'X-GOOG-TOPIC-ID'
-TOPIC_URI = 'X-GOOG-TOPIC-URI'
-CLIENT_TOKEN = 'X-GOOG-CLIENT-TOKEN'
-EVENT_TYPE = 'X-GOOG-EVENT-TYPE'
-UNSUBSCRIBE = 'X-GOOG-UNSUBSCRIBE'
-
-
-class InvalidSubscriptionRequestError(ValueError):
-  """The request cannot be subscribed."""
-
-
-def new_token():
-  """Gets a random token for use as a client_token in push notifications.
-
-  Returns:
-    str, a new random token.
-  """
-  return binascii.hexlify(os.urandom(32))
-
-
-class Channel(object):
-  """Base class for channel types."""
-
-  def __init__(self, channel_type, channel_args):
-    """Create a new Channel.
-
-    You probably won't need to create this channel manually, since there are
-    subclassed Channel for each specific type with a more customized set of
-    arguments to pass. However, you may wish to just create it manually here.
-
-    Args:
-      channel_type: str, the type of channel.
-      channel_args: dict, arguments to pass to the channel.
-    """
-    self.channel_type = channel_type
-    self.channel_args = channel_args
-
-  def as_header_value(self):
-    """Create the appropriate header for this channel.
-
-    Returns:
-      str encoded channel description suitable for use as a header.
-    """
-    return '%s?%s' % (self.channel_type, urllib.urlencode(self.channel_args))
-
-  def write_header(self, headers):
-    """Write the appropriate subscribe header to a headers dict.
-
-    Args:
-      headers: dict, headers to add subscribe header to.
-    """
-    headers[SUBSCRIBE] = self.as_header_value()
-
-
-class WebhookChannel(Channel):
-  """Channel for registering web hook notifications."""
-
-  def __init__(self, url, app_engine=False):
-    """Create a new WebhookChannel
-
-    Args:
-      url: str, URL to post notifications to.
-      app_engine: bool, default=False, whether the destination for the
-      notifications is an App Engine application.
-    """
-    super(WebhookChannel, self).__init__(
-        channel_type='web_hook',
-        channel_args={
-            'url': url,
-            'app_engine': app_engine and 'true' or 'false',
-        }
-    )
-
-
-class Headers(collections.defaultdict):
-  """Headers for managing subscriptions."""
-
-
-  ALL_HEADERS = set([SUBSCRIBE, SUBSCRIPTION_ID, TOPIC_ID, TOPIC_URI,
-                     CLIENT_TOKEN, EVENT_TYPE, UNSUBSCRIBE])
-
-  def __init__(self):
-    """Create a new subscription configuration instance."""
-    collections.defaultdict.__init__(self, str)
-
-  def __setitem__(self, key, value):
-    """Set a header value, ensuring the key is an allowed value.
-
-    Args:
-      key: str, the header key.
-      value: str, the header value.
-    Raises:
-      ValueError if key is not one of the accepted headers.
-    """
-    normal_key = self._normalize_key(key)
-    if normal_key not in self.ALL_HEADERS:
-      raise ValueError('Header name must be one of %s.' % self.ALL_HEADERS)
-    else:
-      return collections.defaultdict.__setitem__(self, normal_key, value)
-
-  def __getitem__(self, key):
-    """Get a header value, normalizing the key case.
-
-    Args:
-      key: str, the header key.
-    Returns:
-      String header value.
-    Raises:
-      KeyError if the key is not one of the accepted headers.
-    """
-    normal_key = self._normalize_key(key)
-    if normal_key not in self.ALL_HEADERS:
-      raise ValueError('Header name must be one of %s.' % self.ALL_HEADERS)
-    else:
-      return collections.defaultdict.__getitem__(self, normal_key)
-
-  def _normalize_key(self, key):
-    """Normalize a header name for use as a key."""
-    return key.upper()
-
-  def items(self):
-    """Generator for each header."""
-    for header in self.ALL_HEADERS:
-      value = self[header]
-      if value:
-        yield header, value
-
-  def write(self, headers):
-    """Applies the subscription headers.
-
-    Args:
-      headers: dict of headers to insert values into.
-    """
-    for header, value in self.items():
-      headers[header.lower()] = value
-
-  def read(self, headers):
-    """Read from headers.
-
-    Args:
-      headers: dict of headers to read from.
-    """
-    for header in self.ALL_HEADERS:
-      if header.lower() in headers:
-        self[header] = headers[header.lower()]
-
-
-class Subscription(object):
-  """Information about a subscription."""
-
-  def __init__(self):
-    """Create a new Subscription."""
-    self.headers = Headers()
-
-  @classmethod
-  def for_request(cls, request, channel, client_token=None):
-    """Creates a subscription and attaches it to a request.
-
-    Args:
-      request: An http.HttpRequest to modify for making a subscription.
-      channel: A apiclient.push.Channel describing the subscription to
-               create.
-      client_token: (optional) client token to verify the notification.
-
-    Returns:
-      New subscription object.
-    """
-    subscription = cls.for_channel(channel=channel, client_token=client_token)
-    subscription.headers.write(request.headers)
-    if request.method != 'GET':
-      raise InvalidSubscriptionRequestError(
-          'Can only subscribe to requests which are GET.')
-    request.method = 'POST'
-
-    def _on_response(response, subscription=subscription):
-      """Called with the response headers. Reads the subscription headers."""
-      subscription.headers.read(response)
-
-    request.add_response_callback(_on_response)
-    return subscription
-
-  @classmethod
-  def for_channel(cls, channel, client_token=None):
-    """Alternate constructor to create a subscription from a channel.
-
-    Args:
-      channel: A apiclient.push.Channel describing the subscription to
-               create.
-      client_token: (optional) client token to verify the notification.
-
-    Returns:
-      New subscription object.
-    """
-    subscription = cls()
-    channel.write_header(subscription.headers)
-    if client_token is None:
-      client_token = new_token()
-    subscription.headers[SUBSCRIPTION_ID] = new_token()
-    subscription.headers[CLIENT_TOKEN] = client_token
-    return subscription
-
-  def verify(self, headers):
-    """Verifies that a webhook notification has the correct client_token.
-
-    Args:
-      headers: dict of request headers for a push notification.
-
-    Returns:
-      Boolean value indicating whether the notification is verified.
-    """
-    new_subscription = Subscription()
-    new_subscription.headers.read(headers)
-    return new_subscription.client_token == self.client_token
-
-  @property
-  def subscribe(self):
-    """Subscribe header value."""
-    return self.headers[SUBSCRIBE]
-
-  @property
-  def subscription_id(self):
-    """Subscription ID header value."""
-    return self.headers[SUBSCRIPTION_ID]
-
-  @property
-  def topic_id(self):
-    """Topic ID header value."""
-    return self.headers[TOPIC_ID]
-
-  @property
-  def topic_uri(self):
-    """Topic URI header value."""
-    return self.headers[TOPIC_URI]
-
-  @property
-  def client_token(self):
-    """Client Token header value."""
-    return self.headers[CLIENT_TOKEN]
-
-  @property
-  def event_type(self):
-    """Event Type header value."""
-    return self.headers[EVENT_TYPE]
-
-  @property
-  def unsubscribe(self):
-    """Unsuscribe header value."""
-    return self.headers[UNSUBSCRIBE]
diff --git a/apiclient/schema.py b/apiclient/schema.py
deleted file mode 100644
index d076a86..0000000
--- a/apiclient/schema.py
+++ /dev/null
@@ -1,312 +0,0 @@
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Schema processing for discovery based APIs
-
-Schemas holds an APIs discovery schemas. It can return those schema as
-deserialized JSON objects, or pretty print them as prototype objects that
-conform to the schema.
-
-For example, given the schema:
-
- schema = \"\"\"{
-   "Foo": {
-    "type": "object",
-    "properties": {
-     "etag": {
-      "type": "string",
-      "description": "ETag of the collection."
-     },
-     "kind": {
-      "type": "string",
-      "description": "Type of the collection ('calendar#acl').",
-      "default": "calendar#acl"
-     },
-     "nextPageToken": {
-      "type": "string",
-      "description": "Token used to access the next
-         page of this result. Omitted if no further results are available."
-     }
-    }
-   }
- }\"\"\"
-
- s = Schemas(schema)
- print s.prettyPrintByName('Foo')
-
- Produces the following output:
-
-  {
-   "nextPageToken": "A String", # Token used to access the
-       # next page of this result. Omitted if no further results are available.
-   "kind": "A String", # Type of the collection ('calendar#acl').
-   "etag": "A String", # ETag of the collection.
-  },
-
-The constructor takes a discovery document in which to look up named schema.
-"""
-
-# TODO(jcgregorio) support format, enum, minimum, maximum
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import copy
-
-from oauth2client import util
-from oauth2client.anyjson import simplejson
-
-
-class Schemas(object):
-  """Schemas for an API."""
-
-  def __init__(self, discovery):
-    """Constructor.
-
-    Args:
-      discovery: object, Deserialized discovery document from which we pull
-        out the named schema.
-    """
-    self.schemas = discovery.get('schemas', {})
-
-    # Cache of pretty printed schemas.
-    self.pretty = {}
-
-  @util.positional(2)
-  def _prettyPrintByName(self, name, seen=None, dent=0):
-    """Get pretty printed object prototype from the schema name.
-
-    Args:
-      name: string, Name of schema in the discovery document.
-      seen: list of string, Names of schema already seen. Used to handle
-        recursive definitions.
-
-    Returns:
-      string, A string that contains a prototype object with
-        comments that conforms to the given schema.
-    """
-    if seen is None:
-      seen = []
-
-    if name in seen:
-      # Do not fall into an infinite loop over recursive definitions.
-      return '# Object with schema name: %s' % name
-    seen.append(name)
-
-    if name not in self.pretty:
-      self.pretty[name] = _SchemaToStruct(self.schemas[name],
-          seen, dent=dent).to_str(self._prettyPrintByName)
-
-    seen.pop()
-
-    return self.pretty[name]
-
-  def prettyPrintByName(self, name):
-    """Get pretty printed object prototype from the schema name.
-
-    Args:
-      name: string, Name of schema in the discovery document.
-
-    Returns:
-      string, A string that contains a prototype object with
-        comments that conforms to the given schema.
-    """
-    # Return with trailing comma and newline removed.
-    return self._prettyPrintByName(name, seen=[], dent=1)[:-2]
-
-  @util.positional(2)
-  def _prettyPrintSchema(self, schema, seen=None, dent=0):
-    """Get pretty printed object prototype of schema.
-
-    Args:
-      schema: object, Parsed JSON schema.
-      seen: list of string, Names of schema already seen. Used to handle
-        recursive definitions.
-
-    Returns:
-      string, A string that contains a prototype object with
-        comments that conforms to the given schema.
-    """
-    if seen is None:
-      seen = []
-
-    return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName)
-
-  def prettyPrintSchema(self, schema):
-    """Get pretty printed object prototype of schema.
-
-    Args:
-      schema: object, Parsed JSON schema.
-
-    Returns:
-      string, A string that contains a prototype object with
-        comments that conforms to the given schema.
-    """
-    # Return with trailing comma and newline removed.
-    return self._prettyPrintSchema(schema, dent=1)[:-2]
-
-  def get(self, name):
-    """Get deserialized JSON schema from the schema name.
-
-    Args:
-      name: string, Schema name.
-    """
-    return self.schemas[name]
-
-
-class _SchemaToStruct(object):
-  """Convert schema to a prototype object."""
-
-  @util.positional(3)
-  def __init__(self, schema, seen, dent=0):
-    """Constructor.
-
-    Args:
-      schema: object, Parsed JSON schema.
-      seen: list, List of names of schema already seen while parsing. Used to
-        handle recursive definitions.
-      dent: int, Initial indentation depth.
-    """
-    # The result of this parsing kept as list of strings.
-    self.value = []
-
-    # The final value of the parsing.
-    self.string = None
-
-    # The parsed JSON schema.
-    self.schema = schema
-
-    # Indentation level.
-    self.dent = dent
-
-    # Method that when called returns a prototype object for the schema with
-    # the given name.
-    self.from_cache = None
-
-    # List of names of schema already seen while parsing.
-    self.seen = seen
-
-  def emit(self, text):
-    """Add text as a line to the output.
-
-    Args:
-      text: string, Text to output.
-    """
-    self.value.extend(["  " * self.dent, text, '\n'])
-
-  def emitBegin(self, text):
-    """Add text to the output, but with no line terminator.
-
-    Args:
-      text: string, Text to output.
-      """
-    self.value.extend(["  " * self.dent, text])
-
-  def emitEnd(self, text, comment):
-    """Add text and comment to the output with line terminator.
-
-    Args:
-      text: string, Text to output.
-      comment: string, Python comment.
-    """
-    if comment:
-      divider = '\n' + '  ' * (self.dent + 2) + '# '
-      lines = comment.splitlines()
-      lines = [x.rstrip() for x in lines]
-      comment = divider.join(lines)
-      self.value.extend([text, ' # ', comment, '\n'])
-    else:
-      self.value.extend([text, '\n'])
-
-  def indent(self):
-    """Increase indentation level."""
-    self.dent += 1
-
-  def undent(self):
-    """Decrease indentation level."""
-    self.dent -= 1
-
-  def _to_str_impl(self, schema):
-    """Prototype object based on the schema, in Python code with comments.
-
-    Args:
-      schema: object, Parsed JSON schema file.
-
-    Returns:
-      Prototype object based on the schema, in Python code with comments.
-    """
-    stype = schema.get('type')
-    if stype == 'object':
-      self.emitEnd('{', schema.get('description', ''))
-      self.indent()
-      if 'properties' in schema:
-        for pname, pschema in schema.get('properties', {}).iteritems():
-          self.emitBegin('"%s": ' % pname)
-          self._to_str_impl(pschema)
-      elif 'additionalProperties' in schema:
-        self.emitBegin('"a_key": ')
-        self._to_str_impl(schema['additionalProperties'])
-      self.undent()
-      self.emit('},')
-    elif '$ref' in schema:
-      schemaName = schema['$ref']
-      description = schema.get('description', '')
-      s = self.from_cache(schemaName, seen=self.seen)
-      parts = s.splitlines()
-      self.emitEnd(parts[0], description)
-      for line in parts[1:]:
-        self.emit(line.rstrip())
-    elif stype == 'boolean':
-      value = schema.get('default', 'True or False')
-      self.emitEnd('%s,' % str(value), schema.get('description', ''))
-    elif stype == 'string':
-      value = schema.get('default', 'A String')
-      self.emitEnd('"%s",' % str(value), schema.get('description', ''))
-    elif stype == 'integer':
-      value = schema.get('default', '42')
-      self.emitEnd('%s,' % str(value), schema.get('description', ''))
-    elif stype == 'number':
-      value = schema.get('default', '3.14')
-      self.emitEnd('%s,' % str(value), schema.get('description', ''))
-    elif stype == 'null':
-      self.emitEnd('None,', schema.get('description', ''))
-    elif stype == 'any':
-      self.emitEnd('"",', schema.get('description', ''))
-    elif stype == 'array':
-      self.emitEnd('[', schema.get('description'))
-      self.indent()
-      self.emitBegin('')
-      self._to_str_impl(schema['items'])
-      self.undent()
-      self.emit('],')
-    else:
-      self.emit('Unknown type! %s' % stype)
-      self.emitEnd('', '')
-
-    self.string = ''.join(self.value)
-    return self.string
-
-  def to_str(self, from_cache):
-    """Prototype object based on the schema, in Python code with comments.
-
-    Args:
-      from_cache: callable(name, seen), Callable that retrieves an object
-         prototype for a schema with the given name. Seen is a list of schema
-         names already seen as we recursively descend the schema definition.
-
-    Returns:
-      Prototype object based on the schema, in Python code with comments.
-      The lines of the code will all be properly indented.
-    """
-    self.from_cache = from_cache
-    return self._to_str_impl(self.schema)
diff --git a/appcfg.py b/appcfg.py
index af18e1c..661f26e 100644
--- a/appcfg.py
+++ b/appcfg.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/backends_conversion.py b/backends_conversion.py
index af18e1c..661f26e 100644
--- a/backends_conversion.py
+++ b/backends_conversion.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/bulkload_client.py b/bulkload_client.py
index af18e1c..661f26e 100644
--- a/bulkload_client.py
+++ b/bulkload_client.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/bulkloader.py b/bulkloader.py
index af18e1c..661f26e 100644
--- a/bulkloader.py
+++ b/bulkloader.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/dev_appserver.py b/dev_appserver.py
index 1c48e39..b615636 100644
--- a/dev_appserver.py
+++ b/dev_appserver.py
@@ -52,15 +52,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/download_appstats.py b/download_appstats.py
index af18e1c..661f26e 100644
--- a/download_appstats.py
+++ b/download_appstats.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/endpointscfg.py b/endpointscfg.py
index af18e1c..661f26e 100644
--- a/endpointscfg.py
+++ b/endpointscfg.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/gen_protorpc.py b/gen_protorpc.py
index af18e1c..661f26e 100644
--- a/gen_protorpc.py
+++ b/gen_protorpc.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/gflags/__init__.py b/gflags/__init__.py
deleted file mode 100644
index 77e4285..0000000
--- a/gflags/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from gflags import *
diff --git a/gflags/gflags.py b/gflags/gflags.py
deleted file mode 100644
index 822256a..0000000
--- a/gflags/gflags.py
+++ /dev/null
@@ -1,2862 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2002, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
-# ---
-# Author: Chad Lester
-# Design and style contributions by:
-#   Amit Patel, Bogdan Cocosel, Daniel Dulitz, Eric Tiedemann,
-#   Eric Veach, Laurence Gonsalves, Matthew Springer
-# Code reorganized a bit by Craig Silverstein
-
-"""This module is used to define and parse command line flags.
-
-This module defines a *distributed* flag-definition policy: rather than
-an application having to define all flags in or near main(), each python
-module defines flags that are useful to it.  When one python module
-imports another, it gains access to the other's flags.  (This is
-implemented by having all modules share a common, global registry object
-containing all the flag information.)
-
-Flags are defined through the use of one of the DEFINE_xxx functions.
-The specific function used determines how the flag is parsed, checked,
-and optionally type-converted, when it's seen on the command line.
-
-
-IMPLEMENTATION: DEFINE_* creates a 'Flag' object and registers it with a
-'FlagValues' object (typically the global FlagValues FLAGS, defined
-here).  The 'FlagValues' object can scan the command line arguments and
-pass flag arguments to the corresponding 'Flag' objects for
-value-checking and type conversion.  The converted flag values are
-available as attributes of the 'FlagValues' object.
-
-Code can access the flag through a FlagValues object, for instance
-gflags.FLAGS.myflag.  Typically, the __main__ module passes the command
-line arguments to gflags.FLAGS for parsing.
-
-At bottom, this module calls getopt(), so getopt functionality is
-supported, including short- and long-style flags, and the use of -- to
-terminate flags.
-
-Methods defined by the flag module will throw 'FlagsError' exceptions.
-The exception argument will be a human-readable string.
-
-
-FLAG TYPES: This is a list of the DEFINE_*'s that you can do.  All flags
-take a name, default value, help-string, and optional 'short' name
-(one-letter name).  Some flags have other arguments, which are described
-with the flag.
-
-DEFINE_string: takes any input, and interprets it as a string.
-
-DEFINE_bool or
-DEFINE_boolean: typically does not take an argument: say --myflag to
-                set FLAGS.myflag to true, or --nomyflag to set
-                FLAGS.myflag to false.  Alternately, you can say
-                   --myflag=true  or --myflag=t or --myflag=1  or
-                   --myflag=false or --myflag=f or --myflag=0
-
-DEFINE_float: takes an input and interprets it as a floating point
-              number.  Takes optional args lower_bound and upper_bound;
-              if the number specified on the command line is out of
-              range, it will raise a FlagError.
-
-DEFINE_integer: takes an input and interprets it as an integer.  Takes
-                optional args lower_bound and upper_bound as for floats.
-
-DEFINE_enum: takes a list of strings which represents legal values.  If
-             the command-line value is not in this list, raise a flag
-             error.  Otherwise, assign to FLAGS.flag as a string.
-
-DEFINE_list: Takes a comma-separated list of strings on the commandline.
-             Stores them in a python list object.
-
-DEFINE_spaceseplist: Takes a space-separated list of strings on the
-                     commandline.  Stores them in a python list object.
-                     Example: --myspacesepflag "foo bar baz"
-
-DEFINE_multistring: The same as DEFINE_string, except the flag can be
-                    specified more than once on the commandline.  The
-                    result is a python list object (list of strings),
-                    even if the flag is only on the command line once.
-
-DEFINE_multi_int: The same as DEFINE_integer, except the flag can be
-                  specified more than once on the commandline.  The
-                  result is a python list object (list of ints), even if
-                  the flag is only on the command line once.
-
-
-SPECIAL FLAGS: There are a few flags that have special meaning:
-   --help          prints a list of all the flags in a human-readable fashion
-   --helpshort     prints a list of all key flags (see below).
-   --helpxml       prints a list of all flags, in XML format.  DO NOT parse
-                   the output of --help and --helpshort.  Instead, parse
-                   the output of --helpxml.  For more info, see
-                   "OUTPUT FOR --helpxml" below.
-   --flagfile=foo  read flags from file foo.
-   --undefok=f1,f2 ignore unrecognized option errors for f1,f2.
-                   For boolean flags, you should use --undefok=boolflag, and
-                   --boolflag and --noboolflag will be accepted.  Do not use
-                   --undefok=noboolflag.
-   --              as in getopt(), terminates flag-processing
-
-
-FLAGS VALIDATORS: If your program:
-  - requires flag X to be specified
-  - needs flag Y to match a regular expression
-  - or requires any more general constraint to be satisfied
-then validators are for you!
-
-Each validator represents a constraint over one flag, which is enforced
-starting from the initial parsing of the flags and until the program
-terminates.
-
-Also, lower_bound and upper_bound for numerical flags are enforced using flag
-validators.
-
-Howto:
-If you want to enforce a constraint over one flag, use
-
-gflags.RegisterValidator(flag_name,
-                        checker,
-                        message='Flag validation failed',
-                        flag_values=FLAGS)
-
-After flag values are initially parsed, and after any change to the specified
-flag, method checker(flag_value) will be executed. If constraint is not
-satisfied, an IllegalFlagValue exception will be raised. See
-RegisterValidator's docstring for a detailed explanation on how to construct
-your own checker.
-
-
-EXAMPLE USAGE:
-
-FLAGS = gflags.FLAGS
-
-gflags.DEFINE_integer('my_version', 0, 'Version number.')
-gflags.DEFINE_string('filename', None, 'Input file name', short_name='f')
-
-gflags.RegisterValidator('my_version',
-                        lambda value: value % 2 == 0,
-                        message='--my_version must be divisible by 2')
-gflags.MarkFlagAsRequired('filename')
-
-
-NOTE ON --flagfile:
-
-Flags may be loaded from text files in addition to being specified on
-the commandline.
-
-Any flags you don't feel like typing, throw them in a file, one flag per
-line, for instance:
-   --myflag=myvalue
-   --nomyboolean_flag
-You then specify your file with the special flag '--flagfile=somefile'.
-You CAN recursively nest flagfile= tokens OR use multiple files on the
-command line.  Lines beginning with a single hash '#' or a double slash
-'//' are comments in your flagfile.
-
-Any flagfile=<file> will be interpreted as having a relative path from
-the current working directory rather than from the place the file was
-included from:
-   myPythonScript.py --flagfile=config/somefile.cfg
-
-If somefile.cfg includes further --flagfile= directives, these will be
-referenced relative to the original CWD, not from the directory the
-including flagfile was found in!
-
-The caveat applies to people who are including a series of nested files
-in a different dir than they are executing out of.  Relative path names
-are always from CWD, not from the directory of the parent include
-flagfile. We do now support '~' expanded directory names.
-
-Absolute path names ALWAYS work!
-
-
-EXAMPLE USAGE:
-
-
-  FLAGS = gflags.FLAGS
-
-  # Flag names are globally defined!  So in general, we need to be
-  # careful to pick names that are unlikely to be used by other libraries.
-  # If there is a conflict, we'll get an error at import time.
-  gflags.DEFINE_string('name', 'Mr. President', 'your name')
-  gflags.DEFINE_integer('age', None, 'your age in years', lower_bound=0)
-  gflags.DEFINE_boolean('debug', False, 'produces debugging output')
-  gflags.DEFINE_enum('gender', 'male', ['male', 'female'], 'your gender')
-
-  def main(argv):
-    try:
-      argv = FLAGS(argv)  # parse flags
-    except gflags.FlagsError, e:
-      print '%s\\nUsage: %s ARGS\\n%s' % (e, sys.argv[0], FLAGS)
-      sys.exit(1)
-    if FLAGS.debug: print 'non-flag arguments:', argv
-    print 'Happy Birthday', FLAGS.name
-    if FLAGS.age is not None:
-      print 'You are a %d year old %s' % (FLAGS.age, FLAGS.gender)
-
-  if __name__ == '__main__':
-    main(sys.argv)
-
-
-KEY FLAGS:
-
-As we already explained, each module gains access to all flags defined
-by all the other modules it transitively imports.  In the case of
-non-trivial scripts, this means a lot of flags ...  For documentation
-purposes, it is good to identify the flags that are key (i.e., really
-important) to a module.  Clearly, the concept of "key flag" is a
-subjective one.  When trying to determine whether a flag is key to a
-module or not, assume that you are trying to explain your module to a
-potential user: which flags would you really like to mention first?
-
-We'll describe shortly how to declare which flags are key to a module.
-For the moment, assume we know the set of key flags for each module.
-Then, if you use the app.py module, you can use the --helpshort flag to
-print only the help for the flags that are key to the main module, in a
-human-readable format.
-
-NOTE: If you need to parse the flag help, do NOT use the output of
---help / --helpshort.  That output is meant for human consumption, and
-may be changed in the future.  Instead, use --helpxml; flags that are
-key for the main module are marked there with a <key>yes</key> element.
-
-The set of key flags for a module M is composed of:
-
-1. Flags defined by module M by calling a DEFINE_* function.
-
-2. Flags that module M explictly declares as key by using the function
-
-     DECLARE_key_flag(<flag_name>)
-
-3. Key flags of other modules that M specifies by using the function
-
-     ADOPT_module_key_flags(<other_module>)
-
-   This is a "bulk" declaration of key flags: each flag that is key for
-   <other_module> becomes key for the current module too.
-
-Notice that if you do not use the functions described at points 2 and 3
-above, then --helpshort prints information only about the flags defined
-by the main module of our script.  In many cases, this behavior is good
-enough.  But if you move part of the main module code (together with the
-related flags) into a different module, then it is nice to use
-DECLARE_key_flag / ADOPT_module_key_flags and make sure --helpshort
-lists all relevant flags (otherwise, your code refactoring may confuse
-your users).
-
-Note: each of DECLARE_key_flag / ADOPT_module_key_flags has its own
-pluses and minuses: DECLARE_key_flag is more targeted and may lead a
-more focused --helpshort documentation.  ADOPT_module_key_flags is good
-for cases when an entire module is considered key to the current script.
-Also, it does not require updates to client scripts when a new flag is
-added to the module.
-
-
-EXAMPLE USAGE 2 (WITH KEY FLAGS):
-
-Consider an application that contains the following three files (two
-auxiliary modules and a main module)
-
-File libfoo.py:
-
-  import gflags
-
-  gflags.DEFINE_integer('num_replicas', 3, 'Number of replicas to start')
-  gflags.DEFINE_boolean('rpc2', True, 'Turn on the usage of RPC2.')
-
-  ... some code ...
-
-File libbar.py:
-
-  import gflags
-
-  gflags.DEFINE_string('bar_gfs_path', '/gfs/path',
-                      'Path to the GFS files for libbar.')
-  gflags.DEFINE_string('email_for_bar_errors', 'bar-team@google.com',
-                      'Email address for bug reports about module libbar.')
-  gflags.DEFINE_boolean('bar_risky_hack', False,
-                       'Turn on an experimental and buggy optimization.')
-
-  ... some code ...
-
-File myscript.py:
-
-  import gflags
-  import libfoo
-  import libbar
-
-  gflags.DEFINE_integer('num_iterations', 0, 'Number of iterations.')
-
-  # Declare that all flags that are key for libfoo are
-  # key for this module too.
-  gflags.ADOPT_module_key_flags(libfoo)
-
-  # Declare that the flag --bar_gfs_path (defined in libbar) is key
-  # for this module.
-  gflags.DECLARE_key_flag('bar_gfs_path')
-
-  ... some code ...
-
-When myscript is invoked with the flag --helpshort, the resulted help
-message lists information about all the key flags for myscript:
---num_iterations, --num_replicas, --rpc2, and --bar_gfs_path.
-
-Of course, myscript uses all the flags declared by it (in this case,
-just --num_replicas) or by any of the modules it transitively imports
-(e.g., the modules libfoo, libbar).  E.g., it can access the value of
-FLAGS.bar_risky_hack, even if --bar_risky_hack is not declared as a key
-flag for myscript.
-
-
-OUTPUT FOR --helpxml:
-
-The --helpxml flag generates output with the following structure:
-
-<?xml version="1.0"?>
-<AllFlags>
-  <program>PROGRAM_BASENAME</program>
-  <usage>MAIN_MODULE_DOCSTRING</usage>
-  (<flag>
-    [<key>yes</key>]
-    <file>DECLARING_MODULE</file>
-    <name>FLAG_NAME</name>
-    <meaning>FLAG_HELP_MESSAGE</meaning>
-    <default>DEFAULT_FLAG_VALUE</default>
-    <current>CURRENT_FLAG_VALUE</current>
-    <type>FLAG_TYPE</type>
-    [OPTIONAL_ELEMENTS]
-  </flag>)*
-</AllFlags>
-
-Notes:
-
-1. The output is intentionally similar to the output generated by the
-C++ command-line flag library.  The few differences are due to the
-Python flags that do not have a C++ equivalent (at least not yet),
-e.g., DEFINE_list.
-
-2. New XML elements may be added in the future.
-
-3. DEFAULT_FLAG_VALUE is in serialized form, i.e., the string you can
-pass for this flag on the command-line.  E.g., for a flag defined
-using DEFINE_list, this field may be foo,bar, not ['foo', 'bar'].
-
-4. CURRENT_FLAG_VALUE is produced using str().  This means that the
-string 'false' will be represented in the same way as the boolean
-False.  Using repr() would have removed this ambiguity and simplified
-parsing, but would have broken the compatibility with the C++
-command-line flags.
-
-5. OPTIONAL_ELEMENTS describe elements relevant for certain kinds of
-flags: lower_bound, upper_bound (for flags that specify bounds),
-enum_value (for enum flags), list_separator (for flags that consist of
-a list of values, separated by a special token).
-
-6. We do not provide any example here: please use --helpxml instead.
-
-This module requires at least python 2.2.1 to run.
-"""
-
-import cgi
-import getopt
-import os
-import re
-import string
-import struct
-import sys
-# pylint: disable-msg=C6204
-try:
-  import fcntl
-except ImportError:
-  fcntl = None
-try:
-  # Importing termios will fail on non-unix platforms.
-  import termios
-except ImportError:
-  termios = None
-
-import gflags_validators
-# pylint: enable-msg=C6204
-
-
-# Are we running under pychecker?
-_RUNNING_PYCHECKER = 'pychecker.python' in sys.modules
-
-
-def _GetCallingModuleObjectAndName():
-  """Returns the module that's calling into this module.
-
-  We generally use this function to get the name of the module calling a
-  DEFINE_foo... function.
-  """
-  # Walk down the stack to find the first globals dict that's not ours.
-  for depth in range(1, sys.getrecursionlimit()):
-    if not sys._getframe(depth).f_globals is globals():
-      globals_for_frame = sys._getframe(depth).f_globals
-      module, module_name = _GetModuleObjectAndName(globals_for_frame)
-      if module_name is not None:
-        return module, module_name
-  raise AssertionError("No module was found")
-
-
-def _GetCallingModule():
-  """Returns the name of the module that's calling into this module."""
-  return _GetCallingModuleObjectAndName()[1]
-
-
-def _GetThisModuleObjectAndName():
-  """Returns: (module object, module name) for this module."""
-  return _GetModuleObjectAndName(globals())
-
-
-# module exceptions:
-class FlagsError(Exception):
-  """The base class for all flags errors."""
-  pass
-
-
-class DuplicateFlag(FlagsError):
-  """Raised if there is a flag naming conflict."""
-  pass
-
-class CantOpenFlagFileError(FlagsError):
-  """Raised if flagfile fails to open: doesn't exist, wrong permissions, etc."""
-  pass
-
-
-class DuplicateFlagCannotPropagateNoneToSwig(DuplicateFlag):
-  """Special case of DuplicateFlag -- SWIG flag value can't be set to None.
-
-  This can be raised when a duplicate flag is created. Even if allow_override is
-  True, we still abort if the new value is None, because it's currently
-  impossible to pass None default value back to SWIG. See FlagValues.SetDefault
-  for details.
-  """
-  pass
-
-
-class DuplicateFlagError(DuplicateFlag):
-  """A DuplicateFlag whose message cites the conflicting definitions.
-
-  A DuplicateFlagError conveys more information than a DuplicateFlag,
-  namely the modules where the conflicting definitions occur. This
-  class was created to avoid breaking external modules which depend on
-  the existing DuplicateFlags interface.
-  """
-
-  def __init__(self, flagname, flag_values, other_flag_values=None):
-    """Create a DuplicateFlagError.
-
-    Args:
-      flagname: Name of the flag being redefined.
-      flag_values: FlagValues object containing the first definition of
-          flagname.
-      other_flag_values: If this argument is not None, it should be the
-          FlagValues object where the second definition of flagname occurs.
-          If it is None, we assume that we're being called when attempting
-          to create the flag a second time, and we use the module calling
-          this one as the source of the second definition.
-    """
-    self.flagname = flagname
-    first_module = flag_values.FindModuleDefiningFlag(
-        flagname, default='<unknown>')
-    if other_flag_values is None:
-      second_module = _GetCallingModule()
-    else:
-      second_module = other_flag_values.FindModuleDefiningFlag(
-          flagname, default='<unknown>')
-    msg = "The flag '%s' is defined twice. First from %s, Second from %s" % (
-        self.flagname, first_module, second_module)
-    DuplicateFlag.__init__(self, msg)
-
-
-class IllegalFlagValue(FlagsError):
-  """The flag command line argument is illegal."""
-  pass
-
-
-class UnrecognizedFlag(FlagsError):
-  """Raised if a flag is unrecognized."""
-  pass
-
-
-# An UnrecognizedFlagError conveys more information than an UnrecognizedFlag.
-# Since there are external modules that create DuplicateFlags, the interface to
-# DuplicateFlag shouldn't change.  The flagvalue will be assigned the full value
-# of the flag and its argument, if any, allowing handling of unrecognized flags
-# in an exception handler.
-# If flagvalue is the empty string, then this exception is an due to a
-# reference to a flag that was not already defined.
-class UnrecognizedFlagError(UnrecognizedFlag):
-  def __init__(self, flagname, flagvalue=''):
-    self.flagname = flagname
-    self.flagvalue = flagvalue
-    UnrecognizedFlag.__init__(
-        self, "Unknown command line flag '%s'" % flagname)
-
-# Global variable used by expvar
-_exported_flags = {}
-_help_width = 80  # width of help output
-
-
-def GetHelpWidth():
-  """Returns: an integer, the width of help lines that is used in TextWrap."""
-  if (not sys.stdout.isatty()) or (termios is None) or (fcntl is None):
-    return _help_width
-  try:
-    data = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, '1234')
-    columns = struct.unpack('hh', data)[1]
-    # Emacs mode returns 0.
-    # Here we assume that any value below 40 is unreasonable
-    if columns >= 40:
-      return columns
-    # Returning an int as default is fine, int(int) just return the int.
-    return int(os.getenv('COLUMNS', _help_width))
-
-  except (TypeError, IOError, struct.error):
-    return _help_width
-
-
-def CutCommonSpacePrefix(text):
-  """Removes a common space prefix from the lines of a multiline text.
-
-  If the first line does not start with a space, it is left as it is and
-  only in the remaining lines a common space prefix is being searched
-  for. That means the first line will stay untouched. This is especially
-  useful to turn doc strings into help texts. This is because some
-  people prefer to have the doc comment start already after the
-  apostrophe and then align the following lines while others have the
-  apostrophes on a separate line.
-
-  The function also drops trailing empty lines and ignores empty lines
-  following the initial content line while calculating the initial
-  common whitespace.
-
-  Args:
-    text: text to work on
-
-  Returns:
-    the resulting text
-  """
-  text_lines = text.splitlines()
-  # Drop trailing empty lines
-  while text_lines and not text_lines[-1]:
-    text_lines = text_lines[:-1]
-  if text_lines:
-    # We got some content, is the first line starting with a space?
-    if text_lines[0] and text_lines[0][0].isspace():
-      text_first_line = []
-    else:
-      text_first_line = [text_lines.pop(0)]
-    # Calculate length of common leading whitespace (only over content lines)
-    common_prefix = os.path.commonprefix([line for line in text_lines if line])
-    space_prefix_len = len(common_prefix) - len(common_prefix.lstrip())
-    # If we have a common space prefix, drop it from all lines
-    if space_prefix_len:
-      for index in xrange(len(text_lines)):
-        if text_lines[index]:
-          text_lines[index] = text_lines[index][space_prefix_len:]
-    return '\n'.join(text_first_line + text_lines)
-  return ''
-
-
-def TextWrap(text, length=None, indent='', firstline_indent=None, tabs='    '):
-  """Wraps a given text to a maximum line length and returns it.
-
-  We turn lines that only contain whitespace into empty lines.  We keep
-  new lines and tabs (e.g., we do not treat tabs as spaces).
-
-  Args:
-    text:             text to wrap
-    length:           maximum length of a line, includes indentation
-                      if this is None then use GetHelpWidth()
-    indent:           indent for all but first line
-    firstline_indent: indent for first line; if None, fall back to indent
-    tabs:             replacement for tabs
-
-  Returns:
-    wrapped text
-
-  Raises:
-    FlagsError: if indent not shorter than length
-    FlagsError: if firstline_indent not shorter than length
-  """
-  # Get defaults where callee used None
-  if length is None:
-    length = GetHelpWidth()
-  if indent is None:
-    indent = ''
-  if len(indent) >= length:
-    raise FlagsError('Indent must be shorter than length')
-  # In line we will be holding the current line which is to be started
-  # with indent (or firstline_indent if available) and then appended
-  # with words.
-  if firstline_indent is None:
-    firstline_indent = ''
-    line = indent
-  else:
-    line = firstline_indent
-    if len(firstline_indent) >= length:
-      raise FlagsError('First line indent must be shorter than length')
-
-  # If the callee does not care about tabs we simply convert them to
-  # spaces If callee wanted tabs to be single space then we do that
-  # already here.
-  if not tabs or tabs == ' ':
-    text = text.replace('\t', ' ')
-  else:
-    tabs_are_whitespace = not tabs.strip()
-
-  line_regex = re.compile('([ ]*)(\t*)([^ \t]+)', re.MULTILINE)
-
-  # Split the text into lines and the lines with the regex above. The
-  # resulting lines are collected in result[]. For each split we get the
-  # spaces, the tabs and the next non white space (e.g. next word).
-  result = []
-  for text_line in text.splitlines():
-    # Store result length so we can find out whether processing the next
-    # line gave any new content
-    old_result_len = len(result)
-    # Process next line with line_regex. For optimization we do an rstrip().
-    # - process tabs (changes either line or word, see below)
-    # - process word (first try to squeeze on line, then wrap or force wrap)
-    # Spaces found on the line are ignored, they get added while wrapping as
-    # needed.
-    for spaces, current_tabs, word in line_regex.findall(text_line.rstrip()):
-      # If tabs weren't converted to spaces, handle them now
-      if current_tabs:
-        # If the last thing we added was a space anyway then drop
-        # it. But let's not get rid of the indentation.
-        if (((result and line != indent) or
-             (not result and line != firstline_indent)) and line[-1] == ' '):
-          line = line[:-1]
-        # Add the tabs, if that means adding whitespace, just add it at
-        # the line, the rstrip() code while shorten the line down if
-        # necessary
-        if tabs_are_whitespace:
-          line += tabs * len(current_tabs)
-        else:
-          # if not all tab replacement is whitespace we prepend it to the word
-          word = tabs * len(current_tabs) + word
-      # Handle the case where word cannot be squeezed onto current last line
-      if len(line) + len(word) > length and len(indent) + len(word) <= length:
-        result.append(line.rstrip())
-        line = indent + word
-        word = ''
-        # No space left on line or can we append a space?
-        if len(line) + 1 >= length:
-          result.append(line.rstrip())
-          line = indent
-        else:
-          line += ' '
-      # Add word and shorten it up to allowed line length. Restart next
-      # line with indent and repeat, or add a space if we're done (word
-      # finished) This deals with words that cannot fit on one line
-      # (e.g. indent + word longer than allowed line length).
-      while len(line) + len(word) >= length:
-        line += word
-        result.append(line[:length])
-        word = line[length:]
-        line = indent
-      # Default case, simply append the word and a space
-      if word:
-        line += word + ' '
-    # End of input line. If we have content we finish the line. If the
-    # current line is just the indent but we had content in during this
-    # original line then we need to add an empty line.
-    if (result and line != indent) or (not result and line != firstline_indent):
-      result.append(line.rstrip())
-    elif len(result) == old_result_len:
-      result.append('')
-    line = indent
-
-  return '\n'.join(result)
-
-
-def DocToHelp(doc):
-  """Takes a __doc__ string and reformats it as help."""
-
-  # Get rid of starting and ending white space. Using lstrip() or even
-  # strip() could drop more than maximum of first line and right space
-  # of last line.
-  doc = doc.strip()
-
-  # Get rid of all empty lines
-  whitespace_only_line = re.compile('^[ \t]+$', re.M)
-  doc = whitespace_only_line.sub('', doc)
-
-  # Cut out common space at line beginnings
-  doc = CutCommonSpacePrefix(doc)
-
-  # Just like this module's comment, comments tend to be aligned somehow.
-  # In other words they all start with the same amount of white space
-  # 1) keep double new lines
-  # 2) keep ws after new lines if not empty line
-  # 3) all other new lines shall be changed to a space
-  # Solution: Match new lines between non white space and replace with space.
-  doc = re.sub('(?<=\S)\n(?=\S)', ' ', doc, re.M)
-
-  return doc
-
-
-def _GetModuleObjectAndName(globals_dict):
-  """Returns the module that defines a global environment, and its name.
-
-  Args:
-    globals_dict: A dictionary that should correspond to an environment
-      providing the values of the globals.
-
-  Returns:
-    A pair consisting of (1) module object and (2) module name (a
-    string).  Returns (None, None) if the module could not be
-    identified.
-  """
-  # The use of .items() (instead of .iteritems()) is NOT a mistake: if
-  # a parallel thread imports a module while we iterate over
-  # .iteritems() (not nice, but possible), we get a RuntimeError ...
-  # Hence, we use the slightly slower but safer .items().
-  for name, module in sys.modules.items():
-    if getattr(module, '__dict__', None) is globals_dict:
-      if name == '__main__':
-        # Pick a more informative name for the main module.
-        name = sys.argv[0]
-      return (module, name)
-  return (None, None)
-
-
-def _GetMainModule():
-  """Returns: string, name of the module from which execution started."""
-  # First, try to use the same logic used by _GetCallingModuleObjectAndName(),
-  # i.e., call _GetModuleObjectAndName().  For that we first need to
-  # find the dictionary that the main module uses to store the
-  # globals.
-  #
-  # That's (normally) the same dictionary object that the deepest
-  # (oldest) stack frame is using for globals.
-  deepest_frame = sys._getframe(0)
-  while deepest_frame.f_back is not None:
-    deepest_frame = deepest_frame.f_back
-  globals_for_main_module = deepest_frame.f_globals
-  main_module_name = _GetModuleObjectAndName(globals_for_main_module)[1]
-  # The above strategy fails in some cases (e.g., tools that compute
-  # code coverage by redefining, among other things, the main module).
-  # If so, just use sys.argv[0].  We can probably always do this, but
-  # it's safest to try to use the same logic as _GetCallingModuleObjectAndName()
-  if main_module_name is None:
-    main_module_name = sys.argv[0]
-  return main_module_name
-
-
-class FlagValues:
-  """Registry of 'Flag' objects.
-
-  A 'FlagValues' can then scan command line arguments, passing flag
-  arguments through to the 'Flag' objects that it owns.  It also
-  provides easy access to the flag values.  Typically only one
-  'FlagValues' object is needed by an application: gflags.FLAGS
-
-  This class is heavily overloaded:
-
-  'Flag' objects are registered via __setitem__:
-       FLAGS['longname'] = x   # register a new flag
-
-  The .value attribute of the registered 'Flag' objects can be accessed
-  as attributes of this 'FlagValues' object, through __getattr__.  Both
-  the long and short name of the original 'Flag' objects can be used to
-  access its value:
-       FLAGS.longname          # parsed flag value
-       FLAGS.x                 # parsed flag value (short name)
-
-  Command line arguments are scanned and passed to the registered 'Flag'
-  objects through the __call__ method.  Unparsed arguments, including
-  argv[0] (e.g. the program name) are returned.
-       argv = FLAGS(sys.argv)  # scan command line arguments
-
-  The original registered Flag objects can be retrieved through the use
-  of the dictionary-like operator, __getitem__:
-       x = FLAGS['longname']   # access the registered Flag object
-
-  The str() operator of a 'FlagValues' object provides help for all of
-  the registered 'Flag' objects.
-  """
-
-  def __init__(self):
-    # Since everything in this class is so heavily overloaded, the only
-    # way of defining and using fields is to access __dict__ directly.
-
-    # Dictionary: flag name (string) -> Flag object.
-    self.__dict__['__flags'] = {}
-    # Dictionary: module name (string) -> list of Flag objects that are defined
-    # by that module.
-    self.__dict__['__flags_by_module'] = {}
-    # Dictionary: module id (int) -> list of Flag objects that are defined by
-    # that module.
-    self.__dict__['__flags_by_module_id'] = {}
-    # Dictionary: module name (string) -> list of Flag objects that are
-    # key for that module.
-    self.__dict__['__key_flags_by_module'] = {}
-
-    # Set if we should use new style gnu_getopt rather than getopt when parsing
-    # the args.  Only possible with Python 2.3+
-    self.UseGnuGetOpt(False)
-
-  def UseGnuGetOpt(self, use_gnu_getopt=True):
-    """Use GNU-style scanning. Allows mixing of flag and non-flag arguments.
-
-    See http://docs.python.org/library/getopt.html#getopt.gnu_getopt
-
-    Args:
-      use_gnu_getopt: wether or not to use GNU style scanning.
-    """
-    self.__dict__['__use_gnu_getopt'] = use_gnu_getopt
-
-  def IsGnuGetOpt(self):
-    return self.__dict__['__use_gnu_getopt']
-
-  def FlagDict(self):
-    return self.__dict__['__flags']
-
-  def FlagsByModuleDict(self):
-    """Returns the dictionary of module_name -> list of defined flags.
-
-    Returns:
-      A dictionary.  Its keys are module names (strings).  Its values
-      are lists of Flag objects.
-    """
-    return self.__dict__['__flags_by_module']
-
-  def FlagsByModuleIdDict(self):
-    """Returns the dictionary of module_id -> list of defined flags.
-
-    Returns:
-      A dictionary.  Its keys are module IDs (ints).  Its values
-      are lists of Flag objects.
-    """
-    return self.__dict__['__flags_by_module_id']
-
-  def KeyFlagsByModuleDict(self):
-    """Returns the dictionary of module_name -> list of key flags.
-
-    Returns:
-      A dictionary.  Its keys are module names (strings).  Its values
-      are lists of Flag objects.
-    """
-    return self.__dict__['__key_flags_by_module']
-
-  def _RegisterFlagByModule(self, module_name, flag):
-    """Records the module that defines a specific flag.
-
-    We keep track of which flag is defined by which module so that we
-    can later sort the flags by module.
-
-    Args:
-      module_name: A string, the name of a Python module.
-      flag: A Flag object, a flag that is key to the module.
-    """
-    flags_by_module = self.FlagsByModuleDict()
-    flags_by_module.setdefault(module_name, []).append(flag)
-
-  def _RegisterFlagByModuleId(self, module_id, flag):
-    """Records the module that defines a specific flag.
-
-    Args:
-      module_id: An int, the ID of the Python module.
-      flag: A Flag object, a flag that is key to the module.
-    """
-    flags_by_module_id = self.FlagsByModuleIdDict()
-    flags_by_module_id.setdefault(module_id, []).append(flag)
-
-  def _RegisterKeyFlagForModule(self, module_name, flag):
-    """Specifies that a flag is a key flag for a module.
-
-    Args:
-      module_name: A string, the name of a Python module.
-      flag: A Flag object, a flag that is key to the module.
-    """
-    key_flags_by_module = self.KeyFlagsByModuleDict()
-    # The list of key flags for the module named module_name.
-    key_flags = key_flags_by_module.setdefault(module_name, [])
-    # Add flag, but avoid duplicates.
-    if flag not in key_flags:
-      key_flags.append(flag)
-
-  def _GetFlagsDefinedByModule(self, module):
-    """Returns the list of flags defined by a module.
-
-    Args:
-      module: A module object or a module name (a string).
-
-    Returns:
-      A new list of Flag objects.  Caller may update this list as he
-      wishes: none of those changes will affect the internals of this
-      FlagValue object.
-    """
-    if not isinstance(module, str):
-      module = module.__name__
-
-    return list(self.FlagsByModuleDict().get(module, []))
-
-  def _GetKeyFlagsForModule(self, module):
-    """Returns the list of key flags for a module.
-
-    Args:
-      module: A module object or a module name (a string)
-
-    Returns:
-      A new list of Flag objects.  Caller may update this list as he
-      wishes: none of those changes will affect the internals of this
-      FlagValue object.
-    """
-    if not isinstance(module, str):
-      module = module.__name__
-
-    # Any flag is a key flag for the module that defined it.  NOTE:
-    # key_flags is a fresh list: we can update it without affecting the
-    # internals of this FlagValues object.
-    key_flags = self._GetFlagsDefinedByModule(module)
-
-    # Take into account flags explicitly declared as key for a module.
-    for flag in self.KeyFlagsByModuleDict().get(module, []):
-      if flag not in key_flags:
-        key_flags.append(flag)
-    return key_flags
-
-  def FindModuleDefiningFlag(self, flagname, default=None):
-    """Return the name of the module defining this flag, or default.
-
-    Args:
-      flagname: Name of the flag to lookup.
-      default: Value to return if flagname is not defined. Defaults
-          to None.
-
-    Returns:
-      The name of the module which registered the flag with this name.
-      If no such module exists (i.e. no flag with this name exists),
-      we return default.
-    """
-    for module, flags in self.FlagsByModuleDict().iteritems():
-      for flag in flags:
-        if flag.name == flagname or flag.short_name == flagname:
-          return module
-    return default
-
-  def FindModuleIdDefiningFlag(self, flagname, default=None):
-    """Return the ID of the module defining this flag, or default.
-
-    Args:
-      flagname: Name of the flag to lookup.
-      default: Value to return if flagname is not defined. Defaults
-          to None.
-
-    Returns:
-      The ID of the module which registered the flag with this name.
-      If no such module exists (i.e. no flag with this name exists),
-      we return default.
-    """
-    for module_id, flags in self.FlagsByModuleIdDict().iteritems():
-      for flag in flags:
-        if flag.name == flagname or flag.short_name == flagname:
-          return module_id
-    return default
-
-  def AppendFlagValues(self, flag_values):
-    """Appends flags registered in another FlagValues instance.
-
-    Args:
-      flag_values: registry to copy from
-    """
-    for flag_name, flag in flag_values.FlagDict().iteritems():
-      # Each flags with shortname appears here twice (once under its
-      # normal name, and again with its short name).  To prevent
-      # problems (DuplicateFlagError) with double flag registration, we
-      # perform a check to make sure that the entry we're looking at is
-      # for its normal name.
-      if flag_name == flag.name:
-        try:
-          self[flag_name] = flag
-        except DuplicateFlagError:
-          raise DuplicateFlagError(flag_name, self,
-                                   other_flag_values=flag_values)
-
-  def RemoveFlagValues(self, flag_values):
-    """Remove flags that were previously appended from another FlagValues.
-
-    Args:
-      flag_values: registry containing flags to remove.
-    """
-    for flag_name in flag_values.FlagDict():
-      self.__delattr__(flag_name)
-
-  def __setitem__(self, name, flag):
-    """Registers a new flag variable."""
-    fl = self.FlagDict()
-    if not isinstance(flag, Flag):
-      raise IllegalFlagValue(flag)
-    if not isinstance(name, type("")):
-      raise FlagsError("Flag name must be a string")
-    if len(name) == 0:
-      raise FlagsError("Flag name cannot be empty")
-    # If running under pychecker, duplicate keys are likely to be
-    # defined.  Disable check for duplicate keys when pycheck'ing.
-    if (name in fl and not flag.allow_override and
-        not fl[name].allow_override and not _RUNNING_PYCHECKER):
-      module, module_name = _GetCallingModuleObjectAndName()
-      if (self.FindModuleDefiningFlag(name) == module_name and
-          id(module) != self.FindModuleIdDefiningFlag(name)):
-        # If the flag has already been defined by a module with the same name,
-        # but a different ID, we can stop here because it indicates that the
-        # module is simply being imported a subsequent time.
-        return
-      raise DuplicateFlagError(name, self)
-    short_name = flag.short_name
-    if short_name is not None:
-      if (short_name in fl and not flag.allow_override and
-          not fl[short_name].allow_override and not _RUNNING_PYCHECKER):
-        raise DuplicateFlagError(short_name, self)
-      fl[short_name] = flag
-    fl[name] = flag
-    global _exported_flags
-    _exported_flags[name] = flag
-
-  def __getitem__(self, name):
-    """Retrieves the Flag object for the flag --name."""
-    return self.FlagDict()[name]
-
-  def __getattr__(self, name):
-    """Retrieves the 'value' attribute of the flag --name."""
-    fl = self.FlagDict()
-    if name not in fl:
-      raise AttributeError(name)
-    return fl[name].value
-
-  def __setattr__(self, name, value):
-    """Sets the 'value' attribute of the flag --name."""
-    fl = self.FlagDict()
-    fl[name].value = value
-    self._AssertValidators(fl[name].validators)
-    return value
-
-  def _AssertAllValidators(self):
-    all_validators = set()
-    for flag in self.FlagDict().itervalues():
-      for validator in flag.validators:
-        all_validators.add(validator)
-    self._AssertValidators(all_validators)
-
-  def _AssertValidators(self, validators):
-    """Assert if all validators in the list are satisfied.
-
-    Asserts validators in the order they were created.
-    Args:
-      validators: Iterable(gflags_validators.Validator), validators to be
-        verified
-    Raises:
-      AttributeError: if validators work with a non-existing flag.
-      IllegalFlagValue: if validation fails for at least one validator
-    """
-    for validator in sorted(
-        validators, key=lambda validator: validator.insertion_index):
-      try:
-        validator.Verify(self)
-      except gflags_validators.Error, e:
-        message = validator.PrintFlagsWithValues(self)
-        raise IllegalFlagValue('%s: %s' % (message, str(e)))
-
-  def _FlagIsRegistered(self, flag_obj):
-    """Checks whether a Flag object is registered under some name.
-
-    Note: this is non trivial: in addition to its normal name, a flag
-    may have a short name too.  In self.FlagDict(), both the normal and
-    the short name are mapped to the same flag object.  E.g., calling
-    only "del FLAGS.short_name" is not unregistering the corresponding
-    Flag object (it is still registered under the longer name).
-
-    Args:
-      flag_obj: A Flag object.
-
-    Returns:
-      A boolean: True iff flag_obj is registered under some name.
-    """
-    flag_dict = self.FlagDict()
-    # Check whether flag_obj is registered under its long name.
-    name = flag_obj.name
-    if flag_dict.get(name, None) == flag_obj:
-      return True
-    # Check whether flag_obj is registered under its short name.
-    short_name = flag_obj.short_name
-    if (short_name is not None and
-        flag_dict.get(short_name, None) == flag_obj):
-      return True
-    # The flag cannot be registered under any other name, so we do not
-    # need to do a full search through the values of self.FlagDict().
-    return False
-
-  def __delattr__(self, flag_name):
-    """Deletes a previously-defined flag from a flag object.
-
-    This method makes sure we can delete a flag by using
-
-      del flag_values_object.<flag_name>
-
-    E.g.,
-
-      gflags.DEFINE_integer('foo', 1, 'Integer flag.')
-      del gflags.FLAGS.foo
-
-    Args:
-      flag_name: A string, the name of the flag to be deleted.
-
-    Raises:
-      AttributeError: When there is no registered flag named flag_name.
-    """
-    fl = self.FlagDict()
-    if flag_name not in fl:
-      raise AttributeError(flag_name)
-
-    flag_obj = fl[flag_name]
-    del fl[flag_name]
-
-    if not self._FlagIsRegistered(flag_obj):
-      # If the Flag object indicated by flag_name is no longer
-      # registered (please see the docstring of _FlagIsRegistered), then
-      # we delete the occurrences of the flag object in all our internal
-      # dictionaries.
-      self.__RemoveFlagFromDictByModule(self.FlagsByModuleDict(), flag_obj)
-      self.__RemoveFlagFromDictByModule(self.FlagsByModuleIdDict(), flag_obj)
-      self.__RemoveFlagFromDictByModule(self.KeyFlagsByModuleDict(), flag_obj)
-
-  def __RemoveFlagFromDictByModule(self, flags_by_module_dict, flag_obj):
-    """Removes a flag object from a module -> list of flags dictionary.
-
-    Args:
-      flags_by_module_dict: A dictionary that maps module names to lists of
-        flags.
-      flag_obj: A flag object.
-    """
-    for unused_module, flags_in_module in flags_by_module_dict.iteritems():
-      # while (as opposed to if) takes care of multiple occurrences of a
-      # flag in the list for the same module.
-      while flag_obj in flags_in_module:
-        flags_in_module.remove(flag_obj)
-
-  def SetDefault(self, name, value):
-    """Changes the default value of the named flag object."""
-    fl = self.FlagDict()
-    if name not in fl:
-      raise AttributeError(name)
-    fl[name].SetDefault(value)
-    self._AssertValidators(fl[name].validators)
-
-  def __contains__(self, name):
-    """Returns True if name is a value (flag) in the dict."""
-    return name in self.FlagDict()
-
-  has_key = __contains__  # a synonym for __contains__()
-
-  def __iter__(self):
-    return iter(self.FlagDict())
-
-  def __call__(self, argv):
-    """Parses flags from argv; stores parsed flags into this FlagValues object.
-
-    All unparsed arguments are returned.  Flags are parsed using the GNU
-    Program Argument Syntax Conventions, using getopt:
-
-    http://www.gnu.org/software/libc/manual/html_mono/libc.html#Getopt
-
-    Args:
-       argv: argument list. Can be of any type that may be converted to a list.
-
-    Returns:
-       The list of arguments not parsed as options, including argv[0]
-
-    Raises:
-       FlagsError: on any parsing error
-    """
-    # Support any sequence type that can be converted to a list
-    argv = list(argv)
-
-    shortopts = ""
-    longopts = []
-
-    fl = self.FlagDict()
-
-    # This pre parses the argv list for --flagfile=<> options.
-    argv = argv[:1] + self.ReadFlagsFromFiles(argv[1:], force_gnu=False)
-
-    # Correct the argv to support the google style of passing boolean
-    # parameters.  Boolean parameters may be passed by using --mybool,
-    # --nomybool, --mybool=(true|false|1|0).  getopt does not support
-    # having options that may or may not have a parameter.  We replace
-    # instances of the short form --mybool and --nomybool with their
-    # full forms: --mybool=(true|false).
-    original_argv = list(argv)  # list() makes a copy
-    shortest_matches = None
-    for name, flag in fl.items():
-      if not flag.boolean:
-        continue
-      if shortest_matches is None:
-        # Determine the smallest allowable prefix for all flag names
-        shortest_matches = self.ShortestUniquePrefixes(fl)
-      no_name = 'no' + name
-      prefix = shortest_matches[name]
-      no_prefix = shortest_matches[no_name]
-
-      # Replace all occurrences of this boolean with extended forms
-      for arg_idx in range(1, len(argv)):
-        arg = argv[arg_idx]
-        if arg.find('=') >= 0: continue
-        if arg.startswith('--'+prefix) and ('--'+name).startswith(arg):
-          argv[arg_idx] = ('--%s=true' % name)
-        elif arg.startswith('--'+no_prefix) and ('--'+no_name).startswith(arg):
-          argv[arg_idx] = ('--%s=false' % name)
-
-    # Loop over all of the flags, building up the lists of short options
-    # and long options that will be passed to getopt.  Short options are
-    # specified as a string of letters, each letter followed by a colon
-    # if it takes an argument.  Long options are stored in an array of
-    # strings.  Each string ends with an '=' if it takes an argument.
-    for name, flag in fl.items():
-      longopts.append(name + "=")
-      if len(name) == 1:  # one-letter option: allow short flag type also
-        shortopts += name
-        if not flag.boolean:
-          shortopts += ":"
-
-    longopts.append('undefok=')
-    undefok_flags = []
-
-    # In case --undefok is specified, loop to pick up unrecognized
-    # options one by one.
-    unrecognized_opts = []
-    args = argv[1:]
-    while True:
-      try:
-        if self.__dict__['__use_gnu_getopt']:
-          optlist, unparsed_args = getopt.gnu_getopt(args, shortopts, longopts)
-        else:
-          optlist, unparsed_args = getopt.getopt(args, shortopts, longopts)
-        break
-      except getopt.GetoptError, e:
-        if not e.opt or e.opt in fl:
-          # Not an unrecognized option, re-raise the exception as a FlagsError
-          raise FlagsError(e)
-        # Remove offender from args and try again
-        for arg_index in range(len(args)):
-          if ((args[arg_index] == '--' + e.opt) or
-              (args[arg_index] == '-' + e.opt) or
-              (args[arg_index].startswith('--' + e.opt + '='))):
-            unrecognized_opts.append((e.opt, args[arg_index]))
-            args = args[0:arg_index] + args[arg_index+1:]
-            break
-        else:
-          # We should have found the option, so we don't expect to get
-          # here.  We could assert, but raising the original exception
-          # might work better.
-          raise FlagsError(e)
-
-    for name, arg in optlist:
-      if name == '--undefok':
-        flag_names = arg.split(',')
-        undefok_flags.extend(flag_names)
-        # For boolean flags, if --undefok=boolflag is specified, then we should
-        # also accept --noboolflag, in addition to --boolflag.
-        # Since we don't know the type of the undefok'd flag, this will affect
-        # non-boolean flags as well.
-        # NOTE: You shouldn't use --undefok=noboolflag, because then we will
-        # accept --nonoboolflag here.  We are choosing not to do the conversion
-        # from noboolflag -> boolflag because of the ambiguity that flag names
-        # can start with 'no'.
-        undefok_flags.extend('no' + name for name in flag_names)
-        continue
-      if name.startswith('--'):
-        # long option
-        name = name[2:]
-        short_option = 0
-      else:
-        # short option
-        name = name[1:]
-        short_option = 1
-      if name in fl:
-        flag = fl[name]
-        if flag.boolean and short_option: arg = 1
-        flag.Parse(arg)
-
-    # If there were unrecognized options, raise an exception unless
-    # the options were named via --undefok.
-    for opt, value in unrecognized_opts:
-      if opt not in undefok_flags:
-        raise UnrecognizedFlagError(opt, value)
-
-    if unparsed_args:
-      if self.__dict__['__use_gnu_getopt']:
-        # if using gnu_getopt just return the program name + remainder of argv.
-        ret_val = argv[:1] + unparsed_args
-      else:
-        # unparsed_args becomes the first non-flag detected by getopt to
-        # the end of argv.  Because argv may have been modified above,
-        # return original_argv for this region.
-        ret_val = argv[:1] + original_argv[-len(unparsed_args):]
-    else:
-      ret_val = argv[:1]
-
-    self._AssertAllValidators()
-    return ret_val
-
-  def Reset(self):
-    """Resets the values to the point before FLAGS(argv) was called."""
-    for f in self.FlagDict().values():
-      f.Unparse()
-
-  def RegisteredFlags(self):
-    """Returns: a list of the names and short names of all registered flags."""
-    return list(self.FlagDict())
-
-  def FlagValuesDict(self):
-    """Returns: a dictionary that maps flag names to flag values."""
-    flag_values = {}
-
-    for flag_name in self.RegisteredFlags():
-      flag = self.FlagDict()[flag_name]
-      flag_values[flag_name] = flag.value
-
-    return flag_values
-
-  def __str__(self):
-    """Generates a help string for all known flags."""
-    return self.GetHelp()
-
-  def GetHelp(self, prefix=''):
-    """Generates a help string for all known flags."""
-    helplist = []
-
-    flags_by_module = self.FlagsByModuleDict()
-    if flags_by_module:
-
-      modules = sorted(flags_by_module)
-
-      # Print the help for the main module first, if possible.
-      main_module = _GetMainModule()
-      if main_module in modules:
-        modules.remove(main_module)
-        modules = [main_module] + modules
-
-      for module in modules:
-        self.__RenderOurModuleFlags(module, helplist)
-
-      self.__RenderModuleFlags('gflags',
-                               _SPECIAL_FLAGS.FlagDict().values(),
-                               helplist)
-
-    else:
-      # Just print one long list of flags.
-      self.__RenderFlagList(
-          self.FlagDict().values() + _SPECIAL_FLAGS.FlagDict().values(),
-          helplist, prefix)
-
-    return '\n'.join(helplist)
-
-  def __RenderModuleFlags(self, module, flags, output_lines, prefix=""):
-    """Generates a help string for a given module."""
-    if not isinstance(module, str):
-      module = module.__name__
-    output_lines.append('\n%s%s:' % (prefix, module))
-    self.__RenderFlagList(flags, output_lines, prefix + "  ")
-
-  def __RenderOurModuleFlags(self, module, output_lines, prefix=""):
-    """Generates a help string for a given module."""
-    flags = self._GetFlagsDefinedByModule(module)
-    if flags:
-      self.__RenderModuleFlags(module, flags, output_lines, prefix)
-
-  def __RenderOurModuleKeyFlags(self, module, output_lines, prefix=""):
-    """Generates a help string for the key flags of a given module.
-
-    Args:
-      module: A module object or a module name (a string).
-      output_lines: A list of strings.  The generated help message
-        lines will be appended to this list.
-      prefix: A string that is prepended to each generated help line.
-    """
-    key_flags = self._GetKeyFlagsForModule(module)
-    if key_flags:
-      self.__RenderModuleFlags(module, key_flags, output_lines, prefix)
-
-  def ModuleHelp(self, module):
-    """Describe the key flags of a module.
-
-    Args:
-      module: A module object or a module name (a string).
-
-    Returns:
-      string describing the key flags of a module.
-    """
-    helplist = []
-    self.__RenderOurModuleKeyFlags(module, helplist)
-    return '\n'.join(helplist)
-
-  def MainModuleHelp(self):
-    """Describe the key flags of the main module.
-
-    Returns:
-      string describing the key flags of a module.
-    """
-    return self.ModuleHelp(_GetMainModule())
-
-  def __RenderFlagList(self, flaglist, output_lines, prefix="  "):
-    fl = self.FlagDict()
-    special_fl = _SPECIAL_FLAGS.FlagDict()
-    flaglist = [(flag.name, flag) for flag in flaglist]
-    flaglist.sort()
-    flagset = {}
-    for (name, flag) in flaglist:
-      # It's possible this flag got deleted or overridden since being
-      # registered in the per-module flaglist.  Check now against the
-      # canonical source of current flag information, the FlagDict.
-      if fl.get(name, None) != flag and special_fl.get(name, None) != flag:
-        # a different flag is using this name now
-        continue
-      # only print help once
-      if flag in flagset: continue
-      flagset[flag] = 1
-      flaghelp = ""
-      if flag.short_name: flaghelp += "-%s," % flag.short_name
-      if flag.boolean:
-        flaghelp += "--[no]%s" % flag.name + ":"
-      else:
-        flaghelp += "--%s" % flag.name + ":"
-      flaghelp += "  "
-      if flag.help:
-        flaghelp += flag.help
-      flaghelp = TextWrap(flaghelp, indent=prefix+"  ",
-                          firstline_indent=prefix)
-      if flag.default_as_str:
-        flaghelp += "\n"
-        flaghelp += TextWrap("(default: %s)" % flag.default_as_str,
-                             indent=prefix+"  ")
-      if flag.parser.syntactic_help:
-        flaghelp += "\n"
-        flaghelp += TextWrap("(%s)" % flag.parser.syntactic_help,
-                             indent=prefix+"  ")
-      output_lines.append(flaghelp)
-
-  def get(self, name, default):
-    """Returns the value of a flag (if not None) or a default value.
-
-    Args:
-      name: A string, the name of a flag.
-      default: Default value to use if the flag value is None.
-    """
-
-    value = self.__getattr__(name)
-    if value is not None:  # Can't do if not value, b/c value might be '0' or ""
-      return value
-    else:
-      return default
-
-  def ShortestUniquePrefixes(self, fl):
-    """Returns: dictionary; maps flag names to their shortest unique prefix."""
-    # Sort the list of flag names
-    sorted_flags = []
-    for name, flag in fl.items():
-      sorted_flags.append(name)
-      if flag.boolean:
-        sorted_flags.append('no%s' % name)
-    sorted_flags.sort()
-
-    # For each name in the sorted list, determine the shortest unique
-    # prefix by comparing itself to the next name and to the previous
-    # name (the latter check uses cached info from the previous loop).
-    shortest_matches = {}
-    prev_idx = 0
-    for flag_idx in range(len(sorted_flags)):
-      curr = sorted_flags[flag_idx]
-      if flag_idx == (len(sorted_flags) - 1):
-        next = None
-      else:
-        next = sorted_flags[flag_idx+1]
-        next_len = len(next)
-      for curr_idx in range(len(curr)):
-        if (next is None
-            or curr_idx >= next_len
-            or curr[curr_idx] != next[curr_idx]):
-          # curr longer than next or no more chars in common
-          shortest_matches[curr] = curr[:max(prev_idx, curr_idx) + 1]
-          prev_idx = curr_idx
-          break
-      else:
-        # curr shorter than (or equal to) next
-        shortest_matches[curr] = curr
-        prev_idx = curr_idx + 1  # next will need at least one more char
-    return shortest_matches
-
-  def __IsFlagFileDirective(self, flag_string):
-    """Checks whether flag_string contain a --flagfile=<foo> directive."""
-    if isinstance(flag_string, type("")):
-      if flag_string.startswith('--flagfile='):
-        return 1
-      elif flag_string == '--flagfile':
-        return 1
-      elif flag_string.startswith('-flagfile='):
-        return 1
-      elif flag_string == '-flagfile':
-        return 1
-      else:
-        return 0
-    return 0
-
-  def ExtractFilename(self, flagfile_str):
-    """Returns filename from a flagfile_str of form -[-]flagfile=filename.
-
-    The cases of --flagfile foo and -flagfile foo shouldn't be hitting
-    this function, as they are dealt with in the level above this
-    function.
-    """
-    if flagfile_str.startswith('--flagfile='):
-      return os.path.expanduser((flagfile_str[(len('--flagfile=')):]).strip())
-    elif flagfile_str.startswith('-flagfile='):
-      return os.path.expanduser((flagfile_str[(len('-flagfile=')):]).strip())
-    else:
-      raise FlagsError('Hit illegal --flagfile type: %s' % flagfile_str)
-
-  def __GetFlagFileLines(self, filename, parsed_file_list):
-    """Returns the useful (!=comments, etc) lines from a file with flags.
-
-    Args:
-      filename: A string, the name of the flag file.
-      parsed_file_list: A list of the names of the files we have
-        already read.  MUTATED BY THIS FUNCTION.
-
-    Returns:
-      List of strings. See the note below.
-
-    NOTE(springer): This function checks for a nested --flagfile=<foo>
-    tag and handles the lower file recursively. It returns a list of
-    all the lines that _could_ contain command flags. This is
-    EVERYTHING except whitespace lines and comments (lines starting
-    with '#' or '//').
-    """
-    line_list = []  # All line from flagfile.
-    flag_line_list = []  # Subset of lines w/o comments, blanks, flagfile= tags.
-    try:
-      file_obj = open(filename, 'r')
-    except IOError, e_msg:
-      raise CantOpenFlagFileError('ERROR:: Unable to open flagfile: %s' % e_msg)
-
-    line_list = file_obj.readlines()
-    file_obj.close()
-    parsed_file_list.append(filename)
-
-    # This is where we check each line in the file we just read.
-    for line in line_list:
-      if line.isspace():
-        pass
-      # Checks for comment (a line that starts with '#').
-      elif line.startswith('#') or line.startswith('//'):
-        pass
-      # Checks for a nested "--flagfile=<bar>" flag in the current file.
-      # If we find one, recursively parse down into that file.
-      elif self.__IsFlagFileDirective(line):
-        sub_filename = self.ExtractFilename(line)
-        # We do a little safety check for reparsing a file we've already done.
-        if not sub_filename in parsed_file_list:
-          included_flags = self.__GetFlagFileLines(sub_filename,
-                                                   parsed_file_list)
-          flag_line_list.extend(included_flags)
-        else:  # Case of hitting a circularly included file.
-          sys.stderr.write('Warning: Hit circular flagfile dependency: %s\n' %
-                           (sub_filename,))
-      else:
-        # Any line that's not a comment or a nested flagfile should get
-        # copied into 2nd position.  This leaves earlier arguments
-        # further back in the list, thus giving them higher priority.
-        flag_line_list.append(line.strip())
-    return flag_line_list
-
-  def ReadFlagsFromFiles(self, argv, force_gnu=True):
-    """Processes command line args, but also allow args to be read from file.
-
-    Args:
-      argv: A list of strings, usually sys.argv[1:], which may contain one or
-        more flagfile directives of the form --flagfile="./filename".
-        Note that the name of the program (sys.argv[0]) should be omitted.
-      force_gnu: If False, --flagfile parsing obeys normal flag semantics.
-        If True, --flagfile parsing instead follows gnu_getopt semantics.
-        *** WARNING *** force_gnu=False may become the future default!
-
-    Returns:
-
-      A new list which has the original list combined with what we read
-      from any flagfile(s).
-
-    References: Global gflags.FLAG class instance.
-
-    This function should be called before the normal FLAGS(argv) call.
-    This function scans the input list for a flag that looks like:
-    --flagfile=<somefile>. Then it opens <somefile>, reads all valid key
-    and value pairs and inserts them into the input list between the
-    first item of the list and any subsequent items in the list.
-
-    Note that your application's flags are still defined the usual way
-    using gflags DEFINE_flag() type functions.
-
-    Notes (assuming we're getting a commandline of some sort as our input):
-    --> Flags from the command line argv _should_ always take precedence!
-    --> A further "--flagfile=<otherfile.cfg>" CAN be nested in a flagfile.
-        It will be processed after the parent flag file is done.
-    --> For duplicate flags, first one we hit should "win".
-    --> In a flagfile, a line beginning with # or // is a comment.
-    --> Entirely blank lines _should_ be ignored.
-    """
-    parsed_file_list = []
-    rest_of_args = argv
-    new_argv = []
-    while rest_of_args:
-      current_arg = rest_of_args[0]
-      rest_of_args = rest_of_args[1:]
-      if self.__IsFlagFileDirective(current_arg):
-        # This handles the case of -(-)flagfile foo.  In this case the
-        # next arg really is part of this one.
-        if current_arg == '--flagfile' or current_arg == '-flagfile':
-          if not rest_of_args:
-            raise IllegalFlagValue('--flagfile with no argument')
-          flag_filename = os.path.expanduser(rest_of_args[0])
-          rest_of_args = rest_of_args[1:]
-        else:
-          # This handles the case of (-)-flagfile=foo.
-          flag_filename = self.ExtractFilename(current_arg)
-        new_argv.extend(
-            self.__GetFlagFileLines(flag_filename, parsed_file_list))
-      else:
-        new_argv.append(current_arg)
-        # Stop parsing after '--', like getopt and gnu_getopt.
-        if current_arg == '--':
-          break
-        # Stop parsing after a non-flag, like getopt.
-        if not current_arg.startswith('-'):
-          if not force_gnu and not self.__dict__['__use_gnu_getopt']:
-            break
-
-    if rest_of_args:
-      new_argv.extend(rest_of_args)
-
-    return new_argv
-
-  def FlagsIntoString(self):
-    """Returns a string with the flags assignments from this FlagValues object.
-
-    This function ignores flags whose value is None.  Each flag
-    assignment is separated by a newline.
-
-    NOTE: MUST mirror the behavior of the C++ CommandlineFlagsIntoString
-    from http://code.google.com/p/google-gflags
-    """
-    s = ''
-    for flag in self.FlagDict().values():
-      if flag.value is not None:
-        s += flag.Serialize() + '\n'
-    return s
-
-  def AppendFlagsIntoFile(self, filename):
-    """Appends all flags assignments from this FlagInfo object to a file.
-
-    Output will be in the format of a flagfile.
-
-    NOTE: MUST mirror the behavior of the C++ AppendFlagsIntoFile
-    from http://code.google.com/p/google-gflags
-    """
-    out_file = open(filename, 'a')
-    out_file.write(self.FlagsIntoString())
-    out_file.close()
-
-  def WriteHelpInXMLFormat(self, outfile=None):
-    """Outputs flag documentation in XML format.
-
-    NOTE: We use element names that are consistent with those used by
-    the C++ command-line flag library, from
-    http://code.google.com/p/google-gflags
-    We also use a few new elements (e.g., <key>), but we do not
-    interfere / overlap with existing XML elements used by the C++
-    library.  Please maintain this consistency.
-
-    Args:
-      outfile: File object we write to.  Default None means sys.stdout.
-    """
-    outfile = outfile or sys.stdout
-
-    outfile.write('<?xml version=\"1.0\"?>\n')
-    outfile.write('<AllFlags>\n')
-    indent = '  '
-    _WriteSimpleXMLElement(outfile, 'program', os.path.basename(sys.argv[0]),
-                           indent)
-
-    usage_doc = sys.modules['__main__'].__doc__
-    if not usage_doc:
-      usage_doc = '\nUSAGE: %s [flags]\n' % sys.argv[0]
-    else:
-      usage_doc = usage_doc.replace('%s', sys.argv[0])
-    _WriteSimpleXMLElement(outfile, 'usage', usage_doc, indent)
-
-    # Get list of key flags for the main module.
-    key_flags = self._GetKeyFlagsForModule(_GetMainModule())
-
-    # Sort flags by declaring module name and next by flag name.
-    flags_by_module = self.FlagsByModuleDict()
-    all_module_names = list(flags_by_module.keys())
-    all_module_names.sort()
-    for module_name in all_module_names:
-      flag_list = [(f.name, f) for f in flags_by_module[module_name]]
-      flag_list.sort()
-      for unused_flag_name, flag in flag_list:
-        is_key = flag in key_flags
-        flag.WriteInfoInXMLFormat(outfile, module_name,
-                                  is_key=is_key, indent=indent)
-
-    outfile.write('</AllFlags>\n')
-    outfile.flush()
-
-  def AddValidator(self, validator):
-    """Register new flags validator to be checked.
-
-    Args:
-      validator: gflags_validators.Validator
-    Raises:
-      AttributeError: if validators work with a non-existing flag.
-    """
-    for flag_name in validator.GetFlagsNames():
-      flag = self.FlagDict()[flag_name]
-      flag.validators.append(validator)
-
-# end of FlagValues definition
-
-
-# The global FlagValues instance
-FLAGS = FlagValues()
-
-
-def _StrOrUnicode(value):
-  """Converts value to a python string or, if necessary, unicode-string."""
-  try:
-    return str(value)
-  except UnicodeEncodeError:
-    return unicode(value)
-
-
-def _MakeXMLSafe(s):
-  """Escapes <, >, and & from s, and removes XML 1.0-illegal chars."""
-  s = cgi.escape(s)  # Escape <, >, and &
-  # Remove characters that cannot appear in an XML 1.0 document
-  # (http://www.w3.org/TR/REC-xml/#charsets).
-  #
-  # NOTE: if there are problems with current solution, one may move to
-  # XML 1.1, which allows such chars, if they're entity-escaped (&#xHH;).
-  s = re.sub(r'[\x00-\x08\x0b\x0c\x0e-\x1f]', '', s)
-  # Convert non-ascii characters to entities.  Note: requires python >=2.3
-  s = s.encode('ascii', 'xmlcharrefreplace')   # u'\xce\x88' -> 'u&#904;'
-  return s
-
-
-def _WriteSimpleXMLElement(outfile, name, value, indent):
-  """Writes a simple XML element.
-
-  Args:
-    outfile: File object we write the XML element to.
-    name: A string, the name of XML element.
-    value: A Python object, whose string representation will be used
-      as the value of the XML element.
-    indent: A string, prepended to each line of generated output.
-  """
-  value_str = _StrOrUnicode(value)
-  if isinstance(value, bool):
-    # Display boolean values as the C++ flag library does: no caps.
-    value_str = value_str.lower()
-  safe_value_str = _MakeXMLSafe(value_str)
-  outfile.write('%s<%s>%s</%s>\n' % (indent, name, safe_value_str, name))
-
-
-class Flag:
-  """Information about a command-line flag.
-
-  'Flag' objects define the following fields:
-    .name  - the name for this flag
-    .default - the default value for this flag
-    .default_as_str - default value as repr'd string, e.g., "'true'" (or None)
-    .value  - the most recent parsed value of this flag; set by Parse()
-    .help  - a help string or None if no help is available
-    .short_name  - the single letter alias for this flag (or None)
-    .boolean  - if 'true', this flag does not accept arguments
-    .present  - true if this flag was parsed from command line flags.
-    .parser  - an ArgumentParser object
-    .serializer - an ArgumentSerializer object
-    .allow_override - the flag may be redefined without raising an error
-
-  The only public method of a 'Flag' object is Parse(), but it is
-  typically only called by a 'FlagValues' object.  The Parse() method is
-  a thin wrapper around the 'ArgumentParser' Parse() method.  The parsed
-  value is saved in .value, and the .present attribute is updated.  If
-  this flag was already present, a FlagsError is raised.
-
-  Parse() is also called during __init__ to parse the default value and
-  initialize the .value attribute.  This enables other python modules to
-  safely use flags even if the __main__ module neglects to parse the
-  command line arguments.  The .present attribute is cleared after
-  __init__ parsing.  If the default value is set to None, then the
-  __init__ parsing step is skipped and the .value attribute is
-  initialized to None.
-
-  Note: The default value is also presented to the user in the help
-  string, so it is important that it be a legal value for this flag.
-  """
-
-  def __init__(self, parser, serializer, name, default, help_string,
-               short_name=None, boolean=0, allow_override=0):
-    self.name = name
-
-    if not help_string:
-      help_string = '(no help available)'
-
-    self.help = help_string
-    self.short_name = short_name
-    self.boolean = boolean
-    self.present = 0
-    self.parser = parser
-    self.serializer = serializer
-    self.allow_override = allow_override
-    self.value = None
-    self.validators = []
-
-    self.SetDefault(default)
-
-  def __hash__(self):
-    return hash(id(self))
-
-  def __eq__(self, other):
-    return self is other
-
-  def __lt__(self, other):
-    if isinstance(other, Flag):
-      return id(self) < id(other)
-    return NotImplemented
-
-  def __GetParsedValueAsString(self, value):
-    if value is None:
-      return None
-    if self.serializer:
-      return repr(self.serializer.Serialize(value))
-    if self.boolean:
-      if value:
-        return repr('true')
-      else:
-        return repr('false')
-    return repr(_StrOrUnicode(value))
-
-  def Parse(self, argument):
-    try:
-      self.value = self.parser.Parse(argument)
-    except ValueError, e:  # recast ValueError as IllegalFlagValue
-      raise IllegalFlagValue("flag --%s=%s: %s" % (self.name, argument, e))
-    self.present += 1
-
-  def Unparse(self):
-    if self.default is None:
-      self.value = None
-    else:
-      self.Parse(self.default)
-    self.present = 0
-
-  def Serialize(self):
-    if self.value is None:
-      return ''
-    if self.boolean:
-      if self.value:
-        return "--%s" % self.name
-      else:
-        return "--no%s" % self.name
-    else:
-      if not self.serializer:
-        raise FlagsError("Serializer not present for flag %s" % self.name)
-      return "--%s=%s" % (self.name, self.serializer.Serialize(self.value))
-
-  def SetDefault(self, value):
-    """Changes the default value (and current value too) for this Flag."""
-    # We can't allow a None override because it may end up not being
-    # passed to C++ code when we're overriding C++ flags.  So we
-    # cowardly bail out until someone fixes the semantics of trying to
-    # pass None to a C++ flag.  See swig_flags.Init() for details on
-    # this behavior.
-    # TODO(olexiy): Users can directly call this method, bypassing all flags
-    # validators (we don't have FlagValues here, so we can not check
-    # validators).
-    # The simplest solution I see is to make this method private.
-    # Another approach would be to store reference to the corresponding
-    # FlagValues with each flag, but this seems to be an overkill.
-    if value is None and self.allow_override:
-      raise DuplicateFlagCannotPropagateNoneToSwig(self.name)
-
-    self.default = value
-    self.Unparse()
-    self.default_as_str = self.__GetParsedValueAsString(self.value)
-
-  def Type(self):
-    """Returns: a string that describes the type of this Flag."""
-    # NOTE: we use strings, and not the types.*Type constants because
-    # our flags can have more exotic types, e.g., 'comma separated list
-    # of strings', 'whitespace separated list of strings', etc.
-    return self.parser.Type()
-
-  def WriteInfoInXMLFormat(self, outfile, module_name, is_key=False, indent=''):
-    """Writes common info about this flag, in XML format.
-
-    This is information that is relevant to all flags (e.g., name,
-    meaning, etc.).  If you defined a flag that has some other pieces of
-    info, then please override _WriteCustomInfoInXMLFormat.
-
-    Please do NOT override this method.
-
-    Args:
-      outfile: File object we write to.
-      module_name: A string, the name of the module that defines this flag.
-      is_key: A boolean, True iff this flag is key for main module.
-      indent: A string that is prepended to each generated line.
-    """
-    outfile.write(indent + '<flag>\n')
-    inner_indent = indent + '  '
-    if is_key:
-      _WriteSimpleXMLElement(outfile, 'key', 'yes', inner_indent)
-    _WriteSimpleXMLElement(outfile, 'file', module_name, inner_indent)
-    # Print flag features that are relevant for all flags.
-    _WriteSimpleXMLElement(outfile, 'name', self.name, inner_indent)
-    if self.short_name:
-      _WriteSimpleXMLElement(outfile, 'short_name', self.short_name,
-                             inner_indent)
-    if self.help:
-      _WriteSimpleXMLElement(outfile, 'meaning', self.help, inner_indent)
-    # The default flag value can either be represented as a string like on the
-    # command line, or as a Python object.  We serialize this value in the
-    # latter case in order to remain consistent.
-    if self.serializer and not isinstance(self.default, str):
-      default_serialized = self.serializer.Serialize(self.default)
-    else:
-      default_serialized = self.default
-    _WriteSimpleXMLElement(outfile, 'default', default_serialized, inner_indent)
-    _WriteSimpleXMLElement(outfile, 'current', self.value, inner_indent)
-    _WriteSimpleXMLElement(outfile, 'type', self.Type(), inner_indent)
-    # Print extra flag features this flag may have.
-    self._WriteCustomInfoInXMLFormat(outfile, inner_indent)
-    outfile.write(indent + '</flag>\n')
-
-  def _WriteCustomInfoInXMLFormat(self, outfile, indent):
-    """Writes extra info about this flag, in XML format.
-
-    "Extra" means "not already printed by WriteInfoInXMLFormat above."
-
-    Args:
-      outfile: File object we write to.
-      indent: A string that is prepended to each generated line.
-    """
-    # Usually, the parser knows the extra details about the flag, so
-    # we just forward the call to it.
-    self.parser.WriteCustomInfoInXMLFormat(outfile, indent)
-# End of Flag definition
-
-
-class _ArgumentParserCache(type):
-  """Metaclass used to cache and share argument parsers among flags."""
-
-  _instances = {}
-
-  def __call__(mcs, *args, **kwargs):
-    """Returns an instance of the argument parser cls.
-
-    This method overrides behavior of the __new__ methods in
-    all subclasses of ArgumentParser (inclusive). If an instance
-    for mcs with the same set of arguments exists, this instance is
-    returned, otherwise a new instance is created.
-
-    If any keyword arguments are defined, or the values in args
-    are not hashable, this method always returns a new instance of
-    cls.
-
-    Args:
-      args: Positional initializer arguments.
-      kwargs: Initializer keyword arguments.
-
-    Returns:
-      An instance of cls, shared or new.
-    """
-    if kwargs:
-      return type.__call__(mcs, *args, **kwargs)
-    else:
-      instances = mcs._instances
-      key = (mcs,) + tuple(args)
-      try:
-        return instances[key]
-      except KeyError:
-        # No cache entry for key exists, create a new one.
-        return instances.setdefault(key, type.__call__(mcs, *args))
-      except TypeError:
-        # An object in args cannot be hashed, always return
-        # a new instance.
-        return type.__call__(mcs, *args)
-
-
-class ArgumentParser(object):
-  """Base class used to parse and convert arguments.
-
-  The Parse() method checks to make sure that the string argument is a
-  legal value and convert it to a native type.  If the value cannot be
-  converted, it should throw a 'ValueError' exception with a human
-  readable explanation of why the value is illegal.
-
-  Subclasses should also define a syntactic_help string which may be
-  presented to the user to describe the form of the legal values.
-
-  Argument parser classes must be stateless, since instances are cached
-  and shared between flags. Initializer arguments are allowed, but all
-  member variables must be derived from initializer arguments only.
-  """
-  __metaclass__ = _ArgumentParserCache
-
-  syntactic_help = ""
-
-  def Parse(self, argument):
-    """Default implementation: always returns its argument unmodified."""
-    return argument
-
-  def Type(self):
-    return 'string'
-
-  def WriteCustomInfoInXMLFormat(self, outfile, indent):
-    pass
-
-
-class ArgumentSerializer:
-  """Base class for generating string representations of a flag value."""
-
-  def Serialize(self, value):
-    return _StrOrUnicode(value)
-
-
-class ListSerializer(ArgumentSerializer):
-
-  def __init__(self, list_sep):
-    self.list_sep = list_sep
-
-  def Serialize(self, value):
-    return self.list_sep.join([_StrOrUnicode(x) for x in value])
-
-
-# Flags validators
-
-
-def RegisterValidator(flag_name,
-                      checker,
-                      message='Flag validation failed',
-                      flag_values=FLAGS):
-  """Adds a constraint, which will be enforced during program execution.
-
-  The constraint is validated when flags are initially parsed, and after each
-  change of the corresponding flag's value.
-  Args:
-    flag_name: string, name of the flag to be checked.
-    checker: method to validate the flag.
-      input  - value of the corresponding flag (string, boolean, etc.
-        This value will be passed to checker by the library). See file's
-        docstring for examples.
-      output - Boolean.
-        Must return True if validator constraint is satisfied.
-        If constraint is not satisfied, it should either return False or
-          raise gflags_validators.Error(desired_error_message).
-    message: error text to be shown to the user if checker returns False.
-      If checker raises gflags_validators.Error, message from the raised
-        Error will be shown.
-    flag_values: FlagValues
-  Raises:
-    AttributeError: if flag_name is not registered as a valid flag name.
-  """
-  flag_values.AddValidator(gflags_validators.SimpleValidator(flag_name,
-                                                            checker,
-                                                            message))
-
-
-def MarkFlagAsRequired(flag_name, flag_values=FLAGS):
-  """Ensure that flag is not None during program execution.
-
-  Registers a flag validator, which will follow usual validator
-  rules.
-  Args:
-    flag_name: string, name of the flag
-    flag_values: FlagValues
-  Raises:
-    AttributeError: if flag_name is not registered as a valid flag name.
-  """
-  RegisterValidator(flag_name,
-                    lambda value: value is not None,
-                    message='Flag --%s must be specified.' % flag_name,
-                    flag_values=flag_values)
-
-
-def _RegisterBoundsValidatorIfNeeded(parser, name, flag_values):
-  """Enforce lower and upper bounds for numeric flags.
-
-  Args:
-    parser: NumericParser (either FloatParser or IntegerParser). Provides lower
-      and upper bounds, and help text to display.
-    name: string, name of the flag
-    flag_values: FlagValues
-  """
-  if parser.lower_bound is not None or parser.upper_bound is not None:
-
-    def Checker(value):
-      if value is not None and parser.IsOutsideBounds(value):
-        message = '%s is not %s' % (value, parser.syntactic_help)
-        raise gflags_validators.Error(message)
-      return True
-
-    RegisterValidator(name,
-                      Checker,
-                      flag_values=flag_values)
-
-
-# The DEFINE functions are explained in mode details in the module doc string.
-
-
-def DEFINE(parser, name, default, help, flag_values=FLAGS, serializer=None,
-           **args):
-  """Registers a generic Flag object.
-
-  NOTE: in the docstrings of all DEFINE* functions, "registers" is short
-  for "creates a new flag and registers it".
-
-  Auxiliary function: clients should use the specialized DEFINE_<type>
-  function instead.
-
-  Args:
-    parser: ArgumentParser that is used to parse the flag arguments.
-    name: A string, the flag name.
-    default: The default value of the flag.
-    help: A help string.
-    flag_values: FlagValues object the flag will be registered with.
-    serializer: ArgumentSerializer that serializes the flag value.
-    args: Dictionary with extra keyword args that are passes to the
-      Flag __init__.
-  """
-  DEFINE_flag(Flag(parser, serializer, name, default, help, **args),
-              flag_values)
-
-
-def DEFINE_flag(flag, flag_values=FLAGS):
-  """Registers a 'Flag' object with a 'FlagValues' object.
-
-  By default, the global FLAGS 'FlagValue' object is used.
-
-  Typical users will use one of the more specialized DEFINE_xxx
-  functions, such as DEFINE_string or DEFINE_integer.  But developers
-  who need to create Flag objects themselves should use this function
-  to register their flags.
-  """
-  # copying the reference to flag_values prevents pychecker warnings
-  fv = flag_values
-  fv[flag.name] = flag
-  # Tell flag_values who's defining the flag.
-  if isinstance(flag_values, FlagValues):
-    # Regarding the above isinstance test: some users pass funny
-    # values of flag_values (e.g., {}) in order to avoid the flag
-    # registration (in the past, there used to be a flag_values ==
-    # FLAGS test here) and redefine flags with the same name (e.g.,
-    # debug).  To avoid breaking their code, we perform the
-    # registration only if flag_values is a real FlagValues object.
-    module, module_name = _GetCallingModuleObjectAndName()
-    flag_values._RegisterFlagByModule(module_name, flag)
-    flag_values._RegisterFlagByModuleId(id(module), flag)
-
-
-def _InternalDeclareKeyFlags(flag_names,
-                             flag_values=FLAGS, key_flag_values=None):
-  """Declares a flag as key for the calling module.
-
-  Internal function.  User code should call DECLARE_key_flag or
-  ADOPT_module_key_flags instead.
-
-  Args:
-    flag_names: A list of strings that are names of already-registered
-      Flag objects.
-    flag_values: A FlagValues object that the flags listed in
-      flag_names have registered with (the value of the flag_values
-      argument from the DEFINE_* calls that defined those flags).
-      This should almost never need to be overridden.
-    key_flag_values: A FlagValues object that (among possibly many
-      other things) keeps track of the key flags for each module.
-      Default None means "same as flag_values".  This should almost
-      never need to be overridden.
-
-  Raises:
-    UnrecognizedFlagError: when we refer to a flag that was not
-      defined yet.
-  """
-  key_flag_values = key_flag_values or flag_values
-
-  module = _GetCallingModule()
-
-  for flag_name in flag_names:
-    if flag_name not in flag_values:
-      raise UnrecognizedFlagError(flag_name)
-    flag = flag_values.FlagDict()[flag_name]
-    key_flag_values._RegisterKeyFlagForModule(module, flag)
-
-
-def DECLARE_key_flag(flag_name, flag_values=FLAGS):
-  """Declares one flag as key to the current module.
-
-  Key flags are flags that are deemed really important for a module.
-  They are important when listing help messages; e.g., if the
-  --helpshort command-line flag is used, then only the key flags of the
-  main module are listed (instead of all flags, as in the case of
-  --help).
-
-  Sample usage:
-
-    gflags.DECLARED_key_flag('flag_1')
-
-  Args:
-    flag_name: A string, the name of an already declared flag.
-      (Redeclaring flags as key, including flags implicitly key
-      because they were declared in this module, is a no-op.)
-    flag_values: A FlagValues object.  This should almost never
-      need to be overridden.
-  """
-  if flag_name in _SPECIAL_FLAGS:
-    # Take care of the special flags, e.g., --flagfile, --undefok.
-    # These flags are defined in _SPECIAL_FLAGS, and are treated
-    # specially during flag parsing, taking precedence over the
-    # user-defined flags.
-    _InternalDeclareKeyFlags([flag_name],
-                             flag_values=_SPECIAL_FLAGS,
-                             key_flag_values=flag_values)
-    return
-  _InternalDeclareKeyFlags([flag_name], flag_values=flag_values)
-
-
-def ADOPT_module_key_flags(module, flag_values=FLAGS):
-  """Declares that all flags key to a module are key to the current module.
-
-  Args:
-    module: A module object.
-    flag_values: A FlagValues object.  This should almost never need
-      to be overridden.
-
-  Raises:
-    FlagsError: When given an argument that is a module name (a
-    string), instead of a module object.
-  """
-  # NOTE(salcianu): an even better test would be if not
-  # isinstance(module, types.ModuleType) but I didn't want to import
-  # types for such a tiny use.
-  if isinstance(module, str):
-    raise FlagsError('Received module name %s; expected a module object.'
-                     % module)
-  _InternalDeclareKeyFlags(
-      [f.name for f in flag_values._GetKeyFlagsForModule(module.__name__)],
-      flag_values=flag_values)
-  # If module is this flag module, take _SPECIAL_FLAGS into account.
-  if module == _GetThisModuleObjectAndName()[0]:
-    _InternalDeclareKeyFlags(
-        # As we associate flags with _GetCallingModuleObjectAndName(), the
-        # special flags defined in this module are incorrectly registered with
-        # a different module.  So, we can't use _GetKeyFlagsForModule.
-        # Instead, we take all flags from _SPECIAL_FLAGS (a private
-        # FlagValues, where no other module should register flags).
-        [f.name for f in _SPECIAL_FLAGS.FlagDict().values()],
-        flag_values=_SPECIAL_FLAGS,
-        key_flag_values=flag_values)
-
-
-#
-# STRING FLAGS
-#
-
-
-def DEFINE_string(name, default, help, flag_values=FLAGS, **args):
-  """Registers a flag whose value can be any string."""
-  parser = ArgumentParser()
-  serializer = ArgumentSerializer()
-  DEFINE(parser, name, default, help, flag_values, serializer, **args)
-
-
-#
-# BOOLEAN FLAGS
-#
-
-
-class BooleanParser(ArgumentParser):
-  """Parser of boolean values."""
-
-  def Convert(self, argument):
-    """Converts the argument to a boolean; raise ValueError on errors."""
-    if type(argument) == str:
-      if argument.lower() in ['true', 't', '1']:
-        return True
-      elif argument.lower() in ['false', 'f', '0']:
-        return False
-
-    bool_argument = bool(argument)
-    if argument == bool_argument:
-      # The argument is a valid boolean (True, False, 0, or 1), and not just
-      # something that always converts to bool (list, string, int, etc.).
-      return bool_argument
-
-    raise ValueError('Non-boolean argument to boolean flag', argument)
-
-  def Parse(self, argument):
-    val = self.Convert(argument)
-    return val
-
-  def Type(self):
-    return 'bool'
-
-
-class BooleanFlag(Flag):
-  """Basic boolean flag.
-
-  Boolean flags do not take any arguments, and their value is either
-  True (1) or False (0).  The false value is specified on the command
-  line by prepending the word 'no' to either the long or the short flag
-  name.
-
-  For example, if a Boolean flag was created whose long name was
-  'update' and whose short name was 'x', then this flag could be
-  explicitly unset through either --noupdate or --nox.
-  """
-
-  def __init__(self, name, default, help, short_name=None, **args):
-    p = BooleanParser()
-    Flag.__init__(self, p, None, name, default, help, short_name, 1, **args)
-    if not self.help: self.help = "a boolean value"
-
-
-def DEFINE_boolean(name, default, help, flag_values=FLAGS, **args):
-  """Registers a boolean flag.
-
-  Such a boolean flag does not take an argument.  If a user wants to
-  specify a false value explicitly, the long option beginning with 'no'
-  must be used: i.e. --noflag
-
-  This flag will have a value of None, True or False.  None is possible
-  if default=None and the user does not specify the flag on the command
-  line.
-  """
-  DEFINE_flag(BooleanFlag(name, default, help, **args), flag_values)
-
-
-# Match C++ API to unconfuse C++ people.
-DEFINE_bool = DEFINE_boolean
-
-
-class HelpFlag(BooleanFlag):
-  """
-  HelpFlag is a special boolean flag that prints usage information and
-  raises a SystemExit exception if it is ever found in the command
-  line arguments.  Note this is called with allow_override=1, so other
-  apps can define their own --help flag, replacing this one, if they want.
-  """
-  def __init__(self):
-    BooleanFlag.__init__(self, "help", 0, "show this help",
-                         short_name="?", allow_override=1)
-  def Parse(self, arg):
-    if arg:
-      doc = sys.modules["__main__"].__doc__
-      flags = str(FLAGS)
-      print doc or ("\nUSAGE: %s [flags]\n" % sys.argv[0])
-      if flags:
-        print "flags:"
-        print flags
-      sys.exit(1)
-class HelpXMLFlag(BooleanFlag):
-  """Similar to HelpFlag, but generates output in XML format."""
-  def __init__(self):
-    BooleanFlag.__init__(self, 'helpxml', False,
-                         'like --help, but generates XML output',
-                         allow_override=1)
-  def Parse(self, arg):
-    if arg:
-      FLAGS.WriteHelpInXMLFormat(sys.stdout)
-      sys.exit(1)
-class HelpshortFlag(BooleanFlag):
-  """
-  HelpshortFlag is a special boolean flag that prints usage
-  information for the "main" module, and rasies a SystemExit exception
-  if it is ever found in the command line arguments.  Note this is
-  called with allow_override=1, so other apps can define their own
-  --helpshort flag, replacing this one, if they want.
-  """
-  def __init__(self):
-    BooleanFlag.__init__(self, "helpshort", 0,
-                         "show usage only for this module", allow_override=1)
-  def Parse(self, arg):
-    if arg:
-      doc = sys.modules["__main__"].__doc__
-      flags = FLAGS.MainModuleHelp()
-      print doc or ("\nUSAGE: %s [flags]\n" % sys.argv[0])
-      if flags:
-        print "flags:"
-        print flags
-      sys.exit(1)
-
-#
-# Numeric parser - base class for Integer and Float parsers
-#
-
-
-class NumericParser(ArgumentParser):
-  """Parser of numeric values.
-
-  Parsed value may be bounded to a given upper and lower bound.
-  """
-
-  def IsOutsideBounds(self, val):
-    return ((self.lower_bound is not None and val < self.lower_bound) or
-            (self.upper_bound is not None and val > self.upper_bound))
-
-  def Parse(self, argument):
-    val = self.Convert(argument)
-    if self.IsOutsideBounds(val):
-      raise ValueError("%s is not %s" % (val, self.syntactic_help))
-    return val
-
-  def WriteCustomInfoInXMLFormat(self, outfile, indent):
-    if self.lower_bound is not None:
-      _WriteSimpleXMLElement(outfile, 'lower_bound', self.lower_bound, indent)
-    if self.upper_bound is not None:
-      _WriteSimpleXMLElement(outfile, 'upper_bound', self.upper_bound, indent)
-
-  def Convert(self, argument):
-    """Default implementation: always returns its argument unmodified."""
-    return argument
-
-# End of Numeric Parser
-
-#
-# FLOAT FLAGS
-#
-
-
-class FloatParser(NumericParser):
-  """Parser of floating point values.
-
-  Parsed value may be bounded to a given upper and lower bound.
-  """
-  number_article = "a"
-  number_name = "number"
-  syntactic_help = " ".join((number_article, number_name))
-
-  def __init__(self, lower_bound=None, upper_bound=None):
-    super(FloatParser, self).__init__()
-    self.lower_bound = lower_bound
-    self.upper_bound = upper_bound
-    sh = self.syntactic_help
-    if lower_bound is not None and upper_bound is not None:
-      sh = ("%s in the range [%s, %s]" % (sh, lower_bound, upper_bound))
-    elif lower_bound == 0:
-      sh = "a non-negative %s" % self.number_name
-    elif upper_bound == 0:
-      sh = "a non-positive %s" % self.number_name
-    elif upper_bound is not None:
-      sh = "%s <= %s" % (self.number_name, upper_bound)
-    elif lower_bound is not None:
-      sh = "%s >= %s" % (self.number_name, lower_bound)
-    self.syntactic_help = sh
-
-  def Convert(self, argument):
-    """Converts argument to a float; raises ValueError on errors."""
-    return float(argument)
-
-  def Type(self):
-    return 'float'
-# End of FloatParser
-
-
-def DEFINE_float(name, default, help, lower_bound=None, upper_bound=None,
-                 flag_values=FLAGS, **args):
-  """Registers a flag whose value must be a float.
-
-  If lower_bound or upper_bound are set, then this flag must be
-  within the given range.
-  """
-  parser = FloatParser(lower_bound, upper_bound)
-  serializer = ArgumentSerializer()
-  DEFINE(parser, name, default, help, flag_values, serializer, **args)
-  _RegisterBoundsValidatorIfNeeded(parser, name, flag_values=flag_values)
-
-#
-# INTEGER FLAGS
-#
-
-
-class IntegerParser(NumericParser):
-  """Parser of an integer value.
-
-  Parsed value may be bounded to a given upper and lower bound.
-  """
-  number_article = "an"
-  number_name = "integer"
-  syntactic_help = " ".join((number_article, number_name))
-
-  def __init__(self, lower_bound=None, upper_bound=None):
-    super(IntegerParser, self).__init__()
-    self.lower_bound = lower_bound
-    self.upper_bound = upper_bound
-    sh = self.syntactic_help
-    if lower_bound is not None and upper_bound is not None:
-      sh = ("%s in the range [%s, %s]" % (sh, lower_bound, upper_bound))
-    elif lower_bound == 1:
-      sh = "a positive %s" % self.number_name
-    elif upper_bound == -1:
-      sh = "a negative %s" % self.number_name
-    elif lower_bound == 0:
-      sh = "a non-negative %s" % self.number_name
-    elif upper_bound == 0:
-      sh = "a non-positive %s" % self.number_name
-    elif upper_bound is not None:
-      sh = "%s <= %s" % (self.number_name, upper_bound)
-    elif lower_bound is not None:
-      sh = "%s >= %s" % (self.number_name, lower_bound)
-    self.syntactic_help = sh
-
-  def Convert(self, argument):
-    __pychecker__ = 'no-returnvalues'
-    if type(argument) == str:
-      base = 10
-      if len(argument) > 2 and argument[0] == "0" and argument[1] == "x":
-        base = 16
-      return int(argument, base)
-    else:
-      return int(argument)
-
-  def Type(self):
-    return 'int'
-
-
-def DEFINE_integer(name, default, help, lower_bound=None, upper_bound=None,
-                   flag_values=FLAGS, **args):
-  """Registers a flag whose value must be an integer.
-
-  If lower_bound, or upper_bound are set, then this flag must be
-  within the given range.
-  """
-  parser = IntegerParser(lower_bound, upper_bound)
-  serializer = ArgumentSerializer()
-  DEFINE(parser, name, default, help, flag_values, serializer, **args)
-  _RegisterBoundsValidatorIfNeeded(parser, name, flag_values=flag_values)
-
-
-#
-# ENUM FLAGS
-#
-
-
-class EnumParser(ArgumentParser):
-  """Parser of a string enum value (a string value from a given set).
-
-  If enum_values (see below) is not specified, any string is allowed.
-  """
-
-  def __init__(self, enum_values=None):
-    super(EnumParser, self).__init__()
-    self.enum_values = enum_values
-
-  def Parse(self, argument):
-    if self.enum_values and argument not in self.enum_values:
-      raise ValueError("value should be one of <%s>" %
-                       "|".join(self.enum_values))
-    return argument
-
-  def Type(self):
-    return 'string enum'
-
-
-class EnumFlag(Flag):
-  """Basic enum flag; its value can be any string from list of enum_values."""
-
-  def __init__(self, name, default, help, enum_values=None,
-               short_name=None, **args):
-    enum_values = enum_values or []
-    p = EnumParser(enum_values)
-    g = ArgumentSerializer()
-    Flag.__init__(self, p, g, name, default, help, short_name, **args)
-    if not self.help: self.help = "an enum string"
-    self.help = "<%s>: %s" % ("|".join(enum_values), self.help)
-
-  def _WriteCustomInfoInXMLFormat(self, outfile, indent):
-    for enum_value in self.parser.enum_values:
-      _WriteSimpleXMLElement(outfile, 'enum_value', enum_value, indent)
-
-
-def DEFINE_enum(name, default, enum_values, help, flag_values=FLAGS,
-                **args):
-  """Registers a flag whose value can be any string from enum_values."""
-  DEFINE_flag(EnumFlag(name, default, help, enum_values, ** args),
-              flag_values)
-
-
-#
-# LIST FLAGS
-#
-
-
-class BaseListParser(ArgumentParser):
-  """Base class for a parser of lists of strings.
-
-  To extend, inherit from this class; from the subclass __init__, call
-
-    BaseListParser.__init__(self, token, name)
-
-  where token is a character used to tokenize, and name is a description
-  of the separator.
-  """
-
-  def __init__(self, token=None, name=None):
-    assert name
-    super(BaseListParser, self).__init__()
-    self._token = token
-    self._name = name
-    self.syntactic_help = "a %s separated list" % self._name
-
-  def Parse(self, argument):
-    if isinstance(argument, list):
-      return argument
-    elif argument == '':
-      return []
-    else:
-      return [s.strip() for s in argument.split(self._token)]
-
-  def Type(self):
-    return '%s separated list of strings' % self._name
-
-
-class ListParser(BaseListParser):
-  """Parser for a comma-separated list of strings."""
-
-  def __init__(self):
-    BaseListParser.__init__(self, ',', 'comma')
-
-  def WriteCustomInfoInXMLFormat(self, outfile, indent):
-    BaseListParser.WriteCustomInfoInXMLFormat(self, outfile, indent)
-    _WriteSimpleXMLElement(outfile, 'list_separator', repr(','), indent)
-
-
-class WhitespaceSeparatedListParser(BaseListParser):
-  """Parser for a whitespace-separated list of strings."""
-
-  def __init__(self):
-    BaseListParser.__init__(self, None, 'whitespace')
-
-  def WriteCustomInfoInXMLFormat(self, outfile, indent):
-    BaseListParser.WriteCustomInfoInXMLFormat(self, outfile, indent)
-    separators = list(string.whitespace)
-    separators.sort()
-    for ws_char in string.whitespace:
-      _WriteSimpleXMLElement(outfile, 'list_separator', repr(ws_char), indent)
-
-
-def DEFINE_list(name, default, help, flag_values=FLAGS, **args):
-  """Registers a flag whose value is a comma-separated list of strings."""
-  parser = ListParser()
-  serializer = ListSerializer(',')
-  DEFINE(parser, name, default, help, flag_values, serializer, **args)
-
-
-def DEFINE_spaceseplist(name, default, help, flag_values=FLAGS, **args):
-  """Registers a flag whose value is a whitespace-separated list of strings.
-
-  Any whitespace can be used as a separator.
-  """
-  parser = WhitespaceSeparatedListParser()
-  serializer = ListSerializer(' ')
-  DEFINE(parser, name, default, help, flag_values, serializer, **args)
-
-
-#
-# MULTI FLAGS
-#
-
-
-class MultiFlag(Flag):
-  """A flag that can appear multiple time on the command-line.
-
-  The value of such a flag is a list that contains the individual values
-  from all the appearances of that flag on the command-line.
-
-  See the __doc__ for Flag for most behavior of this class.  Only
-  differences in behavior are described here:
-
-    * The default value may be either a single value or a list of values.
-      A single value is interpreted as the [value] singleton list.
-
-    * The value of the flag is always a list, even if the option was
-      only supplied once, and even if the default value is a single
-      value
-  """
-
-  def __init__(self, *args, **kwargs):
-    Flag.__init__(self, *args, **kwargs)
-    self.help += ';\n    repeat this option to specify a list of values'
-
-  def Parse(self, arguments):
-    """Parses one or more arguments with the installed parser.
-
-    Args:
-      arguments: a single argument or a list of arguments (typically a
-        list of default values); a single argument is converted
-        internally into a list containing one item.
-    """
-    if not isinstance(arguments, list):
-      # Default value may be a list of values.  Most other arguments
-      # will not be, so convert them into a single-item list to make
-      # processing simpler below.
-      arguments = [arguments]
-
-    if self.present:
-      # keep a backup reference to list of previously supplied option values
-      values = self.value
-    else:
-      # "erase" the defaults with an empty list
-      values = []
-
-    for item in arguments:
-      # have Flag superclass parse argument, overwriting self.value reference
-      Flag.Parse(self, item)  # also increments self.present
-      values.append(self.value)
-
-    # put list of option values back in the 'value' attribute
-    self.value = values
-
-  def Serialize(self):
-    if not self.serializer:
-      raise FlagsError("Serializer not present for flag %s" % self.name)
-    if self.value is None:
-      return ''
-
-    s = ''
-
-    multi_value = self.value
-
-    for self.value in multi_value:
-      if s: s += ' '
-      s += Flag.Serialize(self)
-
-    self.value = multi_value
-
-    return s
-
-  def Type(self):
-    return 'multi ' + self.parser.Type()
-
-
-def DEFINE_multi(parser, serializer, name, default, help, flag_values=FLAGS,
-                 **args):
-  """Registers a generic MultiFlag that parses its args with a given parser.
-
-  Auxiliary function.  Normal users should NOT use it directly.
-
-  Developers who need to create their own 'Parser' classes for options
-  which can appear multiple times can call this module function to
-  register their flags.
-  """
-  DEFINE_flag(MultiFlag(parser, serializer, name, default, help, **args),
-              flag_values)
-
-
-def DEFINE_multistring(name, default, help, flag_values=FLAGS, **args):
-  """Registers a flag whose value can be a list of any strings.
-
-  Use the flag on the command line multiple times to place multiple
-  string values into the list.  The 'default' may be a single string
-  (which will be converted into a single-element list) or a list of
-  strings.
-  """
-  parser = ArgumentParser()
-  serializer = ArgumentSerializer()
-  DEFINE_multi(parser, serializer, name, default, help, flag_values, **args)
-
-
-def DEFINE_multi_int(name, default, help, lower_bound=None, upper_bound=None,
-                     flag_values=FLAGS, **args):
-  """Registers a flag whose value can be a list of arbitrary integers.
-
-  Use the flag on the command line multiple times to place multiple
-  integer values into the list.  The 'default' may be a single integer
-  (which will be converted into a single-element list) or a list of
-  integers.
-  """
-  parser = IntegerParser(lower_bound, upper_bound)
-  serializer = ArgumentSerializer()
-  DEFINE_multi(parser, serializer, name, default, help, flag_values, **args)
-
-
-def DEFINE_multi_float(name, default, help, lower_bound=None, upper_bound=None,
-                       flag_values=FLAGS, **args):
-  """Registers a flag whose value can be a list of arbitrary floats.
-
-  Use the flag on the command line multiple times to place multiple
-  float values into the list.  The 'default' may be a single float
-  (which will be converted into a single-element list) or a list of
-  floats.
-  """
-  parser = FloatParser(lower_bound, upper_bound)
-  serializer = ArgumentSerializer()
-  DEFINE_multi(parser, serializer, name, default, help, flag_values, **args)
-
-
-# Now register the flags that we want to exist in all applications.
-# These are all defined with allow_override=1, so user-apps can use
-# these flagnames for their own purposes, if they want.
-DEFINE_flag(HelpFlag())
-DEFINE_flag(HelpshortFlag())
-DEFINE_flag(HelpXMLFlag())
-
-# Define special flags here so that help may be generated for them.
-# NOTE: Please do NOT use _SPECIAL_FLAGS from outside this module.
-_SPECIAL_FLAGS = FlagValues()
-
-
-DEFINE_string(
-    'flagfile', "",
-    "Insert flag definitions from the given file into the command line.",
-    _SPECIAL_FLAGS)
-
-DEFINE_string(
-    'undefok', "",
-    "comma-separated list of flag names that it is okay to specify "
-    "on the command line even if the program does not define a flag "
-    "with that name.  IMPORTANT: flags in this list that have "
-    "arguments MUST use the --flag=value format.", _SPECIAL_FLAGS)
diff --git a/gflags/gflags_validators.py b/gflags/gflags_validators.py
deleted file mode 100644
index d83058d..0000000
--- a/gflags/gflags_validators.py
+++ /dev/null
@@ -1,187 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2010, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Module to enforce different constraints on flags.
-
-A validator represents an invariant, enforced over a one or more flags.
-See 'FLAGS VALIDATORS' in gflags.py's docstring for a usage manual.
-"""
-
-__author__ = 'olexiy@google.com (Olexiy Oryeshko)'
-
-
-class Error(Exception):
-  """Thrown If validator constraint is not satisfied."""
-
-
-class Validator(object):
-  """Base class for flags validators.
-
-  Users should NOT overload these classes, and use gflags.Register...
-  methods instead.
-  """
-
-  # Used to assign each validator an unique insertion_index
-  validators_count = 0
-
-  def __init__(self, checker, message):
-    """Constructor to create all validators.
-
-    Args:
-      checker: function to verify the constraint.
-        Input of this method varies, see SimpleValidator and
-          DictionaryValidator for a detailed description.
-      message: string, error message to be shown to the user
-    """
-    self.checker = checker
-    self.message = message
-    Validator.validators_count += 1
-    # Used to assert validators in the order they were registered (CL/18694236)
-    self.insertion_index = Validator.validators_count
-
-  def Verify(self, flag_values):
-    """Verify that constraint is satisfied.
-
-    flags library calls this method to verify Validator's constraint.
-    Args:
-      flag_values: gflags.FlagValues, containing all flags
-    Raises:
-      Error: if constraint is not satisfied.
-    """
-    param = self._GetInputToCheckerFunction(flag_values)
-    if not self.checker(param):
-      raise Error(self.message)
-
-  def GetFlagsNames(self):
-    """Return the names of the flags checked by this validator.
-
-    Returns:
-      [string], names of the flags
-    """
-    raise NotImplementedError('This method should be overloaded')
-
-  def PrintFlagsWithValues(self, flag_values):
-    raise NotImplementedError('This method should be overloaded')
-
-  def _GetInputToCheckerFunction(self, flag_values):
-    """Given flag values, construct the input to be given to checker.
-
-    Args:
-      flag_values: gflags.FlagValues, containing all flags.
-    Returns:
-      Return type depends on the specific validator.
-    """
-    raise NotImplementedError('This method should be overloaded')
-
-
-class SimpleValidator(Validator):
-  """Validator behind RegisterValidator() method.
-
-  Validates that a single flag passes its checker function. The checker function
-  takes the flag value and returns True (if value looks fine) or, if flag value
-  is not valid, either returns False or raises an Exception."""
-  def __init__(self, flag_name, checker, message):
-    """Constructor.
-
-    Args:
-      flag_name: string, name of the flag.
-      checker: function to verify the validator.
-        input  - value of the corresponding flag (string, boolean, etc).
-        output - Boolean. Must return True if validator constraint is satisfied.
-          If constraint is not satisfied, it should either return False or
-          raise Error.
-      message: string, error message to be shown to the user if validator's
-        condition is not satisfied
-    """
-    super(SimpleValidator, self).__init__(checker, message)
-    self.flag_name = flag_name
-
-  def GetFlagsNames(self):
-    return [self.flag_name]
-
-  def PrintFlagsWithValues(self, flag_values):
-    return 'flag --%s=%s' % (self.flag_name, flag_values[self.flag_name].value)
-
-  def _GetInputToCheckerFunction(self, flag_values):
-    """Given flag values, construct the input to be given to checker.
-
-    Args:
-      flag_values: gflags.FlagValues
-    Returns:
-      value of the corresponding flag.
-    """
-    return flag_values[self.flag_name].value
-
-
-class DictionaryValidator(Validator):
-  """Validator behind RegisterDictionaryValidator method.
-
-  Validates that flag values pass their common checker function. The checker
-  function takes flag values and returns True (if values look fine) or,
-  if values are not valid, either returns False or raises an Exception.
-  """
-  def __init__(self, flag_names, checker, message):
-    """Constructor.
-
-    Args:
-      flag_names: [string], containing names of the flags used by checker.
-      checker: function to verify the validator.
-        input  - dictionary, with keys() being flag_names, and value for each
-          key being the value of the corresponding flag (string, boolean, etc).
-        output - Boolean. Must return True if validator constraint is satisfied.
-          If constraint is not satisfied, it should either return False or
-          raise Error.
-      message: string, error message to be shown to the user if validator's
-        condition is not satisfied
-    """
-    super(DictionaryValidator, self).__init__(checker, message)
-    self.flag_names = flag_names
-
-  def _GetInputToCheckerFunction(self, flag_values):
-    """Given flag values, construct the input to be given to checker.
-
-    Args:
-      flag_values: gflags.FlagValues
-    Returns:
-      dictionary, with keys() being self.lag_names, and value for each key
-        being the value of the corresponding flag (string, boolean, etc).
-    """
-    return dict([key, flag_values[key].value] for key in self.flag_names)
-
-  def PrintFlagsWithValues(self, flag_values):
-    prefix = 'flags '
-    flags_with_values = []
-    for key in self.flag_names:
-      flags_with_values.append('%s=%s' % (key, flag_values[key].value))
-    return prefix + ', '.join(flags_with_values)
-
-  def GetFlagsNames(self):
-    return self.flag_names
diff --git a/google/appengine/api/appinfo.py b/google/appengine/api/appinfo.py
index 2392709..18732d7 100644
--- a/google/appengine/api/appinfo.py
+++ b/google/appengine/api/appinfo.py
@@ -1269,6 +1269,32 @@
   VALUE_VALIDATOR = str
 
 
+def NormalizeVmSettings(appyaml):
+  """Normalize Vm settings.
+
+  Args:
+    appyaml: AppInfoExternal instance.
+
+  Returns:
+    Normalized app yaml.
+  """
+
+
+
+
+
+
+  if appyaml.vm:
+    if not appyaml.vm_settings:
+      appyaml.vm_settings = VmSettings()
+    if 'vm_runtime' not in appyaml.vm_settings:
+
+
+      appyaml.vm_settings['vm_runtime'] = appyaml.runtime
+      appyaml.runtime = 'vm'
+  return appyaml
+
+
 class AppInclude(validation.Validated):
   """Class representing the contents of an included app.yaml file.
 
@@ -1284,6 +1310,7 @@
       HANDLERS: validation.Optional(validation.Repeated(URLMap)),
       ADMIN_CONSOLE: validation.Optional(AdminConsole),
       MANUAL_SCALING: validation.Optional(ManualScaling),
+      VM: validation.Optional(bool),
       VM_SETTINGS: validation.Optional(VmSettings),
 
 
@@ -1327,6 +1354,26 @@
     return appinclude_one
 
   @classmethod
+  def _CommonMergeOps(cls, one, two):
+    """This function performs common merge operations."""
+
+    AppInclude.MergeManualScaling(one, two)
+
+
+    one.admin_console = AdminConsole.Merge(one.admin_console,
+                                           two.admin_console)
+
+
+
+    one.vm = two.vm or one.vm
+
+
+    one.vm_settings = VmSettings.Merge(one.vm_settings,
+                                       two.vm_settings)
+
+    return one
+
+  @classmethod
   def MergeAppYamlAppInclude(cls, appyaml, appinclude):
     """This function merges an app.yaml file with referenced builtins/includes.
     """
@@ -1354,18 +1401,8 @@
 
       appyaml.handlers.extend(tail)
 
-
-    AppInclude.MergeManualScaling(appyaml, appinclude)
-
-
-    appyaml.admin_console = AdminConsole.Merge(appyaml.admin_console,
-                                               appinclude.admin_console)
-
-
-    appyaml.vm_settings = VmSettings.Merge(appyaml.vm_settings,
-                                           appinclude.vm_settings)
-
-    return appyaml
+    appyaml = cls._CommonMergeOps(appyaml, appinclude)
+    return NormalizeVmSettings(appyaml)
 
   @classmethod
   def MergeAppIncludes(cls, appinclude_one, appinclude_two):
@@ -1399,22 +1436,7 @@
     else:
       appinclude_one.handlers = appinclude_two.handlers
 
-
-    appinclude_one = AppInclude.MergeManualScaling(
-        appinclude_one,
-        appinclude_two)
-
-
-    appinclude_one.admin_console = (
-        AdminConsole.Merge(appinclude_one.admin_console,
-                           appinclude_two.admin_console))
-
-
-    appinclude_one.vm_settings = VmSettings.Merge(
-        appinclude_one.vm_settings,
-        appinclude_two.vm_settings)
-
-    return appinclude_one
+    return cls._CommonMergeOps(appinclude_one, appinclude_two)
 
 
 class AppInfoExternal(validation.Validated):
@@ -1448,7 +1470,7 @@
   ATTRIBUTES = {
 
 
-      APPLICATION: APPLICATION_RE_STRING,
+      APPLICATION: validation.Optional(APPLICATION_RE_STRING),
       MODULE: validation.Optional(MODULE_ID_RE_STRING),
       VERSION: validation.Optional(MODULE_VERSION_ID_RE_STRING),
       RUNTIME: RUNTIME_RE_STRING,
@@ -1726,22 +1748,7 @@
   if appyaml.builtins:
     BuiltinHandler.Validate(appyaml.builtins, appyaml.runtime)
 
-
-
-
-
-
-
-  if appyaml.vm:
-    if not appyaml.vm_settings:
-      appyaml.vm_settings = VmSettings()
-    if not 'vm_runtime' in appyaml.vm_settings:
-
-
-      appyaml.vm_settings['vm_runtime'] = appyaml.runtime
-      appyaml.runtime = 'vm'
-
-  return appyaml
+  return NormalizeVmSettings(appyaml)
 
 
 class AppInfoSummary(validation.Validated):
diff --git a/google/appengine/api/appinfo_includes.py b/google/appengine/api/appinfo_includes.py
index 5aea927..817f78d 100644
--- a/google/appengine/api/appinfo_includes.py
+++ b/google/appengine/api/appinfo_includes.py
@@ -109,7 +109,7 @@
 
   runtime_for_including = appyaml.runtime
   if runtime_for_including == 'vm':
-    runtime_for_including = appyaml.vm_settings['vm_runtime']
+    runtime_for_including = appyaml.vm_settings.get('vm_runtime', 'python27')
   aggregate_appinclude, include_paths = (
       _ResolveIncludes(appinfo_path,
                        appinfo.AppInclude(builtins=appyaml.builtins,
diff --git a/google/appengine/api/mail.py b/google/appengine/api/mail.py
index f046215..c5020f8 100644
--- a/google/appengine/api/mail.py
+++ b/google/appengine/api/mail.py
@@ -460,6 +460,22 @@
   return mime_type
 
 
+def _GuessCharset(text):
+  """Guess the charset of a text.
+
+  Args:
+    text: a string (str) that is either a us-ascii string or a unicode that was
+        encoded in utf-8.
+  Returns:
+    Charset needed by the string, either 'us-ascii' or 'utf-8'.
+  """
+  try:
+    text.decode('us-ascii')
+    return 'us-ascii'
+  except UnicodeDecodeError:
+    return 'utf-8'
+
+
 def mail_message_to_mime_message(protocol_message):
   """Generate a MIMEMultitype message from protocol buffer.
 
@@ -481,10 +497,13 @@
   """
   parts = []
   if protocol_message.has_textbody():
-    parts.append(MIMEText.MIMEText(protocol_message.textbody()))
+    parts.append(MIMEText.MIMEText(
+        protocol_message.textbody(),
+        _charset=_GuessCharset(protocol_message.textbody())))
   if protocol_message.has_htmlbody():
-    parts.append(MIMEText.MIMEText(protocol_message.htmlbody(),
-                                   _subtype='html'))
+    parts.append(MIMEText.MIMEText(
+        protocol_message.htmlbody(), _subtype='html',
+        _charset=_GuessCharset(protocol_message.htmlbody())))
 
   if len(parts) == 1:
 
diff --git a/google/appengine/api/search/search.py b/google/appengine/api/search/search.py
index a7adb64..85f45d9 100644
--- a/google/appengine/api/search/search.py
+++ b/google/appengine/api/search/search.py
@@ -937,7 +937,7 @@
 class DateField(Field):
   """A Field that has a date value.
 
-  The following example shows an date field named creation_date:
+  The following example shows a date field named creation_date:
     DateField(name='creation_date', value=datetime.date(2011, 03, 11))
   """
 
diff --git a/google/appengine/api/search/simple_search_stub.py b/google/appengine/api/search/simple_search_stub.py
index 9bb25e8..7377b35 100644
--- a/google/appengine/api/search/simple_search_stub.py
+++ b/google/appengine/api/search/simple_search_stub.py
@@ -932,20 +932,27 @@
 
 
     if offset < len(results):
-      position_range = range(
-          offset,
-          min(offset + params.limit(), len(results)))
+
+
+      limit = offset + params.limit()
+      if limit >= len(results):
+
+
+        range_end = len(results)
+      else:
+
+
+
+        range_end = limit
+        if params.cursor_type() == search_service_pb.SearchParams.SINGLE:
+          response.set_cursor(results[range_end - 1].document.id())
+      result_range = range(offset, range_end)
     else:
-      position_range = range(0)
-    field_spec = None
+      result_range = range(0)
     field_names = params.field_spec().name_list()
-    self._FillSearchResponse(results, position_range, params.cursor_type(),
+    self._FillSearchResponse(results, result_range, params.cursor_type(),
                              _ScoreRequested(params), response, field_names,
                              params.keys_only())
-    if (params.cursor_type() == search_service_pb.SearchParams.SINGLE and
-        len(position_range)):
-      response.set_cursor(
-          results[position_range[len(position_range) - 1]].document.id())
 
     response.mutable_status().set_code(search_service_pb.SearchServiceError.OK)
 
diff --git a/google/appengine/api/search/stub/document_matcher.py b/google/appengine/api/search/stub/document_matcher.py
index 4bd2ddc..a1b03ff 100644
--- a/google/appengine/api/search/stub/document_matcher.py
+++ b/google/appengine/api/search/stub/document_matcher.py
@@ -34,6 +34,8 @@
 from google.appengine.api.search.stub import tokens
 
 
+MSEC_PER_DAY = 86400000
+
 class ExpressionTreeException(Exception):
   """An error occurred while analyzing/translating the expression parse tree."""
 
@@ -163,7 +165,7 @@
 
 
     return self._MatchComparableField(
-        field, match, search_util.DeserializeDate, operator, document)
+        field, match, _DateStrToDays, operator, document)
 
 
 
@@ -199,7 +201,7 @@
 
     if match.getType() == QueryParser.VALUE:
       try:
-        match_val = cast_to_type(query_parser.GetQueryNodeText(match))
+        match_val = cast_to_type(query_parser.GetPhraseQueryNodeText(match))
       except ValueError:
         return False
     else:
@@ -297,3 +299,9 @@
 
   def FilterDocuments(self, documents):
     return (doc for doc in documents if self.Matches(doc))
+
+
+def _DateStrToDays(date_str):
+
+  date = search_util.DeserializeDate(date_str)
+  return search_util.EpochTime(date) / MSEC_PER_DAY
diff --git a/google/appengine/api/search/stub/expression_evaluator.py b/google/appengine/api/search/stub/expression_evaluator.py
index ad9180e..44c6e2c 100644
--- a/google/appengine/api/search/stub/expression_evaluator.py
+++ b/google/appengine/api/search/stub/expression_evaluator.py
@@ -14,8 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
 """Expression evaluator for Full Text Search API stub.
 
 An associated ExpressionEvaluator object is created for every scored document in
@@ -46,9 +44,6 @@
 
 
 
-
-
-
 import logging
 
 
@@ -353,6 +348,8 @@
 
     name = expression.name()
     result = self.ValueOf(expression.expression())
+    if isinstance(result, unicode):
+      result = result.encode('utf-8')
     if result != None:
       self._doc.expressions[name] = result
 
diff --git a/google/appengine/datastore/datastore_rpc.py b/google/appengine/datastore/datastore_rpc.py
index 7248b54..c60f60f 100644
--- a/google/appengine/datastore/datastore_rpc.py
+++ b/google/appengine/datastore/datastore_rpc.py
@@ -67,7 +67,7 @@
 
 from google.appengine.api.app_identity import app_identity
 from google.appengine.datastore import datastore_pb
-from google.appengine.datastore import datastore_v4a_pb
+from google.appengine.datastore import datastore_v4_pb
 from google.appengine.datastore import entity_v4_pb
 from google.appengine.runtime import apiproxy_errors
 
@@ -1908,9 +1908,9 @@
     rpcs = []
     pbsgen = self._generate_pb_lists(keys_by_idkey, 0, max_count, None, config)
     for pbs, _ in pbsgen:
-      req = datastore_v4a_pb.AllocateIdsRequest()
+      req = datastore_v4_pb.AllocateIdsRequest()
       req.reserve_list().extend([self.__to_v4_key(key) for key in pbs])
-      resp = datastore_v4a_pb.AllocateIdsResponse()
+      resp = datastore_v4_pb.AllocateIdsResponse()
       rpcs.append(self.make_rpc_call(config, 'AllocateIds', req, resp,
                                      self.__reserve_keys_hook, extra_hook,
                                      'datastore_v4'))
diff --git a/google/appengine/datastore/datastore_v4a_pb.py b/google/appengine/datastore/datastore_v4_pb.py
similarity index 87%
rename from google/appengine/datastore/datastore_v4a_pb.py
rename to google/appengine/datastore/datastore_v4_pb.py
index 293bcc0..b667c68 100644
--- a/google/appengine/datastore/datastore_v4a_pb.py
+++ b/google/appengine/datastore/datastore_v4_pb.py
@@ -53,8 +53,6 @@
 
 from google.appengine.datastore.entity_v4_pb import *
 import google.appengine.datastore.entity_v4_pb
-from google.appengine.datastore.entity_pb import *
-import google.appengine.datastore.entity_pb
 class Error(ProtocolBuffer.ProtocolMessage):
 
 
@@ -179,7 +177,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Error'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcnN6CUVycm9yQ29kZYsBkgELQkFEX1JFUVVFU1SYAQGMAYsBkgEWQ09OQ1VSUkVOVF9UUkFOU0FDVElPTpgBAowBiwGSAQ5JTlRFUk5BTF9FUlJPUpgBA4wBiwGSAQpORUVEX0lOREVYmAEEjAGLAZIBB1RJTUVPVVSYAQWMAYsBkgERUEVSTUlTU0lPTl9ERU5JRUSYAQaMAYsBkgEOQklHVEFCTEVfRVJST1KYAQeMAYsBkgEcQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlOR5gBCIwBiwGSARNDQVBBQklMSVRZX0RJU0FCTEVEmAEJjAGLAZIBFVRSWV9BTFRFUk5BVEVfQkFDS0VORJgBCowBiwGSARFTQUZFX1RJTUVfVE9PX09MRJgBC4wBdLoB4ysKKGFwcGhvc3RpbmcvZGF0YXN0b3JlL2RhdGFzdG9yZV92NGEucHJvdG8SF2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0GiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8aIHN0b3JhZ2Uvb25lc3RvcmUvdjMvZW50aXR5LnByb3RvIosCCgVFcnJvciKBAgoJRXJyb3JDb2RlEg8KC0JBRF9SRVFVRVNUEAESGgoWQ09OQ1VSUkVOVF9UUkFOU0FDVElPThACEhIKDklOVEVSTkFMX0VSUk9SEAMSDgoKTkVFRF9JTkRFWBAEEgsKB1RJTUVPVVQQBRIVChFQRVJNSVNTSU9OX0RFTklFRBAGEhIKDkJJR1RBQkxFX0VSUk9SEAcSIAocQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlORxAIEhcKE0NBUEFCSUxJVFlfRElTQUJMRUQQCRIZChVUUllfQUxURVJOQVRFX0JBQ0tFTkQQChIVChFTQUZFX1RJTUVfVE9PX09MRBALIpMCCghNdXRhdGlvbhIvCgZ1cHNlcnQYASADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSLwoGdXBkYXRlGAIgAygLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Ei8KBmluc2VydBgDIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRI3Cg5pbnNlcnRfYXV0b19pZBgEIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRIsCgZkZWxldGUYBSADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSDQoFZm9yY2UYBiABKAgiYQoOTXV0YXRpb25SZXN1bHQSFQoNaW5kZXhfdXBkYXRlcxgBIAIoBRI4ChJpbnNlcnRfYXV0b19pZF9rZXkYAiADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkidQoMRW50aXR5UmVzdWx0Ei8KBmVudGl0eRgBIAIoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eSI0CgpSZXN1bHRUeXBlEggKBEZVTEwQARIOCgpQUk9KRUNUSU9OEAISDAoIS0VZX09OTFkQAyLxAgoFUXVlcnkSPwoKcHJvamVjdGlvbhgCIAMoCzIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbhI1CgRraW5kGAMgAygLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2luZEV4cHJlc3Npb24SLwoGZmlsdGVyGAQgASgLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudj
QuRmlsdGVyEjUKBW9yZGVyGAUgAygLMiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlPcmRlchI8Cghncm91cF9ieRgGIAMoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEhQKDHN0YXJ0X2N1cnNvchgHIAEoDBISCgplbmRfY3Vyc29yGAggASgMEhEKBm9mZnNldBgKIAEoBToBMBINCgVsaW1pdBgLIAEoBSIeCg5LaW5kRXhwcmVzc2lvbhIMCgRuYW1lGAEgAigJIiEKEVByb3BlcnR5UmVmZXJlbmNlEgwKBG5hbWUYAiACKAki0wEKElByb3BlcnR5RXhwcmVzc2lvbhI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEl0KFGFnZ3JlZ2F0aW9uX2Z1bmN0aW9uGAIgASgOMj8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlFeHByZXNzaW9uLkFnZ3JlZ2F0aW9uRnVuY3Rpb24iIAoTQWdncmVnYXRpb25GdW5jdGlvbhIJCgVGSVJTVBABIskBCg1Qcm9wZXJ0eU9yZGVyEjwKCHByb3BlcnR5GAEgAigLMiouYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2USTgoJZGlyZWN0aW9uGAIgASgOMjAuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlPcmRlci5EaXJlY3Rpb246CUFTQ0VORElORyIqCglEaXJlY3Rpb24SDQoJQVNDRU5ESU5HEAESDgoKREVTQ0VORElORxACIo4BCgZGaWx0ZXISQgoQY29tcG9zaXRlX2ZpbHRlchgBIAEoCzIoLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlchJACg9wcm9wZXJ0eV9maWx0ZXIYAiABKAsyJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlciKcAQoPQ29tcG9zaXRlRmlsdGVyEkMKCG9wZXJhdG9yGAEgAigOMjEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tcG9zaXRlRmlsdGVyLk9wZXJhdG9yEi8KBmZpbHRlchgCIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlciITCghPcGVyYXRvchIHCgNBTkQQASK+AgoOUHJvcGVydHlGaWx0ZXISPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJCCghvcGVyYXRvchgCIAIoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5RmlsdGVyLk9wZXJhdG9yEi0KBXZhbHVlGAMgAigLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUiewoIT3BlcmF0b3ISDQoJTEVTU19USEFOEAESFgoSTEVTU19USEFOX09SX0VRVUFMEAISEAoMR1JFQVRFUl9USEFOEAMSGQoVR1JFQVRFUl9USEFOX09SX0VRVUFMEAQSCQoFRVFVQUwQBRIQCgxIQVNfQU5DRVNUT1IQCyKwAQoIR3FsUXVlcnkSFAoMcXVlcnlfc3RyaW5nGAEgAigJEhwKDWFsbG93X2xpdGVyYWwYAiABKAg6BWZhbHNlEjYKCG5hbWVfYXJnGAMgAygLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuR3FsUXVlcnlBcmcSOAoKbnVtYmVyX2FyZxgEIAMoCzIkLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnIkoKC0dxbFF1ZX
J5QXJnEgwKBG5hbWUYASABKAkSLQoFdmFsdWUYAiACKAsyHi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZSL3AgoQUXVlcnlSZXN1bHRCYXRjaBJMChJlbnRpdHlfcmVzdWx0X3R5cGUYASACKA4yMC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQuUmVzdWx0VHlwZRI8Cg1lbnRpdHlfcmVzdWx0GAIgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0EhIKCmVuZF9jdXJzb3IYBCABKAwSTwoMbW9yZV9yZXN1bHRzGAUgAigOMjkuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaC5Nb3JlUmVzdWx0c1R5cGUSGgoPc2tpcHBlZF9yZXN1bHRzGAYgASgFOgEwIlYKD01vcmVSZXN1bHRzVHlwZRIQCgxOT1RfRklOSVNIRUQQARIcChhNT1JFX1JFU1VMVFNfQUZURVJfTElNSVQQAhITCg9OT19NT1JFX1JFU1VMVFMQAyK1AQoLUmVhZE9wdGlvbnMSVwoQcmVhZF9jb25zaXN0ZW5jeRgBIAEoDjI0LmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJlYWRPcHRpb25zLlJlYWRDb25zaXN0ZW5jeToHREVGQVVMVBITCgt0cmFuc2FjdGlvbhgCIAEoDCI4Cg9SZWFkQ29uc2lzdGVuY3kSCwoHREVGQVVMVBAAEgoKBlNUUk9ORxABEgwKCEVWRU5UVUFMEAIicwoKR2V0UmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxIpCgNrZXkYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkiqwEKC0dldFJlc3BvbnNlEjQKBWZvdW5kGAEgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0EjYKB21pc3NpbmcYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSLgoIZGVmZXJyZWQYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXki9QEKD1J1blF1ZXJ5UmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxI6CgxwYXJ0aXRpb25faWQYAiABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZBItCgVxdWVyeRgDIAIoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5Eh0KFW1pbl9zYWZlX3RpbWVfc2Vjb25kcxgEIAEoAxIcChRzdWdnZXN0ZWRfYmF0Y2hfc2l6ZRgFIAEoBSKWAQoQUnVuUXVlcnlSZXNwb25zZRI4CgViYXRjaBgBIAIoCzIpLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2gSFAoMcXVlcnlfaGFuZGxlGAIgASgMEjIKBWluZGV4GAMgAygLMiMuc3RvcmFnZV9vbmVzdG9yZV92My5Db21wb3NpdGVJbmRleCIsChRDb250aW51ZVF1ZXJ5UmVxdWVzdBIUCgxxdWVyeV9oYW5kbGUYASACKAwiUQoVQ29udGludWVRdWVyeVJlc3BvbnNlEjgKBWJhdGNoGAEgAigLMikuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaCJTChdCZWdpblRyYW5zYWN0aW9uUmVxdWVzdBIaCgtjcm9zc19ncm91cBgBIAEoCDoFZm
Fsc2USHAoNY3Jvc3NfcmVxdWVzdBgCIAEoCDoFZmFsc2UiLwoYQmVnaW5UcmFuc2FjdGlvblJlc3BvbnNlEhMKC3RyYW5zYWN0aW9uGAEgAigMIiYKD1JvbGxiYWNrUmVxdWVzdBITCgt0cmFuc2FjdGlvbhgBIAIoDCISChBSb2xsYmFja1Jlc3BvbnNlIlkKDUNvbW1pdFJlcXVlc3QSEwoLdHJhbnNhY3Rpb24YASACKAwSMwoIbXV0YXRpb24YAiABKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbiJSCg5Db21taXRSZXNwb25zZRJACg9tdXRhdGlvbl9yZXN1bHQYASABKAsyJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvblJlc3VsdCJDCgxXcml0ZVJlcXVlc3QSMwoIbXV0YXRpb24YASACKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbiJRCg1Xcml0ZVJlc3BvbnNlEkAKD211dGF0aW9uX3Jlc3VsdBgBIAIoCzInLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0InMKEkFsbG9jYXRlSWRzUmVxdWVzdBIuCghhbGxvY2F0ZRgBIAMoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRItCgdyZXNlcnZlGAIgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IkYKE0FsbG9jYXRlSWRzUmVzcG9uc2USLwoJYWxsb2NhdGVkGAEgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5Mr4GChJEYXRhc3RvcmVWNFNlcnZpY2USeQoQQmVnaW5UcmFuc2FjdGlvbhIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJlZ2luVHJhbnNhY3Rpb25SZXF1ZXN0GjEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQmVnaW5UcmFuc2FjdGlvblJlc3BvbnNlIgASYQoIUm9sbGJhY2sSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1JlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1Jlc3BvbnNlIgASWwoGQ29tbWl0EiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlIgASWAoFV3JpdGUSJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlcXVlc3QaJi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlc3BvbnNlIgASYQoIUnVuUXVlcnkSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlc3BvbnNlIgAScAoNQ29udGludWVRdWVyeRItLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbnRpbnVlUXVlcnlSZXF1ZXN0Gi4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29udGludWVRdWVyeVJlc3BvbnNlIgASUgoDR2V0EiMuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuR2V0UmVxdWVzdBokLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdldFJlc3BvbnNlIgASagoLQWxsb2NhdGVJZHMSKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1JlcXVlc3QaLC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1Jlc3BvbnNlIgBCIQofY29tLm
dvb2dsZS5hcHBob3N0aW5nLmRhdGFzdG9yZQ=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9yc3oJRXJyb3JDb2RliwGSAQtCQURfUkVRVUVTVJgBAYwBiwGSARZDT05DVVJSRU5UX1RSQU5TQUNUSU9OmAECjAGLAZIBDklOVEVSTkFMX0VSUk9SmAEDjAGLAZIBCk5FRURfSU5ERViYAQSMAYsBkgEHVElNRU9VVJgBBYwBiwGSARFQRVJNSVNTSU9OX0RFTklFRJgBBowBiwGSAQ5CSUdUQUJMRV9FUlJPUpgBB4wBiwGSARxDT01NSVRURURfQlVUX1NUSUxMX0FQUExZSU5HmAEIjAGLAZIBE0NBUEFCSUxJVFlfRElTQUJMRUSYAQmMAYsBkgEVVFJZX0FMVEVSTkFURV9CQUNLRU5EmAEKjAGLAZIBEVNBRkVfVElNRV9UT09fT0xEmAELjAF0ugHlLAonYXBwaG9zdGluZy9kYXRhc3RvcmUvZGF0YXN0b3JlX3Y0LnByb3RvEhdhcHBob3N0aW5nLmRhdGFzdG9yZS52NBokYXBwaG9zdGluZy9kYXRhc3RvcmUvZW50aXR5X3Y0LnByb3RvIosCCgVFcnJvciKBAgoJRXJyb3JDb2RlEg8KC0JBRF9SRVFVRVNUEAESGgoWQ09OQ1VSUkVOVF9UUkFOU0FDVElPThACEhIKDklOVEVSTkFMX0VSUk9SEAMSDgoKTkVFRF9JTkRFWBAEEgsKB1RJTUVPVVQQBRIVChFQRVJNSVNTSU9OX0RFTklFRBAGEhIKDkJJR1RBQkxFX0VSUk9SEAcSIAocQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlORxAIEhcKE0NBUEFCSUxJVFlfRElTQUJMRUQQCRIZChVUUllfQUxURVJOQVRFX0JBQ0tFTkQQChIVChFTQUZFX1RJTUVfVE9PX09MRBALIpMCCghNdXRhdGlvbhIvCgZ1cHNlcnQYASADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSLwoGdXBkYXRlGAIgAygLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Ei8KBmluc2VydBgDIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRI3Cg5pbnNlcnRfYXV0b19pZBgEIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRIsCgZkZWxldGUYBSADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSDQoFZm9yY2UYBiABKAgiYQoOTXV0YXRpb25SZXN1bHQSFQoNaW5kZXhfdXBkYXRlcxgBIAIoBRI4ChJpbnNlcnRfYXV0b19pZF9rZXkYAiADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkidQoMRW50aXR5UmVzdWx0Ei8KBmVudGl0eRgBIAIoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eSI0CgpSZXN1bHRUeXBlEggKBEZVTEwQARIOCgpQUk9KRUNUSU9OEAISDAoIS0VZX09OTFkQAyLxAgoFUXVlcnkSPwoKcHJvamVjdGlvbhgCIAMoCzIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbhI1CgRraW5kGAMgAygLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2luZEV4cHJlc3Npb24SLwoGZmlsdGVyGAQgASgLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRmlsdGVyEjUKBW9yZGVyGAUgAygLMiYuYXBwaG9zdGluZy
5kYXRhc3RvcmUudjQuUHJvcGVydHlPcmRlchI8Cghncm91cF9ieRgGIAMoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEhQKDHN0YXJ0X2N1cnNvchgHIAEoDBISCgplbmRfY3Vyc29yGAggASgMEhEKBm9mZnNldBgKIAEoBToBMBINCgVsaW1pdBgLIAEoBSIeCg5LaW5kRXhwcmVzc2lvbhIMCgRuYW1lGAEgAigJIiEKEVByb3BlcnR5UmVmZXJlbmNlEgwKBG5hbWUYAiACKAki0wEKElByb3BlcnR5RXhwcmVzc2lvbhI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEl0KFGFnZ3JlZ2F0aW9uX2Z1bmN0aW9uGAIgASgOMj8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlFeHByZXNzaW9uLkFnZ3JlZ2F0aW9uRnVuY3Rpb24iIAoTQWdncmVnYXRpb25GdW5jdGlvbhIJCgVGSVJTVBABIskBCg1Qcm9wZXJ0eU9yZGVyEjwKCHByb3BlcnR5GAEgAigLMiouYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2USTgoJZGlyZWN0aW9uGAIgASgOMjAuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlPcmRlci5EaXJlY3Rpb246CUFTQ0VORElORyIqCglEaXJlY3Rpb24SDQoJQVNDRU5ESU5HEAESDgoKREVTQ0VORElORxACIo4BCgZGaWx0ZXISQgoQY29tcG9zaXRlX2ZpbHRlchgBIAEoCzIoLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlchJACg9wcm9wZXJ0eV9maWx0ZXIYAiABKAsyJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlciKcAQoPQ29tcG9zaXRlRmlsdGVyEkMKCG9wZXJhdG9yGAEgAigOMjEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tcG9zaXRlRmlsdGVyLk9wZXJhdG9yEi8KBmZpbHRlchgCIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlciITCghPcGVyYXRvchIHCgNBTkQQASK+AgoOUHJvcGVydHlGaWx0ZXISPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJCCghvcGVyYXRvchgCIAIoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5RmlsdGVyLk9wZXJhdG9yEi0KBXZhbHVlGAMgAigLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUiewoIT3BlcmF0b3ISDQoJTEVTU19USEFOEAESFgoSTEVTU19USEFOX09SX0VRVUFMEAISEAoMR1JFQVRFUl9USEFOEAMSGQoVR1JFQVRFUl9USEFOX09SX0VRVUFMEAQSCQoFRVFVQUwQBRIQCgxIQVNfQU5DRVNUT1IQCyKwAQoIR3FsUXVlcnkSFAoMcXVlcnlfc3RyaW5nGAEgAigJEhwKDWFsbG93X2xpdGVyYWwYAiABKAg6BWZhbHNlEjYKCG5hbWVfYXJnGAMgAygLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuR3FsUXVlcnlBcmcSOAoKbnVtYmVyX2FyZxgEIAMoCzIkLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnIloKC0dxbFF1ZXJ5QXJnEgwKBG5hbWUYASABKAkSLQoFdmFsdWUYAiABKAsyHi
5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZRIOCgZjdXJzb3IYAyABKAwi9wIKEFF1ZXJ5UmVzdWx0QmF0Y2gSTAoSZW50aXR5X3Jlc3VsdF90eXBlGAEgAigOMjAuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0LlJlc3VsdFR5cGUSPAoNZW50aXR5X3Jlc3VsdBgCIAMoCzIlLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdBISCgplbmRfY3Vyc29yGAQgASgMEk8KDG1vcmVfcmVzdWx0cxgFIAIoDjI5LmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2guTW9yZVJlc3VsdHNUeXBlEhoKD3NraXBwZWRfcmVzdWx0cxgGIAEoBToBMCJWCg9Nb3JlUmVzdWx0c1R5cGUSEAoMTk9UX0ZJTklTSEVEEAESHAoYTU9SRV9SRVNVTFRTX0FGVEVSX0xJTUlUEAISEwoPTk9fTU9SRV9SRVNVTFRTEAMitQEKC1JlYWRPcHRpb25zElcKEHJlYWRfY29uc2lzdGVuY3kYASABKA4yNC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucy5SZWFkQ29uc2lzdGVuY3k6B0RFRkFVTFQSEwoLdHJhbnNhY3Rpb24YAiABKAwiOAoPUmVhZENvbnNpc3RlbmN5EgsKB0RFRkFVTFQQABIKCgZTVFJPTkcQARIMCghFVkVOVFVBTBACInYKDUxvb2t1cFJlcXVlc3QSOgoMcmVhZF9vcHRpb25zGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnMSKQoDa2V5GAMgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5Iq4BCg5Mb29rdXBSZXNwb25zZRI0CgVmb3VuZBgBIAMoCzIlLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdBI2CgdtaXNzaW5nGAIgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0Ei4KCGRlZmVycmVkGAMgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IqsCCg9SdW5RdWVyeVJlcXVlc3QSOgoMcmVhZF9vcHRpb25zGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnMSOgoMcGFydGl0aW9uX2lkGAIgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQSLQoFcXVlcnkYAyABKAsyHi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeRI0CglncWxfcXVlcnkYByABKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeRIdChVtaW5fc2FmZV90aW1lX3NlY29uZHMYBCABKAMSHAoUc3VnZ2VzdGVkX2JhdGNoX3NpemUYBSABKAUiYgoQUnVuUXVlcnlSZXNwb25zZRI4CgViYXRjaBgBIAIoCzIpLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2gSFAoMcXVlcnlfaGFuZGxlGAIgASgMIiwKFENvbnRpbnVlUXVlcnlSZXF1ZXN0EhQKDHF1ZXJ5X2hhbmRsZRgBIAIoDCJRChVDb250aW51ZVF1ZXJ5UmVzcG9uc2USOAoFYmF0Y2gYASACKAsyKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoIlMKF0JlZ2luVHJhbnNhY3Rpb25SZXF1ZXN0EhoKC2Nyb3NzX2dyb3VwGAEgASgIOgVmYWxzZRIcCg1jcm9zc1
9yZXF1ZXN0GAIgASgIOgVmYWxzZSIvChhCZWdpblRyYW5zYWN0aW9uUmVzcG9uc2USEwoLdHJhbnNhY3Rpb24YASACKAwiJgoPUm9sbGJhY2tSZXF1ZXN0EhMKC3RyYW5zYWN0aW9uGAEgAigMIhIKEFJvbGxiYWNrUmVzcG9uc2Ui1QEKDUNvbW1pdFJlcXVlc3QSEwoLdHJhbnNhY3Rpb24YASABKAwSMwoIbXV0YXRpb24YAiABKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbhJICgRtb2RlGAQgASgOMisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVxdWVzdC5Nb2RlOg1UUkFOU0FDVElPTkFMIjAKBE1vZGUSEQoNVFJBTlNBQ1RJT05BTBABEhUKEU5PTl9UUkFOU0FDVElPTkFMEAIiUgoOQ29tbWl0UmVzcG9uc2USQAoPbXV0YXRpb25fcmVzdWx0GAEgASgLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTXV0YXRpb25SZXN1bHQicwoSQWxsb2NhdGVJZHNSZXF1ZXN0Ei4KCGFsbG9jYXRlGAEgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5Ei0KB3Jlc2VydmUYAiADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkiRgoTQWxsb2NhdGVJZHNSZXNwb25zZRIvCglhbGxvY2F0ZWQYASADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkiQwoMV3JpdGVSZXF1ZXN0EjMKCG11dGF0aW9uGAEgAigLMiEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTXV0YXRpb24yogcKEkRhdGFzdG9yZVY0U2VydmljZRJ5ChBCZWdpblRyYW5zYWN0aW9uEjAuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQmVnaW5UcmFuc2FjdGlvblJlcXVlc3QaMS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5CZWdpblRyYW5zYWN0aW9uUmVzcG9uc2UiABJhCghSb2xsYmFjaxIoLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJvbGxiYWNrUmVxdWVzdBopLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJvbGxiYWNrUmVzcG9uc2UiABJbCgZDb21taXQSJi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXF1ZXN0GicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVzcG9uc2UiABJhCghSdW5RdWVyeRIoLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJ1blF1ZXJ5UmVxdWVzdBopLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJ1blF1ZXJ5UmVzcG9uc2UiABJwCg1Db250aW51ZVF1ZXJ5Ei0uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29udGludWVRdWVyeVJlcXVlc3QaLi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db250aW51ZVF1ZXJ5UmVzcG9uc2UiABJbCgZMb29rdXASJi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Mb29rdXBSZXF1ZXN0GicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTG9va3VwUmVzcG9uc2UiABJqCgtBbGxvY2F0ZUlkcxIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkFsbG9jYXRlSWRzUmVxdWVzdBosLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkFsbG9jYXRlSWRzUmVzcG9uc2UiABJYCgNHZXQSJi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Mb29rdXBSZXF1ZXN0GicuYXBwaG9zdGluZy
5kYXRhc3RvcmUudjQuTG9va3VwUmVzcG9uc2UiABJZCgVXcml0ZRIlLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LldyaXRlUmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlIgBCIQofY29tLmdvb2dsZS5hcHBob3N0aW5nLmRhdGFzdG9yZQ=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -572,7 +570,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Mutation'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbhMaBnVwc2VydCABKAIwCzgDSh5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoGdXBkYXRlIAIoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgZpbnNlcnQgAygCMAs4A0oeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaDmluc2VydF9hdXRvX2lkIAQoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgZkZWxldGUgBSgCMAs4A0obYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBWZvcmNlIAYoADAIOAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uExoGdXBzZXJ0IAEoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgZ1cGRhdGUgAigCMAs4A0oeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBmluc2VydCADKAIwCzgDSh5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoOaW5zZXJ0X2F1dG9faWQgBCgCMAs4A0oeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBmRlbGV0ZSAFKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoFZm9yY2UgBigAMAg4ARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -758,7 +756,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.MutationResult'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvblJlc3VsdBMaDWluZGV4X3VwZGF0ZXMgASgAMAU4AhQTGhJpbnNlcnRfYXV0b19pZF9rZXkgAigCMAs4A0obYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5owGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0ExoNaW5kZXhfdXBkYXRlcyABKAAwBTgCFBMaEmluc2VydF9hdXRvX2lkX2tleSACKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -910,7 +908,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.EntityResult'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiRhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQTGgZlbnRpdHkgASgCMAs4AkoeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFHN6ClJlc3VsdFR5cGWLAZIBBEZVTEyYAQGMAYsBkgEKUFJPSkVDVElPTpgBAowBiwGSAQhLRVlfT05MWZgBA4wBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdBMaBmVudGl0eSABKAIwCzgCSh5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUc3oKUmVzdWx0VHlwZYsBkgEERlVMTJgBAYwBiwGSAQpQUk9KRUNUSU9OmAECjAGLAZIBCEtFWV9PTkxZmAEDjAF0wgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -1402,7 +1400,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Query'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeRMaCnByb2plY3Rpb24gAigCMAs4A0oqYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlFeHByZXNzaW9uowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBGtpbmQgAygCMAs4A0omYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2luZEV4cHJlc3Npb26jAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoGZmlsdGVyIAQoAjALOAFKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlcqMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgVvcmRlciAFKAIwCzgDSiVhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eU9yZGVyowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaCGdyb3VwX2J5IAYoAjALOANKKWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaDHN0YXJ0X2N1cnNvciAHKAIwCTgBFBMaCmVuZF9jdXJzb3IgCCgCMAk4ARQTGgZvZmZzZXQgCigAMAU4AUIBMKMBqgEHZGVmYXVsdLIBATCkARQTGgVsaW1pdCALKAAwBTgBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5ExoKcHJvamVjdGlvbiACKAIwCzgDSiphcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUV4cHJlc3Npb26jAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoEa2luZCADKAIwCzgDSiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LaW5kRXhwcmVzc2lvbqMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgZmaWx0ZXIgBCgCMAs4AUoeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRmlsdGVyowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBW9yZGVyIAUoAjALOANKJWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXKjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoIZ3JvdXBfYnkgBigCMAs4A0opYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2WjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoMc3RhcnRfY3Vyc29yIAcoAjAJOAEUExoKZW5kX2N1cnNvciAIKAIwCTgBFBMaBm9mZnNldCAKKAAwBTgBQgEwowGqAQdkZWZhdWx0sgEBMKQBFBMaBWxpbWl0IAsoADAFOAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -1535,7 +1533,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.KindExpression'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LaW5kRXhwcmVzc2lvbhMaBG5hbWUgASgCMAk4AhTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktpbmRFeHByZXNzaW9uExoEbmFtZSABKAIwCTgCFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -1668,7 +1666,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PropertyReference'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCilhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRMaBG5hbWUgAigCMAk4AhTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlExoEbmFtZSACKAIwCTgCFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -1850,7 +1848,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PropertyExpression'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiphcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUV4cHJlc3Npb24TGghwcm9wZXJ0eSABKAIwCzgCSilhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGhRhZ2dyZWdhdGlvbl9mdW5jdGlvbiACKAAwBTgBaAAUc3oTQWdncmVnYXRpb25GdW5jdGlvbosBkgEFRklSU1SYAQGMAXTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbhMaCHByb3BlcnR5IAEoAjALOAJKKWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaFGFnZ3JlZ2F0aW9uX2Z1bmN0aW9uIAIoADAFOAFoABRzehNBZ2dyZWdhdGlvbkZ1bmN0aW9uiwGSAQVGSVJTVJgBAYwBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2034,7 +2032,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PropertyOrder'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiVhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eU9yZGVyExoIcHJvcGVydHkgASgCMAs4AkopYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2WjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoJZGlyZWN0aW9uIAIoADAFOAFCATFoAKMBqgEHZGVmYXVsdLIBCUFTQ0VORElOR6QBFHN6CURpcmVjdGlvbosBkgEJQVNDRU5ESU5HmAEBjAGLAZIBCkRFU0NFTkRJTkeYAQKMAXTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXITGghwcm9wZXJ0eSABKAIwCzgCSilhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGglkaXJlY3Rpb24gAigAMAU4AUIBMWgAowGqAQdkZWZhdWx0sgEJQVNDRU5ESU5HpAEUc3oJRGlyZWN0aW9uiwGSAQlBU0NFTkRJTkeYAQGMAYsBkgEKREVTQ0VORElOR5gBAowBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2227,7 +2225,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Filter'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCh5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5GaWx0ZXITGhBjb21wb3NpdGVfZmlsdGVyIAEoAjALOAFKJ2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlcqMBqgEFY3R5cGWyAQZwcm90bzKkARQTGg9wcm9wZXJ0eV9maWx0ZXIgAigCMAs4AUomYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlGaWx0ZXKjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlchMaEGNvbXBvc2l0ZV9maWx0ZXIgASgCMAs4AUonYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tcG9zaXRlRmlsdGVyowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaD3Byb3BlcnR5X2ZpbHRlciACKAIwCzgBSiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlcqMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2424,7 +2422,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.CompositeFilter'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCidhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21wb3NpdGVGaWx0ZXITGghvcGVyYXRvciABKAAwBTgCaAAUExoGZmlsdGVyIAIoAjALOANKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlcqMBqgEFY3R5cGWyAQZwcm90bzKkARRzeghPcGVyYXRvcosBkgEDQU5EmAEBjAF0wgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJ2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlchMaCG9wZXJhdG9yIAEoADAFOAJoABQTGgZmaWx0ZXIgAigCMAs4A0oeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRmlsdGVyowGqAQVjdHlwZbIBBnByb3RvMqQBFHN6CE9wZXJhdG9yiwGSAQNBTkSYAQGMAXTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2664,7 +2662,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PropertyFilter'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlchMaCHByb3BlcnR5IAEoAjALOAJKKWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaCG9wZXJhdG9yIAIoADAFOAJoABQTGgV2YWx1ZSADKAIwCzgCSh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZaMBqgEFY3R5cGWyAQZwcm90bzKkARRzeghPcGVyYXRvcosBkgEJTEVTU19USEFOmAEBjAGLAZIBEkxFU1NfVEhBTl9PUl9FUVVBTJgBAowBiwGSAQxHUkVBVEVSX1RIQU6YAQOMAYsBkgEVR1JFQVRFUl9USEFOX09SX0VRVUFMmAEEjAGLAZIBBUVRVUFMmAEFjAGLAZIBDEhBU19BTkNFU1RPUpgBC4wBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5RmlsdGVyExoIcHJvcGVydHkgASgCMAs4AkopYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2WjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoIb3BlcmF0b3IgAigAMAU4AmgAFBMaBXZhbHVlIAMoAjALOAJKHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlowGqAQVjdHlwZbIBBnByb3RvMqQBFHN6CE9wZXJhdG9yiwGSAQlMRVNTX1RIQU6YAQGMAYsBkgESTEVTU19USEFOX09SX0VRVUFMmAECjAGLAZIBDEdSRUFURVJfVEhBTpgBA4wBiwGSARVHUkVBVEVSX1RIQU5fT1JfRVFVQUyYAQSMAYsBkgEFRVFVQUyYAQWMAYsBkgEMSEFTX0FOQ0VTVE9SmAELjAF0wgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2937,7 +2935,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.GqlQuery'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeRMaDHF1ZXJ5X3N0cmluZyABKAIwCTgCFBMaDWFsbG93X2xpdGVyYWwgAigAMAg4AUIFZmFsc2WjAaoBB2RlZmF1bHSyAQVmYWxzZaQBFBMaCG5hbWVfYXJnIAMoAjALOANKI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaCm51bWJlcl9hcmcgBCgCMAs4A0ojYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuR3FsUXVlcnlBcmejAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5ExoMcXVlcnlfc3RyaW5nIAEoAjAJOAIUExoNYWxsb3dfbGl0ZXJhbCACKAAwCDgBQgVmYWxzZaMBqgEHZGVmYXVsdLIBBWZhbHNlpAEUExoIbmFtZV9hcmcgAygCMAs4A0ojYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuR3FsUXVlcnlBcmejAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoKbnVtYmVyX2FyZyAEKAIwCzgDSiNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeUFyZ6MBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2946,9 +2944,12 @@
   has_name_ = 0
   name_ = ""
   has_value_ = 0
+  value_ = None
+  has_cursor_ = 0
+  cursor_ = ""
 
   def __init__(self, contents=None):
-    self.value_ = google.appengine.datastore.entity_v4_pb.Value()
+    self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
   def name(self): return self.name_
@@ -2964,19 +2965,44 @@
 
   def has_name(self): return self.has_name_
 
-  def value(self): return self.value_
+  def value(self):
+    if self.value_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.value_ is None: self.value_ = google.appengine.datastore.entity_v4_pb.Value()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.value_
 
-  def mutable_value(self): self.has_value_ = 1; return self.value_
+  def mutable_value(self): self.has_value_ = 1; return self.value()
 
-  def clear_value(self):self.has_value_ = 0; self.value_.Clear()
+  def clear_value(self):
+
+    if self.has_value_:
+      self.has_value_ = 0;
+      if self.value_ is not None: self.value_.Clear()
 
   def has_value(self): return self.has_value_
 
+  def cursor(self): return self.cursor_
+
+  def set_cursor(self, x):
+    self.has_cursor_ = 1
+    self.cursor_ = x
+
+  def clear_cursor(self):
+    if self.has_cursor_:
+      self.has_cursor_ = 0
+      self.cursor_ = ""
+
+  def has_cursor(self): return self.has_cursor_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_name()): self.set_name(x.name())
     if (x.has_value()): self.mutable_value().MergeFrom(x.value())
+    if (x.has_cursor()): self.set_cursor(x.cursor())
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
@@ -3011,42 +3037,45 @@
     if self.has_name_ and self.name_ != x.name_: return 0
     if self.has_value_ != x.has_value_: return 0
     if self.has_value_ and self.value_ != x.value_: return 0
+    if self.has_cursor_ != x.has_cursor_: return 0
+    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
     initialized = 1
-    if (not self.has_value_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: value not set.')
-    elif not self.value_.IsInitialized(debug_strs): initialized = 0
+    if (self.has_value_ and not self.value_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
     if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
-    n += self.lengthString(self.value_.ByteSize())
-    return n + 1
+    if (self.has_value_): n += 1 + self.lengthString(self.value_.ByteSize())
+    if (self.has_cursor_): n += 1 + self.lengthString(len(self.cursor_))
+    return n
 
   def ByteSizePartial(self):
     n = 0
     if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
-    if (self.has_value_):
-      n += 1
-      n += self.lengthString(self.value_.ByteSizePartial())
+    if (self.has_value_): n += 1 + self.lengthString(self.value_.ByteSizePartial())
+    if (self.has_cursor_): n += 1 + self.lengthString(len(self.cursor_))
     return n
 
   def Clear(self):
     self.clear_name()
     self.clear_value()
+    self.clear_cursor()
 
   def OutputUnchecked(self, out):
     if (self.has_name_):
       out.putVarInt32(10)
       out.putPrefixedString(self.name_)
-    out.putVarInt32(18)
-    out.putVarInt32(self.value_.ByteSize())
-    self.value_.OutputUnchecked(out)
+    if (self.has_value_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.value_.ByteSize())
+      self.value_.OutputUnchecked(out)
+    if (self.has_cursor_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.cursor_)
 
   def OutputPartial(self, out):
     if (self.has_name_):
@@ -3056,6 +3085,9 @@
       out.putVarInt32(18)
       out.putVarInt32(self.value_.ByteSizePartial())
       self.value_.OutputPartial(out)
+    if (self.has_cursor_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.cursor_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -3069,6 +3101,9 @@
         d.skip(length)
         self.mutable_value().TryMerge(tmp)
         continue
+      if tt == 26:
+        self.set_cursor(d.getPrefixedString())
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -3082,6 +3117,7 @@
       res+=prefix+"value <\n"
       res+=self.value_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatString(self.cursor_))
     return res
 
 
@@ -3090,25 +3126,28 @@
 
   kname = 1
   kvalue = 2
+  kcursor = 3
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
     1: "name",
     2: "value",
-  }, 2)
+    3: "cursor",
+  }, 3)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
     1: ProtocolBuffer.Encoder.STRING,
     2: ProtocolBuffer.Encoder.STRING,
-  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.GqlQueryArg'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeUFyZxMaBG5hbWUgASgCMAk4ARQTGgV2YWx1ZSACKAIwCzgCSh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZaMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnExoEbmFtZSABKAIwCTgBFBMaBXZhbHVlIAIoAjALOAFKHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBmN1cnNvciADKAIwCTgBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -3416,7 +3455,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.QueryResultBatch'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCihhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoExoSZW50aXR5X3Jlc3VsdF90eXBlIAEoADAFOAIUExoNZW50aXR5X3Jlc3VsdCACKAIwCzgDSiRhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHSjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoKZW5kX2N1cnNvciAEKAIwCTgBFBMaDG1vcmVfcmVzdWx0cyAFKAAwBTgCaAAUExoPc2tpcHBlZF9yZXN1bHRzIAYoADAFOAFCATCjAaoBB2RlZmF1bHSyAQEwpAEUc3oPTW9yZVJlc3VsdHNUeXBliwGSAQxOT1RfRklOSVNIRUSYAQGMAYsBkgEYTU9SRV9SRVNVTFRTX0FGVEVSX0xJTUlUmAECjAGLAZIBD05PX01PUkVfUkVTVUxUU5gBA4wBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2gTGhJlbnRpdHlfcmVzdWx0X3R5cGUgASgAMAU4AhQTGg1lbnRpdHlfcmVzdWx0IAIoAjALOANKJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgplbmRfY3Vyc29yIAQoAjAJOAEUExoMbW9yZV9yZXN1bHRzIAUoADAFOAJoABQTGg9za2lwcGVkX3Jlc3VsdHMgBigAMAU4AUIBMKMBqgEHZGVmYXVsdLIBATCkARRzeg9Nb3JlUmVzdWx0c1R5cGWLAZIBDE5PVF9GSU5JU0hFRJgBAYwBiwGSARhNT1JFX1JFU1VMVFNfQUZURVJfTElNSVSYAQKMAYsBkgEPTk9fTU9SRV9SRVNVTFRTmAEDjAF0wgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -3593,12 +3632,12 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.ReadOptions'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxMaEHJlYWRfY29uc2lzdGVuY3kgASgAMAU4AUIBMGgAowGqAQdkZWZhdWx0sgEHREVGQVVMVKQBFBMaC3RyYW5zYWN0aW9uIAIoAjAJOAEUc3oPUmVhZENvbnNpc3RlbmN5iwGSAQdERUZBVUxUmAEAjAGLAZIBBlNUUk9OR5gBAYwBiwGSAQhFVkVOVFVBTJgBAowBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJlYWRPcHRpb25zExoQcmVhZF9jb25zaXN0ZW5jeSABKAAwBTgBQgEwaACjAaoBB2RlZmF1bHSyAQdERUZBVUxUpAEUExoLdHJhbnNhY3Rpb24gAigCMAk4ARRzeg9SZWFkQ29uc2lzdGVuY3mLAZIBB0RFRkFVTFSYAQCMAYsBkgEGU1RST05HmAEBjAGLAZIBCEVWRU5UVUFMmAECjAF0wgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
 
-class GetRequest(ProtocolBuffer.ProtocolMessage):
+class LookupRequest(ProtocolBuffer.ProtocolMessage):
   has_read_options_ = 0
   read_options_ = None
 
@@ -3650,29 +3689,29 @@
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
-      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.GetRequest', s)
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.LookupRequest', s)
 
   if _net_proto___parse__python is not None:
     def _CEncode(self):
-      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.GetRequest')
+      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.LookupRequest')
 
   if _net_proto___parse__python is not None:
     def _CEncodePartial(self):
-      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.GetRequest')
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.LookupRequest')
 
   if _net_proto___parse__python is not None:
     def _CToASCII(self, output_format):
-      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.GetRequest', output_format)
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.LookupRequest', output_format)
 
 
   if _net_proto___parse__python is not None:
     def ParseASCII(self, s):
-      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.GetRequest', s)
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.LookupRequest', s)
 
 
   if _net_proto___parse__python is not None:
     def ParseASCIIIgnoreUnknown(self, s):
-      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.GetRequest', s)
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.LookupRequest', s)
 
 
   def Equals(self, x):
@@ -3788,14 +3827,14 @@
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.GetRequest'
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.LookupRequest'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiJhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HZXRSZXF1ZXN0ExoMcmVhZF9vcHRpb25zIAEoAjALOAFKI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJlYWRPcHRpb25zowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaA2tleSADKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlcXVlc3QTGgxyZWFkX29wdGlvbnMgASgCMAs4AUojYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnOjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoDa2V5IAMoAjALOANKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
 
-class GetResponse(ProtocolBuffer.ProtocolMessage):
+class LookupResponse(ProtocolBuffer.ProtocolMessage):
 
   def __init__(self, contents=None):
     self.found_ = []
@@ -3860,29 +3899,29 @@
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
-      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.GetResponse', s)
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.LookupResponse', s)
 
   if _net_proto___parse__python is not None:
     def _CEncode(self):
-      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.GetResponse')
+      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.LookupResponse')
 
   if _net_proto___parse__python is not None:
     def _CEncodePartial(self):
-      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.GetResponse')
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.LookupResponse')
 
   if _net_proto___parse__python is not None:
     def _CToASCII(self, output_format):
-      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.GetResponse', output_format)
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.LookupResponse', output_format)
 
 
   if _net_proto___parse__python is not None:
     def ParseASCII(self, s):
-      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.GetResponse', s)
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.LookupResponse', s)
 
 
   if _net_proto___parse__python is not None:
     def ParseASCIIIgnoreUnknown(self, s):
-      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.GetResponse', s)
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.LookupResponse', s)
 
 
   def Equals(self, x):
@@ -4041,9 +4080,9 @@
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.GetResponse'
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.LookupResponse'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HZXRSZXNwb25zZRMaBWZvdW5kIAEoAjALOANKJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgdtaXNzaW5nIAIoAjALOANKJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGghkZWZlcnJlZCADKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlc3BvbnNlExoFZm91bmQgASgCMAs4A0okYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaB21pc3NpbmcgAigCMAs4A0okYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaCGRlZmVycmVkIAMoAjALOANKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -4054,13 +4093,15 @@
   has_partition_id_ = 0
   partition_id_ = None
   has_query_ = 0
+  query_ = None
+  has_gql_query_ = 0
+  gql_query_ = None
   has_min_safe_time_seconds_ = 0
   min_safe_time_seconds_ = 0
   has_suggested_batch_size_ = 0
   suggested_batch_size_ = 0
 
   def __init__(self, contents=None):
-    self.query_ = Query()
     self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
@@ -4102,14 +4143,44 @@
 
   def has_partition_id(self): return self.has_partition_id_
 
-  def query(self): return self.query_
+  def query(self):
+    if self.query_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.query_ is None: self.query_ = Query()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.query_
 
-  def mutable_query(self): self.has_query_ = 1; return self.query_
+  def mutable_query(self): self.has_query_ = 1; return self.query()
 
-  def clear_query(self):self.has_query_ = 0; self.query_.Clear()
+  def clear_query(self):
+
+    if self.has_query_:
+      self.has_query_ = 0;
+      if self.query_ is not None: self.query_.Clear()
 
   def has_query(self): return self.has_query_
 
+  def gql_query(self):
+    if self.gql_query_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.gql_query_ is None: self.gql_query_ = GqlQuery()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.gql_query_
+
+  def mutable_gql_query(self): self.has_gql_query_ = 1; return self.gql_query()
+
+  def clear_gql_query(self):
+
+    if self.has_gql_query_:
+      self.has_gql_query_ = 0;
+      if self.gql_query_ is not None: self.gql_query_.Clear()
+
+  def has_gql_query(self): return self.has_gql_query_
+
   def min_safe_time_seconds(self): return self.min_safe_time_seconds_
 
   def set_min_safe_time_seconds(self, x):
@@ -4142,6 +4213,7 @@
     if (x.has_read_options()): self.mutable_read_options().MergeFrom(x.read_options())
     if (x.has_partition_id()): self.mutable_partition_id().MergeFrom(x.partition_id())
     if (x.has_query()): self.mutable_query().MergeFrom(x.query())
+    if (x.has_gql_query()): self.mutable_gql_query().MergeFrom(x.gql_query())
     if (x.has_min_safe_time_seconds()): self.set_min_safe_time_seconds(x.min_safe_time_seconds())
     if (x.has_suggested_batch_size()): self.set_suggested_batch_size(x.suggested_batch_size())
 
@@ -4180,6 +4252,8 @@
     if self.has_partition_id_ and self.partition_id_ != x.partition_id_: return 0
     if self.has_query_ != x.has_query_: return 0
     if self.has_query_ and self.query_ != x.query_: return 0
+    if self.has_gql_query_ != x.has_gql_query_: return 0
+    if self.has_gql_query_ and self.gql_query_ != x.gql_query_: return 0
     if self.has_min_safe_time_seconds_ != x.has_min_safe_time_seconds_: return 0
     if self.has_min_safe_time_seconds_ and self.min_safe_time_seconds_ != x.min_safe_time_seconds_: return 0
     if self.has_suggested_batch_size_ != x.has_suggested_batch_size_: return 0
@@ -4190,29 +4264,26 @@
     initialized = 1
     if (self.has_read_options_ and not self.read_options_.IsInitialized(debug_strs)): initialized = 0
     if (self.has_partition_id_ and not self.partition_id_.IsInitialized(debug_strs)): initialized = 0
-    if (not self.has_query_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: query not set.')
-    elif not self.query_.IsInitialized(debug_strs): initialized = 0
+    if (self.has_query_ and not self.query_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_gql_query_ and not self.gql_query_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
     if (self.has_read_options_): n += 1 + self.lengthString(self.read_options_.ByteSize())
     if (self.has_partition_id_): n += 1 + self.lengthString(self.partition_id_.ByteSize())
-    n += self.lengthString(self.query_.ByteSize())
+    if (self.has_query_): n += 1 + self.lengthString(self.query_.ByteSize())
+    if (self.has_gql_query_): n += 1 + self.lengthString(self.gql_query_.ByteSize())
     if (self.has_min_safe_time_seconds_): n += 1 + self.lengthVarInt64(self.min_safe_time_seconds_)
     if (self.has_suggested_batch_size_): n += 1 + self.lengthVarInt64(self.suggested_batch_size_)
-    return n + 1
+    return n
 
   def ByteSizePartial(self):
     n = 0
     if (self.has_read_options_): n += 1 + self.lengthString(self.read_options_.ByteSizePartial())
     if (self.has_partition_id_): n += 1 + self.lengthString(self.partition_id_.ByteSizePartial())
-    if (self.has_query_):
-      n += 1
-      n += self.lengthString(self.query_.ByteSizePartial())
+    if (self.has_query_): n += 1 + self.lengthString(self.query_.ByteSizePartial())
+    if (self.has_gql_query_): n += 1 + self.lengthString(self.gql_query_.ByteSizePartial())
     if (self.has_min_safe_time_seconds_): n += 1 + self.lengthVarInt64(self.min_safe_time_seconds_)
     if (self.has_suggested_batch_size_): n += 1 + self.lengthVarInt64(self.suggested_batch_size_)
     return n
@@ -4221,6 +4292,7 @@
     self.clear_read_options()
     self.clear_partition_id()
     self.clear_query()
+    self.clear_gql_query()
     self.clear_min_safe_time_seconds()
     self.clear_suggested_batch_size()
 
@@ -4233,15 +4305,20 @@
       out.putVarInt32(18)
       out.putVarInt32(self.partition_id_.ByteSize())
       self.partition_id_.OutputUnchecked(out)
-    out.putVarInt32(26)
-    out.putVarInt32(self.query_.ByteSize())
-    self.query_.OutputUnchecked(out)
+    if (self.has_query_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.query_.ByteSize())
+      self.query_.OutputUnchecked(out)
     if (self.has_min_safe_time_seconds_):
       out.putVarInt32(32)
       out.putVarInt64(self.min_safe_time_seconds_)
     if (self.has_suggested_batch_size_):
       out.putVarInt32(40)
       out.putVarInt32(self.suggested_batch_size_)
+    if (self.has_gql_query_):
+      out.putVarInt32(58)
+      out.putVarInt32(self.gql_query_.ByteSize())
+      self.gql_query_.OutputUnchecked(out)
 
   def OutputPartial(self, out):
     if (self.has_read_options_):
@@ -4262,6 +4339,10 @@
     if (self.has_suggested_batch_size_):
       out.putVarInt32(40)
       out.putVarInt32(self.suggested_batch_size_)
+    if (self.has_gql_query_):
+      out.putVarInt32(58)
+      out.putVarInt32(self.gql_query_.ByteSizePartial())
+      self.gql_query_.OutputPartial(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -4290,6 +4371,12 @@
       if tt == 40:
         self.set_suggested_batch_size(d.getVarInt32())
         continue
+      if tt == 58:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_gql_query().TryMerge(tmp)
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -4310,6 +4397,10 @@
       res+=prefix+"query <\n"
       res+=self.query_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    if self.has_gql_query_:
+      res+=prefix+"gql_query <\n"
+      res+=self.gql_query_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     if self.has_min_safe_time_seconds_: res+=prefix+("min_safe_time_seconds: %s\n" % self.DebugFormatInt64(self.min_safe_time_seconds_))
     if self.has_suggested_batch_size_: res+=prefix+("suggested_batch_size: %s\n" % self.DebugFormatInt32(self.suggested_batch_size_))
     return res
@@ -4321,6 +4412,7 @@
   kread_options = 1
   kpartition_id = 2
   kquery = 3
+  kgql_query = 7
   kmin_safe_time_seconds = 4
   ksuggested_batch_size = 5
 
@@ -4331,7 +4423,8 @@
     3: "query",
     4: "min_safe_time_seconds",
     5: "suggested_batch_size",
-  }, 5)
+    7: "gql_query",
+  }, 7)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
@@ -4340,14 +4433,15 @@
     3: ProtocolBuffer.Encoder.STRING,
     4: ProtocolBuffer.Encoder.NUMERIC,
     5: ProtocolBuffer.Encoder.NUMERIC,
-  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+    7: ProtocolBuffer.Encoder.STRING,
+  }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.RunQueryRequest'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCidhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlcXVlc3QTGgxyZWFkX29wdGlvbnMgASgCMAs4AUojYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnOjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoMcGFydGl0aW9uX2lkIAIoAjALOAFKI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlBhcnRpdGlvbklkowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBXF1ZXJ5IAMoAjALOAJKHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaFW1pbl9zYWZlX3RpbWVfc2Vjb25kcyAEKAAwAzgBFBMaFHN1Z2dlc3RlZF9iYXRjaF9zaXplIAUoADAFOAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJ2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJ1blF1ZXJ5UmVxdWVzdBMaDHJlYWRfb3B0aW9ucyABKAIwCzgBSiNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9uc6MBqgEFY3R5cGWyAQZwcm90bzKkARQTGgxwYXJ0aXRpb25faWQgAigCMAs4AUojYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWSjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoFcXVlcnkgAygCMAs4AUodYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoJZ3FsX3F1ZXJ5IAcoAjALOAFKIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaFW1pbl9zYWZlX3RpbWVfc2Vjb25kcyAEKAAwAzgBFBMaFHN1Z2dlc3RlZF9iYXRjaF9zaXplIAUoADAFOAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -4359,7 +4453,6 @@
 
   def __init__(self, contents=None):
     self.batch_ = QueryResultBatch()
-    self.index_ = []
     if contents is not None: self.MergeFromString(contents)
 
   def batch(self): return self.batch_
@@ -4383,28 +4476,11 @@
 
   def has_query_handle(self): return self.has_query_handle_
 
-  def index_size(self): return len(self.index_)
-  def index_list(self): return self.index_
-
-  def index(self, i):
-    return self.index_[i]
-
-  def mutable_index(self, i):
-    return self.index_[i]
-
-  def add_index(self):
-    x = google.appengine.datastore.entity_pb.CompositeIndex()
-    self.index_.append(x)
-    return x
-
-  def clear_index(self):
-    self.index_ = []
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_batch()): self.mutable_batch().MergeFrom(x.batch())
     if (x.has_query_handle()): self.set_query_handle(x.query_handle())
-    for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
@@ -4439,9 +4515,6 @@
     if self.has_batch_ and self.batch_ != x.batch_: return 0
     if self.has_query_handle_ != x.has_query_handle_: return 0
     if self.has_query_handle_ and self.query_handle_ != x.query_handle_: return 0
-    if len(self.index_) != len(x.index_): return 0
-    for e1, e2 in zip(self.index_, x.index_):
-      if e1 != e2: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -4451,16 +4524,12 @@
       if debug_strs is not None:
         debug_strs.append('Required field: batch not set.')
     elif not self.batch_.IsInitialized(debug_strs): initialized = 0
-    for p in self.index_:
-      if not p.IsInitialized(debug_strs): initialized=0
     return initialized
 
   def ByteSize(self):
     n = 0
     n += self.lengthString(self.batch_.ByteSize())
     if (self.has_query_handle_): n += 1 + self.lengthString(len(self.query_handle_))
-    n += 1 * len(self.index_)
-    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
     return n + 1
 
   def ByteSizePartial(self):
@@ -4469,14 +4538,11 @@
       n += 1
       n += self.lengthString(self.batch_.ByteSizePartial())
     if (self.has_query_handle_): n += 1 + self.lengthString(len(self.query_handle_))
-    n += 1 * len(self.index_)
-    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
     return n
 
   def Clear(self):
     self.clear_batch()
     self.clear_query_handle()
-    self.clear_index()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -4485,10 +4551,6 @@
     if (self.has_query_handle_):
       out.putVarInt32(18)
       out.putPrefixedString(self.query_handle_)
-    for i in xrange(len(self.index_)):
-      out.putVarInt32(26)
-      out.putVarInt32(self.index_[i].ByteSize())
-      self.index_[i].OutputUnchecked(out)
 
   def OutputPartial(self, out):
     if (self.has_batch_):
@@ -4498,10 +4560,6 @@
     if (self.has_query_handle_):
       out.putVarInt32(18)
       out.putPrefixedString(self.query_handle_)
-    for i in xrange(len(self.index_)):
-      out.putVarInt32(26)
-      out.putVarInt32(self.index_[i].ByteSizePartial())
-      self.index_[i].OutputPartial(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -4515,12 +4573,6 @@
       if tt == 18:
         self.set_query_handle(d.getPrefixedString())
         continue
-      if tt == 26:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.add_index().TryMerge(tmp)
-        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -4534,14 +4586,6 @@
       res+=self.batch_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
     if self.has_query_handle_: res+=prefix+("query_handle: %s\n" % self.DebugFormatString(self.query_handle_))
-    cnt=0
-    for e in self.index_:
-      elm=""
-      if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("index%s <\n" % elm)
-      res+=e.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-      cnt+=1
     return res
 
 
@@ -4550,28 +4594,25 @@
 
   kbatch = 1
   kquery_handle = 2
-  kindex = 3
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
     1: "batch",
     2: "query_handle",
-    3: "index",
-  }, 3)
+  }, 2)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
     1: ProtocolBuffer.Encoder.STRING,
     2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.STRING,
-  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.RunQueryResponse'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCihhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlc3BvbnNlExoFYmF0Y2ggASgCMAs4AkooYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgxxdWVyeV9oYW5kbGUgAigCMAk4ARQTGgVpbmRleCADKAIwCzgDSiJzdG9yYWdlX29uZXN0b3JlX3YzLkNvbXBvc2l0ZUluZGV4FMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJ1blF1ZXJ5UmVzcG9uc2UTGgViYXRjaCABKAIwCzgCSihhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaDHF1ZXJ5X2hhbmRsZSACKAIwCTgBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -4704,7 +4745,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.ContinueQueryRequest'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCixhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db250aW51ZVF1ZXJ5UmVxdWVzdBMaDHF1ZXJ5X2hhbmRsZSABKAIwCTgCFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KLGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbnRpbnVlUXVlcnlSZXF1ZXN0ExoMcXVlcnlfaGFuZGxlIAEoAjAJOAIUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -4841,7 +4882,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.ContinueQueryResponse'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCi1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db250aW51ZVF1ZXJ5UmVzcG9uc2UTGgViYXRjaCABKAIwCzgCSihhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoowGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KLWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbnRpbnVlUXVlcnlSZXNwb25zZRMaBWJhdGNoIAEoAjALOAJKKGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2ijAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -5003,7 +5044,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.BeginTransactionRequest'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCi9hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5CZWdpblRyYW5zYWN0aW9uUmVxdWVzdBMaC2Nyb3NzX2dyb3VwIAEoADAIOAFCBWZhbHNlowGqAQdkZWZhdWx0sgEFZmFsc2WkARQTGg1jcm9zc19yZXF1ZXN0IAIoADAIOAFCBWZhbHNlowGqAQdkZWZhdWx0sgEFZmFsc2WkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KL2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJlZ2luVHJhbnNhY3Rpb25SZXF1ZXN0ExoLY3Jvc3NfZ3JvdXAgASgAMAg4AUIFZmFsc2WjAaoBB2RlZmF1bHSyAQVmYWxzZaQBFBMaDWNyb3NzX3JlcXVlc3QgAigAMAg4AUIFZmFsc2WjAaoBB2RlZmF1bHSyAQVmYWxzZaQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -5136,7 +5177,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.BeginTransactionResponse'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCjBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5CZWdpblRyYW5zYWN0aW9uUmVzcG9uc2UTGgt0cmFuc2FjdGlvbiABKAIwCTgCFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KMGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJlZ2luVHJhbnNhY3Rpb25SZXNwb25zZRMaC3RyYW5zYWN0aW9uIAEoAjAJOAIUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -5269,7 +5310,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.RollbackRequest'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCidhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1JlcXVlc3QTGgt0cmFuc2FjdGlvbiABKAIwCTgCFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJ2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJvbGxiYWNrUmVxdWVzdBMaC3RyYW5zYWN0aW9uIAEoAjAJOAIUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -5367,16 +5408,31 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.RollbackResponse'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCihhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1Jlc3BvbnNlwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJvbGxiYWNrUmVzcG9uc2XCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
 
 class CommitRequest(ProtocolBuffer.ProtocolMessage):
+
+
+  TRANSACTIONAL =    1
+  NON_TRANSACTIONAL =    2
+
+  _Mode_NAMES = {
+    1: "TRANSACTIONAL",
+    2: "NON_TRANSACTIONAL",
+  }
+
+  def Mode_Name(cls, x): return cls._Mode_NAMES.get(x, "")
+  Mode_Name = classmethod(Mode_Name)
+
   has_transaction_ = 0
   transaction_ = ""
   has_mutation_ = 0
   mutation_ = None
+  has_mode_ = 0
+  mode_ = 1
 
   def __init__(self, contents=None):
     self.lazy_init_lock_ = thread.allocate_lock()
@@ -5414,11 +5470,25 @@
 
   def has_mutation(self): return self.has_mutation_
 
+  def mode(self): return self.mode_
+
+  def set_mode(self, x):
+    self.has_mode_ = 1
+    self.mode_ = x
+
+  def clear_mode(self):
+    if self.has_mode_:
+      self.has_mode_ = 0
+      self.mode_ = 1
+
+  def has_mode(self): return self.has_mode_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_transaction()): self.set_transaction(x.transaction())
     if (x.has_mutation()): self.mutable_mutation().MergeFrom(x.mutation())
+    if (x.has_mode()): self.set_mode(x.mode())
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
@@ -5453,42 +5523,45 @@
     if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
     if self.has_mutation_ != x.has_mutation_: return 0
     if self.has_mutation_ and self.mutation_ != x.mutation_: return 0
+    if self.has_mode_ != x.has_mode_: return 0
+    if self.has_mode_ and self.mode_ != x.mode_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
     initialized = 1
-    if (not self.has_transaction_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: transaction not set.')
     if (self.has_mutation_ and not self.mutation_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
-    n += self.lengthString(len(self.transaction_))
+    if (self.has_transaction_): n += 1 + self.lengthString(len(self.transaction_))
     if (self.has_mutation_): n += 1 + self.lengthString(self.mutation_.ByteSize())
-    return n + 1
+    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
+    return n
 
   def ByteSizePartial(self):
     n = 0
-    if (self.has_transaction_):
-      n += 1
-      n += self.lengthString(len(self.transaction_))
+    if (self.has_transaction_): n += 1 + self.lengthString(len(self.transaction_))
     if (self.has_mutation_): n += 1 + self.lengthString(self.mutation_.ByteSizePartial())
+    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
     return n
 
   def Clear(self):
     self.clear_transaction()
     self.clear_mutation()
+    self.clear_mode()
 
   def OutputUnchecked(self, out):
-    out.putVarInt32(10)
-    out.putPrefixedString(self.transaction_)
+    if (self.has_transaction_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.transaction_)
     if (self.has_mutation_):
       out.putVarInt32(18)
       out.putVarInt32(self.mutation_.ByteSize())
       self.mutation_.OutputUnchecked(out)
+    if (self.has_mode_):
+      out.putVarInt32(32)
+      out.putVarInt32(self.mode_)
 
   def OutputPartial(self, out):
     if (self.has_transaction_):
@@ -5498,6 +5571,9 @@
       out.putVarInt32(18)
       out.putVarInt32(self.mutation_.ByteSizePartial())
       self.mutation_.OutputPartial(out)
+    if (self.has_mode_):
+      out.putVarInt32(32)
+      out.putVarInt32(self.mode_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -5511,6 +5587,9 @@
         d.skip(length)
         self.mutable_mutation().TryMerge(tmp)
         continue
+      if tt == 32:
+        self.set_mode(d.getVarInt32())
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -5524,6 +5603,7 @@
       res+=prefix+"mutation <\n"
       res+=self.mutation_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
     return res
 
 
@@ -5532,25 +5612,28 @@
 
   ktransaction = 1
   kmutation = 2
+  kmode = 4
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
     1: "transaction",
     2: "mutation",
-  }, 2)
+    4: "mode",
+  }, 4)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
     1: ProtocolBuffer.Encoder.STRING,
     2: ProtocolBuffer.Encoder.STRING,
-  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.CommitRequest'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiVhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXF1ZXN0ExoLdHJhbnNhY3Rpb24gASgCMAk4AhQTGghtdXRhdGlvbiACKAIwCzgBSiBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbqMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlcXVlc3QTGgt0cmFuc2FjdGlvbiABKAIwCTgBFBMaCG11dGF0aW9uIAIoAjALOAFKIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaBG1vZGUgBCgAMAU4AUIBMWgAowGqAQdkZWZhdWx0sgENVFJBTlNBQ1RJT05BTKQBFHN6BE1vZGWLAZIBDVRSQU5TQUNUSU9OQUyYAQGMAYsBkgERTk9OX1RSQU5TQUNUSU9OQUyYAQKMAXTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -5694,281 +5777,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.CommitResponse'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXNwb25zZRMaD211dGF0aW9uX3Jlc3VsdCABKAIwCzgBSiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvblJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
-  if _net_proto___parse__python is not None:
-    _net_proto___parse__python.RegisterType(
-        _SERIALIZED_DESCRIPTOR.tostring())
-
-class WriteRequest(ProtocolBuffer.ProtocolMessage):
-  has_mutation_ = 0
-
-  def __init__(self, contents=None):
-    self.mutation_ = Mutation()
-    if contents is not None: self.MergeFromString(contents)
-
-  def mutation(self): return self.mutation_
-
-  def mutable_mutation(self): self.has_mutation_ = 1; return self.mutation_
-
-  def clear_mutation(self):self.has_mutation_ = 0; self.mutation_.Clear()
-
-  def has_mutation(self): return self.has_mutation_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_mutation()): self.mutable_mutation().MergeFrom(x.mutation())
-
-  if _net_proto___parse__python is not None:
-    def _CMergeFromString(self, s):
-      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.WriteRequest', s)
-
-  if _net_proto___parse__python is not None:
-    def _CEncode(self):
-      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.WriteRequest')
-
-  if _net_proto___parse__python is not None:
-    def _CEncodePartial(self):
-      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.WriteRequest')
-
-  if _net_proto___parse__python is not None:
-    def _CToASCII(self, output_format):
-      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.WriteRequest', output_format)
-
-
-  if _net_proto___parse__python is not None:
-    def ParseASCII(self, s):
-      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.WriteRequest', s)
-
-
-  if _net_proto___parse__python is not None:
-    def ParseASCIIIgnoreUnknown(self, s):
-      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.WriteRequest', s)
-
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_mutation_ != x.has_mutation_: return 0
-    if self.has_mutation_ and self.mutation_ != x.mutation_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_mutation_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: mutation not set.')
-    elif not self.mutation_.IsInitialized(debug_strs): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(self.mutation_.ByteSize())
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_mutation_):
-      n += 1
-      n += self.lengthString(self.mutation_.ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_mutation()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(10)
-    out.putVarInt32(self.mutation_.ByteSize())
-    self.mutation_.OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_mutation_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.mutation_.ByteSizePartial())
-      self.mutation_.OutputPartial(out)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_mutation().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_mutation_:
-      res+=prefix+"mutation <\n"
-      res+=self.mutation_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kmutation = 1
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "mutation",
-  }, 1)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.WriteRequest'
-  _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiRhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlcXVlc3QTGghtdXRhdGlvbiABKAIwCzgCSiBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbqMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
-  if _net_proto___parse__python is not None:
-    _net_proto___parse__python.RegisterType(
-        _SERIALIZED_DESCRIPTOR.tostring())
-
-class WriteResponse(ProtocolBuffer.ProtocolMessage):
-  has_mutation_result_ = 0
-
-  def __init__(self, contents=None):
-    self.mutation_result_ = MutationResult()
-    if contents is not None: self.MergeFromString(contents)
-
-  def mutation_result(self): return self.mutation_result_
-
-  def mutable_mutation_result(self): self.has_mutation_result_ = 1; return self.mutation_result_
-
-  def clear_mutation_result(self):self.has_mutation_result_ = 0; self.mutation_result_.Clear()
-
-  def has_mutation_result(self): return self.has_mutation_result_
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-    if (x.has_mutation_result()): self.mutable_mutation_result().MergeFrom(x.mutation_result())
-
-  if _net_proto___parse__python is not None:
-    def _CMergeFromString(self, s):
-      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.WriteResponse', s)
-
-  if _net_proto___parse__python is not None:
-    def _CEncode(self):
-      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.WriteResponse')
-
-  if _net_proto___parse__python is not None:
-    def _CEncodePartial(self):
-      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.WriteResponse')
-
-  if _net_proto___parse__python is not None:
-    def _CToASCII(self, output_format):
-      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.WriteResponse', output_format)
-
-
-  if _net_proto___parse__python is not None:
-    def ParseASCII(self, s):
-      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.WriteResponse', s)
-
-
-  if _net_proto___parse__python is not None:
-    def ParseASCIIIgnoreUnknown(self, s):
-      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.WriteResponse', s)
-
-
-  def Equals(self, x):
-    if x is self: return 1
-    if self.has_mutation_result_ != x.has_mutation_result_: return 0
-    if self.has_mutation_result_ and self.mutation_result_ != x.mutation_result_: return 0
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    if (not self.has_mutation_result_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: mutation_result not set.')
-    elif not self.mutation_result_.IsInitialized(debug_strs): initialized = 0
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    n += self.lengthString(self.mutation_result_.ByteSize())
-    return n + 1
-
-  def ByteSizePartial(self):
-    n = 0
-    if (self.has_mutation_result_):
-      n += 1
-      n += self.lengthString(self.mutation_result_.ByteSizePartial())
-    return n
-
-  def Clear(self):
-    self.clear_mutation_result()
-
-  def OutputUnchecked(self, out):
-    out.putVarInt32(10)
-    out.putVarInt32(self.mutation_result_.ByteSize())
-    self.mutation_result_.OutputUnchecked(out)
-
-  def OutputPartial(self, out):
-    if (self.has_mutation_result_):
-      out.putVarInt32(10)
-      out.putVarInt32(self.mutation_result_.ByteSizePartial())
-      self.mutation_result_.OutputPartial(out)
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-      if tt == 10:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_mutation_result().TryMerge(tmp)
-        continue
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    if self.has_mutation_result_:
-      res+=prefix+"mutation_result <\n"
-      res+=self.mutation_result_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-  kmutation_result = 1
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "mutation_result",
-  }, 1)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.STRING,
-  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.WriteResponse'
-  _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiVhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlc3BvbnNlExoPbXV0YXRpb25fcmVzdWx0IAEoAjALOAJKJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0owGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlExoPbXV0YXRpb25fcmVzdWx0IAEoAjALOAFKJmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uUmVzdWx0owGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -6168,7 +5977,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.AllocateIdsRequest'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCiphcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1JlcXVlc3QTGghhbGxvY2F0ZSABKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoHcmVzZXJ2ZSACKAIwCzgDShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkFsbG9jYXRlSWRzUmVxdWVzdBMaCGFsbG9jYXRlIAEoAjALOANKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgdyZXNlcnZlIAIoAjALOANKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -6315,7 +6124,144 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.AllocateIdsResponse'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WihhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjRhLnByb3RvCithcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1Jlc3BvbnNlExoJYWxsb2NhdGVkIAEoAjALOANKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KK2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkFsbG9jYXRlSWRzUmVzcG9uc2UTGglhbGxvY2F0ZWQgASgCMAs4A0obYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5owGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  if _net_proto___parse__python is not None:
+    _net_proto___parse__python.RegisterType(
+        _SERIALIZED_DESCRIPTOR.tostring())
+
+class WriteRequest(ProtocolBuffer.ProtocolMessage):
+  has_mutation_ = 0
+
+  def __init__(self, contents=None):
+    self.mutation_ = Mutation()
+    if contents is not None: self.MergeFromString(contents)
+
+  def mutation(self): return self.mutation_
+
+  def mutable_mutation(self): self.has_mutation_ = 1; return self.mutation_
+
+  def clear_mutation(self):self.has_mutation_ = 0; self.mutation_.Clear()
+
+  def has_mutation(self): return self.has_mutation_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_mutation()): self.mutable_mutation().MergeFrom(x.mutation())
+
+  if _net_proto___parse__python is not None:
+    def _CMergeFromString(self, s):
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.WriteRequest', s)
+
+  if _net_proto___parse__python is not None:
+    def _CEncode(self):
+      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.WriteRequest')
+
+  if _net_proto___parse__python is not None:
+    def _CEncodePartial(self):
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.WriteRequest')
+
+  if _net_proto___parse__python is not None:
+    def _CToASCII(self, output_format):
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.WriteRequest', output_format)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCII(self, s):
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.WriteRequest', s)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCIIIgnoreUnknown(self, s):
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.WriteRequest', s)
+
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_mutation_ != x.has_mutation_: return 0
+    if self.has_mutation_ and self.mutation_ != x.mutation_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_mutation_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: mutation not set.')
+    elif not self.mutation_.IsInitialized(debug_strs): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.mutation_.ByteSize())
+    return n + 1
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_mutation_):
+      n += 1
+      n += self.lengthString(self.mutation_.ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_mutation()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.mutation_.ByteSize())
+    self.mutation_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_mutation_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.mutation_.ByteSizePartial())
+      self.mutation_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_mutation().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_mutation_:
+      res+=prefix+"mutation <\n"
+      res+=self.mutation_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kmutation = 1
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "mutation",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.WriteRequest'
+  _SERIALIZED_DESCRIPTOR = array.array('B')
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LldyaXRlUmVxdWVzdBMaCG11dGF0aW9uIAEoAjALOAJKIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uowGqAQVjdHlwZbIBBnByb3RvMqQBFMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -6329,11 +6275,12 @@
       '_protorpc_BeginTransaction', '_full_name_BeginTransaction',
       '_protorpc_Rollback', '_full_name_Rollback',
       '_protorpc_Commit', '_full_name_Commit',
-      '_protorpc_Write', '_full_name_Write',
       '_protorpc_RunQuery', '_full_name_RunQuery',
       '_protorpc_ContinueQuery', '_full_name_ContinueQuery',
-      '_protorpc_Get', '_full_name_Get',
+      '_protorpc_Lookup', '_full_name_Lookup',
       '_protorpc_AllocateIds', '_full_name_AllocateIds',
+      '_protorpc_Get', '_full_name_Get',
+      '_protorpc_Write', '_full_name_Write',
   )
 
   def __init__(self, rpc_stub):
@@ -6351,10 +6298,6 @@
     self._full_name_Commit = self._stub.GetFullMethodName(
         'Commit')
 
-    self._protorpc_Write = pywraprpc.RPC()
-    self._full_name_Write = self._stub.GetFullMethodName(
-        'Write')
-
     self._protorpc_RunQuery = pywraprpc.RPC()
     self._full_name_RunQuery = self._stub.GetFullMethodName(
         'RunQuery')
@@ -6363,14 +6306,22 @@
     self._full_name_ContinueQuery = self._stub.GetFullMethodName(
         'ContinueQuery')
 
-    self._protorpc_Get = pywraprpc.RPC()
-    self._full_name_Get = self._stub.GetFullMethodName(
-        'Get')
+    self._protorpc_Lookup = pywraprpc.RPC()
+    self._full_name_Lookup = self._stub.GetFullMethodName(
+        'Lookup')
 
     self._protorpc_AllocateIds = pywraprpc.RPC()
     self._full_name_AllocateIds = self._stub.GetFullMethodName(
         'AllocateIds')
 
+    self._protorpc_Get = pywraprpc.RPC()
+    self._full_name_Get = self._stub.GetFullMethodName(
+        'Get')
+
+    self._protorpc_Write = pywraprpc.RPC()
+    self._full_name_Write = self._stub.GetFullMethodName(
+        'Write')
+
   def BeginTransaction(self, request, rpc=None, callback=None, response=None):
     """Make a BeginTransaction RPC call.
 
@@ -6446,31 +6397,6 @@
                           callback,
                           self._protorpc_Commit)
 
-  def Write(self, request, rpc=None, callback=None, response=None):
-    """Make a Write RPC call.
-
-    Args:
-      request: a WriteRequest instance.
-      rpc: Optional RPC instance to use for the call.
-      callback: Optional final callback. Will be called as
-          callback(rpc, result) when the rpc completes. If None, the
-          call is synchronous.
-      response: Optional ProtocolMessage to be filled in with response.
-
-    Returns:
-      The WriteResponse if callback is None. Otherwise, returns None.
-    """
-
-    if response is None:
-      response = WriteResponse
-    return self._MakeCall(rpc,
-                          self._full_name_Write,
-                          'Write',
-                          request,
-                          response,
-                          callback,
-                          self._protorpc_Write)
-
   def RunQuery(self, request, rpc=None, callback=None, response=None):
     """Make a RunQuery RPC call.
 
@@ -6521,11 +6447,11 @@
                           callback,
                           self._protorpc_ContinueQuery)
 
-  def Get(self, request, rpc=None, callback=None, response=None):
-    """Make a Get RPC call.
+  def Lookup(self, request, rpc=None, callback=None, response=None):
+    """Make a Lookup RPC call.
 
     Args:
-      request: a GetRequest instance.
+      request: a LookupRequest instance.
       rpc: Optional RPC instance to use for the call.
       callback: Optional final callback. Will be called as
           callback(rpc, result) when the rpc completes. If None, the
@@ -6533,18 +6459,18 @@
       response: Optional ProtocolMessage to be filled in with response.
 
     Returns:
-      The GetResponse if callback is None. Otherwise, returns None.
+      The LookupResponse if callback is None. Otherwise, returns None.
     """
 
     if response is None:
-      response = GetResponse
+      response = LookupResponse
     return self._MakeCall(rpc,
-                          self._full_name_Get,
-                          'Get',
+                          self._full_name_Lookup,
+                          'Lookup',
                           request,
                           response,
                           callback,
-                          self._protorpc_Get)
+                          self._protorpc_Lookup)
 
   def AllocateIds(self, request, rpc=None, callback=None, response=None):
     """Make a AllocateIds RPC call.
@@ -6571,6 +6497,56 @@
                           callback,
                           self._protorpc_AllocateIds)
 
+  def Get(self, request, rpc=None, callback=None, response=None):
+    """Make a Get RPC call.
+
+    Args:
+      request: a LookupRequest instance.
+      rpc: Optional RPC instance to use for the call.
+      callback: Optional final callback. Will be called as
+          callback(rpc, result) when the rpc completes. If None, the
+          call is synchronous.
+      response: Optional ProtocolMessage to be filled in with response.
+
+    Returns:
+      The LookupResponse if callback is None. Otherwise, returns None.
+    """
+
+    if response is None:
+      response = LookupResponse
+    return self._MakeCall(rpc,
+                          self._full_name_Get,
+                          'Get',
+                          request,
+                          response,
+                          callback,
+                          self._protorpc_Get)
+
+  def Write(self, request, rpc=None, callback=None, response=None):
+    """Make a Write RPC call.
+
+    Args:
+      request: a WriteRequest instance.
+      rpc: Optional RPC instance to use for the call.
+      callback: Optional final callback. Will be called as
+          callback(rpc, result) when the rpc completes. If None, the
+          call is synchronous.
+      response: Optional ProtocolMessage to be filled in with response.
+
+    Returns:
+      The CommitResponse if callback is None. Otherwise, returns None.
+    """
+
+    if response is None:
+      response = CommitResponse
+    return self._MakeCall(rpc,
+                          self._full_name_Write,
+                          'Write',
+                          request,
+                          response,
+                          callback,
+                          self._protorpc_Write)
+
 
 class _DatastoreV4Service_ClientStub(_DatastoreV4Service_ClientBaseStub):
   __slots__ = ('_params',)
@@ -6605,11 +6581,12 @@
       'BeginTransaction': (BeginTransactionRequest, BeginTransactionResponse),
       'Rollback': (RollbackRequest, RollbackResponse),
       'Commit': (CommitRequest, CommitResponse),
-      'Write': (WriteRequest, WriteResponse),
       'RunQuery': (RunQueryRequest, RunQueryResponse),
       'ContinueQuery': (ContinueQueryRequest, ContinueQueryResponse),
-      'Get': (GetRequest, GetResponse),
+      'Lookup': (LookupRequest, LookupResponse),
       'AllocateIds': (AllocateIdsRequest, AllocateIdsResponse),
+      'Get': (LookupRequest, LookupResponse),
+      'Write': (WriteRequest, CommitResponse),
       }
 
   def __init__(self, *args, **kwargs):
@@ -6683,17 +6660,6 @@
     raise NotImplementedError
 
 
-  def Write(self, rpc, request, response):
-    """Handles a Write RPC call. You should override this.
-
-    Args:
-      rpc: a Stubby RPC object
-      request: a WriteRequest that contains the client request
-      response: a WriteResponse that should be modified to send the response
-    """
-    raise NotImplementedError
-
-
   def RunQuery(self, rpc, request, response):
     """Handles a RunQuery RPC call. You should override this.
 
@@ -6716,13 +6682,13 @@
     raise NotImplementedError
 
 
-  def Get(self, rpc, request, response):
-    """Handles a Get RPC call. You should override this.
+  def Lookup(self, rpc, request, response):
+    """Handles a Lookup RPC call. You should override this.
 
     Args:
       rpc: a Stubby RPC object
-      request: a GetRequest that contains the client request
-      response: a GetResponse that should be modified to send the response
+      request: a LookupRequest that contains the client request
+      response: a LookupResponse that should be modified to send the response
     """
     raise NotImplementedError
 
@@ -6737,6 +6703,28 @@
     """
     raise NotImplementedError
 
+
+  def Get(self, rpc, request, response):
+    """Handles a Get RPC call. You should override this.
+
+    Args:
+      rpc: a Stubby RPC object
+      request: a LookupRequest that contains the client request
+      response: a LookupResponse that should be modified to send the response
+    """
+    raise NotImplementedError
+
+
+  def Write(self, rpc, request, response):
+    """Handles a Write RPC call. You should override this.
+
+    Args:
+      rpc: a Stubby RPC object
+      request: a WriteRequest that contains the client request
+      response: a CommitResponse that should be modified to send the response
+    """
+    raise NotImplementedError
+
   def _AddMethodAttributes(self):
     """Sets attributes on Python RPC handlers.
 
@@ -6761,12 +6749,6 @@
         None,
         'none')
     rpcserver._GetHandlerDecorator(
-        self.Write.im_func,
-        WriteRequest,
-        WriteResponse,
-        None,
-        'none')
-    rpcserver._GetHandlerDecorator(
         self.RunQuery.im_func,
         RunQueryRequest,
         RunQueryResponse,
@@ -6779,9 +6761,9 @@
         None,
         'none')
     rpcserver._GetHandlerDecorator(
-        self.Get.im_func,
-        GetRequest,
-        GetResponse,
+        self.Lookup.im_func,
+        LookupRequest,
+        LookupResponse,
         None,
         'none')
     rpcserver._GetHandlerDecorator(
@@ -6790,8 +6772,20 @@
         AllocateIdsResponse,
         None,
         'none')
+    rpcserver._GetHandlerDecorator(
+        self.Get.im_func,
+        LookupRequest,
+        LookupResponse,
+        None,
+        'none')
+    rpcserver._GetHandlerDecorator(
+        self.Write.im_func,
+        WriteRequest,
+        CommitResponse,
+        None,
+        'none')
 
 if _extension_runtime:
   pass
 
-__all__ = ['Error','Mutation','MutationResult','EntityResult','Query','KindExpression','PropertyReference','PropertyExpression','PropertyOrder','Filter','CompositeFilter','PropertyFilter','GqlQuery','GqlQueryArg','QueryResultBatch','ReadOptions','GetRequest','GetResponse','RunQueryRequest','RunQueryResponse','ContinueQueryRequest','ContinueQueryResponse','BeginTransactionRequest','BeginTransactionResponse','RollbackRequest','RollbackResponse','CommitRequest','CommitResponse','WriteRequest','WriteResponse','AllocateIdsRequest','AllocateIdsResponse','DatastoreV4Service']
+__all__ = ['Error','Mutation','MutationResult','EntityResult','Query','KindExpression','PropertyReference','PropertyExpression','PropertyOrder','Filter','CompositeFilter','PropertyFilter','GqlQuery','GqlQueryArg','QueryResultBatch','ReadOptions','LookupRequest','LookupResponse','RunQueryRequest','RunQueryResponse','ContinueQueryRequest','ContinueQueryResponse','BeginTransactionRequest','BeginTransactionResponse','RollbackRequest','RollbackResponse','CommitRequest','CommitResponse','AllocateIdsRequest','AllocateIdsResponse','WriteRequest','DatastoreV4Service']
diff --git a/google/appengine/datastore/entity_v4_pb.py b/google/appengine/datastore/entity_v4_pb.py
index ce2972a..c998e2f 100644
--- a/google/appengine/datastore/entity_v4_pb.py
+++ b/google/appengine/datastore/entity_v4_pb.py
@@ -204,7 +204,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PartitionId'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlBhcnRpdGlvbklkExoKZGF0YXNldF9pZCADKAIwCTgBFBMaCW5hbWVzcGFjZSAEKAIwCTgBFHN6CUNvbnN0YW50c4sBkgERTUFYX0RJTUVOU0lPTl9UQUeYAWSMAXS6AYYHCiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8SF2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0IlgKC1BhcnRpdGlvbklkEhIKCmRhdGFzZXRfaWQYAyABKAkSEQoJbmFtZXNwYWNlGAQgASgJIiIKCUNvbnN0YW50cxIVChFNQVhfRElNRU5TSU9OX1RBRxBkIrgBCgNLZXkSOgoMcGFydGl0aW9uX2lkGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQSPgoMcGF0aF9lbGVtZW50GAIgAygLMiguYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5LlBhdGhFbGVtZW50GjUKC1BhdGhFbGVtZW50EgwKBGtpbmQYASACKAkSCgoCaWQYAiABKAMSDAoEbmFtZRgDIAEoCSLDAgoFVmFsdWUSFQoNYm9vbGVhbl92YWx1ZRgBIAEoCBIVCg1pbnRlZ2VyX3ZhbHVlGAIgASgDEhQKDGRvdWJsZV92YWx1ZRgDIAEoARIkChx0aW1lc3RhbXBfbWljcm9zZWNvbmRzX3ZhbHVlGAQgASgDEi8KCWtleV92YWx1ZRgFIAEoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRIWCg5ibG9iX2tleV92YWx1ZRgQIAEoCRIUCgxzdHJpbmdfdmFsdWUYESABKAkSEgoKYmxvYl92YWx1ZRgSIAEoDBI1CgxlbnRpdHlfdmFsdWUYBiABKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSDwoHbWVhbmluZxgOIAEoBRIVCgdpbmRleGVkGA8gASgIOgR0cnVlIl0KCFByb3BlcnR5EgwKBG5hbWUYASACKAkSFAoFbXVsdGkYAiABKAg6BWZhbHNlEi0KBXZhbHVlGAMgAygLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUiaAoGRW50aXR5EikKA2tleRgBIAEoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRIzCghwcm9wZXJ0eRgCIAMoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5QiEKH2NvbS5nb29nbGUuYXBwaG9zdGluZy5kYXRhc3RvcmU="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KI2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlBhcnRpdGlvbklkExoKZGF0YXNldF9pZCADKAIwCTgBFBMaCW5hbWVzcGFjZSAEKAIwCTgBFHN6CUNvbnN0YW50c4sBkgERTUFYX0RJTUVOU0lPTl9UQUeYAWSMAXS6AYAICiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8SF2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0IlgKC1BhcnRpdGlvbklkEhIKCmRhdGFzZXRfaWQYAyABKAkSEQoJbmFtZXNwYWNlGAQgASgJIiIKCUNvbnN0YW50cxIVChFNQVhfRElNRU5TSU9OX1RBRxBkIrgBCgNLZXkSOgoMcGFydGl0aW9uX2lkGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQSPgoMcGF0aF9lbGVtZW50GAIgAygLMiguYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5LlBhdGhFbGVtZW50GjUKC1BhdGhFbGVtZW50EgwKBGtpbmQYASACKAkSCgoCaWQYAiABKAMSDAoEbmFtZRgDIAEoCSL3AgoFVmFsdWUSFQoNYm9vbGVhbl92YWx1ZRgBIAEoCBIVCg1pbnRlZ2VyX3ZhbHVlGAIgASgDEhQKDGRvdWJsZV92YWx1ZRgDIAEoARIkChx0aW1lc3RhbXBfbWljcm9zZWNvbmRzX3ZhbHVlGAQgASgDEi8KCWtleV92YWx1ZRgFIAEoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRIWCg5ibG9iX2tleV92YWx1ZRgQIAEoCRIUCgxzdHJpbmdfdmFsdWUYESABKAkSEgoKYmxvYl92YWx1ZRgSIAEoDBI1CgxlbnRpdHlfdmFsdWUYBiABKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSMgoKbGlzdF92YWx1ZRgHIAMoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlEg8KB21lYW5pbmcYDiABKAUSFQoHaW5kZXhlZBgPIAEoCDoEdHJ1ZSKiAQoIUHJvcGVydHkSDAoEbmFtZRgBIAIoCRIfChBkZXByZWNhdGVkX211bHRpGAIgASgIOgVmYWxzZRI4ChBkZXByZWNhdGVkX3ZhbHVlGAMgAygLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUSLQoFdmFsdWUYBCABKAsyHi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZSJoCgZFbnRpdHkSKQoDa2V5GAEgASgLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5EjMKCHByb3BlcnR5GAIgAygLMiEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlCIQofY29tLmdvb2dsZS5hcHBob3N0aW5nLmRhdGFzdG9yZQ=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -632,6 +632,7 @@
   indexed_ = 1
 
   def __init__(self, contents=None):
+    self.list_value_ = []
     self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
@@ -764,6 +765,22 @@
 
   def has_entity_value(self): return self.has_entity_value_
 
+  def list_value_size(self): return len(self.list_value_)
+  def list_value_list(self): return self.list_value_
+
+  def list_value(self, i):
+    return self.list_value_[i]
+
+  def mutable_list_value(self, i):
+    return self.list_value_[i]
+
+  def add_list_value(self):
+    x = Value()
+    self.list_value_.append(x)
+    return x
+
+  def clear_list_value(self):
+    self.list_value_ = []
   def meaning(self): return self.meaning_
 
   def set_meaning(self, x):
@@ -802,6 +819,7 @@
     if (x.has_string_value()): self.set_string_value(x.string_value())
     if (x.has_blob_value()): self.set_blob_value(x.blob_value())
     if (x.has_entity_value()): self.mutable_entity_value().MergeFrom(x.entity_value())
+    for i in xrange(x.list_value_size()): self.add_list_value().CopyFrom(x.list_value(i))
     if (x.has_meaning()): self.set_meaning(x.meaning())
     if (x.has_indexed()): self.set_indexed(x.indexed())
 
@@ -852,6 +870,9 @@
     if self.has_blob_value_ and self.blob_value_ != x.blob_value_: return 0
     if self.has_entity_value_ != x.has_entity_value_: return 0
     if self.has_entity_value_ and self.entity_value_ != x.entity_value_: return 0
+    if len(self.list_value_) != len(x.list_value_): return 0
+    for e1, e2 in zip(self.list_value_, x.list_value_):
+      if e1 != e2: return 0
     if self.has_meaning_ != x.has_meaning_: return 0
     if self.has_meaning_ and self.meaning_ != x.meaning_: return 0
     if self.has_indexed_ != x.has_indexed_: return 0
@@ -862,6 +883,8 @@
     initialized = 1
     if (self.has_key_value_ and not self.key_value_.IsInitialized(debug_strs)): initialized = 0
     if (self.has_entity_value_ and not self.entity_value_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.list_value_:
+      if not p.IsInitialized(debug_strs): initialized=0
     return initialized
 
   def ByteSize(self):
@@ -875,6 +898,8 @@
     if (self.has_string_value_): n += 2 + self.lengthString(len(self.string_value_))
     if (self.has_blob_value_): n += 2 + self.lengthString(len(self.blob_value_))
     if (self.has_entity_value_): n += 1 + self.lengthString(self.entity_value_.ByteSize())
+    n += 1 * len(self.list_value_)
+    for i in xrange(len(self.list_value_)): n += self.lengthString(self.list_value_[i].ByteSize())
     if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
     if (self.has_indexed_): n += 2
     return n
@@ -890,6 +915,8 @@
     if (self.has_string_value_): n += 2 + self.lengthString(len(self.string_value_))
     if (self.has_blob_value_): n += 2 + self.lengthString(len(self.blob_value_))
     if (self.has_entity_value_): n += 1 + self.lengthString(self.entity_value_.ByteSizePartial())
+    n += 1 * len(self.list_value_)
+    for i in xrange(len(self.list_value_)): n += self.lengthString(self.list_value_[i].ByteSizePartial())
     if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
     if (self.has_indexed_): n += 2
     return n
@@ -904,6 +931,7 @@
     self.clear_string_value()
     self.clear_blob_value()
     self.clear_entity_value()
+    self.clear_list_value()
     self.clear_meaning()
     self.clear_indexed()
 
@@ -928,6 +956,10 @@
       out.putVarInt32(50)
       out.putVarInt32(self.entity_value_.ByteSize())
       self.entity_value_.OutputUnchecked(out)
+    for i in xrange(len(self.list_value_)):
+      out.putVarInt32(58)
+      out.putVarInt32(self.list_value_[i].ByteSize())
+      self.list_value_[i].OutputUnchecked(out)
     if (self.has_meaning_):
       out.putVarInt32(112)
       out.putVarInt32(self.meaning_)
@@ -965,6 +997,10 @@
       out.putVarInt32(50)
       out.putVarInt32(self.entity_value_.ByteSizePartial())
       self.entity_value_.OutputPartial(out)
+    for i in xrange(len(self.list_value_)):
+      out.putVarInt32(58)
+      out.putVarInt32(self.list_value_[i].ByteSizePartial())
+      self.list_value_[i].OutputPartial(out)
     if (self.has_meaning_):
       out.putVarInt32(112)
       out.putVarInt32(self.meaning_)
@@ -1008,6 +1044,12 @@
         d.skip(length)
         self.mutable_entity_value().TryMerge(tmp)
         continue
+      if tt == 58:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_list_value().TryMerge(tmp)
+        continue
       if tt == 112:
         self.set_meaning(d.getVarInt32())
         continue
@@ -1046,6 +1088,14 @@
       res+=prefix+"entity_value <\n"
       res+=self.entity_value_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    cnt=0
+    for e in self.list_value_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("list_value%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
     if self.has_meaning_: res+=prefix+("meaning: %s\n" % self.DebugFormatInt32(self.meaning_))
     if self.has_indexed_: res+=prefix+("indexed: %s\n" % self.DebugFormatBool(self.indexed_))
     return res
@@ -1063,6 +1113,7 @@
   kstring_value = 17
   kblob_value = 18
   kentity_value = 6
+  klist_value = 7
   kmeaning = 14
   kindexed = 15
 
@@ -1074,6 +1125,7 @@
     4: "timestamp_microseconds_value",
     5: "key_value",
     6: "entity_value",
+    7: "list_value",
     14: "meaning",
     15: "indexed",
     16: "blob_key_value",
@@ -1089,6 +1141,7 @@
     4: ProtocolBuffer.Encoder.NUMERIC,
     5: ProtocolBuffer.Encoder.STRING,
     6: ProtocolBuffer.Encoder.STRING,
+    7: ProtocolBuffer.Encoder.STRING,
     14: ProtocolBuffer.Encoder.NUMERIC,
     15: ProtocolBuffer.Encoder.NUMERIC,
     16: ProtocolBuffer.Encoder.STRING,
@@ -1101,7 +1154,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Value'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlExoNYm9vbGVhbl92YWx1ZSABKAAwCDgBFBMaDWludGVnZXJfdmFsdWUgAigAMAM4ARQTGgxkb3VibGVfdmFsdWUgAygBMAE4ARQTGhx0aW1lc3RhbXBfbWljcm9zZWNvbmRzX3ZhbHVlIAQoADADOAEUExoJa2V5X3ZhbHVlIAUoAjALOAFKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGg5ibG9iX2tleV92YWx1ZSAQKAIwCTgBFBMaDHN0cmluZ192YWx1ZSARKAIwCTgBFBMaCmJsb2JfdmFsdWUgEigCMAk4ARQTGgxlbnRpdHlfdmFsdWUgBigCMAs4AUoeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaB21lYW5pbmcgDigAMAU4ARQTGgdpbmRleGVkIA8oADAIOAFCBHRydWWjAaoBB2RlZmF1bHSyAQR0cnVlpAEUwgEjYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQ="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlExoNYm9vbGVhbl92YWx1ZSABKAAwCDgBFBMaDWludGVnZXJfdmFsdWUgAigAMAM4ARQTGgxkb3VibGVfdmFsdWUgAygBMAE4ARQTGhx0aW1lc3RhbXBfbWljcm9zZWNvbmRzX3ZhbHVlIAQoADADOAEUExoJa2V5X3ZhbHVlIAUoAjALOAFKG2FwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleaMBqgEFY3R5cGWyAQZwcm90bzKkARQTGg5ibG9iX2tleV92YWx1ZSAQKAIwCTgBFBMaDHN0cmluZ192YWx1ZSARKAIwCTgBFBMaCmJsb2JfdmFsdWUgEigCMAk4ARQTGgxlbnRpdHlfdmFsdWUgBigCMAs4AUoeYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaCmxpc3RfdmFsdWUgBygCMAs4A0odYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWWjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoHbWVhbmluZyAOKAAwBTgBFBMaB2luZGV4ZWQgDygAMAg4AUIEdHJ1ZaMBqgEHZGVmYXVsdLIBBHRydWWkARTCASNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZA=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -1109,11 +1162,14 @@
 class Property(ProtocolBuffer.ProtocolMessage):
   has_name_ = 0
   name_ = ""
-  has_multi_ = 0
-  multi_ = 0
+  has_deprecated_multi_ = 0
+  deprecated_multi_ = 0
+  has_value_ = 0
+  value_ = None
 
   def __init__(self, contents=None):
-    self.value_ = []
+    self.deprecated_value_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
   def name(self): return self.name_
@@ -1129,41 +1185,61 @@
 
   def has_name(self): return self.has_name_
 
-  def multi(self): return self.multi_
+  def deprecated_multi(self): return self.deprecated_multi_
 
-  def set_multi(self, x):
-    self.has_multi_ = 1
-    self.multi_ = x
+  def set_deprecated_multi(self, x):
+    self.has_deprecated_multi_ = 1
+    self.deprecated_multi_ = x
 
-  def clear_multi(self):
-    if self.has_multi_:
-      self.has_multi_ = 0
-      self.multi_ = 0
+  def clear_deprecated_multi(self):
+    if self.has_deprecated_multi_:
+      self.has_deprecated_multi_ = 0
+      self.deprecated_multi_ = 0
 
-  def has_multi(self): return self.has_multi_
+  def has_deprecated_multi(self): return self.has_deprecated_multi_
 
-  def value_size(self): return len(self.value_)
-  def value_list(self): return self.value_
+  def deprecated_value_size(self): return len(self.deprecated_value_)
+  def deprecated_value_list(self): return self.deprecated_value_
 
-  def value(self, i):
-    return self.value_[i]
+  def deprecated_value(self, i):
+    return self.deprecated_value_[i]
 
-  def mutable_value(self, i):
-    return self.value_[i]
+  def mutable_deprecated_value(self, i):
+    return self.deprecated_value_[i]
 
-  def add_value(self):
+  def add_deprecated_value(self):
     x = Value()
-    self.value_.append(x)
+    self.deprecated_value_.append(x)
     return x
 
+  def clear_deprecated_value(self):
+    self.deprecated_value_ = []
+  def value(self):
+    if self.value_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.value_ is None: self.value_ = Value()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.value_
+
+  def mutable_value(self): self.has_value_ = 1; return self.value()
+
   def clear_value(self):
-    self.value_ = []
+
+    if self.has_value_:
+      self.has_value_ = 0;
+      if self.value_ is not None: self.value_.Clear()
+
+  def has_value(self): return self.has_value_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_name()): self.set_name(x.name())
-    if (x.has_multi()): self.set_multi(x.multi())
-    for i in xrange(x.value_size()): self.add_value().CopyFrom(x.value(i))
+    if (x.has_deprecated_multi()): self.set_deprecated_multi(x.deprecated_multi())
+    for i in xrange(x.deprecated_value_size()): self.add_deprecated_value().CopyFrom(x.deprecated_value(i))
+    if (x.has_value()): self.mutable_value().MergeFrom(x.value())
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
@@ -1196,11 +1272,13 @@
     if x is self: return 1
     if self.has_name_ != x.has_name_: return 0
     if self.has_name_ and self.name_ != x.name_: return 0
-    if self.has_multi_ != x.has_multi_: return 0
-    if self.has_multi_ and self.multi_ != x.multi_: return 0
-    if len(self.value_) != len(x.value_): return 0
-    for e1, e2 in zip(self.value_, x.value_):
+    if self.has_deprecated_multi_ != x.has_deprecated_multi_: return 0
+    if self.has_deprecated_multi_ and self.deprecated_multi_ != x.deprecated_multi_: return 0
+    if len(self.deprecated_value_) != len(x.deprecated_value_): return 0
+    for e1, e2 in zip(self.deprecated_value_, x.deprecated_value_):
       if e1 != e2: return 0
+    if self.has_value_ != x.has_value_: return 0
+    if self.has_value_ and self.value_ != x.value_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -1209,16 +1287,18 @@
       initialized = 0
       if debug_strs is not None:
         debug_strs.append('Required field: name not set.')
-    for p in self.value_:
+    for p in self.deprecated_value_:
       if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_value_ and not self.value_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
     n += self.lengthString(len(self.name_))
-    if (self.has_multi_): n += 2
-    n += 1 * len(self.value_)
-    for i in xrange(len(self.value_)): n += self.lengthString(self.value_[i].ByteSize())
+    if (self.has_deprecated_multi_): n += 2
+    n += 1 * len(self.deprecated_value_)
+    for i in xrange(len(self.deprecated_value_)): n += self.lengthString(self.deprecated_value_[i].ByteSize())
+    if (self.has_value_): n += 1 + self.lengthString(self.value_.ByteSize())
     return n + 1
 
   def ByteSizePartial(self):
@@ -1226,38 +1306,48 @@
     if (self.has_name_):
       n += 1
       n += self.lengthString(len(self.name_))
-    if (self.has_multi_): n += 2
-    n += 1 * len(self.value_)
-    for i in xrange(len(self.value_)): n += self.lengthString(self.value_[i].ByteSizePartial())
+    if (self.has_deprecated_multi_): n += 2
+    n += 1 * len(self.deprecated_value_)
+    for i in xrange(len(self.deprecated_value_)): n += self.lengthString(self.deprecated_value_[i].ByteSizePartial())
+    if (self.has_value_): n += 1 + self.lengthString(self.value_.ByteSizePartial())
     return n
 
   def Clear(self):
     self.clear_name()
-    self.clear_multi()
+    self.clear_deprecated_multi()
+    self.clear_deprecated_value()
     self.clear_value()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
     out.putPrefixedString(self.name_)
-    if (self.has_multi_):
+    if (self.has_deprecated_multi_):
       out.putVarInt32(16)
-      out.putBoolean(self.multi_)
-    for i in xrange(len(self.value_)):
+      out.putBoolean(self.deprecated_multi_)
+    for i in xrange(len(self.deprecated_value_)):
       out.putVarInt32(26)
-      out.putVarInt32(self.value_[i].ByteSize())
-      self.value_[i].OutputUnchecked(out)
+      out.putVarInt32(self.deprecated_value_[i].ByteSize())
+      self.deprecated_value_[i].OutputUnchecked(out)
+    if (self.has_value_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.value_.ByteSize())
+      self.value_.OutputUnchecked(out)
 
   def OutputPartial(self, out):
     if (self.has_name_):
       out.putVarInt32(10)
       out.putPrefixedString(self.name_)
-    if (self.has_multi_):
+    if (self.has_deprecated_multi_):
       out.putVarInt32(16)
-      out.putBoolean(self.multi_)
-    for i in xrange(len(self.value_)):
+      out.putBoolean(self.deprecated_multi_)
+    for i in xrange(len(self.deprecated_value_)):
       out.putVarInt32(26)
-      out.putVarInt32(self.value_[i].ByteSizePartial())
-      self.value_[i].OutputPartial(out)
+      out.putVarInt32(self.deprecated_value_[i].ByteSizePartial())
+      self.deprecated_value_[i].OutputPartial(out)
+    if (self.has_value_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.value_.ByteSizePartial())
+      self.value_.OutputPartial(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -1266,13 +1356,19 @@
         self.set_name(d.getPrefixedString())
         continue
       if tt == 16:
-        self.set_multi(d.getBoolean())
+        self.set_deprecated_multi(d.getBoolean())
         continue
       if tt == 26:
         length = d.getVarInt32()
         tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
         d.skip(length)
-        self.add_value().TryMerge(tmp)
+        self.add_deprecated_value().TryMerge(tmp)
+        continue
+      if tt == 34:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_value().TryMerge(tmp)
         continue
 
 
@@ -1283,15 +1379,19 @@
   def __str__(self, prefix="", printElemNumber=0):
     res=""
     if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
-    if self.has_multi_: res+=prefix+("multi: %s\n" % self.DebugFormatBool(self.multi_))
+    if self.has_deprecated_multi_: res+=prefix+("deprecated_multi: %s\n" % self.DebugFormatBool(self.deprecated_multi_))
     cnt=0
-    for e in self.value_:
+    for e in self.deprecated_value_:
       elm=""
       if printElemNumber: elm="(%d)" % cnt
-      res+=prefix+("value%s <\n" % elm)
+      res+=prefix+("deprecated_value%s <\n" % elm)
       res+=e.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
       cnt+=1
+    if self.has_value_:
+      res+=prefix+"value <\n"
+      res+=self.value_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     return res
 
 
@@ -1299,29 +1399,32 @@
     return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
 
   kname = 1
-  kmulti = 2
-  kvalue = 3
+  kdeprecated_multi = 2
+  kdeprecated_value = 3
+  kvalue = 4
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
     1: "name",
-    2: "multi",
-    3: "value",
-  }, 3)
+    2: "deprecated_multi",
+    3: "deprecated_value",
+    4: "value",
+  }, 4)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
     1: ProtocolBuffer.Encoder.STRING,
     2: ProtocolBuffer.Encoder.NUMERIC,
     3: ProtocolBuffer.Encoder.STRING,
-  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Property'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5ExoEbmFtZSABKAIwCTgCFBMaBW11bHRpIAIoADAIOAFCBWZhbHNlowGqAQdkZWZhdWx0sgEFZmFsc2WkARQTGgV2YWx1ZSADKAIwCzgDSh1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5WYWx1ZaMBqgEFY3R5cGWyAQZwcm90bzKkARTCASNhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZA=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WiRhcHBob3N0aW5nL2RhdGFzdG9yZS9lbnRpdHlfdjQucHJvdG8KIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5ExoEbmFtZSABKAIwCTgCFBMaEGRlcHJlY2F0ZWRfbXVsdGkgAigAMAg4AUIFZmFsc2WjAaoBB2RlZmF1bHSyAQVmYWxzZaQBFBMaEGRlcHJlY2F0ZWRfdmFsdWUgAygCMAs4A0odYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWWjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoFdmFsdWUgBCgCMAs4AUodYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWWjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEjYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQ="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
diff --git a/google/appengine/ext/analytics/static/analytics_js.js b/google/appengine/ext/analytics/static/analytics_js.js
index 05cb1dd..aca2d10 100644
--- a/google/appengine/ext/analytics/static/analytics_js.js
+++ b/google/appengine/ext/analytics/static/analytics_js.js
@@ -1,23 +1,22 @@
-/* Copyright 2008-9 Google Inc. All Rights Reserved. */ (function(){var n,q=this,r=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";
-else if("function"==b&&"undefined"==typeof a.call)return"object";return b},s=function(a){return"string"==typeof a},t="closure_uid_"+(1E9*Math.random()>>>0),u=0,v=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},w=function(a,b){var c=a.split("."),e=q;c[0]in e||!e.execScript||e.execScript("var "+c[0]);for(var d;c.length&&(d=c.shift());)c.length||void 0===b?e=e[d]?e[d]:e[d]={}:e[d]=
-b},x=function(a,b){function c(){}c.prototype=b.prototype;a.k=b.prototype;a.prototype=new c};var y=function(a){Error.captureStackTrace?Error.captureStackTrace(this,y):this.stack=Error().stack||"";a&&(this.message=String(a))};x(y,Error);var aa=function(a,b){for(var c=a.split("%s"),e="",d=Array.prototype.slice.call(arguments,1);d.length&&1<c.length;)e+=c.shift()+d.shift();return e+c.join("%s")};var z=function(a,b){b.unshift(a);y.call(this,aa.apply(null,b));b.shift()};x(z,y);var A=function(a,b,c){if(!a){var e=Array.prototype.slice.call(arguments,2),d="Assertion failed";if(b)var d=d+(": "+b),f=e;throw new z(""+d,f||[]);}};var B=Array.prototype,C=B.indexOf?function(a,b,c){A(null!=a.length);return B.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(s(a))return s(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},ba=B.forEach?function(a,b,c){A(null!=a.length);B.forEach.call(a,b,c)}:function(a,b,c){for(var e=a.length,d=s(a)?a.split(""):a,f=0;f<e;f++)f in d&&b.call(c,d[f],f,a)},ca=B.filter?function(a,b,c){A(null!=a.length);return B.filter.call(a,b,
-c)}:function(a,b,c){for(var e=a.length,d=[],f=0,g=s(a)?a.split(""):a,h=0;h<e;h++)if(h in g){var p=g[h];b.call(c,p,h,a)&&(d[f++]=p)}return d},D=function(a,b){var c=C(a,b),e;if(e=0<=c)A(null!=a.length),B.splice.call(a,c,1);return e},E=function(a){var b=a.length;if(0<b){for(var c=Array(b),e=0;e<b;e++)c[e]=a[e];return c}return[]},da=function(a,b,c){A(null!=a.length);return 2>=arguments.length?B.slice.call(a,b):B.slice.call(a,b,c)};var F,G,H,I,ea=function(){return q.navigator?q.navigator.userAgent:null};I=H=G=F=!1;var J;if(J=ea()){var fa=q.navigator;F=0==J.lastIndexOf("Opera",0);G=!F&&(-1!=J.indexOf("MSIE")||-1!=J.indexOf("Trident"));H=!F&&-1!=J.indexOf("WebKit");I=!F&&!H&&!G&&"Gecko"==fa.product}var ga=F,K=G,L=I,M=H,ha=function(){var a=q.document;return a?a.documentMode:void 0},N;
-n:{var O="",P;if(ga&&q.opera)var Q=q.opera.version,O="function"==typeof Q?Q():Q;else if(L?P=/rv\:([^\);]+)(\)|;)/:K?P=/\b(?:MSIE|rv)\s+([^\);]+)(\)|;)/:M&&(P=/WebKit\/(\S+)/),P)var ia=P.exec(ea()),O=ia?ia[1]:"";if(K){var ja=ha();if(ja>parseFloat(O)){N=String(ja);break n}}N=O}
-var ka=N,la={},R=function(a){var b;if(!(b=la[a])){b=0;for(var c=String(ka).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),e=String(a).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),d=Math.max(c.length,e.length),f=0;0==b&&f<d;f++){var g=c[f]||"",h=e[f]||"",p=RegExp("(\\d*)(\\D*)","g"),l=RegExp("(\\d*)(\\D*)","g");do{var k=p.exec(g)||["","",""],m=l.exec(h)||["","",""];if(0==k[0].length&&0==m[0].length)break;b=((0==k[1].length?0:parseInt(k[1],10))<(0==m[1].length?0:parseInt(m[1],10))?-1:(0==k[1].length?
-0:parseInt(k[1],10))>(0==m[1].length?0:parseInt(m[1],10))?1:0)||((0==k[2].length)<(0==m[2].length)?-1:(0==k[2].length)>(0==m[2].length)?1:0)||(k[2]<m[2]?-1:k[2]>m[2]?1:0)}while(0==b)}b=la[a]=0<=b}return b},ma=q.document,na=ma&&K?ha()||("CSS1Compat"==ma.compatMode?parseInt(ka,10):5):void 0;!L&&!K||K&&K&&9<=na||L&&R("1.9.1");K&&R("9");var oa=function(a){a=a.className;return s(a)&&a.match(/\S+/g)||[]},pa=function(a,b){for(var c=oa(a),e=da(arguments,1),d=c,f=0;f<e.length;f++)0<=C(d,e[f])||d.push(e[f]);c=c.join(" ");a.className=c},ra=function(a,b){var c=oa(a),e=da(arguments,1),c=qa(c,e).join(" ");a.className=c},qa=function(a,b){return ca(a,function(a){return!(0<=C(b,a))})};var S=function(a,b,c){var e=document;c=c||e;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var e={},d=0,f=0,g;g=c[f];f++)a==g.nodeName&&(e[d++]=g);e.length=d;return e}return c}c=c.getElementsByTagName(a||"*");if(b){e={};for(f=d=0;g=c[f];f++)a=g.className,"function"==typeof a.split&&0<=C(a.split(/\s+/),b)&&(e[d++]=g);e.length=d;return e}return c};var T=function(a){T[" "](a);return a};T[" "]=function(){};var sa=!K||K&&9<=na,ta=K&&!R("9");!M||R("528");L&&R("1.9b")||K&&R("8")||ga&&R("9.5")||M&&R("528");L&&!R("8")||K&&R("9");var U=function(a,b){this.type=a;this.currentTarget=this.target=b};U.prototype.f=!1;U.prototype.defaultPrevented=!1;U.prototype.preventDefault=function(){this.defaultPrevented=!0};var V=function(a,b){if(a){var c=this.type=a.type;U.call(this,c);this.target=a.target||a.srcElement;this.currentTarget=b;var e=a.relatedTarget;if(e){if(L){var d;n:{try{T(e.nodeName);d=!0;break n}catch(f){}d=!1}d||(e=null)}}else"mouseover"==c?e=a.fromElement:"mouseout"==c&&(e=a.toElement);this.relatedTarget=e;this.offsetX=M||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=M||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 
0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:
-a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.state=a.state;this.i=a;a.defaultPrevented&&this.preventDefault();delete this.f}};x(V,U);n=V.prototype;n.target=null;n.relatedTarget=null;n.offsetX=0;n.offsetY=0;n.clientX=0;n.clientY=0;n.screenX=0;n.screenY=0;n.button=0;n.keyCode=0;
-n.charCode=0;n.ctrlKey=!1;n.altKey=!1;n.shiftKey=!1;n.metaKey=!1;n.i=null;n.preventDefault=function(){V.k.preventDefault.call(this);var a=this.i;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,ta)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var ua="closure_listenable_"+(1E6*Math.random()|0),va=0;var wa=function(a,b,c,e,d,f){this.b=a;this.h=b;this.src=c;this.type=e;this.capture=!!d;this.d=f;this.key=++va;this.c=this.g=!1},xa=function(a){a.c=!0;a.b=null;a.h=null;a.src=null;a.d=null};var ya={},W={},X={},Y={},Aa=function(){var a=za,b=sa?function(c){return a.call(b.src,b.b,c)}:function(c){c=a.call(b.src,b.b,c);if(!c)return c};return b},Ba=function(a,b,c,e,d){if("array"==r(b))for(var f=0;f<b.length;f++)Ba(a,b[f],c,e,d);else if(c=Ca(c),a&&a[ua])a.j.add(b,c,!0,e,d);else n:{if(!b)throw Error("Invalid event type");e=!!e;var g=W;b in g||(g[b]={a:0});g=g[b];e in g||(g[e]={a:0},g.a++);var g=g[e],f=a[t]||(a[t]=++u),h;if(g[f]){h=g[f];for(var p=0;p<h.length;p++)if(g=h[p],g.b==c&&g.d==d){if(g.c)break;
-break n}}else h=g[f]=[],g.a++;p=Aa();g=new wa(c,p,a,b,e,d);g.g=!0;p.src=a;p.b=g;h.push(g);X[f]||(X[f]=[]);X[f].push(g);a.addEventListener?a.addEventListener(b,p,e):a.attachEvent(b in Y?Y[b]:Y[b]="on"+b,p);ya[g.key]=g}},Da=function(a,b,c){var e=1;b=b[t]||(b[t]=++u);if(a[b])for(a=E(a[b]),b=0;b<a.length;b++){var d=a[b];d&&!d.c&&(e&=!1!==Z(d,c))}return Boolean(e)},Z=function(a,b){var c=a.b,e=a.d||a.src;if(a.g&&"number"!=typeof a&&a&&!a.c){var d=a.src;if(d&&d[ua]){var f=d.j,g=a.type;g in f.e&&D(f.e[g],
-a)&&(xa(a),0==f.e[g].length&&delete f.e[g])}else{var f=a.type,h=a.h,g=a.capture;d.removeEventListener?d.removeEventListener(f,h,g):d.detachEvent&&d.detachEvent(f in Y?Y[f]:Y[f]="on"+f,h);d=d[t]||(d[t]=++u);X[d]&&(h=X[d],D(h,a),0==h.length&&delete X[d]);xa(a);if(h=W[f][g][d])D(h,a),0==h.length&&(delete W[f][g][d],W[f][g].a--),0==W[f][g].a&&(delete W[f][g],W[f].a--),0==W[f].a&&delete W[f];delete ya[a.key]}}return c.call(e,b)},za=function(a,b){if(a.c)return!0;var c=a.type,e=W;if(!(c in e))return!0;var e=
-e[c],d,f;if(!sa){if(!(c=b))n:{for(var c=["window","event"],g=q;d=c.shift();)if(null!=g[d])g=g[d];else{c=null;break n}c=g}d=c;c=!0 in e;g=!1 in e;if(c){if(0>d.keyCode||void 0!=d.returnValue)return!0;n:{var h=!1;if(0==d.keyCode)try{d.keyCode=-1;break n}catch(p){h=!0}if(h||void 0==d.returnValue)d.returnValue=!0}}h=new V(d,this);d=!0;try{if(c){for(var l=[],k=h.currentTarget;k;k=k.parentNode)l.push(k);f=e[!0];for(var m=l.length-1;!h.f&&0<=m;m--)h.currentTarget=l[m],d&=Da(f,l[m],h);if(g)for(f=e[!1],m=0;!h.f&&
-m<l.length;m++)h.currentTarget=l[m],d&=Da(f,l[m],h)}else d=Z(a,h)}finally{l&&(l.length=0)}return d}return Z(a,new V(b,this))},Ea="__closure_events_fn_"+(1E9*Math.random()>>>0),Ca=function(a){A(a,"Listener can not be null.");if("function"==r(a))return a;A(a.handleEvent,"An object listener must have handleEvent method.");return a[Ea]||(a[Ea]=function(b){return a.handleEvent(b)})};var $=function(){};$.l=function(){$.m||($.m=new $)};$.l();K||M&&R("525");w("ae.init",function(){Fa();Ga();Ba(window,"load",function(){});Ha()});
-var Fa=function(){var a;if(a=s("ae-content")?document.getElementById("ae-content"):"ae-content"){a=S("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=S("tbody",null,c);for(var e=0,d;d=c[e];e++){d=S("tr",null,d);for(var f=0,g;g=d[f];f++)f%2&&pa(g,"ae-even")}}}},Ga=function(){var a=S(null,"ae-noscript",void 0);ba(E(a),function(a){ra(a,"ae-noscript")})},Ha=function(){q._gaq=q._gaq||[];q._gaq.push(function(){q._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=
-document.createElement("script");a.src=("https:"==document.location.protocol?"https://ssl":"http://www")+".google-analytics.com/ga.js";a.setAttribute("async","true");document.documentElement.firstChild.appendChild(a)})()};w("ae.trackPageView",function(){q._gaq&&q._gaq._getAsyncTracker("ae")._trackPageview()});var Ja=function(a){if(void 0==a||null==a||0==a.length)return 0;a=Math.max.apply(Math,a);return Ia(a)},Ia=function(a){var b=5;2>b&&(b=2);b-=1;return Math.ceil(a/b)*b},Ka=function(a,b,c){a=a.getSelection();1==a.length&&(a=a[0],null!=a.row&&(null!=b.starttime&&(c+="&starttime="+b.starttime),null!=b.endtime&&(c+="&endtime="+b.endtime),null!=b.latency_lower&&(c+="&latency_lower="+b.latency_lower),null!=b.latency_upper&&(c+="&latency_upper="+b.latency_upper),b=c+"&detail="+a.row,window.location.href=b))},
-La=function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("string","");f.addColumn("number","");f.addColumn({type:"string",role:"tooltip"});for(var g=0;g<b.length;g++)f.addRow(["",b[g],c[g]]);c=Math.max(10*b.length,200);b=Ja(b);a=new google.visualization.ColumnChart(document.getElementById("rpctime-"+a));a.draw(f,{height:100,width:c,legend:"none",chartArea:{left:40},fontSize:11,vAxis:{minValue:0,maxValue:b,gridlines:{count:5}}});google.visualization.events.addListener(a,"select",
-v(Ka,a,e,d))};w("ae.Charts.latencyHistogram",function(a,b,c){var e=new google.visualization.DataTable;e.addColumn("string","");e.addColumn("number","");for(var d=0;d<b.length;d++)e.addRow([""+a[d],b[d]]);for(d=b.length;d<a.length;d++)e.addRow([""+a[d],0]);b=Ja(b);(new google.visualization.ColumnChart(document.getElementById("latency-"+c))).draw(e,{legend:"none",width:20*a.length,height:200,vAxis:{maxValue:b,gridlines:{count:5}}})});
-w("ae.Charts.latencyTimestampScatter",function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("number","Time (seconds from start)");f.addColumn("number","Latency");for(var g=0;g<a.length;g++){var h=Math.round(a[g]-c);f.addRow([h,b[g]])}a=e.starttime?e.starttime:0;b=new google.visualization.ScatterChart(document.getElementById("LatencyVsTimestamp"));b.draw(f,{hAxis:{title:"Time (seconds from start of recording)",minValue:a},vAxis:{title:"Request Latency (milliseconds)",minValue:0},
-tooltip:{trigger:"none"},legend:"none"});google.visualization.events.addListener(b,"select",v(Ka,b,e,d))});
-w("ae.Charts.entityCountBarChart",function(a,b,c,e){var d=new google.visualization.DataTable;d.addColumn("string","");d.addColumn("number","Reads");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Misses");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Writes");d.addColumn({type:"string",role:"tooltip"});var f=50;f>b.length&&(f=b.length);for(var g=0;g<f;g++)d.addRow(["",b[g][1]-b[g][3],b[g][0],b[g][3],b[g][0],b[g][2],b[g][0]]);b=20*f;f=b+130;a=new google.visualization.ColumnChart(document.getElementById(e+
-"-"+a));c=Ia(c);a.draw(d,{height:100,width:f,chartArea:{width:b},fontSize:10,isStacked:!0,vAxis:{minValue:0,maxValue:c,gridlines:{count:5}}})});
-w("ae.Charts.rpcVariationCandlestick",function(a){var b=new google.visualization.DataTable;b.addColumn("string","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addRows(a);(new google.visualization.CandlestickChart(document.getElementById("rpcvariation"))).draw(b,{vAxis:{title:"RPC Latency variation (milliseconds)"},hAxis:{textPosition:"out",slantedText:!0,slantedTextAngle:45,textStyle:{fontSize:13}},height:250,chartArea:{top:10,height:100},
-legend:"none",tooltip:{trigger:"none"}})});w("ae.Charts.totalTimeBarChart",function(a,b,c,e){for(var d=[],f=0;f<b.length;f++)d[f]=b[f]+" milliseconds";La(a,b,d,c,e)});w("ae.Charts.rpcTimeBarChart",function(a,b,c,e,d){var f=[],g=[],h=c.indices,p=c.times;c=c.stats;for(var l=0;l<b;l++)f[l]=0,g[l]=null;for(l=0;l<h.length;l++){f[h[l]]=p[l];b=c[l];var k="Calls: "+b[0];if(0<b[1]||0<b[2]||0<b[3])k+=" Entities";0<b[1]&&(k+=" R:"+b[1]);0<b[2]&&(k+=" W:"+b[2]);0<b[3]&&(k+=" M:"+b[3]);g[h[l]]=k}La(a,f,g,e,d)});})();
+/* Copyright 2008-9 Google Inc. All Rights Reserved. */ (function(){var k,m=this,n=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";
+else if("function"==b&&"undefined"==typeof a.call)return"object";return b},q=function(a){return"string"==typeof a},r="closure_uid_"+(1E9*Math.random()>>>0),t=0,u=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);return a.apply(this,b)}},v=function(a,b){var c=a.split("."),e=m;c[0]in e||!e.execScript||e.execScript("var "+c[0]);for(var d;c.length&&(d=c.shift());)c.length||void 0===b?e=e[d]?e[d]:e[d]={}:e[d]=b},w=function(a,b){function c(){}
+c.prototype=b.prototype;a.l=b.prototype;a.prototype=new c};var y=function(a){Error.captureStackTrace?Error.captureStackTrace(this,y):this.stack=Error().stack||"";a&&(this.message=String(a))};w(y,Error);var aa=function(a,b){for(var c=a.split("%s"),e="",d=Array.prototype.slice.call(arguments,1);d.length&&1<c.length;)e+=c.shift()+d.shift();return e+c.join("%s")};var z=function(a,b){b.unshift(a);y.call(this,aa.apply(null,b));b.shift()};w(z,y);var A=function(a,b,c){if(!a){var e=Array.prototype.slice.call(arguments,2),d="Assertion failed";if(b)var d=d+(": "+b),f=e;throw new z(""+d,f||[]);}};var B=Array.prototype,C=B.indexOf?function(a,b,c){A(null!=a.length);return B.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(q(a))return q(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},ba=B.forEach?function(a,b,c){A(null!=a.length);B.forEach.call(a,b,c)}:function(a,b,c){for(var e=a.length,d=q(a)?a.split(""):a,f=0;f<e;f++)f in d&&b.call(c,d[f],f,a)},ca=B.filter?function(a,b,c){A(null!=a.length);return B.filter.call(a,b,
+c)}:function(a,b,c){for(var e=a.length,d=[],f=0,g=q(a)?a.split(""):a,h=0;h<e;h++)if(h in g){var x=g[h];b.call(c,x,h,a)&&(d[f++]=x)}return d},D=function(a){var b=a.length;if(0<b){for(var c=Array(b),e=0;e<b;e++)c[e]=a[e];return c}return[]},E=function(a,b,c){A(null!=a.length);return 2>=arguments.length?B.slice.call(a,b):B.slice.call(a,b,c)};var F,G,H,I,J=function(){return m.navigator?m.navigator.userAgent:null};I=H=G=F=!1;var K;if(K=J()){var da=m.navigator;F=0==K.lastIndexOf("Opera",0);G=!F&&(-1!=K.indexOf("MSIE")||-1!=K.indexOf("Trident"));H=!F&&-1!=K.indexOf("WebKit");I=!F&&!H&&!G&&"Gecko"==da.product}var L=F,M=G,N=I,O=H,ea=m.navigator,fa=-1!=(ea&&ea.platform||"").indexOf("Mac"),ga=function(){var a=m.document;return a?a.documentMode:void 0},P;
+n:{var Q="",R;if(L&&m.opera)var S=m.opera.version,Q="function"==typeof S?S():S;else if(N?R=/rv\:([^\);]+)(\)|;)/:M?R=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:O&&(R=/WebKit\/(\S+)/),R)var ha=R.exec(J()),Q=ha?ha[1]:"";if(M){var ia=ga();if(ia>parseFloat(Q)){P=String(ia);break n}}P=Q}
+var ja=P,ka={},T=function(a){var b;if(!(b=ka[a])){b=0;for(var c=String(ja).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),e=String(a).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),d=Math.max(c.length,e.length),f=0;0==b&&f<d;f++){var g=c[f]||"",h=e[f]||"",x=RegExp("(\\d*)(\\D*)","g"),p=RegExp("(\\d*)(\\D*)","g");do{var l=x.exec(g)||["","",""],s=p.exec(h)||["","",""];if(0==l[0].length&&0==s[0].length)break;b=((0==l[1].length?0:parseInt(l[1],10))<(0==s[1].length?0:parseInt(s[1],10))?-1:(0==l[1].length?
+0:parseInt(l[1],10))>(0==s[1].length?0:parseInt(s[1],10))?1:0)||((0==l[2].length)<(0==s[2].length)?-1:(0==l[2].length)>(0==s[2].length)?1:0)||(l[2]<s[2]?-1:l[2]>s[2]?1:0)}while(0==b)}b=ka[a]=0<=b}return b},la=m.document,ma=la&&M?ga()||("CSS1Compat"==la.compatMode?parseInt(ja,10):5):void 0;!N&&!M||M&&M&&9<=ma||N&&T("1.9.1");M&&T("9");var na=function(a){a=a.className;return q(a)&&a.match(/\S+/g)||[]},oa=function(a,b){for(var c=na(a),e=E(arguments,1),d=c,f=0;f<e.length;f++)0<=C(d,e[f])||d.push(e[f]);c=c.join(" ");a.className=c},qa=function(a,b){var c=na(a),e=E(arguments,1),c=pa(c,e).join(" ");a.className=c},pa=function(a,b){return ca(a,function(a){return!(0<=C(b,a))})};var U=function(a,b,c){var e=document;c=c||e;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var e={},d=0,f=0,g;g=c[f];f++)a==g.nodeName&&(e[d++]=g);e.length=d;return e}return c}c=c.getElementsByTagName(a||"*");if(b){e={};for(f=d=0;g=c[f];f++)a=g.className,"function"==typeof a.split&&0<=C(a.split(/\s+/),b)&&(e[d++]=g);e.length=d;return e}return c};var V=function(a){V[" "](a);return a};V[" "]=function(){};var ra=!M||M&&9<=ma,sa=M&&!T("9");!O||T("528");N&&T("1.9b")||M&&T("8")||L&&T("9.5")||O&&T("528");N&&!T("8")||M&&T("9");var W=function(a,b){this.type=a;this.currentTarget=this.target=b};W.prototype.h=!1;W.prototype.defaultPrevented=!1;W.prototype.o=!0;W.prototype.preventDefault=function(){this.defaultPrevented=!0;this.o=!1};var X=function(a,b){if(a){var c=this.type=a.type;W.call(this,c);this.target=a.target||a.srcElement;this.currentTarget=b;var e=a.relatedTarget;if(e){if(N){var d;n:{try{V(e.nodeName);d=!0;break n}catch(f){}d=!1}d||(e=null)}}else"mouseover"==c?e=a.fromElement:"mouseout"==c&&(e=a.toElement);this.relatedTarget=e;this.offsetX=O||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=O||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 
0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:
+a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.k=fa?a.metaKey:a.ctrlKey;this.state=a.state;this.i=a;a.defaultPrevented&&this.preventDefault();delete this.h}};w(X,W);k=X.prototype;k.target=null;k.relatedTarget=null;k.offsetX=0;k.offsetY=0;k.clientX=0;k.clientY=0;k.screenX=0;k.screenY=0;
+k.button=0;k.keyCode=0;k.charCode=0;k.ctrlKey=!1;k.altKey=!1;k.shiftKey=!1;k.metaKey=!1;k.k=!1;k.i=null;k.preventDefault=function(){X.l.preventDefault.call(this);var a=this.i;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,sa)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var ta="closure_listenable_"+(1E6*Math.random()|0),ua=0;var va=function(a,b,c,e,d){this.b=a;this.d=null;this.src=b;this.type=c;this.capture=!!e;this.e=d;this.key=++ua;this.c=this.f=!1},wa=function(a){a.c=!0;a.b=null;a.d=null;a.src=null;a.e=null};var xa=function(a){this.src=a;this.a={};this.g=0};xa.prototype.add=function(a,b,c,e,d){var f=this.a[a];f||(f=this.a[a]=[],this.g++);var g;n:{for(g=0;g<f.length;++g){var h=f[g];if(!h.c&&h.b==b&&h.capture==!!e&&h.e==d)break n}g=-1}-1<g?(a=f[g],c||(a.f=!1)):(a=new va(b,this.src,a,!!e,d),a.f=c,f.push(a));return a};var ya=function(a,b){var c=b.type;if(c in a.a){var e=a.a[c],d=C(e,b),f;if(f=0<=d)A(null!=e.length),B.splice.call(e,d,1);f&&(wa(b),0==a.a[c].length&&(delete a.a[c],a.g--))}};var za={},Y={},Z={},Ba=function(){var a=Aa,b=ra?function(c){return a.call(b.src,b.b,c)}:function(c){c=a.call(b.src,b.b,c);if(!c)return c};return b},Ca=function(a,b,c,e,d){if("array"==n(b))for(var f=0;f<b.length;f++)Ca(a,b[f],c,e,d);else if(c=Da(c),a&&a[ta])a.j.add(b,c,!0,e,d);else{if(!b)throw Error("Invalid event type");var f=!!e,g=a[r]||(a[r]=++t),h=Y[g];h||(Y[g]=h=new xa(a));c=h.add(b,c,!0,e,d);c.d||(e=Ba(),c.d=e,e.src=a,e.b=c,a.addEventListener?a.addEventListener(b,e,f):a.attachEvent(b in Z?Z[b]:
+Z[b]="on"+b,e),za[c.key]=c)}},Ga=function(a,b,c,e){var d=1;if(a=Ea(a))if(b=a.a[b])for(b=D(b),a=0;a<b.length;a++){var f=b[a];f&&(f.capture==c&&!f.c)&&(d&=!1!==Fa(f,e))}return Boolean(d)},Fa=function(a,b){var c=a.b,e=a.e||a.src;if(a.f&&"number"!=typeof a&&a&&!a.c){var d=a.src;if(d&&d[ta])ya(d.j,a);else{var f=a.type,g=a.d;d.removeEventListener?d.removeEventListener(f,g,a.capture):d.detachEvent&&d.detachEvent(f in Z?Z[f]:Z[f]="on"+f,g);(f=Ea(d))?(ya(f,a),0==f.g&&(f.src=null,delete Y[d[r]||(d[r]=++t)])):
+wa(a);delete za[a.key]}}return c.call(e,b)},Aa=function(a,b){if(a.c)return!0;if(!ra){var c;if(!(c=b))n:{c=["window","event"];for(var e=m,d;d=c.shift();)if(null!=e[d])e=e[d];else{c=null;break n}c=e}d=c;c=new X(d,this);e=!0;if(!(0>d.keyCode||void 0!=d.returnValue)){n:{var f=!1;if(0==d.keyCode)try{d.keyCode=-1;break n}catch(g){f=!0}if(f||void 0==d.returnValue)d.returnValue=!0}d=[];for(f=c.currentTarget;f;f=f.parentNode)d.push(f);for(var f=a.type,h=d.length-1;!c.h&&0<=h;h--)c.currentTarget=d[h],e&=Ga(d[h],
+f,!0,c);for(h=0;!c.h&&h<d.length;h++)c.currentTarget=d[h],e&=Ga(d[h],f,!1,c)}return e}return Fa(a,new X(b,this))},Ea=function(a){return a[r]?Y[a[r]||(a[r]=++t)]||null:null},Ha="__closure_events_fn_"+(1E9*Math.random()>>>0),Da=function(a){A(a,"Listener can not be null.");if("function"==n(a))return a;A(a.handleEvent,"An object listener must have handleEvent method.");return a[Ha]||(a[Ha]=function(b){return a.handleEvent(b)})};var $=function(){};$.m=function(){$.n||($.n=new $)};$.m();M||O&&T("525");v("ae.init",function(){Ia();Ja();Ca(window,"load",function(){});Ka()});
+var Ia=function(){var a;a=document;if(a=q("ae-content")?a.getElementById("ae-content"):"ae-content"){a=U("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=U("tbody",null,c);for(var e=0,d;d=c[e];e++){d=U("tr",null,d);for(var f=0,g;g=d[f];f++)f%2&&oa(g,"ae-even")}}}},Ja=function(){var a=U(null,"ae-noscript",void 0);ba(D(a),function(a){qa(a,"ae-noscript")})},Ka=function(){m._gaq=m._gaq||[];m._gaq.push(function(){m._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=
+document.createElement("script");a.src=("https:"==document.location.protocol?"https://ssl":"http://www")+".google-analytics.com/ga.js";a.setAttribute("async","true");document.documentElement.firstChild.appendChild(a)})()};v("ae.trackPageView",function(){m._gaq&&m._gaq._getAsyncTracker("ae")._trackPageview()});var Ma=function(a){if(void 0==a||null==a||0==a.length)return 0;a=Math.max.apply(Math,a);return La(a)},La=function(a){var b=5;2>b&&(b=2);b-=1;return Math.ceil(a/b)*b},Na=function(a,b,c){a=a.getSelection();1==a.length&&(a=a[0],null!=a.row&&(null!=b.starttime&&(c+="&starttime="+b.starttime),null!=b.endtime&&(c+="&endtime="+b.endtime),null!=b.latency_lower&&(c+="&latency_lower="+b.latency_lower),null!=b.latency_upper&&(c+="&latency_upper="+b.latency_upper),b=c+"&detail="+a.row,window.location.href=b))},
+Oa=function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("string","");f.addColumn("number","");f.addColumn({type:"string",role:"tooltip"});for(var g=0;g<b.length;g++)f.addRow(["",b[g],c[g]]);c=Math.max(10*b.length,200);b=Ma(b);a=new google.visualization.ColumnChart(document.getElementById("rpctime-"+a));a.draw(f,{height:100,width:c,legend:"none",chartArea:{left:40},fontSize:11,vAxis:{minValue:0,maxValue:b,gridlines:{count:5}}});google.visualization.events.addListener(a,"select",
+u(Na,a,e,d))};v("ae.Charts.latencyHistogram",function(a,b,c){var e=new google.visualization.DataTable;e.addColumn("string","");e.addColumn("number","");for(var d=0;d<b.length;d++)e.addRow([""+a[d],b[d]]);for(d=b.length;d<a.length;d++)e.addRow([""+a[d],0]);b=Ma(b);(new google.visualization.ColumnChart(document.getElementById("latency-"+c))).draw(e,{legend:"none",width:20*a.length,height:200,vAxis:{maxValue:b,gridlines:{count:5}}})});
+v("ae.Charts.latencyTimestampScatter",function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("number","Time (seconds from start)");f.addColumn("number","Latency");for(var g=0;g<a.length;g++){var h=Math.round(a[g]-c);f.addRow([h,b[g]])}a=e.starttime?e.starttime:0;b=new google.visualization.ScatterChart(document.getElementById("LatencyVsTimestamp"));b.draw(f,{hAxis:{title:"Time (seconds from start of recording)",minValue:a},vAxis:{title:"Request Latency (milliseconds)",minValue:0},
+tooltip:{trigger:"none"},legend:"none"});google.visualization.events.addListener(b,"select",u(Na,b,e,d))});
+v("ae.Charts.entityCountBarChart",function(a,b,c,e){var d=new google.visualization.DataTable;d.addColumn("string","");d.addColumn("number","Reads");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Misses");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Writes");d.addColumn({type:"string",role:"tooltip"});var f=50;f>b.length&&(f=b.length);for(var g=0;g<f;g++)d.addRow(["",b[g][1]-b[g][3],b[g][0],b[g][3],b[g][0],b[g][2],b[g][0]]);b=20*f;f=b+130;a=new google.visualization.ColumnChart(document.getElementById(e+
+"-"+a));c=La(c);a.draw(d,{height:100,width:f,chartArea:{width:b},fontSize:10,isStacked:!0,vAxis:{minValue:0,maxValue:c,gridlines:{count:5}}})});
+v("ae.Charts.rpcVariationCandlestick",function(a){var b=new google.visualization.DataTable;b.addColumn("string","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addRows(a);(new google.visualization.CandlestickChart(document.getElementById("rpcvariation"))).draw(b,{vAxis:{title:"RPC Latency variation (milliseconds)"},hAxis:{textPosition:"out",slantedText:!0,slantedTextAngle:45,textStyle:{fontSize:13}},height:250,chartArea:{top:10,height:100},
+legend:"none",tooltip:{trigger:"none"}})});v("ae.Charts.totalTimeBarChart",function(a,b,c,e){for(var d=[],f=0;f<b.length;f++)d[f]=b[f]+" milliseconds";Oa(a,b,d,c,e)});v("ae.Charts.rpcTimeBarChart",function(a,b,c,e,d){var f=[],g=[],h=c.indices,x=c.times;c=c.stats;for(var p=0;p<b;p++)f[p]=0,g[p]=null;for(p=0;p<h.length;p++){f[h[p]]=x[p];b=c[p];var l="Calls: "+b[0];if(0<b[1]||0<b[2]||0<b[3])l+=" Entities";0<b[1]&&(l+=" R:"+b[1]);0<b[2]&&(l+=" W:"+b[2]);0<b[3]&&(l+=" M:"+b[3]);g[h[p]]=l}Oa(a,f,g,e,d)});})();
diff --git a/google/appengine/ext/appstats/static/appstats_js.js b/google/appengine/ext/appstats/static/appstats_js.js
index f749f53..7d18518 100644
--- a/google/appengine/ext/appstats/static/appstats_js.js
+++ b/google/appengine/ext/appstats/static/appstats_js.js
@@ -1,84 +1,82 @@
-/* Copyright 2008-10 Google Inc. All Rights Reserved. */ (function(){var f,l=this,aa=function(){},ba=function(a){a.ca=function(){return a.Eb?a.Eb:a.Eb=new a}},ca=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=
-typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},da=function(a){return"array"==ca(a)},ea=function(a){var b=ca(a);return"array"==b||"object"==b&&"number"==typeof a.length},n=function(a){return"string"==typeof a},p=function(a){return"function"==ca(a)},fa=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},s=function(a){return a[ga]||
-(a[ga]=++ha)},ga="closure_uid_"+(1E9*Math.random()>>>0),ha=0,ia=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},ja=function(a,b){var c=a.split("."),d=l;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},t=function(a,b){function c(){}c.prototype=b.prototype;a.f=b.prototype;a.prototype=new c;a.prototype.constructor=
-a};var ka=function(a){Error.captureStackTrace?Error.captureStackTrace(this,ka):this.stack=Error().stack||"";a&&(this.message=String(a))};t(ka,Error);ka.prototype.name="CustomError";var la=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},ma=function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},sa=function(a){if(!na.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(oa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(pa,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(qa,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(ra,"&quot;"));return a},oa=/&/g,pa=/</g,qa=/>/g,ra=/\"/g,na=/[&<>\"]/;var ta=function(a,b){b.unshift(a);ka.call(this,la.apply(null,b));b.shift()};t(ta,ka);ta.prototype.name="AssertionError";var ua=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=null);throw new ta(""+d,e||[]);},u=function(a,b,c){a||ua("",b,Array.prototype.slice.call(arguments,2))},va=function(a,b,c,d){a instanceof b||ua("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var w=Array.prototype,wa=w.indexOf?function(a,b,c){u(null!=a.length);return w.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(n(a))return n(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},xa=w.forEach?function(a,b,c){u(null!=a.length);w.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=n(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},ya=w.filter?function(a,b,c){u(null!=a.length);return w.filter.call(a,
-b,c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=n(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var m=h[k];b.call(c,m,k,a)&&(e[g++]=m)}return e},za=w.every?function(a,b,c){u(null!=a.length);return w.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=n(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},x=function(a,b){return 0<=wa(a,b)},y=function(a,b){var c=wa(a,b),d;if(d=0<=c)u(null!=a.length),w.splice.call(a,c,1);return d},Aa=function(a){var b=a.length;if(0<b){for(var c=
-Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ca=function(a,b,c,d){u(null!=a.length);w.splice.apply(a,Ba(arguments,1))},Ba=function(a,b,c){u(null!=a.length);return 2>=arguments.length?w.slice.call(a,b):w.slice.call(a,b,c)};var Da=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Ea=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Fa=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Ga=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ha="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ia=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Ha.length;g++)c=
-Ha[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var Ja,Ka,La,Ma,Na=function(){return l.navigator?l.navigator.userAgent:null};Ma=La=Ka=Ja=!1;var Oa;if(Oa=Na()){var Pa=l.navigator;Ja=0==Oa.lastIndexOf("Opera",0);Ka=!Ja&&(-1!=Oa.indexOf("MSIE")||-1!=Oa.indexOf("Trident"));La=!Ja&&-1!=Oa.indexOf("WebKit");Ma=!Ja&&!La&&!Ka&&"Gecko"==Pa.product}var Qa=Ja,z=Ka,A=Ma,B=La,Ra=l.navigator,C=-1!=(Ra&&Ra.platform||"").indexOf("Mac"),Sa=function(){var a=l.document;return a?a.documentMode:void 0},Ta;
-t:{var Ua="",Va;if(Qa&&l.opera)var Wa=l.opera.version,Ua="function"==typeof Wa?Wa():Wa;else if(A?Va=/rv\:([^\);]+)(\)|;)/:z?Va=/\b(?:MSIE|rv)\s+([^\);]+)(\)|;)/:B&&(Va=/WebKit\/(\S+)/),Va)var Xa=Va.exec(Na()),Ua=Xa?Xa[1]:"";if(z){var Ya=Sa();if(Ya>parseFloat(Ua)){Ta=String(Ya);break t}}Ta=Ua}
-var Za=Ta,$a={},D=function(a){var b;if(!(b=$a[a])){b=0;for(var c=ma(String(Za)).split("."),d=ma(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",m=RegExp("(\\d*)(\\D*)","g"),v=RegExp("(\\d*)(\\D*)","g");do{var r=m.exec(h)||["","",""],q=v.exec(k)||["","",""];if(0==r[0].length&&0==q[0].length)break;b=((0==r[1].length?0:parseInt(r[1],10))<(0==q[1].length?0:parseInt(q[1],10))?-1:(0==r[1].length?0:parseInt(r[1],10))>(0==q[1].length?0:parseInt(q[1],10))?1:
-0)||((0==r[2].length)<(0==q[2].length)?-1:(0==r[2].length)>(0==q[2].length)?1:0)||(r[2]<q[2]?-1:r[2]>q[2]?1:0)}while(0==b)}b=$a[a]=0<=b}return b},ab=l.document,bb=ab&&z?Sa()||("CSS1Compat"==ab.compatMode?parseInt(Za,10):5):void 0;var cb,db=!z||z&&9<=bb;!A&&!z||z&&z&&9<=bb||A&&D("1.9.1");var eb=z&&!D("9");var fb=function(a){a=a.className;return n(a)&&a.match(/\S+/g)||[]},E=function(a,b){for(var c=fb(a),d=Ba(arguments,1),e=c.length+d.length,g=c,h=0;h<d.length;h++)x(g,d[h])||g.push(d[h]);a.className=c.join(" ");return c.length==e},hb=function(a,b){var c=fb(a),d=Ba(arguments,1),e=gb(c,d);a.className=e.join(" ");return e.length==c.length-d.length},gb=function(a,b){return ya(a,function(a){return!x(b,a)})};var kb=function(a){return a?new ib(jb(a)):cb||(cb=new ib)},lb=function(a){return n(a)?document.getElementById(a):a},mb=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=
-h.className,"function"==typeof a.split&&x(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},ob=function(a,b){Da(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in nb?a.setAttribute(nb[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},nb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",
-usemap:"useMap",valign:"vAlign",width:"width"},qb=function(a,b,c){return pb(document,arguments)},pb=function(a,b){var c=b[0],d=b[1];if(!db&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',sa(d.name),'"');if(d.type){c.push(' type="',sa(d.type),'"');var e={};Ia(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(n(d)?c.className=d:da(d)?E.apply(null,[c].concat(d)):ob(c,d));2<b.length&&rb(a,c,b);return c},rb=function(a,b,c){function d(c){c&&b.appendChild(n(c)?a.createTextNode(c):
-c)}for(var e=2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(p(g)){h="function"==typeof g.item;break t}}h=!1}xa(h?Aa(g):g,d)}}},sb=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},jb=function(a){return 9==
-a.nodeType?a:a.ownerDocument||a.document},tb=function(a,b){if("textContent"in a)a.textContent=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);a.appendChild(jb(a).createTextNode(String(b)))}},ub={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},vb={IMG:" ",BR:"\n"},wb=function(a){var b=a.getAttributeNode("tabindex");return b&&b.specified?(a=a.tabIndex,"number"==typeof a&&0<=a&&
-32768>a):!1},xb=function(a,b,c){if(!(a.nodeName in ub))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in vb)b.push(vb[a.nodeName]);else for(a=a.firstChild;a;)xb(a,b,c),a=a.nextSibling},ib=function(a){this.F=a||l.document||document};f=ib.prototype;f.jb=kb;f.a=function(a){return n(a)?this.F.getElementById(a):a};f.r=function(a,b,c){return pb(this.F,arguments)};f.createElement=function(a){return this.F.createElement(a)};
-f.createTextNode=function(a){return this.F.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=sb;var yb=function(a){yb[" "](a);return a};yb[" "]=aa;var zb=!z||z&&9<=bb,Ab=!z||z&&9<=bb,Bb=z&&!D("9");!B||D("528");A&&D("1.9b")||z&&D("8")||Qa&&D("9.5")||B&&D("528");A&&!D("8")||z&&D("9");var Cb=function(){};Cb.prototype.Sb=!1;var F=function(a,b){this.type=a;this.currentTarget=this.target=b};f=F.prototype;f.T=!1;f.defaultPrevented=!1;f.wb=!0;f.stopPropagation=function(){this.T=!0};f.preventDefault=function(){this.defaultPrevented=!0;this.wb=!1};var G=function(a,b){a&&Db(this,a,b)};t(G,F);var Eb=[1,4,2];f=G.prototype;f.target=null;f.relatedTarget=null;f.offsetX=0;f.offsetY=0;f.clientX=0;f.clientY=0;f.screenX=0;f.screenY=0;f.button=0;f.keyCode=0;f.charCode=0;f.ctrlKey=!1;f.altKey=!1;f.shiftKey=!1;f.metaKey=!1;f.ab=!1;f.R=null;
-var Db=function(a,b,c){var d=a.type=b.type;F.call(a,d);a.target=b.target||b.srcElement;a.currentTarget=c;if(c=b.relatedTarget){if(A){var e;t:{try{yb(c.nodeName);e=!0;break t}catch(g){}e=!1}e||(c=null)}}else"mouseover"==d?c=b.fromElement:"mouseout"==d&&(c=b.toElement);a.relatedTarget=c;a.offsetX=B||void 0!==b.offsetX?b.offsetX:b.layerX;a.offsetY=B||void 0!==b.offsetY?b.offsetY:b.layerY;a.clientX=void 0!==b.clientX?b.clientX:b.pageX;a.clientY=void 0!==b.clientY?b.clientY:b.pageY;a.screenX=b.screenX||
-0;a.screenY=b.screenY||0;a.button=b.button;a.keyCode=b.keyCode||0;a.charCode=b.charCode||("keypress"==d?b.keyCode:0);a.ctrlKey=b.ctrlKey;a.altKey=b.altKey;a.shiftKey=b.shiftKey;a.metaKey=b.metaKey;a.ab=C?b.metaKey:b.ctrlKey;a.state=b.state;a.R=b;b.defaultPrevented&&a.preventDefault();delete a.T},Fb=function(a){return zb?0==a.R.button:"click"==a.type?!0:!!(a.R.button&Eb[0])};
-G.prototype.stopPropagation=function(){G.f.stopPropagation.call(this);this.R.stopPropagation?this.R.stopPropagation():this.R.cancelBubble=!0};G.prototype.preventDefault=function(){G.f.preventDefault.call(this);var a=this.R;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,Bb)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Gb="closure_listenable_"+(1E6*Math.random()|0),Hb=0;var Ib=function(a,b,c,d,e,g){this.H=a;this.xb=b;this.src=c;this.type=d;this.capture=!!e;this.Y=g;this.key=++Hb;this.N=this.ja=!1},Jb=function(a){a.N=!0;a.H=null;a.xb=null;a.src=null;a.Y=null};var Kb={},H={},Lb={},Mb={},I=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)I(a,b[g],c,d,e);return null}c=Nb(c);if(a&&a[Gb])a=a.d(b,c,d,e);else t:{if(!b)throw Error("Invalid event type");d=!!d;var h=H;b in h||(h[b]={da:0});h=h[b];d in h||(h[d]={da:0},h.da++);var h=h[d],g=s(a),k;if(h[g]){k=h[g];for(var m=0;m<k.length;m++)if(h=k[m],h.H==c&&h.Y==e){if(h.N)break;k[m].ja=!1;a=k[m];break t}}else k=h[g]=[],h.da++;m=Ob();h=new Ib(c,m,a,b,d,e);h.ja=!1;m.src=a;m.H=h;k.push(h);Lb[g]||(Lb[g]=[]);Lb[g].push(h);
-a.addEventListener?a.addEventListener(b,m,d):a.attachEvent(b in Mb?Mb[b]:Mb[b]="on"+b,m);a=Kb[h.key]=h}return a},Ob=function(){var a=Pb,b=Ab?function(c){return a.call(b.src,b.H,c)}:function(c){c=a.call(b.src,b.H,c);if(!c)return c};return b},Qb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Qb(a,b[g],c,d,e);else if(c=Nb(c),a&&a[Gb])a.v(b,c,d,e);else if(d=!!d,a=Rb(a,b,d))for(g=0;g<a.length;g++)if(a[g].H==c&&a[g].capture==d&&a[g].Y==e){J(a[g]);break}},J=function(a){if("number"==typeof a||!a||
-a.N)return!1;var b=a.src;if(b&&b[Gb])return Sb(b.ea,a);var c=a.type,d=a.xb,e=a.capture;b.removeEventListener?b.removeEventListener(c,d,e):b.detachEvent&&b.detachEvent(c in Mb?Mb[c]:Mb[c]="on"+c,d);b=s(b);Lb[b]&&(d=Lb[b],y(d,a),0==d.length&&delete Lb[b]);Jb(a);if(d=H[c][e][b])y(d,a),0==d.length&&(delete H[c][e][b],H[c][e].da--),0==H[c][e].da&&(delete H[c][e],H[c].da--),0==H[c].da&&delete H[c];delete Kb[a.key];return!0},Rb=function(a,b,c){var d=H;return b in d&&(d=d[b],c in d&&(d=d[c],a=s(a),d[a]))?
-d[a]:null},Ub=function(a,b,c){var d=1;b=s(b);if(a[b])for(a=Aa(a[b]),b=0;b<a.length;b++){var e=a[b];e&&!e.N&&(d&=!1!==Tb(e,c))}return Boolean(d)},Tb=function(a,b){var c=a.H,d=a.Y||a.src;a.ja&&J(a);return c.call(d,b)},Pb=function(a,b){if(a.N)return!0;var c=a.type,d=H;if(!(c in d))return!0;var d=d[c],e,g;if(!Ab){if(!(c=b))t:{for(var c=["window","event"],h=l;e=c.shift();)if(null!=h[e])h=h[e];else{c=null;break t}c=h}e=c;c=!0 in d;h=!1 in d;if(c){if(0>e.keyCode||void 0!=e.returnValue)return!0;t:{var k=
-!1;if(0==e.keyCode)try{e.keyCode=-1;break t}catch(m){k=!0}if(k||void 0==e.returnValue)e.returnValue=!0}}k=new G(e,this);e=!0;try{if(c){for(var v=[],r=k.currentTarget;r;r=r.parentNode)v.push(r);g=d[!0];for(var q=v.length-1;!k.T&&0<=q;q--)k.currentTarget=v[q],e&=Ub(g,v[q],k);if(h)for(g=d[!1],q=0;!k.T&&q<v.length;q++)k.currentTarget=v[q],e&=Ub(g,v[q],k)}else e=Tb(a,k)}finally{v&&(v.length=0)}return e}return Tb(a,new G(b,this))},Vb="__closure_events_fn_"+(1E9*Math.random()>>>0),Nb=function(a){u(a,"Listener can not be null.");
-if(p(a))return a;u(a.handleEvent,"An object listener must have handleEvent method.");return a[Vb]||(a[Vb]=function(b){return a.handleEvent(b)})};var K=function(a){this.yb=a;this.Ia={}};t(K,Cb);var Wb=[];K.prototype.d=function(a,b,c,d,e){da(b)||(Wb[0]=b,b=Wb);for(var g=0;g<b.length;g++){var h=I(a,b[g],c||this,d||!1,e||this.yb||this);if(!h)break;this.Ia[h.key]=h}return this};
-K.prototype.v=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.v(a,b[g],c,d,e);else{t:if(e=e||this.yb||this,d=!!d,c=Nb(c||this),a&&a[Gb])a=a.Za(b,c,d,e);else{if(a=Rb(a,b,d))for(b=0;b<a.length;b++)if(!a[b].N&&a[b].H==c&&a[b].capture==d&&a[b].Y==e){a=a[b];break t}a=null}a&&(J(a),delete this.Ia[a.key])}return this};K.prototype.Ya=function(){Da(this.Ia,J);this.Ia={}};K.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var Xb=function(a){this.src=a;this.q={}};Xb.prototype.add=function(a,b,c,d,e){var g=this.q[a];g||(g=this.q[a]=[]);var h=Yb(g,b,d,e);-1<h?(a=g[h],c||(a.ja=!1)):(a=new Ib(b,null,this.src,a,!!d,e),a.ja=c,g.push(a));return a};Xb.prototype.remove=function(a,b,c,d){if(!(a in this.q))return!1;var e=this.q[a];b=Yb(e,b,c,d);return-1<b?(Jb(e[b]),u(null!=e.length),w.splice.call(e,b,1),0==e.length&&delete this.q[a],!0):!1};
-var Sb=function(a,b){var c=b.type;if(!(c in a.q))return!1;var d=y(a.q[c],b);d&&(Jb(b),0==a.q[c].length&&delete a.q[c]);return d};Xb.prototype.Ya=function(a){var b=0,c;for(c in this.q)if(!a||c==a){for(var d=this.q[c],e=0;e<d.length;e++)++b,Jb(d[e]);delete this.q[c]}return b};Xb.prototype.Za=function(a,b,c,d){a=this.q[a];var e=-1;a&&(e=Yb(a,b,c,d));return-1<e?a[e]:null};var Yb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.N&&g.H==b&&g.capture==!!c&&g.Y==d)return e}return-1};var L=function(){this.ea=new Xb(this);this.bc=this};t(L,Cb);L.prototype[Gb]=!0;f=L.prototype;f.lb=null;f.Wa=function(a){this.lb=a};f.addEventListener=function(a,b,c,d){I(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Qb(this,a,b,c,d)};
-f.dispatchEvent=function(a){Zb(this);var b,c=this.lb;if(c){b=[];for(var d=1;c;c=c.lb)b.push(c),u(1E3>++d,"infinite loop")}c=this.bc;d=a.type||a;if(n(a))a=new F(a,c);else if(a instanceof F)a.target=a.target||c;else{var e=a;a=new F(d,c);Ia(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.T&&0<=h;h--)g=a.currentTarget=b[h],e=$b(g,d,!0,a)&&e;a.T||(g=a.currentTarget=c,e=$b(g,d,!0,a)&&e,a.T||(e=$b(g,d,!1,a)&&e));if(b)for(h=0;!a.T&&h<b.length;h++)g=a.currentTarget=b[h],e=$b(g,d,!1,a)&&e;return e};
-f.d=function(a,b,c,d){Zb(this);return this.ea.add(a,b,!1,c,d)};f.v=function(a,b,c,d){return this.ea.remove(a,b,c,d)};var $b=function(a,b,c,d){b=a.ea.q[b];if(!b)return!0;b=Aa(b);for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.N&&h.capture==c){var k=h.H,m=h.Y||h.src;h.ja&&Sb(a.ea,h);e=!1!==k.call(m,d)&&e}}return e&&!1!=d.wb};L.prototype.Za=function(a,b,c,d){return this.ea.Za(a,b,c,d)};var Zb=function(a){u(a.ea,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var M=function(a,b){a.style.display=b?"":"none"},ac=A?"MozUserSelect":B?"WebkitUserSelect":null,bc=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(ac){if(b=b?"none":"",a.style[ac]=b,c){a=0;for(var d;d=c[a];a++)d.style[ac]=b}}else if(z||Qa)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var cc=function(){};ba(cc);cc.prototype.ec=0;cc.ca();var N=function(a){L.call(this);this.n=a||kb();this.va=dc};t(N,L);N.prototype.dc=cc.ca();var dc=null,ec=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");};f=N.prototype;f.Q=null;f.e=!1;f.c=null;f.va=null;f.o=null;f.s=null;f.i=null;
-var fc=function(a){return a.Q||(a.Q=":"+(a.dc.ec++).toString(36))},gc=function(a,b){if(a.o&&a.o.i){var c=a.o.i,d=a.Q;d in c&&delete c[d];Fa(a.o.i,b,a)}a.Q=b};N.prototype.a=function(){return this.c};var hc=function(a){return a.hb||(a.hb=new K(a))},ic=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.o&&a.Q&&a.o.i&&a.Q&&(a.Q in a.o.i&&a.o.i[a.Q])&&a.o!=b)throw Error("Unable to set parent component");a.o=b;N.f.Wa.call(a,b)};f=N.prototype;f.getParent=function(){return this.o};
-f.Wa=function(a){if(this.o&&this.o!=a)throw Error("Method not supported");N.f.Wa.call(this,a)};f.jb=function(){return this.n};f.r=function(){this.c=this.n.createElement("div")};f.L=function(a){if(this.e)throw Error("Component already rendered");if(a&&this.ba(a)){var b=jb(a);this.n&&this.n.F==b||(this.n=kb(a));this.Va(a);this.G()}else throw Error("Invalid element to decorate");};f.ba=function(){return!0};f.Va=function(a){this.c=a};f.G=function(){this.e=!0;jc(this,function(a){!a.e&&a.a()&&a.G()})};
-f.ga=function(){jc(this,function(a){a.e&&a.ga()});this.hb&&this.hb.Ya();this.e=!1};f.Da=function(a,b){this.Ra(a,kc(this),b)};
-f.Ra=function(a,b,c){if(a.e&&(c||!this.e))throw Error("Component already rendered");if(0>b||b>kc(this))throw Error("Child component index out of bounds");this.i&&this.s||(this.i={},this.s=[]);if(a.getParent()==this){var d=fc(a);this.i[d]=a;y(this.s,a)}else Fa(this.i,fc(a),a);ic(a,this);Ca(this.s,b,0,a);if(a.e&&this.e&&a.getParent()==this)c=this.C(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.c||this.r();c=O(this,b+1);b=this.C();c=c?c.c:null;if(a.e)throw Error("Component already rendered");
+/* Copyright 2008-10 Google Inc. All Rights Reserved. */ (function(){var f,l=this,aa=function(){},ba=function(a){a.aa=function(){return a.Eb?a.Eb:a.Eb=new a}},ca=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=
+typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},da=function(a){return"array"==ca(a)},ea=function(a){var b=ca(a);return"array"==b||"object"==b&&"number"==typeof a.length},m=function(a){return"string"==typeof a},n=function(a){return"function"==ca(a)},fa=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},p=function(a){return a[ga]||
+(a[ga]=++ha)},ga="closure_uid_"+(1E9*Math.random()>>>0),ha=0,ia=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);return a.apply(this,b)}},ja=function(a,b){var c=a.split("."),d=l;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},r=function(a,b){function c(){}c.prototype=b.prototype;a.f=b.prototype;a.prototype=new c;a.prototype.constructor=a};var ka=function(a){Error.captureStackTrace?Error.captureStackTrace(this,ka):this.stack=Error().stack||"";a&&(this.message=String(a))};r(ka,Error);ka.prototype.name="CustomError";var la=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},na=function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},ta=function(a){if(!oa.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(pa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(qa,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(ra,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(sa,"&quot;"));return a},pa=/&/g,qa=/</g,ra=/>/g,sa=/\"/g,oa=/[&<>\"]/;var ua=function(a,b){b.unshift(a);ka.call(this,la.apply(null,b));b.shift()};r(ua,ka);ua.prototype.name="AssertionError";var va=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=null);throw new ua(""+d,e||[]);},s=function(a,b,c){a||va("",b,Array.prototype.slice.call(arguments,2))},wa=function(a,b,c,d){a instanceof b||va("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var t=Array.prototype,xa=t.indexOf?function(a,b,c){s(null!=a.length);return t.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(m(a))return m(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},ya=t.forEach?function(a,b,c){s(null!=a.length);t.forEach.call(a,b,c)}:function(a,b,c){for(var 
d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},za=t.filter?function(a,b,c){s(null!=a.length);return t.filter.call(a,
+b,c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=m(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var q=h[k];b.call(c,q,k,a)&&(e[g++]=q)}return e},Aa=t.every?function(a,b,c){s(null!=a.length);return t.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},u=function(a,b){return 0<=xa(a,b)},Ba=function(a,b){var c=xa(a,b),d;if(d=0<=c)s(null!=a.length),t.splice.call(a,c,1);return d},Ca=function(a){var b=a.length;if(0<b){for(var c=
+Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ea=function(a,b,c,d){s(null!=a.length);t.splice.apply(a,Da(arguments,1))},Da=function(a,b,c){s(null!=a.length);return 2>=arguments.length?t.slice.call(a,b):t.slice.call(a,b,c)};var Fa=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Ga=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Ha=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Ia=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ja="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ka=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Ja.length;g++)c=
+Ja[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var v,La,Ma,Na,Oa=function(){return l.navigator?l.navigator.userAgent:null};Na=Ma=La=v=!1;var Pa;if(Pa=Oa()){var Qa=l.navigator;v=0==Pa.lastIndexOf("Opera",0);La=!v&&(-1!=Pa.indexOf("MSIE")||-1!=Pa.indexOf("Trident"));Ma=!v&&-1!=Pa.indexOf("WebKit");Na=!v&&!Ma&&!La&&"Gecko"==Qa.product}var Ra=v,w=La,x=Na,y=Ma,Sa=l.navigator,A=-1!=(Sa&&Sa.platform||"").indexOf("Mac"),Ta=function(){var a=l.document;return a?a.documentMode:void 0},Ua;
+t:{var Va="",Wa;if(Ra&&l.opera)var Xa=l.opera.version,Va="function"==typeof Xa?Xa():Xa;else if(x?Wa=/rv\:([^\);]+)(\)|;)/:w?Wa=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:y&&(Wa=/WebKit\/(\S+)/),Wa)var Ya=Wa.exec(Oa()),Va=Ya?Ya[1]:"";if(w){var Za=Ta();if(Za>parseFloat(Va)){Ua=String(Za);break t}}Ua=Va}
+var $a=Ua,ab={},B=function(a){var b;if(!(b=ab[a])){b=0;for(var c=na(String($a)).split("."),d=na(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",q=RegExp("(\\d*)(\\D*)","g"),ma=RegExp("(\\d*)(\\D*)","g");do{var z=q.exec(h)||["","",""],I=ma.exec(k)||["","",""];if(0==z[0].length&&0==I[0].length)break;b=((0==z[1].length?0:parseInt(z[1],10))<(0==I[1].length?0:parseInt(I[1],10))?-1:(0==z[1].length?0:parseInt(z[1],10))>(0==I[1].length?0:parseInt(I[1],10))?
+1:0)||((0==z[2].length)<(0==I[2].length)?-1:(0==z[2].length)>(0==I[2].length)?1:0)||(z[2]<I[2]?-1:z[2]>I[2]?1:0)}while(0==b)}b=ab[a]=0<=b}return b},bb=l.document,cb=bb&&w?Ta()||("CSS1Compat"==bb.compatMode?parseInt($a,10):5):void 0;var db,eb=!w||w&&9<=cb;!x&&!w||w&&w&&9<=cb||x&&B("1.9.1");var fb=w&&!B("9");var C=function(a){a=a.className;return m(a)&&a.match(/\S+/g)||[]},D=function(a,b){for(var c=C(a),d=Da(arguments,1),e=c.length+d.length,g=c,h=0;h<d.length;h++)u(g,d[h])||g.push(d[h]);a.className=c.join(" ");return c.length==e},E=function(a,b){var c=C(a),d=Da(arguments,1),e=gb(c,d);a.className=e.join(" ");return e.length==c.length-d.length},gb=function(a,b){return za(a,function(a){return!u(b,a)})};var jb=function(a){return a?new hb(ib(a)):db||(db=new hb)},kb=function(a,b){return m(b)?a.getElementById(b):b},lb=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=h.className,
+"function"==typeof a.split&&u(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},nb=function(a,b){Fa(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in mb?a.setAttribute(mb[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},mb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",
+valign:"vAlign",width:"width"},pb=function(a,b,c){return ob(document,arguments)},ob=function(a,b){var c=b[0],d=b[1];if(!eb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',ta(d.name),'"');if(d.type){c.push(' type="',ta(d.type),'"');var e={};Ka(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(m(d)?c.className=d:da(d)?D.apply(null,[c].concat(d)):nb(c,d));2<b.length&&qb(a,c,b);return c},qb=function(a,b,c){function d(c){c&&b.appendChild(m(c)?a.createTextNode(c):c)}for(var e=
+2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(n(g)){h="function"==typeof g.item;break t}}h=!1}ya(h?Ca(g):g,d)}}},rb=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},ib=function(a){return 9==a.nodeType?
+a:a.ownerDocument||a.document},sb=function(a,b){if("textContent"in a)a.textContent=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);a.appendChild(ib(a).createTextNode(String(b)))}},tb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},ub={IMG:" ",BR:"\n"},vb=function(a){var b=a.getAttributeNode("tabindex");return b&&b.specified?(a=a.tabIndex,"number"==typeof a&&0<=a&&32768>a):!1},
+wb=function(a,b,c){if(!(a.nodeName in tb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in ub)b.push(ub[a.nodeName]);else for(a=a.firstChild;a;)wb(a,b,c),a=a.nextSibling},hb=function(a){this.F=a||l.document||document};f=hb.prototype;f.kb=jb;f.a=function(a){return kb(this.F,a)};f.r=function(a,b,c){return ob(this.F,arguments)};f.createElement=function(a){return this.F.createElement(a)};f.createTextNode=function(a){return this.F.createTextNode(String(a))};
+f.appendChild=function(a,b){a.appendChild(b)};f.contains=rb;var xb=function(a){xb[" "](a);return a};xb[" "]=aa;var yb=!w||w&&9<=cb,zb=!w||w&&9<=cb,Ab=w&&!B("9");!y||B("528");x&&B("1.9b")||w&&B("8")||Ra&&B("9.5")||y&&B("528");x&&!B("8")||w&&B("9");var Bb=function(){};Bb.prototype.Tb=!1;var F=function(a,b){this.type=a;this.currentTarget=this.target=b};f=F.prototype;f.R=!1;f.defaultPrevented=!1;f.xb=!0;f.stopPropagation=function(){this.R=!0};f.preventDefault=function(){this.defaultPrevented=!0;this.xb=!1};var G=function(a,b){a&&Cb(this,a,b)};r(G,F);var Db=[1,4,2];f=G.prototype;f.target=null;f.relatedTarget=null;f.offsetX=0;f.offsetY=0;f.clientX=0;f.clientY=0;f.screenX=0;f.screenY=0;f.button=0;f.keyCode=0;f.charCode=0;f.ctrlKey=!1;f.altKey=!1;f.shiftKey=!1;f.metaKey=!1;f.bb=!1;f.P=null;
+var Cb=function(a,b,c){var d=a.type=b.type;F.call(a,d);a.target=b.target||b.srcElement;a.currentTarget=c;if(c=b.relatedTarget){if(x){var e;t:{try{xb(c.nodeName);e=!0;break t}catch(g){}e=!1}e||(c=null)}}else"mouseover"==d?c=b.fromElement:"mouseout"==d&&(c=b.toElement);a.relatedTarget=c;a.offsetX=y||void 0!==b.offsetX?b.offsetX:b.layerX;a.offsetY=y||void 0!==b.offsetY?b.offsetY:b.layerY;a.clientX=void 0!==b.clientX?b.clientX:b.pageX;a.clientY=void 0!==b.clientY?b.clientY:b.pageY;a.screenX=b.screenX||
+0;a.screenY=b.screenY||0;a.button=b.button;a.keyCode=b.keyCode||0;a.charCode=b.charCode||("keypress"==d?b.keyCode:0);a.ctrlKey=b.ctrlKey;a.altKey=b.altKey;a.shiftKey=b.shiftKey;a.metaKey=b.metaKey;a.bb=A?b.metaKey:b.ctrlKey;a.state=b.state;a.P=b;b.defaultPrevented&&a.preventDefault();delete a.R},Eb=function(a){return yb?0==a.P.button:"click"==a.type?!0:!!(a.P.button&Db[0])};
+G.prototype.stopPropagation=function(){G.f.stopPropagation.call(this);this.P.stopPropagation?this.P.stopPropagation():this.P.cancelBubble=!0};G.prototype.preventDefault=function(){G.f.preventDefault.call(this);var a=this.P;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,Ab)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Fb="closure_listenable_"+(1E6*Math.random()|0),Gb=0;var Hb=function(a,b,c,d,e){this.$=a;this.Da=null;this.src=b;this.type=c;this.capture=!!d;this.Ia=e;this.key=++Gb;this.ha=this.Ga=!1},Ib=function(a){a.ha=!0;a.$=null;a.Da=null;a.src=null;a.Ia=null};var Jb=function(a){this.src=a;this.p={};this.va=0};Jb.prototype.add=function(a,b,c,d,e){var g=this.p[a];g||(g=this.p[a]=[],this.va++);var h=Kb(g,b,d,e);-1<h?(a=g[h],c||(a.Ga=!1)):(a=new Hb(b,this.src,a,!!d,e),a.Ga=c,g.push(a));return a};Jb.prototype.remove=function(a,b,c,d){if(!(a in this.p))return!1;var e=this.p[a];b=Kb(e,b,c,d);return-1<b?(Ib(e[b]),s(null!=e.length),t.splice.call(e,b,1),0==e.length&&(delete this.p[a],this.va--),!0):!1};
+var Lb=function(a,b){var c=b.type;if(!(c in a.p))return!1;var d=Ba(a.p[c],b);d&&(Ib(b),0==a.p[c].length&&(delete a.p[c],a.va--));return d};Jb.prototype.$a=function(a){var b=0,c;for(c in this.p)if(!a||c==a){for(var d=this.p[c],e=0;e<d.length;e++)++b,Ib(d[e]);delete this.p[c];this.va--}return b};Jb.prototype.wa=function(a,b,c,d){a=this.p[a];var e=-1;a&&(e=Kb(a,b,c,d));return-1<e?a[e]:null};var Kb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.ha&&g.$==b&&g.capture==!!c&&g.Ia==d)return e}return-1};var Mb={},Nb={},Ob={},H=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)H(a,b[g],c,d,e);return null}c=Pb(c);if(a&&a[Fb])a=a.d(b,c,d,e);else{g=c;if(!b)throw Error("Invalid event type");c=!!d;var h=p(a),k=Nb[h];k||(Nb[h]=k=new Jb(a));d=k.add(b,g,!1,d,e);d.Da||(e=Qb(),d.Da=e,e.src=a,e.$=d,a.addEventListener?a.addEventListener(b,e,c):a.attachEvent(b in Ob?Ob[b]:Ob[b]="on"+b,e),Mb[d.key]=d);a=d}return a},Qb=function(){var a=Rb,b=zb?function(c){return a.call(b.src,b.$,c)}:function(c){c=a.call(b.src,
+b.$,c);if(!c)return c};return b},Sb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Sb(a,b[g],c,d,e);else c=Pb(c),a&&a[Fb]?a.w(b,c,d,e):a&&(d=!!d,(a=Tb(a))&&(b=a.wa(b,c,d,e))&&J(b))},J=function(a){if("number"==typeof a||!a||a.ha)return!1;var b=a.src;if(b&&b[Fb])return Lb(b.ba,a);var c=a.type,d=a.Da;b.removeEventListener?b.removeEventListener(c,d,a.capture):b.detachEvent&&b.detachEvent(c in Ob?Ob[c]:Ob[c]="on"+c,d);(c=Tb(b))?(Lb(c,a),0==c.va&&(c.src=null,delete Nb[p(b)])):Ib(a);delete Mb[a.key];
+return!0},Vb=function(a,b,c,d){var e=1;if(a=Tb(a))if(b=a.p[b])for(b=Ca(b),a=0;a<b.length;a++){var g=b[a];g&&(g.capture==c&&!g.ha)&&(e&=!1!==Ub(g,d))}return Boolean(e)},Ub=function(a,b){var c=a.$,d=a.Ia||a.src;a.Ga&&J(a);return c.call(d,b)},Rb=function(a,b){if(a.ha)return!0;if(!zb){var c;if(!(c=b))t:{c=["window","event"];for(var d=l,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new G(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==e.keyCode)try{e.keyCode=
+-1;break t}catch(h){g=!0}if(g||void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,k=e.length-1;!c.R&&0<=k;k--)c.currentTarget=e[k],d&=Vb(e[k],g,!0,c);for(k=0;!c.R&&k<e.length;k++)c.currentTarget=e[k],d&=Vb(e[k],g,!1,c)}return d}return Ub(a,new G(b,this))},Tb=function(a){return a[ga]?Nb[p(a)]||null:null},Wb="__closure_events_fn_"+(1E9*Math.random()>>>0),Pb=function(a){s(a,"Listener can not be null.");if(n(a))return a;s(a.handleEvent,"An object listener must have handleEvent method.");
+return a[Wb]||(a[Wb]=function(b){return a.handleEvent(b)})};var K=function(a){this.Db=a;this.La={}};r(K,Bb);var Xb=[];K.prototype.d=function(a,b,c,d,e){da(b)||(Xb[0]=b,b=Xb);for(var g=0;g<b.length;g++){var h=H(a,b[g],c||this,d||!1,e||this.Db||this);if(!h)break;this.La[h.key]=h}return this};K.prototype.w=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.w(a,b[g],c,d,e);else e=e||this.Db||this,c=Pb(c||this),d=!!d,b=a&&a[Fb]?a.wa(b,c,d,e):a?(a=Tb(a))?a.wa(b,c,d,e):null:null,b&&(J(b),delete this.La[b.key]);return this};
+K.prototype.$a=function(){Fa(this.La,J);this.La={}};K.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var L=function(){this.ba=new Jb(this);this.cc=this};r(L,Bb);L.prototype[Fb]=!0;f=L.prototype;f.mb=null;f.Ya=function(a){this.mb=a};f.addEventListener=function(a,b,c,d){H(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Sb(this,a,b,c,d)};
+f.dispatchEvent=function(a){Yb(this);var b,c=this.mb;if(c){b=[];for(var d=1;c;c=c.mb)b.push(c),s(1E3>++d,"infinite loop")}c=this.cc;d=a.type||a;if(m(a))a=new F(a,c);else if(a instanceof F)a.target=a.target||c;else{var e=a;a=new F(d,c);Ka(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.R&&0<=h;h--)g=a.currentTarget=b[h],e=Zb(g,d,!0,a)&&e;a.R||(g=a.currentTarget=c,e=Zb(g,d,!0,a)&&e,a.R||(e=Zb(g,d,!1,a)&&e));if(b)for(h=0;!a.R&&h<b.length;h++)g=a.currentTarget=b[h],e=Zb(g,d,!1,a)&&e;return e};
+f.d=function(a,b,c,d){Yb(this);return this.ba.add(a,b,!1,c,d)};f.w=function(a,b,c,d){return this.ba.remove(a,b,c,d)};var Zb=function(a,b,c,d){b=a.ba.p[b];if(!b)return!0;b=Ca(b);for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.ha&&h.capture==c){var k=h.$,q=h.Ia||h.src;h.Ga&&Lb(a.ba,h);e=!1!==k.call(q,d)&&e}}return e&&!1!=d.xb};L.prototype.wa=function(a,b,c,d){return this.ba.wa(a,b,c,d)};var Yb=function(a){s(a.ba,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var M=function(a,b){a.style.display=b?"":"none"},$b=x?"MozUserSelect":y?"WebkitUserSelect":null,ac=function(a,b,c){c=c?null:a.getElementsByTagName("*");if($b){if(b=b?"none":"",a.style[$b]=b,c){a=0;for(var d;d=c[a];a++)d.style[$b]=b}}else if(w||Ra)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var bc=function(){};ba(bc);bc.prototype.fc=0;bc.aa();var N=function(a){L.call(this);this.n=a||jb();this.sa=cc};r(N,L);N.prototype.ec=bc.aa();var cc=null,dc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");};f=N.prototype;f.O=null;f.e=!1;f.c=null;f.sa=null;f.o=null;f.s=null;f.i=null;
+f.Qb=!1;var ec=function(a){return a.O||(a.O=":"+(a.ec.fc++).toString(36))},fc=function(a,b){if(a.o&&a.o.i){var c=a.o.i,d=a.O;d in c&&delete c[d];Ha(a.o.i,b,a)}a.O=b};N.prototype.a=function(){return this.c};var gc=function(a){return a.ib||(a.ib=new K(a))},hc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.o&&a.O&&a.o.i&&a.O&&(a.O in a.o.i&&a.o.i[a.O])&&a.o!=b)throw Error("Unable to set parent component");a.o=b;N.f.Ya.call(a,b)};f=N.prototype;f.getParent=function(){return this.o};
+f.Ya=function(a){if(this.o&&this.o!=a)throw Error("Method not supported");N.f.Ya.call(this,a)};f.kb=function(){return this.n};f.r=function(){this.c=this.n.createElement("div")};f.K=function(a){if(this.e)throw Error("Component already rendered");if(a&&this.Z(a)){this.Qb=!0;var b=ib(a);this.n&&this.n.F==b||(this.n=jb(a));this.Wa(a);this.G()}else throw Error("Invalid element to decorate");};f.Z=function(){return!0};f.Wa=function(a){this.c=a};
+f.G=function(){this.e=!0;ic(this,function(a){!a.e&&a.a()&&a.G()})};f.da=function(){ic(this,function(a){a.e&&a.da()});this.ib&&this.ib.$a();this.e=!1};f.Ca=function(a,b){this.Ta(a,jc(this),b)};
+f.Ta=function(a,b,c){if(a.e&&(c||!this.e))throw Error("Component already rendered");if(0>b||b>jc(this))throw Error("Child component index out of bounds");this.i&&this.s||(this.i={},this.s=[]);if(a.getParent()==this){var d=ec(a);this.i[d]=a;Ba(this.s,a)}else Ha(this.i,ec(a),a);hc(a,this);Ea(this.s,b,0,a);if(a.e&&this.e&&a.getParent()==this)c=this.C(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.c||this.r();c=O(this,b+1);b=this.C();c=c?c.c:null;if(a.e)throw Error("Component already rendered");
 a.c||a.r();b?b.insertBefore(a.c,c||null):a.n.F.body.appendChild(a.c);a.o&&!a.o.e||a.G()}else this.e&&(!a.e&&a.c&&a.c.parentNode&&1==a.c.parentNode.nodeType)&&a.G()};f.C=function(){return this.c};
-var lc=function(a){if(null==a.va){var b;t:{b=a.e?a.c:a.n.F.body;var c=jb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(b=c.defaultView.getComputedStyle(b,null))){b=b.direction||b.getPropertyValue("direction")||"";break t}b=""}a.va="rtl"==(b||((a.e?a.c:a.n.F.body).currentStyle?(a.e?a.c:a.n.F.body).currentStyle.direction:null)||(a.e?a.c:a.n.F.body).style&&(a.e?a.c:a.n.F.body).style.direction)}return a.va};
-N.prototype.ta=function(a){if(this.e)throw Error("Component already rendered");this.va=a};var kc=function(a){return a.s?a.s.length:0},O=function(a,b){return a.s?a.s[b]||null:null},jc=function(a,b,c){a.s&&xa(a.s,b,c)},mc=function(a,b){return a.s&&b?wa(a.s,b):-1};
-N.prototype.removeChild=function(a,b){if(a){var c=n(a)?a:fc(a);a=this.i&&c?(c in this.i?this.i[c]:void 0)||null:null;if(c&&a){var d=this.i;c in d&&delete d[c];y(this.s,a);b&&(a.ga(),a.c&&(c=a.c)&&c.parentNode&&c.parentNode.removeChild(c));ic(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var nc,oc={kc:"activedescendant",pc:"atomic",qc:"autocomplete",sc:"busy",vc:"checked",Ac:"controls",Cc:"describedby",Fc:"disabled",Hc:"dropeffect",Ic:"expanded",Jc:"flowto",Lc:"grabbed",Pc:"haspopup",Rc:"hidden",Tc:"invalid",Uc:"label",Vc:"labelledby",Wc:"level",ad:"live",ld:"multiline",md:"multiselectable",qd:"orientation",rd:"owns",sd:"posinset",ud:"pressed",yd:"readonly",Ad:"relevant",Bd:"required",Hd:"selected",Jd:"setsize",Ld:"sort",Yd:"valuemax",Zd:"valuemin",$d:"valuenow",ae:"valuetext"};var pc={lc:"alert",mc:"alertdialog",nc:"application",oc:"article",rc:"banner",tc:"button",uc:"checkbox",wc:"columnheader",xc:"combobox",yc:"complementary",zc:"contentinfo",Bc:"definition",Dc:"dialog",Ec:"directory",Gc:"document",Kc:"form",Mc:"grid",Nc:"gridcell",Oc:"group",Qc:"heading",Sc:"img",Xc:"link",Yc:"list",Zc:"listbox",$c:"listitem",bd:"log",cd:"main",dd:"marquee",ed:"math",fd:"menu",gd:"menubar",hd:"menuitem",jd:"menuitemcheckbox",kd:"menuitemradio",nd:"navigation",od:"note",pd:"option",
-td:"presentation",vd:"progressbar",wd:"radio",xd:"radiogroup",zd:"region",Cd:"row",Dd:"rowgroup",Ed:"rowheader",Fd:"scrollbar",Gd:"search",Id:"separator",Kd:"slider",Md:"spinbutton",Nd:"status",Od:"tab",Pd:"tablist",Qd:"tabpanel",Rd:"textbox",Sd:"timer",Td:"toolbar",Ud:"tooltip",Vd:"tree",Wd:"treegrid",Xd:"treeitem"};var qc=function(a,b){b?(u(Ea(pc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},sc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=rc(b);""===c||void 0==c?(nc||(nc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=nc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
-c)},rc=function(a){u(a,"ARIA attribute cannot be empty.");u(Ea(oc,a),"No such ARIA attribute "+a);return"aria-"+a};var uc=function(a,b,c,d,e){if(!(z||B&&D("525")))return!0;if(C&&e)return tc(a);if(e&&!d||!c&&(17==b||18==b||C&&91==b))return!1;if(B&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(z&&d&&b==a)return!1;switch(a){case 13:return!(z&&z&&9<=bb);case 27:return!B}return tc(a)},tc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||B&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
-default:return!1}},vc=function(a){switch(a){case 61:return 187;case 59:return 186;case 224:return 91;case 0:return 224;default:return a}};var P=function(a,b){L.call(this);a&&wc(this,a,b)};t(P,L);f=P.prototype;f.c=null;f.Ea=null;f.Ua=null;f.Fa=null;f.t=-1;f.P=-1;f.ib=!1;
-var xc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},yc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},zc=z||B&&D("525"),Ac=C&&A;
-P.prototype.Qb=function(a){B&&(17==this.t&&!a.ctrlKey||18==this.t&&!a.altKey||C&&91==this.t&&!a.metaKey)&&(this.P=this.t=-1);-1==this.t&&(a.ctrlKey&&17!=a.keyCode?this.t=17:a.altKey&&18!=a.keyCode?this.t=18:a.metaKey&&91!=a.keyCode&&(this.t=91));zc&&!uc(a.keyCode,this.t,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.P=A?vc(a.keyCode):a.keyCode,Ac&&(this.ib=a.altKey))};P.prototype.Rb=function(a){this.P=this.t=-1;this.ib=a.altKey};
-P.prototype.handleEvent=function(a){var b=a.R,c,d,e=b.altKey;z&&"keypress"==a.type?(c=this.P,d=13!=c&&27!=c?b.keyCode:0):B&&"keypress"==a.type?(c=this.P,d=0<=b.charCode&&63232>b.charCode&&tc(c)?b.charCode:0):Qa?(c=this.P,d=tc(c)?b.keyCode:0):(c=b.keyCode||this.P,d=b.charCode||0,Ac&&(e=this.ib),C&&(63==d&&224==c)&&(c=191));var g=c,h=b.keyIdentifier;c?63232<=c&&c in xc?g=xc[c]:25==c&&a.shiftKey&&(g=9):h&&h in yc&&(g=yc[h]);a=g==this.t;this.t=g;b=new Bc(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
-P.prototype.a=function(){return this.c};var wc=function(a,b,c){a.Fa&&a.detach();a.c=b;a.Ea=I(a.c,"keypress",a,c);a.Ua=I(a.c,"keydown",a.Qb,c,a);a.Fa=I(a.c,"keyup",a.Rb,c,a)};P.prototype.detach=function(){this.Ea&&(J(this.Ea),J(this.Ua),J(this.Fa),this.Fa=this.Ua=this.Ea=null);this.c=null;this.P=this.t=-1};var Bc=function(a,b,c,d){d&&Db(this,d,void 0);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};t(Bc,G);var Dc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!p(b))throw Error("Invalid decorator function "+b);Cc[a]=b},Ec={},Cc={};var Q=function(){};ba(Q);Q.prototype.X=function(){};var Fc=function(a,b){a&&(a.tabIndex=b?0:-1)};f=Q.prototype;f.r=function(a){return a.jb().r("div",this.wa(a).join(" "))};f.C=function(a){return a};f.ba=function(a){return"DIV"==a.tagName};f.L=function(a,b){b.id&&gc(a,b.id);var c=this.A(),d=!1,e=fb(b);e&&xa(e,function(b){b==c?d=!0:b&&this.$a(a,b,c)},this);d||E(b,c);Gc(a,this.C(b));return b};
-f.$a=function(a,b,c){b==c+"-disabled"?a.sa(!1):b==c+"-horizontal"?Hc(a,"horizontal"):b==c+"-vertical"&&Hc(a,"vertical")};var Gc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{e=void 0;for(var g=fb(c),h=0,k=g.length;h<k;h++)if(e=g[h]in Cc?Cc[g[h]]():null)break t;e=null}e&&(e.c=c,a.isEnabled()||e.sa(!1),a.Da(e),e.L(c))}else c.nodeValue&&""!=ma(c.nodeValue)||b.removeChild(c);c=d}};
-Q.prototype.Ka=function(a){a=a.a();u(a,"The container DOM element cannot be null.");bc(a,!0,A);z&&(a.hideFocus=!0);var b=this.X();b&&qc(a,b)};Q.prototype.k=function(a){return a.a()};Q.prototype.A=function(){return"goog-container"};Q.prototype.wa=function(a){var b=this.A(),c=[b,"horizontal"==a.M?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var R=function(){},Ic;ba(R);f=R.prototype;f.X=function(){};f.r=function(a){var b=a.jb().r("div",this.wa(a).join(" "),a.Ca);Jc(a,b);return b};f.C=function(a){return a};f.ua=function(a,b,c){if(a=a.a?a.a():a)if(z&&!D("7")){var d=Kc(fb(a),b);d.push(b);ia(c?E:hb,a).apply(null,d)}else c?E(a,b):hb(a,b)};f.ba=function(){return!0};
-f.L=function(a,b){b.id&&gc(a,b.id);var c=this.C(b);c&&c.firstChild?Lc(a,c.firstChild.nextSibling?Aa(c.childNodes):c.firstChild):a.Ca=null;var d=0,e=this.A(),g=this.A(),h=!1,k=!1,c=!1,m=fb(b);xa(m,function(a){if(h||a!=e)if(k||a!=g){var b=d;this.sb||(this.Ga||Mc(this),this.sb=Ga(this.Ga));a=parseInt(this.sb[a],10);d=b|(isNaN(a)?0:a)}else k=!0;else h=!0,g==e&&(k=!0)},this);a.g=d;h||(m.push(e),g==e&&(k=!0));k||m.push(g);var v=a.I;v&&m.push.apply(m,v);if(z&&!D("7")){var r=Kc(m);0<r.length&&(m.push.apply(m,
-r),c=!0)}if(!h||!k||v||c)b.className=m.join(" ");Jc(a,b);return b};f.Ka=function(a){lc(a)&&this.ta(a.a(),!0);a.isEnabled()&&this.qa(a,a.u())};var Nc=function(a,b,c){if(a=c||a.X())u(b,"The element passed as a first parameter cannot be null."),qc(b,a)},Jc=function(a,b){u(a);u(b);a.u()||sc(b,"hidden",!a.u());a.isEnabled()||Oc(b,1,!a.isEnabled());a.m&8&&Oc(b,8,!!(a.g&8));a.m&16&&Oc(b,16,!!(a.g&16));a.m&64&&Oc(b,64,!!(a.g&64))};f=R.prototype;f.Aa=function(a,b){bc(a,!b,!z&&!Qa)};
-f.ta=function(a,b){this.ua(a,this.A()+"-rtl",b)};f.U=function(a){var b;return a.m&32&&(b=a.k())?wb(b):!1};f.qa=function(a,b){var c;if(a.m&32&&(c=a.k())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.oa(null)}wb(c)!=b&&(b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex")))}};f.ma=function(a,b){M(a,b);a&&sc(a,"hidden",!b)};f.w=function(a,b,c){var d=a.a();if(d){var e=Pc(this,b);e&&this.ua(a,e,c);Oc(d,b,c)}};
-var Oc=function(a,b,c){Ic||(Ic={1:"disabled",8:"selected",16:"checked",64:"expanded"});if(b=Ic[b])u(a,"The element passed as a first parameter cannot be null."),sc(a,b,c)};R.prototype.k=function(a){return a.a()};R.prototype.A=function(){return"goog-control"};R.prototype.wa=function(a){var b=this.A(),c=[b],d=this.A();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push(Pc(this,e));b&=~e}c.push.apply(c,d);(a=a.I)&&c.push.apply(c,a);z&&!D("7")&&c.push.apply(c,Kc(c));return c};
-var Kc=function(a,b){var c=[];b&&(a=a.concat([b]));xa([],function(d){!za(d,ia(x,a))||b&&!x(d,b)||c.push(d.join("_"))});return c},Pc=function(a,b){a.Ga||Mc(a);return a.Ga[b]},Mc=function(a){var b=a.A();a.Ga={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var S=function(a,b,c){N.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=s(b);if(d=Ec[d])break;b=b.f?b.f.constructor:null}b=d?p(d.ca)?d.ca():new d:null}this.b=b;this.Ca=a};t(S,N);f=S.prototype;f.Ca=null;f.g=0;f.m=39;f.cc=255;f.Z=0;f.p=!0;f.I=null;f.fa=!0;f.ya=!1;f.pb=null;f.ob=function(){return this.fa};f.La=function(a){this.e&&a!=this.fa&&Qc(this,a);this.fa=a};f.k=function(){return this.b.k(this)};f.za=function(){return this.ka||(this.ka=new P)};f.Ab=function(){return this.b};
-f.ua=function(a,b){b?a&&(this.I?x(this.I,a)||this.I.push(a):this.I=[a],this.b.ua(this,a,!0)):a&&(this.I&&y(this.I,a))&&(0==this.I.length&&(this.I=null),this.b.ua(this,a,!1))};f.r=function(){var a=this.b.r(this);this.c=a;Nc(this.b,a,this.pb);this.ya||this.b.Aa(a,!1);this.u()||this.b.ma(a,!1)};f.C=function(){return this.b.C(this.a())};f.ba=function(a){return this.b.ba(a)};f.Va=function(a){this.c=a=this.b.L(this,a);Nc(this.b,a,this.pb);this.ya||this.b.Aa(a,!1);this.p="none"!=a.style.display};
-f.G=function(){S.f.G.call(this);this.b.Ka(this);if(this.m&-2&&(this.ob()&&Qc(this,!0),this.m&32)){var a=this.k();if(a){var b=this.za();wc(b,a);hc(this).d(b,"key",this.K).d(a,"focus",this.pa).d(a,"blur",this.oa)}}};
-var Qc=function(a,b){var c=hc(a),d=a.a();b?(c.d(d,"mouseover",a.Oa).d(d,"mousedown",a.na).d(d,"mouseup",a.Pa).d(d,"mouseout",a.Na),a.ra!=aa&&c.d(d,"contextmenu",a.ra),z&&c.d(d,"dblclick",a.rb)):(c.v(d,"mouseover",a.Oa).v(d,"mousedown",a.na).v(d,"mouseup",a.Pa).v(d,"mouseout",a.Na),a.ra!=aa&&c.v(d,"contextmenu",a.ra),z&&c.v(d,"dblclick",a.rb))};S.prototype.ga=function(){S.f.ga.call(this);this.ka&&this.ka.detach();this.u()&&this.isEnabled()&&this.b.qa(this,!1)};var Lc=function(a,b){a.Ca=b};f=S.prototype;
-f.ta=function(a){S.f.ta.call(this,a);var b=this.a();b&&this.b.ta(b,a)};f.Aa=function(a){this.ya=a;var b=this.a();b&&this.b.Aa(b,a)};f.u=function(){return this.p};f.ma=function(a,b){if(b||this.p!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ma(c,a);this.isEnabled()&&this.b.qa(this,a);this.p=a;return!0}return!1};f.isEnabled=function(){return!(this.g&1)};
-f.sa=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!T(this,1,!a)||(a||(this.setActive(!1),this.D(!1)),this.u()&&this.b.qa(this,a),this.w(1,!a))};f.D=function(a){T(this,2,a)&&this.w(2,a)};f.setActive=function(a){T(this,4,a)&&this.w(4,a)};var Rc=function(a,b){T(a,8,b)&&a.w(8,b)},Sc=function(a,b){T(a,64,b)&&a.w(64,b)};S.prototype.w=function(a,b){this.m&a&&b!=!!(this.g&a)&&(this.b.w(this,a,b),this.g=b?this.g|a:this.g&~a)};
-var Tc=function(a,b,c){if(a.e&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.w(b,!1);a.m=c?a.m|b:a.m&~b},U=function(a,b){return!!(a.cc&b)&&!!(a.m&b)},T=function(a,b,c){return!!(a.m&b)&&!!(a.g&b)!=c&&(!(a.Z&b)||a.dispatchEvent(ec(b,c)))&&!a.Sb};f=S.prototype;f.Oa=function(a){(!a.relatedTarget||!sb(this.a(),a.relatedTarget))&&(this.dispatchEvent("enter")&&this.isEnabled()&&U(this,2))&&this.D(!0)};
-f.Na=function(a){a.relatedTarget&&sb(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(U(this,4)&&this.setActive(!1),U(this,2)&&this.D(!1))};f.ra=aa;f.na=function(a){this.isEnabled()&&(U(this,2)&&this.D(!0),!Fb(a)||B&&C&&a.ctrlKey||(U(this,4)&&this.setActive(!0),this.b.U(this)&&this.k().focus()));this.ya||(!Fb(a)||B&&C&&a.ctrlKey)||a.preventDefault()};f.Pa=function(a){this.isEnabled()&&(U(this,2)&&this.D(!0),this.g&4&&(Uc(this,a)&&U(this,4))&&this.setActive(!1))};
-f.rb=function(a){this.isEnabled()&&Uc(this,a)};var Uc=function(a,b){if(U(a,16)){var c=!(a.g&16);T(a,16,c)&&a.w(16,c)}U(a,8)&&Rc(a,!0);U(a,64)&&Sc(a,!(a.g&64));c=new F("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.ab=b.ab);return a.dispatchEvent(c)};S.prototype.pa=function(){U(this,32)&&T(this,32,!0)&&this.w(32,!0)};S.prototype.oa=function(){U(this,4)&&this.setActive(!1);U(this,32)&&T(this,32,!1)&&this.w(32,!1)};
-S.prototype.K=function(a){return this.u()&&this.isEnabled()&&this.kb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};S.prototype.kb=function(a){return 13==a.keyCode&&Uc(this,a)};if(!p(S))throw Error("Invalid component class "+S);if(!p(R))throw Error("Invalid renderer class "+R);var Vc=s(S);Ec[Vc]=R;Dc("goog-control",function(){return new S(null)});var V=function(a,b,c){N.call(this,c);this.b=b||Q.ca();this.M=a||"vertical"};t(V,N);f=V.prototype;f.tb=null;f.ka=null;f.b=null;f.M=null;f.p=!0;f.$=!0;f.Xa=!0;f.j=-1;f.h=null;f.ha=!1;f.Pb=!1;f.Ob=!0;f.O=null;f.k=function(){return this.tb||this.b.k(this)};f.za=function(){return this.ka||(this.ka=new P(this.k()))};f.Ab=function(){return this.b};f.r=function(){this.c=this.b.r(this)};f.C=function(){return this.b.C(this.a())};f.ba=function(a){return this.b.ba(a)};
-f.Va=function(a){this.c=this.b.L(this,a);"none"==a.style.display&&(this.p=!1)};f.G=function(){V.f.G.call(this);jc(this,function(a){a.e&&Wc(this,a)},this);var a=this.a();this.b.Ka(this);this.ma(this.p,!0);hc(this).d(this,"enter",this.Ib).d(this,"highlight",this.Jb).d(this,"unhighlight",this.Lb).d(this,"open",this.Kb).d(this,"close",this.Gb).d(a,"mousedown",this.na).d(jb(a),"mouseup",this.Hb).d(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Fb);this.U()&&Xc(this,!0)};
-var Xc=function(a,b){var c=hc(a),d=a.k();b?c.d(d,"focus",a.pa).d(d,"blur",a.oa).d(a.za(),"key",a.K):c.v(d,"focus",a.pa).v(d,"blur",a.oa).v(a.za(),"key",a.K)};f=V.prototype;f.ga=function(){Yc(this,-1);this.h&&Sc(this.h,!1);this.ha=!1;V.f.ga.call(this)};f.Ib=function(){return!0};
-f.Jb=function(a){var b=mc(this,a.target);if(-1<b&&b!=this.j){var c=O(this,this.j);c&&c.D(!1);this.j=b;c=O(this,this.j);this.ha&&c.setActive(!0);this.Ob&&(this.h&&c!=this.h)&&(c.m&64?Sc(c,!0):Sc(this.h,!1))}b=this.a();u(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&sc(b,"activedescendant",a.target.a().id)};f.Lb=function(a){a.target==O(this,this.j)&&(this.j=-1);a=this.a();u(a,"The DOM element for the container cannot be null.");a.removeAttribute(rc("activedescendant"))};
-f.Kb=function(a){(a=a.target)&&(a!=this.h&&a.getParent()==this)&&(this.h&&Sc(this.h,!1),this.h=a)};f.Gb=function(a){a.target==this.h&&(this.h=null)};f.na=function(a){this.$&&(this.ha=!0);var b=this.k();b&&wb(b)?b.focus():a.preventDefault()};f.Hb=function(){this.ha=!1};
-f.Fb=function(a){var b;t:{b=a.target;if(this.O)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.O){b=this.O[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.na(a);break;case "mouseup":b.Pa(a);break;case "mouseover":b.Oa(a);break;case "mouseout":b.Na(a);break;case "contextmenu":b.ra(a)}};f.pa=function(){};f.oa=function(){Yc(this,-1);this.ha=!1;this.h&&Sc(this.h,!1)};
-f.K=function(a){return this.isEnabled()&&this.u()&&(0!=kc(this)||this.tb)&&this.kb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
-f.kb=function(a){var b=O(this,this.j);if(b&&"function"==typeof b.K&&b.K(a)||this.h&&this.h!=b&&"function"==typeof this.h.K&&this.h.K(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.U())this.k().blur();else return!1;break;case 36:Zc(this);break;case 35:$c(this);break;case 38:if("vertical"==this.M)ad(this);else return!1;break;case 37:if("horizontal"==this.M)lc(this)?bd(this):ad(this);else return!1;break;case 40:if("vertical"==this.M)bd(this);else return!1;
-break;case 39:if("horizontal"==this.M)lc(this)?ad(this):bd(this);else return!1;break;default:return!1}return!0};var Wc=function(a,b){var c=b.a(),c=c.id||(c.id=fc(b));a.O||(a.O={});a.O[c]=b};V.prototype.Da=function(a,b){va(a,S,"The child of a container must be a control");V.f.Da.call(this,a,b)};V.prototype.Ra=function(a,b,c){a.Z|=2;a.Z|=64;!this.U()&&this.Pb||Tc(a,32,!1);a.La(!1);V.f.Ra.call(this,a,b,c);a.e&&this.e&&Wc(this,a);b<=this.j&&this.j++};
-V.prototype.removeChild=function(a,b){if(a=n(a)?this.i&&a?(a in this.i?this.i[a]:void 0)||null:null:a){var c=mc(this,a);-1!=c&&(c==this.j?a.D(!1):c<this.j&&this.j--);var d=a.a();d&&(d.id&&this.O)&&(c=this.O,d=d.id,d in c&&delete c[d])}a=V.f.removeChild.call(this,a,b);a.La(!0);return a};var Hc=function(a,b){if(a.a())throw Error("Component already rendered");a.M=b};f=V.prototype;f.u=function(){return this.p};
-f.ma=function(a,b){if(b||this.p!=a&&this.dispatchEvent(a?"show":"hide")){this.p=a;var c=this.a();c&&(M(c,a),this.U()&&Fc(this.k(),this.$&&this.p),b||this.dispatchEvent(this.p?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.$};f.sa=function(a){this.$!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.$=!0,jc(this,function(a){a.ub?delete a.ub:a.sa(!0)})):(jc(this,function(a){a.isEnabled()?a.sa(!1):a.ub=!0}),this.ha=this.$=!1),this.U()&&Fc(this.k(),a&&this.p))};
-f.U=function(){return this.Xa};f.qa=function(a){a!=this.Xa&&this.e&&Xc(this,a);this.Xa=a;this.$&&this.p&&Fc(this.k(),a)};var Yc=function(a,b){var c=O(a,b);c?c.D(!0):-1<a.j&&O(a,a.j).D(!1)};V.prototype.D=function(a){Yc(this,mc(this,a))};
-var Zc=function(a){cd(a,function(a,c){return(a+1)%c},kc(a)-1)},$c=function(a){cd(a,function(a,c){a--;return 0>a?c-1:a},0)},bd=function(a){cd(a,function(a,c){return(a+1)%c},a.j)},ad=function(a){cd(a,function(a,c){a--;return 0>a?c-1:a},a.j)},cd=function(a,b,c){c=0>c?mc(a,a.h):c;var d=kc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=O(a,c);if(g&&g.u()&&g.isEnabled()&&g.m&2){a.Sa(c);break}e++;c=b.call(a,c,d)}};V.prototype.Sa=function(a){Yc(this,a)};var dd=function(){};t(dd,R);ba(dd);f=dd.prototype;f.A=function(){return"goog-tab"};f.X=function(){return"tab"};f.r=function(a){var b=dd.f.r.call(this,a);(a=a.Qa())&&this.Ta(b,a);return b};f.L=function(a,b){b=dd.f.L.call(this,a,b);var c=this.Qa(b);c&&(a.qb=c);a.g&8&&(c=a.getParent())&&p(c.aa)&&(a.w(8,!1),c.aa(a));return b};f.Qa=function(a){return a.title||""};f.Ta=function(a,b){a&&(a.title=b||"")};var ed=function(a,b,c){S.call(this,a,b||dd.ca(),c);Tc(this,8,!0);this.Z|=9};t(ed,S);ed.prototype.Qa=function(){return this.qb};ed.prototype.Ta=function(a){this.Ab().Ta(this.a(),a);this.qb=a};Dc("goog-tab",function(){return new ed(null)});var W=function(){};t(W,Q);ba(W);W.prototype.A=function(){return"goog-tab-bar"};W.prototype.X=function(){return"tablist"};W.prototype.$a=function(a,b,c){this.Bb||(this.Ha||fd(this),this.Bb=Ga(this.Ha));var d=this.Bb[b];d?(Hc(a,gd(d)),a.vb=d):W.f.$a.call(this,a,b,c)};W.prototype.wa=function(a){var b=W.f.wa.call(this,a);this.Ha||fd(this);b.push(this.Ha[a.vb]);return b};var fd=function(a){var b=a.A();a.Ha={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var X=function(a,b,c){a=a||"top";Hc(this,gd(a));this.vb=a;V.call(this,this.M,b||W.ca(),c);hd(this)};t(X,V);f=X.prototype;f.Zb=!0;f.J=null;f.G=function(){X.f.G.call(this);hd(this)};f.removeChild=function(a,b){id(this,a);return X.f.removeChild.call(this,a,b)};f.Sa=function(a){X.f.Sa.call(this,a);this.Zb&&this.aa(O(this,a))};f.aa=function(a){a?Rc(a,!0):this.J&&Rc(this.J,!1)};
-var id=function(a,b){if(b&&b==a.J){for(var c=mc(a,b),d=c-1;b=O(a,d);d--)if(b.u()&&b.isEnabled()){a.aa(b);return}for(c+=1;b=O(a,c);c++)if(b.u()&&b.isEnabled()){a.aa(b);return}a.aa(null)}};f=X.prototype;f.Xb=function(a){this.J&&this.J!=a.target&&Rc(this.J,!1);this.J=a.target};f.Yb=function(a){a.target==this.J&&(this.J=null)};f.Vb=function(a){id(this,a.target)};f.Wb=function(a){id(this,a.target)};f.pa=function(){O(this,this.j)||this.D(this.J||O(this,0))};
-var hd=function(a){hc(a).d(a,"select",a.Xb).d(a,"unselect",a.Yb).d(a,"disable",a.Vb).d(a,"hide",a.Wb)},gd=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Dc("goog-tab-bar",function(){return new X});var Y=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,qc(a,h.X()),E(a,"goog-zippy-header"),jd(h,a),a&&h.Mb.d(a,"keydown",h.Nb))}L.call(this);this.n=e||kb();this.V=this.n.a(a)||null;this.Ba=this.n.a(d||null);this.ia=(this.Ma=p(b)?b:null)||!b?null:this.n.a(b);this.l=!0==c;this.Mb=new K(this);this.nb=new K(this);var h=this;g(this.V);g(this.Ba);this.W(this.l)};t(Y,L);f=Y.prototype;f.fa=!0;f.X=function(){return"tab"};f.C=function(){return this.ia};f.toggle=function(){this.W(!this.l)};
-f.W=function(a){this.ia?M(this.ia,a):a&&this.Ma&&(this.ia=this.Ma());this.ia&&E(this.ia,"goog-zippy-content");if(this.Ba)M(this.V,!a),M(this.Ba,a);else if(this.V){var b=this.V;a?E(b,"goog-zippy-expanded"):hb(b,"goog-zippy-expanded");b=this.V;a?hb(b,"goog-zippy-collapsed"):E(b,"goog-zippy-collapsed");sc(this.V,"expanded",a)}this.l=a;this.dispatchEvent(new kd("toggle",this))};f.ob=function(){return this.fa};f.La=function(a){this.fa!=a&&((this.fa=a)?(jd(this,this.V),jd(this,this.Ba)):this.nb.Ya())};
-var jd=function(a,b){b&&a.nb.d(b,"click",a.$b)};Y.prototype.Nb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new F("action",this)),a.preventDefault(),a.stopPropagation()};Y.prototype.$b=function(){this.toggle();this.dispatchEvent(new F("action",this))};var kd=function(a,b){F.call(this,a,b)};t(kd,F);var Z=function(a,b){this.mb=[];for(var c=lb(a),c=mb("span","ae-zippy",c),d=0,e;e=c[d];d++){var g;if(void 0!=e.parentNode.parentNode.parentNode.nextElementSibling)g=e.parentNode.parentNode.parentNode.nextElementSibling;else for(g=e.parentNode.parentNode.parentNode.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Y(e,g,!1);this.mb.push(e)}this.fc=new ld(this.mb,lb(b))};Z.prototype.ic=function(){return this.fc};Z.prototype.jc=function(){return this.mb};
-var ld=function(a,b){this.xa=a;if(this.xa.length)for(var c=0,d;d=this.xa[c];c++)I(d,"toggle",this.Ub,!1,this);this.Ja=0;this.l=!1;c="ae-toggle ae-plus ae-action";this.xa.length||(c+=" ae-disabled");this.S=qb("span",{className:c},"Expand All");I(this.S,"click",this.Tb,!1,this);b&&b.appendChild(this.S)};ld.prototype.Tb=function(){this.xa.length&&this.W(!this.l)};
-ld.prototype.Ub=function(a){a=a.currentTarget;this.Ja=a.l?this.Ja+1:this.Ja-1;a.l!=this.l&&(a.l?(this.l=!0,md(this,!0)):0==this.Ja&&(this.l=!1,md(this,!1)))};ld.prototype.W=function(a){this.l=a;a=0;for(var b;b=this.xa[a];a++)b.l!=this.l&&b.W(this.l);md(this)};
-var md=function(a,b){(void 0!==b?b:a.l)?(hb(a.S,"ae-plus"),E(a.S,"ae-minus"),tb(a.S,"Collapse All")):(hb(a.S,"ae-minus"),E(a.S,"ae-plus"),tb(a.S,"Expand All"))},nd=function(a){this.ac=a;this.Db={};var b,c=qb("div",{},b=qb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),qb("div",{className:"goog-tab-bar-clear"}),a=qb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new X;d.L(b);I(d,"select",this.Cb,!1,this);I(d,"unselect",this.Cb,!1,this);b=
-0;for(var e;e=this.ac[b];b++)if(e=lb("ae-stats-details-"+e)){var g=mb("h2",null,e)[0],h;h=g;var k=void 0;eb&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],xb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");eb||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new ed(h);this.Db[s(g)]=e;d.Da(g,!0);a.appendChild(e);0==b?d.aa(g):M(e,!1)}lb("bd").appendChild(c)};
-nd.prototype.Cb=function(a){var b=this.Db[s(a.target)];M(b,"select"==a.type)};ja("ae.Stats.Details.Tabs",nd);ja("goog.ui.Zippy",Y);Y.prototype.setExpanded=Y.prototype.W;ja("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.ic;Z.prototype.getZippys=Z.prototype.jc;ld.prototype.setExpanded=ld.prototype.W;var $=function(){this.bb=[];this.gb=[]},od=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],pd=function(a){if(0>=a)return[2,0.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<od.length;c++)if(a<=od[c][2])return[od[c][0],od[c][1]*b,od[c][2]*b];return[5,2*b,10*b]};$.prototype.fb="stats/static/pix.gif";$.prototype.B="ae-stats-gantt-";$.prototype.eb=0;$.prototype.write=function(a){this.gb.push(a)};
-var qd=function(a,b,c,d){a.write('<tr class="'+a.B+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.B+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.B+'tick" src="'+a.fb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.B+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
-$.prototype.hc=function(){this.gb=[];var a=pd(this.eb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.B+'table">\n');qd(this,b,c,a);for(var d=0;d<this.bb.length;d++){var e=this.bb[d];this.write('<tr class="'+this.B+'datarow"><td width="20%">');0<e.label.length&&(0<e.la.length&&this.write('<a class="'+this.B+'link" href="'+e.la+'">'),this.write(e.label),0<e.la.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.B+'container">');0<e.la.length&&this.write('<a class="'+
-this.B+'link" href="'+e.la+'"\n>');this.write('<img class="'+this.B+'bar" src="'+this.fb+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.cb&&(this.write('<img class="'+this.B+'extra" src="'+this.fb+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.cb*a+'%"\n>'));0<e.zb.length&&(this.write('<span class="'+this.B+'inline" style="left:'+(e.start+Math.max(e.duration,e.cb))*a+'%">&nbsp;'),this.write(e.zb),this.write("</span>"));0<e.la.length&&
-this.write("</a>");this.write("</div></td></tr>\n")}qd(this,b,c,a);this.write("</table>\n");return this.gb.join("")};$.prototype.gc=function(a,b,c,d,e,g){this.eb=Math.max(this.eb,Math.max(b+c,b+d));this.bb.push({label:a,start:b,duration:c,cb:d,zb:e,la:g})};ja("Gantt",$);$.prototype.add_bar=$.prototype.gc;$.prototype.draw=$.prototype.hc;})();
+var kc=function(a){if(null==a.sa){var b;t:{b=a.e?a.c:a.n.F.body;var c=ib(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(b=c.defaultView.getComputedStyle(b,null))){b=b.direction||b.getPropertyValue("direction")||"";break t}b=""}a.sa="rtl"==(b||((a.e?a.c:a.n.F.body).currentStyle?(a.e?a.c:a.n.F.body).currentStyle.direction:null)||(a.e?a.c:a.n.F.body).style&&(a.e?a.c:a.n.F.body).style.direction)}return a.sa};
+N.prototype.qa=function(a){if(this.e)throw Error("Component already rendered");this.sa=a};var jc=function(a){return a.s?a.s.length:0},O=function(a,b){return a.s?a.s[b]||null:null},ic=function(a,b,c){a.s&&ya(a.s,b,c)},lc=function(a,b){return a.s&&b?xa(a.s,b):-1};
+N.prototype.removeChild=function(a,b){if(a){var c=m(a)?a:ec(a);a=this.i&&c?(c in this.i?this.i[c]:void 0)||null:null;if(c&&a){var d=this.i;c in d&&delete d[c];Ba(this.s,a);b&&(a.da(),a.c&&(c=a.c)&&c.parentNode&&c.parentNode.removeChild(c));hc(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var mc,nc={lc:"activedescendant",qc:"atomic",rc:"autocomplete",tc:"busy",wc:"checked",Bc:"controls",Dc:"describedby",Gc:"disabled",Ic:"dropeffect",Jc:"expanded",Kc:"flowto",Mc:"grabbed",Qc:"haspopup",Sc:"hidden",Uc:"invalid",Vc:"label",Wc:"labelledby",Xc:"level",bd:"live",md:"multiline",nd:"multiselectable",rd:"orientation",sd:"owns",td:"posinset",vd:"pressed",zd:"readonly",Bd:"relevant",Cd:"required",Id:"selected",Kd:"setsize",Md:"sort",Zd:"valuemax",$d:"valuemin",ae:"valuenow",be:"valuetext"};var oc={mc:"alert",nc:"alertdialog",oc:"application",pc:"article",sc:"banner",uc:"button",vc:"checkbox",xc:"columnheader",yc:"combobox",zc:"complementary",Ac:"contentinfo",Cc:"definition",Ec:"dialog",Fc:"directory",Hc:"document",Lc:"form",Nc:"grid",Oc:"gridcell",Pc:"group",Rc:"heading",Tc:"img",Yc:"link",Zc:"list",$c:"listbox",ad:"listitem",cd:"log",dd:"main",ed:"marquee",fd:"math",gd:"menu",hd:"menubar",jd:"menuitem",kd:"menuitemcheckbox",ld:"menuitemradio",od:"navigation",pd:"note",qd:"option",
+ud:"presentation",wd:"progressbar",xd:"radio",yd:"radiogroup",Ad:"region",Dd:"row",Ed:"rowgroup",Fd:"rowheader",Gd:"scrollbar",Hd:"search",Jd:"separator",Ld:"slider",Nd:"spinbutton",Od:"status",Pd:"tab",Qd:"tablist",Rd:"tabpanel",Sd:"textbox",Td:"timer",Ud:"toolbar",Vd:"tooltip",Wd:"tree",Xd:"treegrid",Yd:"treeitem"};var pc=function(a,b){b?(s(Ga(oc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},rc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=qc(b);""===c||void 0==c?(mc||(mc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=mc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
+c)},qc=function(a){s(a,"ARIA attribute cannot be empty.");s(Ga(nc,a),"No such ARIA attribute "+a);return"aria-"+a};var tc=function(a,b,c,d,e){if(!(w||y&&B("525")))return!0;if(A&&e)return sc(a);if(e&&!d||!c&&(17==b||18==b||A&&91==b))return!1;if(y&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(w&&d&&b==a)return!1;switch(a){case 13:return!(w&&w&&9<=cb);case 27:return!y}return sc(a)},sc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||y&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
+default:return!1}},uc=function(a){switch(a){case 61:return 187;case 59:return 186;case 173:return 189;case 224:return 91;case 0:return 224;default:return a}};var P=function(a,b){L.call(this);a&&vc(this,a,b)};r(P,L);f=P.prototype;f.c=null;f.Ea=null;f.Xa=null;f.Fa=null;f.t=-1;f.N=-1;f.jb=!1;
+var wc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},xc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},yc=w||y&&B("525"),zc=A&&x;
+P.prototype.Rb=function(a){y&&(17==this.t&&!a.ctrlKey||18==this.t&&!a.altKey||A&&91==this.t&&!a.metaKey)&&(this.N=this.t=-1);-1==this.t&&(a.ctrlKey&&17!=a.keyCode?this.t=17:a.altKey&&18!=a.keyCode?this.t=18:a.metaKey&&91!=a.keyCode&&(this.t=91));yc&&!tc(a.keyCode,this.t,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.N=x?uc(a.keyCode):a.keyCode,zc&&(this.jb=a.altKey))};P.prototype.Sb=function(a){this.N=this.t=-1;this.jb=a.altKey};
+P.prototype.handleEvent=function(a){var b=a.P,c,d,e=b.altKey;w&&"keypress"==a.type?(c=this.N,d=13!=c&&27!=c?b.keyCode:0):y&&"keypress"==a.type?(c=this.N,d=0<=b.charCode&&63232>b.charCode&&sc(c)?b.charCode:0):Ra?(c=this.N,d=sc(c)?b.keyCode:0):(c=b.keyCode||this.N,d=b.charCode||0,zc&&(e=this.jb),A&&(63==d&&224==c)&&(c=191));var g=c,h=b.keyIdentifier;c?63232<=c&&c in wc?g=wc[c]:25==c&&a.shiftKey&&(g=9):h&&h in xc&&(g=xc[h]);a=g==this.t;this.t=g;b=new Ac(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
+P.prototype.a=function(){return this.c};var vc=function(a,b,c){a.Fa&&a.detach();a.c=b;a.Ea=H(a.c,"keypress",a,c);a.Xa=H(a.c,"keydown",a.Rb,c,a);a.Fa=H(a.c,"keyup",a.Sb,c,a)};P.prototype.detach=function(){this.Ea&&(J(this.Ea),J(this.Xa),J(this.Fa),this.Fa=this.Xa=this.Ea=null);this.c=null;this.N=this.t=-1};var Ac=function(a,b,c,d){d&&Cb(this,d,void 0);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};r(Ac,G);var Cc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!n(b))throw Error("Invalid decorator function "+b);Bc[a]=b},Dc={},Bc={};var Q=function(){};ba(Q);Q.prototype.V=function(){};var Ec=function(a,b){a&&(a.tabIndex=b?0:-1)};f=Q.prototype;f.r=function(a){return a.kb().r("div",this.ta(a).join(" "))};f.C=function(a){return a};f.Z=function(a){return"DIV"==a.tagName};f.K=function(a,b){b.id&&fc(a,b.id);var c=this.A(),d=!1,e=C(b);e&&ya(e,function(b){b==c?d=!0:b&&this.ab(a,b,c)},this);d||D(b,c);Fc(a,this.C(b));return b};f.ab=function(a,b,c){b==c+"-disabled"?a.pa(!1):b==c+"-horizontal"?Gc(a,"horizontal"):b==c+"-vertical"&&Gc(a,"vertical")};
+var Fc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{e=void 0;for(var g=C(c),h=0,k=g.length;h<k;h++)if(e=g[h]in Bc?Bc[g[h]]():null)break t;e=null}e&&(e.c=c,a.isEnabled()||e.pa(!1),a.Ca(e),e.K(c))}else c.nodeValue&&""!=na(c.nodeValue)||b.removeChild(c);c=d}};Q.prototype.Ma=function(a){a=a.a();s(a,"The container DOM element cannot be null.");ac(a,!0,x);w&&(a.hideFocus=!0);var b=this.V();b&&pc(a,b)};Q.prototype.k=function(a){return a.a()};
+Q.prototype.A=function(){return"goog-container"};Q.prototype.ta=function(a){var b=this.A(),c=[b,"horizontal"==a.L?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var R=function(){},Hc;ba(R);f=R.prototype;f.V=function(){};f.r=function(a){var b=a.kb().r("div",this.ta(a).join(" "),a.Ba);Ic(a,b);return b};f.C=function(a){return a};f.ra=function(a,b,c){if(a=a.a?a.a():a)if(w&&!B("7")){var d=Jc(C(a),b);d.push(b);ia(c?D:E,a).apply(null,d)}else c?D(a,b):E(a,b)};f.Z=function(){return!0};
+f.K=function(a,b){b.id&&fc(a,b.id);var c=this.C(b);c&&c.firstChild?Kc(a,c.firstChild.nextSibling?Ca(c.childNodes):c.firstChild):a.Ba=null;var d=0,e=this.A(),g=this.A(),h=!1,k=!1,c=!1,q=C(b);ya(q,function(a){if(h||a!=e)if(k||a!=g){var b=d;this.tb||(this.Ha||Lc(this),this.tb=Ia(this.Ha));a=parseInt(this.tb[a],10);d=b|(isNaN(a)?0:a)}else k=!0;else h=!0,g==e&&(k=!0)},this);a.g=d;h||(q.push(e),g==e&&(k=!0));k||q.push(g);var ma=a.H;ma&&q.push.apply(q,ma);if(w&&!B("7")){var z=Jc(q);0<z.length&&(q.push.apply(q,
+z),c=!0)}if(!h||!k||ma||c)b.className=q.join(" ");Ic(a,b);return b};f.Ma=function(a){kc(a)&&this.qa(a.a(),!0);a.isEnabled()&&this.na(a,a.u())};var Mc=function(a,b,c){if(a=c||a.V())s(b,"The element passed as a first parameter cannot be null."),pc(b,a)},Ic=function(a,b){s(a);s(b);a.u()||rc(b,"hidden",!a.u());a.isEnabled()||Nc(b,1,!a.isEnabled());a.m&8&&Nc(b,8,!!(a.g&8));a.m&16&&Nc(b,16,!!(a.g&16));a.m&64&&Nc(b,64,!!(a.g&64))};f=R.prototype;f.za=function(a,b){ac(a,!b,!w&&!Ra)};
+f.qa=function(a,b){this.ra(a,this.A()+"-rtl",b)};f.S=function(a){var b;return a.m&32&&(b=a.k())?vb(b):!1};f.na=function(a,b){var c;if(a.m&32&&(c=a.k())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.la(null)}vb(c)!=b&&(b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex")))}};f.ja=function(a,b){M(a,b);a&&rc(a,"hidden",!b)};f.v=function(a,b,c){var d=a.a();if(d){var e=Oc(this,b);e&&this.ra(a,e,c);Nc(d,b,c)}};
+var Nc=function(a,b,c){Hc||(Hc={1:"disabled",8:"selected",16:"checked",64:"expanded"});if(b=Hc[b])s(a,"The element passed as a first parameter cannot be null."),rc(a,b,c)};R.prototype.k=function(a){return a.a()};R.prototype.A=function(){return"goog-control"};R.prototype.ta=function(a){var b=this.A(),c=[b],d=this.A();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push(Oc(this,e));b&=~e}c.push.apply(c,d);(a=a.H)&&c.push.apply(c,a);w&&!B("7")&&c.push.apply(c,Jc(c));return c};
+var Jc=function(a,b){var c=[];b&&(a=a.concat([b]));ya([],function(d){!Aa(d,ia(u,a))||b&&!u(d,b)||c.push(d.join("_"))});return c},Oc=function(a,b){a.Ha||Lc(a);return a.Ha[b]},Lc=function(a){var b=a.A();a.Ha={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var S=function(a,b,c){N.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=p(b);if(d=Dc[d])break;b=b.f?b.f.constructor:null}b=d?n(d.aa)?d.aa():new d:null}this.b=b;this.Ba=void 0!==a?a:null};r(S,N);f=S.prototype;f.Ba=null;f.g=0;f.m=39;f.dc=255;f.W=0;f.q=!0;f.H=null;f.ca=!0;f.xa=!1;f.qb=null;f.pb=function(){return this.ca};f.Na=function(a){this.e&&a!=this.ca&&Pc(this,a);this.ca=a};f.k=function(){return this.b.k(this)};f.ya=function(){return this.ga||(this.ga=new P)};f.zb=function(){return this.b};
+f.ra=function(a,b){b?a&&(this.H?u(this.H,a)||this.H.push(a):this.H=[a],this.b.ra(this,a,!0)):a&&(this.H&&Ba(this.H,a))&&(0==this.H.length&&(this.H=null),this.b.ra(this,a,!1))};f.r=function(){var a=this.b.r(this);this.c=a;Mc(this.b,a,this.qb);this.xa||this.b.za(a,!1);this.u()||this.b.ja(a,!1)};f.C=function(){return this.b.C(this.a())};f.Z=function(a){return this.b.Z(a)};f.Wa=function(a){this.c=a=this.b.K(this,a);Mc(this.b,a,this.qb);this.xa||this.b.za(a,!1);this.q="none"!=a.style.display};
+f.G=function(){S.f.G.call(this);this.b.Ma(this);if(this.m&-2&&(this.pb()&&Pc(this,!0),this.m&32)){var a=this.k();if(a){var b=this.ya();vc(b,a);gc(this).d(b,"key",this.J).d(a,"focus",this.ma).d(a,"blur",this.la)}}};
+var Pc=function(a,b){var c=gc(a),d=a.a();b?(c.d(d,"mouseover",a.Qa).d(d,"mousedown",a.ka).d(d,"mouseup",a.Ra).d(d,"mouseout",a.Pa),a.oa!=aa&&c.d(d,"contextmenu",a.oa),w&&c.d(d,"dblclick",a.sb)):(c.w(d,"mouseover",a.Qa).w(d,"mousedown",a.ka).w(d,"mouseup",a.Ra).w(d,"mouseout",a.Pa),a.oa!=aa&&c.w(d,"contextmenu",a.oa),w&&c.w(d,"dblclick",a.sb))};S.prototype.da=function(){S.f.da.call(this);this.ga&&this.ga.detach();this.u()&&this.isEnabled()&&this.b.na(this,!1)};var Kc=function(a,b){a.Ba=b};f=S.prototype;
+f.qa=function(a){S.f.qa.call(this,a);var b=this.a();b&&this.b.qa(b,a)};f.za=function(a){this.xa=a;var b=this.a();b&&this.b.za(b,a)};f.u=function(){return this.q};f.ja=function(a,b){if(b||this.q!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ja(c,a);this.isEnabled()&&this.b.na(this,a);this.q=a;return!0}return!1};f.isEnabled=function(){return!(this.g&1)};
+f.pa=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!T(this,1,!a)||(a||(this.setActive(!1),this.D(!1)),this.u()&&this.b.na(this,a),this.v(1,!a))};f.D=function(a){T(this,2,a)&&this.v(2,a)};f.setActive=function(a){T(this,4,a)&&this.v(4,a)};var Qc=function(a,b){T(a,8,b)&&a.v(8,b)},Rc=function(a,b){T(a,64,b)&&a.v(64,b)};S.prototype.v=function(a,b){this.m&a&&b!=!!(this.g&a)&&(this.b.v(this,a,b),this.g=b?this.g|a:this.g&~a)};
+var Sc=function(a,b,c){if(a.e&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.v(b,!1);a.m=c?a.m|b:a.m&~b},U=function(a,b){return!!(a.dc&b)&&!!(a.m&b)},T=function(a,b,c){return!!(a.m&b)&&!!(a.g&b)!=c&&(!(a.W&b)||a.dispatchEvent(dc(b,c)))&&!a.Tb};f=S.prototype;f.Qa=function(a){(!a.relatedTarget||!rb(this.a(),a.relatedTarget))&&(this.dispatchEvent("enter")&&this.isEnabled()&&U(this,2))&&this.D(!0)};
+f.Pa=function(a){a.relatedTarget&&rb(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(U(this,4)&&this.setActive(!1),U(this,2)&&this.D(!1))};f.oa=aa;f.ka=function(a){this.isEnabled()&&(U(this,2)&&this.D(!0),!Eb(a)||y&&A&&a.ctrlKey||(U(this,4)&&this.setActive(!0),this.b.S(this)&&this.k().focus()));this.xa||(!Eb(a)||y&&A&&a.ctrlKey)||a.preventDefault()};f.Ra=function(a){this.isEnabled()&&(U(this,2)&&this.D(!0),this.g&4&&(Tc(this,a)&&U(this,4))&&this.setActive(!1))};
+f.sb=function(a){this.isEnabled()&&Tc(this,a)};var Tc=function(a,b){if(U(a,16)){var c=!(a.g&16);T(a,16,c)&&a.v(16,c)}U(a,8)&&Qc(a,!0);U(a,64)&&Rc(a,!(a.g&64));c=new F("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.bb=b.bb);return a.dispatchEvent(c)};S.prototype.ma=function(){U(this,32)&&T(this,32,!0)&&this.v(32,!0)};S.prototype.la=function(){U(this,4)&&this.setActive(!1);U(this,32)&&T(this,32,!1)&&this.v(32,!1)};
+S.prototype.J=function(a){return this.u()&&this.isEnabled()&&this.lb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};S.prototype.lb=function(a){return 13==a.keyCode&&Tc(this,a)};if(!n(S))throw Error("Invalid component class "+S);if(!n(R))throw Error("Invalid renderer class "+R);var Uc=p(S);Dc[Uc]=R;Cc("goog-control",function(){return new S(null)});var V=function(a,b,c){N.call(this,c);this.b=b||Q.aa();this.L=a||"vertical"};r(V,N);f=V.prototype;f.ub=null;f.ga=null;f.b=null;f.L=null;f.q=!0;f.X=!0;f.Za=!0;f.j=-1;f.h=null;f.ea=!1;f.Pb=!1;f.Ob=!0;f.M=null;f.k=function(){return this.ub||this.b.k(this)};f.ya=function(){return this.ga||(this.ga=new P(this.k()))};f.zb=function(){return this.b};f.r=function(){this.c=this.b.r(this)};f.C=function(){return this.b.C(this.a())};f.Z=function(a){return this.b.Z(a)};
+f.Wa=function(a){this.c=this.b.K(this,a);"none"==a.style.display&&(this.q=!1)};f.G=function(){V.f.G.call(this);ic(this,function(a){a.e&&Vc(this,a)},this);var a=this.a();this.b.Ma(this);this.ja(this.q,!0);gc(this).d(this,"enter",this.Ib).d(this,"highlight",this.Jb).d(this,"unhighlight",this.Lb).d(this,"open",this.Kb).d(this,"close",this.Gb).d(a,"mousedown",this.ka).d(ib(a),"mouseup",this.Hb).d(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Fb);this.S()&&Wc(this,!0)};
+var Wc=function(a,b){var c=gc(a),d=a.k();b?c.d(d,"focus",a.ma).d(d,"blur",a.la).d(a.ya(),"key",a.J):c.w(d,"focus",a.ma).w(d,"blur",a.la).w(a.ya(),"key",a.J)};f=V.prototype;f.da=function(){Xc(this,-1);this.h&&Rc(this.h,!1);this.ea=!1;V.f.da.call(this)};f.Ib=function(){return!0};
+f.Jb=function(a){var b=lc(this,a.target);if(-1<b&&b!=this.j){var c=O(this,this.j);c&&c.D(!1);this.j=b;c=O(this,this.j);this.ea&&c.setActive(!0);this.Ob&&(this.h&&c!=this.h)&&(c.m&64?Rc(c,!0):Rc(this.h,!1))}b=this.a();s(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&rc(b,"activedescendant",a.target.a().id)};f.Lb=function(a){a.target==O(this,this.j)&&(this.j=-1);a=this.a();s(a,"The DOM element for the container cannot be null.");a.removeAttribute(qc("activedescendant"))};
+f.Kb=function(a){(a=a.target)&&(a!=this.h&&a.getParent()==this)&&(this.h&&Rc(this.h,!1),this.h=a)};f.Gb=function(a){a.target==this.h&&(this.h=null)};f.ka=function(a){this.X&&(this.ea=!0);var b=this.k();b&&vb(b)?b.focus():a.preventDefault()};f.Hb=function(){this.ea=!1};
+f.Fb=function(a){var b;t:{b=a.target;if(this.M)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.M){b=this.M[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.ka(a);break;case "mouseup":b.Ra(a);break;case "mouseover":b.Qa(a);break;case "mouseout":b.Pa(a);break;case "contextmenu":b.oa(a)}};f.ma=function(){};f.la=function(){Xc(this,-1);this.ea=!1;this.h&&Rc(this.h,!1)};
+f.J=function(a){return this.isEnabled()&&this.u()&&(0!=jc(this)||this.ub)&&this.lb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
+f.lb=function(a){var b=O(this,this.j);if(b&&"function"==typeof b.J&&b.J(a)||this.h&&this.h!=b&&"function"==typeof this.h.J&&this.h.J(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.S())this.k().blur();else return!1;break;case 36:Yc(this);break;case 35:Zc(this);break;case 38:if("vertical"==this.L)$c(this);else return!1;break;case 37:if("horizontal"==this.L)kc(this)?ad(this):$c(this);else return!1;break;case 40:if("vertical"==this.L)ad(this);else return!1;
+break;case 39:if("horizontal"==this.L)kc(this)?$c(this):ad(this);else return!1;break;default:return!1}return!0};var Vc=function(a,b){var c=b.a(),c=c.id||(c.id=ec(b));a.M||(a.M={});a.M[c]=b};V.prototype.Ca=function(a,b){wa(a,S,"The child of a container must be a control");V.f.Ca.call(this,a,b)};V.prototype.Ta=function(a,b,c){a.W|=2;a.W|=64;!this.S()&&this.Pb||Sc(a,32,!1);a.Na(!1);V.f.Ta.call(this,a,b,c);a.e&&this.e&&Vc(this,a);b<=this.j&&this.j++};
+V.prototype.removeChild=function(a,b){if(a=m(a)?this.i&&a?(a in this.i?this.i[a]:void 0)||null:null:a){var c=lc(this,a);-1!=c&&(c==this.j?a.D(!1):c<this.j&&this.j--);var d=a.a();d&&(d.id&&this.M)&&(c=this.M,d=d.id,d in c&&delete c[d])}a=V.f.removeChild.call(this,a,b);a.Na(!0);return a};var Gc=function(a,b){if(a.a())throw Error("Component already rendered");a.L=b};f=V.prototype;f.u=function(){return this.q};
+f.ja=function(a,b){if(b||this.q!=a&&this.dispatchEvent(a?"show":"hide")){this.q=a;var c=this.a();c&&(M(c,a),this.S()&&Ec(this.k(),this.X&&this.q),b||this.dispatchEvent(this.q?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.X};f.pa=function(a){this.X!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.X=!0,ic(this,function(a){a.vb?delete a.vb:a.pa(!0)})):(ic(this,function(a){a.isEnabled()?a.pa(!1):a.vb=!0}),this.ea=this.X=!1),this.S()&&Ec(this.k(),a&&this.q))};
+f.S=function(){return this.Za};f.na=function(a){a!=this.Za&&this.e&&Wc(this,a);this.Za=a;this.X&&this.q&&Ec(this.k(),a)};var Xc=function(a,b){var c=O(a,b);c?c.D(!0):-1<a.j&&O(a,a.j).D(!1)};V.prototype.D=function(a){Xc(this,lc(this,a))};
+var Yc=function(a){bd(a,function(a,c){return(a+1)%c},jc(a)-1)},Zc=function(a){bd(a,function(a,c){a--;return 0>a?c-1:a},0)},ad=function(a){bd(a,function(a,c){return(a+1)%c},a.j)},$c=function(a){bd(a,function(a,c){a--;return 0>a?c-1:a},a.j)},bd=function(a,b,c){c=0>c?lc(a,a.h):c;var d=jc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=O(a,c);if(g&&g.u()&&g.isEnabled()&&g.m&2){a.Ua(c);break}e++;c=b.call(a,c,d)}};V.prototype.Ua=function(a){Xc(this,a)};var cd=function(){};r(cd,R);ba(cd);f=cd.prototype;f.A=function(){return"goog-tab"};f.V=function(){return"tab"};f.r=function(a){var b=cd.f.r.call(this,a);(a=a.Sa())&&this.Va(b,a);return b};f.K=function(a,b){b=cd.f.K.call(this,a,b);var c=this.Sa(b);c&&(a.rb=c);a.g&8&&(c=a.getParent())&&n(c.Y)&&(a.v(8,!1),c.Y(a));return b};f.Sa=function(a){return a.title||""};f.Va=function(a,b){a&&(a.title=b||"")};var dd=function(a,b,c){S.call(this,a,b||cd.aa(),c);Sc(this,8,!0);this.W|=9};r(dd,S);dd.prototype.Sa=function(){return this.rb};dd.prototype.Va=function(a){this.zb().Va(this.a(),a);this.rb=a};Cc("goog-tab",function(){return new dd(null)});var W=function(){};r(W,Q);ba(W);W.prototype.A=function(){return"goog-tab-bar"};W.prototype.V=function(){return"tablist"};W.prototype.ab=function(a,b,c){this.Ab||(this.Ja||ed(this),this.Ab=Ia(this.Ja));var d=this.Ab[b];d?(Gc(a,fd(d)),a.wb=d):W.f.ab.call(this,a,b,c)};W.prototype.ta=function(a){var b=W.f.ta.call(this,a);this.Ja||ed(this);b.push(this.Ja[a.wb]);return b};var ed=function(a){var b=a.A();a.Ja={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var X=function(a,b,c){a=a||"top";Gc(this,fd(a));this.wb=a;V.call(this,this.L,b||W.aa(),c);gd(this)};r(X,V);f=X.prototype;f.$b=!0;f.I=null;f.G=function(){X.f.G.call(this);gd(this)};f.removeChild=function(a,b){hd(this,a);return X.f.removeChild.call(this,a,b)};f.Ua=function(a){X.f.Ua.call(this,a);this.$b&&this.Y(O(this,a))};f.Y=function(a){a?Qc(a,!0):this.I&&Qc(this.I,!1)};
+var hd=function(a,b){if(b&&b==a.I){for(var c=lc(a,b),d=c-1;b=O(a,d);d--)if(b.u()&&b.isEnabled()){a.Y(b);return}for(c+=1;b=O(a,c);c++)if(b.u()&&b.isEnabled()){a.Y(b);return}a.Y(null)}};f=X.prototype;f.Yb=function(a){this.I&&this.I!=a.target&&Qc(this.I,!1);this.I=a.target};f.Zb=function(a){a.target==this.I&&(this.I=null)};f.Wb=function(a){hd(this,a.target)};f.Xb=function(a){hd(this,a.target)};f.ma=function(){O(this,this.j)||this.D(this.I||O(this,0))};
+var gd=function(a){gc(a).d(a,"select",a.Yb).d(a,"unselect",a.Zb).d(a,"disable",a.Wb).d(a,"hide",a.Xb)},fd=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Cc("goog-tab-bar",function(){return new X});var Y=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,pc(a,h.V()),D(a,"goog-zippy-header"),id(h,a),a&&h.Mb.d(a,"keydown",h.Nb))}L.call(this);this.n=e||jb();this.T=this.n.a(a)||null;this.Aa=this.n.a(d||null);this.fa=(this.Oa=n(b)?b:null)||!b?null:this.n.a(b);this.l=!0==c;this.Mb=new K(this);this.ob=new K(this);var h=this;g(this.T);g(this.Aa);this.U(this.l)};r(Y,L);f=Y.prototype;f.ca=!0;f.V=function(){return"tab"};f.C=function(){return this.fa};f.toggle=function(){this.U(!this.l)};
+f.U=function(a){this.fa?M(this.fa,a):a&&this.Oa&&(this.fa=this.Oa());this.fa&&D(this.fa,"goog-zippy-content");if(this.Aa)M(this.T,!a),M(this.Aa,a);else if(this.T){var b=this.T;a?D(b,"goog-zippy-expanded"):E(b,"goog-zippy-expanded");b=this.T;a?E(b,"goog-zippy-collapsed"):D(b,"goog-zippy-collapsed");rc(this.T,"expanded",a)}this.l=a;this.dispatchEvent(new jd("toggle",this))};f.pb=function(){return this.ca};f.Na=function(a){this.ca!=a&&((this.ca=a)?(id(this,this.T),id(this,this.Aa)):this.ob.$a())};
+var id=function(a,b){b&&a.ob.d(b,"click",a.ac)};Y.prototype.Nb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new F("action",this)),a.preventDefault(),a.stopPropagation()};Y.prototype.ac=function(){this.toggle();this.dispatchEvent(new F("action",this))};var jd=function(a,b){F.call(this,a,b)};r(jd,F);var Z=function(a,b){this.nb=[];for(var c=lb("span","ae-zippy",kb(document,a)),d=0,e;e=c[d];d++){var g;if(void 0!=e.parentNode.parentNode.parentNode.nextElementSibling)g=e.parentNode.parentNode.parentNode.nextElementSibling;else for(g=e.parentNode.parentNode.parentNode.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Y(e,g,!1);this.nb.push(e)}this.gc=new kd(this.nb,kb(document,b))};Z.prototype.jc=function(){return this.gc};Z.prototype.kc=function(){return this.nb};
+var kd=function(a,b){this.ua=a;if(this.ua.length)for(var c=0,d;d=this.ua[c];c++)H(d,"toggle",this.Vb,!1,this);this.Ka=0;this.l=!1;c="ae-toggle ae-plus ae-action";this.ua.length||(c+=" ae-disabled");this.Q=pb("span",{className:c},"Expand All");H(this.Q,"click",this.Ub,!1,this);b&&b.appendChild(this.Q)};kd.prototype.Ub=function(){this.ua.length&&this.U(!this.l)};
+kd.prototype.Vb=function(a){a=a.currentTarget;this.Ka=a.l?this.Ka+1:this.Ka-1;a.l!=this.l&&(a.l?(this.l=!0,ld(this,!0)):0==this.Ka&&(this.l=!1,ld(this,!1)))};kd.prototype.U=function(a){this.l=a;a=0;for(var b;b=this.ua[a];a++)b.l!=this.l&&b.U(this.l);ld(this)};
+var ld=function(a,b){(void 0!==b?b:a.l)?(E(a.Q,"ae-plus"),D(a.Q,"ae-minus"),sb(a.Q,"Collapse All")):(E(a.Q,"ae-minus"),D(a.Q,"ae-plus"),sb(a.Q,"Expand All"))},md=function(a){this.bc=a;this.Cb={};var b,c=pb("div",{},b=pb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),pb("div",{className:"goog-tab-bar-clear"}),a=pb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new X;d.K(b);H(d,"select",this.Bb,!1,this);H(d,"unselect",this.Bb,!1,this);b=0;
+for(var e;e=this.bc[b];b++)if(e=kb(document,"ae-stats-details-"+e)){var g=lb("h2",null,e)[0],h;h=g;var k=void 0;fb&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],wb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");fb||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new dd(h);this.Cb[p(g)]=e;d.Ca(g,!0);a.appendChild(e);0==b?d.Y(g):M(e,!1)}kb(document,"bd").appendChild(c)};
+md.prototype.Bb=function(a){var b=this.Cb[p(a.target)];M(b,"select"==a.type)};ja("ae.Stats.Details.Tabs",md);ja("goog.ui.Zippy",Y);Y.prototype.setExpanded=Y.prototype.U;ja("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.jc;Z.prototype.getZippys=Z.prototype.kc;kd.prototype.setExpanded=kd.prototype.U;var $=function(){this.cb=[];this.hb=[]},nd=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],od=function(a){if(0>=a)return[2,0.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<nd.length;c++)if(a<=nd[c][2])return[nd[c][0],nd[c][1]*b,nd[c][2]*b];return[5,2*b,10*b]};$.prototype.gb="stats/static/pix.gif";$.prototype.B="ae-stats-gantt-";$.prototype.fb=0;$.prototype.write=function(a){this.hb.push(a)};
+var pd=function(a,b,c,d){a.write('<tr class="'+a.B+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.B+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.B+'tick" src="'+a.gb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.B+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
+$.prototype.ic=function(){this.hb=[];var a=od(this.fb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.B+'table">\n');pd(this,b,c,a);for(var d=0;d<this.cb.length;d++){var e=this.cb[d];this.write('<tr class="'+this.B+'datarow"><td width="20%">');0<e.label.length&&(0<e.ia.length&&this.write('<a class="'+this.B+'link" href="'+e.ia+'">'),this.write(e.label),0<e.ia.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.B+'container">');0<e.ia.length&&this.write('<a class="'+
+this.B+'link" href="'+e.ia+'"\n>');this.write('<img class="'+this.B+'bar" src="'+this.gb+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.eb&&(this.write('<img class="'+this.B+'extra" src="'+this.gb+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.eb*a+'%"\n>'));0<e.yb.length&&(this.write('<span class="'+this.B+'inline" style="left:'+(e.start+Math.max(e.duration,e.eb))*a+'%">&nbsp;'),this.write(e.yb),this.write("</span>"));0<e.ia.length&&
+this.write("</a>");this.write("</div></td></tr>\n")}pd(this,b,c,a);this.write("</table>\n");return this.hb.join("")};$.prototype.hc=function(a,b,c,d,e,g){this.fb=Math.max(this.fb,Math.max(b+c,b+d));this.cb.push({label:a,start:b,duration:c,eb:d,yb:e,ia:g})};ja("Gantt",$);$.prototype.add_bar=$.prototype.hc;$.prototype.draw=$.prototype.ic;})();
diff --git a/google/appengine/ext/datastore_admin/backup_handler.py b/google/appengine/ext/datastore_admin/backup_handler.py
index c4bd376..2fbbb52 100644
--- a/google/appengine/ext/datastore_admin/backup_handler.py
+++ b/google/appengine/ext/datastore_admin/backup_handler.py
@@ -542,6 +542,17 @@
   def post(self):
     """Handler for post requests to datastore_admin/backup.create."""
     try:
+
+
+
+
+      if ('X-AppEngine-TaskName' not in self.request.headers and
+          'X-AppEngine-Cron' not in self.request.headers):
+        logging.critical('Scheduled backups must be started via task queue or '
+                         'cron.')
+        self.response.set_status(403)
+        return
+
       backup_prefix = self.request.get('name')
       if not backup_prefix:
         if self.request.headers.get('X-AppEngine-Cron'):
@@ -820,15 +831,15 @@
     if difference:
       return [('error', 'Backup does not have kind[s] %s' %
                ', '.join(difference))]
-    kinds = list(kinds) if len(backup_kinds) != len(kinds) else []
+
     if self.request.get('run_as_a_service', False):
-      if not backup.gs_handle:
+      if backup.filesystem != files.GS_FILESYSTEM:
         return [('error',
                  'Restore as a service is only available for GS backups')]
       datastore_admin_service = services_client.DatastoreAdminClient()
-      description = 'Remote restore job: %s' % backup
+      description = 'Remote restore job: %s' % backup.name
       remote_job_id = datastore_admin_service.restore_from_backup(
-          description, backup_id, kinds)
+          description, backup_id, list(kinds))
       return [('remote_job', remote_job_id)]
 
     queue = self.request.get('queue')
@@ -840,6 +851,9 @@
           ', '.join(kinds) if kinds else 'all', backup.name)
       job_operation = utils.StartOperation(operation_name)
       mapper_params = self._GetBasicMapperParams()
+
+
+      kinds = list(kinds) if len(backup_kinds) != len(kinds) else []
       mapper_params['files'] = get_backup_files(backup, kinds)
       mapper_params['kind_filter'] = kinds
       mapper_params['original_app'] = backup.original_app
@@ -977,12 +991,14 @@
 def BackupCompleteHandler(operation, job_id, mapreduce_state):
   """Updates BackupInformation record for a completed mapper job."""
   mapreduce_spec = mapreduce_state.mapreduce_spec
+  filenames = mapreduce_spec.mapper.output_writer_class().get_filenames(
+      mapreduce_state)
   _perform_backup_complete(operation,
                            job_id,
                            mapreduce_spec.mapper.params['entity_kind'],
                            mapreduce_spec.params['backup_info_pk'],
                            mapreduce_spec.mapper.params.get('gs_bucket_name'),
-                           mapreduce_state.writer_state['filenames'],
+                           filenames,
                            mapreduce_spec.params.get('done_callback_queue'))
 
 
diff --git a/google/appengine/ext/datastore_admin/config.py b/google/appengine/ext/datastore_admin/config.py
index a0ee391..d013401 100644
--- a/google/appengine/ext/datastore_admin/config.py
+++ b/google/appengine/ext/datastore_admin/config.py
@@ -69,11 +69,11 @@
   GOOGLE_API_HOSTNAME = 'www.googleapis.com'
   GOOGLE_API_OAUTH_SCOPE_HOSTNAME = 'https://www.googleapis.com'
   GS_API_HOSTNAME = 'storage.googleapis.com'
-  ADMIN_API_APP_ID = 'admin-api'
-  ADMIN_API_APP_VERSION = None
-  ADMIN_API_NAME = 'appengine'
-  ADMIN_API_VERSION = 'vdev'
-  ADMIN_API_VALIDATE_SSL = True
+  DATASTORE_ADMIN_API_APP_ID = None
+  DATASTORE_ADMIN_API_APP_VERSION = None
+  DATASTORE_ADMIN_API_NAME = None
+  DATASTORE_ADMIN_API_VERSION = None
+  DATASTORE_ADMIN_API_VALIDATE_SSL = True
   ADMIN_CONSOLE_URL = 'https://appengine.google.com'
 
   @property
@@ -89,12 +89,13 @@
 
   @property
   def DISCOVERY_URL(self):
-    if self.ADMIN_API_APP_VERSION:
-      hostname = '%s-dot-%s.%s' % (self.ADMIN_API_APP_VERSION,
-                                   self.ADMIN_API_APP_ID,
+    if self.DATASTORE_ADMIN_API_APP_VERSION:
+      hostname = '%s-dot-%s.%s' % (self.DATASTORE_ADMIN_API_APP_VERSION,
+                                   self.DATASTORE_ADMIN_API_APP_ID,
                                    self.DEFAULT_APP_DOMAIN)
     else:
-      hostname = '%s.%s' % (self.ADMIN_API_APP_ID, self.DEFAULT_APP_DOMAIN)
+      hostname = '%s.%s' % (self.DATASTORE_ADMIN_API_APP_ID,
+                            self.DEFAULT_APP_DOMAIN)
     path = '_ah/api/discovery/v1/apis/{api}/{apiVersion}/rest'
     return 'https://%s/%s' % (hostname, path)
 
@@ -118,8 +119,10 @@
   import config_runtime
 
   RUNTIME_DATACENTER_TO_CLASS = config_runtime.RUNTIME_DATACENTER_TO_CLASS
+  PRODUCTION_CLASS = config_runtime.ProdRuntime
 except ImportError:
   RUNTIME_DATACENTER_TO_CLASS = {}
+  PRODUCTION_CLASS = Prod
 
 
 def GetConfig():
@@ -139,7 +142,7 @@
   for prefix, config in RUNTIME_DATACENTER_TO_CLASS.items():
     if datacenter.startswith(prefix):
       return config
-  return Prod
+  return PRODUCTION_CLASS
 
 
 def Export(cls):
diff --git a/google/appengine/ext/datastore_admin/static/js/compiled.js b/google/appengine/ext/datastore_admin/static/js/compiled.js
index 9d71f3a..e4d8333 100644
--- a/google/appengine/ext/datastore_admin/static/js/compiled.js
+++ b/google/appengine/ext/datastore_admin/static/js/compiled.js
@@ -1,20 +1,19 @@
-var h=document,k=Array,n=Error,q=parseInt,r=String;function aa(a,b){return a.currentTarget=b}function ba(a,b){return a.keyCode=b}function s(a,b){return a.disabled=b}
-var u="push",v="shift",w="slice",x="replace",y="value",ca="preventDefault",z="indexOf",A="keyCode",C="type",da="name",D="length",ea="propertyIsEnumerable",E="prototype",fa="checked",F="split",G="style",ga="target",H="call",ha="apply",I,J=this,K=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof k)return"array";if(a instanceof Object)return b;var c=Object[E].toString[H](a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a[D]&&"undefined"!=typeof a.splice&&
-"undefined"!=typeof a[ea]&&!a[ea]("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a[H]&&"undefined"!=typeof a[ea]&&!a[ea]("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a[H])return"object";return b},ia=function(a){var b=K(a);return"array"==b||"object"==b&&"number"==typeof a[D]},L=function(a){return"string"==typeof a},ja=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},M="closure_uid_"+(1E9*Math.random()>>>0),ka=0,la=
-function(a,b){var c=k[E][w][H](arguments,1);return function(){var b=k[E][w][H](arguments);b.unshift[ha](b,c);return a[ha](this,b)}},ma=function(a,b){function c(){}c.prototype=b[E];a.q=b[E];a.prototype=new c};var N=function(a){n.captureStackTrace?n.captureStackTrace(this,N):this.stack=n().stack||"";a&&(this.message=r(a))};ma(N,n);N[E].name="CustomError";var na=function(a,b){for(var c=a[F]("%s"),d="",f=k[E][w][H](arguments,1);f[D]&&1<c[D];)d+=c[v]()+f[v]();return d+c.join("%s")},ta=function(a,b){if(b)return a[x](oa,"&amp;")[x](pa,"&lt;")[x](qa,"&gt;")[x](ra,"&quot;");if(!sa.test(a))return a;-1!=a[z]("&")&&(a=a[x](oa,"&amp;"));-1!=a[z]("<")&&(a=a[x](pa,"&lt;"));-1!=a[z](">")&&(a=a[x](qa,"&gt;"));-1!=a[z]('"')&&(a=a[x](ra,"&quot;"));return a},oa=/&/g,pa=/</g,qa=/>/g,ra=/\"/g,sa=/[&<>\"]/,ua=function(a){return r(a)[x](/\-([a-z])/g,function(a,c){return c.toUpperCase()})},
-va=function(a,b){var c=L(b)?r(b)[x](/([-()\[\]{}+?*.$\^|,:#<!\\])/g,"\\$1")[x](/\x08/g,"\\x08"):"\\s";return a[x](RegExp("(^"+(c?"|["+c+"]+":"")+")([a-z])","g"),function(a,b,c){return b+c.toUpperCase()})};var wa=function(a,b){b.unshift(a);N[H](this,na[ha](null,b));b[v]()};ma(wa,N);wa[E].name="AssertionError";var O=function(a,b,c){if(!a){var d=k[E][w][H](arguments,2),f="Assertion failed";if(b)var f=f+(": "+b),e=d;throw new wa(""+f,e||[]);}return a};var P=k[E],xa=P[z]?function(a,b,c){O(null!=a[D]);return P[z][H](a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a[D]+c):c;if(L(a))return L(b)&&1==b[D]?a[z](b,c):-1;for(;c<a[D];c++)if(c in a&&a[c]===b)return c;return-1},ya=P.forEach?function(a,b,c){O(null!=a[D]);P.forEach[H](a,b,c)}:function(a,b,c){for(var d=a[D],f=L(a)?a[F](""):a,e=0;e<d;e++)e in f&&b[H](c,f[e],e,a)},za=function(a,b){var c=xa(a,b),d;if(d=0<=c)O(null!=a[D]),P.splice[H](a,c,1);return d},Aa=function(a){var b=a[D];if(0<b){for(var c=
-k(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ba=function(a,b,c){O(null!=a[D]);return 2>=arguments[D]?P[w][H](a,b):P[w][H](a,b,c)};var Ca=function(a,b,c){for(var d in a)b[H](c,a[d],d,a)},Da="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ea=function(a,b){for(var c,d,f=1;f<arguments[D];f++){d=arguments[f];for(c in d)a[c]=d[c];for(var e=0;e<Da[D];e++)c=Da[e],Object[E].hasOwnProperty[H](d,c)&&(a[c]=d[c])}};var Q,Fa,Ga,Ha,Ia=function(){return J.navigator?J.navigator.userAgent:null};Ha=Ga=Fa=Q=!1;var R;if(R=Ia()){var Ja=J.navigator;Q=0==R.lastIndexOf("Opera",0);Fa=!Q&&(-1!=R[z]("MSIE")||-1!=R[z]("Trident"));Ga=!Q&&-1!=R[z]("WebKit");Ha=!Q&&!Ga&&!Fa&&"Gecko"==Ja.product}var Ka=Q,S=Fa,T=Ha,U=Ga,La=function(){var a=J.document;return a?a.documentMode:void 0},Ma;
-t:{var Na="",Oa;if(Ka&&J.opera)var Pa=J.opera.version,Na="function"==typeof Pa?Pa():Pa;else if(T?Oa=/rv\:([^\);]+)(\)|;)/:S?Oa=/\b(?:MSIE|rv)\s+([^\);]+)(\)|;)/:U&&(Oa=/WebKit\/(\S+)/),Oa)var Qa=Oa.exec(Ia()),Na=Qa?Qa[1]:"";if(S){var Ra=La();if(Ra>parseFloat(Na)){Ma=r(Ra);break t}}Ma=Na}
-var Sa=Ma,Ta={},V=function(a){var b;if(!(b=Ta[a])){b=0;for(var c=r(Sa)[x](/^[\s\xa0]+|[\s\xa0]+$/g,"")[F]("."),d=r(a)[x](/^[\s\xa0]+|[\s\xa0]+$/g,"")[F]("."),f=Math.max(c[D],d[D]),e=0;0==b&&e<f;e++){var g=c[e]||"",m=d[e]||"",l=RegExp("(\\d*)(\\D*)","g"),Ua=RegExp("(\\d*)(\\D*)","g");do{var p=l.exec(g)||["","",""],t=Ua.exec(m)||["","",""];if(0==p[0][D]&&0==t[0][D])break;b=((0==p[1][D]?0:q(p[1],10))<(0==t[1][D]?0:q(t[1],10))?-1:(0==p[1][D]?0:q(p[1],10))>(0==t[1][D]?0:q(t[1],10))?1:0)||((0==p[2][D])<
-(0==t[2][D])?-1:(0==p[2][D])>(0==t[2][D])?1:0)||(p[2]<t[2]?-1:p[2]>t[2]?1:0)}while(0==b)}b=Ta[a]=0<=b}return b},Va=J.document,Wa=Va&&S?La()||("CSS1Compat"==Va.compatMode?q(Sa,10):5):void 0;var Xa=!S||S&&9<=Wa;!T&&!S||S&&S&&9<=Wa||T&&V("1.9.1");S&&V("9");var Ya=function(a,b){var c;c=a.className;c=L(c)&&c.match(/\S+/g)||[];for(var d=Ba(arguments,1),f=c[D]+d[D],e=c,g=0;g<d[D];g++)0<=xa(e,d[g])||e[u](d[g]);a.className=c.join(" ");return c[D]==f};var W=function(a){return L(a)?h.getElementById(a):a},Za=function(a,b,c,d){a=d||a;b=b&&"*"!=b?b.toUpperCase():"";if(a.querySelectorAll&&a.querySelector&&(b||c))return a.querySelectorAll(b+(c?"."+c:""));if(c&&a.getElementsByClassName){a=a.getElementsByClassName(c);if(b){d={};for(var f=0,e=0,g;g=a[e];e++)b==g.nodeName&&(d[f++]=g);d.length=f;return d}return a}a=a.getElementsByTagName(b||"*");if(c){d={};for(e=f=0;g=a[e];e++)b=g.className,"function"==typeof b[F]&&0<=xa(b[F](/\s+/),c)&&(d[f++]=g);d.length=
-f;return d}return a},ab=function(a,b){Ca(b,function(b,d){"style"==d?a[G].cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in $a?a.setAttribute($a[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},$a={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",valign:"vAlign",width:"width"},cb=function(a,b,c){var d=arguments,
-f=d[0],e=d[1];if(!Xa&&e&&(e[da]||e[C])){f=["<",f];e[da]&&f[u](' name="',ta(e[da]),'"');if(e[C]){f[u](' type="',ta(e[C]),'"');var g={};Ea(g,e);delete g[C];e=g}f[u](">");f=f.join("")}f=h.createElement(f);e&&(L(e)?f.className=e:"array"==K(e)?Ya[ha](null,[f].concat(e)):ab(f,e));2<d[D]&&bb(h,f,d,2);return f},bb=function(a,b,c,d){function f(c){c&&b.appendChild(L(c)?a.createTextNode(c):c)}for(;d<c[D];d++){var e=c[d];if(!ia(e)||ja(e)&&0<e.nodeType)f(e);else{var g;t:{if(e&&"number"==typeof e[D]){if(ja(e)){g=
-"function"==typeof e.item||"string"==typeof e.item;break t}if("function"==K(e)){g="function"==typeof e.item;break t}}g=!1}ya(g?Aa(e):e,f)}}};var db=function(a){var b=a[C];if(void 0===b)return null;switch(b.toLowerCase()){case "checkbox":case "radio":return a[fa]?a[y]:null;case "select-one":return b=a.selectedIndex,0<=b?a.options[b][y]:null;case "select-multiple":for(var b=[],c,d=0;c=a.options[d];d++)c.selected&&b[u](c[y]);return b[D]?b:null;default:return void 0!==a[y]?a[y]:null}};var eb=function(a){eb[" "](a);return a};eb[" "]=function(){};var fb=!S||S&&9<=Wa,gb=S&&!V("9");!U||V("528");T&&V("1.9b")||S&&V("8")||Ka&&V("9.5")||U&&V("528");T&&!V("8")||S&&V("9");var hb=function(a,b){this.type=a;this.target=b;aa(this,this[ga])};hb[E].k=!1;hb[E].defaultPrevented=!1;hb[E].preventDefault=function(){this.defaultPrevented=!0};var ib=function(a,b){a&&this.r(a,b)};ma(ib,hb);I=ib[E];I.target=null;I.relatedTarget=null;I.offsetX=0;I.offsetY=0;I.clientX=0;I.clientY=0;I.screenX=0;I.screenY=0;I.button=0;ba(I,0);I.charCode=0;I.ctrlKey=!1;I.altKey=!1;I.shiftKey=!1;I.metaKey=!1;I.m=null;
-I.r=function(a,b){var c=this.type=a[C];hb[H](this,c);this.target=a[ga]||a.srcElement;aa(this,b);var d=a.relatedTarget;if(d){if(T){var f;t:{try{eb(d.nodeName);f=!0;break t}catch(e){}f=!1}f||(d=null)}}else"mouseover"==c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=U||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=U||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=
-a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;ba(this,a[A]||0);this.charCode=a.charCode||("keypress"==c?a[A]:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.state=a.state;this.m=a;a.defaultPrevented&&this[ca]();delete this.k};I.preventDefault=function(){ib.q[ca][H](this);var a=this.m;if(a[ca])a[ca]();else if(a.returnValue=!1,gb)try{(a.ctrlKey||112<=a[A]&&123>=a[A])&&ba(a,-1)}catch(b){}};var jb="closure_listenable_"+(1E6*Math.random()|0),kb=0;var lb=function(a,b,c,d,f,e){this.c=a;this.l=b;this.src=c;this.type=d;this.capture=!!f;this.i=e;this.key=++kb;this.e=this.j=!1};lb[E].n=function(){this.e=!0;this.i=this.src=this.l=this.c=null};var mb={},X={},Y={},Z={},nb=function(a,b,c,d,f){if("array"==K(b)){for(var e=0;e<b[D];e++)nb(a,b[e],c,d,f);return null}c=ob(c);if(a&&a[jb])a=a.t(b,c,d,f);else t:{if(!b)throw n("Invalid event type");d=!!d;var g=X;b in g||(g[b]={a:0});g=g[b];d in g||(g[d]={a:0},g.a++);var g=g[d],e=a[M]||(a[M]=++ka),m;if(g[e]){m=g[e];for(var l=0;l<m[D];l++)if(g=m[l],g.c==c&&g.i==f){if(g.e)break;m[l].j=!1;a=m[l];break t}}else m=g[e]=[],g.a++;l=pb();g=new lb(c,l,a,b,d,f);g.j=!1;l.src=a;l.c=g;m[u](g);Y[e]||(Y[e]=[]);Y[e][u](g);
-a.addEventListener?a.addEventListener(b,l,d):a.attachEvent(b in Z?Z[b]:Z[b]="on"+b,l);a=mb[g.key]=g}return a},pb=function(){var a=qb,b=fb?function(c){return a[H](b.src,b.c,c)}:function(c){c=a[H](b.src,b.c,c);if(!c)return c};return b},sb=function(a,b,c,d,f){c=1;b=b[M]||(b[M]=++ka);if(a[b])for(a=Aa(a[b]),b=0;b<a[D];b++)(d=a[b])&&!d.e&&(c&=!1!==rb(d,f));return Boolean(c)},rb=function(a,b){var c=a.c,d=a.i||a.src;if(a.j&&"number"!=typeof a&&a&&!a.e){var f=a.src;if(f&&f[jb])f.s(a);else{var e=a[C],g=a.l,
-m=a.capture;f.removeEventListener?f.removeEventListener(e,g,m):f.detachEvent&&f.detachEvent(e in Z?Z[e]:Z[e]="on"+e,g);f=f[M]||(f[M]=++ka);Y[f]&&(g=Y[f],za(g,a),0==g[D]&&delete Y[f]);a.n();if(g=X[e][m][f])za(g,a),0==g[D]&&(delete X[e][m][f],X[e][m].a--),0==X[e][m].a&&(delete X[e][m],X[e].a--),0==X[e].a&&delete X[e];delete mb[a.key]}}return c[H](d,b)},qb=function(a,b){if(a.e)return!0;var c=a[C],d=X;if(!(c in d))return!0;var d=d[c],f,e;if(!fb){var g;if(!(g=b))t:{g=["window","event"];for(var m=J;f=g[v]();)if(null!=
-m[f])m=m[f];else{g=null;break t}g=m}f=g;g=!0 in d;m=!1 in d;if(g){if(0>f[A]||void 0!=f.returnValue)return!0;t:{var l=!1;if(0==f[A])try{ba(f,-1);break t}catch(Ua){l=!0}if(l||void 0==f.returnValue)f.returnValue=!0}}l=new ib(f,this);f=!0;try{if(g){for(var p=[],t=l.currentTarget;t;t=t.parentNode)p[u](t);e=d[!0];for(var B=p[D]-1;!l.k&&0<=B;B--)aa(l,p[B]),f&=sb(e,p[B],c,!0,l);if(m)for(e=d[!1],B=0;!l.k&&B<p[D];B++)aa(l,p[B]),f&=sb(e,p[B],c,!1,l)}else f=rb(a,l)}finally{p&&(p.length=0)}return f}return rb(a,
-new ib(b,this))},tb="__closure_events_fn_"+(1E9*Math.random()>>>0),ob=function(a){O(a,"Listener can not be null.");if("function"==K(a))return a;O(a.handleEvent,"An object listener must have handleEvent method.");return a[tb]||(a[tb]=function(b){return a.handleEvent(b)})};var ub=function(a,b,c){var d;t:if(d=ua(c),void 0===a[G][d]&&(c=(U?"Webkit":T?"Moz":S?"ms":Ka?"O":null)+va(c),void 0!==a[G][c])){d=c;break t}d&&(a[G][d]=b)};var vb=function(a,b){var c=[];1<arguments[D]&&(c=k[E][w][H](arguments)[w](1));var d=Za(h,"th","tct-selectall",a);if(0!=d[D]){var d=d[0],f=0,e=Za(h,"tbody",null,a);e[D]&&(f=e[0].rows[D]);this.d=cb("input",{type:"checkbox"});d.appendChild(this.d);f?nb(this.d,"click",this.p,!1,this):s(this.d,!0);this.f=[];this.g=[];this.h=[];d=Za(h,"input",null,a);for(f=0;e=d[f];f++)"checkbox"==e[C]&&e!=this.d?(this.f[u](e),nb(e,"click",this.o,!1,this)):"action"==e[da]&&(0<=c[z](e[y])?this.h[u](e):this.g[u](e),s(e,!0))}};
-I=vb[E];I.f=null;I.b=0;I.d=null;I.g=null;I.h=null;I.p=function(a){for(var b=a[ga][fa],c=a=0,d;d=this.f[c];c++)d.checked=b,a+=1;this.b=b?this.f[D]:0;for(c=0;b=this.g[c];c++)s(b,!this.b);for(c=0;b=this.h[c];c++)s(b,1!=a?!0:!1)};I.o=function(a){this.b+=a[ga][fa]?1:-1;this.d.checked=this.b==this.f[D];a=0;for(var b;b=this.g[a];a++)s(b,!this.b);for(a=0;b=this.h[a];a++)s(b,1!=this.b?!0:!1)};var wb=function(){var a=W("kinds");a&&new vb(a);(a=W("pending_backups"))&&new vb(a);(a=W("backups"))&&new vb(a,"Restore");var b=W("ae-datastore-admin-filesystem");b&&nb(b,"change",function(){var a="gs"==db(b);W("gs_bucket_tr")[G].display=a?"":"none"});if(a=W("confirm_delete_form")){var c=W("confirm_readonly_delete");c&&(a.onsubmit=function(){var a=W("confirm_message");L("color")?ub(a,"red","color"):Ca("color",la(ub,a));return c[fa]})}},xb=["ae","Datastore","Admin","init"],$=J;
-xb[0]in $||!$.execScript||$.execScript("var "+xb[0]);for(var yb;xb[D]&&(yb=xb[v]());)xb[D]||void 0===wb?$=$[yb]?$[yb]:$[yb]={}:$[yb]=wb;
+var g=document,k=Array,l=Error,m=parseInt,p=String;function aa(a,b){return a.currentTarget=b}function ba(a,b){return a.keyCode=b}function q(a,b){return a.disabled=b}
+var r="push",s="shift",t="slice",u="replace",v="value",ca="preventDefault",w="indexOf",x="keyCode",A="type",da="name",B="length",ea="propertyIsEnumerable",C="prototype",fa="checked",D="split",E="style",ga="target",F="call",ha="apply",ia="navigator",G,H=this,I=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof k)return"array";if(a instanceof Object)return b;var c=Object[C].toString[F](a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a[B]&&"undefined"!=
+typeof a.splice&&"undefined"!=typeof a[ea]&&!a[ea]("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a[F]&&"undefined"!=typeof a[ea]&&!a[ea]("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a[F])return"object";return b},ja=function(a){var b=I(a);return"array"==b||"object"==b&&"number"==typeof a[B]},J=function(a){return"string"==typeof a},ka=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},na=function(a){return a[la]||(a[la]=
+++ma)},la="closure_uid_"+(1E9*Math.random()>>>0),ma=0,oa=function(a,b){var c=k[C][t][F](arguments,1);return function(){var b=c[t]();b[r][ha](b,arguments);return a[ha](this,b)}},pa=function(a,b){function c(){}c.prototype=b[C];a.t=b[C];a.prototype=new c};var K=function(a){l.captureStackTrace?l.captureStackTrace(this,K):this.stack=l().stack||"";a&&(this.message=p(a))};pa(K,l);K[C].name="CustomError";var qa=function(a,b){for(var c=a[D]("%s"),d="",f=k[C][t][F](arguments,1);f[B]&&1<c[B];)d+=c[s]()+f[s]();return d+c.join("%s")},wa=function(a,b){if(b)return a[u](ra,"&amp;")[u](sa,"&lt;")[u](ta,"&gt;")[u](ua,"&quot;");if(!va.test(a))return a;-1!=a[w]("&")&&(a=a[u](ra,"&amp;"));-1!=a[w]("<")&&(a=a[u](sa,"&lt;"));-1!=a[w](">")&&(a=a[u](ta,"&gt;"));-1!=a[w]('"')&&(a=a[u](ua,"&quot;"));return a},ra=/&/g,sa=/</g,ta=/>/g,ua=/\"/g,va=/[&<>\"]/,xa=function(a){return p(a)[u](/\-([a-z])/g,function(a,c){return c.toUpperCase()})},
+ya=function(a,b){var c=J(b)?p(b)[u](/([-()\[\]{}+?*.$\^|,:#<!\\])/g,"\\$1")[u](/\x08/g,"\\x08"):"\\s";return a[u](RegExp("(^"+(c?"|["+c+"]+":"")+")([a-z])","g"),function(a,b,c){return b+c.toUpperCase()})};var za=function(a,b){b.unshift(a);K[F](this,qa[ha](null,b));b[s]()};pa(za,K);za[C].name="AssertionError";var L=function(a,b,c){if(!a){var d=k[C][t][F](arguments,2),f="Assertion failed";if(b)var f=f+(": "+b),e=d;throw new za(""+f,e||[]);}return a};var M=k[C],Aa=M[w]?function(a,b,c){L(null!=a[B]);return M[w][F](a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a[B]+c):c;if(J(a))return J(b)&&1==b[B]?a[w](b,c):-1;for(;c<a[B];c++)if(c in a&&a[c]===b)return c;return-1},Ba=M.forEach?function(a,b,c){L(null!=a[B]);M.forEach[F](a,b,c)}:function(a,b,c){for(var d=a[B],f=J(a)?a[D](""):a,e=0;e<d;e++)e in f&&b[F](c,f[e],e,a)},Ca=function(a){var b=a[B];if(0<b){for(var c=k(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Da=function(a,b,c){L(null!=a[B]);return 2>=
+arguments[B]?M[t][F](a,b):M[t][F](a,b,c)};var Ea=function(a,b,c){for(var d in a)b[F](c,a[d],d,a)},Fa="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ga=function(a,b){for(var c,d,f=1;f<arguments[B];f++){d=arguments[f];for(c in d)a[c]=d[c];for(var e=0;e<Fa[B];e++)c=Fa[e],Object[C].hasOwnProperty[F](d,c)&&(a[c]=d[c])}};var N,Ha,Ia,Ja,Ka=function(){return H[ia]?H[ia].userAgent:null};Ja=Ia=Ha=N=!1;var O;if(O=Ka()){var La=H[ia];N=0==O.lastIndexOf("Opera",0);Ha=!N&&(-1!=O[w]("MSIE")||-1!=O[w]("Trident"));Ia=!N&&-1!=O[w]("WebKit");Ja=!N&&!Ia&&!Ha&&"Gecko"==La.product}var Ma=N,P=Ha,Q=Ja,R=Ia,Na=H[ia],Oa=-1!=(Na&&Na.platform||"")[w]("Mac"),Pa=function(){var a=H.document;return a?a.documentMode:void 0},Qa;
+t:{var Ra="",S;if(Ma&&H.opera)var Sa=H.opera.version,Ra="function"==typeof Sa?Sa():Sa;else if(Q?S=/rv\:([^\);]+)(\)|;)/:P?S=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:R&&(S=/WebKit\/(\S+)/),S)var Ta=S.exec(Ka()),Ra=Ta?Ta[1]:"";if(P){var Ua=Pa();if(Ua>parseFloat(Ra)){Qa=p(Ua);break t}}Qa=Ra}
+var Va=Qa,Wa={},T=function(a){var b;if(!(b=Wa[a])){b=0;for(var c=p(Va)[u](/^[\s\xa0]+|[\s\xa0]+$/g,"")[D]("."),d=p(a)[u](/^[\s\xa0]+|[\s\xa0]+$/g,"")[D]("."),f=Math.max(c[B],d[B]),e=0;0==b&&e<f;e++){var h=c[e]||"",n=d[e]||"",ob=RegExp("(\\d*)(\\D*)","g"),pb=RegExp("(\\d*)(\\D*)","g");do{var y=ob.exec(h)||["","",""],z=pb.exec(n)||["","",""];if(0==y[0][B]&&0==z[0][B])break;b=((0==y[1][B]?0:m(y[1],10))<(0==z[1][B]?0:m(z[1],10))?-1:(0==y[1][B]?0:m(y[1],10))>(0==z[1][B]?0:m(z[1],10))?1:0)||((0==y[2][B])<
+(0==z[2][B])?-1:(0==y[2][B])>(0==z[2][B])?1:0)||(y[2]<z[2]?-1:y[2]>z[2]?1:0)}while(0==b)}b=Wa[a]=0<=b}return b},Xa=H.document,Ya=Xa&&P?Pa()||("CSS1Compat"==Xa.compatMode?m(Va,10):5):void 0;var Za=!P||P&&9<=Ya;!Q&&!P||P&&P&&9<=Ya||Q&&T("1.9.1");P&&T("9");var $a=function(a,b){var c;c=a.className;c=J(c)&&c.match(/\S+/g)||[];for(var d=Da(arguments,1),f=c[B]+d[B],e=c,h=0;h<d[B];h++)0<=Aa(e,d[h])||e[r](d[h]);a.className=c.join(" ");return c[B]==f};var U=function(a,b){return J(b)?a.getElementById(b):b},ab=function(a,b,c,d){a=d||a;b=b&&"*"!=b?b.toUpperCase():"";if(a.querySelectorAll&&a.querySelector&&(b||c))return a.querySelectorAll(b+(c?"."+c:""));if(c&&a.getElementsByClassName){a=a.getElementsByClassName(c);if(b){d={};for(var f=0,e=0,h;h=a[e];e++)b==h.nodeName&&(d[f++]=h);d.length=f;return d}return a}a=a.getElementsByTagName(b||"*");if(c){d={};for(e=f=0;h=a[e];e++)b=h.className,"function"==typeof b[D]&&0<=Aa(b[D](/\s+/),c)&&(d[f++]=h);d.length=
+f;return d}return a},cb=function(a,b){Ea(b,function(b,d){"style"==d?a[E].cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in bb?a.setAttribute(bb[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},bb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",valign:"vAlign",width:"width"},eb=function(a,b,c){var d=arguments,
+f=d[0],e=d[1];if(!Za&&e&&(e[da]||e[A])){f=["<",f];e[da]&&f[r](' name="',wa(e[da]),'"');if(e[A]){f[r](' type="',wa(e[A]),'"');var h={};Ga(h,e);delete h[A];e=h}f[r](">");f=f.join("")}f=g.createElement(f);e&&(J(e)?f.className=e:"array"==I(e)?$a[ha](null,[f].concat(e)):cb(f,e));2<d[B]&&db(g,f,d,2);return f},db=function(a,b,c,d){function f(c){c&&b.appendChild(J(c)?a.createTextNode(c):c)}for(;d<c[B];d++){var e=c[d];if(!ja(e)||ka(e)&&0<e.nodeType)f(e);else{var h;t:{if(e&&"number"==typeof e[B]){if(ka(e)){h=
+"function"==typeof e.item||"string"==typeof e.item;break t}if("function"==I(e)){h="function"==typeof e.item;break t}}h=!1}Ba(h?Ca(e):e,f)}}};var fb=function(a){var b=a[A];if(void 0===b)return null;switch(b.toLowerCase()){case "checkbox":case "radio":return a[fa]?a[v]:null;case "select-one":return b=a.selectedIndex,0<=b?a.options[b][v]:null;case "select-multiple":for(var b=[],c,d=0;c=a.options[d];d++)c.selected&&b[r](c[v]);return b[B]?b:null;default:return void 0!==a[v]?a[v]:null}};var gb=function(a){gb[" "](a);return a};gb[" "]=function(){};var hb=!P||P&&9<=Ya,ib=P&&!T("9");!R||T("528");Q&&T("1.9b")||P&&T("8")||Ma&&T("9.5")||R&&T("528");Q&&!T("8")||P&&T("9");var V=function(a,b){this.type=a;this.target=b;aa(this,this[ga])};V[C].m=!1;V[C].defaultPrevented=!1;V[C].v=!0;V[C].preventDefault=function(){this.defaultPrevented=!0;this.v=!1};var W=function(a,b){a&&this.u(a,b)};pa(W,V);G=W[C];G.target=null;G.relatedTarget=null;G.offsetX=0;G.offsetY=0;G.clientX=0;G.clientY=0;G.screenX=0;G.screenY=0;G.button=0;ba(G,0);G.charCode=0;G.ctrlKey=!1;G.altKey=!1;G.shiftKey=!1;G.metaKey=!1;G.s=!1;G.o=null;
+G.u=function(a,b){var c=this.type=a[A];V[F](this,c);this.target=a[ga]||a.srcElement;aa(this,b);var d=a.relatedTarget;if(d){if(Q){var f;t:{try{gb(d.nodeName);f=!0;break t}catch(e){}f=!1}f||(d=null)}}else"mouseover"==c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=R||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=R||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=
+a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;ba(this,a[x]||0);this.charCode=a.charCode||("keypress"==c?a[x]:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.s=Oa?a.metaKey:a.ctrlKey;this.state=a.state;this.o=a;a.defaultPrevented&&this[ca]();delete this.m};G.preventDefault=function(){W.t[ca][F](this);var a=this.o;if(a[ca])a[ca]();else if(a.returnValue=!1,ib)try{(a.ctrlKey||112<=a[x]&&123>=a[x])&&ba(a,-1)}catch(b){}};var jb="closure_listenable_"+(1E6*Math.random()|0),kb=0;var lb=function(a,b,c,d,f,e){this.c=a;this.g=b;this.src=c;this.type=d;this.capture=!!f;this.j=e;this.key=++kb;this.e=this.k=!1};lb[C].n=function(){this.e=!0;this.j=this.src=this.g=this.c=null};var mb=function(a){this.src=a;this.a={};this.l=0};mb[C].add=function(a,b,c,d,f){var e=this.a[a];e||(e=this.a[a]=[],this.l++);var h;t:{for(h=0;h<e[B];++h){var n=e[h];if(!n.e&&n.c==b&&n.capture==!!d&&n.j==f)break t}h=-1}-1<h?(a=e[h],c||(a.k=!1)):(a=new lb(b,null,this.src,a,!!d,f),a.k=c,e[r](a));return a};mb[C].p=function(a){var b=a[A];if(!(b in this.a))return!1;var c=this.a[b],d=Aa(c,a),f;if(f=0<=d)L(null!=c[B]),M.splice[F](c,d,1);f&&(a.n(),0==this.a[b][B]&&(delete this.a[b],this.l--));return f};var nb={},X={},Y={},qb=function(a,b,c,d,f){if("array"==I(b)){for(var e=0;e<b[B];e++)qb(a,b[e],c,d,f);return null}c=rb(c);if(a&&a[jb])a=a.A(b,c,d,f);else{e=c;if(!b)throw l("Invalid event type");c=!!d;var h=na(a),n=X[h];n||(X[h]=n=new mb(a));d=n.add(b,e,!1,d,f);d.g||(f=sb(),d.g=f,f.src=a,f.c=d,a.addEventListener?a.addEventListener(b,f,c):a.attachEvent(b in Y?Y[b]:Y[b]="on"+b,f),nb[d.key]=d);a=d}return a},sb=function(){var a=tb,b=hb?function(c){return a[F](b.src,b.c,c)}:function(c){c=a[F](b.src,b.c,
+c);if(!c)return c};return b},vb=function(a,b,c,d){var f=1;if(a=a[la]?X[na(a)]||null:null)if(b=a.a[b])for(b=Ca(b),a=0;a<b[B];a++){var e=b[a];e&&(e.capture==c&&!e.e)&&(f&=!1!==ub(e,d))}return Boolean(f)},ub=function(a,b){var c=a.c,d=a.j||a.src;if(a.k&&"number"!=typeof a&&a&&!a.e){var f=a.src;if(f&&f[jb])f.w(a);else{var e=a[A],h=a.g;f.removeEventListener?f.removeEventListener(e,h,a.capture):f.detachEvent&&f.detachEvent(e in Y?Y[e]:Y[e]="on"+e,h);(e=f[la]?X[na(f)]||null:null)?(e.p(a),0==e.l&&(e.src=null,
+delete X[na(f)])):a.n();delete nb[a.key]}}return c[F](d,b)},tb=function(a,b){if(a.e)return!0;if(!hb){var c;if(!(c=b))t:{c=["window","event"];for(var d=H,f;f=c[s]();)if(null!=d[f])d=d[f];else{c=null;break t}c=d}f=c;c=new W(f,this);d=!0;if(!(0>f[x]||void 0!=f.returnValue)){t:{var e=!1;if(0==f[x])try{ba(f,-1);break t}catch(h){e=!0}if(e||void 0==f.returnValue)f.returnValue=!0}f=[];for(e=c.currentTarget;e;e=e.parentNode)f[r](e);for(var e=a[A],n=f[B]-1;!c.m&&0<=n;n--)aa(c,f[n]),d&=vb(f[n],e,!0,c);for(n=
+0;!c.m&&n<f[B];n++)aa(c,f[n]),d&=vb(f[n],e,!1,c)}return d}return ub(a,new W(b,this))},wb="__closure_events_fn_"+(1E9*Math.random()>>>0),rb=function(a){L(a,"Listener can not be null.");if("function"==I(a))return a;L(a.handleEvent,"An object listener must have handleEvent method.");return a[wb]||(a[wb]=function(b){return a.handleEvent(b)})};var xb=function(a,b,c){var d;t:if(d=xa(c),void 0===a[E][d]&&(c=(R?"Webkit":Q?"Moz":P?"ms":Ma?"O":null)+ya(c),void 0!==a[E][c])){d=c;break t}d&&(a[E][d]=b)};var yb=function(a,b){var c=[];1<arguments[B]&&(c=k[C][t][F](arguments)[t](1));var d=ab(g,"th","tct-selectall",a);if(0!=d[B]){var d=d[0],f=0,e=ab(g,"tbody",null,a);e[B]&&(f=e[0].rows[B]);this.d=eb("input",{type:"checkbox"});d.appendChild(this.d);f?qb(this.d,"click",this.r,!1,this):q(this.d,!0);this.f=[];this.h=[];this.i=[];d=ab(g,"input",null,a);for(f=0;e=d[f];f++)"checkbox"==e[A]&&e!=this.d?(this.f[r](e),qb(e,"click",this.q,!1,this)):"action"==e[da]&&(0<=c[w](e[v])?this.i[r](e):this.h[r](e),q(e,!0))}};
+G=yb[C];G.f=null;G.b=0;G.d=null;G.h=null;G.i=null;G.r=function(a){for(var b=a[ga][fa],c=a=0,d;d=this.f[c];c++)d.checked=b,a+=1;this.b=b?this.f[B]:0;for(c=0;b=this.h[c];c++)q(b,!this.b);for(c=0;b=this.i[c];c++)q(b,1!=a?!0:!1)};G.q=function(a){this.b+=a[ga][fa]?1:-1;this.d.checked=this.b==this.f[B];a=0;for(var b;b=this.h[a];a++)q(b,!this.b);for(a=0;b=this.i[a];a++)q(b,1!=this.b?!0:!1)};var zb=function(){var a=U(g,"kinds");a&&new yb(a);(a=U(g,"pending_backups"))&&new yb(a);(a=U(g,"backups"))&&new yb(a,"Restore");var b=U(g,"ae-datastore-admin-filesystem");b&&qb(b,"change",function(){var a="gs"==fb(b);U(g,"gs_bucket_tr")[E].display=a?"":"none"});if(a=U(g,"confirm_delete_form")){var c=U(g,"confirm_readonly_delete");c&&(a.onsubmit=function(){var a=U(g,"confirm_message");J("color")?xb(a,"red","color"):Ea("color",oa(xb,a));return c[fa]})}},Z=["ae","Datastore","Admin","init"],$=H;
+Z[0]in $||!$.execScript||$.execScript("var "+Z[0]);for(var Ab;Z[B]&&(Ab=Z[s]());)Z[B]||void 0===zb?$=$[Ab]?$[Ab]:$[Ab]={}:$[Ab]=zb;
diff --git a/google/appengine/ext/datastore_admin/utils.py b/google/appengine/ext/datastore_admin/utils.py
index f8c78fd..bb269bf 100644
--- a/google/appengine/ext/datastore_admin/utils.py
+++ b/google/appengine/ext/datastore_admin/utils.py
@@ -38,7 +38,7 @@
 from google.appengine.ext.db import stats
 from google.appengine.ext.mapreduce import control
 from google.appengine.ext.mapreduce import model
-from google.appengine.ext.mapreduce import operation
+from google.appengine.ext.mapreduce import operation as mr_operation
 from google.appengine.ext.mapreduce import util
 from google.appengine.ext.webapp import _template
 
@@ -465,6 +465,7 @@
   return operation
 
 
+@db.non_transactional(allow_existing=False)
 def StartMap(operation_key,
              job_name,
              handler_spec,
@@ -472,7 +473,6 @@
              writer_spec,
              mapper_params,
              mapreduce_params=None,
-             start_transaction=True,
              queue_name=None,
              shard_count=MAPREDUCE_DEFAULT_SHARDS):
   """Start map as part of datastore admin operation.
@@ -487,7 +487,6 @@
     writer_spec: Output writer specification.
     mapper_params: Custom mapper parameters.
     mapreduce_params: Custom mapreduce parameters.
-    start_transaction: Specify if a new transaction should be started.
     queue_name: the name of the queue that will be used by the M/R.
     shard_count: the number of shards the M/R will try to use.
 
@@ -505,8 +504,17 @@
     mapreduce_params['done_callback_queue'] = queue_name
   mapreduce_params['force_writes'] = 'True'
 
-  def tx():
-    operation = DatastoreAdminOperation.get(operation_key)
+  def tx(is_xg_transaction):
+    """Start MapReduce job and update datastore admin state.
+
+    Args:
+      is_xg_transaction: True if we are running inside a xg-enabled
+        transaction, else False if we are running inside a non-xg-enabled
+        transaction (which means the datastore admin state is updated in one
+        transaction and the MapReduce job in an independent transaction).
+    Returns:
+      result MapReduce job id as a string.
+    """
     job_id = control.start_map(
         job_name, handler_spec, reader_spec,
         mapper_params,
@@ -514,18 +522,27 @@
         mapreduce_parameters=mapreduce_params,
         base_path=config.MAPREDUCE_PATH,
         shard_count=shard_count,
-        transactional=True,
-        queue_name=queue_name,
-        transactional_parent=operation)
+        in_xg_transaction=is_xg_transaction,
+        queue_name=queue_name)
+    operation = DatastoreAdminOperation.get(operation_key)
     operation.status = DatastoreAdminOperation.STATUS_ACTIVE
     operation.active_jobs += 1
     operation.active_job_ids = list(set(operation.active_job_ids + [job_id]))
     operation.put(config=_CreateDatastoreConfig())
     return job_id
-  if start_transaction:
-    return db.run_in_transaction(tx)
+
+
+
+
+
+
+  datastore_type = datastore_rpc._GetDatastoreType()
+
+  if datastore_type != datastore_rpc.BaseConnection.MASTER_SLAVE_DATASTORE:
+    return db.run_in_transaction_options(
+        db.create_transaction_options(xg=True), tx, True)
   else:
-    return tx()
+    return db.run_in_transaction(tx, False)
 
 
 def RunMapForKinds(operation_key,
@@ -668,7 +685,7 @@
     self.keys = []
 
 
-class ReserveKey(operation.Operation):
+class ReserveKey(mr_operation.Operation):
   """Mapper operation to reserve key ids."""
 
   def __init__(self, key, app_id):
diff --git a/google/appengine/ext/endpoints/protojson.py b/google/appengine/ext/endpoints/protojson.py
index 8e84c2d..554cfac 100644
--- a/google/appengine/ext/endpoints/protojson.py
+++ b/google/appengine/ext/endpoints/protojson.py
@@ -81,8 +81,10 @@
 
     if isinstance(field, messages.BytesField):
       try:
-        return base64.urlsafe_b64decode(value)
-      except TypeError, err:
+
+
+        return base64.urlsafe_b64decode(str(value))
+      except (TypeError, UnicodeEncodeError), err:
         raise messages.DecodeError('Base64 decoding error: %s' % err)
 
     return super(EndpointsProtoJson, self).decode_field(field, value)
diff --git a/google/appengine/ext/mapreduce/base_handler.py b/google/appengine/ext/mapreduce/base_handler.py
index ad790db..293e8d5 100644
--- a/google/appengine/ext/mapreduce/base_handler.py
+++ b/google/appengine/ext/mapreduce/base_handler.py
@@ -35,6 +35,7 @@
 
 
 
+
 import httplib
 import logging
 import simplejson
@@ -48,6 +49,8 @@
 from google.appengine.ext import webapp
 from google.appengine.ext.mapreduce import errors
 from google.appengine.ext.mapreduce import model
+from google.appengine.ext.mapreduce import parameters
+from google.appengine.ext.mapreduce import util
 
 
 class Error(Exception):
@@ -59,7 +62,10 @@
 
 
 class BaseHandler(webapp.RequestHandler):
-  """Base class for all mapreduce handlers."""
+  """Base class for all mapreduce handlers.
+
+  In Python27 runtime, webapp2 will automatically replace webapp.
+  """
 
   def base_path(self):
     """Base path for all mapreduce-related urls."""
@@ -70,22 +76,92 @@
 class TaskQueueHandler(BaseHandler):
   """Base class for handlers intended to be run only from the task queue.
 
-  Sub-classes should implement the 'handle' method.
+  Sub-classes should implement
+  1. the 'handle' method for all POST requests.
+  2. '_preprocess' method for decoding or validations before handle.
+  3. '_drop_gracefully' method if _preprocess fails and the task has to
+     be dropped.
   """
 
-  def post(self):
+  def __init__(self, *args, **kwargs):
+
+
+
+
+
+    self._preprocess_success = False
+    super(TaskQueueHandler, self).__init__(*args, **kwargs)
+
+  def initialize(self, request, response):
+    """Initialize.
+
+    1. call webapp init.
+    2. check request is indeed from taskqueue.
+    3. check the task has not been retried too many times.
+    4. run handler specific processing logic.
+    5. run error handling logic if preprocessing failed.
+
+    Args:
+      request: a webapp.Request instance.
+      response: a webapp.Response instance.
+    """
+    super(TaskQueueHandler, self).initialize(request, response)
+
+
     if "X-AppEngine-QueueName" not in self.request.headers:
       logging.error(self.request.headers)
       logging.error("Task queue handler received non-task queue request")
       self.response.set_status(
           403, message="Task queue handler received non-task queue request")
       return
-    self.handle()
+
+
+    if self.task_retry_count() > parameters._MAX_TASK_RETRIES:
+      logging.error(
+          "Task %s has been retried %s times. Dropping it permanently.",
+          self.request.headers["X-AppEngine-TaskName"], self.task_retry_count())
+      return
+
+    try:
+      self._preprocess()
+      self._preprocess_success = True
+
+    except:
+
+
+
+      self._preprocess_success = False
+      mr_id = self.request.headers.get(util._MR_ID_TASK_HEADER, None)
+      if mr_id is None:
+        raise
+      logging.error(
+          "Preprocess task %s failed. Dropping it permanently.",
+          self.request.headers["X-AppEngine-TaskName"])
+      self._drop_gracefully()
+
+  def post(self):
+    if self._preprocess_success:
+      self.handle()
 
   def handle(self):
     """To be implemented by subclasses."""
     raise NotImplementedError()
 
+  def _preprocess(self):
+    """Preprocess.
+
+    This method is called after webapp initialization code has been run
+    successfully. It can thus access self.request, self.response and so on.
+    """
+    pass
+
+  def _drop_gracefully(self):
+    """Drop task gracefully.
+
+    When preprocess failed, this method is called before the task is dropped.
+    """
+    pass
+
   def task_retry_count(self):
     """Number of times this task has been retried."""
     return int(self.request.headers.get("X-AppEngine-TaskExecutionCount", 0))
@@ -200,8 +276,7 @@
   def __init__(self, *args, **kwargs):
     super(HugeTaskHandler, self).__init__(*args, **kwargs)
 
-  def initialize(self, request, response):
-    super(HugeTaskHandler, self).initialize(request, response)
+  def _preprocess(self):
     self.request = self._RequestWrapper(self.request)
 
 
diff --git a/google/appengine/ext/mapreduce/control.py b/google/appengine/ext/mapreduce/control.py
index d6f33c0..559c7f6 100644
--- a/google/appengine/ext/mapreduce/control.py
+++ b/google/appengine/ext/mapreduce/control.py
@@ -40,19 +40,18 @@
 import logging
 import google
 
+from google.appengine.ext import db
 from google.appengine.ext.mapreduce import handlers
 from google.appengine.ext.mapreduce import model
 from google.appengine.ext.mapreduce import parameters
-
-
-_DEFAULT_SHARD_COUNT = 8
+from google.appengine.ext.mapreduce import util
 
 
 def start_map(name,
               handler_spec,
               reader_spec,
               mapper_parameters,
-              shard_count=_DEFAULT_SHARD_COUNT,
+              shard_count=None,
               output_writer_spec=None,
               mapreduce_parameters=None,
               base_path=None,
@@ -61,8 +60,7 @@
               countdown=None,
               hooks_class_name=None,
               _app=None,
-              transactional=False,
-              transactional_parent=None):
+              in_xg_transaction=False):
   """Start a new, mapper-only mapreduce.
 
   Args:
@@ -76,27 +74,27 @@
       whole job.
     base_path: base path of mapreduce library handler specified in app.yaml.
       "/mapreduce" by default.
-    queue_name: executor queue name to be used for mapreduce tasks. If
-      unspecified it will be the "default" queue or inherit the queue of
-      the currently running request.
+    queue_name: taskqueue queue name to be used for mapreduce tasks.
+      see util.get_queue_name.
     eta: absolute time when the MR should execute. May not be specified
       if 'countdown' is also supplied. This may be timezone-aware or
       timezone-naive.
     countdown: time in seconds into the future that this MR should execute.
       Defaults to zero.
     hooks_class_name: fully qualified name of a hooks.Hooks subclass.
-    transactional: specifies if job should be started as a part of already
-      opened transaction.
-    transactional_parent: specifies the entity which is already a part of
-      transaction. Child entity will be used to store task payload if mapreduce
-      specification is too big.
+    in_xg_transaction: controls what transaction scope to use to start this MR
+      job. If True, there has to be an already opened cross-group transaction
+      scope. MR will use one entity group from it.
+      If False, MR will create an independent transaction to start the job
+      regardless of any existing transaction scopes.
 
   Returns:
     mapreduce id as string.
   """
-  if not shard_count:
-    shard_count = _DEFAULT_SHARD_COUNT
+  if shard_count is None:
+    shard_count = parameters.DEFAULT_SHARD_COUNT
   if base_path is None:
+
     base_path = parameters._DEFAULT_BASE_PATH
 
   if mapper_parameters:
@@ -110,24 +108,18 @@
                                  shard_count,
                                  output_writer_spec=output_writer_spec)
 
-  if transactional and not transactional_parent:
-
-
-
-    logging.error(
-        "transactional_parent should be specified for transactional starts."
-        "Your job will fail to start if mapreduce specification is too big.")
+  if in_xg_transaction and not db.is_in_transaction():
+    logging.warning("Expects an opened xg transaction to start mapreduce "
+                    "when transactional is True.")
 
   return handlers.StartJobHandler._start_map(
       name,
       mapper_spec,
       mapreduce_parameters or {},
       base_path=base_path,
-      queue_name=queue_name,
+      queue_name=util.get_queue_name(queue_name),
       eta=eta,
       countdown=countdown,
       hooks_class_name=hooks_class_name,
       _app=_app,
-      transactional=transactional,
-      parent_entity=transactional_parent)
-
+      in_xg_transaction=in_xg_transaction)
diff --git a/google/appengine/ext/mapreduce/handlers.py b/google/appengine/ext/mapreduce/handlers.py
index 56d8215..27e3688 100644
--- a/google/appengine/ext/mapreduce/handlers.py
+++ b/google/appengine/ext/mapreduce/handlers.py
@@ -44,12 +44,14 @@
 import sys
 import time
 import traceback
+import simplejson
 
 from google.appengine.ext import ndb
 
 from google.appengine import runtime
 from google.appengine.api import datastore_errors
 from google.appengine.api import logservice
+from google.appengine.api import modules
 from google.appengine.api import taskqueue
 from google.appengine.ext import db
 from google.appengine.ext.mapreduce import base_handler
@@ -64,34 +66,6 @@
 
 
 
-using_modules = True
-try:
-  from google.appengine.api import modules
-except ImportError:
-
-  using_modules = False
-  from google.appengine.api import servers
-
-
-
-
-
-_SLICE_DURATION_SEC = 15
-
-
-_LEASE_GRACE_PERIOD = 1
-
-
-_REQUEST_EVENTUAL_TIMEOUT = 10 * 60 + 30
-
-
-_CONTROLLER_PERIOD_SEC = 2
-
-
-
-_RETRY_SLICE_ERROR_MAX_RETRIES = 10
-
-
 _TEST_INJECTED_FAULTS = set()
 
 
@@ -121,7 +95,6 @@
 
 class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler):
   """Callback handler for mapreduce worker task."""
-
   _TASK_STATE = util._enum(RETRY_TASK="retry_task",
                            DROP_TASK="drop_task")
 
@@ -130,6 +103,23 @@
     super(MapperWorkerCallbackHandler, self).__init__(*args)
     self._time = time.time
 
+  def _drop_gracefully(self):
+    """Drop worker task gracefully.
+
+    Set current shard_state to failed. Controller logic will take care of
+    other shards and the entire MR.
+    """
+    shard_id = self.request.headers[util._MR_SHARD_ID_TASK_HEADER]
+    mr_id = self.request.headers[util._MR_ID_TASK_HEADER]
+    shard_state, mr_state = db.get([
+        model.ShardState.get_key_by_shard_id(shard_id),
+        model.MapreduceState.get_key_by_job_id(mr_id)])
+
+    if shard_state and shard_state.active:
+      shard_state.set_for_failure()
+      config = util.create_datastore_write_config(mr_state.mapreduce_spec)
+      shard_state.put(config=config)
+
   def _try_acquire_lease(self, shard_state, tstate):
     """Validate datastore and the task payload are consistent.
 
@@ -197,7 +187,8 @@
 
     if shard_state.slice_start_time:
       countdown = self._wait_time(shard_state,
-                                  _LEASE_GRACE_PERIOD + _SLICE_DURATION_SEC)
+                                  parameters._LEASE_GRACE_PERIOD +
+                                  parameters._SLICE_DURATION_SEC)
       if countdown > 0:
         logging.warning(
             "Last retry of slice %s-%s may be still running."
@@ -210,7 +201,7 @@
         return self._TASK_STATE.RETRY_TASK
 
       else:
-        if self._wait_time(shard_state, _REQUEST_EVENTUAL_TIMEOUT):
+        if self._wait_time(shard_state, parameters._REQUEST_EVENTUAL_TIMEOUT):
           if not self._old_request_ended(shard_state):
             logging.warning(
                 "Last retry of slice %s-%s is still in flight with request_id "
@@ -221,7 +212,8 @@
           logging.warning(
               "Last retry of slice %s-%s has no log entry and has"
               "timed out after %s seconds",
-              tstate.shard_id, tstate.slice_id, _REQUEST_EVENTUAL_TIMEOUT)
+              tstate.shard_id, tstate.slice_id,
+              parameters._REQUEST_EVENTUAL_TIMEOUT)
 
 
     config = util.create_datastore_write_config(tstate.mapreduce_spec)
@@ -270,21 +262,11 @@
     assert shard_state.slice_start_time is not None
     assert shard_state.slice_request_id is not None
     request_ids = [shard_state.slice_request_id]
-    try:
-      logs = list(logservice.fetch(request_ids=request_ids))
-    except logservice.InvalidArgumentError:
+    logs = list(logservice.fetch(
+        request_ids=request_ids,
 
-      global using_modules
-      if using_modules:
-        logs = list(logservice.fetch(
-            request_ids=request_ids,
-            module_versions=[(modules.get_current_module_name(),
-                              modules.get_current_version_name())]))
-      else:
-        logs = list(logservice.fetch(
-            request_ids=request_ids,
-            server_versions=[(servers.get_current_server_name(),
-                              servers.get_current_version_name())]))
+        module_versions=[(os.environ["CURRENT_MODULE_ID"],
+                          modules.get_current_version_name())]))
 
     if not logs or not logs[0].finished:
       return False
@@ -510,7 +492,7 @@
             else:
               output_writer.write(output, ctx)
 
-    if self._time() - self._start_time >= _SLICE_DURATION_SEC:
+    if self._time() - self._start_time >= parameters._SLICE_DURATION_SEC:
       return False
     return True
 
@@ -607,8 +589,7 @@
     if type(e) is errors.FailJobError:
       logging.error("Got FailJobError. Shard %s failed permanently.",
                     shard_state.shard_id)
-      shard_state.active = False
-      shard_state.result_status = model.ShardState.RESULT_FAILED
+      shard_state.set_for_failure()
       return False
 
     if type(e) in errors.SHARD_RETRY_ERRORS:
@@ -646,8 +627,7 @@
       permanent_shard_failure = True
 
     if permanent_shard_failure:
-      shard_state.active = False
-      shard_state.result_status = model.ShardState.RESULT_FAILED
+      shard_state.set_for_failure()
       return False
 
     shard_state.reset_for_retry()
@@ -678,7 +658,7 @@
     Raises:
       errors.RetrySliceError: in order to trigger a slice retry.
     """
-    if shard_state.slice_retries < _RETRY_SLICE_ERROR_MAX_RETRIES:
+    if shard_state.slice_retries < parameters._RETRY_SLICE_ERROR_MAX_RETRIES:
       logging.error(
           "Will retry slice %s %s for the %s time.",
           tstate.shard_id,
@@ -696,8 +676,7 @@
                   "Shard %s failed permanently.",
                   self.task_retry_count(),
                   shard_state.shard_id)
-    shard_state.active = False
-    shard_state.result_status = model.ShardState.RESULT_FAILED
+    shard_state.set_for_failure()
     return False
 
   @staticmethod
@@ -731,7 +710,8 @@
     countdown = 0
     if self._processing_limit(spec) != -1:
       countdown = max(
-          int(_SLICE_DURATION_SEC - (self._time() - self._start_time)), 0)
+          int(parameters._SLICE_DURATION_SEC -
+              (self._time() - self._start_time)), 0)
     return countdown
 
   @classmethod
@@ -761,13 +741,17 @@
         tstate.slice_id,
         tstate.retries)
 
+    headers = util._get_task_headers(tstate.mapreduce_spec)
+    headers[util._MR_SHARD_ID_TASK_HEADER] = tstate.shard_id
+
     worker_task = model.HugeTask(
         url=base_path + "/worker_callback",
         params=tstate.to_dict(),
         name=task_name,
         eta=eta,
         countdown=countdown,
-        parent=shard_state)
+        parent=shard_state,
+        headers=headers)
     return worker_task
 
   @classmethod
@@ -812,7 +796,8 @@
     slice_processing_limit = -1
     if processing_rate > 0:
       slice_processing_limit = int(math.ceil(
-          _SLICE_DURATION_SEC*processing_rate/int(spec.mapper.shard_count)))
+          parameters._SLICE_DURATION_SEC*processing_rate/
+          int(spec.mapper.shard_count)))
     return slice_processing_limit
 
 
@@ -849,7 +834,8 @@
   Is also responsible for gathering execution status from shards together.
 
   This task is "continuously" running by adding itself again to taskqueue if
-  mapreduce is still active.
+  and only if mapreduce is still active. A mapreduce is active if it has
+  actively running shards.
   """
 
   def __init__(self, *args):
@@ -857,6 +843,35 @@
     super(ControllerCallbackHandler, self).__init__(*args)
     self._time = time.time
 
+  def _drop_gracefully(self):
+    """Gracefully drop controller task.
+
+    This method is called when decoding controller task payload failed.
+    Upon this we mark ShardState and MapreduceState as failed so all
+    tasks can stop.
+
+    Writing to datastore is forced (ignore read-only mode) because we
+    want the tasks to stop badly, and if force_writes was False,
+    the job would have never been started.
+    """
+    mr_id = self.request.headers[util._MR_ID_TASK_HEADER]
+    state = model.MapreduceState.get_by_job_id(mr_id)
+    if not state or not state.active:
+      return
+
+    state.active = False
+    state.result_status = model.MapreduceState.RESULT_FAILED
+    shard_states = model.ShardState.find_by_mapreduce_state(state)
+    puts = []
+    for ss in shard_states:
+      if ss.active:
+        ss.set_for_failure()
+        puts.append(ss)
+    config = util.create_datastore_write_config(state.mapreduce_spec)
+    db.put(puts, config=config)
+
+    db.put(state, config=config)
+
   def handle(self):
     """Handle request."""
     spec = model.MapreduceSpec.from_json_str(
@@ -940,7 +955,7 @@
 
         if not fresh_state.active:
           logging.warning(
-              "Job %s is not active. Look like spurious task execution. "
+              "Job %s is not active. Looks like spurious task execution. "
               "Dropping controller task.", spec.mapreduce_id)
           return
         config = util.create_datastore_write_config(spec)
@@ -991,7 +1006,7 @@
     """Finalize job execution.
 
     Invokes done callback and save mapreduce state in a transaction,
-    and schedule necessary clean ups.
+    and schedule necessary clean ups. This method is idempotent.
 
     Args:
       mapreduce_spec: an instance of MapreduceSpec
@@ -1009,7 +1024,8 @@
     if done_callback:
       done_task = taskqueue.Task(
           url=done_callback,
-          headers={"Mapreduce-Id": mapreduce_spec.mapreduce_id},
+          headers=util._get_task_headers(mapreduce_spec,
+                                         util.CALLBACK_MR_ID_TASK_HEADER),
           method=mapreduce_spec.params.get("done_callback_method", "POST"))
 
     @db.transactional(retries=5)
@@ -1018,8 +1034,8 @@
           mapreduce_spec.mapreduce_id)
       if not fresh_state.active:
         logging.warning(
-            "Job %s is not active. Look like spurious task execution. "
-            "Dropping controller task.", mapreduce_spec.mapreduce_id)
+            "Job %s is not active. Looks like spurious task execution. "
+            "Dropping task.", mapreduce_spec.mapreduce_id)
         return
       mapreduce_state.put(config=config)
 
@@ -1098,8 +1114,9 @@
     controller_callback_task = model.HugeTask(
         url=base_path + "/controller_callback",
         name=task_name, params=task_params,
-        countdown=_CONTROLLER_PERIOD_SEC,
-        parent=mapreduce_state)
+        countdown=parameters._CONTROLLER_PERIOD_SEC,
+        parent=mapreduce_state,
+        headers=util._get_task_headers(mapreduce_spec))
 
     if not _run_task_hook(mapreduce_spec.get_hooks(),
                           "enqueue_controller_task",
@@ -1113,121 +1130,198 @@
                         task_name, task_params, e.__class__, e)
 
 
-class KickOffJobHandler(base_handler.HugeTaskHandler):
+class KickOffJobHandler(base_handler.TaskQueueHandler):
   """Taskqueue handler which kicks off a mapreduce processing.
 
+  This handler is idempotent.
+
+  Precondition:
+    The Model.MapreduceState entity for this mr is already created and
+    saved to datastore by StartJobHandler._start_map.
+
   Request Parameters:
-    mapreduce_spec: MapreduceSpec of the mapreduce serialized to json.
-    input_readers: List of InputReaders objects separated by semi-colons.
+    mapreduce_id: in string.
   """
 
+
+  _SERIALIZED_INPUT_READERS_KEY = "input_readers_for_mr_%s"
+
   def handle(self):
     """Handles kick off request."""
-    spec = model.MapreduceSpec.from_json_str(
-        self._get_required_param("mapreduce_spec"))
 
-    app_id = self.request.get("app", None)
-    queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME", "default")
-    mapper_input_reader_class = spec.mapper.input_reader_class()
+    mr_id = self.request.get("mapreduce_id")
 
-
-
-    state = model.MapreduceState.create_new(spec.mapreduce_id)
-    state.mapreduce_spec = spec
-    state.active = True
-    if app_id:
-      state.app_id = app_id
-
-    input_readers = mapper_input_reader_class.split_input(spec.mapper)
-    if not input_readers:
-
-      logging.warning("Found no mapper input data to process.")
-      state.active = False
-      state.active_shards = 0
-      state.result_status = model.MapreduceState.RESULT_SUCCESS
-      ControllerCallbackHandler._finalize_job(spec, state, self.base_path())
+    logging.info("Processing kickoff for job %s", mr_id)
+    state = model.MapreduceState.get_by_job_id(mr_id)
+    if not self._check_mr_state(state, mr_id):
       return
 
 
-    spec.mapper.shard_count = len(input_readers)
-    state.active_shards = len(input_readers)
-    state.mapreduce_spec = spec
+    readers, serialized_readers_entity = self._get_input_readers(state)
+    if readers is None:
 
-    output_writer_class = spec.mapper.output_writer_class()
-    if output_writer_class:
-      output_writer_class.init_job(state)
+      logging.warning("Found no mapper input data to process.")
+      state.active = False
+      state.result_status = model.MapreduceState.RESULT_SUCCESS
+      ControllerCallbackHandler._finalize_job(
+          state.mapreduce_spec, state, self.base_path())
+      return False
 
 
-    state.put(config=util.create_datastore_write_config(spec))
+    self._setup_output_writer(state)
 
-    KickOffJobHandler._schedule_shards(
-        spec, input_readers, queue_name, self.base_path(), state)
+
+
+    result = self._save_states(state, serialized_readers_entity)
+    if result is None:
+      readers, _ = self._get_input_readers(state)
+    elif not result:
+      return
+
+    queue_name = self.request.headers.get("X-AppEngine-QueueName")
+    KickOffJobHandler._schedule_shards(state.mapreduce_spec, readers,
+                                       queue_name, self.base_path(), state)
 
     ControllerCallbackHandler.reschedule(
-        state, self.base_path(), spec, queue_name=queue_name, serial_id=0)
+        state, self.base_path(), state.mapreduce_spec, serial_id=0,
+        queue_name=queue_name)
 
-  def _get_required_param(self, param_name):
-    """Get a required request parameter.
+  def _get_input_readers(self, state):
+    """Get input readers.
 
     Args:
-      param_name: name of request parameter to fetch.
+      state: a MapreduceState model.
 
     Returns:
-      parameter value
-
-    Raises:
-      errors.NotEnoughArgumentsError: if parameter is not specified.
+      A tuple: (a list of input readers, a model._HugeTaskPayload entity).
+    The payload entity contains the json serialized input readers.
+    (None, None) when input reader splitting returned no data to process.
     """
-    value = self.request.get(param_name)
-    if not value:
-      raise errors.NotEnoughArgumentsError(param_name + " not specified")
-    return value
+    serialized_input_readers_key = (self._SERIALIZED_INPUT_READERS_KEY %
+                                    state.key().id_or_name())
+    serialized_input_readers = model._HugeTaskPayload.get_by_key_name(
+        serialized_input_readers_key, parent=state)
+
+
+    input_reader_class = state.mapreduce_spec.mapper.input_reader_class()
+    if serialized_input_readers is None:
+      readers = input_reader_class.split_input(
+          state.mapreduce_spec.mapper)
+    else:
+      readers = [input_reader_class.from_json_str(json) for json in
+                 simplejson.loads(serialized_input_readers.payload)]
+
+    if not readers:
+      return None, None
+
+
+    state.mapreduce_spec.mapper.shard_count = len(readers)
+    state.active_shards = len(readers)
+
+
+    if serialized_input_readers is None:
+
+      serialized_input_readers = model._HugeTaskPayload(
+          key_name=serialized_input_readers_key, parent=state)
+      readers_json_str = [i.to_json_str() for i in readers]
+      serialized_input_readers.payload = simplejson.dumps(readers_json_str)
+    return readers, serialized_input_readers
+
+  def _setup_output_writer(self, state):
+    if not state.writer_state:
+      output_writer_class = state.mapreduce_spec.mapper.output_writer_class()
+      if output_writer_class:
+        output_writer_class.init_job(state)
+
+  @db.transactional
+  def _save_states(self, state, serialized_readers_entity):
+    """Run transaction to save state.
+
+    Args:
+      state: a model.MapreduceState entity.
+      serialized_readers_entity: a model._HugeTaskPayload entity containing
+        json serialized input readers.
+
+    Returns:
+      False if a fatal error is encountered and this task should be dropped
+    immediately. True if transaction is successful. None if a previous
+    attempt of this same transaction has already succeeded.
+    """
+    mr_id = state.key().id_or_name()
+    fresh_state = model.MapreduceState.get_by_job_id(mr_id)
+    if not self._check_mr_state(fresh_state, mr_id):
+      return False
+    if fresh_state.active_shards != 0:
+      logging.warning(
+          "Mapreduce %s already has active shards. Looks like spurious task "
+          "execution.", mr_id)
+      return None
+    config = util.create_datastore_write_config(state.mapreduce_spec)
+    db.put([state, serialized_readers_entity], config=config)
+    return True
 
   @classmethod
   def _schedule_shards(cls,
                        spec,
-                       input_readers,
+                       readers,
                        queue_name,
                        base_path,
                        mr_state):
     """Prepares shard states and schedules their execution.
 
+    Even though this method does not schedule shard task and save shard state
+    transactionally, it's safe for taskqueue to retry this logic because
+    the initial shard_state for each shard is the same from any retry.
+    This is an important yet reasonable assumption on model.ShardState.
+
     Args:
       spec: mapreduce specification as MapreduceSpec.
-      input_readers: list of InputReaders describing shard splits.
+      readers: list of InputReaders describing shard splits.
       queue_name: The queue to run this job on.
       base_path: The base url path of mapreduce callbacks.
       mr_state: The MapReduceState of current job.
     """
 
-
-
     shard_states = []
-    writer_class = spec.mapper.output_writer_class()
-    output_writers = [None] * len(input_readers)
-    for shard_number, input_reader in enumerate(input_readers):
+    for shard_number, input_reader in enumerate(readers):
       shard_state = model.ShardState.create_new(spec.mapreduce_id, shard_number)
       shard_state.shard_description = str(input_reader)
-      if writer_class:
-        output_writers[shard_number] = writer_class.create(
-            mr_state, shard_state)
       shard_states.append(shard_state)
 
 
+    writer_class = spec.mapper.output_writer_class()
+    writers = [None] * len(readers)
+    if writer_class:
+      for shard_number, shard_state in enumerate(shard_states):
+        writers[shard_number] = writer_class.create(mr_state, shard_state)
+
+
     existing_shard_states = db.get(shard.key() for shard in shard_states)
     existing_shard_keys = set(shard.key() for shard in existing_shard_states
                               if shard is not None)
 
 
-
-
     db.put((shard for shard in shard_states
             if shard.key() not in existing_shard_keys),
            config=util.create_datastore_write_config(spec))
 
 
+
+
+
+
+
+
+
+
+
+
+
+
+
+
     for shard_number, (input_reader, output_writer) in enumerate(
-        zip(input_readers, output_writers)):
+        zip(readers, writers)):
       shard_id = model.ShardState.shard_id_from_number(
           spec.mapreduce_id, shard_number)
       task = MapperWorkerCallbackHandler._state_to_task(
@@ -1239,9 +1333,36 @@
                                             spec,
                                             queue_name)
 
+  @classmethod
+  def _check_mr_state(cls, state, mr_id):
+    """Check MapreduceState.
+
+    Args:
+      state: an MapreduceState instance.
+      mr_id: mapreduce id.
+
+    Returns:
+      True if state is valid. False if not and this task should be dropped.
+    """
+    if state is None:
+      logging.warning(
+          "Mapreduce State for job %s is missing. Dropping Task.",
+          mr_id)
+      return False
+    if not state.active:
+      logging.warning(
+          "Mapreduce %s is not active. Looks like spurious task "
+          "execution. Dropping Task.", mr_id)
+      return False
+    return True
+
 
 class StartJobHandler(base_handler.PostJsonHandler):
-  """Command handler starts a mapreduce job."""
+  """Command handler starts a mapreduce job.
+
+  This handler allows user to start a mr via a web form. It's _start_map
+  method can also be used independently to start a mapreduce.
+  """
 
   def handle(self):
     """Handles start request."""
@@ -1257,16 +1378,16 @@
 
 
     mapper_params["processing_rate"] = int(mapper_params.get(
-          "processing_rate") or model._DEFAULT_PROCESSING_RATE_PER_SEC)
-    queue_name = mapper_params["queue_name"] = mapper_params.get(
-        "queue_name", "default")
+        "processing_rate") or parameters._DEFAULT_PROCESSING_RATE_PER_SEC)
+    queue_name = mapper_params["queue_name"] = util.get_queue_name(
+        mapper_params.get("queue_name", None))
 
 
     mapper_spec = model.MapperSpec(
         mapper_handler_spec,
         mapper_input_reader_spec,
         mapper_params,
-        int(mapper_params.get("shard_count", model._DEFAULT_SHARD_COUNT)),
+        int(mapper_params.get("shard_count", parameters.DEFAULT_SHARD_COUNT)),
         output_writer_spec=mapper_output_writer_spec)
 
     mapreduce_id = type(self)._start_map(
@@ -1330,27 +1451,35 @@
                  name,
                  mapper_spec,
                  mapreduce_params,
-                 base_path=None,
-                 queue_name=None,
+                 base_path,
+                 queue_name,
                  eta=None,
                  countdown=None,
                  hooks_class_name=None,
                  _app=None,
-                 transactional=False,
-                 parent_entity=None):
-    """See control.start_map."""
-    if not transactional and parent_entity:
-      raise Exception("Parent shouldn't be specfied "
-                      "for non-transactional starts.")
+                 in_xg_transaction=False):
 
 
+    """See control.start_map.
+
+    Requirements for this method:
+    1. The request that invokes this method can either be regular or
+       from taskqueue. So taskqueue specific headers can not be used.
+    2. Each invocation transactionally starts an isolated mapreduce job with
+       a unique id. MapreduceState should be immediately available after
+       returning. See control.start_map's doc on transactional.
+    3. Method should be lightweight.
+    """
+
     mapper_input_reader_class = mapper_spec.input_reader_class()
     mapper_input_reader_class.validate(mapper_spec)
 
+
     mapper_output_writer_class = mapper_spec.output_writer_class()
     if mapper_output_writer_class:
       mapper_output_writer_class.validate(mapper_spec)
 
+
     mapreduce_id = model.MapreduceState.new_mapreduce_id()
     mapreduce_spec = model.MapreduceSpec(
         name,
@@ -1369,63 +1498,66 @@
       context.Context._set(None)
 
 
+    if in_xg_transaction:
+      propagation = db.MANDATORY
+    else:
+      propagation = db.INDEPENDENT
 
-
-    @db.non_transactional
+    @db.transactional(propagation=propagation)
     def _txn():
-
-      state = model.MapreduceState.create_new(mapreduce_spec.mapreduce_id)
-      state.mapreduce_spec = mapreduce_spec
-      state.active = True
-      state.active_shards = mapper_spec.shard_count
-      if _app:
-        state.app_id = _app
-      state.put(config=util.create_datastore_write_config(mapreduce_spec))
-      return state
-
-    state = _txn()
-    if not transactional:
-      parent_entity = state
-
-    cls._add_kickoff_task(
-        base_path, mapreduce_spec, eta, countdown, parent_entity,
-        queue_name, transactional, _app)
+      cls._create_and_save_state(mapreduce_spec, _app)
+      cls._add_kickoff_task(base_path, mapreduce_spec, eta,
+                            countdown, queue_name)
+    _txn()
 
     return mapreduce_id
 
   @classmethod
+  def _create_and_save_state(cls, mapreduce_spec, _app):
+    """Save mapreduce state to datastore.
+
+    Save state to datastore so that UI can see it immediately.
+
+    Args:
+      mapreduce_spec: model.MapreduceSpec,
+      _app: app id if specified. None otherwise.
+
+    Returns:
+      The saved Mapreduce state.
+    """
+    state = model.MapreduceState.create_new(mapreduce_spec.mapreduce_id)
+    state.mapreduce_spec = mapreduce_spec
+    state.active = True
+    state.active_shards = 0
+    if _app:
+      state.app_id = _app
+    config = util.create_datastore_write_config(mapreduce_spec)
+    state.put(config=config)
+    return state
+
+  @classmethod
   def _add_kickoff_task(cls,
                         base_path,
                         mapreduce_spec,
                         eta,
                         countdown,
-                        parent,
-                        queue_name,
-                        transactional,
-                        _app):
-    queue_name = queue_name or os.environ.get("HTTP_X_APPENGINE_QUEUENAME",
-                                              "default")
-    if queue_name[0] == "_":
+                        queue_name):
+    params = {"mapreduce_id": mapreduce_spec.mapreduce_id}
 
-      queue_name = "default"
-
-    kickoff_params = {"mapreduce_spec": mapreduce_spec.to_json_str()}
-    if _app:
-      kickoff_params["app"] = _app
-    kickoff_worker_task = model.HugeTask(
+    kickoff_task = taskqueue.Task(
         url=base_path + "/kickoffjob_callback",
-        params=kickoff_params,
+        headers=util._get_task_headers(mapreduce_spec),
+        params=params,
         eta=eta,
-        countdown=countdown,
-        parent=parent)
+        countdown=countdown)
     hooks = mapreduce_spec.get_hooks()
     if hooks is not None:
       try:
-        hooks.enqueue_kickoff_task(kickoff_worker_task, queue_name)
+        hooks.enqueue_kickoff_task(kickoff_task, queue_name)
+        return
       except NotImplementedError:
-        kickoff_worker_task.add(queue_name, transactional=transactional)
-    else:
-      kickoff_worker_task.add(queue_name)
+        pass
+    kickoff_task.add(queue_name, transactional=True)
 
 
 class FinalizeJobHandler(base_handler.TaskQueueHandler):
@@ -1456,8 +1588,9 @@
     finalize_task = taskqueue.Task(
         name=task_name,
         url=base_path + "/finalizejob_callback",
-        params={"mapreduce_id": mapreduce_spec.mapreduce_id})
-    queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME", "default")
+        params={"mapreduce_id": mapreduce_spec.mapreduce_id},
+        headers=util._get_task_headers(mapreduce_spec))
+    queue_name = util.get_queue_name(None)
     if not _run_task_hook(mapreduce_spec.get_hooks(),
                           "enqueue_controller_task",
                           finalize_task,
diff --git a/google/appengine/ext/mapreduce/hooks.py b/google/appengine/ext/mapreduce/hooks.py
index a1ab327..0aa33aa 100644
--- a/google/appengine/ext/mapreduce/hooks.py
+++ b/google/appengine/ext/mapreduce/hooks.py
@@ -57,7 +57,7 @@
 
     Args:
       task: A taskqueue.Task that must be queued in order for the mapreduce
-        mappers to be run.
+        mappers to be run. The task is named.
       queue_name: The queue where the task should be run e.g. "default".
 
     Raises:
@@ -69,9 +69,11 @@
   def enqueue_kickoff_task(self, task, queue_name):
     """Enqueues a task that is used to start the mapreduce.
 
+    This hook will be called within a transaction scope.
+    Hook should add task transactionally.
+
     Args:
-      task: A taskqueue.Task that must be queued in order for the mapreduce
-        to start.
+      task: A taskqueue.Task that must be queued to run KickOffJobHandler.
       queue_name: The queue where the task should be run e.g. "default".
 
     Raises:
@@ -83,6 +85,9 @@
   def enqueue_done_task(self, task, queue_name):
     """Enqueues a task that is triggered when the mapreduce completes.
 
+    This hook will be called within a transaction scope.
+    Hook should add task transactionally.
+
     Args:
       task: A taskqueue.Task that must be queued in order for the client to be
         notified when the mapreduce is complete.
@@ -99,7 +104,7 @@
 
     Args:
       task: A taskqueue.Task that must be queued in order for updates to the
-        mapreduce process to be properly tracked.
+        mapreduce process to be properly tracked. The task is named.
       queue_name: The queue where the task should be run e.g. "default".
 
     Raises:
diff --git a/google/appengine/ext/mapreduce/mapper_pipeline.py b/google/appengine/ext/mapreduce/mapper_pipeline.py
index 2b91118..08d0c55 100644
--- a/google/appengine/ext/mapreduce/mapper_pipeline.py
+++ b/google/appengine/ext/mapreduce/mapper_pipeline.py
@@ -86,6 +86,21 @@
           output_writer_spec=None,
           params=None,
           shards=None):
+    """Start a mapreduce job.
+
+    Args:
+      job_name: mapreduce name. Only for display purposes.
+      handler_spec: fully qualified name to your map function/class.
+      input_reader_spec: fully qualified name to input reader class.
+      output_writer_spec: fully qualified name to output writer class.
+      params: a dictionary of parameters for input reader and output writer
+        initialization.
+      shards: number of shards. This provides a guide to mapreduce. The real
+        number of shards is determined by how the input is split.
+    """
+    if shards is None:
+      shards = parameters.DEFAULT_SHARD_COUNT
+
     mapreduce_id = control.start_map(
         job_name,
         handler_spec,
@@ -104,6 +119,7 @@
         (parameters._DEFAULT_BASE_PATH, mapreduce_id)))
 
   def callback(self):
+    """Callback after this async pipeline finishes."""
     mapreduce_id = self.outputs.job_id.value
     mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
     mapper_spec = mapreduce_state.mapreduce_spec.mapper
diff --git a/google/appengine/ext/mapreduce/model.py b/google/appengine/ext/mapreduce/model.py
index 1dea850..0e6e83b 100644
--- a/google/appengine/ext/mapreduce/model.py
+++ b/google/appengine/ext/mapreduce/model.py
@@ -79,13 +79,6 @@
 
 
 
-
-_DEFAULT_PROCESSING_RATE_PER_SEC = 1000000
-
-
-_DEFAULT_SHARD_COUNT = 8
-
-
 _MAP_REDUCE_KINDS = ("_GAE_MR_MapreduceControl",
                      "_GAE_MR_MapreduceState",
                      "_GAE_MR_ShardState",
@@ -128,7 +121,8 @@
                name=None,
                eta=None,
                countdown=None,
-               parent=None):
+               parent=None,
+               headers=None):
     """Init.
 
     Args:
@@ -138,6 +132,7 @@
       eta: task eta.
       countdown: task countdown.
       parent: parent entity of huge task's payload.
+      headers: a dict of headers for the task.
 
     Raises:
       ValueError: when payload is too big even for datastore, or parent is
@@ -151,6 +146,8 @@
         "Content-Type": "application/octet-stream",
         self.PAYLOAD_VERSION_HEADER: self.PAYLOAD_VERSION
     }
+    if headers:
+      self._headers.update(headers)
 
 
     payload_str = urllib.urlencode(params)
@@ -621,7 +618,7 @@
     self.handler_spec = handler_spec
     self.input_reader_spec = input_reader_spec
     self.output_writer_spec = output_writer_spec
-    self.shard_count = shard_count
+    self.shard_count = int(shard_count)
     self.params = params
 
   def get_handler(self):
@@ -677,6 +674,11 @@
                json.get("mapper_output_writer")
               )
 
+  def __eq__(self, other):
+    if not isinstance(other, self.__class__):
+      return False
+    return self.to_json() == other.to_json()
+
 
 class MapreduceSpec(JsonMixin):
   """Contains a specification for the whole mapreduce.
@@ -767,6 +769,14 @@
                          json.get("hooks_class_name"))
     return mapreduce_spec
 
+  def __str__(self):
+    return str(self.to_json())
+
+  def __eq__(self, other):
+    if not isinstance(other, self.__class__):
+      return False
+    return self.to_json() == other.to_json()
+
 
 class MapreduceState(db.Model):
   """Holds accumulated state of mapreduce execution.
@@ -776,7 +786,7 @@
 
   Properties:
     mapreduce_spec: cached deserialized MapreduceSpec instance. read-only
-    active: if we have this mapreduce running right now
+    active: if this MR is still running.
     last_poll_time: last time controller job has polled this mapreduce.
     counters_map: shard's counters map as CountersMap. Mirrors
       counters_map_json.
@@ -784,10 +794,14 @@
       progress of all the shards the best way it can.
     sparkline_url: last computed mapreduce status chart url in small format.
     result_status: If not None, the final status of the job.
-    active_shards: How many shards are still processing.
+    active_shards: How many shards are still processing. This starts as 0,
+      then set by KickOffJob handler to be the actual number of input
+      readers after input splitting, and is updated by Controller task
+      as shards finish.
     start_time: When the job started.
     writer_state: Json property to be used by writer to store its state.
       This is filled when single output per job. Will be deprecated.
+      Use OutputWriter.get_filenames instead.
   """
 
   RESULT_SUCCESS = "success"
@@ -803,15 +817,15 @@
   counters_map = JsonProperty(CountersMap, default=CountersMap(), indexed=False)
   app_id = db.StringProperty(required=False, indexed=True)
   writer_state = JsonProperty(dict, indexed=False)
+  active_shards = db.IntegerProperty(default=0, indexed=False)
+  failed_shards = db.IntegerProperty(default=0, indexed=False)
+  aborted_shards = db.IntegerProperty(default=0, indexed=False)
+  result_status = db.StringProperty(required=False, choices=_RESULTS)
 
 
   chart_url = db.TextProperty(default="")
   chart_width = db.IntegerProperty(default=300, indexed=False)
   sparkline_url = db.TextProperty(default="")
-  result_status = db.StringProperty(required=False, choices=_RESULTS)
-  active_shards = db.IntegerProperty(default=0, indexed=False)
-  failed_shards = db.IntegerProperty(default=0, indexed=False)
-  aborted_shards = db.IntegerProperty(default=0, indexed=False)
   start_time = db.DateTimeProperty(auto_now_add=True)
 
   @classmethod
@@ -900,6 +914,11 @@
     """Generate new mapreduce id."""
     return _get_descending_key()
 
+  def __eq__(self, other):
+    if not isinstance(other, self.__class__):
+      return False
+    return self.properties() == other.properties()
+
 
 class TransientShardState(object):
   """Shard's state kept in task payload.
@@ -1022,7 +1041,7 @@
   """Single shard execution state.
 
   The shard state is stored in the datastore and is later aggregated by
-  controller task. Shard key_name is equal to shard_id.
+  controller task. ShardState key_name is equal to shard_id.
 
   Properties:
     active: if we have this shard still running as boolean.
@@ -1037,7 +1056,7 @@
     shard_description: A string description of the work this shard will do.
     last_work_item: A string description of the last work item processed.
     writer_state: writer state for this shard. This is filled when a job
-      has one output per shard by OutputWriter's create method.
+      has one output per shard by MR worker after finalizing output files.
     slice_id: slice id of current executing slice. A task
       will not run unless its slice_id matches this. Initial
       value is 0. By the end of slice execution, this number is
@@ -1141,6 +1160,10 @@
     self.slice_retries = 0
     self.acquired_once = False
 
+  def set_for_failure(self):
+    self.active = False
+    self.result_status = self.RESULT_FAILED
+
   def set_for_abort(self):
     self.active = False
     self.result_status = self.RESULT_ABORTED
@@ -1158,6 +1181,11 @@
     for prop in self.properties().values():
       setattr(self, prop.name, getattr(other_state, prop.name))
 
+  def __eq__(self, other):
+    if not isinstance(other, self.__class__):
+      return False
+    return self.properties() == other.properties()
+
   def get_shard_number(self):
     """Gets the shard number from the key name."""
     return int(self.key().name().split("-")[-1])
@@ -1213,9 +1241,13 @@
     return cls.get_by_key_name(shard_id)
 
   @classmethod
+  @db.non_transactional
   def find_by_mapreduce_state(cls, mapreduce_state):
     """Find all shard states for given mapreduce.
 
+    Never runs within a transaction since it may touch >5 entity groups (one
+    for each shard).
+
     Args:
       mapreduce_state: MapreduceState instance
 
diff --git a/google/appengine/ext/mapreduce/output_writers.py b/google/appengine/ext/mapreduce/output_writers.py
index 2651409..707052b 100644
--- a/google/appengine/ext/mapreduce/output_writers.py
+++ b/google/appengine/ext/mapreduce/output_writers.py
@@ -95,7 +95,8 @@
        multiple slices.
     4) write() method is called to write data.
     5) finalize() is called when shard processing is done.
-    5) finalize_job() is called when job is completed.
+    6) finalize_job() is called when job is completed.
+    7) get_filenames() is called to get output file names.
   """
 
   @classmethod
@@ -117,21 +118,35 @@
   def init_job(cls, mapreduce_state):
     """Initialize job-level writer state.
 
+    This method exists only to support the deprecated feature of output
+    files shared by many shards. New output writers should not do anything
+    in this method.
+
     Args:
       mapreduce_state: an instance of model.MapreduceState describing current
-      job. State can be modified during initialization.
+      job. MapreduceState.writer_state can be modified during initialization
+      to save the information about the files shared by many shards.
     """
-    raise NotImplementedError("init_job() not implemented in %s" % cls)
+    pass
 
   @classmethod
   def finalize_job(cls, mapreduce_state):
     """Finalize job-level writer state.
 
+    This method exists only to support the deprecated feature of output
+    files shared by many shards. New output writers should not do anything
+    in this method.
+
+    This method should only be called when mapreduce_state.result_status shows
+    success. After finalizing the outputs, it should save the info for the
+    files shared by shards into mapreduce_state.writer_state so that other
+    operations can find the outputs.
+
     Args:
       mapreduce_state: an instance of model.MapreduceState describing current
-      job. State can be modified during finalization.
+      job. MapreduceState.writer_state can be modified during finalization.
     """
-    raise NotImplementedError("finalize_job() not implemented in %s" % cls)
+    pass
 
   @classmethod
   def from_json(cls, state):
@@ -161,7 +176,9 @@
     Args:
       mapreduce_state: an instance of model.MapreduceState describing current
       job. State can NOT be modified.
-      shard_state: shard state which can be modified.
+      shard_state: shard state can NOT be modified. Output file state should
+      be contained in the output writer instance. The serialized output writer
+      instance will be saved by mapreduce across slices.
     """
     raise NotImplementedError("create() not implemented in %s" % cls)
 
@@ -178,9 +195,14 @@
   def finalize(self, ctx, shard_state):
     """Finalize writer shard-level state.
 
+    This should only be called when shard_state.result_status shows success.
+    After finalizing the outputs, it should save per-shard output file info
+    into shard_state.writer_state so that other operations can find the
+    outputs.
+
     Args:
       ctx: an instance of context.Context.
-      shard_state: shard state.
+      shard_state: shard state. ShardState.writer_state can be modified.
     """
     raise NotImplementedError("finalize() not implemented in %s" %
                               self.__class__)
@@ -189,12 +211,17 @@
   def get_filenames(cls, mapreduce_state):
     """Obtain output filenames from mapreduce state.
 
+    This method should only be called when a MR is finished. Implementors of
+    this method should not assume any other methods of this class have been
+    called. In the case of no input data, no other method except validate
+    would have been called.
+
     Args:
       mapreduce_state: an instance of model.MapreduceState
 
     Returns:
-      list of filenames this writer writes to or None if writer
-      doesn't write to a file.
+      List of filenames this mapreduce successfully wrote to. The list can be
+    empty if no output file was successfully written.
     """
     raise NotImplementedError("get_filenames() not implemented in %s" % cls)
 
@@ -492,8 +519,18 @@
     def from_json(cls, json):
       return cls(json["filenames"], json["request_filenames"])
 
-  def __init__(self, filename):
+  def __init__(self, filename, request_filename):
+    """Init.
+
+    Args:
+      filename: writable filename from Files API.
+      request_filename: in the case of GCS files, we need this to compute
+        finalized filename. In the case of blobstore, this is useless as
+        finalized filename can be retrieved from a Files API internal
+        name mapping.
+    """
     self._filename = filename
+    self._request_filename = request_filename
 
   @classmethod
   def _get_output_sharding(cls, mapreduce_state=None, mapper_spec=None):
@@ -606,32 +643,18 @@
 
   @classmethod
   def finalize_job(cls, mapreduce_state):
-    """Finalize job-level writer state.
-
-    Collect from model.ShardState if this job has output per shard.
-
-    Args:
-      mapreduce_state: an instance of model.MapreduceState describing current
-      job.
-    """
-    state = cls._State.from_json(mapreduce_state.writer_state)
+    """See parent class."""
     output_sharding = cls._get_output_sharding(mapreduce_state=mapreduce_state)
-    filesystem = cls._get_filesystem(mapreduce_state.mapreduce_spec.mapper)
     if output_sharding != cls.OUTPUT_SHARDING_INPUT_SHARDS:
+      state = cls._State.from_json(mapreduce_state.writer_state)
       files.finalize(state.filenames[0])
-      finalized_filenames = [cls._get_finalized_filename(
-          filesystem, state.filenames[0], state.request_filenames[0])]
-    else:
-      shards = model.ShardState.find_by_mapreduce_state(mapreduce_state)
-      finalized_filenames = []
-      for shard in shards:
-        state = cls._State.from_json(shard.writer_state)
-        finalized_filenames.append(state.filenames[0])
 
-    state.filenames = finalized_filenames
-    state.request_filenames = []
+
+    finalized_filenames = cls.get_filenames(mapreduce_state)
+    state = cls._State(finalized_filenames, [])
     mapreduce_state.writer_state = state.to_json()
 
+
   @classmethod
   def from_json(cls, state):
     """Creates an instance of the OutputWriter for the given json state.
@@ -642,7 +665,12 @@
     Returns:
       An instance of the OutputWriter configured using the values of json.
     """
-    return cls(state["filename"])
+    if "request_filename" in state:
+      return cls(state["filename"], state["request_filename"])
+
+
+    return cls(state["filename"], None)
+
 
   def to_json(self):
     """Returns writer state to serialize in json.
@@ -650,7 +678,8 @@
     Returns:
       A json-izable version of the OutputWriter state.
     """
-    return {"filename": self._filename}
+    return {"filename": self._filename,
+            "request_filename": self._request_filename}
 
   def _can_be_retried(self, tstate):
     """Inherit doc.
@@ -697,12 +726,16 @@
                                   request_filename,
                                   mime_type,
                                   acl=acl)
+
+
       state = cls._State([filename], [request_filename])
       shard_state.writer_state = state.to_json()
+
     else:
       state = cls._State.from_json(mapreduce_state.writer_state)
       filename = state.filenames[0]
-    return cls(filename)
+      request_filename = state.request_filenames[0]
+    return cls(filename, request_filename)
 
   def finalize(self, ctx, shard_state):
     """Finalize writer shard-level state.
@@ -714,17 +747,20 @@
     mapreduce_spec = ctx.mapreduce_spec
     output_sharding = self.__class__._get_output_sharding(
         mapper_spec=mapreduce_spec.mapper)
+
+
+    if self._request_filename is None or hasattr(self, "_183_test"):
+      writer_state = self._State.from_json(shard_state.writer_state)
+      self._request_filename = writer_state.request_filenames[0]
+
     if output_sharding == self.OUTPUT_SHARDING_INPUT_SHARDS:
       filesystem = self._get_filesystem(mapreduce_spec.mapper)
-      state = self._State.from_json(shard_state.writer_state)
-      writable_filename = state.filenames[0]
-      files.finalize(writable_filename)
+      files.finalize(self._filename)
       finalized_filenames = [self._get_finalized_filename(
-          filesystem, state.filenames[0], state.request_filenames[0])]
+          filesystem, self._filename, self._request_filename)]
 
-      state.filenames = finalized_filenames
-      state.request_filenames = []
-      shard_state.writer_state = state.to_json()
+      shard_state.writer_state = self._State(
+          finalized_filenames, []).to_json()
 
 
 
@@ -733,21 +769,35 @@
             "Shard %s-%s finalized blobstore file %s.",
             mapreduce_spec.mapreduce_id,
             shard_state.shard_number,
-            writable_filename)
+            self._filename)
         logging.info("Finalized name is %s.", finalized_filenames[0])
 
   @classmethod
   def get_filenames(cls, mapreduce_state):
-    """Obtain output filenames from mapreduce state.
+    """See parent class."""
+    finalized_filenames = []
+    output_sharding = cls._get_output_sharding(mapreduce_state=mapreduce_state)
+    if output_sharding != cls.OUTPUT_SHARDING_INPUT_SHARDS:
+      if (mapreduce_state.writer_state and mapreduce_state.result_status ==
+          model.MapreduceState.RESULT_SUCCESS):
+        state = cls._State.from_json(mapreduce_state.writer_state)
+        filesystem = cls._get_filesystem(mapreduce_state.mapreduce_spec.mapper)
 
-    Args:
-      mapreduce_state: an instance of model.MapreduceState
 
-    Returns:
-      list of filenames this writer writes to.
-    """
-    state = cls._State.from_json(mapreduce_state.writer_state)
-    return state.filenames
+        if not state.request_filenames:
+          finalized_filenames = state.filenames
+        else:
+
+          finalized_filenames = [cls._get_finalized_filename(
+              filesystem, state.filenames[0], state.request_filenames[0])]
+    else:
+      shards = model.ShardState.find_by_mapreduce_state(mapreduce_state)
+      for shard in shards:
+        if shard.result_status == model.ShardState.RESULT_SUCCESS:
+          state = cls._State.from_json(shard.writer_state)
+          finalized_filenames.append(state.filenames[0])
+
+    return finalized_filenames
 
 
 class FileOutputWriter(FileOutputWriterBase):
@@ -883,14 +933,16 @@
   _JSON_PICKLE = "pickle"
 
 
-  def __init__(self, streaming_buffer, writer_spec=None):
+  def __init__(self, streaming_buffer, filename, writer_spec=None):
     """Initialize a GoogleCloudStorageOutputWriter instance.
 
     Args:
       streaming_buffer: an instance of writable buffer from cloudstorage_api.
+      filename: the GCS client filename this writer is writing to.
       writer_spec: the specification for the writer, useful for subclasses.
     """
     self._streaming_buffer = streaming_buffer
+    self._filename = filename
 
   @classmethod
   def _generate_filename(cls, writer_spec, name, job_id, num,
@@ -951,35 +1003,13 @@
     cls._generate_filename(writer_spec, "name", "id", 0, 0)
 
   @classmethod
-  def init_job(cls, mapreduce_state):
-    """Initialize any job-level state.
-
-    Args:
-      mapreduce_state: an instance of model.MapreduceState. State may be
-        modified during initialization.
-    """
-
-    pass
-
-  @classmethod
-  def finalize_job(cls, mapreduce_state):
-    """Finalize any job-level state.
-
-    Args:
-      mapreduce_state: an instance of model.MapreduceState. State may be
-        modified during finalization.
-    """
-
-    pass
-
-  @classmethod
   def create(cls, mapreduce_state, shard_state):
     """Create new writer for a shard.
 
     Args:
       mapreduce_state: an instance of model.MapreduceState describing current
         job. State can NOT be modified.
-      shard_state: an instance of model.ShardState which can be modified.
+      shard_state: an instance of model.ShardState.
 
     Returns:
       an output writer for the requested shard.
@@ -1008,10 +1038,7 @@
                                options=options,
                                _account_id=account_id)
 
-
-    shard_state.writer_state = {"filename": filename}
-
-    return cls(writer, writer_spec=writer_spec)
+    return cls(writer, filename, writer_spec=writer_spec)
 
   @classmethod
   def _get_filename(cls, shard_state):
@@ -1022,7 +1049,8 @@
     shards = model.ShardState.find_by_mapreduce_state(mapreduce_state)
     filenames = []
     for shard in shards:
-      filenames.append(cls._get_filename(shard))
+      if shard.result_status == model.ShardState.RESULT_SUCCESS:
+        filenames.append(cls._get_filename(shard))
     return filenames
 
   @classmethod
@@ -1055,6 +1083,8 @@
   def finalize(self, ctx, shard_state):
     self._streaming_buffer.close()
 
+    shard_state.writer_state = {"filename": self._filename}
+
 
 class _GoogleCloudStorageRecordOutputWriter(_GoogleCloudStorageOutputWriter):
   """Write data to the Google Cloud Storage file using LevelDB format.
@@ -1084,15 +1114,17 @@
 
   def __init__(self,
                streaming_buffer,
+               filename,
                writer_spec=None):
     """Initialize a CloudStorageOutputWriter instance.
 
     Args:
       streaming_buffer: an instance of writable buffer from cloudstorage_api.
+      filename: the GCS client filename this writer is writing to.
       writer_spec: the specification for the writer.
     """
     super(_GoogleCloudStorageRecordOutputWriter, self).__init__(
-        streaming_buffer)
+        streaming_buffer, filename, writer_spec)
     self._flush_size = writer_spec.get(self.FLUSH_SIZE_PARAM,
                                        self.DEFAULT_FLUSH_SIZE)
     self._reset()
@@ -1124,7 +1156,8 @@
       shard_state: an instance of model.ShardState for the shard.
     """
     self._flush(ctx)
-    self._streaming_buffer.close()
+    super(_GoogleCloudStorageRecordOutputWriter, self).finalize(ctx,
+                                                                shard_state)
 
   def _flush(self, ctx):
     record_writer = records.RecordsWriter(
diff --git a/google/appengine/ext/mapreduce/parameters.py b/google/appengine/ext/mapreduce/parameters.py
index 22a06e5..7e5d6c4 100644
--- a/google/appengine/ext/mapreduce/parameters.py
+++ b/google/appengine/ext/mapreduce/parameters.py
@@ -20,6 +20,35 @@
 __all__ = []
 
 DEFAULT_SHARD_RETRY_LIMIT = 3
+DEFAULT_QUEUE_NAME = "default"
+DEFAULT_SHARD_COUNT = 8
+
+
+
+
+
+_RETRY_SLICE_ERROR_MAX_RETRIES = 10
+
+
+_MAX_TASK_RETRIES = 30
+
+
+
+
+_SLICE_DURATION_SEC = 15
+
+
+_LEASE_GRACE_PERIOD = 1
+
+
+_REQUEST_EVENTUAL_TIMEOUT = 10 * 60 + 30
+
+
+_CONTROLLER_PERIOD_SEC = 2
+
+
+
+_DEFAULT_PROCESSING_RATE_PER_SEC = 1000000
 
 
 _DEFAULT_BASE_PATH = "/_ah/mapreduce"
diff --git a/google/appengine/ext/mapreduce/shuffler.py b/google/appengine/ext/mapreduce/shuffler.py
index 4e25a23..69023a6 100644
--- a/google/appengine/ext/mapreduce/shuffler.py
+++ b/google/appengine/ext/mapreduce/shuffler.py
@@ -470,16 +470,13 @@
 
   @classmethod
   def get_filenames(cls, mapreduce_state):
-    """Obtain output filenames from mapreduce state.
+    """See parent class."""
+    if mapreduce_state.writer_state:
+      return mapreduce_state.writer_state["filenames"]
+    return []
 
-    Args:
-      mapreduce_state: an instance of model.MapreduceState
-
-    Returns:
-        list of filenames this writer writes to or None if writer
-        doesn't write to a file.
-    """
-    return mapreduce_state.writer_state["filenames"]
+  def finalize(self, ctx, shard_state):
+    pass
 
   def write(self, data, ctx):
     """Write data.
diff --git a/google/appengine/ext/mapreduce/test_support.py b/google/appengine/ext/mapreduce/test_support.py
index f0337ee..6117aa9 100644
--- a/google/appengine/ext/mapreduce/test_support.py
+++ b/google/appengine/ext/mapreduce/test_support.py
@@ -44,6 +44,8 @@
 from google.appengine.ext.webapp import mock_webapp
 
 
+
+
 def decode_task_payload(task):
   """Decodes POST task payload.
 
@@ -91,6 +93,7 @@
   Raises:
     Exception: whatever the task raises.
   """
+
   if not handlers_map:
     handlers_map = main.create_handlers_map()
 
@@ -107,7 +110,25 @@
   request = mock_webapp.MockRequest()
   request.set_url(url)
 
-  request.environ["HTTP_HOST"] = "myapp.appspot.com"
+
+  version = "mr-test-support-version.1"
+  module = "mr-test-support-module"
+  default_version_hostname = "mr-test-support.appspot.com"
+  host = "%s.%s.%s" % (version.split(".")[0],
+                       module,
+                       default_version_hostname)
+
+  if "CURRENT_VERSION_ID" not in os.environ:
+    request.environ["CURRENT_VERSION_ID"] = version
+  if "DEFAULT_VERSION_HOSTNAME" not in os.environ:
+    request.environ["DEFAULT_VERSION_HOSTNAME"] = (
+        default_version_hostname)
+  if "CURRENT_MODULE_ID" not in os.environ:
+    request.environ["CURRENT_MODULE_ID"] = module
+  if "HTTP_HOST" not in os.environ:
+    request.environ["HTTP_HOST"] = host
+
+
   for k, v in task.get("headers", []):
     request.headers[k] = v
     environ_key = "HTTP_" + k.replace("-", "_").upper()
diff --git a/google/appengine/ext/mapreduce/util.py b/google/appengine/ext/mapreduce/util.py
index 2303842..7396790 100644
--- a/google/appengine/ext/mapreduce/util.py
+++ b/google/appengine/ext/mapreduce/util.py
@@ -38,6 +38,7 @@
 __all__ = [
     "create_datastore_write_config",
     "for_name",
+    "get_queue_name",
     "get_short_name",
     "handler_for_name",
     "is_generator",
@@ -45,13 +46,62 @@
     "total_seconds",
     "try_serialize_handler",
     "try_deserialize_handler",
+    "CALLBACK_MR_ID_TASK_HEADER",
     ]
 
 import inspect
+import os
 import pickle
 import types
 
 from google.appengine.datastore import datastore_rpc
+from google.appengine.ext.mapreduce import parameters
+
+
+_MR_ID_TASK_HEADER = "AE-MR-ID"
+_MR_SHARD_ID_TASK_HEADER = "AE-MR-SHARD-ID"
+
+
+CALLBACK_MR_ID_TASK_HEADER = "Mapreduce-Id"
+
+
+def _get_task_host():
+  """Get the Host header value for all mr tasks.
+
+  Task Host header determines which instance this task would be routed to.
+
+  Current version id format is: v7.368834058928280579
+  Current module id is just the module's name. It could be "default"
+  Default version hostname is app_id.appspot.com
+
+  Returns:
+    A complete host name of the format version.module.app_id.appspot.com.
+  If module is the default module, just version.app_id.appspot.com. The reason
+  is if an app doesn't have modules enabled and the url is
+  "version.default.app_id", "version" is ignored and "default" is used as
+  version. If "default" version doesn't exist, the url is routed to the
+  default version.
+  """
+  version = os.environ["CURRENT_VERSION_ID"].split(".")[0]
+  default_host = os.environ["DEFAULT_VERSION_HOSTNAME"]
+  module = os.environ["CURRENT_MODULE_ID"]
+  if os.environ["CURRENT_MODULE_ID"] == "default":
+    return "%s.%s" % (version, default_host)
+  return "%s.%s.%s" % (version, module, default_host)
+
+
+def _get_task_headers(mr_spec, mr_id_header_key=_MR_ID_TASK_HEADER):
+  """Get headers for all mr tasks.
+
+  Args:
+    mr_spec: an instance of model.MapreduceSpec.
+    mr_id_header_key: the key to set mr id with.
+
+  Returns:
+    A dictionary of all headers.
+  """
+  return {mr_id_header_key: mr_spec.mapreduce_id,
+          "Host": _get_task_host()}
 
 
 def _enum(**enums):
@@ -59,6 +109,36 @@
   return type("Enum", (), enums)
 
 
+def get_queue_name(queue_name):
+  """Determine which queue MR should run on.
+
+  How to choose the queue:
+  1. If user provided one, use that.
+  2. If we are starting a mr from taskqueue, inherit that queue.
+     If it's a special queue, fall back to the default queue.
+  3. Default queue.
+
+  If user is using any MR pipeline interface, pipeline.start takes a
+  "queue_name" argument. The pipeline will run on that queue and MR will
+  simply inherit the queue_name.
+
+  Args:
+    queue_name: queue_name from user. May be None.
+
+  Returns:
+    The queue name to run on.
+  """
+  if queue_name:
+    return queue_name
+  queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME",
+                              parameters.DEFAULT_QUEUE_NAME)
+  if len(queue_name) > 1 and queue_name[0:2] == "__":
+
+    return parameters.DEFAULT_QUEUE_NAME
+  else:
+    return queue_name
+
+
 def total_seconds(td):
   """convert a timedelta to seconds.
 
diff --git a/google/appengine/ext/ndb/model.py b/google/appengine/ext/ndb/model.py
index 28897d1..dafebaf 100644
--- a/google/appengine/ext/ndb/model.py
+++ b/google/appengine/ext/ndb/model.py
@@ -304,8 +304,9 @@
            'Index', 'IndexState', 'IndexProperty',
            'ModelAdapter', 'ModelAttribute',
            'ModelKey', 'MetaModel', 'Model', 'Expando',
-           'transaction', 'transaction_async',
-           'in_transaction', 'transactional', 'non_transactional',
+           'transaction', 'transaction_async', 'in_transaction',
+           'transactional', 'transactional_async', 'transactional_tasklet',
+           'non_transactional',
            'get_multi', 'get_multi_async',
            'put_multi', 'put_multi_async',
            'delete_multi', 'delete_multi_async',
@@ -3586,12 +3587,11 @@
   return tasklets.get_context().in_transaction()
 
 
-@utils.positional(1)
-def transactional(_func=None, **ctx_options):
+@utils.decorator
+def transactional(func, args, kwds, **options):
   """Decorator to make a function automatically run in a transaction.
 
   Args:
-    _func: Do not use.
     **ctx_options: Transaction options (see transaction(), but propagation
       default to TransactionOptions.ALLOWED).
 
@@ -3607,37 +3607,38 @@
       def callback(arg):
         ...
   """
-  if _func is not None:
-    # Form (1), vanilla.
-    if ctx_options:
-      raise TypeError('@transactional() does not take positional arguments')
-    # TODO: Avoid recursion, call outer_transactional_wrapper() directly?
-    return transactional()(_func)
-
-  ctx_options.setdefault('propagation',
-                         datastore_rpc.TransactionOptions.ALLOWED)
-
-  # Form (2), with options.
-  def outer_transactional_wrapper(func):
-    @utils.wrapping(func)
-    def inner_transactional_wrapper(*args, **kwds):
-      f = func
-      if args or kwds:
-        f = lambda: func(*args, **kwds)
-      return transaction(f, **ctx_options)
-    return inner_transactional_wrapper
-  return outer_transactional_wrapper
+  return transactional_async.wrapped_decorator(
+      func, args, kwds, **options).get_result()
 
 
-@utils.positional(1)
-def non_transactional(_func=None, allow_existing=True):
+@utils.decorator
+def transactional_async(func, args, kwds, **options):
+  """The async version of @ndb.transactional."""
+  options.setdefault('propagation', datastore_rpc.TransactionOptions.ALLOWED)
+  if args or kwds:
+    return transaction_async(lambda: func(*args, **kwds), **options)
+  return transaction_async(func, **options)
+
+
+@utils.decorator
+def transactional_tasklet(func, args, kwds, **options):
+  """The tasklet version of @ndb.transactional.
+
+  Will return the result of the wrapped function as a Future.
+  """
+  from . import tasklets
+  func = tasklets.tasklet(func)
+  return transactional_async.wrapped_decorator(func, args, kwds, **options)
+
+
+@utils.decorator
+def non_transactional(func, args, kwds, allow_existing=True):
   """A decorator that ensures a function is run outside a transaction.
 
   If there is an existing transaction (and allow_existing=True), the
   existing transaction is paused while the function is executed.
 
   Args:
-    _func: Do not use.
     allow_existing: If false, throw an exception if called from within
       a transaction.  If true, temporarily re-establish the
       previous non-transactional context.  Defaults to True.
@@ -3648,36 +3649,28 @@
     A wrapper for the decorated function that ensures it runs outside a
     transaction.
   """
-  if _func is not None:
-    # TODO: Avoid recursion, call outer_non_transactional_wrapper() directly?
-    return non_transactional()(_func)
-
-  def outer_non_transactional_wrapper(func):
-    from . import tasklets
-    @utils.wrapping(func)
-    def inner_non_transactional_wrapper(*args, **kwds):
-      ctx = tasklets.get_context()
-      if not ctx.in_transaction():
-        return func(*args, **kwds)
-      if not allow_existing:
-        raise datastore_errors.BadRequestError(
-          '%s cannot be called within a transaction.' % func.__name__)
-      save_ctx = ctx
-      while ctx.in_transaction():
-        ctx = ctx._parent_context
-        if ctx is None:
-          raise datastore_errors.BadRequestError(
-            'Context without non-transactional ancestor')
-      save_ds_conn = datastore._GetConnection()
-      try:
-        datastore._SetConnection(save_ctx._old_ds_conn)
-        tasklets.set_context(ctx)
-        return func(*args, **kwds)
-      finally:
-        tasklets.set_context(save_ctx)
-        datastore._SetConnection(save_ds_conn)
-    return inner_non_transactional_wrapper
-  return outer_non_transactional_wrapper
+  from . import tasklets
+  ctx = tasklets.get_context()
+  if not ctx.in_transaction():
+    return func(*args, **kwds)
+  if not allow_existing:
+    raise datastore_errors.BadRequestError(
+      '%s cannot be called within a transaction.' % func.__name__)
+  save_ctx = ctx
+  while ctx.in_transaction():
+    ctx = ctx._parent_context
+    if ctx is None:
+      raise datastore_errors.BadRequestError(
+        'Context without non-transactional ancestor')
+  save_ds_conn = datastore._GetConnection()
+  try:
+    if hasattr(save_ctx, '_old_ds_conn'):
+      datastore._SetConnection(save_ctx._old_ds_conn)
+    tasklets.set_context(ctx)
+    return func(*args, **kwds)
+  finally:
+    tasklets.set_context(save_ctx)
+    datastore._SetConnection(save_ds_conn)
 
 
 def get_multi_async(keys, **ctx_options):
diff --git a/google/appengine/ext/ndb/tasklets.py b/google/appengine/ext/ndb/tasklets.py
index eb31db0..3214def 100644
--- a/google/appengine/ext/ndb/tasklets.py
+++ b/google/appengine/ext/ndb/tasklets.py
@@ -974,6 +974,7 @@
 # on, and they found out it was error-prone.  Should I worry?
 Return = StopIteration
 
+
 def get_return_value(err):
   # XXX Docstring
   if not err.args:
@@ -1022,10 +1023,10 @@
   some web application framework (e.g. a Django view function or a
   webapp.RequestHandler.get method).
   """
+  taskletfunc = tasklet(func)  # wrap at declaration time.
   @utils.wrapping(func)
   def synctasklet_wrapper(*args, **kwds):
     __ndb_debug__ = utils.func_info(func)
-    taskletfunc = tasklet(func)
     return taskletfunc(*args, **kwds).get_result()
   return synctasklet_wrapper
 
@@ -1036,6 +1037,7 @@
   Use this for toplevel view functions such as
   webapp.RequestHandler.get() or Django view functions.
   """
+  synctaskletfunc = synctasklet(func)  # wrap at declaration time.
   @utils.wrapping(func)
   def add_context_wrapper(*args, **kwds):
     __ndb_debug__ = utils.func_info(func)
@@ -1044,7 +1046,7 @@
     ctx = make_default_context()
     try:
       set_context(ctx)
-      return synctasklet(func)(*args, **kwds)
+      return synctaskletfunc(*args, **kwds)
     finally:
       set_context(None)
       ctx.flush().check_success()
diff --git a/google/appengine/ext/ndb/utils.py b/google/appengine/ext/ndb/utils.py
index c78b782..3df332c 100644
--- a/google/appengine/ext/ndb/utils.py
+++ b/google/appengine/ext/ndb/utils.py
@@ -138,6 +138,54 @@
   return positional_decorator
 
 
+def decorator(wrapped_decorator):
+  """Converts a function into a decorator that optionally accepts keyword
+  arguments in its declaration.
+
+  Example usage:
+    @utils.decorator
+    def decorator(func, args, kwds, op1=None):
+      ... apply op1 ...
+      return func(*args, **kwds)
+
+    # Form (1), vanilla
+    @decorator
+    def foo(...):
+      ...
+
+    # Form (2), with options
+    @decorator(op1=5)
+    def foo(...):
+      ...
+
+  Args:
+    wrapped_decorator: A function that accepts positional args (func, args,
+      kwds) and any additional supported keyword arguments.
+
+  Returns:
+    A decorator with an additional 'wrapped_decorator' property that is set to
+  the original function.
+  """
+  def helper(_func=None, **options):
+    def outer_wrapper(func):
+      @wrapping(func)
+      def inner_wrapper(*args, **kwds):
+        return wrapped_decorator(func, args, kwds, **options)
+      return inner_wrapper
+
+    if _func is None:
+      # Form (2), with options.
+      return outer_wrapper
+
+    # Form (1), vanilla.
+    if options:
+      # Don't allow @decorator(foo, op1=5).
+      raise TypeError('positional arguments not supported')
+    return outer_wrapper(_func)
+  helper.wrapped_decorator = wrapped_decorator
+  return helper
+
+
 def tweak_logging():
   # Hack for running tests with verbose logging.  If there are two or
   # more -v flags, turn on INFO logging; if there are 3 or more, DEBUG.
diff --git a/google/appengine/ext/remote_api/remote_api_services.py b/google/appengine/ext/remote_api/remote_api_services.py
index d7c1ce6..289116b 100644
--- a/google/appengine/ext/remote_api/remote_api_services.py
+++ b/google/appengine/ext/remote_api/remote_api_services.py
@@ -45,7 +45,7 @@
 from google.appengine.api.taskqueue import taskqueue_service_pb
 from google.appengine.api.xmpp import xmpp_service_pb
 from google.appengine.datastore import datastore_pb
-from google.appengine.datastore import datastore_v4a_pb
+from google.appengine.datastore import datastore_v4_pb
 from google.appengine.ext.remote_api import remote_api_pb
 
 
@@ -107,8 +107,8 @@
                             datastore_pb.CompositeIndices),
     },
     'datastore_v4': {
-        'AllocateIds': (datastore_v4a_pb.AllocateIdsRequest,
-                        datastore_v4a_pb.AllocateIdsResponse),
+        'AllocateIds': (datastore_v4_pb.AllocateIdsRequest,
+                        datastore_v4_pb.AllocateIdsResponse),
     },
     'file': {
         'Create': (file_service_pb.CreateRequest,
diff --git a/google/appengine/ext/testbed/__init__.py b/google/appengine/ext/testbed/__init__.py
index 0bfe429..2395cb8 100644
--- a/google/appengine/ext/testbed/__init__.py
+++ b/google/appengine/ext/testbed/__init__.py
@@ -130,6 +130,10 @@
 except ImportError:
   logservice_stub = None
 from google.appengine.api.memcache import memcache_stub
+try:
+  from google.appengine.api.search import simple_search_stub
+except ImportError:
+  simple_search_stub = None
 from google.appengine.api.taskqueue import taskqueue_stub
 from google.appengine.api.xmpp import xmpp_service_stub
 try:
@@ -178,6 +182,7 @@
 URLFETCH_SERVICE_NAME = 'urlfetch'
 USER_SERVICE_NAME = 'user'
 XMPP_SERVICE_NAME = 'xmpp'
+SEARCH_SERVICE_NAME = 'search'
 
 
 INIT_STUB_METHOD_NAMES = {
@@ -195,6 +200,7 @@
     URLFETCH_SERVICE_NAME: 'init_urlfetch_stub',
     USER_SERVICE_NAME: 'init_user_stub',
     XMPP_SERVICE_NAME: 'init_xmpp_stub',
+    SEARCH_SERVICE_NAME: 'init_search_stub',
 }
 
 
@@ -636,6 +642,21 @@
     stub = xmpp_service_stub.XmppServiceStub()
     self._register_stub(XMPP_SERVICE_NAME, stub)
 
+  def init_search_stub(self, enable=True):
+    """Enable the search stub.
+
+    Args:
+      enable: True if the fake search stub should be enabled, False if the
+              real search service should be disabled.
+    """
+    if not enable:
+      self._disable_stub(SEARCH_SERVICE_NAME)
+      return
+    if simple_search_stub is None:
+      raise StubNotSupportedError('Could not initialize search API')
+    stub = simple_search_stub.SearchServiceStub()
+    self._register_stub(SEARCH_SERVICE_NAME, stub)
+
   def _init_stub(self, service_name, *args, **kwargs):
     """Enable a stub by service name.
 
diff --git a/google/appengine/runtime/wsgi.py b/google/appengine/runtime/wsgi.py
index 0e3c1f3..87a5a35 100644
--- a/google/appengine/runtime/wsgi.py
+++ b/google/appengine/runtime/wsgi.py
@@ -60,6 +60,49 @@
     return type(x).__name__
 
 
+
+
+
+
+def LoadObject(object_name):
+  """Find and return a Python object specified by object_name.
+
+  Packages and modules are imported as necessary.
+
+  Args:
+    object_name: (string) An object specification.
+
+  Returns:
+    A tuple of the form (object, string, error).  If object_name can be
+    fully traversed, object is the specified object, string is the filename
+    containing the object, and error is None. Otherwise, object is the
+    maximal partial match of object_name, string is the filename
+    containing that object, and error is an ImportError.
+  """
+  containing_file = None
+  path = object_name.split('.')
+  obj = __import__(path[0])
+  is_parent_package = True
+  cumulative_path = path[0]
+  for name in path[1:]:
+    if hasattr(obj, '__file__'):
+      containing_file = obj.__file__
+    is_parent_package = is_parent_package and hasattr(obj, '__path__')
+    cumulative_path += '.' + name
+    if hasattr(obj, name):
+      obj = getattr(obj, name)
+    elif is_parent_package:
+      __import__(cumulative_path)
+      obj = getattr(obj, name)
+    else:
+      return obj, containing_file, ImportError(
+          '%s has no attribute %s' % (obj, name))
+
+
+
+  return obj, containing_file, None
+
+
 class WsgiRequest(object):
   """A single WSGI request."""
 
@@ -239,36 +282,23 @@
         result.close()
 
   def _LoadHandler(self):
-    """Find and return a Python object with name handler_name.
+    """Find and return a Python object with name self._handler.
 
-    Find and return a Python object specified by self._handler. Packages and
-    modules are imported as necessary. If successful, the filename of the module
-    is inserted into environ with key 'PATH_TRANSLATED' if it has one.
+    Sets _environ so that PATH_TRANSLATED is equal to the file containing the
+    handler.
+
+    Packages and modules are imported as necessary.
 
     Returns:
-      A Python object.
+      The Python object specified by self._handler.
 
     Raises:
       ImportError: An element of the path cannot be resolved.
     """
-    path = self._handler.split('.')
-    handler = __import__(path[0])
-    is_parent_package = True
-    cumulative_path = path[0]
-    for name in path[1:]:
-      if hasattr(handler, '__file__'):
-        self._environ['PATH_TRANSLATED'] = handler.__file__
-      is_parent_package = is_parent_package and hasattr(handler, '__path__')
-      cumulative_path += '.' + name
-      if hasattr(handler, name):
-        handler = getattr(handler, name)
-      elif is_parent_package:
-        __import__(cumulative_path)
-        handler = getattr(handler, name)
-      else:
-        raise ImportError('%s has no attribute %s' % (handler, name))
-
-
+    handler, path, err = LoadObject(self._handler)
+    self._environ['PATH_TRANSLATED'] = path
+    if err:
+      raise err
     return handler
 
 
diff --git a/google/appengine/tools/appcfg.py b/google/appengine/tools/appcfg.py
index 313b868..e05d575 100644
--- a/google/appengine/tools/appcfg.py
+++ b/google/appengine/tools/appcfg.py
@@ -1821,12 +1821,24 @@
       if result == '0':
         raise CannotStartServingError(
             'Another operation on this version is in progress.')
-      success, unused_contents = RetryWithBackoff(
-          lambda: (self.IsServing(), None), PrintRetryMessage, 1, 2, 60, 20)
+      success, response = RetryWithBackoff(
+          self.IsServing, PrintRetryMessage, 1, 2, 60, 20)
       if not success:
 
         logging.warning('Version still not serving, aborting.')
         raise Exception('Version not ready.')
+
+
+
+      check_config_updated = response.get('check_endpoints_config')
+      if check_config_updated:
+        success, unused_contents = RetryWithBackoff(
+            lambda: (self.IsEndpointsConfigUpdated(), None),
+            PrintRetryMessage, 1, 2, 60, 20)
+        if not success:
+          logging.warning('Failed to update Endpoints configuration.  Try '
+                          'updating again.')
+          raise Exception('Endpoints config update failed.')
       self.in_transaction = False
 
     return app_summary
@@ -1908,7 +1920,10 @@
       Exception: Deploy has not yet been called.
 
     Returns:
-      True if the deployed app version is serving.
+      (serving, response) Where serving is True if the deployed app version is
+        serving, False otherwise.  response is a dict containing the parsed
+        response from the server, or an empty dict if the server's response was
+        an old style 0/1 response.
     """
     assert self.started, 'StartServing() must be called before IsServing().'
 
@@ -1918,7 +1933,7 @@
     result = self.Send('/api/appversion/isserving')
     del self.params['new_serving_resp']
     if result in ['0', '1']:
-      return result == '1'
+      return result == '1', {}
     result = AppVersionUpload._ValidateIsServingYaml(result)
     if not result:
       raise CannotStartServingError(
@@ -1929,7 +1944,52 @@
       StatusUpdate(message)
     if fatal:
       raise CannotStartServingError(fatal)
-    return result['serving']
+    return result['serving'], result
+
+  @staticmethod
+  def _ValidateIsEndpointsConfigUpdatedYaml(resp):
+    """Validates the YAML string response from an isconfigupdated request.
+
+    Args:
+      resp: A string containing the response from the server.
+
+    Returns:
+      The dictionary with the parsed response if the response is valid.
+      Otherwise returns False.
+    """
+    response_dict = yaml.safe_load(resp)
+    if 'updated' not in response_dict:
+      return None
+    return response_dict
+
+  def IsEndpointsConfigUpdated(self):
+    """Check if the Endpoints configuration for this app has been updated.
+
+    This should only be called if the app has a Google Cloud Endpoints
+    handler, or if it's removing one.  The server performs the check to see
+    if Endpoints support is added/updated/removed, and the response to the
+    isserving call indicates whether IsEndpointsConfigUpdated should be called.
+
+    Raises:
+      AssertionError: Deploy has not yet been called.
+      CannotStartServingError: There was an unexpected error with the server
+        response.
+
+    Returns:
+      True if the configuration has been updated, False if not.
+    """
+
+    assert self.started, ('StartServing() must be called before '
+                          'IsEndpointsConfigUpdated().')
+
+    StatusUpdate('Checking if Endpoints configuration has been updated.')
+
+    result = self.Send('/api/isconfigupdated')
+    result = AppVersionUpload._ValidateIsEndpointsConfigUpdatedYaml(result)
+    if result is None:
+      raise CannotStartServingError(
+          'Internal error: Could not parse IsEndpointsConfigUpdated response.')
+    return result['updated']
 
   def Rollback(self):
     """Rolls back the transaction if one is in progress."""
@@ -2701,6 +2761,10 @@
     if self.options.runtime:
       appyaml.runtime = self.options.runtime
 
+    if not appyaml.application:
+      self.parser.error('Expected -A app_id when application property in file '
+                        '%s.yaml is not set.' % basename)
+
     msg = 'Application: %s' % appyaml.application
     if appyaml.application != orig_application:
       msg += ' (was: %s)' % orig_application
@@ -2711,7 +2775,6 @@
         msg += '; module: %s' % appyaml.module
         if appyaml.module != orig_module:
           msg += ' (was: %s)' % orig_module
-
       msg += '; version: %s' % appyaml.version
       if appyaml.version != orig_version:
         msg += ' (was: %s)' % orig_version
@@ -3177,6 +3240,22 @@
     """Placeholder; we never expect this action to be invoked."""
     pass
 
+  def BackendsPhpCheck(self, appyaml):
+    """Don't support backends with the PHP runtime.
+
+    This should be used to prevent use of backends update/start/configure
+    with the PHP runtime.  We continue to allow backends
+    stop/delete/list/rollback just in case there are existing PHP backends.
+
+    Args:
+      appyaml: A parsed app.yaml file.
+    """
+    if appyaml.runtime == 'php':
+      _PrintErrorAndExit(
+          self.error_fh,
+          'Error: Backends are not supported with the PHP runtime. '
+          'Please use Modules instead.\n')
+
   def BackendsYamlCheck(self, appyaml, backend=None):
     """Check the backends.yaml file is sane and which backends to update."""
 
@@ -3224,6 +3303,7 @@
     yaml_file_basename = 'app'
     appyaml = self._ParseAppInfoFromYaml(self.basepath,
                                          basename=yaml_file_basename)
+    self.BackendsPhpCheck(appyaml)
     rpcserver = self._GetRpcServer()
 
     backends_to_update = self.BackendsYamlCheck(appyaml, self.backend)
@@ -3258,6 +3338,7 @@
 
     backend = self.args[0]
     appyaml = self._ParseAppInfoFromYaml(self.basepath)
+    self.BackendsPhpCheck(appyaml)
     rpcserver = self._GetRpcServer()
     response = rpcserver.Send('/api/backends/start',
                               app_id=appyaml.application,
@@ -3297,6 +3378,7 @@
 
     backend = self.args[0]
     appyaml = self._ParseAppInfoFromYaml(self.basepath)
+    self.BackendsPhpCheck(appyaml)
     backends_yaml = self._ParseBackendsYaml(self.basepath)
     rpcserver = self._GetRpcServer()
     response = rpcserver.Send('/api/backends/configure',
@@ -3321,6 +3403,24 @@
     else:
       print >> self.out_fh, response
 
+  def DeleteVersion(self):
+    """Deletes the specified version for an app."""
+    if not (self.options.app_id and self.options.version):
+      self.parser.error('Expected an <app_id> argument, a <version> argument '
+                        'and an optional <module> argument.')
+    if self.options.module:
+      module = self.options.module
+    else:
+      module = ''
+
+    rpcserver = self._GetRpcServer()
+    response = rpcserver.Send('/api/versions/delete',
+                              app_id=self.options.app_id,
+                              version_match=self.options.version,
+                              module=module)
+
+    print >> self.out_fh, response
+
   def _ParseAndValidateModuleYamls(self, yaml_paths):
     """Validates given yaml paths and returns the parsed yaml objects.
 
@@ -4263,9 +4363,30 @@
           long_desc="""
 The 'list_versions' command outputs the uploaded versions for each module of
 an application in YAML."""),
+
+      'delete_version': Action(
+          function='DeleteVersion',
+          usage='%prog [options] delete_version -A app_id -V version '
+          '[-M module]',
+          uses_basepath=False,
+          short_desc='Delete the specified version for an app.',
+          long_desc="""
+The 'delete_version' command deletes the specified version for the specified
+application."""),
   }
 
 
+def IsWarFileWithoutYaml(dir_path):
+  if os.path.isfile(os.path.join(dir_path, 'app.yaml')):
+    return False
+  web_inf = os.path.join(dir_path, 'WEB-INF')
+  if not os.path.isdir(web_inf):
+    return False
+  if not set(['appengine-web.xml', 'web.xml']).issubset(os.listdir(web_inf)):
+    return False
+  return True
+
+
 def main(argv):
   logging.basicConfig(format=('%(asctime)s %(levelname)s %(filename)s:'
                               '%(lineno)s %(message)s '))
diff --git a/google/appengine/tools/appengine_rpc_httplib2.py b/google/appengine/tools/appengine_rpc_httplib2.py
index 6998fd7..6e70c6c 100644
--- a/google/appengine/tools/appengine_rpc_httplib2.py
+++ b/google/appengine/tools/appengine_rpc_httplib2.py
@@ -227,8 +227,8 @@
                    url, headers,
                    self.debug_data and payload or payload and 'ELIDED' or '')
       try:
-        response_info, response = self.http.request(url, method, body=payload,
-                                                    headers=headers)
+        response_info, response = self.http.request(
+            url, method=method, body=payload, headers=headers)
       except client.AccessTokenRefreshError, e:
 
         logger.info('Got access token error', exc_info=1)
diff --git a/google/appengine/tools/backends_conversion.py b/google/appengine/tools/backends_conversion.py
index 95efd72..ed860b0 100644
--- a/google/appengine/tools/backends_conversion.py
+++ b/google/appengine/tools/backends_conversion.py
@@ -175,8 +175,7 @@
   _SetStart(result, backend_entry)
   _SetModule(result, backend_entry)
   _SetClass(result, backend_entry)
-  _SetInstances(result, backend_entry)
-  _SetDynamic(result, backend_entry)
+  _SetScalingType(result, backend_entry)
   return result
 
 
@@ -308,11 +307,11 @@
     target.instance_class = curr_class
 
 
-def _SetInstances(target, backend_entry):
-  """Sets number of instances for module if defined in backend.
+def _SetManualScaling(target, backend_entry):
+  """Sets scaling type to manual with specified number of instances.
 
-  If not defined in backend does nothing. Otherwise, sets the manual scaling
-  field to use the number of instances specified.
+  If instances is not defined in the backend, does nothing. Otherwise, sets
+  the manual scaling field to use the number of instances specified.
 
   Args:
     target: A appinfo.AppInfoExternal object. Contains parsed app.yaml augmented
@@ -325,26 +324,23 @@
     target.manual_scaling = appinfo.ManualScaling(instances=instances)
 
 
-def _SetDynamic(target, backend_entry):
-  """Sets basic scaling if backend is dynamic.
+def _GetInstances(name):
+  """Gets a positive number of instances from the user.
 
-  If dynamic not set on the backend, does nothing. Otherwise, sets the
-  basic scaling field to use the number of instances provided via raw_input.
+  Uses the DYNAMIC_PROMPT_TEXT to prompt the user. Accepts no
+  input to mean the default value of 1.
 
   Args:
-    target: A appinfo.AppInfoExternal object. Contains parsed app.yaml augmented
-      by current backend info.
-    backend_entry: A backendinfo.BackendEntry object. Contains a parsed backend
-      definition from backends.yaml.
-  """
-  if not backend_entry.dynamic:
-    return
+    name: String, name of module.
 
-  prompt = DYNAMIC_PROMPT_TEXT % (backend_entry.name,)
+  Returns:
+    Integer parsed from user input, 1 if empty input or None if the input was
+      not a positive integer.
+  """
+  prompt = DYNAMIC_PROMPT_TEXT % (name,)
   result = raw_input(prompt).strip()
   if result == '':
-    target.basic_scaling = appinfo.BasicScaling(max_instances=1)
-    return
+    return 1
 
   max_instances = -1
   try:
@@ -356,7 +352,37 @@
     print 'Invalid max_instances value: %r' % (result,)
     return
 
-  target.basic_scaling = appinfo.BasicScaling(max_instances=max_instances)
+  return max_instances
+
+
+def _SetScalingType(target, backend_entry):
+  """Sets the scaling type of the modules based on the backend.
+
+  If dynamic, sets scaling type to Basic and passes the number of instances if
+  set in the backends config. If not dynamic but instances is set, calls
+  _SetManualScaling. If neither dynamic nor instances is set, does nothing.
+
+  Args:
+    target: A appinfo.AppInfoExternal object. Contains parsed app.yaml augmented
+      by current backend info.
+    backend_entry: A backendinfo.BackendEntry object. Contains a parsed backend
+      definition from backends.yaml.
+  """
+  if not (backend_entry.dynamic or backend_entry.instances):
+    return
+
+
+  if not backend_entry.dynamic:
+    _SetManualScaling(target, backend_entry)
+    return
+
+  if backend_entry.instances:
+    max_instances = backend_entry.instances
+  else:
+    max_instances = _GetInstances(backend_entry.name)
+
+  if max_instances:
+    target.basic_scaling = appinfo.BasicScaling(max_instances=max_instances)
 
 
 def MakeParser(prog):
diff --git a/google/appengine/tools/bulkloader.py b/google/appengine/tools/bulkloader.py
index 8d3f1f8..ce67163 100644
--- a/google/appengine/tools/bulkloader.py
+++ b/google/appengine/tools/bulkloader.py
@@ -1374,17 +1374,41 @@
     ReserveKeys(keys)
 
   def GetSchemaKinds(self):
-    """Returns the list of kinds for this app."""
-    global_stat = stats.GlobalStat.all().get()
+    """Returns the list of kinds for this app.
+
+    There can be 3 possible cases using namespaces:
+      a.) No namespace specified and Datastore has only default namespace ->
+          Query GlobalStat and KindStat.
+      b.) No namespace specified but Datastore has multiple namespaces ->
+          Query NamespaceGlobalStat and NamespaceKindStat.
+      c.) Namespace specified and Datastore has multiple namespaces ->
+          Query NamespaceGlobalStat and NamespaceKindStat.
+
+    Returns:
+      A list of kinds.
+    """
+    namespaces = False
+
+    if (namespace_manager.get_namespace() or
+        stats.NamespaceStat.all().count() > 1):
+      namespaces = True
+
+    if namespaces:
+      global_kind = stats.NamespaceGlobalStat
+    else:
+      global_kind = stats.GlobalStat
+
+    kinds_kind = stats.NamespaceKindStat if namespaces else stats.KindStat
+
+    global_stat = global_kind.all().get()
     if not global_stat:
       raise KindStatError()
     timestamp = global_stat.timestamp
-    kind_stat = stats.KindStat.all().filter(
+    kind_stat = kinds_kind.all().filter(
         "timestamp =", timestamp).fetch(1000)
     kind_list = [stat.kind_name for stat in kind_stat
                  if stat.kind_name and not stat.kind_name.startswith('__')]
-    kind_set = set(kind_list)
-    return list(kind_set)
+    return list(set(kind_list))
 
   def EncodeContent(self, rows, loader=None):
     """Encodes row data to the wire format.
@@ -3779,7 +3803,7 @@
       continue
     option = option[2:]
     if option in DEPRECATED_OPTIONS:
-      print >>sys.stderr, ('--%s is deprecated, please use --%s.' %
+      print >> sys.stderr, ('--%s is deprecated, please use --%s.' %
                            (option, DEPRECATED_OPTIONS[option]))
       option = DEPRECATED_OPTIONS[option]
 
@@ -3885,7 +3909,7 @@
 
       m = re.search(r"[^']*'([^']*)'.*", str(e))
       if m.groups() and m.group(1) == 'Loader':
-        print >>sys.stderr, """
+        print >> sys.stderr, """
 The config file format has changed and you appear to be using an old-style
 config file.  Please make the following changes:
 
@@ -3916,7 +3940,7 @@
           hasattr(bulkloader_config, 'bulkloader') and
           isinstance(e, bulkloader_config.bulkloader.NameClashError)):
         print >> sys.stderr, (
-            'Found both %s and %s while aliasing old names on %s.'%
+            'Found both %s and %s while aliasing old names on %s.' %
             (e.old_name, e.new_name, e.klass))
         exit_fn(1)
       else:
@@ -3936,7 +3960,7 @@
   if name in kwargs:
     return kwargs[name]
   else:
-    print >>sys.stderr, '%s argument required' % name
+    print >> sys.stderr, '%s argument required' % name
     die_fn()
 
 
@@ -4061,7 +4085,7 @@
 
 
   if errors:
-    print >>sys.stderr, '\n'.join(errors)
+    print >> sys.stderr, '\n'.join(errors)
     die_fn()
 
   return arg_dict
@@ -4406,7 +4430,7 @@
             for (key, value) in arg_dict.iteritems()
             if value is REQUIRED_OPTION]
   if errors:
-    print >>sys.stderr, '\n'.join(errors)
+    print >> sys.stderr, '\n'.join(errors)
     PrintUsageExit(1)
 
   SetupLogging(arg_dict)
diff --git a/google/appengine/tools/dev-channel-js.js b/google/appengine/tools/dev-channel-js.js
index 66c6ab8..6eaac9f 100644
--- a/google/appengine/tools/dev-channel-js.js
+++ b/google/appengine/tools/dev-channel-js.js
@@ -259,8 +259,8 @@
 goog.partial = function(fn, var_args) {
   var args = Array.prototype.slice.call(arguments, 1);
   return function() {
-    var newArgs = Array.prototype.slice.call(arguments);
-    newArgs.unshift.apply(newArgs, args);
+    var newArgs = args.slice();
+    newArgs.push.apply(newArgs, arguments);
     return fn.apply(this, newArgs)
   }
 };
@@ -873,20 +873,22 @@
   }
   return res
 };
-goog.array.reduce = function(arr, f, val$$0, opt_obj) {
-  if(arr.reduce) {
-    return opt_obj ? arr.reduce(goog.bind(f, opt_obj), val$$0) : arr.reduce(f, val$$0)
-  }
+goog.array.reduce = goog.NATIVE_ARRAY_PROTOTYPES && goog.array.ARRAY_PROTOTYPE_.reduce ? function(arr, f, val, opt_obj) {
+  goog.asserts.assert(null != arr.length);
+  opt_obj && (f = goog.bind(f, opt_obj));
+  return goog.array.ARRAY_PROTOTYPE_.reduce.call(arr, f, val)
+} : function(arr, f, val$$0, opt_obj) {
   var rval = val$$0;
   goog.array.forEach(arr, function(val, index) {
     rval = f.call(opt_obj, rval, val, index, arr)
   });
   return rval
 };
-goog.array.reduceRight = function(arr, f, val$$0, opt_obj) {
-  if(arr.reduceRight) {
-    return opt_obj ? arr.reduceRight(goog.bind(f, opt_obj), val$$0) : arr.reduceRight(f, val$$0)
-  }
+goog.array.reduceRight = goog.NATIVE_ARRAY_PROTOTYPES && goog.array.ARRAY_PROTOTYPE_.reduceRight ? function(arr, f, val, opt_obj) {
+  goog.asserts.assert(null != arr.length);
+  opt_obj && (f = goog.bind(f, opt_obj));
+  return goog.array.ARRAY_PROTOTYPE_.reduceRight.call(arr, f, val)
+} : function(arr, f, val$$0, opt_obj) {
   var rval = val$$0;
   goog.array.forEachRight(arr, function(val, index) {
     rval = f.call(opt_obj, rval, val, index, arr)
@@ -1194,6 +1196,103 @@
     arr[j] = tmp
   }
 };
+goog.functions = {};
+goog.functions.constant = function(retValue) {
+  return function() {
+    return retValue
+  }
+};
+goog.functions.FALSE = goog.functions.constant(!1);
+goog.functions.TRUE = goog.functions.constant(!0);
+goog.functions.NULL = goog.functions.constant(null);
+goog.functions.identity = function(opt_returnValue) {
+  return opt_returnValue
+};
+goog.functions.error = function(message) {
+  return function() {
+    throw Error(message);
+  }
+};
+goog.functions.fail = function(err) {
+  return function() {
+    throw err;
+  }
+};
+goog.functions.lock = function(f, opt_numArgs) {
+  opt_numArgs = opt_numArgs || 0;
+  return function() {
+    return f.apply(this, Array.prototype.slice.call(arguments, 0, opt_numArgs))
+  }
+};
+goog.functions.withReturnValue = function(f, retValue) {
+  return goog.functions.sequence(f, goog.functions.constant(retValue))
+};
+goog.functions.compose = function(fn, var_args) {
+  var functions = arguments, length = functions.length;
+  return function() {
+    var result;
+    length && (result = functions[length - 1].apply(this, arguments));
+    for(var i = length - 2;0 <= i;i--) {
+      result = functions[i].call(this, result)
+    }
+    return result
+  }
+};
+goog.functions.sequence = function(var_args) {
+  var functions = arguments, length = functions.length;
+  return function() {
+    for(var result, i = 0;i < length;i++) {
+      result = functions[i].apply(this, arguments)
+    }
+    return result
+  }
+};
+goog.functions.and = function(var_args) {
+  var functions = arguments, length = functions.length;
+  return function() {
+    for(var i = 0;i < length;i++) {
+      if(!functions[i].apply(this, arguments)) {
+        return!1
+      }
+    }
+    return!0
+  }
+};
+goog.functions.or = function(var_args) {
+  var functions = arguments, length = functions.length;
+  return function() {
+    for(var i = 0;i < length;i++) {
+      if(functions[i].apply(this, arguments)) {
+        return!0
+      }
+    }
+    return!1
+  }
+};
+goog.functions.not = function(f) {
+  return function() {
+    return!f.apply(this, arguments)
+  }
+};
+goog.functions.create = function(constructor, var_args) {
+  var temp = function() {
+  };
+  temp.prototype = constructor.prototype;
+  var obj = new temp;
+  constructor.apply(obj, Array.prototype.slice.call(arguments, 1));
+  return obj
+};
+goog.functions.CACHE_RETURN_VALUE = !0;
+goog.functions.cacheReturnValue = function(fn) {
+  var called = !1, value;
+  return function() {
+    if(!goog.functions.CACHE_RETURN_VALUE) {
+      return fn()
+    }
+    called || (value = fn(), called = !0);
+    return value
+  }
+};
 goog.math = {};
 goog.math.randomInt = function(a) {
   return Math.floor(Math.random() * a)
@@ -1663,7 +1762,7 @@
   if(goog.userAgent.OPERA && goog.global.opera) {
     var operaVersion = goog.global.opera.version, version = "function" == typeof operaVersion ? operaVersion() : operaVersion
   }else {
-    if(goog.userAgent.GECKO ? re = /rv\:([^\);]+)(\)|;)/ : goog.userAgent.IE ? re = /\b(?:MSIE|rv)\s+([^\);]+)(\)|;)/ : goog.userAgent.WEBKIT && (re = /WebKit\/(\S+)/), re) {
+    if(goog.userAgent.GECKO ? re = /rv\:([^\);]+)(\)|;)/ : goog.userAgent.IE ? re = /\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/ : goog.userAgent.WEBKIT && (re = /WebKit\/(\S+)/), re) {
       var arr = re.exec(goog.userAgent.getUserAgentString()), version = arr ? arr[1] : ""
     }
   }
@@ -1757,6 +1856,7 @@
   goog.dom.classes.enable(element, className, add);
   return add
 };
+goog.dom.NodeType = {ELEMENT:1, ATTRIBUTE:2, TEXT:3, CDATA_SECTION:4, ENTITY_REFERENCE:5, ENTITY:6, PROCESSING_INSTRUCTION:7, COMMENT:8, DOCUMENT:9, DOCUMENT_TYPE:10, DOCUMENT_FRAGMENT:11, NOTATION:12};
 goog.dom.TagName = {A:"A", ABBR:"ABBR", ACRONYM:"ACRONYM", ADDRESS:"ADDRESS", APPLET:"APPLET", AREA:"AREA", ARTICLE:"ARTICLE", ASIDE:"ASIDE", AUDIO:"AUDIO", B:"B", BASE:"BASE", BASEFONT:"BASEFONT", BDI:"BDI", BDO:"BDO", BIG:"BIG", BLOCKQUOTE:"BLOCKQUOTE", BODY:"BODY", BR:"BR", BUTTON:"BUTTON", CANVAS:"CANVAS", CAPTION:"CAPTION", CENTER:"CENTER", CITE:"CITE", CODE:"CODE", COL:"COL", COLGROUP:"COLGROUP", COMMAND:"COMMAND", DATA:"DATA", DATALIST:"DATALIST", DD:"DD", DEL:"DEL", DETAILS:"DETAILS", DFN:"DFN", 
 DIALOG:"DIALOG", DIR:"DIR", DIV:"DIV", DL:"DL", DT:"DT", EM:"EM", EMBED:"EMBED", FIELDSET:"FIELDSET", FIGCAPTION:"FIGCAPTION", FIGURE:"FIGURE", FONT:"FONT", FOOTER:"FOOTER", FORM:"FORM", FRAME:"FRAME", FRAMESET:"FRAMESET", H1:"H1", H2:"H2", H3:"H3", H4:"H4", H5:"H5", H6:"H6", HEAD:"HEAD", HEADER:"HEADER", HGROUP:"HGROUP", HR:"HR", HTML:"HTML", I:"I", IFRAME:"IFRAME", IMG:"IMG", INPUT:"INPUT", INS:"INS", ISINDEX:"ISINDEX", KBD:"KBD", KEYGEN:"KEYGEN", LABEL:"LABEL", LEGEND:"LEGEND", LI:"LI", LINK:"LINK", 
 MAP:"MAP", MARK:"MARK", MATH:"MATH", MENU:"MENU", META:"META", METER:"METER", NAV:"NAV", NOFRAMES:"NOFRAMES", NOSCRIPT:"NOSCRIPT", OBJECT:"OBJECT", OL:"OL", OPTGROUP:"OPTGROUP", OPTION:"OPTION", OUTPUT:"OUTPUT", P:"P", PARAM:"PARAM", PRE:"PRE", PROGRESS:"PROGRESS", Q:"Q", RP:"RP", RT:"RT", RUBY:"RUBY", S:"S", SAMP:"SAMP", SCRIPT:"SCRIPT", SECTION:"SECTION", SELECT:"SELECT", SMALL:"SMALL", SOURCE:"SOURCE", SPAN:"SPAN", STRIKE:"STRIKE", STRONG:"STRONG", STYLE:"STYLE", SUB:"SUB", SUMMARY:"SUMMARY", 
@@ -1764,7 +1864,6 @@
 goog.dom.ASSUME_QUIRKS_MODE = !1;
 goog.dom.ASSUME_STANDARDS_MODE = !1;
 goog.dom.COMPAT_MODE_KNOWN_ = goog.dom.ASSUME_QUIRKS_MODE || goog.dom.ASSUME_STANDARDS_MODE;
-goog.dom.NodeType = {ELEMENT:1, ATTRIBUTE:2, TEXT:3, CDATA_SECTION:4, ENTITY_REFERENCE:5, ENTITY:6, PROCESSING_INSTRUCTION:7, COMMENT:8, DOCUMENT:9, DOCUMENT_TYPE:10, DOCUMENT_FRAGMENT:11, NOTATION:12};
 goog.dom.getDomHelper = function(opt_element) {
   return opt_element ? new goog.dom.DomHelper(goog.dom.getOwnerDocument(opt_element)) : goog.dom.defaultDomHelper_ || (goog.dom.defaultDomHelper_ = new goog.dom.DomHelper)
 };
@@ -1772,7 +1871,19 @@
   return document
 };
 goog.dom.getElement = function(element) {
-  return goog.isString(element) ? document.getElementById(element) : element
+  return goog.dom.getElementHelper_(document, element)
+};
+goog.dom.getElementHelper_ = function(doc, element) {
+  return goog.isString(element) ? doc.getElementById(element) : element
+};
+goog.dom.getRequiredElement = function(id) {
+  return goog.dom.getRequiredElementHelper_(document, id)
+};
+goog.dom.getRequiredElementHelper_ = function(doc, id) {
+  goog.asserts.assertString(id);
+  var element = goog.dom.getElement(id);
+  goog.asserts.assert(element, "No element found with id: " + id);
+  return element
 };
 goog.dom.$ = goog.dom.getElement;
 goog.dom.getElementsByTagNameAndClass = function(opt_tag, opt_class, opt_el) {
@@ -2210,7 +2321,7 @@
   return doc
 };
 goog.dom.getFrameContentWindow = function(frame) {
-  return frame.contentWindow || goog.dom.getWindow_(goog.dom.getFrameContentDocument(frame))
+  return frame.contentWindow || goog.dom.getWindow(goog.dom.getFrameContentDocument(frame))
 };
 goog.dom.setTextContent = function(element, text) {
   if("textContent" in element) {
@@ -2376,6 +2487,14 @@
   }
   return null
 };
+goog.dom.getPixelRatio = goog.functions.cacheReturnValue(function() {
+  var win = goog.dom.getWindow(), isFirefoxMobile = goog.userAgent.GECKO && goog.userAgent.MOBILE;
+  return goog.isDef(win.devicePixelRatio) && !isFirefoxMobile ? win.devicePixelRatio : win.matchMedia ? goog.dom.matchesPixelRatio_(0.75) || goog.dom.matchesPixelRatio_(1.5) || goog.dom.matchesPixelRatio_(2) || goog.dom.matchesPixelRatio_(3) || 1 : 1
+});
+goog.dom.matchesPixelRatio_ = function(pixelRatio) {
+  var win = goog.dom.getWindow(), query = "(-webkit-min-device-pixel-ratio: " + pixelRatio + "),(min--moz-device-pixel-ratio: " + pixelRatio + "),(min-resolution: " + pixelRatio + "dppx)";
+  return win.matchMedia(query).matches ? pixelRatio : 0
+};
 goog.dom.DomHelper = function(opt_document) {
   this.document_ = opt_document || goog.global.document || document
 };
@@ -2384,7 +2503,10 @@
   return this.document_
 };
 goog.dom.DomHelper.prototype.getElement = function(element) {
-  return goog.isString(element) ? this.document_.getElementById(element) : element
+  return goog.dom.getElementHelper_(this.document_, element)
+};
+goog.dom.DomHelper.prototype.getRequiredElement = function(id) {
+  return goog.dom.getRequiredElementHelper_(this.document_, id)
 };
 goog.dom.DomHelper.prototype.$ = goog.dom.DomHelper.prototype.getElement;
 goog.dom.DomHelper.prototype.getElementsByTagNameAndClass = function(opt_tag, opt_class, opt_el) {
@@ -2606,11 +2728,14 @@
 goog.events.BrowserFeature = {HAS_W3C_BUTTON:!goog.userAgent.IE || goog.userAgent.isDocumentModeOrHigher(9), HAS_W3C_EVENT_SUPPORT:!goog.userAgent.IE || goog.userAgent.isDocumentModeOrHigher(9), SET_KEY_CODE_TO_PREVENT_DEFAULT:goog.userAgent.IE && !goog.userAgent.isVersionOrHigher("9"), HAS_NAVIGATOR_ONLINE_PROPERTY:!goog.userAgent.WEBKIT || goog.userAgent.isVersionOrHigher("528"), HAS_HTML5_NETWORK_EVENT_SUPPORT:goog.userAgent.GECKO && goog.userAgent.isVersionOrHigher("1.9b") || goog.userAgent.IE && 
 goog.userAgent.isVersionOrHigher("8") || goog.userAgent.OPERA && goog.userAgent.isVersionOrHigher("9.5") || goog.userAgent.WEBKIT && goog.userAgent.isVersionOrHigher("528"), HTML5_NETWORK_EVENTS_FIRE_ON_BODY:goog.userAgent.GECKO && !goog.userAgent.isVersionOrHigher("8") || goog.userAgent.IE && !goog.userAgent.isVersionOrHigher("9"), TOUCH_ENABLED:"ontouchstart" in goog.global || !!(goog.global.document && document.documentElement && "ontouchstart" in document.documentElement) || !(!goog.global.navigator || 
 !goog.global.navigator.msMaxTouchPoints)};
+goog.events.getVendorPrefixedName_ = function(eventName) {
+  return goog.userAgent.WEBKIT ? "webkit" + eventName : goog.userAgent.OPERA ? "o" + eventName.toLowerCase() : eventName.toLowerCase()
+};
 goog.events.EventType = {CLICK:"click", DBLCLICK:"dblclick", MOUSEDOWN:"mousedown", MOUSEUP:"mouseup", MOUSEOVER:"mouseover", MOUSEOUT:"mouseout", MOUSEMOVE:"mousemove", SELECTSTART:"selectstart", KEYPRESS:"keypress", KEYDOWN:"keydown", KEYUP:"keyup", BLUR:"blur", FOCUS:"focus", DEACTIVATE:"deactivate", FOCUSIN:goog.userAgent.IE ? "focusin" : "DOMFocusIn", FOCUSOUT:goog.userAgent.IE ? "focusout" : "DOMFocusOut", CHANGE:"change", SELECT:"select", SUBMIT:"submit", INPUT:"input", PROPERTYCHANGE:"propertychange", 
-DRAGSTART:"dragstart", DRAG:"drag", DRAGENTER:"dragenter", DRAGOVER:"dragover", DRAGLEAVE:"dragleave", DROP:"drop", DRAGEND:"dragend", TOUCHSTART:"touchstart", TOUCHMOVE:"touchmove", TOUCHEND:"touchend", TOUCHCANCEL:"touchcancel", BEFOREUNLOAD:"beforeunload", CONSOLEMESSAGE:"consolemessage", CONTEXTMENU:"contextmenu", DOMCONTENTLOADED:"DOMContentLoaded", ERROR:"error", HELP:"help", LOAD:"load", LOSECAPTURE:"losecapture", READYSTATECHANGE:"readystatechange", RESIZE:"resize", SCROLL:"scroll", UNLOAD:"unload", 
-HASHCHANGE:"hashchange", PAGEHIDE:"pagehide", PAGESHOW:"pageshow", POPSTATE:"popstate", COPY:"copy", PASTE:"paste", CUT:"cut", BEFORECOPY:"beforecopy", BEFORECUT:"beforecut", BEFOREPASTE:"beforepaste", ONLINE:"online", OFFLINE:"offline", MESSAGE:"message", CONNECT:"connect", TRANSITIONEND:goog.userAgent.WEBKIT ? "webkitTransitionEnd" : goog.userAgent.OPERA ? "oTransitionEnd" : "transitionend", MSGESTURECHANGE:"MSGestureChange", MSGESTUREEND:"MSGestureEnd", MSGESTUREHOLD:"MSGestureHold", MSGESTURESTART:"MSGestureStart", 
-MSGESTURETAP:"MSGestureTap", MSGOTPOINTERCAPTURE:"MSGotPointerCapture", MSINERTIASTART:"MSInertiaStart", MSLOSTPOINTERCAPTURE:"MSLostPointerCapture", MSPOINTERCANCEL:"MSPointerCancel", MSPOINTERDOWN:"MSPointerDown", MSPOINTERMOVE:"MSPointerMove", MSPOINTEROVER:"MSPointerOver", MSPOINTEROUT:"MSPointerOut", MSPOINTERUP:"MSPointerUp", TEXTINPUT:"textinput", COMPOSITIONSTART:"compositionstart", COMPOSITIONUPDATE:"compositionupdate", COMPOSITIONEND:"compositionend", EXIT:"exit", LOADABORT:"loadabort", 
-LOADCOMMIT:"loadcommit", LOADREDIRECT:"loadredirect", LOADSTART:"loadstart", LOADSTOP:"loadstop", RESPONSIVE:"responsive", SIZECHANGED:"sizechanged", UNRESPONSIVE:"unresponsive"};
+DRAGSTART:"dragstart", DRAG:"drag", DRAGENTER:"dragenter", DRAGOVER:"dragover", DRAGLEAVE:"dragleave", DROP:"drop", DRAGEND:"dragend", TOUCHSTART:"touchstart", TOUCHMOVE:"touchmove", TOUCHEND:"touchend", TOUCHCANCEL:"touchcancel", BEFOREUNLOAD:"beforeunload", CONSOLEMESSAGE:"consolemessage", CONTEXTMENU:"contextmenu", DOMCONTENTLOADED:"DOMContentLoaded", ERROR:"error", HELP:"help", LOAD:"load", LOSECAPTURE:"losecapture", ORIENTATIONCHANGE:"orientationchange", READYSTATECHANGE:"readystatechange", 
+RESIZE:"resize", SCROLL:"scroll", UNLOAD:"unload", HASHCHANGE:"hashchange", PAGEHIDE:"pagehide", PAGESHOW:"pageshow", POPSTATE:"popstate", COPY:"copy", PASTE:"paste", CUT:"cut", BEFORECOPY:"beforecopy", BEFORECUT:"beforecut", BEFOREPASTE:"beforepaste", ONLINE:"online", OFFLINE:"offline", MESSAGE:"message", CONNECT:"connect", ANIMATIONSTART:goog.events.getVendorPrefixedName_("AnimationStart"), ANIMATIONEND:goog.events.getVendorPrefixedName_("AnimationEnd"), ANIMATIONITERATION:goog.events.getVendorPrefixedName_("AnimationIteration"), 
+TRANSITIONEND:goog.events.getVendorPrefixedName_("TransitionEnd"), MSGESTURECHANGE:"MSGestureChange", MSGESTUREEND:"MSGestureEnd", MSGESTUREHOLD:"MSGestureHold", MSGESTURESTART:"MSGestureStart", MSGESTURETAP:"MSGestureTap", MSGOTPOINTERCAPTURE:"MSGotPointerCapture", MSINERTIASTART:"MSInertiaStart", MSLOSTPOINTERCAPTURE:"MSLostPointerCapture", MSPOINTERCANCEL:"MSPointerCancel", MSPOINTERDOWN:"MSPointerDown", MSPOINTERMOVE:"MSPointerMove", MSPOINTEROVER:"MSPointerOver", MSPOINTEROUT:"MSPointerOut", 
+MSPOINTERUP:"MSPointerUp", TEXTINPUT:"textinput", COMPOSITIONSTART:"compositionstart", COMPOSITIONUPDATE:"compositionupdate", COMPOSITIONEND:"compositionend", EXIT:"exit", LOADABORT:"loadabort", LOADCOMMIT:"loadcommit", LOADREDIRECT:"loadredirect", LOADSTART:"loadstart", LOADSTOP:"loadstop", RESPONSIVE:"responsive", SIZECHANGED:"sizechanged", UNRESPONSIVE:"unresponsive"};
 goog.events.BrowserEvent = function(opt_e, opt_currentTarget) {
   opt_e && this.init(opt_e, opt_currentTarget)
 };
@@ -2632,6 +2757,7 @@
 goog.events.BrowserEvent.prototype.altKey = !1;
 goog.events.BrowserEvent.prototype.shiftKey = !1;
 goog.events.BrowserEvent.prototype.metaKey = !1;
+goog.events.BrowserEvent.prototype.platformModifierKey = !1;
 goog.events.BrowserEvent.prototype.event_ = null;
 goog.events.BrowserEvent.prototype.init = function(e, opt_currentTarget) {
   var type = this.type = e.type;
@@ -2654,6 +2780,7 @@
   this.altKey = e.altKey;
   this.shiftKey = e.shiftKey;
   this.metaKey = e.metaKey;
+  this.platformModifierKey = goog.userAgent.MAC ? e.metaKey : e.ctrlKey;
   this.state = e.state;
   this.event_ = e;
   e.defaultPrevented && this.preventDefault();
@@ -2711,329 +2838,24 @@
   this.removed = !0;
   this.handler = this.src = this.proxy = this.listener = null
 };
-goog.events.listeners_ = {};
-goog.events.listenerTree_ = {};
-goog.events.sources_ = {};
-goog.events.onString_ = "on";
-goog.events.onStringMap_ = {};
-goog.events.listen = function(src, type, listener, opt_capt, opt_handler) {
-  if(goog.isArray(type)) {
-    for(var i = 0;i < type.length;i++) {
-      goog.events.listen(src, type[i], listener, opt_capt, opt_handler)
-    }
-    return null
-  }
-  listener = goog.events.wrapListener_(listener);
-  return goog.events.Listenable.isImplementedBy(src) ? src.listen(type, listener, opt_capt, opt_handler) : goog.events.listen_(src, type, listener, !1, opt_capt, opt_handler)
-};
-goog.events.listen_ = function(src, type, listener, callOnce, opt_capt, opt_handler) {
-  if(!type) {
-    throw Error("Invalid event type");
-  }
-  var capture = !!opt_capt, map = goog.events.listenerTree_;
-  type in map || (map[type] = {count_:0});
-  map = map[type];
-  capture in map || (map[capture] = {count_:0}, map.count_++);
-  var map = map[capture], srcUid = goog.getUid(src), listenerArray, listenerObj;
-  if(map[srcUid]) {
-    listenerArray = map[srcUid];
-    for(var i = 0;i < listenerArray.length;i++) {
-      if(listenerObj = listenerArray[i], listenerObj.listener == listener && listenerObj.handler == opt_handler) {
-        if(listenerObj.removed) {
-          break
-        }
-        callOnce || (listenerArray[i].callOnce = !1);
-        return listenerArray[i]
-      }
-    }
-  }else {
-    listenerArray = map[srcUid] = [], map.count_++
-  }
-  var proxy = goog.events.getProxy();
-  listenerObj = new goog.events.Listener(listener, proxy, src, type, capture, opt_handler);
-  listenerObj.callOnce = callOnce;
-  proxy.src = src;
-  proxy.listener = listenerObj;
-  listenerArray.push(listenerObj);
-  goog.events.sources_[srcUid] || (goog.events.sources_[srcUid] = []);
-  goog.events.sources_[srcUid].push(listenerObj);
-  src.addEventListener ? src.addEventListener(type, proxy, capture) : src.attachEvent(goog.events.getOnString_(type), proxy);
-  return goog.events.listeners_[listenerObj.key] = listenerObj
-};
-goog.events.getProxy = function() {
-  var proxyCallbackFunction = goog.events.handleBrowserEvent_, f = goog.events.BrowserFeature.HAS_W3C_EVENT_SUPPORT ? function(eventObject) {
-    return proxyCallbackFunction.call(f.src, f.listener, eventObject)
-  } : function(eventObject) {
-    var v = proxyCallbackFunction.call(f.src, f.listener, eventObject);
-    if(!v) {
-      return v
-    }
-  };
-  return f
-};
-goog.events.listenOnce = function(src, type, listener, opt_capt, opt_handler) {
-  if(goog.isArray(type)) {
-    for(var i = 0;i < type.length;i++) {
-      goog.events.listenOnce(src, type[i], listener, opt_capt, opt_handler)
-    }
-    return null
-  }
-  listener = goog.events.wrapListener_(listener);
-  return goog.events.Listenable.isImplementedBy(src) ? src.listenOnce(type, listener, opt_capt, opt_handler) : goog.events.listen_(src, type, listener, !0, opt_capt, opt_handler)
-};
-goog.events.listenWithWrapper = function(src, wrapper, listener, opt_capt, opt_handler) {
-  wrapper.listen(src, listener, opt_capt, opt_handler)
-};
-goog.events.unlisten = function(src, type, listener, opt_capt, opt_handler) {
-  if(goog.isArray(type)) {
-    for(var i = 0;i < type.length;i++) {
-      goog.events.unlisten(src, type[i], listener, opt_capt, opt_handler)
-    }
-    return null
-  }
-  listener = goog.events.wrapListener_(listener);
-  if(goog.events.Listenable.isImplementedBy(src)) {
-    return src.unlisten(type, listener, opt_capt, opt_handler)
-  }
-  var capture = !!opt_capt, listenerArray = goog.events.getListeners_(src, type, capture);
-  if(!listenerArray) {
-    return!1
-  }
-  for(i = 0;i < listenerArray.length;i++) {
-    if(listenerArray[i].listener == listener && listenerArray[i].capture == capture && listenerArray[i].handler == opt_handler) {
-      return goog.events.unlistenByKey(listenerArray[i])
-    }
-  }
-  return!1
-};
-goog.events.unlistenByKey = function(key) {
-  if(goog.isNumber(key)) {
-    return!1
-  }
-  var listener = key;
-  if(!listener || listener.removed) {
-    return!1
-  }
-  var src = listener.src;
-  if(goog.events.Listenable.isImplementedBy(src)) {
-    return src.unlistenByKey(listener)
-  }
-  var type = listener.type, proxy = listener.proxy, capture = listener.capture;
-  src.removeEventListener ? src.removeEventListener(type, proxy, capture) : src.detachEvent && src.detachEvent(goog.events.getOnString_(type), proxy);
-  var srcUid = goog.getUid(src);
-  if(goog.events.sources_[srcUid]) {
-    var sourcesArray = goog.events.sources_[srcUid];
-    goog.array.remove(sourcesArray, listener);
-    0 == sourcesArray.length && delete goog.events.sources_[srcUid]
-  }
-  listener.markAsRemoved();
-  var listenerArray = goog.events.listenerTree_[type][capture][srcUid];
-  listenerArray && (goog.array.remove(listenerArray, listener), 0 == listenerArray.length && (delete goog.events.listenerTree_[type][capture][srcUid], goog.events.listenerTree_[type][capture].count_--), 0 == goog.events.listenerTree_[type][capture].count_ && (delete goog.events.listenerTree_[type][capture], goog.events.listenerTree_[type].count_--), 0 == goog.events.listenerTree_[type].count_ && delete goog.events.listenerTree_[type]);
-  delete goog.events.listeners_[listener.key];
-  return!0
-};
-goog.events.unlistenWithWrapper = function(src, wrapper, listener, opt_capt, opt_handler) {
-  wrapper.unlisten(src, listener, opt_capt, opt_handler)
-};
-goog.events.removeAll = function(opt_obj, opt_type) {
-  if(opt_obj) {
-    if(goog.events.Listenable.isImplementedBy(opt_obj)) {
-      return opt_obj.removeAllListeners(opt_type)
-    }
-    var count = 0, noType = null == opt_type, srcUid = goog.getUid(opt_obj);
-    if(goog.events.sources_[srcUid]) {
-      for(var sourcesArray = goog.events.sources_[srcUid], i = sourcesArray.length - 1;0 <= i;i--) {
-        var listener = sourcesArray[i];
-        if(noType || opt_type == listener.type) {
-          goog.events.unlistenByKey(listener), count++
-        }
-      }
-    }
-    return count
-  }
-  return goog.events.removeAllNativeListeners()
-};
-goog.events.removeAllNativeListeners = function() {
-  var count = 0;
-  goog.object.forEach(goog.events.listeners_, function(listener) {
-    goog.events.unlistenByKey(listener);
-    count++
-  });
-  return count
-};
-goog.events.getListeners = function(obj, type, capture) {
-  return goog.events.Listenable.isImplementedBy(obj) ? obj.getListeners(type, capture) : goog.events.getListeners_(obj, type, capture) || []
-};
-goog.events.getListeners_ = function(obj, type, capture) {
-  var map = goog.events.listenerTree_;
-  if(type in map && (map = map[type], capture in map)) {
-    var map = map[capture], objUid = goog.getUid(obj);
-    if(map[objUid]) {
-      return map[objUid]
-    }
-  }
-  return null
-};
-goog.events.getListener = function(src, type, listener, opt_capt, opt_handler) {
-  var capture = !!opt_capt;
-  listener = goog.events.wrapListener_(listener);
-  if(goog.events.Listenable.isImplementedBy(src)) {
-    return src.getListener(type, listener, capture, opt_handler)
-  }
-  var listenerArray = goog.events.getListeners_(src, type, capture);
-  if(listenerArray) {
-    for(var i = 0;i < listenerArray.length;i++) {
-      if(!listenerArray[i].removed && listenerArray[i].listener == listener && listenerArray[i].capture == capture && listenerArray[i].handler == opt_handler) {
-        return listenerArray[i]
-      }
-    }
-  }
-  return null
-};
-goog.events.hasListener = function(obj, opt_type, opt_capture) {
-  if(goog.events.Listenable.isImplementedBy(obj)) {
-    return obj.hasListener(opt_type, opt_capture)
-  }
-  var objUid = goog.getUid(obj), listeners = goog.events.sources_[objUid];
-  if(listeners) {
-    var hasType = goog.isDef(opt_type), hasCapture = goog.isDef(opt_capture);
-    if(hasType && hasCapture) {
-      var map = goog.events.listenerTree_[opt_type];
-      return!!map && !!map[opt_capture] && objUid in map[opt_capture]
-    }
-    return hasType || hasCapture ? goog.array.some(listeners, function(listener) {
-      return hasType && listener.type == opt_type || hasCapture && listener.capture == opt_capture
-    }) : !0
-  }
-  return!1
-};
-goog.events.expose = function(e) {
-  var str = [], key;
-  for(key in e) {
-    e[key] && e[key].id ? str.push(key + " = " + e[key] + " (" + e[key].id + ")") : str.push(key + " = " + e[key])
-  }
-  return str.join("\n")
-};
-goog.events.getOnString_ = function(type) {
-  return type in goog.events.onStringMap_ ? goog.events.onStringMap_[type] : goog.events.onStringMap_[type] = goog.events.onString_ + type
-};
-goog.events.fireListeners = function(obj, type, capture, eventObject) {
-  if(goog.events.Listenable.isImplementedBy(obj)) {
-    return obj.fireListeners(type, capture, eventObject)
-  }
-  var map = goog.events.listenerTree_;
-  return type in map && (map = map[type], capture in map) ? goog.events.fireListeners_(map[capture], obj, type, capture, eventObject) : !0
-};
-goog.events.fireListeners_ = function(map, obj, type, capture, eventObject) {
-  var retval = 1, objUid = goog.getUid(obj);
-  if(map[objUid]) {
-    for(var listenerArray = goog.array.clone(map[objUid]), i = 0;i < listenerArray.length;i++) {
-      var listener = listenerArray[i];
-      listener && !listener.removed && (retval &= !1 !== goog.events.fireListener(listener, eventObject))
-    }
-  }
-  return Boolean(retval)
-};
-goog.events.fireListener = function(listener, eventObject) {
-  var listenerFn = listener.listener, listenerHandler = listener.handler || listener.src;
-  listener.callOnce && goog.events.unlistenByKey(listener);
-  return listenerFn.call(listenerHandler, eventObject)
-};
-goog.events.getTotalListenerCount = function() {
-  return goog.object.getCount(goog.events.listeners_)
-};
-goog.events.dispatchEvent = function(src, e) {
-  goog.asserts.assert(goog.events.Listenable.isImplementedBy(src), "Can not use goog.events.dispatchEvent with non-goog.events.Listenable instance.");
-  return src.dispatchEvent(e)
-};
-goog.events.protectBrowserEventEntryPoint = function(errorHandler) {
-  goog.events.handleBrowserEvent_ = errorHandler.protectEntryPoint(goog.events.handleBrowserEvent_)
-};
-goog.events.handleBrowserEvent_ = function(listener, opt_evt) {
-  if(listener.removed) {
-    return!0
-  }
-  var type = listener.type, map = goog.events.listenerTree_;
-  if(!(type in map)) {
-    return!0
-  }
-  var map = map[type], retval, targetsMap;
-  if(!goog.events.BrowserFeature.HAS_W3C_EVENT_SUPPORT) {
-    var ieEvent = opt_evt || goog.getObjectByName("window.event"), hasCapture = !0 in map, hasBubble = !1 in map;
-    if(hasCapture) {
-      if(goog.events.isMarkedIeEvent_(ieEvent)) {
-        return!0
-      }
-      goog.events.markIeEvent_(ieEvent)
-    }
-    var evt = new goog.events.BrowserEvent(ieEvent, this);
-    retval = !0;
-    try {
-      if(hasCapture) {
-        for(var ancestors = [], parent = evt.currentTarget;parent;parent = parent.parentNode) {
-          ancestors.push(parent)
-        }
-        targetsMap = map[!0];
-        for(var i = ancestors.length - 1;!evt.propagationStopped_ && 0 <= i;i--) {
-          evt.currentTarget = ancestors[i], retval &= goog.events.fireListeners_(targetsMap, ancestors[i], type, !0, evt)
-        }
-        if(hasBubble) {
-          for(targetsMap = map[!1], i = 0;!evt.propagationStopped_ && i < ancestors.length;i++) {
-            evt.currentTarget = ancestors[i], retval &= goog.events.fireListeners_(targetsMap, ancestors[i], type, !1, evt)
-          }
-        }
-      }else {
-        retval = goog.events.fireListener(listener, evt)
-      }
-    }finally {
-      ancestors && (ancestors.length = 0)
-    }
-    return retval
-  }
-  return goog.events.fireListener(listener, new goog.events.BrowserEvent(opt_evt, this))
-};
-goog.events.markIeEvent_ = function(e) {
-  var useReturnValue = !1;
-  if(0 == e.keyCode) {
-    try {
-      e.keyCode = -1;
-      return
-    }catch(ex) {
-      useReturnValue = !0
-    }
-  }
-  if(useReturnValue || void 0 == e.returnValue) {
-    e.returnValue = !0
-  }
-};
-goog.events.isMarkedIeEvent_ = function(e) {
-  return 0 > e.keyCode || void 0 != e.returnValue
-};
-goog.events.uniqueIdCounter_ = 0;
-goog.events.getUniqueId = function(identifier) {
-  return identifier + "_" + goog.events.uniqueIdCounter_++
-};
-goog.events.LISTENER_WRAPPER_PROP_ = "__closure_events_fn_" + (1E9 * Math.random() >>> 0);
-goog.events.wrapListener_ = function(listener) {
-  goog.asserts.assert(listener, "Listener can not be null.");
-  if(goog.isFunction(listener)) {
-    return listener
-  }
-  goog.asserts.assert(listener.handleEvent, "An object listener must have handleEvent method.");
-  return listener[goog.events.LISTENER_WRAPPER_PROP_] || (listener[goog.events.LISTENER_WRAPPER_PROP_] = function(e) {
-    return listener.handleEvent(e)
-  })
-};
-goog.debug.entryPointRegistry.register(function(transformer) {
-  goog.events.handleBrowserEvent_ = transformer(goog.events.handleBrowserEvent_)
-});
 goog.events.ListenerMap = function(src) {
   this.src = src;
-  this.listeners = {}
+  this.listeners = {};
+  this.typeCount_ = 0
+};
+goog.events.ListenerMap.prototype.getTypeCount = function() {
+  return this.typeCount_
+};
+goog.events.ListenerMap.prototype.getListenerCount = function() {
+  var count = 0, type;
+  for(type in this.listeners) {
+    count += this.listeners[type].length
+  }
+  return count
 };
 goog.events.ListenerMap.prototype.add = function(type, listener, callOnce, opt_useCapture, opt_listenerScope) {
   var listenerArray = this.listeners[type];
-  listenerArray || (listenerArray = this.listeners[type] = []);
+  listenerArray || (listenerArray = this.listeners[type] = [], this.typeCount_++);
   var listenerObj, index = goog.events.ListenerMap.findListenerIndex_(listenerArray, listener, opt_useCapture, opt_listenerScope);
   -1 < index ? (listenerObj = listenerArray[index], callOnce || (listenerObj.callOnce = !1)) : (listenerObj = new goog.events.Listener(listener, null, this.src, type, !!opt_useCapture, opt_listenerScope), listenerObj.callOnce = callOnce, listenerArray.push(listenerObj));
   return listenerObj
@@ -3047,7 +2869,7 @@
     var listenerObj = listenerArray[index];
     listenerObj.markAsRemoved();
     goog.array.removeAt(listenerArray, index);
-    0 == listenerArray.length && delete this.listeners[type];
+    0 == listenerArray.length && (delete this.listeners[type], this.typeCount_--);
     return!0
   }
   return!1
@@ -3058,7 +2880,7 @@
     return!1
   }
   var removed = goog.array.remove(this.listeners[type], listener);
-  removed && (listener.markAsRemoved(), 0 == this.listeners[type].length && delete this.listeners[type]);
+  removed && (listener.markAsRemoved(), 0 == this.listeners[type].length && (delete this.listeners[type], this.typeCount_--));
   return removed
 };
 goog.events.ListenerMap.prototype.removeAll = function(opt_type) {
@@ -3068,7 +2890,8 @@
       for(var listenerArray = this.listeners[type], i = 0;i < listenerArray.length;i++) {
         ++count, listenerArray[i].markAsRemoved()
       }
-      delete this.listeners[type]
+      delete this.listeners[type];
+      this.typeCount_--
     }
   }
   return count
@@ -3108,6 +2931,280 @@
   }
   return-1
 };
+goog.events.listeners_ = {};
+goog.events.listenerTree_ = {};
+goog.events.onString_ = "on";
+goog.events.onStringMap_ = {};
+goog.events.CaptureSimulationMode = {OFF_AND_FAIL:0, OFF_AND_SILENT:1, ON:2};
+goog.events.CAPTURE_SIMULATION_MODE = 2;
+goog.events.listen = function(src, type, listener, opt_capt, opt_handler) {
+  if(goog.isArray(type)) {
+    for(var i = 0;i < type.length;i++) {
+      goog.events.listen(src, type[i], listener, opt_capt, opt_handler)
+    }
+    return null
+  }
+  listener = goog.events.wrapListener_(listener);
+  return goog.events.Listenable.isImplementedBy(src) ? src.listen(type, listener, opt_capt, opt_handler) : goog.events.listen_(src, type, listener, !1, opt_capt, opt_handler)
+};
+goog.events.listen_ = function(src, type, listener, callOnce, opt_capt, opt_handler) {
+  if(!type) {
+    throw Error("Invalid event type");
+  }
+  var capture = !!opt_capt;
+  if(capture && !goog.events.BrowserFeature.HAS_W3C_EVENT_SUPPORT) {
+    if(goog.events.CAPTURE_SIMULATION_MODE == goog.events.CaptureSimulationMode.OFF_AND_FAIL) {
+      return goog.asserts.fail("Can not register capture listener in IE8-."), null
+    }
+    if(goog.events.CAPTURE_SIMULATION_MODE == goog.events.CaptureSimulationMode.OFF_AND_SILENT) {
+      return null
+    }
+  }
+  var srcUid = goog.getUid(src), listenerMap = goog.events.listenerTree_[srcUid];
+  listenerMap || (goog.events.listenerTree_[srcUid] = listenerMap = new goog.events.ListenerMap(src));
+  var listenerObj = listenerMap.add(type, listener, callOnce, opt_capt, opt_handler);
+  if(listenerObj.proxy) {
+    return listenerObj
+  }
+  var proxy = goog.events.getProxy();
+  listenerObj.proxy = proxy;
+  proxy.src = src;
+  proxy.listener = listenerObj;
+  src.addEventListener ? src.addEventListener(type, proxy, capture) : src.attachEvent(goog.events.getOnString_(type), proxy);
+  return goog.events.listeners_[listenerObj.key] = listenerObj
+};
+goog.events.getProxy = function() {
+  var proxyCallbackFunction = goog.events.handleBrowserEvent_, f = goog.events.BrowserFeature.HAS_W3C_EVENT_SUPPORT ? function(eventObject) {
+    return proxyCallbackFunction.call(f.src, f.listener, eventObject)
+  } : function(eventObject) {
+    var v = proxyCallbackFunction.call(f.src, f.listener, eventObject);
+    if(!v) {
+      return v
+    }
+  };
+  return f
+};
+goog.events.listenOnce = function(src, type, listener, opt_capt, opt_handler) {
+  if(goog.isArray(type)) {
+    for(var i = 0;i < type.length;i++) {
+      goog.events.listenOnce(src, type[i], listener, opt_capt, opt_handler)
+    }
+    return null
+  }
+  listener = goog.events.wrapListener_(listener);
+  return goog.events.Listenable.isImplementedBy(src) ? src.listenOnce(type, listener, opt_capt, opt_handler) : goog.events.listen_(src, type, listener, !0, opt_capt, opt_handler)
+};
+goog.events.listenWithWrapper = function(src, wrapper, listener, opt_capt, opt_handler) {
+  wrapper.listen(src, listener, opt_capt, opt_handler)
+};
+goog.events.unlisten = function(src, type, listener, opt_capt, opt_handler) {
+  if(goog.isArray(type)) {
+    for(var i = 0;i < type.length;i++) {
+      goog.events.unlisten(src, type[i], listener, opt_capt, opt_handler)
+    }
+    return null
+  }
+  listener = goog.events.wrapListener_(listener);
+  if(goog.events.Listenable.isImplementedBy(src)) {
+    return src.unlisten(type, listener, opt_capt, opt_handler)
+  }
+  if(!src) {
+    return!1
+  }
+  var capture = !!opt_capt, listenerMap = goog.events.getListenerMap_(src);
+  if(listenerMap) {
+    var listenerObj = listenerMap.getListener(type, listener, capture, opt_handler);
+    if(listenerObj) {
+      return goog.events.unlistenByKey(listenerObj)
+    }
+  }
+  return!1
+};
+goog.events.unlistenByKey = function(key) {
+  if(goog.isNumber(key)) {
+    return!1
+  }
+  var listener = key;
+  if(!listener || listener.removed) {
+    return!1
+  }
+  var src = listener.src;
+  if(goog.events.Listenable.isImplementedBy(src)) {
+    return src.unlistenByKey(listener)
+  }
+  var type = listener.type, proxy = listener.proxy;
+  src.removeEventListener ? src.removeEventListener(type, proxy, listener.capture) : src.detachEvent && src.detachEvent(goog.events.getOnString_(type), proxy);
+  var listenerMap = goog.events.getListenerMap_(src);
+  listenerMap ? (listenerMap.removeByKey(listener), 0 == listenerMap.getTypeCount() && (listenerMap.src = null, delete goog.events.listenerTree_[goog.getUid(src)])) : listener.markAsRemoved();
+  delete goog.events.listeners_[listener.key];
+  return!0
+};
+goog.events.unlistenWithWrapper = function(src, wrapper, listener, opt_capt, opt_handler) {
+  wrapper.unlisten(src, listener, opt_capt, opt_handler)
+};
+goog.events.removeAll = function(opt_obj, opt_type) {
+  return opt_obj ? goog.events.Listenable.isImplementedBy(opt_obj) ? opt_obj.removeAllListeners(opt_type) : goog.events.removeAll_(goog.getUid(opt_obj), opt_type) : goog.events.removeAllNativeListeners()
+};
+goog.events.removeAll_ = function(srcUid, opt_type) {
+  var listenerMap = goog.events.listenerTree_[srcUid];
+  if(!listenerMap) {
+    return 0
+  }
+  var count = 0, type;
+  for(type in listenerMap.listeners) {
+    if(!opt_type || type == opt_type) {
+      for(var listeners = goog.array.clone(listenerMap.listeners[type]), i = 0;i < listeners.length;++i) {
+        goog.events.unlistenByKey(listeners[i]) && ++count
+      }
+    }
+  }
+  return count
+};
+goog.events.removeAllNativeListeners = function() {
+  var count = 0, srcUid;
+  for(srcUid in goog.events.listenerTree_) {
+    count += goog.events.removeAll_(srcUid)
+  }
+  return count
+};
+goog.events.getListeners = function(obj, type, capture) {
+  if(goog.events.Listenable.isImplementedBy(obj)) {
+    return obj.getListeners(type, capture)
+  }
+  if(!obj) {
+    return[]
+  }
+  var listenerMap = goog.events.getListenerMap_(obj);
+  return listenerMap ? listenerMap.getListeners(type, capture) : []
+};
+goog.events.getListener = function(src, type, listener, opt_capt, opt_handler) {
+  listener = goog.events.wrapListener_(listener);
+  var capture = !!opt_capt;
+  if(goog.events.Listenable.isImplementedBy(src)) {
+    return src.getListener(type, listener, capture, opt_handler)
+  }
+  if(!src) {
+    return null
+  }
+  var listenerMap = goog.events.getListenerMap_(src);
+  return listenerMap ? listenerMap.getListener(type, listener, capture, opt_handler) : null
+};
+goog.events.hasListener = function(obj, opt_type, opt_capture) {
+  if(goog.events.Listenable.isImplementedBy(obj)) {
+    return obj.hasListener(opt_type, opt_capture)
+  }
+  var listenerMap = goog.events.getListenerMap_(obj);
+  return!!listenerMap && listenerMap.hasListener(opt_type, opt_capture)
+};
+goog.events.expose = function(e) {
+  var str = [], key;
+  for(key in e) {
+    e[key] && e[key].id ? str.push(key + " = " + e[key] + " (" + e[key].id + ")") : str.push(key + " = " + e[key])
+  }
+  return str.join("\n")
+};
+goog.events.getOnString_ = function(type) {
+  return type in goog.events.onStringMap_ ? goog.events.onStringMap_[type] : goog.events.onStringMap_[type] = goog.events.onString_ + type
+};
+goog.events.fireListeners = function(obj, type, capture, eventObject) {
+  return goog.events.Listenable.isImplementedBy(obj) ? obj.fireListeners(type, capture, eventObject) : goog.events.fireListeners_(obj, type, capture, eventObject)
+};
+goog.events.fireListeners_ = function(obj, type, capture, eventObject) {
+  var retval = 1, listenerMap = goog.events.getListenerMap_(obj);
+  if(listenerMap) {
+    var listenerArray = listenerMap.listeners[type];
+    if(listenerArray) {
+      for(var listenerArray = goog.array.clone(listenerArray), i = 0;i < listenerArray.length;i++) {
+        var listener = listenerArray[i];
+        listener && (listener.capture == capture && !listener.removed) && (retval &= !1 !== goog.events.fireListener(listener, eventObject))
+      }
+    }
+  }
+  return Boolean(retval)
+};
+goog.events.fireListener = function(listener, eventObject) {
+  var listenerFn = listener.listener, listenerHandler = listener.handler || listener.src;
+  listener.callOnce && goog.events.unlistenByKey(listener);
+  return listenerFn.call(listenerHandler, eventObject)
+};
+goog.events.getTotalListenerCount = function() {
+  var count = 0, srcUid;
+  for(srcUid in goog.events.listenerTree_) {
+    var listenerMap = goog.events.listenerTree_[srcUid], count = count + listenerMap.getListenerCount()
+  }
+  return count
+};
+goog.events.dispatchEvent = function(src, e) {
+  goog.asserts.assert(goog.events.Listenable.isImplementedBy(src), "Can not use goog.events.dispatchEvent with non-goog.events.Listenable instance.");
+  return src.dispatchEvent(e)
+};
+goog.events.protectBrowserEventEntryPoint = function(errorHandler) {
+  goog.events.handleBrowserEvent_ = errorHandler.protectEntryPoint(goog.events.handleBrowserEvent_)
+};
+goog.events.handleBrowserEvent_ = function(listener, opt_evt) {
+  if(listener.removed) {
+    return!0
+  }
+  if(!goog.events.BrowserFeature.HAS_W3C_EVENT_SUPPORT) {
+    var ieEvent = opt_evt || goog.getObjectByName("window.event"), evt = new goog.events.BrowserEvent(ieEvent, this), retval = !0;
+    if(goog.events.CAPTURE_SIMULATION_MODE == goog.events.CaptureSimulationMode.ON) {
+      if(!goog.events.isMarkedIeEvent_(ieEvent)) {
+        goog.events.markIeEvent_(ieEvent);
+        for(var ancestors = [], parent = evt.currentTarget;parent;parent = parent.parentNode) {
+          ancestors.push(parent)
+        }
+        for(var type = listener.type, i = ancestors.length - 1;!evt.propagationStopped_ && 0 <= i;i--) {
+          evt.currentTarget = ancestors[i], retval &= goog.events.fireListeners_(ancestors[i], type, !0, evt)
+        }
+        for(i = 0;!evt.propagationStopped_ && i < ancestors.length;i++) {
+          evt.currentTarget = ancestors[i], retval &= goog.events.fireListeners_(ancestors[i], type, !1, evt)
+        }
+      }
+    }else {
+      retval = goog.events.fireListener(listener, evt)
+    }
+    return retval
+  }
+  return goog.events.fireListener(listener, new goog.events.BrowserEvent(opt_evt, this))
+};
+goog.events.markIeEvent_ = function(e) {
+  var useReturnValue = !1;
+  if(0 == e.keyCode) {
+    try {
+      e.keyCode = -1;
+      return
+    }catch(ex) {
+      useReturnValue = !0
+    }
+  }
+  if(useReturnValue || void 0 == e.returnValue) {
+    e.returnValue = !0
+  }
+};
+goog.events.isMarkedIeEvent_ = function(e) {
+  return 0 > e.keyCode || void 0 != e.returnValue
+};
+goog.events.uniqueIdCounter_ = 0;
+goog.events.getUniqueId = function(identifier) {
+  return identifier + "_" + goog.events.uniqueIdCounter_++
+};
+goog.events.getListenerMap_ = function(src) {
+  return goog.hasUid(src) ? goog.events.listenerTree_[goog.getUid(src)] || null : null
+};
+goog.events.LISTENER_WRAPPER_PROP_ = "__closure_events_fn_" + (1E9 * Math.random() >>> 0);
+goog.events.wrapListener_ = function(listener) {
+  goog.asserts.assert(listener, "Listener can not be null.");
+  if(goog.isFunction(listener)) {
+    return listener
+  }
+  goog.asserts.assert(listener.handleEvent, "An object listener must have handleEvent method.");
+  return listener[goog.events.LISTENER_WRAPPER_PROP_] || (listener[goog.events.LISTENER_WRAPPER_PROP_] = function(e) {
+    return listener.handleEvent(e)
+  })
+};
+goog.debug.entryPointRegistry.register(function(transformer) {
+  goog.events.handleBrowserEvent_ = transformer(goog.events.handleBrowserEvent_)
+});
 goog.events.EventTarget = function() {
   goog.Disposable.call(this);
   this.eventTargetListeners_ = new goog.events.ListenerMap(this);
@@ -4147,12 +4244,13 @@
 goog.debug.LogRecord = function(level, msg, loggerName, opt_time, opt_sequenceNumber) {
   this.reset(level, msg, loggerName, opt_time, opt_sequenceNumber)
 };
+goog.debug.LogRecord.prototype.sequenceNumber_ = 0;
 goog.debug.LogRecord.prototype.exception_ = null;
 goog.debug.LogRecord.prototype.exceptionText_ = null;
 goog.debug.LogRecord.ENABLE_SEQUENCE_NUMBERS = !0;
 goog.debug.LogRecord.nextSequenceNumber_ = 0;
 goog.debug.LogRecord.prototype.reset = function(level, msg, loggerName, opt_time, opt_sequenceNumber) {
-  goog.debug.LogRecord.ENABLE_SEQUENCE_NUMBERS && ("number" == typeof opt_sequenceNumber || goog.debug.LogRecord.nextSequenceNumber_++);
+  goog.debug.LogRecord.ENABLE_SEQUENCE_NUMBERS && (this.sequenceNumber_ = "number" == typeof opt_sequenceNumber ? opt_sequenceNumber : goog.debug.LogRecord.nextSequenceNumber_++);
   opt_time || goog.now();
   this.level_ = level;
   this.msg_ = msg;
@@ -5329,7 +5427,7 @@
 goog.Uri.QueryData.prototype.getValues = function(opt_key) {
   this.ensureKeyMapInitialized_();
   var rv = [];
-  if(opt_key) {
+  if(goog.isString(opt_key)) {
     this.containsKey(opt_key) && (rv = goog.array.concat(rv, this.keyMap_.get(this.getKeyName_(opt_key))))
   }else {
     for(var values = this.keyMap_.getValues(), i = 0;i < values.length;i++) {
diff --git a/google/appengine/tools/devappserver2/api_server.py b/google/appengine/tools/devappserver2/api_server.py
index 1a86f59..74cec4d 100644
--- a/google/appengine/tools/devappserver2/api_server.py
+++ b/google/appengine/tools/devappserver2/api_server.py
@@ -162,14 +162,24 @@
       api_response = _execute_request(request).Encode()
       response.set_response(api_response)
     except Exception, e:
-      logging.debug('Exception while handling %s\n%s',
-                    request,
-                    traceback.format_exc())
-      response.set_exception(pickle.dumps(e))
       if isinstance(e, apiproxy_errors.ApplicationError):
+        level = logging.DEBUG
         application_error = response.mutable_application_error()
         application_error.set_code(e.application_error)
         application_error.set_detail(e.error_detail)
+        # TODO: is this necessary? Python remote stub ignores exception
+        # when application error is specified; do other runtimes use it?
+        response.set_exception(pickle.dumps(e))
+      else:
+        # If the runtime instance is not Python, it won't be able to unpickle
+        # the exception so use level that won't be ignored by default.
+        level = logging.ERROR
+        # Even if the runtime is Python, the exception may be unpicklable if
+        # it requires importing a class blocked by the sandbox so just send
+        # back the exception representation.
+        response.set_exception(pickle.dumps(RuntimeError(repr(e))))
+      logging.log(level, 'Exception while handling %s\n%s', request,
+                  traceback.format_exc())
     encoded_response = response.Encode()
     logging.debug('Handled %s.%s in %0.4f',
                   request.service_name(),
diff --git a/google/appengine/tools/devappserver2/api_server_test.py b/google/appengine/tools/devappserver2/api_server_test.py
index c5ce1bc..fe2635b 100644
--- a/google/appengine/tools/devappserver2/api_server_test.py
+++ b/google/appengine/tools/devappserver2/api_server_test.py
@@ -18,14 +18,19 @@
 
 
 import cStringIO
+import pickle
 import tempfile
 import unittest
 import urllib
 import wsgiref.util
 
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import urlfetch_service_pb
 from google.appengine.api import user_service_pb
 from google.appengine.datastore import datastore_stub_util
 from google.appengine.ext.remote_api import remote_api_pb
+from google.appengine.runtime import apiproxy_errors
 from google.appengine.tools.devappserver2 import api_server
 from google.appengine.tools.devappserver2 import wsgi_request_info
 from google.appengine.tools.devappserver2 import wsgi_test_utils
@@ -53,6 +58,17 @@
 request_data = wsgi_request_info.WSGIRequestInfo(None)
 
 
+class FakeURLFetchServiceStub(apiproxy_stub.APIProxyStub):
+  def __init__(self):
+    super(FakeURLFetchServiceStub, self).__init__('urlfetch')
+
+  def _Dynamic_Fetch(self, request, unused_response):
+    if request.url() == 'exception':
+      raise IOError('the remote error')
+    elif request.url() == 'application_error':
+      raise apiproxy_errors.ApplicationError(23, 'details')
+
+
 def setup_stubs():
   """Setup the API stubs. This can only be done once."""
   api_server.test_setup_stubs(
@@ -77,6 +93,7 @@
       taskqueue_default_http_server=TASKQUEUE_DEFAULT_HTTP_SERVER,
       user_login_url=USER_LOGIN_URL,
       user_logout_url=USER_LOGOUT_URL)
+  apiproxy_stub_map.apiproxy.ReplaceStub('urlfetch', FakeURLFetchServiceStub())
 
 
 class TestAPIServer(wsgi_test_utils.WSGITestCase):
@@ -91,25 +108,24 @@
   def tearDown(self):
     api_server.cleanup_stubs()
 
-  def test_user_api_call(self):
-    logout_response = user_service_pb.CreateLogoutURLRequest()
-    logout_response.set_destination_url(
-        USER_LOGOUT_URL % urllib.quote('http://machine:8080/crazy_logout'))
+  def _assert_remote_call(
+      self, expected_remote_response, stub_request, service, method):
+    """Test a call across the remote API to the API server.
 
-    expected_remote_response = remote_api_pb.Response()
-    expected_remote_response.set_response(logout_response.Encode())
-
-    logout_request = user_service_pb.CreateLogoutURLRequest()
-    logout_request.set_destination_url('/crazy_logout')
-
+    Args:
+      expected_remote_response: the remote response that is expected.
+      stub_request: the request protobuf that the stub expects.
+      service: the stub's service name.
+      method: which service method to call.
+    """
     request_environ = {'HTTP_HOST': 'machine:8080'}
     wsgiref.util.setup_testing_defaults(request_environ)
 
     with request_data.request(request_environ, None) as request_id:
       remote_request = remote_api_pb.Request()
-      remote_request.set_service_name('user')
-      remote_request.set_method('CreateLogoutURL')
-      remote_request.set_request(logout_request.Encode())
+      remote_request.set_service_name(service)
+      remote_request.set_method(method)
+      remote_request.set_request(stub_request.Encode())
       remote_request.set_request_id(request_id)
       remote_payload = remote_request.Encode()
 
@@ -124,6 +140,20 @@
                           self.server,
                           environ)
 
+  def test_user_api_call(self):
+    logout_response = user_service_pb.CreateLogoutURLResponse()
+    logout_response.set_logout_url(
+        USER_LOGOUT_URL % urllib.quote('http://machine:8080/crazy_logout'))
+
+    expected_remote_response = remote_api_pb.Response()
+    expected_remote_response.set_response(logout_response.Encode())
+
+    logout_request = user_service_pb.CreateLogoutURLRequest()
+    logout_request.set_destination_url('/crazy_logout')
+
+    self._assert_remote_call(
+        expected_remote_response, logout_request, 'user', 'CreateLogoutURL')
+
   def test_GET(self):
     environ = {'REQUEST_METHOD': 'GET',
                'QUERY_STRING': 'rtok=23'}
@@ -133,6 +163,33 @@
                         self.server,
                         environ)
 
+  def test_exception(self):
+    urlfetch_request = urlfetch_service_pb.URLFetchRequest()
+    urlfetch_request.set_url('exception')
+    urlfetch_request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
+
+    expected_remote_response = remote_api_pb.Response()
+    expected_remote_response.set_exception(pickle.dumps(
+        RuntimeError(repr(IOError('the remote error')))))
+
+    self._assert_remote_call(
+        expected_remote_response, urlfetch_request, 'urlfetch', 'Fetch')
+
+  def test_application_error(self):
+    urlfetch_request = urlfetch_service_pb.URLFetchRequest()
+    urlfetch_request.set_url('application_error')
+    urlfetch_request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
+
+    expected_remote_response = remote_api_pb.Response()
+    expected_remote_response.mutable_application_error().set_code(23)
+    expected_remote_response.mutable_application_error().set_detail('details')
+    expected_remote_response.set_exception(pickle.dumps(
+        apiproxy_errors.ApplicationError(23, 'details')))
+
+    self._assert_remote_call(
+        expected_remote_response, urlfetch_request, 'urlfetch', 'Fetch')
+
+
 if __name__ == '__main__':
   unittest.main()
 
diff --git a/google/appengine/tools/devappserver2/endpoints/endpoints_server_regtest.py b/google/appengine/tools/devappserver2/endpoints/endpoints_server_regtest.py
index 493b4dd..e1dadca 100644
--- a/google/appengine/tools/devappserver2/endpoints/endpoints_server_regtest.py
+++ b/google/appengine/tools/devappserver2/endpoints/endpoints_server_regtest.py
@@ -17,6 +17,7 @@
 """Regression tests for Endpoints server in devappserver2."""
 
 
+import base64
 import json
 import os.path
 
@@ -140,6 +141,23 @@
                          'var_sint64': '-554', 'var_uint64': '4321'}
     self.assertEqual(expected_response, response_json)
 
+  def test_echo_bytes(self):
+    """Test sending and receiving a BytesField parameter."""
+    value = 'This is a test of a message encoded as a BytesField.01234\000\001'
+    bytes_value = base64.urlsafe_b64encode(value)
+    body_json = {'bytes_value': bytes_value}
+    body = json.dumps(body_json)
+    send_headers = {'content-type': 'application/json'}
+    status, content, headers = self.fetch_url(
+        'default', 'POST', '/_ah/api/test_service/v1/echo_bytes',
+        body, send_headers)
+    self.assertEqual(200, status)
+    self.assertEqual('application/json', headers['Content-Type'])
+
+    response_json = json.loads(content)
+    self.assertEqual(response_json, body_json)
+    self.assertEqual(value, base64.urlsafe_b64decode(body_json['bytes_value']))
+
   def test_empty_test(self):
     """Test that an empty response that should have an object returns 200."""
     status, content, headers = self.fetch_url(
diff --git a/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py b/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py
index bb346b4..76bb9aa 100644
--- a/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py
+++ b/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py
@@ -51,6 +51,11 @@
   var_uint64 = messages.IntegerField(5, variant=messages.Variant.UINT64)
 
 
+class TestBytes(messages.Message):
+  """Simple ProtoRPC request/response with a bytes field."""
+  bytes_value = messages.BytesField(1)
+
+
 my_api = endpoints.api(name='test_service', version='v1')
 
 
@@ -105,6 +110,12 @@
         var_uint64=request.var_uint64 + 1)
     return response
 
+  @endpoints.method(TestBytes, TestBytes,
+                    path='echo_bytes', scopes=[])
+  def echo_bytes(self, request):
+    logging.info('Found bytes: %s', request.bytes_value)
+    return request
+
   @endpoints.method(message_types.VoidMessage, message_types.VoidMessage,
                     path='empty_response', http_method='GET', scopes=[])
   def empty_response(self, unused_request):
diff --git a/google/appengine/tools/devappserver2/inotify_file_watcher.py b/google/appengine/tools/devappserver2/inotify_file_watcher.py
index 014db65..ba07b9e 100644
--- a/google/appengine/tools/devappserver2/inotify_file_watcher.py
+++ b/google/appengine/tools/devappserver2/inotify_file_watcher.py
@@ -30,6 +30,8 @@
 import struct
 import sys
 
+from google.appengine.tools.devappserver2 import watcher_common
+
 IN_MODIFY = 0x00000002
 IN_ATTRIB = 0x00000004
 IN_MOVED_FROM = 0x00000040
@@ -97,8 +99,9 @@
     logging.debug('_add_watch_for_path(%r)', path)
 
     for dirpath, directories, _ in itertools.chain(
-        [('', [path], None)],
+        [(os.path.dirname(path), [os.path.basename(path)], None)],
         os.walk(path, topdown=True, followlinks=True)):
+      watcher_common.remove_ignored_dirs(directories)
       for directory in directories:
         directory_path = os.path.join(dirpath, directory)
         # dirpath cannot be used as the parent directory path because it is the
diff --git a/google/appengine/tools/devappserver2/mtime_file_watcher.py b/google/appengine/tools/devappserver2/mtime_file_watcher.py
index 6a62512..a8c387c 100644
--- a/google/appengine/tools/devappserver2/mtime_file_watcher.py
+++ b/google/appengine/tools/devappserver2/mtime_file_watcher.py
@@ -20,6 +20,8 @@
 import threading
 import warnings
 
+from google.appengine.tools.devappserver2 import watcher_common
+
 
 class MtimeFileWatcher(object):
   """Monitors a directory tree for changes using mtime polling."""
@@ -76,6 +78,7 @@
     num_files = 0
     for dirname, dirnames, filenames in os.walk(self._directory,
                                                 followlinks=True):
+      watcher_common.remove_ignored_dirs(dirnames)
       for filename in filenames + dirnames:
         if num_files == 10000:
           warnings.warn(
diff --git a/google/appengine/tools/devappserver2/php/runtime.py b/google/appengine/tools/devappserver2/php/runtime.py
index 8a5e834..f328ce5 100644
--- a/google/appengine/tools/devappserver2/php/runtime.py
+++ b/google/appengine/tools/devappserver2/php/runtime.py
@@ -29,6 +29,7 @@
 
 import google
 
+from google.appengine.api import appinfo
 from google.appengine.tools.devappserver2 import http_runtime_constants
 from google.appengine.tools.devappserver2 import php
 from google.appengine.tools.devappserver2 import request_rewriter
@@ -46,9 +47,15 @@
   def __init__(self, config):
     logging.debug('Initializing runtime with %s', config)
     self.config = config
+    if appinfo.MODULE_SEPARATOR not in config.version_id:
+      module_id = appinfo.DEFAULT_MODULE
+      version_id = config.version_id
+    else:
+      module_id, version_id = config.version_id.split(appinfo.MODULE_SEPARATOR)
     self.environ_template = {
         'APPLICATION_ID': str(config.app_id),
-        'CURRENT_VERSION_ID': str(config.version_id),
+        'CURRENT_MODULE_ID': module_id,
+        'CURRENT_VERSION_ID': version_id,
         'DATACENTER': str(config.datacenter),
         'INSTANCE_ID': str(config.instance_id),
         'APPENGINE_RUNTIME': 'php',
diff --git a/google/appengine/tools/devappserver2/php_runtime.py b/google/appengine/tools/devappserver2/php_runtime.py
index ea05823..84a9ae0 100644
--- a/google/appengine/tools/devappserver2/php_runtime.py
+++ b/google/appengine/tools/devappserver2/php_runtime.py
@@ -143,17 +143,27 @@
                             'php-cgi binary.')
 
     if not os.path.exists(php_executable_path):
-      raise _PHPBinaryError('The path specified with the --php_exectuable_path '
+      raise _PHPBinaryError('The path specified with the --php_executable_path '
                             'flag (%s) does not exist.' % php_executable_path)
 
     if not os.access(php_executable_path, os.X_OK):
-      raise _PHPBinaryError('The path specified with the --php_exectuable_path '
+      raise _PHPBinaryError('The path specified with the --php_executable_path '
+                            'flag (%s) is not executable' % php_executable_path)
+
+    env = {}
+    # On Windows, in order to run a side-by-side assembly the specified env
+    # must include a valid SystemRoot.
+    if 'SYSTEMROOT' in os.environ:
+      env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
+
+    if not os.access(php_executable_path, os.X_OK):
+      raise _PHPBinaryError('The path specified with the --php_executable_path '
                             'flag (%s) is not executable' % php_executable_path)
 
     version_process = safe_subprocess.start_process([php_executable_path, '-v'],
                                                     stdout=subprocess.PIPE,
                                                     stderr=subprocess.PIPE,
-                                                    env={})
+                                                    env=env)
     version_stdout, version_stderr = version_process.communicate()
     if version_process.returncode:
       raise _PHPEnvironmentError(
@@ -180,7 +190,7 @@
         [php_executable_path, '-f', _CHECK_ENVIRONMENT_SCRIPT_PATH],
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE,
-        env={})
+        env=env)
     check_process_stdout, _ = check_process.communicate()
     if check_process.returncode:
       raise _PHPEnvironmentError(check_process_stdout)
diff --git a/google/appengine/tools/devappserver2/watcher_common.py b/google/appengine/tools/devappserver2/watcher_common.py
new file mode 100644
index 0000000..ddc0c51
--- /dev/null
+++ b/google/appengine/tools/devappserver2/watcher_common.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Common functionality for file watchers."""
+
+
+# Directories that we should not watch at all.
+_IGNORED_DIRS = ('.git', '.hg', '.svn')
+
+
+def remove_ignored_dirs(dirs):
+  """Remove directories from dirs that should not be watched."""
+  for d in _IGNORED_DIRS:
+    if d in dirs:
+      dirs.remove(d)
diff --git a/google/appengine/tools/devappserver2/wsgi_server.py b/google/appengine/tools/devappserver2/wsgi_server.py
index 390c1a6..c1729fc 100644
--- a/google/appengine/tools/devappserver2/wsgi_server.py
+++ b/google/appengine/tools/devappserver2/wsgi_server.py
@@ -37,7 +37,13 @@
 
 _HAS_POLL = hasattr(select, 'poll')
 
-_PORT_0_RETRIES = 5
+# Due to reports of failure to find a consistent port, trying a higher value
+# to see if that reduces the problem sufficiently.  If it doesn't we can try
+# increasing it (on my circa 2010 desktop, it takes about 1/2 second per 1024
+# tries) but it would probably be better to either figure out a better
+# algorithm or make it possible for code to work with inconsistent ports.
+
+_PORT_0_RETRIES = 2048
 
 
 class BindError(errors.Error):
@@ -295,7 +301,7 @@
         if self._start_all_dynamic_port(host_ports):
           break
       else:
-        raise BindError('Unable to find a consistent port %s' % host)
+        raise BindError('Unable to find a consistent port for %s' % host)
 
   def _start_all_fixed_port(self, host_ports):
     """Starts a server for each specified address with a fixed port.
diff --git a/google/net/proto2/proto/descriptor_pb2.py b/google/net/proto2/proto/descriptor_pb2.py
index fac0cad..cb2aefe 100644
--- a/google/net/proto2/proto/descriptor_pb2.py
+++ b/google/net/proto2/proto/descriptor_pb2.py
@@ -28,7 +28,7 @@
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='net/proto2/proto/descriptor.proto',
   package='proto2',
-  serialized_pb='\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\x95\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12%\n\x07options\x18\x08 
\x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.StreamOptions\"\xba\t\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 
\x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x33\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x04true\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 
\n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xca\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xec\x04\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"/\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\x0f\n\x0b\x42YTE_BUFFER\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9c\x06\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xde\x03\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
+  serialized_pb='\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\x95\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12%\n\x07options\x18\x08 
\x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.StreamOptions\"\xba\t\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 
\x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x33\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x04true\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 
\n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xca\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xf9\x04\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"<\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\x1c\n\x18\x45XPERIMENTAL_BYTE_BUFFER\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd4\x06\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x1a\n\x12legacy_stream_type\x18\x16 \x01(\t\x12\x1a\n\x12legacy_result_type\x18\x17 \x01(\t\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xde\x03\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
 
 
 
@@ -232,14 +232,14 @@
       options=None,
       type=None),
     _descriptor.EnumValueDescriptor(
-      name='BYTE_BUFFER', index=2, number=2,
+      name='EXPERIMENTAL_BYTE_BUFFER', index=2, number=2,
       options=None,
       type=None),
   ],
   containing_type=None,
   options=None,
   serialized_start=4221,
-  serialized_end=4268,
+  serialized_end=4281,
 )
 
 _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor(
@@ -263,8 +263,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=4270,
-  serialized_end=4323,
+  serialized_start=4283,
+  serialized_end=4336,
 )
 
 _METHODOPTIONS_PROTOCOL = _descriptor.EnumDescriptor(
@@ -284,8 +284,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5400,
-  serialized_end=5428,
+  serialized_start=5469,
+  serialized_end=5497,
 )
 
 _METHODOPTIONS_SECURITYLEVEL = _descriptor.EnumDescriptor(
@@ -313,8 +313,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5430,
-  serialized_end=5531,
+  serialized_start=5499,
+  serialized_end=5600,
 )
 
 _METHODOPTIONS_FORMAT = _descriptor.EnumDescriptor(
@@ -334,8 +334,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5533,
-  serialized_end=5581,
+  serialized_start=5602,
+  serialized_end=5650,
 )
 
 _STREAMOPTIONS_TOKENUNIT = _descriptor.EnumDescriptor(
@@ -355,8 +355,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=6028,
-  serialized_end=6062,
+  serialized_start=6097,
+  serialized_end=6131,
 )
 
 
@@ -1327,7 +1327,7 @@
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
   serialized_start=3714,
-  serialized_end=4334,
+  serialized_end=4347,
 )
 
 
@@ -1375,8 +1375,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4337,
-  serialized_end=4490,
+  serialized_start=4350,
+  serialized_end=4503,
 )
 
 
@@ -1410,8 +1410,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4492,
-  serialized_end=4608,
+  serialized_start=4505,
+  serialized_end=4621,
 )
 
 
@@ -1459,8 +1459,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4611,
-  serialized_end=4793,
+  serialized_start=4624,
+  serialized_end=4806,
 )
 
 
@@ -1563,14 +1563,28 @@
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
-      name='deprecated', full_name='proto2.MethodOptions.deprecated', index=13,
+      name='legacy_stream_type', full_name='proto2.MethodOptions.legacy_stream_type', index=13,
+      number=22, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=unicode("", "utf-8"),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='legacy_result_type', full_name='proto2.MethodOptions.legacy_result_type', index=14,
+      number=23, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=unicode("", "utf-8"),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='proto2.MethodOptions.deprecated', index=15,
       number=33, type=8, cpp_type=7, label=1,
       has_default_value=True, default_value=False,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='proto2.MethodOptions.uninterpreted_option', index=14,
+      name='uninterpreted_option', full_name='proto2.MethodOptions.uninterpreted_option', index=16,
       number=999, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
@@ -1588,8 +1602,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4796,
-  serialized_end=5592,
+  serialized_start=4809,
+  serialized_end=5661,
 )
 
 
@@ -1687,8 +1701,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=5595,
-  serialized_end=6073,
+  serialized_start=5664,
+  serialized_end=6142,
 )
 
 
@@ -1722,8 +1736,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6302,
-  serialized_end=6353,
+  serialized_start=6371,
+  serialized_end=6422,
 )
 
 _UNINTERPRETEDOPTION = _descriptor.Descriptor(
@@ -1791,8 +1805,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6076,
-  serialized_end=6353,
+  serialized_start=6145,
+  serialized_end=6422,
 )
 
 
@@ -1840,8 +1854,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6425,
-  serialized_end=6524,
+  serialized_start=6494,
+  serialized_end=6593,
 )
 
 _SOURCECODEINFO = _descriptor.Descriptor(
@@ -1867,8 +1881,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6356,
-  serialized_end=6524,
+  serialized_start=6425,
+  serialized_end=6593,
 )
 
 _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
diff --git a/google/net/proto2/python/public/descriptor.py b/google/net/proto2/python/public/descriptor.py
index e9a22c0..94dc421 100644
--- a/google/net/proto2/python/public/descriptor.py
+++ b/google/net/proto2/python/public/descriptor.py
@@ -23,11 +23,14 @@
 """
 
 
-
 from google.net.proto2.python.internal import api_implementation
 
 
 if api_implementation.Type() == 'cpp':
+
+  import os
+  import uuid
+
   if api_implementation.Version() == 2:
     from google.net.proto2.python.internal.cpp import _message
   else:
@@ -704,6 +707,33 @@
   Returns:
     A Descriptor for protobuf messages.
   """
+  if api_implementation.Type() == 'cpp':
+
+
+
+
+    from google.net.proto2.proto import descriptor_pb2
+    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
+    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
+
+
+
+
+
+    proto_name = str(uuid.uuid4())
+
+    if package:
+      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
+                                                proto_name + '.proto')
+      file_descriptor_proto.package = package
+    else:
+      file_descriptor_proto.name = proto_name + '.proto'
+
+    if api_implementation.Version() == 2:
+      _message.BuildFile(file_descriptor_proto.SerializeToString())
+    else:
+      cpp_message.BuildFile(file_descriptor_proto.SerializeToString())
+
   full_message_name = [desc_proto.name]
   if package: full_message_name.insert(0, package)
 
diff --git a/google_sql.py b/google_sql.py
index af18e1c..661f26e 100644
--- a/google_sql.py
+++ b/google_sql.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/httplib2/__init__.py b/httplib2/__init__.py
deleted file mode 100644
index 191ef1e..0000000
--- a/httplib2/__init__.py
+++ /dev/null
@@ -1,1673 +0,0 @@
-from __future__ import generators
-"""
-httplib2
-
-A caching http interface that supports ETags and gzip
-to conserve bandwidth.
-
-Requires Python 2.3 or later
-
-Changelog:
-2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
-
-"""
-
-__author__ = "Joe Gregorio (joe@bitworking.org)"
-__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
-                    "James Antill",
-                    "Xavier Verges Farrero",
-                    "Jonathan Feinberg",
-                    "Blair Zajac",
-                    "Sam Ruby",
-                    "Louis Nyffenegger"]
-__license__ = "MIT"
-__version__ = "0.8"
-
-import re
-import sys
-import email
-import email.Utils
-import email.Message
-import email.FeedParser
-import StringIO
-import gzip
-import zlib
-import httplib
-import urlparse
-import urllib
-import base64
-import os
-import copy
-import calendar
-import time
-import random
-import errno
-try:
-    from hashlib import sha1 as _sha, md5 as _md5
-except ImportError:
-    # prior to Python 2.5, these were separate modules
-    import sha
-    import md5
-    _sha = sha.new
-    _md5 = md5.new
-import hmac
-from gettext import gettext as _
-import socket
-
-try:
-    from httplib2 import socks
-except ImportError:
-    try:
-        import socks
-    except (ImportError, AttributeError):
-        socks = None
-
-# Build the appropriate socket wrapper for ssl
-try:
-    import ssl # python 2.6
-    ssl_SSLError = ssl.SSLError
-    def _ssl_wrap_socket(sock, key_file, cert_file,
-                         disable_validation, ca_certs):
-        if disable_validation:
-            cert_reqs = ssl.CERT_NONE
-        else:
-            cert_reqs = ssl.CERT_REQUIRED
-        # We should be specifying SSL version 3 or TLS v1, but the ssl module
-        # doesn't expose the necessary knobs. So we need to go with the default
-        # of SSLv23.
-        return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
-                               cert_reqs=cert_reqs, ca_certs=ca_certs)
-except (AttributeError, ImportError):
-    ssl_SSLError = None
-    def _ssl_wrap_socket(sock, key_file, cert_file,
-                         disable_validation, ca_certs):
-        if not disable_validation:
-            raise CertificateValidationUnsupported(
-                    "SSL certificate validation is not supported without "
-                    "the ssl module installed. To avoid this error, install "
-                    "the ssl module, or explicity disable validation.")
-        ssl_sock = socket.ssl(sock, key_file, cert_file)
-        return httplib.FakeSocket(sock, ssl_sock)
-
-
-if sys.version_info >= (2,3):
-    from iri2uri import iri2uri
-else:
-    def iri2uri(uri):
-        return uri
-
def has_timeout(timeout): # python 2.6
    """Return True iff *timeout* is an explicit, caller-supplied timeout.

    Both None and the socket module's global default sentinel
    (``socket._GLOBAL_DEFAULT_TIMEOUT``, present on Python 2.6+) count
    as "no timeout set".
    """
    sentinel = getattr(socket, '_GLOBAL_DEFAULT_TIMEOUT', None)
    if sentinel is None:
        return timeout is not None
    return timeout is not None and timeout is not sentinel
-
-__all__ = [
-    'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation',
-    'RedirectLimit', 'FailedToDecompressContent',
-    'UnimplementedDigestAuthOptionError',
-    'UnimplementedHmacDigestAuthOptionError',
-    'debuglevel', 'ProxiesUnavailableError']
-
-
-# The httplib debug level, set to a non-zero value to get debug output
-debuglevel = 0
-
-# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
-RETRIES = 2
-
-# Python 2.3 support
-if sys.version_info < (2,4):
-    def sorted(seq):
-        seq.sort()
-        return seq
-
# Python 2.3 support
def HTTPResponse__getheaders(self):
    """Return list of (header, value) tuples.

    Backport of HTTPResponse.getheaders() for Python versions whose
    httplib lacks it; patched onto httplib.HTTPResponse below.
    """
    msg = self.msg
    if msg is None:
        raise httplib.ResponseNotReady()
    return msg.items()
-
-if not hasattr(httplib.HTTPResponse, 'getheaders'):
-    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
-
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass

# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
    # Carries the HTTP response object and body that triggered the error
    # so callers can still inspect them after catching.
    def __init__(self, desc, response, content):
        self.response = response
        self.content = content
        HttpLib2Error.__init__(self, desc)

# A redirect response arrived without a Location header.
class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
# Too many redirects were followed for a single request.
class RedirectLimit(HttpLib2ErrorWithResponse): pass
# Body advertised gzip/deflate but could not be decoded.
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
# A Digest challenge used an option (qop/algorithm) we do not implement.
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
# An HMACDigest challenge used an option we do not implement.
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass

# A WWW-Authenticate style header could not be parsed.
class MalformedHeader(HttpLib2Error): pass
# A non-absolute URI was passed where an absolute one is required.
class RelativeURIError(HttpLib2Error): pass
# The target server could not be found.
class ServerNotFoundError(HttpLib2Error): pass
# Proxy support was requested but is not available.
class ProxiesUnavailableError(HttpLib2Error): pass
# The ssl module is missing, so certificate validation cannot be performed.
class CertificateValidationUnsupported(HttpLib2Error): pass
# The TLS handshake failed (includes certificate validation failures).
class SSLHandshakeError(HttpLib2Error): pass
# The requested feature is not available on this platform.
class NotSupportedOnThisPlatform(HttpLib2Error): pass
class CertificateHostnameMismatch(SSLHandshakeError):
    # Records the host and the offending certificate for diagnostics.
    def __init__(self, desc, host, cert):
        HttpLib2Error.__init__(self, desc)
        self.host = host
        self.cert = cert
-
-# Open Items:
-# -----------
-# Proxy support
-
-# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
-
-# Pluggable cache storage (supports storing the cache in
-#   flat files by default. We need a plug-in architecture
-#   that can support Berkeley DB and Squid)
-
-# == Known Issues ==
-# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
-# Does not handle Cache-Control: max-stale
-# Does not use Age: headers when calculating cache freshness.
-
-
-# The number of redirections to follow before giving up.
-# Note that only GET redirects are automatically followed.
-# Will also honor 301 requests by saving that info and never
-# requesting that URI again.
-DEFAULT_MAX_REDIRECTS = 5
-
-try:
-    # Users can optionally provide a module that tells us where the CA_CERTS
-    # are located.
-    import ca_certs_locater
-    CA_CERTS = ca_certs_locater.get()
-except ImportError:
-    # Default CA certificates file bundled with httplib2.
-    CA_CERTS = os.path.join(
-        os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
-
-# Which headers are hop-by-hop headers by default
-HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
-
-def _get_end2end_headers(response):
-    hopbyhop = list(HOP_BY_HOP)
-    hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
-    return [header for header in response.keys() if header not in hopbyhop]
-
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")

def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)

    Missing components come back as None.
    """
    match = URI.match(uri)
    scheme = match.group(2)
    authority = match.group(4)
    path = match.group(5)
    query = match.group(7)
    fragment = match.group(9)
    return (scheme, authority, path, query, fragment)
-
def urlnorm(uri):
    """Normalize *uri* for cache-key purposes.

    Returns (scheme, authority, request_uri, defrag_uri) with scheme and
    authority lowercased, an empty path mapped to "/", and the fragment
    dropped.

    Raises:
        RelativeURIError: if the URI lacks a scheme or an authority.

    Fix: the original lowercased the scheme twice (a duplicated
    statement); the redundant second assignment is removed.
    """
    (scheme, authority, path, query, fragment) = parse_uri(uri)
    if not scheme or not authority:
        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
    # Scheme and host are case-insensitive per RFC 3986; path/query are not.
    scheme = scheme.lower()
    authority = authority.lower()
    if not path:
        path = "/"
    # Could do syntax based normalization of the URI before
    # computing the digest. See Section 6.2.2 of Std 66.
    request_uri = query and "?".join([path, query]) or path
    defrag_uri = scheme + "://" + authority + request_uri
    return scheme, authority, request_uri, defrag_uri
-
-
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme    = re.compile(r'^\w+://')   # leading "scheme://" prefix
re_slash         = re.compile(r'[?/:|]+')   # characters unsafe in filenames

def safename(filename):
    """Return a filename suitable for the cache.

    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    """

    try:
        if re_url_scheme.match(filename):
            if isinstance(filename,str):
                # NOTE(review): this IDNA-encodes the *entire* URL, not just
                # the hostname; any UnicodeError it raises is swallowed below.
                # Confirm whether whole-URL idna encoding is intended.
                filename = filename.decode('utf-8')
                filename = filename.encode('idna')
            else:
                filename = filename.encode('idna')
    except UnicodeError:
        pass
    if isinstance(filename,unicode):
        filename=filename.encode('utf-8')
    # The md5 suffix keeps keys unique even after the truncation below.
    filemd5 = _md5(filename).hexdigest()
    filename = re_url_scheme.sub("", filename)
    filename = re_slash.sub(",", filename)

    # limit length of filename
    if len(filename)>200:
        filename=filename[:200]
    return ",".join((filename, filemd5))
-
-NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
-def _normalize_headers(headers):
-    return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip())  for (key, value) in headers.iteritems()])
-
-def _parse_cache_control(headers):
-    retval = {}
-    if headers.has_key('cache-control'):
-        parts =  headers['cache-control'].split(',')
-        parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
-        parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
-        retval = dict(parts_with_args + parts_wo_args)
-    return retval
-
-# Whether to use a strict mode to parse WWW-Authenticate headers
-# Might lead to bad results in case of ill-formed header value,
-# so disabled by default, falling back to relaxed parsing.
-# Set to true to turn on, usefull for testing servers.
-USE_WWW_AUTH_STRICT_PARSING = 0
-
-# In regex below:
-#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
-#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
-# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
-#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
-WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
-WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
-UNQUOTE_PAIRS = re.compile(r'\\(.)')
def _parse_www_authenticate(headers, headername='www-authenticate'):
    """Returns a dictionary of dictionaries, one dict
    per auth_scheme."""
    retval = {}
    if headers.has_key(headername):
        try:

            authenticate = headers[headername].strip()
            www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
            while authenticate:
                # Break off the scheme at the beginning of the line
                if headername == 'authentication-info':
                    # Authentication-Info carries no scheme token; it belongs
                    # to the Digest scheme implicitly.
                    (auth_scheme, the_rest) = ('digest', authenticate)
                else:
                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
                # Now loop over all the key value pairs that come after the scheme,
                # being careful not to roll into the next scheme
                match = www_auth.search(the_rest)
                auth_params = {}
                while match:
                    if match and len(match.groups()) == 3:
                        (key, value, the_rest) = match.groups()
                        # Unescape quoted-pair sequences (\X -> X) in values.
                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
                    match = www_auth.search(the_rest)
                retval[auth_scheme.lower()] = auth_params
                authenticate = the_rest.strip()

        except ValueError:
            # split(" ", 1) failed: a scheme token with nothing after it,
            # or an otherwise malformed header.
            raise MalformedHeader("WWW-Authenticate")
    return retval
-
-
def _entry_disposition(response_headers, request_headers):
    """Determine freshness from the Date, Expires and Cache-Control headers.

    We don't handle the following:

    1. Cache-Control: max-stale
    2. Age: headers are not used in the calculations.

    Not that this algorithm is simpler than you might think
    because we are operating as a private (non-shared) cache.
    This lets us ignore 's-maxage'. We can also ignore
    'proxy-invalidate' since we aren't a proxy.
    We will never return a stale document as
    fresh as a design decision, and thus the non-implementation
    of 'max-stale'. This also lets us safely ignore 'must-revalidate'
    since we operate as if every server has sent 'must-revalidate'.
    Since we are private we get to ignore both 'public' and
    'private' parameters. We also ignore 'no-transform' since
    we don't do any transformations.
    The 'no-store' parameter is handled at a higher level.
    So the only Cache-Control parameters we look at are:

    no-cache
    only-if-cached
    max-age
    min-fresh

    Returns one of "FRESH", "STALE" or "TRANSPARENT".
    """

    retval = "STALE"
    cc = _parse_cache_control(request_headers)
    cc_response = _parse_cache_control(response_headers)

    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
        # HTTP/1.0-style forced refresh from the client: bypass the cache.
        retval = "TRANSPARENT"
        if 'cache-control' not in request_headers:
            request_headers['cache-control'] = 'no-cache'
    elif cc.has_key('no-cache'):
        retval = "TRANSPARENT"
    elif cc_response.has_key('no-cache'):
        retval = "STALE"
    elif cc.has_key('only-if-cached'):
        # Client only wants a cached answer, so treat the entry as usable.
        retval = "FRESH"
    elif response_headers.has_key('date'):
        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
        now = time.time()
        current_age = max(0, now - date)
        # Freshness lifetime: response max-age wins over Expires.
        if cc_response.has_key('max-age'):
            try:
                freshness_lifetime = int(cc_response['max-age'])
            except ValueError:
                freshness_lifetime = 0
        elif response_headers.has_key('expires'):
            expires = email.Utils.parsedate_tz(response_headers['expires'])
            if None == expires:
                freshness_lifetime = 0
            else:
                freshness_lifetime = max(0, calendar.timegm(expires) - date)
        else:
            freshness_lifetime = 0
        # A request-side max-age overrides whatever the response allowed.
        if cc.has_key('max-age'):
            try:
                freshness_lifetime = int(cc['max-age'])
            except ValueError:
                freshness_lifetime = 0
        # min-fresh: entry must still be fresh min_fresh seconds from now,
        # so age the entry forward by that amount before comparing.
        if cc.has_key('min-fresh'):
            try:
                min_fresh = int(cc['min-fresh'])
            except ValueError:
                min_fresh = 0
            current_age += min_fresh
        if freshness_lifetime > current_age:
            retval = "FRESH"
    return retval
-
def _decompressContent(response, new_content):
    """Return the response body decoded per its Content-Encoding header.

    Handles gzip and deflate.  On success, content-length is corrected
    and content-encoding is renamed to -content-encoding so later code
    knows decoding already happened.  Raises FailedToDecompressContent
    when decoding fails.
    """
    content = new_content
    try:
        encoding = response.get('content-encoding', None)
        if encoding in ['gzip', 'deflate']:
            if encoding == 'gzip':
                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
            if encoding == 'deflate':
                # NOTE(review): zlib.decompress raises zlib.error, which is
                # not an IOError -- a corrupt deflate body would escape the
                # except clause below.  Confirm whether that is intended.
                content = zlib.decompress(content)
            response['content-length'] = str(len(content))
            # Record the historical presence of the encoding in a way the won't interfere.
            response['-content-encoding'] = response['content-encoding']
            del response['content-encoding']
    except IOError:
        content = ""
        raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
    return content
-
def _updateCache(request_headers, response_headers, content, cache, cachekey):
    """Store (or evict) *content* in *cache* under *cachekey*.

    Honors no-store on either the request or the response; otherwise
    serializes a status line, the end-to-end headers, and the body into
    a single cache entry.
    """
    if cachekey:
        cc = _parse_cache_control(request_headers)
        cc_response = _parse_cache_control(response_headers)
        if cc.has_key('no-store') or cc_response.has_key('no-store'):
            cache.delete(cachekey)
        else:
            info = email.Message.Message()
            for key, value in response_headers.iteritems():
                # These describe this particular transfer, not the entity.
                if key not in ['status','content-encoding','transfer-encoding']:
                    info[key] = value

            # Add annotations to the cache to indicate what headers
            # are variant for this request.
            vary = response_headers.get('vary', None)
            if vary:
                vary_headers = vary.lower().replace(' ', '').split(',')
                for header in vary_headers:
                    key = '-varied-%s' % header
                    try:
                        info[key] = request_headers[header]
                    except KeyError:
                        pass

            status = response_headers.status
            if status == 304:
                # A 304 revalidates the cached entity; store it as a 200.
                status = 200

            status_header = 'status: %d\r\n' % status

            header_str = info.as_string()

            # Normalize bare CR or bare LF to CRLF so the entry re-parses.
            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
            text = "".join([status_header, header_str, content])

            cache.set(cachekey, text)
-
def _cnonce():
    """Generate a fresh 16-hex-character client nonce for Digest auth.

    Hashes the current time plus 20 pseudo-random digits; not intended
    to be cryptographically strong.
    """
    random_digits = ["0123456789"[random.randrange(0, 9)] for i in range(20)]
    seed = "%s:%s" % (time.ctime(), random_digits)
    return _md5(seed).hexdigest()[:16]
-
def _wsse_username_token(cnonce, iso_now, password):
    """Compute the WSSE PasswordDigest: Base64(SHA1(nonce + created + password))."""
    digest = _sha("%s%s%s" % (cnonce, iso_now, password)).digest()
    return base64.b64encode(digest).strip()
-
-
-# For credentials we need two things, first
-# a pool of credential to try (not necesarily tied to BAsic, Digest, etc.)
-# Then we also need a list of URIs that have already demanded authentication
-# That list is tricky since sub-URIs can take the same auth, or the
-# auth scheme may change as you descend the tree.
-# So we also need each Auth instance to be able to tell us
-# how close to the 'top' it is.
-
class Authentication(object):
    """Base class for the supported HTTP authentication schemes.

    Records the host, path and credentials the challenge applied to so
    later requests can decide whether this auth object is in scope.
    """
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        self.path = path
        self.host = host
        self.credentials = credentials
        self.http = http

    def depth(self, request_uri):
        # Distance (in path segments) of request_uri below this auth scope.
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        return request_uri[len(self.path):].count("/")

    def inscope(self, host, request_uri):
        # XXX Should we normalize the request_uri?
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        return (host == self.host) and path.startswith(self.path)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header. Over-ride this in sub-classes."""
        pass

    def response(self, response, content):
        """Gives us a chance to update with new nonces
        or such returned from the last authorized response.
        Over-ride this in sub-classes if necessary.

        Return True if the request is to be retried, for
        example Digest may return stale=true.
        """
        return False
-
-
-
class BasicAuthentication(Authentication):
    """HTTP Basic authentication: sends the credentials base64-encoded."""
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
-
-
class DigestAuthentication(Authentication):
    """Only do qop='auth' and MD5, since that
    is all Apache currently implements"""
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['digest']
        qop = self.challenge.get('qop', 'auth')
        # Only qop='auth' is supported; anything else is rejected below.
        self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
        if self.challenge['qop'] is None:
            raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
        if self.challenge['algorithm'] != 'MD5':
            raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
        # A1 = username:realm:password (RFC 2617 section 3.2.2.2).
        self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
        # Nonce-use count; incremented after every request with this nonce.
        self.challenge['nc'] = 1

    def request(self, method, request_uri, headers, content, cnonce = None):
        """Modify the request headers"""
        # H() and KD() follow the notation of RFC 2617 section 3.2.1.
        H = lambda x: _md5(x).hexdigest()
        KD = lambda s, d: H("%s:%s" % (s, d))
        A2 = "".join([method, ":", request_uri])
        self.challenge['cnonce'] = cnonce or _cnonce()
        request_digest  = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
                self.challenge['nonce'],
                '%08x' % self.challenge['nc'],
                self.challenge['cnonce'],
                self.challenge['qop'], H(A2)))
        headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
                self.credentials[0],
                self.challenge['realm'],
                self.challenge['nonce'],
                request_uri,
                self.challenge['algorithm'],
                request_digest,
                self.challenge['qop'],
                self.challenge['nc'],
                self.challenge['cnonce'])
        if self.challenge.get('opaque'):
            headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
        self.challenge['nc'] += 1

    def response(self, response, content):
        if not response.has_key('authentication-info'):
            # No Authentication-Info header: check for a stale-nonce
            # rechallenge, which should be retried with the new nonce.
            challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
            if 'true' == challenge.get('stale'):
                self.challenge['nonce'] = challenge['nonce']
                self.challenge['nc'] = 1
                return True
        else:
            updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})

            if updated_challenge.has_key('nextnonce'):
                # Server rotated the nonce; adopt it and reset the use count.
                self.challenge['nonce'] = updated_challenge['nextnonce']
                self.challenge['nc'] = 1
        return False
-
-
class HmacDigestAuthentication(Authentication):
    """Adapted from Robert Sayre's code and DigestAuthentication above."""
    __author__ = "Thomas Broyer (t.broyer@ltgt.net)"

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['hmacdigest']
        # TODO: self.challenge['domain']
        self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
        if self.challenge['reason'] not in ['unauthorized', 'integrity']:
            self.challenge['reason'] = 'unauthorized'
        self.challenge['salt'] = self.challenge.get('salt', '')
        if not self.challenge.get('snonce'):
            raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
        if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
        self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
        if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
        if self.challenge['algorithm'] == 'HMAC-MD5':
            self.hashmod = _md5
        else:
            self.hashmod = _sha
        if self.challenge['pw-algorithm'] == 'MD5':
            self.pwhashmod = _md5
        else:
            self.pwhashmod = _sha
        # Derive the HMAC key from username, hashed (password+salt) and realm.
        # NOTE(review): pwhashmod.new(...) only exists on the pre-2.5 md5/sha
        # modules; the hashlib constructors bound to _md5/_sha have no .new().
        # Confirm which import path this code is expected to run under.
        self.key = "".join([self.credentials[0], ":",
                            self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
                            ":", self.challenge['realm']])
        self.key = self.pwhashmod.new(self.key).hexdigest().lower()

    def request(self, method, request_uri, headers, content):
        """Modify the request headers"""
        # The digest covers all end-to-end headers, in listed order.
        keys = _get_end2end_headers(headers)
        keylist = "".join(["%s " % k for k in keys])
        headers_val = "".join([headers[k] for k in keys])
        created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
        cnonce = _cnonce()
        request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
        request_digest  = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
        headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
                self.credentials[0],
                self.challenge['realm'],
                self.challenge['snonce'],
                cnonce,
                request_uri,
                created,
                request_digest,
                keylist)

    def response(self, response, content):
        # Retry when the server reports an integrity failure or stale nonce.
        challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
        if challenge.get('reason') in ['integrity', 'stale']:
            return True
        return False
-
-
class WsseAuthentication(Authentication):
    """This is thinly tested and should not be relied upon.
    At this time there isn't any third party server to test against.
    Blogger and TypePad implemented this algorithm at one point
    but Blogger has since switched to Basic over HTTPS and
    TypePad has implemented it wrong, by never issuing a 401
    challenge but instead requiring your client to telepathically know that
    their endpoint is expecting WSSE profile="UsernameToken"."""
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'WSSE profile="UsernameToken"'
        iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        cnonce = _cnonce()
        # PasswordDigest = Base64(SHA1(nonce + created + password)).
        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
        headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
                self.credentials[0],
                password_digest,
                cnonce,
                iso_now)
-
class GoogleLoginAuthentication(Authentication):
    """Google ClientLogin authentication.

    Exchanges the user's email/password for an Auth token at construction
    time, then attaches it via a GoogleLogin Authorization header.
    """
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        from urllib import urlencode
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        service = challenge['googlelogin'].get('service', 'xapi')
        # Blogger actually returns the service in the challenge
        # For the rest we guess based on the URI
        if service == 'xapi' and  request_uri.find("calendar") > 0:
            service = "cl"
        # No point in guessing Base or Spreadsheet
        #elif request_uri.find("spreadsheets") > 0:
        #    service = "wise"

        auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
        resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
        # Response body is newline-separated key=value pairs.
        lines = content.split('\n')
        d = dict([tuple(line.split("=", 1)) for line in lines if line])
        if resp.status == 403:
            # Login refused; fall back to an empty token rather than raising.
            self.Auth = ""
        else:
            self.Auth = d['Auth']

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
-
-
# Maps the scheme token found in a WWW-Authenticate challenge to the
# Authentication subclass implementing it.
AUTH_SCHEME_CLASSES = {
    "basic": BasicAuthentication,
    "wsse": WsseAuthentication,
    "digest": DigestAuthentication,
    "hmacdigest": HmacDigestAuthentication,
    "googlelogin": GoogleLoginAuthentication
}

# Preference order when a server offers several schemes at once.
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
-
class FileCache(object):
    """Uses a local directory as a store for cached files.
    Not really safe to use if multiple threads or processes are going to
    be running on the same cache.
    """
    def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
        """cache: directory path; safe: maps a key to a safe filename."""
        self.cache = cache
        self.safe = safe
        if not os.path.exists(cache):
            os.makedirs(self.cache)

    def get(self, key):
        """Return the cached bytes for *key*, or None if absent/unreadable."""
        retval = None
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        try:
            # open() instead of the Py2-only file() builtin (removed in
            # Python 3); behavior is identical on Python 2.  try/finally
            # ensures the handle is closed even if read() fails.
            f = open(cacheFullPath, "rb")
            try:
                retval = f.read()
            finally:
                f.close()
        except IOError:
            pass
        return retval

    def set(self, key, value):
        """Store *value* under *key*, replacing any previous entry."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        f = open(cacheFullPath, "wb")
        try:
            f.write(value)
        finally:
            f.close()

    def delete(self, key):
        """Remove *key*'s entry if present; no-op otherwise."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        if os.path.exists(cacheFullPath):
            os.remove(cacheFullPath)
-
class Credentials(object):
    """A pool of (domain, name, password) credentials.

    An empty domain ("") acts as a wildcard that matches every domain.
    """
    def __init__(self):
        self.credentials = []

    def add(self, name, password, domain=""):
        """Register *name*/*password* for *domain* ("" = any domain)."""
        self.credentials.append((domain.lower(), name, password))

    def clear(self):
        """Forget every stored credential."""
        self.credentials = []

    def iter(self, domain):
        """Yield (name, password) pairs applicable to *domain*."""
        for stored_domain, stored_name, stored_password in self.credentials:
            if stored_domain == "" or stored_domain == domain:
                yield (stored_name, stored_password)
-
class KeyCerts(Credentials):
    """Identical to Credentials except that
    name/password are mapped to key/cert."""
    pass

# Sentinel assigned to ProxyInfo.bypass_hosts to mean "bypass the proxy
# for every host".
class AllHosts(object):
    pass
-
class ProxyInfo(object):
    """Collect information required to use a proxy."""

    # Hostnames excluded from proxying; AllHosts means "bypass everything".
    bypass_hosts = ()

    def __init__(self, proxy_type, proxy_host, proxy_port,
                 proxy_rdns=None, proxy_user=None, proxy_pass=None):
        """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
        constants. For example:

        p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
            proxy_host='localhost', proxy_port=8000)
        """
        self.proxy_type = proxy_type
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_rdns = proxy_rdns
        self.proxy_user = proxy_user
        self.proxy_pass = proxy_pass

    def astuple(self):
        """Return all settings as (type, host, port, rdns, user, pass)."""
        return (self.proxy_type, self.proxy_host, self.proxy_port,
                self.proxy_rdns, self.proxy_user, self.proxy_pass)

    def isgood(self):
        """True when both a host and a port have been configured."""
        return (self.proxy_host != None) and (self.proxy_port != None)

    def applies_to(self, hostname):
        """True when requests to *hostname* should go through the proxy."""
        return not self.bypass_host(hostname)

    def bypass_host(self, hostname):
        """Has this host been excluded from the proxy config"""
        if self.bypass_hosts is AllHosts:
            return True
        for domain in self.bypass_hosts:
            if hostname.endswith(domain):
                return True
        return False
-
-
def proxy_info_from_environment(method='http'):
    """Build a ProxyInfo from the *_proxy / NO_PROXY environment variables.

    Returns None when `method` is not http/https or no proxy URL is set.
    """
    if method not in ('http', 'https'):
        return None

    env_var = method + '_proxy'
    # Lower-case variable wins; fall back to the upper-case spelling only
    # when the lower-case one is entirely absent.
    url = os.environ.get(env_var, os.environ.get(env_var.upper()))
    if not url:
        return None
    pi = proxy_info_from_url(url, method)

    no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
    if no_proxy == '*':
        # Special case: every host bypasses the proxy.
        bypass = AllHosts
    elif no_proxy:
        bypass = no_proxy.split(',')
    else:
        bypass = []
    pi.bypass_hosts = bypass
    return pi
-
def proxy_info_from_url(url, method='http'):
    """Construct a ProxyInfo from a proxy URL (e.g. an http_proxy env var)."""
    parts = urlparse.urlparse(url)
    netloc = parts[1]
    username = password = port = None
    if '@' in netloc:
        ident, host_port = netloc.split('@', 1)
        if ':' in ident:
            username, password = ident.split(':', 1)
        else:
            # A lone token before '@' is treated as the password.
            password = ident
    else:
        host_port = netloc
    if ':' in host_port:
        host, port = host_port.split(':', 1)
    else:
        host = host_port

    # Default port follows the scheme when none was given explicitly.
    port = int(port) if port else {'https': 443, 'http': 80}[method]

    return ProxyInfo(
        proxy_type = 3, # socks.PROXY_TYPE_HTTP
        proxy_host = host,
        proxy_port = port,
        proxy_user = username or None,
        proxy_pass = password or None,
    )
-
-
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
    """
    HTTPConnection subclass that supports timeouts

    All timeouts are in seconds. If None is passed for timeout then
    Python's default timeout for sockets will be used. See for example
    the docs of socket.setdefaulttimeout():
    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
    """

    def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
        """Create the connection; `proxy_info` is a ProxyInfo instance or None."""
        httplib.HTTPConnection.__init__(self, host, port, strict)
        self.timeout = timeout
        self.proxy_info = proxy_info

    def connect(self):
        """Connect to the host and port specified in __init__."""
        # Mostly verbatim from httplib.py.
        if self.proxy_info and socks is None:
            raise ProxiesUnavailableError(
                'Proxy support missing but proxy use was requested!')
        # Default error raised below when getaddrinfo yields no usable address.
        msg = "getaddrinfo returns an empty list"
        if self.proxy_info and self.proxy_info.isgood():
            use_proxy = True
            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
        else:
            use_proxy = False
        # With proxy-side DNS resolution (rdns) we resolve/connect to the
        # proxy's address instead of the target host's.
        if use_proxy and proxy_rdns:
            host = proxy_host
            port = proxy_port
        else:
            host = self.host
            port = self.port

        # Try each address returned by getaddrinfo until one connects.
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                if use_proxy:
                    self.sock = socks.socksocket(af, socktype, proto)
                    self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
                else:
                    self.sock = socket.socket(af, socktype, proto)
                    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                # Different from httplib: support timeouts.
                if has_timeout(self.timeout):
                    self.sock.settimeout(self.timeout)
                    # End of difference from httplib.
                if self.debuglevel > 0:
                    print "connect: (%s, %s) ************" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))

                self.sock.connect((self.host, self.port) + sa[2:])
            except socket.error, msg:
                if self.debuglevel > 0:
                    print "connect fail: (%s, %s)" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
                # Close the failed socket and move on to the next address.
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            raise socket.error, msg
-
-class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
-    """
-    This class allows communication via SSL.
-
-    All timeouts are in seconds. If None is passed for timeout then
-    Python's default timeout for sockets will be used. See for example
-    the docs of socket.setdefaulttimeout():
-    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
-    """
-    def __init__(self, host, port=None, key_file=None, cert_file=None,
-                 strict=None, timeout=None, proxy_info=None,
-                 ca_certs=None, disable_ssl_certificate_validation=False):
-        httplib.HTTPSConnection.__init__(self, host, port=port,
-                                         key_file=key_file,
-                                         cert_file=cert_file, strict=strict)
-        self.timeout = timeout
-        self.proxy_info = proxy_info
-        if ca_certs is None:
-            ca_certs = CA_CERTS
-        self.ca_certs = ca_certs
-        self.disable_ssl_certificate_validation = \
-                disable_ssl_certificate_validation
-
-    # The following two methods were adapted from https_wrapper.py, released
-    # with the Google Appengine SDK at
-    # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
-    # under the following license:
-    #
-    # Copyright 2007 Google Inc.
-    #
-    # Licensed under the Apache License, Version 2.0 (the "License");
-    # you may not use this file except in compliance with the License.
-    # You may obtain a copy of the License at
-    #
-    #     http://www.apache.org/licenses/LICENSE-2.0
-    #
-    # Unless required by applicable law or agreed to in writing, software
-    # distributed under the License is distributed on an "AS IS" BASIS,
-    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    # See the License for the specific language governing permissions and
-    # limitations under the License.
-    #
-
-    def _GetValidHostsForCert(self, cert):
-        """Returns a list of valid host globs for an SSL certificate.
-
-        Args:
-          cert: A dictionary representing an SSL certificate.
-        Returns:
-          list: A list of valid host globs.
-        """
-        if 'subjectAltName' in cert:
-            return [x[1] for x in cert['subjectAltName']
-                    if x[0].lower() == 'dns']
-        else:
-            return [x[0][1] for x in cert['subject']
-                    if x[0][0].lower() == 'commonname']
-
-    def _ValidateCertificateHostname(self, cert, hostname):
-        """Validates that a given hostname is valid for an SSL certificate.
-
-        Args:
-          cert: A dictionary representing an SSL certificate.
-          hostname: The hostname to test.
-        Returns:
-          bool: Whether or not the hostname is valid for this certificate.
-        """
-        hosts = self._GetValidHostsForCert(cert)
-        for host in hosts:
-            host_re = host.replace('.', '\.').replace('*', '[^.]*')
-            if re.search('^%s$' % (host_re,), hostname, re.I):
-                return True
-        return False
-
-    def connect(self):
-        "Connect to a host on a given (SSL) port."
-
-        msg = "getaddrinfo returns an empty list"
-        if self.proxy_info and self.proxy_info.isgood():
-            use_proxy = True
-            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
-        else:
-            use_proxy = False
-        if use_proxy and proxy_rdns:
-            host = proxy_host
-            port = proxy_port
-        else:
-            host = self.host
-            port = self.port
-
-        address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
-        for family, socktype, proto, canonname, sockaddr in address_info:
-            try:
-                if use_proxy:
-                    sock = socks.socksocket(family, socktype, proto)
-
-                    sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
-                else:
-                    sock = socket.socket(family, socktype, proto)
-                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-
-                if has_timeout(self.timeout):
-                    sock.settimeout(self.timeout)
-                sock.connect((self.host, self.port))
-                self.sock =_ssl_wrap_socket(
-                    sock, self.key_file, self.cert_file,
-                    self.disable_ssl_certificate_validation, self.ca_certs)
-                if self.debuglevel > 0:
-                    print "connect: (%s, %s)" % (self.host, self.port)
-                    if use_proxy:
-                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
-                if not self.disable_ssl_certificate_validation:
-                    cert = self.sock.getpeercert()
-                    hostname = self.host.split(':', 0)[0]
-                    if not self._ValidateCertificateHostname(cert, hostname):
-                        raise CertificateHostnameMismatch(
-                            'Server presented certificate that does not match '
-                            'host %s: %s' % (hostname, cert), hostname, cert)
-            except ssl_SSLError, e:
-                if sock:
-                    sock.close()
-                if self.sock:
-                    self.sock.close()
-                self.sock = None
-                # Unfortunately the ssl module doesn't seem to provide any way
-                # to get at more detailed error information, in particular
-                # whether the error is due to certificate validation or
-                # something else (such as SSL protocol mismatch).
-                if e.errno == ssl.SSL_ERROR_SSL:
-                    raise SSLHandshakeError(e)
-                else:
-                    raise
-            except (socket.timeout, socket.gaierror):
-                raise
-            except socket.error, msg:
-                if self.debuglevel > 0:
-                    print "connect fail: (%s, %s)" % (self.host, self.port)
-                    if use_proxy:
-                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
-                if self.sock:
-                    self.sock.close()
-                self.sock = None
-                continue
-            break
-        if not self.sock:
-            raise socket.error, msg
-
# Map each URI scheme to the connection class used for it by default.
SCHEME_TO_CONNECTION = {
    'http': HTTPConnectionWithTimeout,
    'https': HTTPSConnectionWithTimeout
}

# Use a different connection object for Google App Engine
try:
    try:
        from google.appengine.api import apiproxy_stub_map
        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
            raise ImportError  # Bail out; we're not actually running on App Engine.
        from google.appengine.api.urlfetch import fetch
        from google.appengine.api.urlfetch import InvalidURLError
    except (ImportError, AttributeError):
        # Fall back to the internal google3 spellings of the same APIs.
        from google3.apphosting.api import apiproxy_stub_map
        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
            raise ImportError  # Bail out; we're not actually running on App Engine.
        from google3.apphosting.api.urlfetch import fetch
        from google3.apphosting.api.urlfetch import InvalidURLError

    def _new_fixed_fetch(validate_certificate):
        # Return a fetch() wrapper with certificate validation baked in.
        def fixed_fetch(url, payload=None, method="GET", headers={},
                        allow_truncated=False, follow_redirects=True,
                        deadline=5):
            return fetch(url, payload=payload, method=method, headers=headers,
                         allow_truncated=allow_truncated,
                         follow_redirects=follow_redirects, deadline=deadline,
                         validate_certificate=validate_certificate)
        return fixed_fetch

    class AppEngineHttpConnection(httplib.HTTPConnection):
        """Use httplib on App Engine, but compensate for its weirdness.

        The parameters key_file, cert_file, proxy_info, ca_certs, and
        disable_ssl_certificate_validation are all dropped on the ground.
        """
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
                     disable_ssl_certificate_validation=False):
            httplib.HTTPConnection.__init__(self, host, port=port,
                                            strict=strict, timeout=timeout)

    class AppEngineHttpsConnection(httplib.HTTPSConnection):
        """Same as AppEngineHttpConnection, but for HTTPS URIs."""
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
                     disable_ssl_certificate_validation=False):
            httplib.HTTPSConnection.__init__(self, host, port=port,
                                             key_file=key_file,
                                             cert_file=cert_file, strict=strict,
                                             timeout=timeout)
            self._fetch = _new_fixed_fetch(
                    not disable_ssl_certificate_validation)

    # Update the connection classes to use the Google App Engine specific ones.
    SCHEME_TO_CONNECTION = {
        'http': AppEngineHttpConnection,
        'https': AppEngineHttpsConnection
    }
except (ImportError, AttributeError):
    # Not running on App Engine; keep the socket-based connections above.
    pass
-
-
-class Http(object):
-    """An HTTP client that handles:
-
-    - all methods
-    - caching
-    - ETags
-    - compression,
-    - HTTPS
-    - Basic
-    - Digest
-    - WSSE
-
-    and more.
-    """
    def __init__(self, cache=None, timeout=None,
                 proxy_info=proxy_info_from_environment,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        """If 'cache' is a string then it is used as a directory name for
        a disk cache. Otherwise it must be an object that supports the
        same interface as FileCache.

        All timeouts are in seconds. If None is passed for timeout
        then Python's default timeout for sockets will be used. See
        for example the docs of socket.setdefaulttimeout():
        http://docs.python.org/library/socket.html#socket.setdefaulttimeout

        `proxy_info` may be:
          - a callable that takes the http scheme ('http' or 'https') and
            returns a ProxyInfo instance per request. By default, uses
            proxy_info_from_environment.
          - a ProxyInfo instance (static proxy config).
          - None (proxy disabled).

        ca_certs is the path of a file containing root CA certificates for SSL
        server certificate validation.  By default, a CA cert file bundled with
        httplib2 is used.

        If disable_ssl_certificate_validation is true, SSL cert validation will
        not be performed.
        """
        self.proxy_info = proxy_info
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation

        # Map domain name to an httplib connection
        self.connections = {}
        # The location of the cache, for now a directory
        # where cached responses are held.
        if cache and isinstance(cache, basestring):
            self.cache = FileCache(cache)
        else:
            self.cache = cache

        # Name/password
        self.credentials = Credentials()

        # Key/cert
        self.certificates = KeyCerts()

        # authorization objects
        self.authorizations = []

        # If set to False then no redirects are followed, even safe ones.
        self.follow_redirects = True

        # Which HTTP methods do we apply optimistic concurrency to, i.e.
        # which methods get an "if-match:" etag header added to them.
        self.optimistic_concurrency_methods = ["PUT", "PATCH"]

        # If 'follow_redirects' is True, and this is set to True then
        # all redirects are followed, including unsafe ones.
        self.follow_all_redirects = False

        # If True, cached ETags are not sent as If-Match/If-None-Match.
        self.ignore_etag = False

        self.force_exception_to_status_code = False

        self.timeout = timeout

        # Keep Authorization: headers on a redirect.
        self.forward_authorization_headers = False
-
-    def __getstate__(self):
-        state_dict = copy.copy(self.__dict__)
-        # In case request is augmented by some foreign object such as
-        # credentials which handle auth
-        if 'request' in state_dict:
-            del state_dict['request']
-        if 'connections' in state_dict:
-            del state_dict['connections']
-        return state_dict
-
-    def __setstate__(self, state):
-        self.__dict__.update(state)
-        self.connections = {}
-
    def _auth_from_challenge(self, host, request_uri, headers, response, content):
        """A generator that creates Authorization objects
           that can be applied to requests.
        """
        challenges = _parse_www_authenticate(response, 'www-authenticate')
        # Try every stored credential against the challenged schemes, in
        # AUTH_SCHEME_ORDER preference order.
        for cred in self.credentials.iter(host):
            for scheme in AUTH_SCHEME_ORDER:
                if challenges.has_key(scheme):
                    yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
-
    def add_credentials(self, name, password, domain=""):
        """Add a name and password that will be used
        any time a request requires authentication.

        An empty `domain` (the default) makes the pair valid for all hosts.
        """
        self.credentials.add(name, password, domain)
-
    def add_certificate(self, key, cert, domain):
        """Add a key and cert that will be used
        any time a request requires authentication.

        The pair is passed as key_file/cert_file when opening an HTTPS
        connection to a host matching `domain`.
        """
        self.certificates.add(key, cert, domain)
-
-    def clear_credentials(self):
-        """Remove all the names and passwords
-        that are used for authentication"""
-        self.credentials.clear()
-        self.authorizations = []
-
    def _conn_request(self, conn, request_uri, method, body, headers):
        """Issue one request on `conn`, retrying transient connection
        failures up to RETRIES times; returns (Response, content)."""
        i = 0
        seen_bad_status_line = False
        while i < RETRIES:
            i += 1
            try:
                # Reconnect lazily if the connection was closed.
                if hasattr(conn, 'sock') and conn.sock is None:
                    conn.connect()
                conn.request(method, request_uri, body, headers)
            except socket.timeout:
                raise
            except socket.gaierror:
                conn.close()
                raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
            except ssl_SSLError:
                conn.close()
                raise
            except socket.error, e:
                err = 0
                if hasattr(e, 'args'):
                    err = getattr(e, 'args')[0]
                else:
                    err = e.errno
                if err == errno.ECONNREFUSED: # Connection refused
                    raise
                # Other socket errors fall through to getresponse() below.
            except httplib.HTTPException:
                # Just because the server closed the connection doesn't apparently mean
                # that the server didn't send a response.
                if hasattr(conn, 'sock') and conn.sock is None:
                    if i < RETRIES-1:
                        conn.close()
                        conn.connect()
                        continue
                    else:
                        conn.close()
                        raise
                if i < RETRIES-1:
                    conn.close()
                    conn.connect()
                    continue
            try:
                response = conn.getresponse()
            except httplib.BadStatusLine:
                # If we get a BadStatusLine on the first try then that means
                # the connection just went stale, so retry regardless of the
                # number of RETRIES set.
                if not seen_bad_status_line and i == 1:
                    i = 0
                    seen_bad_status_line = True
                    conn.close()
                    conn.connect()
                    continue
                else:
                    conn.close()
                    raise
            except (socket.error, httplib.HTTPException):
                if i < RETRIES-1:
                    conn.close()
                    conn.connect()
                    continue
                else:
                    conn.close()
                    raise
            else:
                content = ""
                if method == "HEAD":
                    # HEAD responses carry no body; close so the socket is
                    # not left holding unread data.
                    conn.close()
                else:
                    content = response.read()
                response = Response(response)
                if method != "HEAD":
                    # Transparently decode gzip/deflate-encoded bodies.
                    content = _decompressContent(response, content)
            break
        return (response, content)
-
-
    def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
        """Do the actual request using the connection object
        and also follow one level of redirects if necessary"""

        # Pick the in-scope authorization with the deepest matching path.
        auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
        auth = auths and sorted(auths)[0][1] or None
        if auth:
            auth.request(method, request_uri, headers, body)

        (response, content) = self._conn_request(conn, request_uri, method, body, headers)

        if auth:
            # Give the auth object a chance to react (e.g. a stale digest
            # nonce) and retry the request once.
            if auth.response(response, body):
                auth.request(method, request_uri, headers, body)
                (response, content) = self._conn_request(conn, request_uri, method, body, headers )
                response._stale_digest = 1

        if response.status == 401:
            # Try each authorization scheme the server challenged us with;
            # keep the first one that succeeds for future requests.
            for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
                authorization.request(method, request_uri, headers, body)
                (response, content) = self._conn_request(conn, request_uri, method, body, headers, )
                if response.status != 401:
                    self.authorizations.append(authorization)
                    authorization.response(response, body)
                    break

        if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
            if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
                # Pick out the location header and basically start from the beginning
                # remembering first to strip the ETag header and decrement our 'depth'
                if redirections:
                    if not response.has_key('location') and response.status != 300:
                        raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
                    # Fix-up relative redirects (which violate an RFC 2616 MUST)
                    if response.has_key('location'):
                        location = response['location']
                        (scheme, authority, path, query, fragment) = parse_uri(location)
                        if authority == None:
                            response['location'] = urlparse.urljoin(absolute_uri, location)
                    if response.status == 301 and method in ["GET", "HEAD"]:
                        # Cache permanent redirects so later requests can
                        # short-circuit straight to the new URL.
                        response['-x-permanent-redirect-url'] = response['location']
                        if not response.has_key('content-location'):
                            response['content-location'] = absolute_uri
                        _updateCache(headers, response, content, self.cache, cachekey)
                    # Drop conditional headers: they apply to the original
                    # resource, not the redirect target.
                    if headers.has_key('if-none-match'):
                        del headers['if-none-match']
                    if headers.has_key('if-modified-since'):
                        del headers['if-modified-since']
                    if 'authorization' in headers and not self.forward_authorization_headers:
                        del headers['authorization']
                    if response.has_key('location'):
                        location = response['location']
                        old_response = copy.deepcopy(response)
                        if not old_response.has_key('content-location'):
                            old_response['content-location'] = absolute_uri
                        redirect_method = method
                        if response.status in [302, 303]:
                            # RFC-sanctioned method rewrite: 302/303 become GET.
                            redirect_method = "GET"
                            body = None
                        (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
                        response.previous = old_response
                else:
                    raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
            elif response.status in [200, 203] and method in ["GET", "HEAD"]:
                # Don't cache 206's since we aren't going to handle byte range requests
                if not response.has_key('content-location'):
                    response['content-location'] = absolute_uri
                _updateCache(headers, response, content, self.cache, cachekey)

        return (response, content)
-
    def _normalize_headers(self, headers):
        # Delegate to the module-level _normalize_headers helper.
        return _normalize_headers(headers)
-
# TODO: Catch and rebrand some exceptions, then optionally turn all
# exceptions into status codes, including all socket.* and httplib.*
# exceptions.
-
-
-    def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
-        """ Performs a single HTTP request.
-
-        The 'uri' is the URI of the HTTP resource and can begin with either
-        'http' or 'https'. The value of 'uri' must be an absolute URI.
-
-        The 'method' is the HTTP method to perform, such as GET, POST, DELETE,
-        etc. There is no restriction on the methods allowed.
-
-        The 'body' is the entity body to be sent with the request. It is a
-        string object.
-
-        Any extra headers that are to be sent with the request should be
-        provided in the 'headers' dictionary.
-
-        The maximum number of redirect to follow before raising an
-        exception is 'redirections. The default is 5.
-
-        The return value is a tuple of (response, content), the first
-        being and instance of the 'Response' class, the second being
-        a string that contains the response entity body.
-        """
-        try:
-            if headers is None:
-                headers = {}
-            else:
-                headers = self._normalize_headers(headers)
-
-            if not headers.has_key('user-agent'):
-                headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
-
-            uri = iri2uri(uri)
-
-            (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
-            domain_port = authority.split(":")[0:2]
-            if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
-                scheme = 'https'
-                authority = domain_port[0]
-
-            proxy_info = self._get_proxy_info(scheme, authority)
-
-            conn_key = scheme+":"+authority
-            if conn_key in self.connections:
-                conn = self.connections[conn_key]
-            else:
-                if not connection_type:
-                    connection_type = SCHEME_TO_CONNECTION[scheme]
-                certs = list(self.certificates.iter(authority))
-                if scheme == 'https':
-                    if certs:
-                        conn = self.connections[conn_key] = connection_type(
-                                authority, key_file=certs[0][0],
-                                cert_file=certs[0][1], timeout=self.timeout,
-                                proxy_info=proxy_info,
-                                ca_certs=self.ca_certs,
-                                disable_ssl_certificate_validation=
-                                        self.disable_ssl_certificate_validation)
-                    else:
-                        conn = self.connections[conn_key] = connection_type(
-                                authority, timeout=self.timeout,
-                                proxy_info=proxy_info,
-                                ca_certs=self.ca_certs,
-                                disable_ssl_certificate_validation=
-                                        self.disable_ssl_certificate_validation)
-                else:
-                    conn = self.connections[conn_key] = connection_type(
-                            authority, timeout=self.timeout,
-                            proxy_info=proxy_info)
-                conn.set_debuglevel(debuglevel)
-
-            if 'range' not in headers and 'accept-encoding' not in headers:
-                headers['accept-encoding'] = 'gzip, deflate'
-
-            info = email.Message.Message()
-            cached_value = None
-            if self.cache:
-                cachekey = defrag_uri
-                cached_value = self.cache.get(cachekey)
-                if cached_value:
-                    # info = email.message_from_string(cached_value)
-                    #
-                    # Need to replace the line above with the kludge below
-                    # to fix the non-existent bug not fixed in this
-                    # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
-                    try:
-                        info, content = cached_value.split('\r\n\r\n', 1)
-                        feedparser = email.FeedParser.FeedParser()
-                        feedparser.feed(info)
-                        info = feedparser.close()
-                        feedparser._parse = None
-                    except (IndexError, ValueError):
-                        self.cache.delete(cachekey)
-                        cachekey = None
-                        cached_value = None
-            else:
-                cachekey = None
-
-            if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
-                # http://www.w3.org/1999/04/Editing/
-                headers['if-match'] = info['etag']
-
-            if method not in ["GET", "HEAD"] and self.cache and cachekey:
-                # RFC 2616 Section 13.10
-                self.cache.delete(cachekey)
-
-            # Check the vary header in the cache to see if this request
-            # matches what varies in the cache.
-            if method in ['GET', 'HEAD'] and 'vary' in info:
-                vary = info['vary']
-                vary_headers = vary.lower().replace(' ', '').split(',')
-                for header in vary_headers:
-                    key = '-varied-%s' % header
-                    value = info[key]
-                    if headers.get(header, None) != value:
-                        cached_value = None
-                        break
-
-            if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
-                if info.has_key('-x-permanent-redirect-url'):
-                    # Should cached permanent redirects be counted in our redirection count? For now, yes.
-                    if redirections <= 0:
-                        raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "")
-                    (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1)
-                    response.previous = Response(info)
-                    response.previous.fromcache = True
-                else:
-                    # Determine our course of action:
-                    #   Is the cached entry fresh or stale?
-                    #   Has the client requested a non-cached response?
-                    #
-                    # There seems to be three possible answers:
-                    # 1. [FRESH] Return the cache entry w/o doing a GET
-                    # 2. [STALE] Do the GET (but add in cache validators if available)
-                    # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
-                    entry_disposition = _entry_disposition(info, headers)
-
-                    if entry_disposition == "FRESH":
-                        if not cached_value:
-                            info['status'] = '504'
-                            content = ""
-                        response = Response(info)
-                        if cached_value:
-                            response.fromcache = True
-                        return (response, content)
-
-                    if entry_disposition == "STALE":
-                        if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
-                            headers['if-none-match'] = info['etag']
-                        if info.has_key('last-modified') and not 'last-modified' in headers:
-                            headers['if-modified-since'] = info['last-modified']
-                    elif entry_disposition == "TRANSPARENT":
-                        pass
-
-                    (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
-
-                if response.status == 304 and method == "GET":
-                    # Rewrite the cache entry with the new end-to-end headers
-                    # Take all headers that are in response
-                    # and overwrite their values in info.
-                    # unless they are hop-by-hop, or are listed in the connection header.
-
-                    for key in _get_end2end_headers(response):
-                        info[key] = response[key]
-                    merged_response = Response(info)
-                    if hasattr(response, "_stale_digest"):
-                        merged_response._stale_digest = response._stale_digest
-                    _updateCache(headers, merged_response, content, self.cache, cachekey)
-                    response = merged_response
-                    response.status = 200
-                    response.fromcache = True
-
-                elif response.status == 200:
-                    content = new_content
-                else:
-                    self.cache.delete(cachekey)
-                    content = new_content
-            else:
-                cc = _parse_cache_control(headers)
-                if cc.has_key('only-if-cached'):
-                    info['status'] = '504'
-                    response = Response(info)
-                    content = ""
-                else:
-                    (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
-        except Exception, e:
-            if self.force_exception_to_status_code:
-                if isinstance(e, HttpLib2ErrorWithResponse):
-                    response = e.response
-                    content = e.content
-                    response.status = 500
-                    response.reason = str(e)
-                elif isinstance(e, socket.timeout):
-                    content = "Request Timeout"
-                    response = Response({
-                        "content-type": "text/plain",
-                        "status": "408",
-                        "content-length": len(content)
-                    })
-                    response.reason = "Request Timeout"
-                else:
-                    content = str(e)
-                    response = Response({
-                        "content-type": "text/plain",
-                        "status": "400",
-                        "content-length": len(content)
-                    })
-                    response.reason = "Bad Request"
-            else:
-                raise
-
-
-        return (response, content)
-
-    def _get_proxy_info(self, scheme, authority):
-        """Return a ProxyInfo instance (or None) based on the scheme
-        and authority.
-        """
-        hostname, port = urllib.splitport(authority)
-        proxy_info = self.proxy_info
-        if callable(proxy_info):
-            proxy_info = proxy_info(scheme)
-
-        if (hasattr(proxy_info, 'applies_to')
-            and not proxy_info.applies_to(hostname)):
-            proxy_info = None
-        return proxy_info
-
-
-class Response(dict):
-    """An object more like email.Message than httplib.HTTPResponse."""
-
-    """Is this response from our local cache"""
-    fromcache = False
-
-    """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
-    version = 11
-
-    "Status code returned by server. "
-    status = 200
-
-    """Reason phrase returned by server."""
-    reason = "Ok"
-
-    previous = None
-
-    def __init__(self, info):
-        # info is either an email.Message or
-        # an httplib.HTTPResponse object.
-        if isinstance(info, httplib.HTTPResponse):
-            for key, value in info.getheaders():
-                self[key.lower()] = value
-            self.status = info.status
-            self['status'] = str(self.status)
-            self.reason = info.reason
-            self.version = info.version
-        elif isinstance(info, email.Message.Message):
-            for key, value in info.items():
-                self[key.lower()] = value
-            self.status = int(self['status'])
-        else:
-            for key, value in info.iteritems():
-                self[key.lower()] = value
-            self.status = int(self.get('status', self.status))
-            self.reason = self.get('reason', self.reason)
-
-
-    def __getattr__(self, name):
-        if name == 'dict':
-            return self
-        else:
-            raise AttributeError, name
diff --git a/httplib2/ca_certs_locater.py b/httplib2/ca_certs_locater.py
deleted file mode 100644
index e4946aa..0000000
--- a/httplib2/ca_certs_locater.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""Custom locater for CA_CERTS files for google3 code."""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import os
-
-from google3.pyglib import resources
-
-
-# pylint: disable-msg=g-bad-name
-def get():
-  """Locate the ca_certs.txt file.
-
-  The httplib2 library will look for local ca_certs_locater module to override
-  the default location for the ca_certs.txt file. We override it here to first
-  try loading via pyglib.resources, falling back to the traditional method if
-  that fails.
-
-  Returns:
-    The file location returned as a string.
-  """
-  try:
-    ca_certs = resources.GetResourceFilename(
-        'google3/third_party/py/httplib2/cacerts.txt')
-  except (IOError, AttributeError):
-    # We're either running in an environment where we don't have access to
-    # google3.pyglib.resources, or an environment where it won't work correctly
-    # (e.g., //apphosting/tools:dev_appserver_internal_main). In either of these
-    # cases, we fall back on the os.path.join approach.
-    ca_certs = os.path.join(
-        os.path.dirname(os.path.abspath(__file__)), 'cacerts.txt')
-  return ca_certs
diff --git a/httplib2/cacerts.txt b/httplib2/cacerts.txt
deleted file mode 100644
index d8a0027..0000000
--- a/httplib2/cacerts.txt
+++ /dev/null
@@ -1,739 +0,0 @@
-# Certifcate Authority certificates for validating SSL connections.
-#
-# This file contains PEM format certificates generated from
-# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt
-#
-# ***** BEGIN LICENSE BLOCK *****
-# Version: MPL 1.1/GPL 2.0/LGPL 2.1
-#
-# The contents of this file are subject to the Mozilla Public License Version
-# 1.1 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS" basis,
-# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
-# for the specific language governing rights and limitations under the
-# License.
-#
-# The Original Code is the Netscape security libraries.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1994-2000
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#
-# Alternatively, the contents of this file may be used under the terms of
-# either the GNU General Public License Version 2 or later (the "GPL"), or
-# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
-# in which case the provisions of the GPL or the LGPL are applicable instead
-# of those above. If you wish to allow use of your version of this file only
-# under the terms of either the GPL or the LGPL, and not to allow others to
-# use your version of this file under the terms of the MPL, indicate your
-# decision by deleting the provisions above and replace them with the notice
-# and other provisions required by the GPL or the LGPL. If you do not delete
-# the provisions above, a recipient may use your version of this file under
-# the terms of any one of the MPL, the GPL or the LGPL.
-#
-# ***** END LICENSE BLOCK *****
-
-Verisign/RSA Secure Server CA
-=============================
-
------BEGIN CERTIFICATE-----
-MIICNDCCAaECEAKtZn5ORf5eV288mBle3cAwDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxIDAeBgNVBAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYD
-VQQLEyVTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk0
-MTEwOTAwMDAwMFoXDTEwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxIDAeBgNV
-BAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYDVQQLEyVTZWN1cmUgU2Vy
-dmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGbMA0GCSqGSIb3DQEBAQUAA4GJ
-ADCBhQJ+AJLOesGugz5aqomDV6wlAXYMra6OLDfO6zV4ZFQD5YRAUcm/jwjiioII
-0haGN1XpsSECrXZogZoFokvJSyVmIlZsiAeP94FZbYQHZXATcXY+m3dM41CJVphI
-uR2nKRoTLkoRWZweFdVJVCxzOmmCsZc5nG1wZ0jl3S3WyB57AgMBAAEwDQYJKoZI
-hvcNAQECBQADfgBl3X7hsuyw4jrg7HFGmhkRuNPHoLQDQCYCPgmc4RKz0Vr2N6W3
-YQO2WxZpO8ZECAyIUwxrl0nHPjXcbLm7qt9cuzovk2C2qUtN8iD3zV9/ZHuO3ABc
-1/p3yjkWWW8O6tO1g39NTUJWdrTJXwT4OPjr0l91X817/OWOgHz8UA==
------END CERTIFICATE-----
-
-Thawte Personal Basic CA
-========================
-
------BEGIN CERTIFICATE-----
-MIIDITCCAoqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCByzELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
-VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
-ZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFBlcnNvbmFsIEJhc2lj
-IENBMSgwJgYJKoZIhvcNAQkBFhlwZXJzb25hbC1iYXNpY0B0aGF3dGUuY29tMB4X
-DTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgcsxCzAJBgNVBAYTAlpBMRUw
-EwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEaMBgGA1UE
-ChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2Vy
-dmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQZXJzb25hbCBCYXNpYyBD
-QTEoMCYGCSqGSIb3DQEJARYZcGVyc29uYWwtYmFzaWNAdGhhd3RlLmNvbTCBnzAN
-BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAvLyTU23AUE+CFeZIlDWmWr5vQvoPR+53
-dXLdjUmbllegeNTKP1GzaQuRdhciB5dqxFGTS+CN7zeVoQxN2jSQHReJl+A1OFdK
-wPQIcOk8RHtQfmGakOMj04gRRif1CwcOu93RfyAKiLlWCy4cgNrx454p7xS9CkT7
-G1sY0b8jkyECAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQF
-AAOBgQAt4plrsD16iddZopQBHyvdEktTwq1/qqcAXJFAVyVKOKqEcLnZgA+le1z7
-c8a914phXAPjLSeoF+CEhULcXpvGt7Jtu3Sv5D/Lp7ew4F2+eIMllNLbgQ95B21P
-9DkVWlIBe94y1k049hJcBlDfBVu9FEuh3ym6O0GN92NWod8isQ==
------END CERTIFICATE-----
-
-Thawte Personal Premium CA
-==========================
-
------BEGIN CERTIFICATE-----
-MIIDKTCCApKgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBzzELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
-VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
-ZXJ2aWNlcyBEaXZpc2lvbjEjMCEGA1UEAxMaVGhhd3RlIFBlcnNvbmFsIFByZW1p
-dW0gQ0ExKjAoBgkqhkiG9w0BCQEWG3BlcnNvbmFsLXByZW1pdW1AdGhhd3RlLmNv
-bTAeFw05NjAxMDEwMDAwMDBaFw0yMDEyMzEyMzU5NTlaMIHPMQswCQYDVQQGEwJa
-QTEVMBMGA1UECBMMV2VzdGVybiBDYXBlMRIwEAYDVQQHEwlDYXBlIFRvd24xGjAY
-BgNVBAoTEVRoYXd0ZSBDb25zdWx0aW5nMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9u
-IFNlcnZpY2VzIERpdmlzaW9uMSMwIQYDVQQDExpUaGF3dGUgUGVyc29uYWwgUHJl
-bWl1bSBDQTEqMCgGCSqGSIb3DQEJARYbcGVyc29uYWwtcHJlbWl1bUB0aGF3dGUu
-Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJZtn4B0TPuYwu8KHvE0Vs
-Bd/eJxZRNkERbGw77f4QfRKe5ZtCmv5gMcNmt3M6SK5O0DI3lIi1DbbZ8/JE2dWI
-Et12TfIa/G8jHnrx2JhFTgcQ7xZC0EN1bUre4qrJMf8fAHB8Zs8QJQi6+u4A6UYD
-ZicRFTuqW/KY3TZCstqIdQIDAQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
-SIb3DQEBBAUAA4GBAGk2ifc0KjNyL2071CKyuG+axTZmDhs8obF1Wub9NdP4qPIH
-b4Vnjt4rueIXsDqg8A6iAJrf8xQVbrvIhVqYgPn/vnQdPfP+MCXRNzRn+qVxeTBh
-KXLA4CxM+1bkOqhv5TJZUtt1KFBZDPgLGeSs2a+WjS9Q2wfD6h+rM+D1KzGJ
------END CERTIFICATE-----
-
-Thawte Personal Freemail CA
-===========================
-
------BEGIN CERTIFICATE-----
-MIIDLTCCApagAwIBAgIBADANBgkqhkiG9w0BAQQFADCB0TELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
-VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
-ZXJ2aWNlcyBEaXZpc2lvbjEkMCIGA1UEAxMbVGhhd3RlIFBlcnNvbmFsIEZyZWVt
-YWlsIENBMSswKQYJKoZIhvcNAQkBFhxwZXJzb25hbC1mcmVlbWFpbEB0aGF3dGUu
-Y29tMB4XDTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgdExCzAJBgNVBAYT
-AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEa
-MBgGA1UEChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRp
-b24gU2VydmljZXMgRGl2aXNpb24xJDAiBgNVBAMTG1RoYXd0ZSBQZXJzb25hbCBG
-cmVlbWFpbCBDQTErMCkGCSqGSIb3DQEJARYccGVyc29uYWwtZnJlZW1haWxAdGhh
-d3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1GnX1LCUZFtx6UfY
-DFG26nKRsIRefS0Nj3sS34UldSh0OkIsYyeflXtL734Zhx2G6qPduc6WZBrCFG5E
-rHzmj+hND3EfQDimAKOHePb5lIZererAXnbr2RSjXW56fAylS1V/Bhkpf56aJtVq
-uzgkCGqYx7Hao5iR/Xnb5VrEHLkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zAN
-BgkqhkiG9w0BAQQFAAOBgQDH7JJ+Tvj1lqVnYiqk8E0RYNBvjWBYYawmu1I1XAjP
-MPuoSpaKH2JCI4wXD/S6ZJwXrEcp352YXtJsYHFcoqzceePnbgBHH7UNKOgCneSa
-/RP0ptl8sfjcXyMmCZGAc9AUG95DqYMl8uacLxXK/qarigd1iwzdUYRr5PjRznei
-gQ==
------END CERTIFICATE-----
-
-Thawte Server CA
-================
-
------BEGIN CERTIFICATE-----
-MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
-MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
-MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
-DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
-dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
-cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
-DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
-gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
-yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
-L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
-EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
-7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
-QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
-qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
------END CERTIFICATE-----
-
-Thawte Premium Server CA
-========================
-
------BEGIN CERTIFICATE-----
-MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
-dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
-MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
-MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
-A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
-b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
-cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
-bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
-VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
-ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
-uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
-9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
-hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
-pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
------END CERTIFICATE-----
-
-Equifax Secure CA
-=================
-
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
-dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
-MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
-dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
-BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
-cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
-MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
-aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
-ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
-IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
-7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
-1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
------END CERTIFICATE-----
-
-Verisign Class 1 Public Primary Certification Authority
-=======================================================
-
------BEGIN CERTIFICATE-----
-MIICPTCCAaYCEQDNun9W8N/kvFT+IqyzcqpVMA0GCSqGSIb3DQEBAgUAMF8xCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xh
-c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05
-NjAxMjkwMDAwMDBaFw0yODA4MDEyMzU5NTlaMF8xCzAJBgNVBAYTAlVTMRcwFQYD
-VQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xhc3MgMSBQdWJsaWMgUHJp
-bWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCBnzANBgkqhkiG9w0BAQEFAAOB
-jQAwgYkCgYEA5Rm/baNWYS2ZSHH2Z965jeu3noaACpEO+jglr0aIguVzqKCbJF0N
-H8xlbgyw0FaEGIeaBpsQoXPftFg5a27B9hXVqKg/qhIGjTGsf7A01480Z4gJzRQR
-4k5FVmkfeAKA2txHkSm7NsljXMXg1y2He6G3MrB7MLoqLzGq7qNn2tsCAwEAATAN
-BgkqhkiG9w0BAQIFAAOBgQBMP7iLxmjf7kMzDl3ppssHhE16M/+SG/Q2rdiVIjZo
-EWx8QszznC7EBz8UsA9P/5CSdvnivErpj82ggAr3xSnxgiJduLHdgSOjeyUVRjB5
-FvjqBUuUfx3CHMjjt/QQQDwTw18fU+hI5Ia0e6E1sHslurjTjqs/OJ0ANACY89Fx
-lA==
------END CERTIFICATE-----
-
-Verisign Class 2 Public Primary Certification Authority
-=======================================================
-
------BEGIN CERTIFICATE-----
-MIICPDCCAaUCEC0b/EoXjaOR6+f/9YtFvgswDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAyIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAyIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQC2WoujDWojg4BrzzmH9CETMwZMJaLtVRKXxaeAufqDwSCg+i8VDXyh
-YGt+eSz6Bg86rvYbb7HS/y8oUl+DfUvEerf4Zh+AVPy3wo5ZShRXRtGak75BkQO7
-FYCTXOvnzAhsPz6zSvz/S2wj1VCCJkQZjiPDceoZJEcEnnW/yKYAHwIDAQABMA0G
-CSqGSIb3DQEBAgUAA4GBAIobK/o5wXTXXtgZZKJYSi034DNHD6zt96rbHuSLBlxg
-J8pFUs4W7z8GZOeUaHxgMxURaa+dYo2jA1Rrpr7l7gUYYAS/QoD90KioHgE796Nc
-r6Pc5iaAIzy4RHT3Cq5Ji2F4zCS/iIqnDupzGUH9TQPwiNHleI2lKk/2lw0Xd8rY
------END CERTIFICATE-----
-
-Verisign Class 3 Public Primary Certification Authority
-=======================================================
-
------BEGIN CERTIFICATE-----
-MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
-BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
-I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
-CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
-lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
-AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
------END CERTIFICATE-----
-
-Verisign Class 1 Public Primary Certification Authority - G2
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIDAjCCAmsCEEzH6qqYPnHTkxD4PTqJkZIwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
-c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
-MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
-emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
-DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
-FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMSBQdWJsaWMg
-UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
-YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQCq0Lq+Fi24g9TK0g+8djHKlNgdk4xWArzZbxpvUjZudVYK
-VdPfQ4chEWWKfo+9Id5rMj8bhDSVBZ1BNeuS65bdqlk/AVNtmU/t5eIqWpDBucSm
-Fc/IReumXY6cPvBkJHalzasab7bYe1FhbqZ/h8jit+U03EGI6glAvnOSPWvndQID
-AQABMA0GCSqGSIb3DQEBBQUAA4GBAKlPww3HZ74sy9mozS11534Vnjty637rXC0J
-h9ZrbWB85a7FkCMMXErQr7Fd88e2CtvgFZMN3QO8x3aKtd1Pw5sTdbgBwObJW2ul
-uIncrKTdcu1OofdPvAbT6shkdHvClUGcZXNY8ZCaPGqxmMnEh7zPRW1F4m4iP/68
-DzFc6PLZ
------END CERTIFICATE-----
-
-Verisign Class 2 Public Primary Certification Authority - G2
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIDAzCCAmwCEQC5L2DMiJ+hekYJuFtwbIqvMA0GCSqGSIb3DQEBBQUAMIHBMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0Ns
-YXNzIDIgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
-MjE6MDgGA1UECxMxKGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9y
-aXplZCB1c2Ugb25seTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazAe
-Fw05ODA1MTgwMDAwMDBaFw0yODA4MDEyMzU5NTlaMIHBMQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0NsYXNzIDIgUHVibGlj
-IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjE6MDgGA1UECxMx
-KGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
-eTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEAp4gBIXQs5xoD8JjhlzwPIQjxnNuX6Zr8wgQGE75fUsjM
-HiwSViy4AWkszJkfrbCWrnkE8hM5wXuYuggs6MKEEyyqaekJ9MepAqRCwiNPStjw
-DqL7MWzJ5m+ZJwf15vRMeJ5t60aG+rmGyVTyssSv1EYcWskVMP8NbPUtDm3Of3cC
-AwEAATANBgkqhkiG9w0BAQUFAAOBgQByLvl/0fFx+8Se9sVeUYpAmLho+Jscg9ji
-nb3/7aHmZuovCfTK1+qlK5X2JGCGTUQug6XELaDTrnhpb3LabK4I8GOSN+a7xDAX
-rXfMSTWqz9iP0b63GJZHc2pUIjRkLbYWm1lbtFFZOrMLFPQS32eg9K0yZF6xRnIn
-jBJ7xUS0rg==
------END CERTIFICATE-----
-
-Verisign Class 3 Public Primary Certification Authority - G2
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
-c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
-MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
-emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
-DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
-FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
-UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
-YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
-pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
-13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
-AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
-U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
-F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
-oJ2daZH9
------END CERTIFICATE-----
-
-Verisign Class 4 Public Primary Certification Authority - G2
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIDAjCCAmsCEDKIjprS9esTR/h/xCA3JfgwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
-c3MgNCBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
-MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
-emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
-DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
-FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgNCBQdWJsaWMg
-UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
-YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQC68OTP+cSuhVS5B1f5j8V/aBH4xBewRNzjMHPVKmIquNDM
-HO0oW369atyzkSTKQWI8/AIBvxwWMZQFl3Zuoq29YRdsTjCG8FE3KlDHqGKB3FtK
-qsGgtG7rL+VXxbErQHDbWk2hjh+9Ax/YA9SPTJlxvOKCzFjomDqG04Y48wApHwID
-AQABMA0GCSqGSIb3DQEBBQUAA4GBAIWMEsGnuVAVess+rLhDityq3RS6iYF+ATwj
-cSGIL4LcY/oCRaxFWdcqWERbt5+BO5JoPeI3JPV7bI92NZYJqFmduc4jq3TWg/0y
-cyfYaT5DdPauxYma51N86Xv2S/PBZYPejYqcPIiNOVn8qj8ijaHBZlCBckztImRP
-T8qAkbYp
------END CERTIFICATE-----
-
-Verisign Class 1 Public Primary Certification Authority - G3
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQCLW3VWhFSFCwDPrzhIzrGkMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN2E1Lm0+afY8wR4
-nN493GwTFtl63SRRZsDHJlkNrAYIwpTRMx/wgzUfbhvI3qpuFU5UJ+/EbRrsC+MO
-8ESlV8dAWB6jRx9x7GD2bZTIGDnt/kIYVt/kTEkQeE4BdjVjEjbdZrwBBDajVWjV
-ojYJrKshJlQGrT/KFOCsyq0GHZXi+J3x4GD/wn91K0zM2v6HmSHquv4+VNfSWXjb
-PG7PoBMAGrgnoeS+Z5bKoMWznN3JdZ7rMJpfo83ZrngZPyPpXNspva1VyBtUjGP2
-6KbqxzcSXKMpHgLZ2x87tNcPVkeBFQRKr4Mn0cVYiMHd9qqnoxjaaKptEVHhv2Vr
-n5Z20T0CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAq2aN17O6x5q25lXQBfGfMY1a
-qtmqRiYPce2lrVNWYgFHKkTp/j90CxObufRNG7LRX7K20ohcs5/Ny9Sn2WCVhDr4
-wTcdYcrnsMXlkdpUpqwxga6X3s0IrLjAl4B/bnKk52kTlWUfxJM8/XmPBNQ+T+r3
-ns7NZ3xPZQL/kYVUc8f/NveGLezQXk//EZ9yBta4GvFMDSZl4kSAHsef493oCtrs
-pSCAaWihT37ha88HQfqDjrw43bAuEbFrskLMmrz5SCJ5ShkPshw+IHTZasO+8ih4
-E1Z5T21Q6huwtVexN2ZYI/PcD98Kh8TvhgXVOBRgmaNL3gaWcSzy27YfpO8/7g==
------END CERTIFICATE-----
-
-Verisign Class 2 Public Primary Certification Authority - G3
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIEGTCCAwECEGFwy0mMX5hFKeewptlQW3owDQYJKoZIhvcNAQEFBQAwgcoxCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVy
-aVNpZ24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24s
-IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNp
-Z24gQ2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
-eSAtIEczMB4XDTk5MTAwMTAwMDAwMFoXDTM2MDcxNjIzNTk1OVowgcoxCzAJBgNV
-BAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNp
-Z24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24sIElu
-Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNpZ24g
-Q2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAt
-IEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArwoNwtUs22e5LeWU
-J92lvuCwTY+zYVY81nzD9M0+hsuiiOLh2KRpxbXiv8GmR1BeRjmL1Za6tW8UvxDO
-JxOeBUebMXoT2B/Z0wI3i60sR/COgQanDTAM6/c8DyAd3HJG7qUCyFvDyVZpTMUY
-wZF7C9UTAJu878NIPkZgIIUq1ZC2zYugzDLdt/1AVbJQHFauzI13TccgTacxdu9o
-koqQHgiBVrKtaaNS0MscxCM9H5n+TOgWY47GCI72MfbS+uV23bUckqNJzc0BzWjN
-qWm6o+sdDZykIKbBoMXRRkwXbdKsZj+WjOCE1Db/IlnF+RFgqF8EffIa9iVCYQ/E
-Srg+iQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQA0JhU8wI1NQ0kdvekhktdmnLfe
-xbjQ5F1fdiLAJvmEOjr5jLX77GDx6M4EsMjdpwOPMPOY36TmpDHf0xwLRtxyID+u
-7gU8pDM/CzmscHhzS5kr3zDCVLCoO1Wh/hYozUK9dG6A2ydEp85EXdQbkJgNHkKU
-sQAsBNB0owIFImNjzYO1+8FtYmtpdf1dcEG59b98377BMnMiIYtYgXsVkXq642RI
-sH/7NiXaldDxJBQX3RiAa0YjOVT1jmIJBB2UkKab5iXiQkWquJCtvgiPqQtCGJTP
-cjnhsUPgKM+351psE2tJs//jGHyJizNdrDPXp/naOlXJWBD5qu9ats9LS98q
------END CERTIFICATE-----
-
-Verisign Class 3 Public Primary Certification Authority - G3
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
-N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
-KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
-kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
-CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
-Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
-imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
-2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
-DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
-/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
-F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
-TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
------END CERTIFICATE-----
-
-Verisign Class 4 Public Primary Certification Authority - G3
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
-GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
-+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
-U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
-NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
-ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
-ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
-CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
-g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
-fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
-2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
-bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
------END CERTIFICATE-----
-
-Equifax Secure Global eBusiness CA
-==================================
-
------BEGIN CERTIFICATE-----
-MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
-ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
-MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
-dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
-c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
-UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
-58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
-o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
-MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
-aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
-Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
-8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
------END CERTIFICATE-----
-
-Equifax Secure eBusiness CA 1
-=============================
-
------BEGIN CERTIFICATE-----
-MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
-ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
-MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
-LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
-KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
-RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
-WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
-Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
-AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
-eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
-zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
-WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
-/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
------END CERTIFICATE-----
-
-Equifax Secure eBusiness CA 2
-=============================
-
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
-dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
-NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
-VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
-vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
-BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
-IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
-NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
-y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
-0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
-E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
------END CERTIFICATE-----
-
-Thawte Time Stamping CA
-=======================
-
------BEGIN CERTIFICATE-----
-MIICoTCCAgqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBizELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzAN
-BgNVBAoTBlRoYXd0ZTEdMBsGA1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAd
-BgNVBAMTFlRoYXd0ZSBUaW1lc3RhbXBpbmcgQ0EwHhcNOTcwMTAxMDAwMDAwWhcN
-MjAxMjMxMjM1OTU5WjCBizELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4g
-Q2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzANBgNVBAoTBlRoYXd0ZTEdMBsG
-A1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAdBgNVBAMTFlRoYXd0ZSBUaW1l
-c3RhbXBpbmcgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANYrWHhhRYZT
-6jR7UZztsOYuGA7+4F+oJ9O0yeB8WU4WDnNUYMF/9p8u6TqFJBU820cEY8OexJQa
-Wt9MevPZQx08EHp5JduQ/vBR5zDWQQD9nyjfeb6Uu522FOMjhdepQeBMpHmwKxqL
-8vg7ij5FrHGSALSQQZj7X+36ty6K+Ig3AgMBAAGjEzARMA8GA1UdEwEB/wQFMAMB
-Af8wDQYJKoZIhvcNAQEEBQADgYEAZ9viwuaHPUCDhjc1fR/OmsMMZiCouqoEiYbC
-9RAIDb/LogWK0E02PvTX72nGXuSwlG9KuefeW4i2e9vjJ+V2w/A1wcu1J5szedyQ
-pgCed/r8zSeUQhac0xxo7L9c3eWpexAKMnRUEzGLhQOEkbdYATAUOK8oyvyxUBkZ
-CayJSdM=
------END CERTIFICATE-----
-
-thawte Primary Root CA
-======================
-
------BEGIN CERTIFICATE-----
-MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
-qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
-BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
-NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
-LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
-A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
-IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
-W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
-3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
-6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
-Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
-NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
-MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
-r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
-DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
-YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
-xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
-/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
-LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
-jVaMaA==
------END CERTIFICATE-----
-
-VeriSign Class 3 Public Primary Certification Authority - G5
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
-yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
-ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
-nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
-t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
-SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
-BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
-rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
-NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
-BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
-BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
-aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
-MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
-p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
-5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
-WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
-4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
-hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
------END CERTIFICATE-----
-
-Entrust.net Secure Server Certification Authority
-=================================================
-
------BEGIN CERTIFICATE-----
-MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
-VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
-ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
-KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
-ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
-MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
-ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
-b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
-bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
-U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
-A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
-I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
-wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
-AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
-oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
-BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
-dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
-MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
-b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
-dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
-MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
-E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
-MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
-hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
-95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
-2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
------END CERTIFICATE-----
-
-Go Daddy Certification Authority Root Certificate Bundle
-========================================================
-
------BEGIN CERTIFICATE-----
-MIIE3jCCA8agAwIBAgICAwEwDQYJKoZIhvcNAQEFBQAwYzELMAkGA1UEBhMCVVMx
-ITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g
-RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMTYw
-MTU0MzdaFw0yNjExMTYwMTU0MzdaMIHKMQswCQYDVQQGEwJVUzEQMA4GA1UECBMH
-QXJpem9uYTETMBEGA1UEBxMKU2NvdHRzZGFsZTEaMBgGA1UEChMRR29EYWRkeS5j
-b20sIEluYy4xMzAxBgNVBAsTKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5j
-b20vcmVwb3NpdG9yeTEwMC4GA1UEAxMnR28gRGFkZHkgU2VjdXJlIENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5MREwDwYDVQQFEwgwNzk2OTI4NzCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBAMQt1RWMnCZM7DI161+4WQFapmGBWTtwY6vj3D3H
-KrjJM9N55DrtPDAjhI6zMBS2sofDPZVUBJ7fmd0LJR4h3mUpfjWoqVTr9vcyOdQm
-VZWt7/v+WIbXnvQAjYwqDL1CBM6nPwT27oDyqu9SoWlm2r4arV3aLGbqGmu75RpR
-SgAvSMeYddi5Kcju+GZtCpyz8/x4fKL4o/K1w/O5epHBp+YlLpyo7RJlbmr2EkRT
-cDCVw5wrWCs9CHRK8r5RsL+H0EwnWGu1NcWdrxcx+AuP7q2BNgWJCJjPOq8lh8BJ
-6qf9Z/dFjpfMFDniNoW1fho3/Rb2cRGadDAW/hOUoz+EDU8CAwEAAaOCATIwggEu
-MB0GA1UdDgQWBBT9rGEyk2xF1uLuhV+auud2mWjM5zAfBgNVHSMEGDAWgBTSxLDS
-kdRMEXGzYcs9of7dqGrU4zASBgNVHRMBAf8ECDAGAQH/AgEAMDMGCCsGAQUFBwEB
-BCcwJTAjBggrBgEFBQcwAYYXaHR0cDovL29jc3AuZ29kYWRkeS5jb20wRgYDVR0f
-BD8wPTA7oDmgN4Y1aHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNvbS9yZXBv
-c2l0b3J5L2dkcm9vdC5jcmwwSwYDVR0gBEQwQjBABgRVHSAAMDgwNgYIKwYBBQUH
-AgEWKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5jb20vcmVwb3NpdG9yeTAO
-BgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBANKGwOy9+aG2Z+5mC6IG
-OgRQjhVyrEp0lVPLN8tESe8HkGsz2ZbwlFalEzAFPIUyIXvJxwqoJKSQ3kbTJSMU
-A2fCENZvD117esyfxVgqwcSeIaha86ykRvOe5GPLL5CkKSkB2XIsKd83ASe8T+5o
-0yGPwLPk9Qnt0hCqU7S+8MxZC9Y7lhyVJEnfzuz9p0iRFEUOOjZv2kWzRaJBydTX
-RE4+uXR21aITVSzGh6O1mawGhId/dQb8vxRMDsxuxN89txJx9OjxUUAiKEngHUuH
-qDTMBqLdElrRhjZkAzVvb3du6/KFUJheqwNTrZEjYx8WnM25sgVjOuH0aBsXBTWV
-U+4=
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIE+zCCBGSgAwIBAgICAQ0wDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1Zh
-bGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIElu
-Yy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24g
-QXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAe
-BgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTA0MDYyOTE3MDYyMFoX
-DTI0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBE
-YWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3MgMiBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgC
-ggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv
-2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+q
-N1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiO
-r18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lN
-f4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+YihfukEH
-U1jPEX44dMX4/7VpkI+EdOqXG68CAQOjggHhMIIB3TAdBgNVHQ4EFgQU0sSw0pHU
-TBFxs2HLPaH+3ahq1OMwgdIGA1UdIwSByjCBx6GBwaSBvjCBuzEkMCIGA1UEBxMb
-VmFsaUNlcnQgVmFsaWRhdGlvbiBOZXR3b3JrMRcwFQYDVQQKEw5WYWxpQ2VydCwg
-SW5jLjE1MDMGA1UECxMsVmFsaUNlcnQgQ2xhc3MgMiBQb2xpY3kgVmFsaWRhdGlv
-biBBdXRob3JpdHkxITAfBgNVBAMTGGh0dHA6Ly93d3cudmFsaWNlcnQuY29tLzEg
-MB4GCSqGSIb3DQEJARYRaW5mb0B2YWxpY2VydC5jb22CAQEwDwYDVR0TAQH/BAUw
-AwEB/zAzBggrBgEFBQcBAQQnMCUwIwYIKwYBBQUHMAGGF2h0dHA6Ly9vY3NwLmdv
-ZGFkZHkuY29tMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jZXJ0aWZpY2F0ZXMu
-Z29kYWRkeS5jb20vcmVwb3NpdG9yeS9yb290LmNybDBLBgNVHSAERDBCMEAGBFUd
-IAAwODA2BggrBgEFBQcCARYqaHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNv
-bS9yZXBvc2l0b3J5MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOBgQC1
-QPmnHfbq/qQaQlpE9xXUhUaJwL6e4+PrxeNYiY+Sn1eocSxI0YGyeR+sBjUZsE4O
-WBsUs5iB0QQeyAfJg594RAoYC5jcdnplDQ1tgMQLARzLrUc+cb53S8wGd9D0Vmsf
-SxOaFIqII6hR8INMqzW/Rn453HWkrugp++85j09VZw==
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
-NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
-dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
-WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
-v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
-UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
-IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
-W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
------END CERTIFICATE-----
-
-GeoTrust Global CA
-==================
-
------BEGIN CERTIFICATE-----
-MIIDfTCCAuagAwIBAgIDErvmMA0GCSqGSIb3DQEBBQUAME4xCzAJBgNVBAYTAlVT
-MRAwDgYDVQQKEwdFcXVpZmF4MS0wKwYDVQQLEyRFcXVpZmF4IFNlY3VyZSBDZXJ0
-aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDIwNTIxMDQwMDAwWhcNMTgwODIxMDQwMDAw
-WjBCMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UE
-AxMSR2VvVHJ1c3QgR2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
-CgKCAQEA2swYYzD99BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9m
-OSm9BXiLnTjoBbdqfnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIu
-T8rxh0PBFpVXLVDviS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6c
-JmTM386DGXHKTubU1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmR
-Cw7+OC7RHQWa9k0+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5asz
-PeE4uwc2hGKceeoWMPRfwCvocWvk+QIDAQABo4HwMIHtMB8GA1UdIwQYMBaAFEjm
-aPkr0rKV10fYIyAQTzOYkJ/UMB0GA1UdDgQWBBTAephojYn7qwVkDBF9qn1luMrM
-TjAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjA6BgNVHR8EMzAxMC+g
-LaArhilodHRwOi8vY3JsLmdlb3RydXN0LmNvbS9jcmxzL3NlY3VyZWNhLmNybDBO
-BgNVHSAERzBFMEMGBFUdIAAwOzA5BggrBgEFBQcCARYtaHR0cHM6Ly93d3cuZ2Vv
-dHJ1c3QuY29tL3Jlc291cmNlcy9yZXBvc2l0b3J5MA0GCSqGSIb3DQEBBQUAA4GB
-AHbhEm5OSxYShjAGsoEIz/AIx8dxfmbuwu3UOx//8PDITtZDOLC5MH0Y0FWDomrL
-NhGc6Ehmo21/uBPUR/6LWlxz/K7ZGzIZOKuXNBSqltLroxwUCEm2u+WR74M26x1W
-b8ravHNjkOR/ez4iyz0H7V84dJzjA1BOoa+Y7mHyhD8S
------END CERTIFICATE-----
-
diff --git a/httplib2/iri2uri.py b/httplib2/iri2uri.py
deleted file mode 100644
index d88c91f..0000000
--- a/httplib2/iri2uri.py
+++ /dev/null
@@ -1,110 +0,0 @@
-"""
-iri2uri
-
-Converts an IRI to a URI.
-
-"""
-__author__ = "Joe Gregorio (joe@bitworking.org)"
-__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = []
-__version__ = "1.0.0"
-__license__ = "MIT"
-__history__ = """
-"""
-
-import urlparse
-
-
-# Convert an IRI to a URI following the rules in RFC 3987
-#
-# The characters we need to enocde and escape are defined in the spec:
-#
-# iprivate =  %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
-# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
-#         / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
-#         / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
-#         / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
-#         / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
-#         / %xD0000-DFFFD / %xE1000-EFFFD
-
-escape_range = [
-    (0xA0, 0xD7FF),
-    (0xE000, 0xF8FF),
-    (0xF900, 0xFDCF),
-    (0xFDF0, 0xFFEF),
-    (0x10000, 0x1FFFD),
-    (0x20000, 0x2FFFD),
-    (0x30000, 0x3FFFD),
-    (0x40000, 0x4FFFD),
-    (0x50000, 0x5FFFD),
-    (0x60000, 0x6FFFD),
-    (0x70000, 0x7FFFD),
-    (0x80000, 0x8FFFD),
-    (0x90000, 0x9FFFD),
-    (0xA0000, 0xAFFFD),
-    (0xB0000, 0xBFFFD),
-    (0xC0000, 0xCFFFD),
-    (0xD0000, 0xDFFFD),
-    (0xE1000, 0xEFFFD),
-    (0xF0000, 0xFFFFD),
-    (0x100000, 0x10FFFD),
-]
-
-def encode(c):
-    retval = c
-    i = ord(c)
-    for low, high in escape_range:
-        if i < low:
-            break
-        if i >= low and i <= high:
-            retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')])
-            break
-    return retval
-
-
-def iri2uri(uri):
-    """Convert an IRI to a URI. Note that IRIs must be
-    passed in a unicode strings. That is, do not utf-8 encode
-    the IRI before passing it into the function."""
-    if isinstance(uri ,unicode):
-        (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
-        authority = authority.encode('idna')
-        # For each character in 'ucschar' or 'iprivate'
-        #  1. encode as utf-8
-        #  2. then %-encode each octet of that utf-8
-        uri = urlparse.urlunsplit((scheme, authority, path, query, fragment))
-        uri = "".join([encode(c) for c in uri])
-    return uri
-
-if __name__ == "__main__":
-    import unittest
-
-    class Test(unittest.TestCase):
-
-        def test_uris(self):
-            """Test that URIs are invariant under the transformation."""
-            invariant = [
-                u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
-                u"http://www.ietf.org/rfc/rfc2396.txt",
-                u"ldap://[2001:db8::7]/c=GB?objectClass?one",
-                u"mailto:John.Doe@example.com",
-                u"news:comp.infosystems.www.servers.unix",
-                u"tel:+1-816-555-1212",
-                u"telnet://192.0.2.16:80/",
-                u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
-            for uri in invariant:
-                self.assertEqual(uri, iri2uri(uri))
-
-        def test_iri(self):
-            """ Test that the right type of escaping is done for each part of the URI."""
-            self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
-            self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
-            self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
-            self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
-            self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
-
-    unittest.main()
-
-
diff --git a/lib/cacerts/urlfetch_cacerts.txt b/lib/cacerts/urlfetch_cacerts.txt
index 75f3990..6f43cc4 100644
--- a/lib/cacerts/urlfetch_cacerts.txt
+++ b/lib/cacerts/urlfetch_cacerts.txt
@@ -30824,33 +30824,6 @@
 # recreating from firefox.
 # ***** END LICENSE BLOCK *****
 
-subject= /C=US/O=American Express Company, Inc./OU=American Express Technologies/CN=American Express Global Certificate Authority
-serial=85
------BEGIN CERTIFICATE-----
-MIIEBDCCAuygAwIBAgICAIUwDQYJKoZIhvcNAQEFBQAwgZYxCzAJBgNVBAYTAlVT
-MScwJQYDVQQKEx5BbWVyaWNhbiBFeHByZXNzIENvbXBhbnksIEluYy4xJjAkBgNV
-BAsTHUFtZXJpY2FuIEV4cHJlc3MgVGVjaG5vbG9naWVzMTYwNAYDVQQDEy1BbWVy
-aWNhbiBFeHByZXNzIEdsb2JhbCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNOTgw
-ODE0MTkwNjAwWhcNMTMwODE0MjM1OTAwWjCBljELMAkGA1UEBhMCVVMxJzAlBgNV
-BAoTHkFtZXJpY2FuIEV4cHJlc3MgQ29tcGFueSwgSW5jLjEmMCQGA1UECxMdQW1l
-cmljYW4gRXhwcmVzcyBUZWNobm9sb2dpZXMxNjA0BgNVBAMTLUFtZXJpY2FuIEV4
-cHJlc3MgR2xvYmFsIENlcnRpZmljYXRlIEF1dGhvcml0eTCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBAPAkJmYu++tKc3FTiUfLJjxTkpRMysKFtQ34w1e9
-Lyofahi3V68MABb6oLaQpvcaoS5mJsdoo4qTaWa1RlYtHYLqkAwKIsKJUI0F89Sr
-c0HwzxKsKLRvFJSWWUuekHWG3+JH6+HpT0N+h8onGGaetcFAZX38YW+tm3LPqV7Y
-8/nabpEQ+ky16n4g3qk5L/WI5IpvNcYgnCuGRjMK/DFVpWusFkDpzTVZbzIEw3u1
-D3t3cPNIuypSgs6vKW3xEW9t5gcAAe+a8yYNpnkTZ6/4qxx1rJG1a75AsN6cDLFp
-hRlxkRNFyt/R/eayypaDedvFuKpbepALeFY+xteflEgR9a0CAwEAAaNaMFgwEgYD
-VR0TAQH/BAgwBgEB/wIBBTAOBgNVHQ8BAf8EBAMCAQYwFwYDVR0gBBAwDjAMBgoq
-hkiG+Q8KAQUBMBkGA1UdDgQSBBBXRzV7NicRqAj8L0Yl6yRpMA0GCSqGSIb3DQEB
-BQUAA4IBAQDHYUWoinG5vjTpIXshzVYTmNUwY+kYqkuSFb8LHbvskmnFLsNhi+gw
-RcsQRsFzOFyLGdIr80DrfHKzLh4n43WVihybLsSVBYZy0FX0oZJSeVzb9Pjc5dcS
-sUDHPIbkMWVKyjfG3nZXGWlMRmn8Kq0WN3qTrPchSy3766lQy8HRQAjaA2mHpzde
-VcHF7cTjjgwml5tcV0ty4/IDBdACOyYDQJCevgtbSQx48dVMVSng9v1MA6lUAjLR
-V1qFrEPtWzsWX6C/NdtLnnvo/+cNPDuom0lBRvVzTv+SZSGDE1Vx60k8f4gawhIo
-JaFGS0E3l3/sjvHUoZbCILZerakcHhGg
------END CERTIFICATE-----
-
 subject= /C=US/O=Equifax/OU=Equifax Premium Certificate Authority
 serial=35E1EF1E
 -----BEGIN CERTIFICATE-----
@@ -30873,54 +30846,6 @@
 XBPO6snrCVzFc+cSAdXZUwee4A+W8Iu0u0VIn4bFGVWgy5bFA/xI
 -----END CERTIFICATE-----
 
-subject= /C=US/O=GTE Corporation/OU=GTE CyberTrust Solutions, Inc./CN=GTE CyberTrust Root 4
-serial=01A8
------BEGIN CERTIFICATE-----
-MIIDVTCCAj0CAgGoMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNVBAYTAlVTMRgwFgYD
-VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
-bHV0aW9ucywgSW5jLjEeMBwGA1UEAxMVR1RFIEN5YmVyVHJ1c3QgUm9vdCA0MB4X
-DTk4MDgxMzEzNTEwMFoXDTEzMDgxMzIzNTkwMFowcDELMAkGA1UEBhMCVVMxGDAW
-BgNVBAoTD0dURSBDb3Jwb3JhdGlvbjEnMCUGA1UECxMeR1RFIEN5YmVyVHJ1c3Qg
-U29sdXRpb25zLCBJbmMuMR4wHAYDVQQDExVHVEUgQ3liZXJUcnVzdCBSb290IDQw
-ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6nSJuf9pmPDlCsaMqb9P3
-vK6sMVrXEZBHuZ0ZLvnzGyKgw+GnusT8XgqUS5haSybkH/Tc8/6OiNxsLXx3hyZQ
-wF5OqCih6hdpT03GAQ7amg0GViYVtqRdejWvje14Uob5OKuzAdPaBZaxtlCrwKGu
-F1P6QzkgcWUj223Etu2YRYPX0vbiqWv7+XXM78WrcZY16N+OkZuoEHUft84Tjmuz
-lneXGpEvxyxpmfAPKmgAmHZEG4wo0uuO9IO0f6QlXmw72cZo1WG41F4xB7VbkDVS
-V3sXIO0tuB6OiDk+Usvf8FyxZbulErSQY79xnTLB2r9QSpW+BjrEK+vNmHZETQvl
-AgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAEOvHIfJSbpliTRJPOoHO0eiedSgO5Bs
-3n+oVMPoTEAyvMjsHOXZrEC6/Iw/wnOc9GTq36ntTlvIAWDuOW1DJ/N/qgjS/k5v
-FDJNfeQ0gKU1xNZGULQ7oC1lH09lfjQoLcCndn0xyQ0zFvYgGSARULsDzHBtlrfv
-TKfaNhXPu03UltyITWyY7blz/ihXoO1k+AqBKXP29pcyhzm0ge/ZTRoHNPe6QjXe
-V9xc1vfF6wonDIGmwtBoTv2SW0iD9haKjzZb7TFsP0F6cfeSPzGkCkBM84biYcE8
-SYEtpbjvupcPvCsdm4ny0o4eTYbywqv2LZnAGyoNobZP+SxYTT19Nwo=
------END CERTIFICATE-----
-
-subject= /C=US/O=GTE Corporation/OU=GTE CyberTrust Solutions, Inc./CN=GTE CyberTrust Root 5
-serial=01B6
------BEGIN CERTIFICATE-----
-MIIDtjCCAp6gAwIBAgICAbYwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVVMx
-GDAWBgNVBAoTD0dURSBDb3Jwb3JhdGlvbjEnMCUGA1UECxMeR1RFIEN5YmVyVHJ1
-c3QgU29sdXRpb25zLCBJbmMuMR4wHAYDVQQDExVHVEUgQ3liZXJUcnVzdCBSb290
-IDUwHhcNOTgwODE0MTQ1MDAwWhcNMTMwODE0MjM1OTAwWjBwMQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xHjAcBgNVBAMTFUdURSBDeWJlclRydXN0IFJv
-b3QgNTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALwSbj+KfHqXAewe
-uzlaAvR4RKJIG457SVJ6uHtHs6+Um2+7lvoramVcuByUc76/iQoigO5X/IwFu3Cf
-lzkE2qOHXKjlyq/AM5rVN1xLrOSA0KYjYPv9ci6UncfOwgQy73hgXe2thw9FZR48
-mgqavl0dmezn8tHGehfZrZtUln/EfGC/haoVNR1A2hG87FQhKC0joajwzy3N3fx+
-D17hZQdWywe00lboXjHMGGPEhtIthc+Tkqtt/mg5+95zvYb45EZ66p8My/QZ/mO8
-0Sx7iDM29uThnAxTgWAc2i6rlqkWiBNQmbK9Vd8VMH7o5Zj7cH5stQf8/Ea30O03
-ln4y/iECAwEAAaNaMFgwEgYDVR0TAQH/BAgwBgEB/wIBBTAOBgNVHQ8BAf8EBAMC
-AQYwFwYDVR0gBBAwDjAMBgoqhkiG+GMBAgEDMBkGA1UdDgQSBBB2CkkhOEyf3vjE
-ScdxcZGdMA0GCSqGSIb3DQEBBQUAA4IBAQBBOtQYW9q43iEc4Y4J5fFoNP/elvQH
-9ac886xKsZv6kvqb7eYyIapKdsXcTzjl39WG5NXIdn2Y17HNj021kSNsi4rr6nzv
-FJTExvAfSi0ycWMrY5EmAgm2gB3t4sy4f9uHY8jh0GwmsTUdQGYQG82VVBgzYewT
-T9oT95mvPtDPjqZyorPDBZrJJ32SzH5SjbOrcG2eiZ9N6xp1wpiq1QIW1wyKvyXk
-6y28mOlYOBl8uTf+2+KZCHMGx5eDan0QAS8yuRcFSmXmL86+XlOmgumaUwqEdC2D
-ysiUFnZflGEo8IWnObvXi9moshMdVAk0JH0ggX1mfqKQdFwQxr3sqxvC
------END CERTIFICATE-----
-
 subject= /C=US/ST=California/L=Mountain View/O=Google Inc./CN=GoogleCA/emailAddress=bsy+ca@google.com
 serial=00
 -----BEGIN CERTIFICATE-----
diff --git a/oauth2client/__init__.py b/oauth2client/__init__.py
deleted file mode 100644
index 13d949f..0000000
--- a/oauth2client/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-__version__ = "1.0"
-
-GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
-GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
-GOOGLE_TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
diff --git a/oauth2client/anyjson.py b/oauth2client/anyjson.py
deleted file mode 100644
index ae21c33..0000000
--- a/oauth2client/anyjson.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utility module to import a JSON module
-
-Hides all the messy details of exactly where
-we get a simplejson module from.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-
-try: # pragma: no cover
-  # Should work for Python2.6 and higher.
-  import json as simplejson
-except ImportError: # pragma: no cover
-  try:
-    import simplejson
-  except ImportError:
-    # Try to import from django, should work on App Engine
-    from django.utils import simplejson
diff --git a/oauth2client/appengine.py b/oauth2client/appengine.py
deleted file mode 100644
index 570f0f5..0000000
--- a/oauth2client/appengine.py
+++ /dev/null
@@ -1,902 +0,0 @@
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for Google App Engine
-
-Utilities for making it easier to use OAuth 2.0 on Google App Engine.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import base64
-import cgi
-import httplib2
-import logging
-import os
-import pickle
-import time
-import urllib
-import urlparse
-
-from google.appengine.api import app_identity
-from google.appengine.api import memcache
-from google.appengine.api import users
-from google.appengine.ext import db
-from google.appengine.ext import webapp
-from google.appengine.ext.webapp.util import login_required
-from google.appengine.ext.webapp.util import run_wsgi_app
-from oauth2client import GOOGLE_AUTH_URI
-from oauth2client import GOOGLE_REVOKE_URI
-from oauth2client import GOOGLE_TOKEN_URI
-from oauth2client import clientsecrets
-from oauth2client import util
-from oauth2client import xsrfutil
-from oauth2client.anyjson import simplejson
-from oauth2client.client import AccessTokenRefreshError
-from oauth2client.client import AssertionCredentials
-from oauth2client.client import Credentials
-from oauth2client.client import Flow
-from oauth2client.client import OAuth2WebServerFlow
-from oauth2client.client import Storage
-
-# TODO(dhermes): Resolve import issue.
-# This is a temporary fix for a Google internal issue.
-try:
-  from google.appengine.ext import ndb
-except ImportError:
-  ndb = None
-
-try:
-  from urlparse import parse_qsl
-except ImportError:
-  from cgi import parse_qsl
-
-logger = logging.getLogger(__name__)
-
-OAUTH2CLIENT_NAMESPACE = 'oauth2client#ns'
-
-XSRF_MEMCACHE_ID = 'xsrf_secret_key'
-
-
-def _safe_html(s):
-  """Escape text to make it safe to display.
-
-  Args:
-    s: string, The text to escape.
-
-  Returns:
-    The escaped text as a string.
-  """
-  return cgi.escape(s, quote=1).replace("'", '&#39;')
-
-
-class InvalidClientSecretsError(Exception):
-  """The client_secrets.json file is malformed or missing required fields."""
-
-
-class InvalidXsrfTokenError(Exception):
-  """The XSRF token is invalid or expired."""
-
-
-class SiteXsrfSecretKey(db.Model):
-  """Storage for the sites XSRF secret key.
-
-  There will only be one instance stored of this model, the one used for the
-  site.
-  """
-  secret = db.StringProperty()
-
-if ndb is not None:
-  class SiteXsrfSecretKeyNDB(ndb.Model):
-    """NDB Model for storage for the sites XSRF secret key.
-
-    Since this model uses the same kind as SiteXsrfSecretKey, it can be used
-    interchangeably. This simply provides an NDB model for interacting with the
-    same data the DB model interacts with.
-
-    There should only be one instance stored of this model, the one used for the
-    site.
-    """
-    secret = ndb.StringProperty()
-
-    @classmethod
-    def _get_kind(cls):
-      """Return the kind name for this class."""
-      return 'SiteXsrfSecretKey'
-
-
-def _generate_new_xsrf_secret_key():
-  """Returns a random XSRF secret key.
-  """
-  return os.urandom(16).encode("hex")
-
-
-def xsrf_secret_key():
-  """Return the secret key for use for XSRF protection.
-
-  If the Site entity does not have a secret key, this method will also create
-  one and persist it.
-
-  Returns:
-    The secret key.
-  """
-  secret = memcache.get(XSRF_MEMCACHE_ID, namespace=OAUTH2CLIENT_NAMESPACE)
-  if not secret:
-    # Load the one and only instance of SiteXsrfSecretKey.
-    model = SiteXsrfSecretKey.get_or_insert(key_name='site')
-    if not model.secret:
-      model.secret = _generate_new_xsrf_secret_key()
-      model.put()
-    secret = model.secret
-    memcache.add(XSRF_MEMCACHE_ID, secret, namespace=OAUTH2CLIENT_NAMESPACE)
-
-  return str(secret)
-
-
-class AppAssertionCredentials(AssertionCredentials):
-  """Credentials object for App Engine Assertion Grants
-
-  This object will allow an App Engine application to identify itself to Google
-  and other OAuth 2.0 servers that can verify assertions. It can be used for the
-  purpose of accessing data stored under an account assigned to the App Engine
-  application itself.
-
-  This credential does not require a flow to instantiate because it represents
-  a two legged flow, and therefore has all of the required information to
-  generate and refresh its own access tokens.
-  """
-
-  @util.positional(2)
-  def __init__(self, scope, **kwargs):
-    """Constructor for AppAssertionCredentials
-
-    Args:
-      scope: string or iterable of strings, scope(s) of the credentials being
-        requested.
-    """
-    self.scope = util.scopes_to_string(scope)
-
-    # Assertion type is no longer used, but still in the parent class signature.
-    super(AppAssertionCredentials, self).__init__(None)
-
-  @classmethod
-  def from_json(cls, json):
-    data = simplejson.loads(json)
-    return AppAssertionCredentials(data['scope'])
-
-  def _refresh(self, http_request):
-    """Refreshes the access_token.
-
-    Since the underlying App Engine app_identity implementation does its own
-    caching we can skip all the storage hoops and just to a refresh using the
-    API.
-
-    Args:
-      http_request: callable, a callable that matches the method signature of
-        httplib2.Http.request, used to make the refresh request.
-
-    Raises:
-      AccessTokenRefreshError: When the refresh fails.
-    """
-    try:
-      scopes = self.scope.split()
-      (token, _) = app_identity.get_access_token(scopes)
-    except app_identity.Error, e:
-      raise AccessTokenRefreshError(str(e))
-    self.access_token = token
-
-
-class FlowProperty(db.Property):
-  """App Engine datastore Property for Flow.
-
-  Utility property that allows easy storage and retrieval of an
-  oauth2client.Flow"""
-
-  # Tell what the user type is.
-  data_type = Flow
-
-  # For writing to datastore.
-  def get_value_for_datastore(self, model_instance):
-    flow = super(FlowProperty,
-                 self).get_value_for_datastore(model_instance)
-    return db.Blob(pickle.dumps(flow))
-
-  # For reading from datastore.
-  def make_value_from_datastore(self, value):
-    if value is None:
-      return None
-    return pickle.loads(value)
-
-  def validate(self, value):
-    if value is not None and not isinstance(value, Flow):
-      raise db.BadValueError('Property %s must be convertible '
-                          'to a FlowThreeLegged instance (%s)' %
-                          (self.name, value))
-    return super(FlowProperty, self).validate(value)
-
-  def empty(self, value):
-    return not value
-
-
-if ndb is not None:
-  class FlowNDBProperty(ndb.PickleProperty):
-    """App Engine NDB datastore Property for Flow.
-
-    Serves the same purpose as the DB FlowProperty, but for NDB models. Since
-    PickleProperty inherits from BlobProperty, the underlying representation of
-    the data in the datastore will be the same as in the DB case.
-
-    Utility property that allows easy storage and retrieval of an
-    oauth2client.Flow
-    """
-
-    def _validate(self, value):
-      """Validates a value as a proper Flow object.
-
-      Args:
-        value: A value to be set on the property.
-
-      Raises:
-        TypeError if the value is not an instance of Flow.
-      """
-      logger.info('validate: Got type %s', type(value))
-      if value is not None and not isinstance(value, Flow):
-        raise TypeError('Property %s must be convertible to a flow '
-                        'instance; received: %s.' % (self._name, value))
-
-
-class CredentialsProperty(db.Property):
-  """App Engine datastore Property for Credentials.
-
-  Utility property that allows easy storage and retrieval of
-  oath2client.Credentials
-  """
-
-  # Tell what the user type is.
-  data_type = Credentials
-
-  # For writing to datastore.
-  def get_value_for_datastore(self, model_instance):
-    logger.info("get: Got type " + str(type(model_instance)))
-    cred = super(CredentialsProperty,
-                 self).get_value_for_datastore(model_instance)
-    if cred is None:
-      cred = ''
-    else:
-      cred = cred.to_json()
-    return db.Blob(cred)
-
-  # For reading from datastore.
-  def make_value_from_datastore(self, value):
-    logger.info("make: Got type " + str(type(value)))
-    if value is None:
-      return None
-    if len(value) == 0:
-      return None
-    try:
-      credentials = Credentials.new_from_json(value)
-    except ValueError:
-      credentials = None
-    return credentials
-
-  def validate(self, value):
-    value = super(CredentialsProperty, self).validate(value)
-    logger.info("validate: Got type " + str(type(value)))
-    if value is not None and not isinstance(value, Credentials):
-      raise db.BadValueError('Property %s must be convertible '
-                          'to a Credentials instance (%s)' %
-                            (self.name, value))
-    #if value is not None and not isinstance(value, Credentials):
-    #  return None
-    return value
-
-
-if ndb is not None:
-  # TODO(dhermes): Turn this into a JsonProperty and overhaul the Credentials
-  #                and subclass mechanics to use new_from_dict, to_dict,
-  #                from_dict, etc.
-  class CredentialsNDBProperty(ndb.BlobProperty):
-    """App Engine NDB datastore Property for Credentials.
-
-    Serves the same purpose as the DB CredentialsProperty, but for NDB models.
-    Since CredentialsProperty stores data as a blob and this inherits from
-    BlobProperty, the data in the datastore will be the same as in the DB case.
-
-    Utility property that allows easy storage and retrieval of Credentials and
-    subclasses.
-    """
-    def _validate(self, value):
-      """Validates a value as a proper credentials object.
-
-      Args:
-        value: A value to be set on the property.
-
-      Raises:
-        TypeError if the value is not an instance of Credentials.
-      """
-      logger.info('validate: Got type %s', type(value))
-      if value is not None and not isinstance(value, Credentials):
-        raise TypeError('Property %s must be convertible to a credentials '
-                        'instance; received: %s.' % (self._name, value))
-
-    def _to_base_type(self, value):
-      """Converts our validated value to a JSON serialized string.
-
-      Args:
-        value: A value to be set in the datastore.
-
-      Returns:
-        A JSON serialized version of the credential, else '' if value is None.
-      """
-      if value is None:
-        return ''
-      else:
-        return value.to_json()
-
-    def _from_base_type(self, value):
-      """Converts our stored JSON string back to the desired type.
-
-      Args:
-        value: A value from the datastore to be converted to the desired type.
-
-      Returns:
-        A deserialized Credentials (or subclass) object, else None if the
-            value can't be parsed.
-      """
-      if not value:
-        return None
-      try:
-        # Uses the from_json method of the implied class of value
-        credentials = Credentials.new_from_json(value)
-      except ValueError:
-        credentials = None
-      return credentials
-
-
-class StorageByKeyName(Storage):
-  """Store and retrieve a credential to and from the App Engine datastore.
-
-  This Storage helper presumes the Credentials have been stored as a
-  CredentialsProperty or CredentialsNDBProperty on a datastore model class, and
-  that entities are stored by key_name.
-  """
-
-  @util.positional(4)
-  def __init__(self, model, key_name, property_name, cache=None):
-    """Constructor for Storage.
-
-    Args:
-      model: db.Model or ndb.Model, model class
-      key_name: string, key name for the entity that has the credentials
-      property_name: string, name of the property that is a CredentialsProperty
-        or CredentialsNDBProperty.
-      cache: memcache, a write-through cache to put in front of the datastore.
-        If the model you are using is an NDB model, using a cache will be
-        redundant since the model uses an instance cache and memcache for you.
-    """
-    self._model = model
-    self._key_name = key_name
-    self._property_name = property_name
-    self._cache = cache
-
-  def _is_ndb(self):
-    """Determine whether the model of the instance is an NDB model.
-
-    Returns:
-      Boolean indicating whether or not the model is an NDB or DB model.
-    """
-    # issubclass will fail if one of the arguments is not a class, only need
-    # worry about new-style classes since ndb and db models are new-style
-    if isinstance(self._model, type):
-      if ndb is not None and issubclass(self._model, ndb.Model):
-        return True
-      elif issubclass(self._model, db.Model):
-        return False
-
-    raise TypeError('Model class not an NDB or DB model: %s.' % (self._model,))
-
-  def _get_entity(self):
-    """Retrieve entity from datastore.
-
-    Uses a different model method for db or ndb models.
-
-    Returns:
-      Instance of the model corresponding to the current storage object
-          and stored using the key name of the storage object.
-    """
-    if self._is_ndb():
-      return self._model.get_by_id(self._key_name)
-    else:
-      return self._model.get_by_key_name(self._key_name)
-
-  def _delete_entity(self):
-    """Delete entity from datastore.
-
-    Attempts to delete using the key_name stored on the object, whether or not
-    the given key is in the datastore.
-    """
-    if self._is_ndb():
-      ndb.Key(self._model, self._key_name).delete()
-    else:
-      entity_key = db.Key.from_path(self._model.kind(), self._key_name)
-      db.delete(entity_key)
-
-  def locked_get(self):
-    """Retrieve Credential from datastore.
-
-    Returns:
-      oauth2client.Credentials
-    """
-    if self._cache:
-      json = self._cache.get(self._key_name)
-      if json:
-        return Credentials.new_from_json(json)
-
-    credentials = None
-    entity = self._get_entity()
-    if entity is not None:
-      credentials = getattr(entity, self._property_name)
-      if credentials and hasattr(credentials, 'set_store'):
-        credentials.set_store(self)
-        if self._cache:
-          self._cache.set(self._key_name, credentials.to_json())
-
-    return credentials
-
-  def locked_put(self, credentials):
-    """Write a Credentials to the datastore.
-
-    Args:
-      credentials: Credentials, the credentials to store.
-    """
-    entity = self._model.get_or_insert(self._key_name)
-    setattr(entity, self._property_name, credentials)
-    entity.put()
-    if self._cache:
-      self._cache.set(self._key_name, credentials.to_json())
-
-  def locked_delete(self):
-    """Delete Credential from datastore."""
-
-    if self._cache:
-      self._cache.delete(self._key_name)
-
-    self._delete_entity()
-
-
-class CredentialsModel(db.Model):
-  """Storage for OAuth 2.0 Credentials
-
-  Storage of the model is keyed by the user.user_id().
-  """
-  credentials = CredentialsProperty()
-
-
-if ndb is not None:
-  class CredentialsNDBModel(ndb.Model):
-    """NDB Model for storage of OAuth 2.0 Credentials
-
-    Since this model uses the same kind as CredentialsModel and has a property
-    which can serialize and deserialize Credentials correctly, it can be used
-    interchangeably with a CredentialsModel to access, insert and delete the
-    same entities. This simply provides an NDB model for interacting with the
-    same data the DB model interacts with.
-
-    Storage of the model is keyed by the user.user_id().
-    """
-    credentials = CredentialsNDBProperty()
-
-    @classmethod
-    def _get_kind(cls):
-      """Return the kind name for this class."""
-      return 'CredentialsModel'
-
-
-def _build_state_value(request_handler, user):
-  """Composes the value for the 'state' parameter.
-
-  Packs the current request URI and an XSRF token into an opaque string that
-  can be passed to the authentication server via the 'state' parameter.
-
-  Args:
-    request_handler: webapp.RequestHandler, The request.
-    user: google.appengine.api.users.User, The current user.
-
-  Returns:
-    The state value as a string.
-  """
-  uri = request_handler.request.url
-  token = xsrfutil.generate_token(xsrf_secret_key(), user.user_id(),
-                                  action_id=str(uri))
-  return  uri + ':' + token
-
-
-def _parse_state_value(state, user):
-  """Parse the value of the 'state' parameter.
-
-  Parses the value and validates the XSRF token in the state parameter.
-
-  Args:
-    state: string, The value of the state parameter.
-    user: google.appengine.api.users.User, The current user.
-
-  Raises:
-    InvalidXsrfTokenError: if the XSRF token is invalid.
-
-  Returns:
-    The redirect URI.
-  """
-  uri, token = state.rsplit(':', 1)
-  if not xsrfutil.validate_token(xsrf_secret_key(), token, user.user_id(),
-                                 action_id=uri):
-    raise InvalidXsrfTokenError()
-
-  return uri
-
-
-class OAuth2Decorator(object):
-  """Utility for making OAuth 2.0 easier.
-
-  Instantiate and then use with oauth_required or oauth_aware
-  as decorators on webapp.RequestHandler methods.
-
-  Example:
-
-    decorator = OAuth2Decorator(
-        client_id='837...ent.com',
-        client_secret='Qh...wwI',
-        scope='https://www.googleapis.com/auth/plus')
-
-
-    class MainHandler(webapp.RequestHandler):
-
-      @decorator.oauth_required
-      def get(self):
-        http = decorator.http()
-        # http is authorized with the user's Credentials and can be used
-        # in API calls
-
-  """
-
-  @util.positional(4)
-  def __init__(self, client_id, client_secret, scope,
-               auth_uri=GOOGLE_AUTH_URI,
-               token_uri=GOOGLE_TOKEN_URI,
-               revoke_uri=GOOGLE_REVOKE_URI,
-               user_agent=None,
-               message=None,
-               callback_path='/oauth2callback',
-               token_response_param=None,
-               **kwargs):
-
-    """Constructor for OAuth2Decorator
-
-    Args:
-      client_id: string, client identifier.
-      client_secret: string client secret.
-      scope: string or iterable of strings, scope(s) of the credentials being
-        requested.
-      auth_uri: string, URI for authorization endpoint. For convenience
-        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-      token_uri: string, URI for token endpoint. For convenience
-        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-      revoke_uri: string, URI for revoke endpoint. For convenience
-        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-      user_agent: string, User agent of your application, default to None.
-      message: Message to display if there are problems with the OAuth 2.0
-        configuration. The message may contain HTML and will be presented on the
-        web interface for any method that uses the decorator.
-      callback_path: string, The absolute path to use as the callback URI. Note
-        that this must match up with the URI given when registering the
-        application in the APIs Console.
-      token_response_param: string. If provided, the full JSON response
-        to the access token request will be encoded and included in this query
-        parameter in the callback URI. This is useful with providers (e.g.
-        wordpress.com) that include extra fields that the client may want.
-      **kwargs: dict, Keyword arguments are be passed along as kwargs to the
-        OAuth2WebServerFlow constructor.
-    """
-    self.flow = None
-    self.credentials = None
-    self._client_id = client_id
-    self._client_secret = client_secret
-    self._scope = util.scopes_to_string(scope)
-    self._auth_uri = auth_uri
-    self._token_uri = token_uri
-    self._revoke_uri = revoke_uri
-    self._user_agent = user_agent
-    self._kwargs = kwargs
-    self._message = message
-    self._in_error = False
-    self._callback_path = callback_path
-    self._token_response_param = token_response_param
-
-  def _display_error_message(self, request_handler):
-    request_handler.response.out.write('<html><body>')
-    request_handler.response.out.write(_safe_html(self._message))
-    request_handler.response.out.write('</body></html>')
-
-  def oauth_required(self, method):
-    """Decorator that starts the OAuth 2.0 dance.
-
-    Starts the OAuth dance for the logged in user if they haven't already
-    granted access for this application.
-
-    Args:
-      method: callable, to be decorated method of a webapp.RequestHandler
-        instance.
-    """
-
-    def check_oauth(request_handler, *args, **kwargs):
-      if self._in_error:
-        self._display_error_message(request_handler)
-        return
-
-      user = users.get_current_user()
-      # Don't use @login_decorator as this could be used in a POST request.
-      if not user:
-        request_handler.redirect(users.create_login_url(
-            request_handler.request.uri))
-        return
-
-      self._create_flow(request_handler)
-
-      # Store the request URI in 'state' so we can use it later
-      self.flow.params['state'] = _build_state_value(request_handler, user)
-      self.credentials = StorageByKeyName(
-          CredentialsModel, user.user_id(), 'credentials').get()
-
-      if not self.has_credentials():
-        return request_handler.redirect(self.authorize_url())
-      try:
-        return method(request_handler, *args, **kwargs)
-      except AccessTokenRefreshError:
-        return request_handler.redirect(self.authorize_url())
-
-    return check_oauth
-
-  def _create_flow(self, request_handler):
-    """Create the Flow object.
-
-    The Flow is calculated lazily since we don't know where this app is
-    running until it receives a request, at which point redirect_uri can be
-    calculated and then the Flow object can be constructed.
-
-    Args:
-      request_handler: webapp.RequestHandler, the request handler.
-    """
-    if self.flow is None:
-      redirect_uri = request_handler.request.relative_url(
-          self._callback_path) # Usually /oauth2callback
-      self.flow = OAuth2WebServerFlow(self._client_id, self._client_secret,
-                                      self._scope, redirect_uri=redirect_uri,
-                                      user_agent=self._user_agent,
-                                      auth_uri=self._auth_uri,
-                                      token_uri=self._token_uri,
-                                      revoke_uri=self._revoke_uri,
-                                      **self._kwargs)
-
-  def oauth_aware(self, method):
-    """Decorator that sets up for OAuth 2.0 dance, but doesn't do it.
-
-    Does all the setup for the OAuth dance, but doesn't initiate it.
-    This decorator is useful if you want to create a page that knows
-    whether or not the user has granted access to this application.
-    From within a method decorated with @oauth_aware the has_credentials()
-    and authorize_url() methods can be called.
-
-    Args:
-      method: callable, to be decorated method of a webapp.RequestHandler
-        instance.
-    """
-
-    def setup_oauth(request_handler, *args, **kwargs):
-      if self._in_error:
-        self._display_error_message(request_handler)
-        return
-
-      user = users.get_current_user()
-      # Don't use @login_decorator as this could be used in a POST request.
-      if not user:
-        request_handler.redirect(users.create_login_url(
-            request_handler.request.uri))
-        return
-
-      self._create_flow(request_handler)
-
-      self.flow.params['state'] = _build_state_value(request_handler, user)
-      self.credentials = StorageByKeyName(
-          CredentialsModel, user.user_id(), 'credentials').get()
-      return method(request_handler, *args, **kwargs)
-    return setup_oauth
-
-  def has_credentials(self):
-    """True if for the logged in user there are valid access Credentials.
-
-    Must only be called from with a webapp.RequestHandler subclassed method
-    that had been decorated with either @oauth_required or @oauth_aware.
-    """
-    return self.credentials is not None and not self.credentials.invalid
-
-  def authorize_url(self):
-    """Returns the URL to start the OAuth dance.
-
-    Must only be called from with a webapp.RequestHandler subclassed method
-    that had been decorated with either @oauth_required or @oauth_aware.
-    """
-    url = self.flow.step1_get_authorize_url()
-    return str(url)
-
-  def http(self):
-    """Returns an authorized http instance.
-
-    Must only be called from within an @oauth_required decorated method, or
-    from within an @oauth_aware decorated method where has_credentials()
-    returns True.
-    """
-    return self.credentials.authorize(httplib2.Http())
-
-  @property
-  def callback_path(self):
-    """The absolute path where the callback will occur.
-
-    Note this is the absolute path, not the absolute URI, that will be
-    calculated by the decorator at runtime. See callback_handler() for how this
-    should be used.
-
-    Returns:
-      The callback path as a string.
-    """
-    return self._callback_path
-
-
-  def callback_handler(self):
-    """RequestHandler for the OAuth 2.0 redirect callback.
-
-    Usage:
-       app = webapp.WSGIApplication([
-         ('/index', MyIndexHandler),
-         ...,
-         (decorator.callback_path, decorator.callback_handler())
-       ])
-
-    Returns:
-      A webapp.RequestHandler that handles the redirect back from the
-      server during the OAuth 2.0 dance.
-    """
-    decorator = self
-
-    class OAuth2Handler(webapp.RequestHandler):
-      """Handler for the redirect_uri of the OAuth 2.0 dance."""
-
-      @login_required
-      def get(self):
-        error = self.request.get('error')
-        if error:
-          errormsg = self.request.get('error_description', error)
-          self.response.out.write(
-              'The authorization request failed: %s' % _safe_html(errormsg))
-        else:
-          user = users.get_current_user()
-          decorator._create_flow(self)
-          credentials = decorator.flow.step2_exchange(self.request.params)
-          StorageByKeyName(
-              CredentialsModel, user.user_id(), 'credentials').put(credentials)
-          redirect_uri = _parse_state_value(str(self.request.get('state')),
-                                            user)
-
-          if decorator._token_response_param and credentials.token_response:
-            resp_json = simplejson.dumps(credentials.token_response)
-            redirect_uri = util._add_query_parameter(
-              redirect_uri, decorator._token_response_param, resp_json)
-
-          self.redirect(redirect_uri)
-
-    return OAuth2Handler
-
-  def callback_application(self):
-    """WSGI application for handling the OAuth 2.0 redirect callback.
-
-    If you need finer grained control use `callback_handler` which returns just
-    the webapp.RequestHandler.
-
-    Returns:
-      A webapp.WSGIApplication that handles the redirect back from the
-      server during the OAuth 2.0 dance.
-    """
-    return webapp.WSGIApplication([
-        (self.callback_path, self.callback_handler())
-        ])
-
-
-class OAuth2DecoratorFromClientSecrets(OAuth2Decorator):
-  """An OAuth2Decorator that builds from a clientsecrets file.
-
-  Uses a clientsecrets file as the source for all the information when
-  constructing an OAuth2Decorator.
-
-  Example:
-
-    decorator = OAuth2DecoratorFromClientSecrets(
-      os.path.join(os.path.dirname(__file__), 'client_secrets.json')
-      scope='https://www.googleapis.com/auth/plus')
-
-
-    class MainHandler(webapp.RequestHandler):
-
-      @decorator.oauth_required
-      def get(self):
-        http = decorator.http()
-        # http is authorized with the user's Credentials and can be used
-        # in API calls
-  """
-
-  @util.positional(3)
-  def __init__(self, filename, scope, message=None, cache=None):
-    """Constructor
-
-    Args:
-      filename: string, File name of client secrets.
-      scope: string or iterable of strings, scope(s) of the credentials being
-        requested.
-      message: string, A friendly string to display to the user if the
-        clientsecrets file is missing or invalid. The message may contain HTML
-        and will be presented on the web interface for any method that uses the
-        decorator.
-      cache: An optional cache service client that implements get() and set()
-        methods. See clientsecrets.loadfile() for details.
-    """
-    client_type, client_info = clientsecrets.loadfile(filename, cache=cache)
-    if client_type not in [
-        clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED]:
-      raise InvalidClientSecretsError(
-          'OAuth2Decorator doesn\'t support this OAuth 2.0 flow.')
-    constructor_kwargs = {
-      'auth_uri': client_info['auth_uri'],
-      'token_uri': client_info['token_uri'],
-      'message': message,
-    }
-    revoke_uri = client_info.get('revoke_uri')
-    if revoke_uri is not None:
-      constructor_kwargs['revoke_uri'] = revoke_uri
-    super(OAuth2DecoratorFromClientSecrets, self).__init__(
-        client_info['client_id'], client_info['client_secret'],
-        scope, **constructor_kwargs)
-    if message is not None:
-      self._message = message
-    else:
-      self._message = 'Please configure your application for OAuth 2.0.'
-
-
-@util.positional(2)
-def oauth2decorator_from_clientsecrets(filename, scope,
-                                       message=None, cache=None):
-  """Creates an OAuth2Decorator populated from a clientsecrets file.
-
-  Args:
-    filename: string, File name of client secrets.
-    scope: string or list of strings, scope(s) of the credentials being
-      requested.
-    message: string, A friendly string to display to the user if the
-      clientsecrets file is missing or invalid. The message may contain HTML and
-      will be presented on the web interface for any method that uses the
-      decorator.
-    cache: An optional cache service client that implements get() and set()
-      methods. See clientsecrets.loadfile() for details.
-
-  Returns: An OAuth2Decorator
-
-  """
-  return OAuth2DecoratorFromClientSecrets(filename, scope,
-                                          message=message, cache=cache)
diff --git a/oauth2client/client.py b/oauth2client/client.py
deleted file mode 100644
index 6b580a0..0000000
--- a/oauth2client/client.py
+++ /dev/null
@@ -1,1364 +0,0 @@
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""An OAuth 2.0 client.
-
-Tools for interacting with OAuth 2.0 protected resources.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import base64
-import clientsecrets
-import copy
-import datetime
-import httplib2
-import logging
-import os
-import sys
-import time
-import urllib
-import urlparse
-
-from oauth2client import GOOGLE_AUTH_URI
-from oauth2client import GOOGLE_REVOKE_URI
-from oauth2client import GOOGLE_TOKEN_URI
-from oauth2client import util
-from oauth2client.anyjson import simplejson
-
-HAS_OPENSSL = False
-HAS_CRYPTO = False
-try:
-  from oauth2client import crypt
-  HAS_CRYPTO = True
-  if crypt.OpenSSLVerifier is not None:
-    HAS_OPENSSL = True
-except ImportError:
-  pass
-
-try:
-  from urlparse import parse_qsl
-except ImportError:
-  from cgi import parse_qsl
-
-logger = logging.getLogger(__name__)
-
-# Expiry is stored in RFC3339 UTC format
-EXPIRY_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
-
-# Which certs to use to validate id_tokens received.
-ID_TOKEN_VERIFICATON_CERTS = 'https://www.googleapis.com/oauth2/v1/certs'
-
-# Constant to use for the out of band OAuth 2.0 flow.
-OOB_CALLBACK_URN = 'urn:ietf:wg:oauth:2.0:oob'
-
-# Google Data client libraries may need to set this to [401, 403].
-REFRESH_STATUS_CODES = [401]
-
-
-class Error(Exception):
-  """Base error for this module."""
-
-
-class FlowExchangeError(Error):
-  """Error trying to exchange an authorization grant for an access token."""
-
-
-class AccessTokenRefreshError(Error):
-  """Error trying to refresh an expired access token."""
-
-
-class TokenRevokeError(Error):
-  """Error trying to revoke a token."""
-
-
-class UnknownClientSecretsFlowError(Error):
-  """The client secrets file called for an unknown type of OAuth 2.0 flow. """
-
-
-class AccessTokenCredentialsError(Error):
-  """Having only the access_token means no refresh is possible."""
-
-
-class VerifyJwtTokenError(Error):
-  """Could on retrieve certificates for validation."""
-
-
-class NonAsciiHeaderError(Error):
-  """Header names and values must be ASCII strings."""
-
-
-def _abstract():
-  raise NotImplementedError('You need to override this function')
-
-
-class MemoryCache(object):
-  """httplib2 Cache implementation which only caches locally."""
-
-  def __init__(self):
-    self.cache = {}
-
-  def get(self, key):
-    return self.cache.get(key)
-
-  def set(self, key, value):
-    self.cache[key] = value
-
-  def delete(self, key):
-    self.cache.pop(key, None)
-
-
-class Credentials(object):
-  """Base class for all Credentials objects.
-
-  Subclasses must define an authorize() method that applies the credentials to
-  an HTTP transport.
-
-  Subclasses must also specify a classmethod named 'from_json' that takes a JSON
-  string as input and returns an instaniated Credentials object.
-  """
-
-  NON_SERIALIZED_MEMBERS = ['store']
-
-  def authorize(self, http):
-    """Take an httplib2.Http instance (or equivalent) and authorizes it.
-
-    Authorizes it for the set of credentials, usually by replacing
-    http.request() with a method that adds in the appropriate headers and then
-    delegates to the original Http.request() method.
-
-    Args:
-      http: httplib2.Http, an http object to be used to make the refresh
-        request.
-    """
-    _abstract()
-
-  def refresh(self, http):
-    """Forces a refresh of the access_token.
-
-    Args:
-      http: httplib2.Http, an http object to be used to make the refresh
-        request.
-    """
-    _abstract()
-
-  def revoke(self, http):
-    """Revokes a refresh_token and makes the credentials void.
-
-    Args:
-      http: httplib2.Http, an http object to be used to make the revoke
-        request.
-    """
-    _abstract()
-
-  def apply(self, headers):
-    """Add the authorization to the headers.
-
-    Args:
-      headers: dict, the headers to add the Authorization header to.
-    """
-    _abstract()
-
-  def _to_json(self, strip):
-    """Utility function that creates JSON repr. of a Credentials object.
-
-    Args:
-      strip: array, An array of names of members to not include in the JSON.
-
-    Returns:
-       string, a JSON representation of this instance, suitable to pass to
-       from_json().
-    """
-    t = type(self)
-    d = copy.copy(self.__dict__)
-    for member in strip:
-      if member in d:
-        del d[member]
-    if 'token_expiry' in d and isinstance(d['token_expiry'], datetime.datetime):
-      d['token_expiry'] = d['token_expiry'].strftime(EXPIRY_FORMAT)
-    # Add in information we will need later to reconsistitue this instance.
-    d['_class'] = t.__name__
-    d['_module'] = t.__module__
-    return simplejson.dumps(d)
-
-  def to_json(self):
-    """Creating a JSON representation of an instance of Credentials.
-
-    Returns:
-       string, a JSON representation of this instance, suitable to pass to
-       from_json().
-    """
-    return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
-
-  @classmethod
-  def new_from_json(cls, s):
-    """Utility class method to instantiate a Credentials subclass from a JSON
-    representation produced by to_json().
-
-    Args:
-      s: string, JSON from to_json().
-
-    Returns:
-      An instance of the subclass of Credentials that was serialized with
-      to_json().
-    """
-    data = simplejson.loads(s)
-    # Find and call the right classmethod from_json() to restore the object.
-    module = data['_module']
-    try:
-      m = __import__(module)
-    except ImportError:
-      # In case there's an object from the old package structure, update it
-      module = module.replace('.apiclient', '')
-      m = __import__(module)
-
-    m = __import__(module, fromlist=module.split('.')[:-1])
-    kls = getattr(m, data['_class'])
-    from_json = getattr(kls, 'from_json')
-    return from_json(s)
-
-  @classmethod
-  def from_json(cls, s):
-    """Instantiate a Credentials object from a JSON description of it.
-
-    The JSON should have been produced by calling .to_json() on the object.
-
-    Args:
-      data: dict, A deserialized JSON object.
-
-    Returns:
-      An instance of a Credentials subclass.
-    """
-    return Credentials()
-
-
-class Flow(object):
-  """Base class for all Flow objects."""
-  pass
-
-
-class Storage(object):
-  """Base class for all Storage objects.
-
-  Store and retrieve a single credential. This class supports locking
-  such that multiple processes and threads can operate on a single
-  store.
-  """
-
-  def acquire_lock(self):
-    """Acquires any lock necessary to access this Storage.
-
-    This lock is not reentrant.
-    """
-    pass
-
-  def release_lock(self):
-    """Release the Storage lock.
-
-    Trying to release a lock that isn't held will result in a
-    RuntimeError.
-    """
-    pass
-
-  def locked_get(self):
-    """Retrieve credential.
-
-    The Storage lock must be held when this is called.
-
-    Returns:
-      oauth2client.client.Credentials
-    """
-    _abstract()
-
-  def locked_put(self, credentials):
-    """Write a credential.
-
-    The Storage lock must be held when this is called.
-
-    Args:
-      credentials: Credentials, the credentials to store.
-    """
-    _abstract()
-
-  def locked_delete(self):
-    """Delete a credential.
-
-    The Storage lock must be held when this is called.
-    """
-    _abstract()
-
-  def get(self):
-    """Retrieve credential.
-
-    The Storage lock must *not* be held when this is called.
-
-    Returns:
-      oauth2client.client.Credentials
-    """
-    self.acquire_lock()
-    try:
-      return self.locked_get()
-    finally:
-      self.release_lock()
-
-  def put(self, credentials):
-    """Write a credential.
-
-    The Storage lock must be held when this is called.
-
-    Args:
-      credentials: Credentials, the credentials to store.
-    """
-    self.acquire_lock()
-    try:
-      self.locked_put(credentials)
-    finally:
-      self.release_lock()
-
-  def delete(self):
-    """Delete credential.
-
-    Frees any resources associated with storing the credential.
-    The Storage lock must *not* be held when this is called.
-
-    Returns:
-      None
-    """
-    self.acquire_lock()
-    try:
-      return self.locked_delete()
-    finally:
-      self.release_lock()
-
-
-def clean_headers(headers):
-  """Forces header keys and values to be strings, i.e not unicode.
-
-  The httplib module just concats the header keys and values in a way that may
-  make the message header a unicode string, which, if it then tries to
-  contatenate to a binary request body may result in a unicode decode error.
-
-  Args:
-    headers: dict, A dictionary of headers.
-
-  Returns:
-    The same dictionary but with all the keys converted to strings.
-  """
-  clean = {}
-  try:
-    for k, v in headers.iteritems():
-      clean[str(k)] = str(v)
-  except UnicodeEncodeError:
-    raise NonAsciiHeaderError(k + ': ' + v)
-  return clean
-
-
-def _update_query_params(uri, params):
-  """Updates a URI with new query parameters.
-
-  Args:
-    uri: string, A valid URI, with potential existing query parameters.
-    params: dict, A dictionary of query parameters.
-
-  Returns:
-    The same URI but with the new query parameters added.
-  """
-  parts = list(urlparse.urlparse(uri))
-  query_params = dict(parse_qsl(parts[4])) # 4 is the index of the query part
-  query_params.update(params)
-  parts[4] = urllib.urlencode(query_params)
-  return urlparse.urlunparse(parts)
-
-
-class OAuth2Credentials(Credentials):
-  """Credentials object for OAuth 2.0.
-
-  Credentials can be applied to an httplib2.Http object using the authorize()
-  method, which then adds the OAuth 2.0 access token to each request.
-
-  OAuth2Credentials objects may be safely pickled and unpickled.
-  """
-
-  @util.positional(8)
-  def __init__(self, access_token, client_id, client_secret, refresh_token,
-               token_expiry, token_uri, user_agent, revoke_uri=None,
-               id_token=None, token_response=None):
-    """Create an instance of OAuth2Credentials.
-
-    This constructor is not usually called by the user, instead
-    OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow.
-
-    Args:
-      access_token: string, access token.
-      client_id: string, client identifier.
-      client_secret: string, client secret.
-      refresh_token: string, refresh token.
-      token_expiry: datetime, when the access_token expires.
-      token_uri: string, URI of token endpoint.
-      user_agent: string, The HTTP User-Agent to provide for this application.
-      revoke_uri: string, URI for revoke endpoint. Defaults to None; a token
-        can't be revoked if this is None.
-      id_token: object, The identity of the resource owner.
-      token_response: dict, the decoded response to the token request. None
-        if a token hasn't been requested yet. Stored because some providers
-        (e.g. wordpress.com) include extra fields that clients may want.
-
-    Notes:
-      store: callable, A callable that when passed a Credential
-        will store the credential back to where it came from.
-        This is needed to store the latest access_token if it
-        has expired and been refreshed.
-    """
-    self.access_token = access_token
-    self.client_id = client_id
-    self.client_secret = client_secret
-    self.refresh_token = refresh_token
-    self.store = None
-    self.token_expiry = token_expiry
-    self.token_uri = token_uri
-    self.user_agent = user_agent
-    self.revoke_uri = revoke_uri
-    self.id_token = id_token
-    self.token_response = token_response
-
-    # True if the credentials have been revoked or expired and can't be
-    # refreshed.
-    self.invalid = False
-
-  def authorize(self, http):
-    """Authorize an httplib2.Http instance with these credentials.
-
-    The modified http.request method will add authentication headers to each
-    request and will refresh access_tokens when a 401 is received on a
-    request. In addition the http.request method has a credentials property,
-    http.request.credentials, which is the Credentials object that authorized
-    it.
-
-    Args:
-       http: An instance of httplib2.Http
-         or something that acts like it.
-
-    Returns:
-       A modified instance of http that was passed in.
-
-    Example:
-
-      h = httplib2.Http()
-      h = credentials.authorize(h)
-
-    You can't create a new OAuth subclass of httplib2.Authenication
-    because it never gets passed the absolute URI, which is needed for
-    signing. So instead we have to overload 'request' with a closure
-    that adds in the Authorization header and then calls the original
-    version of 'request()'.
-    """
-    request_orig = http.request
-
-    # The closure that will replace 'httplib2.Http.request'.
-    @util.positional(1)
-    def new_request(uri, method='GET', body=None, headers=None,
-                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                    connection_type=None):
-      if not self.access_token:
-        logger.info('Attempting refresh to obtain initial access_token')
-        self._refresh(request_orig)
-
-      # Modify the request headers to add the appropriate
-      # Authorization header.
-      if headers is None:
-        headers = {}
-      self.apply(headers)
-
-      if self.user_agent is not None:
-        if 'user-agent' in headers:
-          headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
-        else:
-          headers['user-agent'] = self.user_agent
-
-      resp, content = request_orig(uri, method, body, clean_headers(headers),
-                                   redirections, connection_type)
-
-      if resp.status in REFRESH_STATUS_CODES:
-        logger.info('Refreshing due to a %s' % str(resp.status))
-        self._refresh(request_orig)
-        self.apply(headers)
-        return request_orig(uri, method, body, clean_headers(headers),
-                            redirections, connection_type)
-      else:
-        return (resp, content)
-
-    # Replace the request method with our own closure.
-    http.request = new_request
-
-    # Set credentials as a property of the request method.
-    setattr(http.request, 'credentials', self)
-
-    return http
-
-  def refresh(self, http):
-    """Forces a refresh of the access_token.
-
-    Args:
-      http: httplib2.Http, an http object to be used to make the refresh
-        request.
-    """
-    self._refresh(http.request)
-
-  def revoke(self, http):
-    """Revokes a refresh_token and makes the credentials void.
-
-    Args:
-      http: httplib2.Http, an http object to be used to make the revoke
-        request.
-    """
-    self._revoke(http.request)
-
-  def apply(self, headers):
-    """Add the authorization to the headers.
-
-    Args:
-      headers: dict, the headers to add the Authorization header to.
-    """
-    headers['Authorization'] = 'Bearer ' + self.access_token
-
-  def to_json(self):
-    return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
-
-  @classmethod
-  def from_json(cls, s):
-    """Instantiate a Credentials object from a JSON description of it. The JSON
-    should have been produced by calling .to_json() on the object.
-
-    Args:
-      data: dict, A deserialized JSON object.
-
-    Returns:
-      An instance of a Credentials subclass.
-    """
-    data = simplejson.loads(s)
-    if 'token_expiry' in data and not isinstance(data['token_expiry'],
-        datetime.datetime):
-      try:
-        data['token_expiry'] = datetime.datetime.strptime(
-            data['token_expiry'], EXPIRY_FORMAT)
-      except:
-        data['token_expiry'] = None
-    retval = cls(
-        data['access_token'],
-        data['client_id'],
-        data['client_secret'],
-        data['refresh_token'],
-        data['token_expiry'],
-        data['token_uri'],
-        data['user_agent'],
-        revoke_uri=data.get('revoke_uri', None),
-        id_token=data.get('id_token', None),
-        token_response=data.get('token_response', None))
-    retval.invalid = data['invalid']
-    return retval
-
-  @property
-  def access_token_expired(self):
-    """True if the credential is expired or invalid.
-
-    If the token_expiry isn't set, we assume the token doesn't expire.
-    """
-    if self.invalid:
-      return True
-
-    if not self.token_expiry:
-      return False
-
-    now = datetime.datetime.utcnow()
-    if now >= self.token_expiry:
-      logger.info('access_token is expired. Now: %s, token_expiry: %s',
-                  now, self.token_expiry)
-      return True
-    return False
-
-  def set_store(self, store):
-    """Set the Storage for the credential.
-
-    Args:
-      store: Storage, an implementation of Stroage object.
-        This is needed to store the latest access_token if it
-        has expired and been refreshed. This implementation uses
-        locking to check for updates before updating the
-        access_token.
-    """
-    self.store = store
-
-  def _updateFromCredential(self, other):
-    """Update this Credential from another instance."""
-    self.__dict__.update(other.__getstate__())
-
-  def __getstate__(self):
-    """Trim the state down to something that can be pickled."""
-    d = copy.copy(self.__dict__)
-    del d['store']
-    return d
-
-  def __setstate__(self, state):
-    """Reconstitute the state of the object from being pickled."""
-    self.__dict__.update(state)
-    self.store = None
-
-  def _generate_refresh_request_body(self):
-    """Generate the body that will be used in the refresh request."""
-    body = urllib.urlencode({
-        'grant_type': 'refresh_token',
-        'client_id': self.client_id,
-        'client_secret': self.client_secret,
-        'refresh_token': self.refresh_token,
-        })
-    return body
-
-  def _generate_refresh_request_headers(self):
-    """Generate the headers that will be used in the refresh request."""
-    headers = {
-        'content-type': 'application/x-www-form-urlencoded',
-    }
-
-    if self.user_agent is not None:
-      headers['user-agent'] = self.user_agent
-
-    return headers
-
-  def _refresh(self, http_request):
-    """Refreshes the access_token.
-
-    This method first checks by reading the Storage object if available.
-    If a refresh is still needed, it holds the Storage lock until the
-    refresh is completed.
-
-    Args:
-      http_request: callable, a callable that matches the method signature of
-        httplib2.Http.request, used to make the refresh request.
-
-    Raises:
-      AccessTokenRefreshError: When the refresh fails.
-    """
-    if not self.store:
-      self._do_refresh_request(http_request)
-    else:
-      self.store.acquire_lock()
-      try:
-        new_cred = self.store.locked_get()
-        if (new_cred and not new_cred.invalid and
-            new_cred.access_token != self.access_token):
-          logger.info('Updated access_token read from Storage')
-          self._updateFromCredential(new_cred)
-        else:
-          self._do_refresh_request(http_request)
-      finally:
-        self.store.release_lock()
-
-  def _do_refresh_request(self, http_request):
-    """Refresh the access_token using the refresh_token.
-
-    Args:
-      http_request: callable, a callable that matches the method signature of
-        httplib2.Http.request, used to make the refresh request.
-
-    Raises:
-      AccessTokenRefreshError: When the refresh fails.
-    """
-    body = self._generate_refresh_request_body()
-    headers = self._generate_refresh_request_headers()
-
-    logger.info('Refreshing access_token')
-    resp, content = http_request(
-        self.token_uri, method='POST', body=body, headers=headers)
-    if resp.status == 200:
-      # TODO(jcgregorio) Raise an error if loads fails?
-      d = simplejson.loads(content)
-      self.token_response = d
-      self.access_token = d['access_token']
-      self.refresh_token = d.get('refresh_token', self.refresh_token)
-      if 'expires_in' in d:
-        self.token_expiry = datetime.timedelta(
-            seconds=int(d['expires_in'])) + datetime.datetime.utcnow()
-      else:
-        self.token_expiry = None
-      if self.store:
-        self.store.locked_put(self)
-    else:
-      # An {'error':...} response body means the token is expired or revoked,
-      # so we flag the credentials as such.
-      logger.info('Failed to retrieve access token: %s' % content)
-      error_msg = 'Invalid response %s.' % resp['status']
-      try:
-        d = simplejson.loads(content)
-        if 'error' in d:
-          error_msg = d['error']
-          self.invalid = True
-          if self.store:
-            self.store.locked_put(self)
-      except StandardError:
-        pass
-      raise AccessTokenRefreshError(error_msg)
-
-  def _revoke(self, http_request):
-    """Revokes the refresh_token and deletes the store if available.
-
-    Args:
-      http_request: callable, a callable that matches the method signature of
-        httplib2.Http.request, used to make the revoke request.
-    """
-    self._do_revoke(http_request, self.refresh_token)
-
-  def _do_revoke(self, http_request, token):
-    """Revokes the credentials and deletes the store if available.
-
-    Args:
-      http_request: callable, a callable that matches the method signature of
-        httplib2.Http.request, used to make the refresh request.
-      token: A string used as the token to be revoked. Can be either an
-        access_token or refresh_token.
-
-    Raises:
-      TokenRevokeError: If the revoke request does not return with a 200 OK.
-    """
-    logger.info('Revoking token')
-    query_params = {'token': token}
-    token_revoke_uri = _update_query_params(self.revoke_uri, query_params)
-    resp, content = http_request(token_revoke_uri)
-    if resp.status == 200:
-      self.invalid = True
-    else:
-      error_msg = 'Invalid response %s.' % resp.status
-      try:
-        d = simplejson.loads(content)
-        if 'error' in d:
-          error_msg = d['error']
-      except StandardError:
-        pass
-      raise TokenRevokeError(error_msg)
-
-    if self.store:
-      self.store.delete()
-
-
-class AccessTokenCredentials(OAuth2Credentials):
-  """Credentials object for OAuth 2.0.
-
-  Credentials can be applied to an httplib2.Http object using the
-  authorize() method, which then signs each request from that object
-  with the OAuth 2.0 access token. This set of credentials is for the
-  use case where you have acquired an OAuth 2.0 access_token from
-  another place such as a JavaScript client or another web
-  application, and wish to use it from Python. Because only the
-  access_token is present it can not be refreshed and will in time
-  expire.
-
-  AccessTokenCredentials objects may be safely pickled and unpickled.
-
-  Usage:
-    credentials = AccessTokenCredentials('<an access token>',
-      'my-user-agent/1.0')
-    http = httplib2.Http()
-    http = credentials.authorize(http)
-
-  Exceptions:
-    AccessTokenCredentialsExpired: raised when the access_token expires or is
-      revoked.
-  """
-
-  def __init__(self, access_token, user_agent, revoke_uri=None):
-    """Create an instance of OAuth2Credentials
-
-    This is one of the few types if Credentials that you should contrust,
-    Credentials objects are usually instantiated by a Flow.
-
-    Args:
-      access_token: string, access token.
-      user_agent: string, The HTTP User-Agent to provide for this application.
-      revoke_uri: string, URI for revoke endpoint. Defaults to None; a token
-        can't be revoked if this is None.
-    """
-    super(AccessTokenCredentials, self).__init__(
-        access_token,
-        None,
-        None,
-        None,
-        None,
-        None,
-        user_agent,
-        revoke_uri=revoke_uri)
-
-
-  @classmethod
-  def from_json(cls, s):
-    data = simplejson.loads(s)
-    retval = AccessTokenCredentials(
-        data['access_token'],
-        data['user_agent'])
-    return retval
-
-  def _refresh(self, http_request):
-    raise AccessTokenCredentialsError(
-        'The access_token is expired or invalid and can\'t be refreshed.')
-
-  def _revoke(self, http_request):
-    """Revokes the access_token and deletes the store if available.
-
-    Args:
-      http_request: callable, a callable that matches the method signature of
-        httplib2.Http.request, used to make the revoke request.
-    """
-    self._do_revoke(http_request, self.access_token)
-
-
-class AssertionCredentials(OAuth2Credentials):
-  """Abstract Credentials object used for OAuth 2.0 assertion grants.
-
-  This credential does not require a flow to instantiate because it
-  represents a two legged flow, and therefore has all of the required
-  information to generate and refresh its own access tokens. It must
-  be subclassed to generate the appropriate assertion string.
-
-  AssertionCredentials objects may be safely pickled and unpickled.
-  """
-
-  @util.positional(2)
-  def __init__(self, assertion_type, user_agent=None,
-               token_uri=GOOGLE_TOKEN_URI,
-               revoke_uri=GOOGLE_REVOKE_URI,
-               **unused_kwargs):
-    """Constructor for AssertionFlowCredentials.
-
-    Args:
-      assertion_type: string, assertion type that will be declared to the auth
-        server
-      user_agent: string, The HTTP User-Agent to provide for this application.
-      token_uri: string, URI for token endpoint. For convenience
-        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-      revoke_uri: string, URI for revoke endpoint.
-    """
-    super(AssertionCredentials, self).__init__(
-        None,
-        None,
-        None,
-        None,
-        None,
-        token_uri,
-        user_agent,
-        revoke_uri=revoke_uri)
-    self.assertion_type = assertion_type
-
-  def _generate_refresh_request_body(self):
-    assertion = self._generate_assertion()
-
-    body = urllib.urlencode({
-        'assertion': assertion,
-        'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
-        })
-
-    return body
-
-  def _generate_assertion(self):
-    """Generate the assertion string that will be used in the access token
-    request.
-    """
-    _abstract()
-
-  def _revoke(self, http_request):
-    """Revokes the access_token and deletes the store if available.
-
-    Args:
-      http_request: callable, a callable that matches the method signature of
-        httplib2.Http.request, used to make the revoke request.
-    """
-    self._do_revoke(http_request, self.access_token)
-
-
-if HAS_CRYPTO:
-  # PyOpenSSL and PyCrypto are not prerequisites for oauth2client, so if it is
-  # missing then don't create the SignedJwtAssertionCredentials or the
-  # verify_id_token() method.
-
-  class SignedJwtAssertionCredentials(AssertionCredentials):
-    """Credentials object used for OAuth 2.0 Signed JWT assertion grants.
-
-    This credential does not require a flow to instantiate because it represents
-    a two legged flow, and therefore has all of the required information to
-    generate and refresh its own access tokens.
-
-    SignedJwtAssertionCredentials requires either PyOpenSSL, or PyCrypto 2.6 or
-    later. For App Engine you may also consider using AppAssertionCredentials.
-    """
-
-    MAX_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
-
-    @util.positional(4)
-    def __init__(self,
-        service_account_name,
-        private_key,
-        scope,
-        private_key_password='notasecret',
-        user_agent=None,
-        token_uri=GOOGLE_TOKEN_URI,
-        revoke_uri=GOOGLE_REVOKE_URI,
-        **kwargs):
-      """Constructor for SignedJwtAssertionCredentials.
-
-      Args:
-        service_account_name: string, id for account, usually an email address.
-        private_key: string, private key in PKCS12 or PEM format.
-        scope: string or iterable of strings, scope(s) of the credentials being
-          requested.
-        private_key_password: string, password for private_key, unused if
-          private_key is in PEM format.
-        user_agent: string, HTTP User-Agent to provide for this application.
-        token_uri: string, URI for token endpoint. For convenience
-          defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-        revoke_uri: string, URI for revoke endpoint.
-        kwargs: kwargs, Additional parameters to add to the JWT token, for
-          example prn=joe@xample.org."""
-
-      super(SignedJwtAssertionCredentials, self).__init__(
-          None,
-          user_agent=user_agent,
-          token_uri=token_uri,
-          revoke_uri=revoke_uri,
-          )
-
-      self.scope = util.scopes_to_string(scope)
-
-      # Keep base64 encoded so it can be stored in JSON.
-      self.private_key = base64.b64encode(private_key)
-
-      self.private_key_password = private_key_password
-      self.service_account_name = service_account_name
-      self.kwargs = kwargs
-
-    @classmethod
-    def from_json(cls, s):
-      data = simplejson.loads(s)
-      retval = SignedJwtAssertionCredentials(
-          data['service_account_name'],
-          base64.b64decode(data['private_key']),
-          data['scope'],
-          private_key_password=data['private_key_password'],
-          user_agent=data['user_agent'],
-          token_uri=data['token_uri'],
-          **data['kwargs']
-          )
-      retval.invalid = data['invalid']
-      retval.access_token = data['access_token']
-      return retval
-
-    def _generate_assertion(self):
-      """Generate the assertion that will be used in the request."""
-      now = long(time.time())
-      payload = {
-          'aud': self.token_uri,
-          'scope': self.scope,
-          'iat': now,
-          'exp': now + SignedJwtAssertionCredentials.MAX_TOKEN_LIFETIME_SECS,
-          'iss': self.service_account_name
-      }
-      payload.update(self.kwargs)
-      logger.debug(str(payload))
-
-      private_key = base64.b64decode(self.private_key)
-      return crypt.make_signed_jwt(crypt.Signer.from_string(
-          private_key, self.private_key_password), payload)
-
-  # Only used in verify_id_token(), which is always calling to the same URI
-  # for the certs.
-  _cached_http = httplib2.Http(MemoryCache())
-
-  @util.positional(2)
-  def verify_id_token(id_token, audience, http=None,
-      cert_uri=ID_TOKEN_VERIFICATON_CERTS):
-    """Verifies a signed JWT id_token.
-
-    This function requires PyOpenSSL and because of that it does not work on
-    App Engine.
-
-    Args:
-      id_token: string, A Signed JWT.
-      audience: string, The audience 'aud' that the token should be for.
-      http: httplib2.Http, instance to use to make the HTTP request. Callers
-        should supply an instance that has caching enabled.
-      cert_uri: string, URI of the certificates in JSON format to
-        verify the JWT against.
-
-    Returns:
-      The deserialized JSON in the JWT.
-
-    Raises:
-      oauth2client.crypt.AppIdentityError if the JWT fails to verify.
-    """
-    if http is None:
-      http = _cached_http
-
-    resp, content = http.request(cert_uri)
-
-    if resp.status == 200:
-      certs = simplejson.loads(content)
-      return crypt.verify_signed_jwt_with_certs(id_token, certs, audience)
-    else:
-      raise VerifyJwtTokenError('Status code: %d' % resp.status)
-
-
-def _urlsafe_b64decode(b64string):
-  # Guard against unicode strings, which base64 can't handle.
-  b64string = b64string.encode('ascii')
-  padded = b64string + '=' * (4 - len(b64string) % 4)
-  return base64.urlsafe_b64decode(padded)
-
-
-def _extract_id_token(id_token):
-  """Extract the JSON payload from a JWT.
-
-  Does the extraction w/o checking the signature.
-
-  Args:
-    id_token: string, OAuth 2.0 id_token.
-
-  Returns:
-    object, The deserialized JSON payload.
-  """
-  segments = id_token.split('.')
-
-  if (len(segments) != 3):
-    raise VerifyJwtTokenError(
-      'Wrong number of segments in token: %s' % id_token)
-
-  return simplejson.loads(_urlsafe_b64decode(segments[1]))
-
-
-def _parse_exchange_token_response(content):
-  """Parses response of an exchange token request.
-
-  Most providers return JSON but some (e.g. Facebook) return a
-  url-encoded string.
-
-  Args:
-    content: The body of a response
-
-  Returns:
-    Content as a dictionary object. Note that the dict could be empty,
-    i.e. {}. That basically indicates a failure.
-  """
-  resp = {}
-  try:
-    resp = simplejson.loads(content)
-  except StandardError:
-    # different JSON libs raise different exceptions,
-    # so we just do a catch-all here
-    resp = dict(parse_qsl(content))
-
-  # some providers respond with 'expires', others with 'expires_in'
-  if resp and 'expires' in resp:
-    resp['expires_in'] = resp.pop('expires')
-
-  return resp
-
-
-@util.positional(4)
-def credentials_from_code(client_id, client_secret, scope, code,
-                          redirect_uri='postmessage', http=None,
-                          user_agent=None, token_uri=GOOGLE_TOKEN_URI,
-                          auth_uri=GOOGLE_AUTH_URI,
-                          revoke_uri=GOOGLE_REVOKE_URI):
-  """Exchanges an authorization code for an OAuth2Credentials object.
-
-  Args:
-    client_id: string, client identifier.
-    client_secret: string, client secret.
-    scope: string or iterable of strings, scope(s) to request.
-    code: string, An authroization code, most likely passed down from
-      the client
-    redirect_uri: string, this is generally set to 'postmessage' to match the
-      redirect_uri that the client specified
-    http: httplib2.Http, optional http instance to use to do the fetch
-    token_uri: string, URI for token endpoint. For convenience
-      defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-    auth_uri: string, URI for authorization endpoint. For convenience
-      defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-    revoke_uri: string, URI for revoke endpoint. For convenience
-      defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-
-  Returns:
-    An OAuth2Credentials object.
-
-  Raises:
-    FlowExchangeError if the authorization code cannot be exchanged for an
-     access token
-  """
-  flow = OAuth2WebServerFlow(client_id, client_secret, scope,
-                             redirect_uri=redirect_uri, user_agent=user_agent,
-                             auth_uri=auth_uri, token_uri=token_uri,
-                             revoke_uri=revoke_uri)
-
-  credentials = flow.step2_exchange(code, http=http)
-  return credentials
-
-
-@util.positional(3)
-def credentials_from_clientsecrets_and_code(filename, scope, code,
-                                            message = None,
-                                            redirect_uri='postmessage',
-                                            http=None,
-                                            cache=None):
-  """Returns OAuth2Credentials from a clientsecrets file and an auth code.
-
-  Will create the right kind of Flow based on the contents of the clientsecrets
-  file or will raise InvalidClientSecretsError for unknown types of Flows.
-
-  Args:
-    filename: string, File name of clientsecrets.
-    scope: string or iterable of strings, scope(s) to request.
-    code: string, An authorization code, most likely passed down from
-      the client
-    message: string, A friendly string to display to the user if the
-      clientsecrets file is missing or invalid. If message is provided then
-      sys.exit will be called in the case of an error. If message in not
-      provided then clientsecrets.InvalidClientSecretsError will be raised.
-    redirect_uri: string, this is generally set to 'postmessage' to match the
-      redirect_uri that the client specified
-    http: httplib2.Http, optional http instance to use to do the fetch
-    cache: An optional cache service client that implements get() and set()
-      methods. See clientsecrets.loadfile() for details.
-
-  Returns:
-    An OAuth2Credentials object.
-
-  Raises:
-    FlowExchangeError if the authorization code cannot be exchanged for an
-     access token
-    UnknownClientSecretsFlowError if the file describes an unknown kind of Flow.
-    clientsecrets.InvalidClientSecretsError if the clientsecrets file is
-      invalid.
-  """
-  flow = flow_from_clientsecrets(filename, scope, message=message, cache=cache,
-                                 redirect_uri=redirect_uri)
-  credentials = flow.step2_exchange(code, http=http)
-  return credentials
-
-
-class OAuth2WebServerFlow(Flow):
-  """Does the Web Server Flow for OAuth 2.0.
-
-  OAuth2WebServerFlow objects may be safely pickled and unpickled.
-  """
-
-  @util.positional(4)
-  def __init__(self, client_id, client_secret, scope,
-               redirect_uri=None,
-               user_agent=None,
-               auth_uri=GOOGLE_AUTH_URI,
-               token_uri=GOOGLE_TOKEN_URI,
-               revoke_uri=GOOGLE_REVOKE_URI,
-               **kwargs):
-    """Constructor for OAuth2WebServerFlow.
-
-    The kwargs argument is used to set extra query parameters on the
-    auth_uri. For example, the access_type and approval_prompt
-    query parameters can be set via kwargs.
-
-    Args:
-      client_id: string, client identifier.
-      client_secret: string client secret.
-      scope: string or iterable of strings, scope(s) of the credentials being
-        requested.
-      redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
-        a non-web-based application, or a URI that handles the callback from
-        the authorization server.
-      user_agent: string, HTTP User-Agent to provide for this application.
-      auth_uri: string, URI for authorization endpoint. For convenience
-        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-      token_uri: string, URI for token endpoint. For convenience
-        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-      revoke_uri: string, URI for revoke endpoint. For convenience
-        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
-      **kwargs: dict, The keyword arguments are all optional and required
-                        parameters for the OAuth calls.
-    """
-    self.client_id = client_id
-    self.client_secret = client_secret
-    self.scope = util.scopes_to_string(scope)
-    self.redirect_uri = redirect_uri
-    self.user_agent = user_agent
-    self.auth_uri = auth_uri
-    self.token_uri = token_uri
-    self.revoke_uri = revoke_uri
-    self.params = {
-        'access_type': 'offline',
-        'response_type': 'code',
-    }
-    self.params.update(kwargs)
-
-  @util.positional(1)
-  def step1_get_authorize_url(self, redirect_uri=None):
-    """Returns a URI to redirect to the provider.
-
-    Args:
-      redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
-        a non-web-based application, or a URI that handles the callback from
-        the authorization server. This parameter is deprecated, please move to
-        passing the redirect_uri in via the constructor.
-
-    Returns:
-      A URI as a string to redirect the user to begin the authorization flow.
-    """
-    if redirect_uri is not None:
-      logger.warning(('The redirect_uri parameter for'
-          'OAuth2WebServerFlow.step1_get_authorize_url is deprecated. Please'
-          'move to passing the redirect_uri in via the constructor.'))
-      self.redirect_uri = redirect_uri
-
-    if self.redirect_uri is None:
-      raise ValueError('The value of redirect_uri must not be None.')
-
-    query_params = {
-        'client_id': self.client_id,
-        'redirect_uri': self.redirect_uri,
-        'scope': self.scope,
-    }
-    query_params.update(self.params)
-    return _update_query_params(self.auth_uri, query_params)
-
-  @util.positional(2)
-  def step2_exchange(self, code, http=None):
-    """Exhanges a code for OAuth2Credentials.
-
-    Args:
-      code: string or dict, either the code as a string, or a dictionary
-        of the query parameters to the redirect_uri, which contains
-        the code.
-      http: httplib2.Http, optional http instance to use to do the fetch
-
-    Returns:
-      An OAuth2Credentials object that can be used to authorize requests.
-
-    Raises:
-      FlowExchangeError if a problem occured exchanging the code for a
-      refresh_token.
-    """
-
-    if not (isinstance(code, str) or isinstance(code, unicode)):
-      if 'code' not in code:
-        if 'error' in code:
-          error_msg = code['error']
-        else:
-          error_msg = 'No code was supplied in the query parameters.'
-        raise FlowExchangeError(error_msg)
-      else:
-        code = code['code']
-
-    body = urllib.urlencode({
-        'grant_type': 'authorization_code',
-        'client_id': self.client_id,
-        'client_secret': self.client_secret,
-        'code': code,
-        'redirect_uri': self.redirect_uri,
-        'scope': self.scope,
-        })
-    headers = {
-        'content-type': 'application/x-www-form-urlencoded',
-    }
-
-    if self.user_agent is not None:
-      headers['user-agent'] = self.user_agent
-
-    if http is None:
-      http = httplib2.Http()
-
-    resp, content = http.request(self.token_uri, method='POST', body=body,
-                                 headers=headers)
-    d = _parse_exchange_token_response(content)
-    if resp.status == 200 and 'access_token' in d:
-      access_token = d['access_token']
-      refresh_token = d.get('refresh_token', None)
-      token_expiry = None
-      if 'expires_in' in d:
-        token_expiry = datetime.datetime.utcnow() + datetime.timedelta(
-            seconds=int(d['expires_in']))
-
-      if 'id_token' in d:
-        d['id_token'] = _extract_id_token(d['id_token'])
-
-      logger.info('Successfully retrieved access token')
-      return OAuth2Credentials(access_token, self.client_id,
-                               self.client_secret, refresh_token, token_expiry,
-                               self.token_uri, self.user_agent,
-                               revoke_uri=self.revoke_uri,
-                               id_token=d.get('id_token', None),
-                               token_response=d)
-    else:
-      logger.info('Failed to retrieve access token: %s' % content)
-      if 'error' in d:
-        # you never know what those providers got to say
-        error_msg = unicode(d['error'])
-      else:
-        error_msg = 'Invalid response: %s.' % str(resp.status)
-      raise FlowExchangeError(error_msg)
-
-
-@util.positional(2)
-def flow_from_clientsecrets(filename, scope, redirect_uri=None,
-                            message=None, cache=None):
-  """Create a Flow from a clientsecrets file.
-
-  Will create the right kind of Flow based on the contents of the clientsecrets
-  file or will raise InvalidClientSecretsError for unknown types of Flows.
-
-  Args:
-    filename: string, File name of client secrets.
-    scope: string or iterable of strings, scope(s) to request.
-    redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
-      a non-web-based application, or a URI that handles the callback from
-      the authorization server.
-    message: string, A friendly string to display to the user if the
-      clientsecrets file is missing or invalid. If message is provided then
-      sys.exit will be called in the case of an error. If message in not
-      provided then clientsecrets.InvalidClientSecretsError will be raised.
-    cache: An optional cache service client that implements get() and set()
-      methods. See clientsecrets.loadfile() for details.
-
-  Returns:
-    A Flow object.
-
-  Raises:
-    UnknownClientSecretsFlowError if the file describes an unknown kind of Flow.
-    clientsecrets.InvalidClientSecretsError if the clientsecrets file is
-      invalid.
-  """
-  try:
-    client_type, client_info = clientsecrets.loadfile(filename, cache=cache)
-    if client_type in (clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED):
-      constructor_kwargs = {
-          'redirect_uri': redirect_uri,
-          'auth_uri': client_info['auth_uri'],
-          'token_uri': client_info['token_uri'],
-      }
-      revoke_uri = client_info.get('revoke_uri')
-      if revoke_uri is not None:
-        constructor_kwargs['revoke_uri'] = revoke_uri
-      return OAuth2WebServerFlow(
-          client_info['client_id'], client_info['client_secret'],
-          scope, **constructor_kwargs)
-
-  except clientsecrets.InvalidClientSecretsError:
-    if message:
-      sys.exit(message)
-    else:
-      raise
-  else:
-    raise UnknownClientSecretsFlowError(
-        'This OAuth 2.0 flow is unsupported: %r' % client_type)
diff --git a/oauth2client/clientsecrets.py b/oauth2client/clientsecrets.py
deleted file mode 100644
index ac99aae..0000000
--- a/oauth2client/clientsecrets.py
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright (C) 2011 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for reading OAuth 2.0 client secret files.
-
-A client_secrets.json file contains all the information needed to interact with
-an OAuth 2.0 protected service.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-
-from anyjson import simplejson
-
-# Properties that make a client_secrets.json file valid.
-TYPE_WEB = 'web'
-TYPE_INSTALLED = 'installed'
-
-VALID_CLIENT = {
-    TYPE_WEB: {
-        'required': [
-            'client_id',
-            'client_secret',
-            'redirect_uris',
-            'auth_uri',
-            'token_uri',
-        ],
-        'string': [
-            'client_id',
-            'client_secret',
-        ],
-    },
-    TYPE_INSTALLED: {
-        'required': [
-            'client_id',
-            'client_secret',
-            'redirect_uris',
-            'auth_uri',
-            'token_uri',
-        ],
-        'string': [
-            'client_id',
-            'client_secret',
-        ],
-    },
-}
-
-
-class Error(Exception):
-  """Base error for this module."""
-  pass
-
-
-class InvalidClientSecretsError(Error):
-  """Format of ClientSecrets file is invalid."""
-  pass
-
-
-def _validate_clientsecrets(obj):
-  if obj is None or len(obj) != 1:
-    raise InvalidClientSecretsError('Invalid file format.')
-  client_type = obj.keys()[0]
-  if client_type not in VALID_CLIENT.keys():
-    raise InvalidClientSecretsError('Unknown client type: %s.' % client_type)
-  client_info = obj[client_type]
-  for prop_name in VALID_CLIENT[client_type]['required']:
-    if prop_name not in client_info:
-      raise InvalidClientSecretsError(
-        'Missing property "%s" in a client type of "%s".' % (prop_name,
-                                                           client_type))
-  for prop_name in VALID_CLIENT[client_type]['string']:
-    if client_info[prop_name].startswith('[['):
-      raise InvalidClientSecretsError(
-        'Property "%s" is not configured.' % prop_name)
-  return client_type, client_info
-
-
-def load(fp):
-  obj = simplejson.load(fp)
-  return _validate_clientsecrets(obj)
-
-
-def loads(s):
-  obj = simplejson.loads(s)
-  return _validate_clientsecrets(obj)
-
-
-def _loadfile(filename):
-  try:
-    fp = file(filename, 'r')
-    try:
-      obj = simplejson.load(fp)
-    finally:
-      fp.close()
-  except IOError:
-    raise InvalidClientSecretsError('File not found: "%s"' % filename)
-  return _validate_clientsecrets(obj)
-
-
-def loadfile(filename, cache=None):
-  """Loading of client_secrets JSON file, optionally backed by a cache.
-
-  Typical cache storage would be App Engine memcache service,
-  but you can pass in any other cache client that implements
-  these methods:
-    - get(key, namespace=ns)
-    - set(key, value, namespace=ns)
-
-  Usage:
-    # without caching
-    client_type, client_info = loadfile('secrets.json')
-    # using App Engine memcache service
-    from google.appengine.api import memcache
-    client_type, client_info = loadfile('secrets.json', cache=memcache)
-
-  Args:
-    filename: string, Path to a client_secrets.json file on a filesystem.
-    cache: An optional cache service client that implements get() and set()
-      methods. If not specified, the file is always being loaded from
-      a filesystem.
-
-  Raises:
-    InvalidClientSecretsError: In case of a validation error or some
-      I/O failure. Can happen only on cache miss.
-
-  Returns:
-    (client_type, client_info) tuple, as _loadfile() normally would.
-    JSON contents is validated only during first load. Cache hits are not
-    validated.
-  """
-  _SECRET_NAMESPACE = 'oauth2client:secrets#ns'
-
-  if not cache:
-    return _loadfile(filename)
-
-  obj = cache.get(filename, namespace=_SECRET_NAMESPACE)
-  if obj is None:
-    client_type, client_info = _loadfile(filename)
-    obj = {client_type: client_info}
-    cache.set(filename, obj, namespace=_SECRET_NAMESPACE)
-
-  return obj.iteritems().next()
diff --git a/oauth2client/crypt.py b/oauth2client/crypt.py
deleted file mode 100644
index 2d31815..0000000
--- a/oauth2client/crypt.py
+++ /dev/null
@@ -1,377 +0,0 @@
-#!/usr/bin/python2.4
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2011 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import base64
-import hashlib
-import logging
-import time
-
-from anyjson import simplejson
-
-
-CLOCK_SKEW_SECS = 300  # 5 minutes in seconds
-AUTH_TOKEN_LIFETIME_SECS = 300  # 5 minutes in seconds
-MAX_TOKEN_LIFETIME_SECS = 86400  # 1 day in seconds
-
-
-logger = logging.getLogger(__name__)
-
-
-class AppIdentityError(Exception):
-  pass
-
-
-try:
-  from OpenSSL import crypto
-
-
-  class OpenSSLVerifier(object):
-    """Verifies the signature on a message."""
-
-    def __init__(self, pubkey):
-      """Constructor.
-
-      Args:
-        pubkey, OpenSSL.crypto.PKey, The public key to verify with.
-      """
-      self._pubkey = pubkey
-
-    def verify(self, message, signature):
-      """Verifies a message against a signature.
-
-      Args:
-        message: string, The message to verify.
-        signature: string, The signature on the message.
-
-      Returns:
-        True if message was signed by the private key associated with the public
-        key that this object was constructed with.
-      """
-      try:
-        crypto.verify(self._pubkey, signature, message, 'sha256')
-        return True
-      except:
-        return False
-
-    @staticmethod
-    def from_string(key_pem, is_x509_cert):
-      """Construct a Verified instance from a string.
-
-      Args:
-        key_pem: string, public key in PEM format.
-        is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it is
-          expected to be an RSA key in PEM format.
-
-      Returns:
-        Verifier instance.
-
-      Raises:
-        OpenSSL.crypto.Error if the key_pem can't be parsed.
-      """
-      if is_x509_cert:
-        pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, key_pem)
-      else:
-        pubkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem)
-      return OpenSSLVerifier(pubkey)
-
-
-  class OpenSSLSigner(object):
-    """Signs messages with a private key."""
-
-    def __init__(self, pkey):
-      """Constructor.
-
-      Args:
-        pkey, OpenSSL.crypto.PKey (or equiv), The private key to sign with.
-      """
-      self._key = pkey
-
-    def sign(self, message):
-      """Signs a message.
-
-      Args:
-        message: string, Message to be signed.
-
-      Returns:
-        string, The signature of the message for the given key.
-      """
-      return crypto.sign(self._key, message, 'sha256')
-
-    @staticmethod
-    def from_string(key, password='notasecret'):
-      """Construct a Signer instance from a string.
-
-      Args:
-        key: string, private key in PKCS12 or PEM format.
-        password: string, password for the private key file.
-
-      Returns:
-        Signer instance.
-
-      Raises:
-        OpenSSL.crypto.Error if the key can't be parsed.
-      """
-      if key.startswith('-----BEGIN '):
-        pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
-      else:
-        pkey = crypto.load_pkcs12(key, password).get_privatekey()
-      return OpenSSLSigner(pkey)
-
-except ImportError:
-  OpenSSLVerifier = None
-  OpenSSLSigner = None
-
-
-try:
-  from Crypto.PublicKey import RSA
-  from Crypto.Hash import SHA256
-  from Crypto.Signature import PKCS1_v1_5
-
-
-  class PyCryptoVerifier(object):
-    """Verifies the signature on a message."""
-
-    def __init__(self, pubkey):
-      """Constructor.
-
-      Args:
-        pubkey, OpenSSL.crypto.PKey (or equiv), The public key to verify with.
-      """
-      self._pubkey = pubkey
-
-    def verify(self, message, signature):
-      """Verifies a message against a signature.
-
-      Args:
-        message: string, The message to verify.
-        signature: string, The signature on the message.
-
-      Returns:
-        True if message was signed by the private key associated with the public
-        key that this object was constructed with.
-      """
-      try:
-        return PKCS1_v1_5.new(self._pubkey).verify(
-            SHA256.new(message), signature)
-      except:
-        return False
-
-    @staticmethod
-    def from_string(key_pem, is_x509_cert):
-      """Construct a Verified instance from a string.
-
-      Args:
-        key_pem: string, public key in PEM format.
-        is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it is
-          expected to be an RSA key in PEM format.
-
-      Returns:
-        Verifier instance.
-
-      Raises:
-        NotImplementedError if is_x509_cert is true.
-      """
-      if is_x509_cert:
-        raise NotImplementedError(
-            'X509 certs are not supported by the PyCrypto library. '
-            'Try using PyOpenSSL if native code is an option.')
-      else:
-        pubkey = RSA.importKey(key_pem)
-      return PyCryptoVerifier(pubkey)
-
-
-  class PyCryptoSigner(object):
-    """Signs messages with a private key."""
-
-    def __init__(self, pkey):
-      """Constructor.
-
-      Args:
-        pkey, OpenSSL.crypto.PKey (or equiv), The private key to sign with.
-      """
-      self._key = pkey
-
-    def sign(self, message):
-      """Signs a message.
-
-      Args:
-        message: string, Message to be signed.
-
-      Returns:
-        string, The signature of the message for the given key.
-      """
-      return PKCS1_v1_5.new(self._key).sign(SHA256.new(message))
-
-    @staticmethod
-    def from_string(key, password='notasecret'):
-      """Construct a Signer instance from a string.
-
-      Args:
-        key: string, private key in PEM format.
-        password: string, password for private key file. Unused for PEM files.
-
-      Returns:
-        Signer instance.
-
-      Raises:
-        NotImplementedError if they key isn't in PEM format.
-      """
-      if key.startswith('-----BEGIN '):
-        pkey = RSA.importKey(key)
-      else:
-        raise NotImplementedError(
-            'PKCS12 format is not supported by the PyCrpto library. '
-            'Try converting to a "PEM" '
-            '(openssl pkcs12 -in xxxxx.p12 -nodes -nocerts > privatekey.pem) '
-            'or using PyOpenSSL if native code is an option.')
-      return PyCryptoSigner(pkey)
-
-except ImportError:
-  PyCryptoVerifier = None
-  PyCryptoSigner = None
-
-
-if OpenSSLSigner:
-  Signer = OpenSSLSigner
-  Verifier = OpenSSLVerifier
-elif PyCryptoSigner:
-  Signer = PyCryptoSigner
-  Verifier = PyCryptoVerifier
-else:
-  raise ImportError('No encryption library found. Please install either '
-                    'PyOpenSSL, or PyCrypto 2.6 or later')
-
-
-def _urlsafe_b64encode(raw_bytes):
-  return base64.urlsafe_b64encode(raw_bytes).rstrip('=')
-
-
-def _urlsafe_b64decode(b64string):
-  # Guard against unicode strings, which base64 can't handle.
-  b64string = b64string.encode('ascii')
-  padded = b64string + '=' * (4 - len(b64string) % 4)
-  return base64.urlsafe_b64decode(padded)
-
-
-def _json_encode(data):
-  return simplejson.dumps(data, separators = (',', ':'))
-
-
-def make_signed_jwt(signer, payload):
-  """Make a signed JWT.
-
-  See http://self-issued.info/docs/draft-jones-json-web-token.html.
-
-  Args:
-    signer: crypt.Signer, Cryptographic signer.
-    payload: dict, Dictionary of data to convert to JSON and then sign.
-
-  Returns:
-    string, The JWT for the payload.
-  """
-  header = {'typ': 'JWT', 'alg': 'RS256'}
-
-  segments = [
-          _urlsafe_b64encode(_json_encode(header)),
-          _urlsafe_b64encode(_json_encode(payload)),
-  ]
-  signing_input = '.'.join(segments)
-
-  signature = signer.sign(signing_input)
-  segments.append(_urlsafe_b64encode(signature))
-
-  logger.debug(str(segments))
-
-  return '.'.join(segments)
-
-
-def verify_signed_jwt_with_certs(jwt, certs, audience):
-  """Verify a JWT against public certs.
-
-  See http://self-issued.info/docs/draft-jones-json-web-token.html.
-
-  Args:
-    jwt: string, A JWT.
-    certs: dict, Dictionary where values of public keys in PEM format.
-    audience: string, The audience, 'aud', that this JWT should contain. If
-      None then the JWT's 'aud' parameter is not verified.
-
-  Returns:
-    dict, The deserialized JSON payload in the JWT.
-
-  Raises:
-    AppIdentityError if any checks are failed.
-  """
-  segments = jwt.split('.')
-
-  if (len(segments) != 3):
-    raise AppIdentityError(
-      'Wrong number of segments in token: %s' % jwt)
-  signed = '%s.%s' % (segments[0], segments[1])
-
-  signature = _urlsafe_b64decode(segments[2])
-
-  # Parse token.
-  json_body = _urlsafe_b64decode(segments[1])
-  try:
-    parsed = simplejson.loads(json_body)
-  except:
-    raise AppIdentityError('Can\'t parse token: %s' % json_body)
-
-  # Check signature.
-  verified = False
-  for (keyname, pem) in certs.items():
-    verifier = Verifier.from_string(pem, True)
-    if (verifier.verify(signed, signature)):
-      verified = True
-      break
-  if not verified:
-    raise AppIdentityError('Invalid token signature: %s' % jwt)
-
-  # Check creation timestamp.
-  iat = parsed.get('iat')
-  if iat is None:
-    raise AppIdentityError('No iat field in token: %s' % json_body)
-  earliest = iat - CLOCK_SKEW_SECS
-
-  # Check expiration timestamp.
-  now = long(time.time())
-  exp = parsed.get('exp')
-  if exp is None:
-    raise AppIdentityError('No exp field in token: %s' % json_body)
-  if exp >= now + MAX_TOKEN_LIFETIME_SECS:
-    raise AppIdentityError(
-      'exp field too far in future: %s' % json_body)
-  latest = exp + CLOCK_SKEW_SECS
-
-  if now < earliest:
-    raise AppIdentityError('Token used too early, %d < %d: %s' %
-      (now, earliest, json_body))
-  if now > latest:
-    raise AppIdentityError('Token used too late, %d > %d: %s' %
-      (now, latest, json_body))
-
-  # Check audience.
-  if audience is not None:
-    aud = parsed.get('aud')
-    if aud is None:
-      raise AppIdentityError('No aud field in token: %s' % json_body)
-    if aud != audience:
-      raise AppIdentityError('Wrong recipient, %s != %s: %s' %
-          (aud, audience, json_body))
-
-  return parsed
diff --git a/oauth2client/django_orm.py b/oauth2client/django_orm.py
deleted file mode 100644
index d54d20c..0000000
--- a/oauth2client/django_orm.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""OAuth 2.0 utilities for Django.
-
-Utilities for using OAuth 2.0 in conjunction with
-the Django datastore.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import oauth2client
-import base64
-import pickle
-
-from django.db import models
-from oauth2client.client import Storage as BaseStorage
-
-class CredentialsField(models.Field):
-
-  __metaclass__ = models.SubfieldBase
-
-  def __init__(self, *args, **kwargs):
-    if 'null' not in kwargs:
-      kwargs['null'] = True
-    super(CredentialsField, self).__init__(*args, **kwargs)
-
-  def get_internal_type(self):
-    return "TextField"
-
-  def to_python(self, value):
-    if value is None:
-      return None
-    if isinstance(value, oauth2client.client.Credentials):
-      return value
-    return pickle.loads(base64.b64decode(value))
-
-  def get_db_prep_value(self, value, connection, prepared=False):
-    if value is None:
-      return None
-    return base64.b64encode(pickle.dumps(value))
-
-
-class FlowField(models.Field):
-
-  __metaclass__ = models.SubfieldBase
-
-  def __init__(self, *args, **kwargs):
-    if 'null' not in kwargs:
-      kwargs['null'] = True
-    super(FlowField, self).__init__(*args, **kwargs)
-
-  def get_internal_type(self):
-    return "TextField"
-
-  def to_python(self, value):
-    if value is None:
-      return None
-    if isinstance(value, oauth2client.client.Flow):
-      return value
-    return pickle.loads(base64.b64decode(value))
-
-  def get_db_prep_value(self, value, connection, prepared=False):
-    if value is None:
-      return None
-    return base64.b64encode(pickle.dumps(value))
-
-
-class Storage(BaseStorage):
-  """Store and retrieve a single credential to and from
-  the datastore.
-
-  This Storage helper presumes the Credentials
-  have been stored as a CredenialsField
-  on a db model class.
-  """
-
-  def __init__(self, model_class, key_name, key_value, property_name):
-    """Constructor for Storage.
-
-    Args:
-      model: db.Model, model class
-      key_name: string, key name for the entity that has the credentials
-      key_value: string, key value for the entity that has the credentials
-      property_name: string, name of the property that is an CredentialsProperty
-    """
-    self.model_class = model_class
-    self.key_name = key_name
-    self.key_value = key_value
-    self.property_name = property_name
-
-  def locked_get(self):
-    """Retrieve Credential from datastore.
-
-    Returns:
-      oauth2client.Credentials
-    """
-    credential = None
-
-    query = {self.key_name: self.key_value}
-    entities = self.model_class.objects.filter(**query)
-    if len(entities) > 0:
-      credential = getattr(entities[0], self.property_name)
-      if credential and hasattr(credential, 'set_store'):
-        credential.set_store(self)
-    return credential
-
-  def locked_put(self, credentials):
-    """Write a Credentials to the datastore.
-
-    Args:
-      credentials: Credentials, the credentials to store.
-    """
-    args = {self.key_name: self.key_value}
-    entity = self.model_class(**args)
-    setattr(entity, self.property_name, credentials)
-    entity.save()
-
-  def locked_delete(self):
-    """Delete Credentials from the datastore."""
-
-    query = {self.key_name: self.key_value}
-    entities = self.model_class.objects.filter(**query).delete()
diff --git a/oauth2client/file.py b/oauth2client/file.py
deleted file mode 100644
index 1895f94..0000000
--- a/oauth2client/file.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for OAuth.
-
-Utilities for making it easier to work with OAuth 2.0
-credentials.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import os
-import stat
-import threading
-
-from anyjson import simplejson
-from client import Storage as BaseStorage
-from client import Credentials
-
-
-class CredentialsFileSymbolicLinkError(Exception):
-  """Credentials files must not be symbolic links."""
-
-
-class Storage(BaseStorage):
-  """Store and retrieve a single credential to and from a file."""
-
-  def __init__(self, filename):
-    self._filename = filename
-    self._lock = threading.Lock()
-
-  def _validate_file(self):
-    if os.path.islink(self._filename):
-      raise CredentialsFileSymbolicLinkError(
-          'File: %s is a symbolic link.' % self._filename)
-
-  def acquire_lock(self):
-    """Acquires any lock necessary to access this Storage.
-
-    This lock is not reentrant."""
-    self._lock.acquire()
-
-  def release_lock(self):
-    """Release the Storage lock.
-
-    Trying to release a lock that isn't held will result in a
-    RuntimeError.
-    """
-    self._lock.release()
-
-  def locked_get(self):
-    """Retrieve Credential from file.
-
-    Returns:
-      oauth2client.client.Credentials
-
-    Raises:
-      CredentialsFileSymbolicLinkError if the file is a symbolic link.
-    """
-    credentials = None
-    self._validate_file()
-    try:
-      f = open(self._filename, 'rb')
-      content = f.read()
-      f.close()
-    except IOError:
-      return credentials
-
-    try:
-      credentials = Credentials.new_from_json(content)
-      credentials.set_store(self)
-    except ValueError:
-      pass
-
-    return credentials
-
-  def _create_file_if_needed(self):
-    """Create an empty file if necessary.
-
-    This method will not initialize the file. Instead it implements a
-    simple version of "touch" to ensure the file has been created.
-    """
-    if not os.path.exists(self._filename):
-      old_umask = os.umask(0177)
-      try:
-        open(self._filename, 'a+b').close()
-      finally:
-        os.umask(old_umask)
-
-  def locked_put(self, credentials):
-    """Write Credentials to file.
-
-    Args:
-      credentials: Credentials, the credentials to store.
-
-    Raises:
-      CredentialsFileSymbolicLinkError if the file is a symbolic link.
-    """
-
-    self._create_file_if_needed()
-    self._validate_file()
-    f = open(self._filename, 'wb')
-    f.write(credentials.to_json())
-    f.close()
-
-  def locked_delete(self):
-    """Delete Credentials file.
-
-    Args:
-      credentials: Credentials, the credentials to store.
-    """
-
-    os.unlink(self._filename)
diff --git a/oauth2client/gce.py b/oauth2client/gce.py
deleted file mode 100644
index c7fd7c1..0000000
--- a/oauth2client/gce.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright (C) 2012 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for Google Compute Engine
-
-Utilities for making it easier to use OAuth 2.0 on Google Compute Engine.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import httplib2
-import logging
-import uritemplate
-
-from oauth2client import util
-from oauth2client.anyjson import simplejson
-from oauth2client.client import AccessTokenRefreshError
-from oauth2client.client import AssertionCredentials
-
-logger = logging.getLogger(__name__)
-
-# URI Template for the endpoint that returns access_tokens.
-META = ('http://metadata.google.internal/0.1/meta-data/service-accounts/'
-        'default/acquire{?scope}')
-
-
-class AppAssertionCredentials(AssertionCredentials):
-  """Credentials object for Compute Engine Assertion Grants
-
-  This object will allow a Compute Engine instance to identify itself to
-  Google and other OAuth 2.0 servers that can verify assertions. It can be used
-  for the purpose of accessing data stored under an account assigned to the
-  Compute Engine instance itself.
-
-  This credential does not require a flow to instantiate because it represents
-  a two legged flow, and therefore has all of the required information to
-  generate and refresh its own access tokens.
-  """
-
-  @util.positional(2)
-  def __init__(self, scope, **kwargs):
-    """Constructor for AppAssertionCredentials
-
-    Args:
-      scope: string or iterable of strings, scope(s) of the credentials being
-        requested.
-    """
-    self.scope = util.scopes_to_string(scope)
-
-    # Assertion type is no longer used, but still in the parent class signature.
-    super(AppAssertionCredentials, self).__init__(None)
-
-  @classmethod
-  def from_json(cls, json):
-    data = simplejson.loads(json)
-    return AppAssertionCredentials(data['scope'])
-
-  def _refresh(self, http_request):
-    """Refreshes the access_token.
-
-    Skip all the storage hoops and just refresh using the API.
-
-    Args:
-      http_request: callable, a callable that matches the method signature of
-        httplib2.Http.request, used to make the refresh request.
-
-    Raises:
-      AccessTokenRefreshError: When the refresh fails.
-    """
-    uri = uritemplate.expand(META, {'scope': self.scope})
-    response, content = http_request(uri)
-    if response.status == 200:
-      try:
-        d = simplejson.loads(content)
-      except StandardError, e:
-        raise AccessTokenRefreshError(str(e))
-      self.access_token = d['accessToken']
-    else:
-      raise AccessTokenRefreshError(content)
diff --git a/oauth2client/keyring_storage.py b/oauth2client/keyring_storage.py
deleted file mode 100644
index efe2949..0000000
--- a/oauth2client/keyring_storage.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright (C) 2012 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""A keyring based Storage.
-
-A Storage for Credentials that uses the keyring module.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import keyring
-import threading
-
-from client import Storage as BaseStorage
-from client import Credentials
-
-
-class Storage(BaseStorage):
-  """Store and retrieve a single credential to and from the keyring.
-
-  To use this module you must have the keyring module installed. See
-  <http://pypi.python.org/pypi/keyring/>. This is an optional module and is not
-  installed with oauth2client by default because it does not work on all the
-  platforms that oauth2client supports, such as Google App Engine.
-
-  The keyring module <http://pypi.python.org/pypi/keyring/> is a cross-platform
-  library for access the keyring capabilities of the local system. The user will
-  be prompted for their keyring password when this module is used, and the
-  manner in which the user is prompted will vary per platform.
-
-  Usage:
-    from oauth2client.keyring_storage import Storage
-
-    s = Storage('name_of_application', 'user1')
-    credentials = s.get()
-
-  """
-
-  def __init__(self, service_name, user_name):
-    """Constructor.
-
-    Args:
-      service_name: string, The name of the service under which the credentials
-        are stored.
-      user_name: string, The name of the user to store credentials for.
-    """
-    self._service_name = service_name
-    self._user_name = user_name
-    self._lock = threading.Lock()
-
-  def acquire_lock(self):
-    """Acquires any lock necessary to access this Storage.
-
-    This lock is not reentrant."""
-    self._lock.acquire()
-
-  def release_lock(self):
-    """Release the Storage lock.
-
-    Trying to release a lock that isn't held will result in a
-    RuntimeError.
-    """
-    self._lock.release()
-
-  def locked_get(self):
-    """Retrieve Credential from file.
-
-    Returns:
-      oauth2client.client.Credentials
-    """
-    credentials = None
-    content = keyring.get_password(self._service_name, self._user_name)
-
-    if content is not None:
-      try:
-        credentials = Credentials.new_from_json(content)
-        credentials.set_store(self)
-      except ValueError:
-        pass
-
-    return credentials
-
-  def locked_put(self, credentials):
-    """Write Credentials to file.
-
-    Args:
-      credentials: Credentials, the credentials to store.
-    """
-    keyring.set_password(self._service_name, self._user_name,
-                         credentials.to_json())
-
-  def locked_delete(self):
-    """Delete Credentials file.
-
-    Args:
-      credentials: Credentials, the credentials to store.
-    """
-    keyring.set_password(self._service_name, self._user_name, '')
diff --git a/oauth2client/locked_file.py b/oauth2client/locked_file.py
deleted file mode 100644
index 26f783e..0000000
--- a/oauth2client/locked_file.py
+++ /dev/null
@@ -1,361 +0,0 @@
-# Copyright 2011 Google Inc. All Rights Reserved.
-
-"""Locked file interface that should work on Unix and Windows pythons.
-
-This module first tries to use fcntl locking to ensure serialized access
-to a file, then falls back on a lock file if that is unavialable.
-
-Usage:
-    f = LockedFile('filename', 'r+b', 'rb')
-    f.open_and_lock()
-    if f.is_locked():
-      print 'Acquired filename with r+b mode'
-      f.file_handle().write('locked data')
-    else:
-      print 'Aquired filename with rb mode'
-    f.unlock_and_close()
-"""
-
-__author__ = 'cache@google.com (David T McWherter)'
-
-import errno
-import logging
-import os
-import time
-
-from oauth2client import util
-
-logger = logging.getLogger(__name__)
-
-
-class CredentialsFileSymbolicLinkError(Exception):
-  """Credentials files must not be symbolic links."""
-
-
-class AlreadyLockedException(Exception):
-  """Trying to lock a file that has already been locked by the LockedFile."""
-  pass
-
-
-def validate_file(filename):
-  if os.path.islink(filename):
-    raise CredentialsFileSymbolicLinkError(
-        'File: %s is a symbolic link.' % filename)
-
-class _Opener(object):
-  """Base class for different locking primitives."""
-
-  def __init__(self, filename, mode, fallback_mode):
-    """Create an Opener.
-
-    Args:
-      filename: string, The pathname of the file.
-      mode: string, The preferred mode to access the file with.
-      fallback_mode: string, The mode to use if locking fails.
-    """
-    self._locked = False
-    self._filename = filename
-    self._mode = mode
-    self._fallback_mode = fallback_mode
-    self._fh = None
-
-  def is_locked(self):
-    """Was the file locked."""
-    return self._locked
-
-  def file_handle(self):
-    """The file handle to the file. Valid only after opened."""
-    return self._fh
-
-  def filename(self):
-    """The filename that is being locked."""
-    return self._filename
-
-  def open_and_lock(self, timeout, delay):
-    """Open the file and lock it.
-
-    Args:
-      timeout: float, How long to try to lock for.
-      delay: float, How long to wait between retries.
-    """
-    pass
-
-  def unlock_and_close(self):
-    """Unlock and close the file."""
-    pass
-
-
-class _PosixOpener(_Opener):
-  """Lock files using Posix advisory lock files."""
-
-  def open_and_lock(self, timeout, delay):
-    """Open the file and lock it.
-
-    Tries to create a .lock file next to the file we're trying to open.
-
-    Args:
-      timeout: float, How long to try to lock for.
-      delay: float, How long to wait between retries.
-
-    Raises:
-      AlreadyLockedException: if the lock is already acquired.
-      IOError: if the open fails.
-      CredentialsFileSymbolicLinkError if the file is a symbolic link.
-    """
-    if self._locked:
-      raise AlreadyLockedException('File %s is already locked' %
-                                   self._filename)
-    self._locked = False
-
-    validate_file(self._filename)
-    try:
-      self._fh = open(self._filename, self._mode)
-    except IOError, e:
-      # If we can't access with _mode, try _fallback_mode and don't lock.
-      if e.errno == errno.EACCES:
-        self._fh = open(self._filename, self._fallback_mode)
-        return
-
-    lock_filename = self._posix_lockfile(self._filename)
-    start_time = time.time()
-    while True:
-      try:
-        self._lock_fd = os.open(lock_filename,
-                                os.O_CREAT|os.O_EXCL|os.O_RDWR)
-        self._locked = True
-        break
-
-      except OSError, e:
-        if e.errno != errno.EEXIST:
-          raise
-        if (time.time() - start_time) >= timeout:
-          logger.warn('Could not acquire lock %s in %s seconds' % (
-              lock_filename, timeout))
-          # Close the file and open in fallback_mode.
-          if self._fh:
-            self._fh.close()
-          self._fh = open(self._filename, self._fallback_mode)
-          return
-        time.sleep(delay)
-
-  def unlock_and_close(self):
-    """Unlock a file by removing the .lock file, and close the handle."""
-    if self._locked:
-      lock_filename = self._posix_lockfile(self._filename)
-      os.close(self._lock_fd)
-      os.unlink(lock_filename)
-      self._locked = False
-      self._lock_fd = None
-    if self._fh:
-      self._fh.close()
-
-  def _posix_lockfile(self, filename):
-    """The name of the lock file to use for posix locking."""
-    return '%s.lock' % filename
-
-
-try:
-  import fcntl
-
-  class _FcntlOpener(_Opener):
-    """Open, lock, and unlock a file using fcntl.lockf."""
-
-    def open_and_lock(self, timeout, delay):
-      """Open the file and lock it.
-
-      Args:
-        timeout: float, How long to try to lock for.
-        delay: float, How long to wait between retries
-
-      Raises:
-        AlreadyLockedException: if the lock is already acquired.
-        IOError: if the open fails.
-        CredentialsFileSymbolicLinkError if the file is a symbolic link.
-      """
-      if self._locked:
-        raise AlreadyLockedException('File %s is already locked' %
-                                     self._filename)
-      start_time = time.time()
-
-      validate_file(self._filename)
-      try:
-        self._fh = open(self._filename, self._mode)
-      except IOError, e:
-        # If we can't access with _mode, try _fallback_mode and don't lock.
-        if e.errno == errno.EACCES:
-          self._fh = open(self._filename, self._fallback_mode)
-          return
-
-      # We opened in _mode, try to lock the file.
-      while True:
-        try:
-          fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX)
-          self._locked = True
-          return
-        except IOError, e:
-          # If not retrying, then just pass on the error.
-          if timeout == 0:
-            raise e
-          if e.errno != errno.EACCES:
-            raise e
-          # We could not acquire the lock. Try again.
-          if (time.time() - start_time) >= timeout:
-            logger.warn('Could not lock %s in %s seconds' % (
-                self._filename, timeout))
-            if self._fh:
-              self._fh.close()
-            self._fh = open(self._filename, self._fallback_mode)
-            return
-          time.sleep(delay)
-
-    def unlock_and_close(self):
-      """Close and unlock the file using the fcntl.lockf primitive."""
-      if self._locked:
-        fcntl.lockf(self._fh.fileno(), fcntl.LOCK_UN)
-      self._locked = False
-      if self._fh:
-        self._fh.close()
-except ImportError:
-  _FcntlOpener = None
-
-
-try:
-  import pywintypes
-  import win32con
-  import win32file
-
-  class _Win32Opener(_Opener):
-    """Open, lock, and unlock a file using windows primitives."""
-
-    # Error #33:
-    #  'The process cannot access the file because another process'
-    FILE_IN_USE_ERROR = 33
-
-    # Error #158:
-    #  'The segment is already unlocked.'
-    FILE_ALREADY_UNLOCKED_ERROR = 158
-
-    def open_and_lock(self, timeout, delay):
-      """Open the file and lock it.
-
-      Args:
-        timeout: float, How long to try to lock for.
-        delay: float, How long to wait between retries
-
-      Raises:
-        AlreadyLockedException: if the lock is already acquired.
-        IOError: if the open fails.
-        CredentialsFileSymbolicLinkError if the file is a symbolic link.
-      """
-      if self._locked:
-        raise AlreadyLockedException('File %s is already locked' %
-                                     self._filename)
-      start_time = time.time()
-
-      validate_file(self._filename)
-      try:
-        self._fh = open(self._filename, self._mode)
-      except IOError, e:
-        # If we can't access with _mode, try _fallback_mode and don't lock.
-        if e.errno == errno.EACCES:
-          self._fh = open(self._filename, self._fallback_mode)
-          return
-
-      # We opened in _mode, try to lock the file.
-      while True:
-        try:
-          hfile = win32file._get_osfhandle(self._fh.fileno())
-          win32file.LockFileEx(
-              hfile,
-              (win32con.LOCKFILE_FAIL_IMMEDIATELY|
-               win32con.LOCKFILE_EXCLUSIVE_LOCK), 0, -0x10000,
-              pywintypes.OVERLAPPED())
-          self._locked = True
-          return
-        except pywintypes.error, e:
-          if timeout == 0:
-            raise e
-
-          # If the error is not that the file is already in use, raise.
-          if e[0] != _Win32Opener.FILE_IN_USE_ERROR:
-            raise
-
-          # We could not acquire the lock. Try again.
-          if (time.time() - start_time) >= timeout:
-            logger.warn('Could not lock %s in %s seconds' % (
-                self._filename, timeout))
-            if self._fh:
-              self._fh.close()
-            self._fh = open(self._filename, self._fallback_mode)
-            return
-          time.sleep(delay)
-
-    def unlock_and_close(self):
-      """Close and unlock the file using the win32 primitive."""
-      if self._locked:
-        try:
-          hfile = win32file._get_osfhandle(self._fh.fileno())
-          win32file.UnlockFileEx(hfile, 0, -0x10000, pywintypes.OVERLAPPED())
-        except pywintypes.error, e:
-          if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR:
-            raise
-      self._locked = False
-      if self._fh:
-        self._fh.close()
-except ImportError:
-  _Win32Opener = None
-
-
-class LockedFile(object):
-  """Represent a file that has exclusive access."""
-
-  @util.positional(4)
-  def __init__(self, filename, mode, fallback_mode, use_native_locking=True):
-    """Construct a LockedFile.
-
-    Args:
-      filename: string, The path of the file to open.
-      mode: string, The mode to try to open the file with.
-      fallback_mode: string, The mode to use if locking fails.
-      use_native_locking: bool, Whether or not fcntl/win32 locking is used.
-    """
-    opener = None
-    if not opener and use_native_locking:
-      if _Win32Opener:
-        opener = _Win32Opener(filename, mode, fallback_mode)
-      if _FcntlOpener:
-        opener = _FcntlOpener(filename, mode, fallback_mode)
-
-    if not opener:
-      opener = _PosixOpener(filename, mode, fallback_mode)
-
-    self._opener = opener
-
-  def filename(self):
-    """Return the filename we were constructed with."""
-    return self._opener._filename
-
-  def file_handle(self):
-    """Return the file_handle to the opened file."""
-    return self._opener.file_handle()
-
-  def is_locked(self):
-    """Return whether we successfully locked the file."""
-    return self._opener.is_locked()
-
-  def open_and_lock(self, timeout=0, delay=0.05):
-    """Open the file, trying to lock it.
-
-    Args:
-      timeout: float, The number of seconds to try to acquire the lock.
-      delay: float, The number of seconds to wait between retry attempts.
-
-    Raises:
-      AlreadyLockedException: if the lock is already acquired.
-      IOError: if the open fails.
-    """
-    self._opener.open_and_lock(timeout, delay)
-
-  def unlock_and_close(self):
-    """Unlock and close a file."""
-    self._opener.unlock_and_close()
diff --git a/oauth2client/multistore_file.py b/oauth2client/multistore_file.py
deleted file mode 100644
index e1b39f7..0000000
--- a/oauth2client/multistore_file.py
+++ /dev/null
@@ -1,409 +0,0 @@
-# Copyright 2011 Google Inc. All Rights Reserved.
-
-"""Multi-credential file store with lock support.
-
-This module implements a JSON credential store where multiple
-credentials can be stored in one file. That file supports locking
-both in a single process and across processes.
-
-The credential themselves are keyed off of:
-* client_id
-* user_agent
-* scope
-
-The format of the stored data is like so:
-{
-  'file_version': 1,
-  'data': [
-    {
-      'key': {
-        'clientId': '<client id>',
-        'userAgent': '<user agent>',
-        'scope': '<scope>'
-      },
-      'credential': {
-        # JSON serialized Credentials.
-      }
-    }
-  ]
-}
-"""
-
-__author__ = 'jbeda@google.com (Joe Beda)'
-
-import base64
-import errno
-import logging
-import os
-import threading
-
-from anyjson import simplejson
-from oauth2client.client import Storage as BaseStorage
-from oauth2client.client import Credentials
-from oauth2client import util
-from locked_file import LockedFile
-
-logger = logging.getLogger(__name__)
-
-# A dict from 'filename'->_MultiStore instances
-_multistores = {}
-_multistores_lock = threading.Lock()
-
-
-class Error(Exception):
-  """Base error for this module."""
-  pass
-
-
-class NewerCredentialStoreError(Error):
-  """The credential store is a newer version that supported."""
-  pass
-
-
-@util.positional(4)
-def get_credential_storage(filename, client_id, user_agent, scope,
-                           warn_on_readonly=True):
-  """Get a Storage instance for a credential.
-
-  Args:
-    filename: The JSON file storing a set of credentials
-    client_id: The client_id for the credential
-    user_agent: The user agent for the credential
-    scope: string or iterable of strings, Scope(s) being requested
-    warn_on_readonly: if True, log a warning if the store is readonly
-
-  Returns:
-    An object derived from client.Storage for getting/setting the
-    credential.
-  """
-  # Recreate the legacy key with these specific parameters
-  key = {'clientId': client_id, 'userAgent': user_agent,
-         'scope': util.scopes_to_string(scope)}
-  return get_credential_storage_custom_key(
-      filename, key, warn_on_readonly=warn_on_readonly)
-
-
-@util.positional(2)
-def get_credential_storage_custom_string_key(
-    filename, key_string, warn_on_readonly=True):
-  """Get a Storage instance for a credential using a single string as a key.
-
-  Allows you to provide a string as a custom key that will be used for
-  credential storage and retrieval.
-
-  Args:
-    filename: The JSON file storing a set of credentials
-    key_string: A string to use as the key for storing this credential.
-    warn_on_readonly: if True, log a warning if the store is readonly
-
-  Returns:
-    An object derived from client.Storage for getting/setting the
-    credential.
-  """
-  # Create a key dictionary that can be used
-  key_dict = {'key': key_string}
-  return get_credential_storage_custom_key(
-      filename, key_dict, warn_on_readonly=warn_on_readonly)
-
-
-@util.positional(2)
-def get_credential_storage_custom_key(
-    filename, key_dict, warn_on_readonly=True):
-  """Get a Storage instance for a credential using a dictionary as a key.
-
-  Allows you to provide a dictionary as a custom key that will be used for
-  credential storage and retrieval.
-
-  Args:
-    filename: The JSON file storing a set of credentials
-    key_dict: A dictionary to use as the key for storing this credential. There
-      is no ordering of the keys in the dictionary. Logically equivalent
-      dictionaries will produce equivalent storage keys.
-    warn_on_readonly: if True, log a warning if the store is readonly
-
-  Returns:
-    An object derived from client.Storage for getting/setting the
-    credential.
-  """
-  filename = os.path.expanduser(filename)
-  _multistores_lock.acquire()
-  try:
-    multistore = _multistores.setdefault(
-        filename, _MultiStore(filename, warn_on_readonly=warn_on_readonly))
-  finally:
-    _multistores_lock.release()
-  key = util.dict_to_tuple_key(key_dict)
-  return multistore._get_storage(key)
-
-
-class _MultiStore(object):
-  """A file backed store for multiple credentials."""
-
-  @util.positional(2)
-  def __init__(self, filename, warn_on_readonly=True):
-    """Initialize the class.
-
-    This will create the file if necessary.
-    """
-    self._file = LockedFile(filename, 'r+b', 'rb')
-    self._thread_lock = threading.Lock()
-    self._read_only = False
-    self._warn_on_readonly = warn_on_readonly
-
-    self._create_file_if_needed()
-
-    # Cache of deserialized store. This is only valid after the
-    # _MultiStore is locked or _refresh_data_cache is called. This is
-    # of the form of:
-    #
-    # ((key, value), (key, value)...) -> OAuth2Credential
-    #
-    # If this is None, then the store hasn't been read yet.
-    self._data = None
-
-  class _Storage(BaseStorage):
-    """A Storage object that knows how to read/write a single credential."""
-
-    def __init__(self, multistore, key):
-      self._multistore = multistore
-      self._key = key
-
-    def acquire_lock(self):
-      """Acquires any lock necessary to access this Storage.
-
-      This lock is not reentrant.
-      """
-      self._multistore._lock()
-
-    def release_lock(self):
-      """Release the Storage lock.
-
-      Trying to release a lock that isn't held will result in a
-      RuntimeError.
-      """
-      self._multistore._unlock()
-
-    def locked_get(self):
-      """Retrieve credential.
-
-      The Storage lock must be held when this is called.
-
-      Returns:
-        oauth2client.client.Credentials
-      """
-      credential = self._multistore._get_credential(self._key)
-      if credential:
-        credential.set_store(self)
-      return credential
-
-    def locked_put(self, credentials):
-      """Write a credential.
-
-      The Storage lock must be held when this is called.
-
-      Args:
-        credentials: Credentials, the credentials to store.
-      """
-      self._multistore._update_credential(self._key, credentials)
-
-    def locked_delete(self):
-      """Delete a credential.
-
-      The Storage lock must be held when this is called.
-
-      Args:
-        credentials: Credentials, the credentials to store.
-      """
-      self._multistore._delete_credential(self._key)
-
-  def _create_file_if_needed(self):
-    """Create an empty file if necessary.
-
-    This method will not initialize the file. Instead it implements a
-    simple version of "touch" to ensure the file has been created.
-    """
-    if not os.path.exists(self._file.filename()):
-      old_umask = os.umask(0177)
-      try:
-        open(self._file.filename(), 'a+b').close()
-      finally:
-        os.umask(old_umask)
-
-  def _lock(self):
-    """Lock the entire multistore."""
-    self._thread_lock.acquire()
-    self._file.open_and_lock()
-    if not self._file.is_locked():
-      self._read_only = True
-      if self._warn_on_readonly:
-        logger.warn('The credentials file (%s) is not writable. Opening in '
-                    'read-only mode. Any refreshed credentials will only be '
-                    'valid for this run.' % self._file.filename())
-    if os.path.getsize(self._file.filename()) == 0:
-      logger.debug('Initializing empty multistore file')
-      # The multistore is empty so write out an empty file.
-      self._data = {}
-      self._write()
-    elif not self._read_only or self._data is None:
-      # Only refresh the data if we are read/write or we haven't
-      # cached the data yet. If we are readonly, we assume is isn't
-      # changing out from under us and that we only have to read it
-      # once. This prevents us from whacking any new access keys that
-      # we have cached in memory but were unable to write out.
-      self._refresh_data_cache()
-
-  def _unlock(self):
-    """Release the lock on the multistore."""
-    self._file.unlock_and_close()
-    self._thread_lock.release()
-
-  def _locked_json_read(self):
-    """Get the raw content of the multistore file.
-
-    The multistore must be locked when this is called.
-
-    Returns:
-      The contents of the multistore decoded as JSON.
-    """
-    assert self._thread_lock.locked()
-    self._file.file_handle().seek(0)
-    return simplejson.load(self._file.file_handle())
-
-  def _locked_json_write(self, data):
-    """Write a JSON serializable data structure to the multistore.
-
-    The multistore must be locked when this is called.
-
-    Args:
-      data: The data to be serialized and written.
-    """
-    assert self._thread_lock.locked()
-    if self._read_only:
-      return
-    self._file.file_handle().seek(0)
-    simplejson.dump(data, self._file.file_handle(), sort_keys=True, indent=2)
-    self._file.file_handle().truncate()
-
-  def _refresh_data_cache(self):
-    """Refresh the contents of the multistore.
-
-    The multistore must be locked when this is called.
-
-    Raises:
-      NewerCredentialStoreError: Raised when a newer client has written the
-        store.
-    """
-    self._data = {}
-    try:
-      raw_data = self._locked_json_read()
-    except Exception:
-      logger.warn('Credential data store could not be loaded. '
-                  'Will ignore and overwrite.')
-      return
-
-    version = 0
-    try:
-      version = raw_data['file_version']
-    except Exception:
-      logger.warn('Missing version for credential data store. It may be '
-                  'corrupt or an old version. Overwriting.')
-    if version > 1:
-      raise NewerCredentialStoreError(
-          'Credential file has file_version of %d. '
-          'Only file_version of 1 is supported.' % version)
-
-    credentials = []
-    try:
-      credentials = raw_data['data']
-    except (TypeError, KeyError):
-      pass
-
-    for cred_entry in credentials:
-      try:
-        (key, credential) = self._decode_credential_from_json(cred_entry)
-        self._data[key] = credential
-      except:
-        # If something goes wrong loading a credential, just ignore it
-        logger.info('Error decoding credential, skipping', exc_info=True)
-
-  def _decode_credential_from_json(self, cred_entry):
-    """Load a credential from our JSON serialization.
-
-    Args:
-      cred_entry: A dict entry from the data member of our format
-
-    Returns:
-      (key, cred) where the key is the key tuple and the cred is the
-        OAuth2Credential object.
-    """
-    raw_key = cred_entry['key']
-    key = util.dict_to_tuple_key(raw_key)
-    credential = None
-    credential = Credentials.new_from_json(simplejson.dumps(cred_entry['credential']))
-    return (key, credential)
-
-  def _write(self):
-    """Write the cached data back out.
-
-    The multistore must be locked.
-    """
-    raw_data = {'file_version': 1}
-    raw_creds = []
-    raw_data['data'] = raw_creds
-    for (cred_key, cred) in self._data.items():
-      raw_key = dict(cred_key)
-      raw_cred = simplejson.loads(cred.to_json())
-      raw_creds.append({'key': raw_key, 'credential': raw_cred})
-    self._locked_json_write(raw_data)
-
-  def _get_credential(self, key):
-    """Get a credential from the multistore.
-
-    The multistore must be locked.
-
-    Args:
-      key: The key used to retrieve the credential
-
-    Returns:
-      The credential specified or None if not present
-    """
-    return self._data.get(key, None)
-
-  def _update_credential(self, key, cred):
-    """Update a credential and write the multistore.
-
-    This must be called when the multistore is locked.
-
-    Args:
-      key: The key used to retrieve the credential
-      cred: The OAuth2Credential to update/set
-    """
-    self._data[key] = cred
-    self._write()
-
-  def _delete_credential(self, key):
-    """Delete a credential and write the multistore.
-
-    This must be called when the multistore is locked.
-
-    Args:
-      key: The key used to retrieve the credential
-    """
-    try:
-      del self._data[key]
-    except KeyError:
-      pass
-    self._write()
-
-  def _get_storage(self, key):
-    """Get a Storage object to get/set a credential.
-
-    This Storage is a 'view' into the multistore.
-
-    Args:
-      key: The key used to retrieve the credential
-
-    Returns:
-      A Storage object that can be used to get/set this cred
-    """
-    return self._Storage(self, key)
diff --git a/oauth2client/tools.py b/oauth2client/tools.py
deleted file mode 100644
index 93b0171..0000000
--- a/oauth2client/tools.py
+++ /dev/null
@@ -1,205 +0,0 @@
-# Copyright (C) 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Command-line tools for authenticating via OAuth 2.0
-
-Do the OAuth 2.0 Web Server dance for a command line application. Stores the
-generated credentials in a common file that is used by other example apps in
-the same directory.
-"""
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-__all__ = ['run']
-
-
-import BaseHTTPServer
-import gflags
-import socket
-import sys
-import webbrowser
-
-from oauth2client.client import FlowExchangeError
-from oauth2client.client import OOB_CALLBACK_URN
-from oauth2client import util
-
-try:
-  from urlparse import parse_qsl
-except ImportError:
-  from cgi import parse_qsl
-
-
-FLAGS = gflags.FLAGS
-
-gflags.DEFINE_boolean('auth_local_webserver', True,
-                      ('Run a local web server to handle redirects during '
-                       'OAuth authorization.'))
-
-gflags.DEFINE_string('auth_host_name', 'localhost',
-                     ('Host name to use when running a local web server to '
-                      'handle redirects during OAuth authorization.'))
-
-gflags.DEFINE_multi_int('auth_host_port', [8080, 8090],
-                        ('Port to use when running a local web server to '
-                         'handle redirects during OAuth authorization.'))
-
-
-class ClientRedirectServer(BaseHTTPServer.HTTPServer):
-  """A server to handle OAuth 2.0 redirects back to localhost.
-
-  Waits for a single request and parses the query parameters
-  into query_params and then stops serving.
-  """
-  query_params = {}
-
-
-class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-  """A handler for OAuth 2.0 redirects back to localhost.
-
-  Waits for a single request and parses the query parameters
-  into the servers query_params and then stops serving.
-  """
-
-  def do_GET(s):
-    """Handle a GET request.
-
-    Parses the query parameters and prints a message
-    if the flow has completed. Note that we can't detect
-    if an error occurred.
-    """
-    s.send_response(200)
-    s.send_header("Content-type", "text/html")
-    s.end_headers()
-    query = s.path.split('?', 1)[-1]
-    query = dict(parse_qsl(query))
-    s.server.query_params = query
-    s.wfile.write("<html><head><title>Authentication Status</title></head>")
-    s.wfile.write("<body><p>The authentication flow has completed.</p>")
-    s.wfile.write("</body></html>")
-
-  def log_message(self, format, *args):
-    """Do not log messages to stdout while running as command line program."""
-    pass
-
-
-@util.positional(2)
-def run(flow, storage, http=None):
-  """Core code for a command-line application.
-
-  The run() function is called from your application and runs through all the
-  steps to obtain credentials. It takes a Flow argument and attempts to open an
-  authorization server page in the user's default web browser. The server asks
-  the user to grant your application access to the user's data. If the user
-  grants access, the run() function returns new credentials. The new credentials
-  are also stored in the Storage argument, which updates the file associated
-  with the Storage object.
-
-  It presumes it is run from a command-line application and supports the
-  following flags:
-
-    --auth_host_name: Host name to use when running a local web server
-      to handle redirects during OAuth authorization.
-      (default: 'localhost')
-
-    --auth_host_port: Port to use when running a local web server to handle
-      redirects during OAuth authorization.;
-      repeat this option to specify a list of values
-      (default: '[8080, 8090]')
-      (an integer)
-
-    --[no]auth_local_webserver: Run a local web server to handle redirects
-      during OAuth authorization.
-      (default: 'true')
-
-  Since it uses flags make sure to initialize the gflags module before calling
-  run().
-
-  Args:
-    flow: Flow, an OAuth 2.0 Flow to step through.
-    storage: Storage, a Storage to store the credential in.
-    http: An instance of httplib2.Http.request
-         or something that acts like it.
-
-  Returns:
-    Credentials, the obtained credential.
-  """
-  if FLAGS.auth_local_webserver:
-    success = False
-    port_number = 0
-    for port in FLAGS.auth_host_port:
-      port_number = port
-      try:
-        httpd = ClientRedirectServer((FLAGS.auth_host_name, port),
-                                     ClientRedirectHandler)
-      except socket.error, e:
-        pass
-      else:
-        success = True
-        break
-    FLAGS.auth_local_webserver = success
-    if not success:
-      print 'Failed to start a local webserver listening on either port 8080'
-      print 'or port 9090. Please check your firewall settings and locally'
-      print 'running programs that may be blocking or using those ports.'
-      print
-      print 'Falling back to --noauth_local_webserver and continuing with',
-      print 'authorization.'
-      print
-
-  if FLAGS.auth_local_webserver:
-    oauth_callback = 'http://%s:%s/' % (FLAGS.auth_host_name, port_number)
-  else:
-    oauth_callback = OOB_CALLBACK_URN
-  flow.redirect_uri = oauth_callback
-  authorize_url = flow.step1_get_authorize_url()
-
-  if FLAGS.auth_local_webserver:
-    webbrowser.open(authorize_url, new=1, autoraise=True)
-    print 'Your browser has been opened to visit:'
-    print
-    print '    ' + authorize_url
-    print
-    print 'If your browser is on a different machine then exit and re-run this'
-    print 'application with the command-line parameter '
-    print
-    print '  --noauth_local_webserver'
-    print
-  else:
-    print 'Go to the following link in your browser:'
-    print
-    print '    ' + authorize_url
-    print
-
-  code = None
-  if FLAGS.auth_local_webserver:
-    httpd.handle_request()
-    if 'error' in httpd.query_params:
-      sys.exit('Authentication request was rejected.')
-    if 'code' in httpd.query_params:
-      code = httpd.query_params['code']
-    else:
-      print 'Failed to find "code" in the query parameters of the redirect.'
-      sys.exit('Try running with --noauth_local_webserver.')
-  else:
-    code = raw_input('Enter verification code: ').strip()
-
-  try:
-    credential = flow.step2_exchange(code, http=http)
-  except FlowExchangeError, e:
-    sys.exit('Authentication has failed: %s' % e)
-
-  storage.put(credential)
-  credential.set_store(storage)
-  print 'Authentication successful.'
-
-  return credential
diff --git a/oauth2client/util.py b/oauth2client/util.py
deleted file mode 100644
index ee6a100..0000000
--- a/oauth2client/util.py
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-"""Common utility library."""
-
-__author__ = ['rafek@google.com (Rafe Kaplan)',
-              'guido@google.com (Guido van Rossum)',
-]
-__all__ = [
-  'positional',
-]
-
-import gflags
-import inspect
-import logging
-import types
-import urllib
-import urlparse
-
-try:
-  from urlparse import parse_qsl
-except ImportError:
-  from cgi import parse_qsl
-
-logger = logging.getLogger(__name__)
-
-FLAGS = gflags.FLAGS
-
-gflags.DEFINE_enum('positional_parameters_enforcement', 'WARNING',
-    ['EXCEPTION', 'WARNING', 'IGNORE'],
-    'The action when an oauth2client.util.positional declaration is violated.')
-
-
-def positional(max_positional_args):
-  """A decorator to declare that only the first N arguments my be positional.
-
-  This decorator makes it easy to support Python 3 style key-word only
-  parameters. For example, in Python 3 it is possible to write:
-
-    def fn(pos1, *, kwonly1=None, kwonly1=None):
-      ...
-
-  All named parameters after * must be a keyword:
-
-    fn(10, 'kw1', 'kw2')  # Raises exception.
-    fn(10, kwonly1='kw1')  # Ok.
-
-  Example:
-    To define a function like above, do:
-
-      @positional(1)
-      def fn(pos1, kwonly1=None, kwonly2=None):
-        ...
-
-    If no default value is provided to a keyword argument, it becomes a required
-    keyword argument:
-
-      @positional(0)
-      def fn(required_kw):
-        ...
-
-    This must be called with the keyword parameter:
-
-      fn()  # Raises exception.
-      fn(10)  # Raises exception.
-      fn(required_kw=10)  # Ok.
-
-    When defining instance or class methods always remember to account for
-    'self' and 'cls':
-
-      class MyClass(object):
-
-        @positional(2)
-        def my_method(self, pos1, kwonly1=None):
-          ...
-
-        @classmethod
-        @positional(2)
-        def my_method(cls, pos1, kwonly1=None):
-          ...
-
-  The positional decorator behavior is controlled by the
-  --positional_parameters_enforcement flag. The flag may be set to 'EXCEPTION',
-  'WARNING' or 'IGNORE' to raise an exception, log a warning, or do nothing,
-  respectively, if a declaration is violated.
-
-  Args:
-    max_positional_arguments: Maximum number of positional arguments. All
-      parameters after the this index must be keyword only.
-
-  Returns:
-    A decorator that prevents using arguments after max_positional_args from
-    being used as positional parameters.
-
-  Raises:
-    TypeError if a key-word only argument is provided as a positional parameter,
-    but only if the --positional_parameters_enforcement flag is set to
-    'EXCEPTION'.
-  """
-  def positional_decorator(wrapped):
-    def positional_wrapper(*args, **kwargs):
-      if len(args) > max_positional_args:
-        plural_s = ''
-        if max_positional_args != 1:
-          plural_s = 's'
-        message = '%s() takes at most %d positional argument%s (%d given)' % (
-            wrapped.__name__, max_positional_args, plural_s, len(args))
-        if FLAGS.positional_parameters_enforcement == 'EXCEPTION':
-          raise TypeError(message)
-        elif FLAGS.positional_parameters_enforcement == 'WARNING':
-          logger.warning(message)
-        else: # IGNORE
-          pass
-      return wrapped(*args, **kwargs)
-    return positional_wrapper
-
-  if isinstance(max_positional_args, (int, long)):
-    return positional_decorator
-  else:
-    args, _, _, defaults = inspect.getargspec(max_positional_args)
-    return positional(len(args) - len(defaults))(max_positional_args)
-
-
-def scopes_to_string(scopes):
-  """Converts scope value to a string.
-
-  If scopes is a string then it is simply passed through. If scopes is an
-  iterable then a string is returned that is all the individual scopes
-  concatenated with spaces.
-
-  Args:
-    scopes: string or iterable of strings, the scopes.
-
-  Returns:
-    The scopes formatted as a single string.
-  """
-  if isinstance(scopes, types.StringTypes):
-    return scopes
-  else:
-    return ' '.join(scopes)
-
-
-def dict_to_tuple_key(dictionary):
-  """Converts a dictionary to a tuple that can be used as an immutable key.
-
-  The resulting key is always sorted so that logically equivalent dictionaries
-  always produce an identical tuple for a key.
-
-  Args:
-    dictionary: the dictionary to use as the key.
-
-  Returns:
-    A tuple representing the dictionary in it's naturally sorted ordering.
-  """
-  return tuple(sorted(dictionary.items()))
-
-
-def _add_query_parameter(url, name, value):
-  """Adds a query parameter to a url.
-
-  Replaces the current value if it already exists in the URL.
-
-  Args:
-    url: string, url to add the query parameter to.
-    name: string, query parameter name.
-    value: string, query parameter value.
-
-  Returns:
-    Updated query parameter. Does not update the url if value is None.
-  """
-  if value is None:
-    return url
-  else:
-    parsed = list(urlparse.urlparse(url))
-    q = dict(parse_qsl(parsed[4]))
-    q[name] = value
-    parsed[4] = urllib.urlencode(q)
-    return urlparse.urlunparse(parsed)
diff --git a/oauth2client/xsrfutil.py b/oauth2client/xsrfutil.py
deleted file mode 100644
index 7e1fe5c..0000000
--- a/oauth2client/xsrfutil.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/python2.5
-#
-# Copyright 2010 the Melange authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helper methods for creating & verifying XSRF tokens."""
-
-__authors__ = [
-  '"Doug Coker" <dcoker@google.com>',
-  '"Joe Gregorio" <jcgregorio@google.com>',
-]
-
-
-import base64
-import hmac
-import os  # for urandom
-import time
-
-from oauth2client import util
-
-
-# Delimiter character
-DELIMITER = ':'
-
-# 1 hour in seconds
-DEFAULT_TIMEOUT_SECS = 1*60*60
-
-@util.positional(2)
-def generate_token(key, user_id, action_id="", when=None):
-  """Generates a URL-safe token for the given user, action, time tuple.
-
-  Args:
-    key: secret key to use.
-    user_id: the user ID of the authenticated user.
-    action_id: a string identifier of the action they requested
-      authorization for.
-    when: the time in seconds since the epoch at which the user was
-      authorized for this action. If not set the current time is used.
-
-  Returns:
-    A string XSRF protection token.
-  """
-  when = when or int(time.time())
-  digester = hmac.new(key)
-  digester.update(str(user_id))
-  digester.update(DELIMITER)
-  digester.update(action_id)
-  digester.update(DELIMITER)
-  digester.update(str(when))
-  digest = digester.digest()
-
-  token = base64.urlsafe_b64encode('%s%s%d' % (digest,
-                                               DELIMITER,
-                                               when))
-  return token
-
-
-@util.positional(3)
-def validate_token(key, token, user_id, action_id="", current_time=None):
-  """Validates that the given token authorizes the user for the action.
-
-  Tokens are invalid if the time of issue is too old or if the token
-  does not match what generateToken outputs (i.e. the token was forged).
-
-  Args:
-    key: secret key to use.
-    token: a string of the token generated by generateToken.
-    user_id: the user ID of the authenticated user.
-    action_id: a string identifier of the action they requested
-      authorization for.
-
-  Returns:
-    A boolean - True if the user is authorized for the action, False
-    otherwise.
-  """
-  if not token:
-    return False
-  try:
-    decoded = base64.urlsafe_b64decode(str(token))
-    token_time = long(decoded.split(DELIMITER)[-1])
-  except (TypeError, ValueError):
-    return False
-  if current_time is None:
-    current_time = time.time()
-  # If the token is too old it's not valid.
-  if current_time - token_time > DEFAULT_TIMEOUT_SECS:
-    return False
-
-  # The given token should match the generated one with the same time.
-  expected_token = generate_token(key, user_id, action_id=action_id,
-                                  when=token_time)
-  if len(token) != len(expected_token):
-    return False
-
-  # Perform constant time comparison to avoid timing attacks
-  different = 0
-  for x, y in zip(token, expected_token):
-    different |= ord(x) ^ ord(y)
-  if different:
-    return False
-
-  return True
diff --git a/old_dev_appserver.py b/old_dev_appserver.py
index af18e1c..661f26e 100644
--- a/old_dev_appserver.py
+++ b/old_dev_appserver.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/php/sdk/google/appengine/api/modules/ModulesServiceProtoTest.php b/php/sdk/google/appengine/api/modules/InvalidModuleStateException.php
similarity index 65%
copy from php/sdk/google/appengine/api/modules/ModulesServiceProtoTest.php
copy to php/sdk/google/appengine/api/modules/InvalidModuleStateException.php
index fd4b28e..d3fdda8 100644
--- a/php/sdk/google/appengine/api/modules/ModulesServiceProtoTest.php
+++ b/php/sdk/google/appengine/api/modules/InvalidModuleStateException.php
@@ -14,15 +14,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/**
+ */
+
 namespace google\appengine\api\modules;
 
-require_once 'google/appengine/api/modules/modules_service_pb.php';
+require_once "google/appengine/api/modules/ModulesException.php";
 
-use \google\appengine\GetModulesRequest;
-
-class ModulesServiceProtoTest extends\PHPUnit_Framework_TestCase {
-  public function testGetModulesRequestInstantiation() {
-    $req = new GetModulesRequest();
-    $this->assertEquals("", $req->serializePartialToString());
-  }
+/**
+ * Thrown when the current state was unexpected when starting/stopping a
+ * module.
+ */
+class InvalidModuleStateException extends ModulesException {
 }
diff --git a/php/sdk/google/appengine/api/modules/ModulesServiceProtoTest.php b/php/sdk/google/appengine/api/modules/ModulesException.php
similarity index 65%
copy from php/sdk/google/appengine/api/modules/ModulesServiceProtoTest.php
copy to php/sdk/google/appengine/api/modules/ModulesException.php
index fd4b28e..15718e7 100644
--- a/php/sdk/google/appengine/api/modules/ModulesServiceProtoTest.php
+++ b/php/sdk/google/appengine/api/modules/ModulesException.php
@@ -14,15 +14,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/**
+ */
+
 namespace google\appengine\api\modules;
 
-require_once 'google/appengine/api/modules/modules_service_pb.php';
-
-use \google\appengine\GetModulesRequest;
-
-class ModulesServiceProtoTest extends\PHPUnit_Framework_TestCase {
-  public function testGetModulesRequestInstantiation() {
-    $req = new GetModulesRequest();
-    $this->assertEquals("", $req->serializePartialToString());
-  }
+/**
+ * Thrown when there is a failure using the Modules API.
+ */
+class ModulesException extends \Exception {
 }
diff --git a/php/sdk/google/appengine/api/modules/ModulesService.php b/php/sdk/google/appengine/api/modules/ModulesService.php
new file mode 100644
index 0000000..0b8be0c
--- /dev/null
+++ b/php/sdk/google/appengine/api/modules/ModulesService.php
@@ -0,0 +1,431 @@
+<?php
+/**
+ * Copyright 2007 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * An API for fetching information about and controlling App Engine Modules.
+ *
+ */
+
+namespace google\appengine\api\modules;
+
+require_once 'google/appengine/api/modules/modules_service_pb.php';
+require_once "google/appengine/api/modules/InvalidModuleStateException.php";
+require_once "google/appengine/api/modules/ModulesException.php";
+require_once "google/appengine/api/modules/TransientModulesException.php";
+require_once 'google/appengine/runtime/ApiProxy.php';
+require_once 'google/appengine/runtime/ApplicationError.php';
+
+use \google\appengine\runtime\ApiProxy;
+use \google\appengine\runtime\ApplicationError;
+use \google\appengine\GetDefaultVersionRequest;
+use \google\appengine\GetDefaultVersionResponse;
+use \google\appengine\GetHostnameRequest;
+use \google\appengine\GetHostnameResponse;
+use \google\appengine\GetModulesRequest;
+use \google\appengine\GetModulesResponse;
+use \google\appengine\GetNumInstancesRequest;
+use \google\appengine\GetNumInstancesResponse;
+use \google\appengine\GetVersionsRequest;
+use \google\appengine\GetVersionsResponse;
+use \google\appengine\ModulesServiceError\ErrorCode;
+use \google\appengine\SetNumInstancesRequest;
+use \google\appengine\SetNumInstancesResponse;
+use \google\appengine\StartModuleRequest;
+use \google\appengine\StartModuleResponse;
+use \google\appengine\StopModuleRequest;
+use \google\appengine\StopModuleResponse;
+
+final class ModulesService {
+  private static function errorCodeToException($error) {
+    switch($error) {
+      case ErrorCode::INVALID_MODULE:
+        return new ModulesException('Invalid module.');
+      case ErrorCode::INVALID_VERSION:
+        return new ModulesException('Invalid version.');
+      case ErrorCode::INVALID_INSTANCES:
+        return new ModulesException('Invalid instances.');
+      case ErrorCode::TRANSIENT_ERROR:
+        return new TransientModulesException();
+      case ErrorCode::UNEXPECTED_STATE:
+        return new InvalidModuleStateException();
+      default:
+        return new ModulesException('Error Code: ' . $error);
+    }
+  }
+
+  /**
+   * Gets the name of the currently running module.
+   *
+   * @return string The name of the current module. For example, if this is
+   * version "v1" of module "module5" for app "my-app", this function
+   * will return "module5".
+   */
+  public static function getCurrentModuleName() {
+    return $_SERVER['CURRENT_MODULE_ID'];
+  }
+
+  /**
+   * Gets the version of the currently running module.
+   *
+   * @return string The name of the current module. For example, if this is
+   * version "v1" of module "module5" for app "my-app", this function
+   * will return "v1".
+   */
+  public static function getCurrentVersionName() {
+    return explode('.', $_SERVER['CURRENT_VERSION_ID'])[0];
+  }
+
+  /**
+   * Gets the id of the currently running instance.
+   *
+   * @return string The name of the current module. For example, if this is
+   * instance 2 of version "v1" of module "module5" for app "my-app", this
+   * function will return "2". Will return null for automatically-scaled
+   * modules.
+   */
+  public static function getCurrentInstanceId() {
+    if (array_key_exists('INSTANCE_ID', $_SERVER)) {
+      return $_SERVER['INSTANCE_ID'];
+    }
+    return null;
+  }
+
+  /**
+   * Gets an array of all the modules for the application.
+   *
+   * @return string[] An array of string containing the names of the modules
+   * associated with the application. The 'default' module will be included if
+   * it exists, as will the name of the module that is associated with the
+   * instance that calls this function.
+   */
+  public static function getModules() {
+    $req = new GetModulesRequest();
+    $resp = new GetModulesResponse();
+
+    ApiProxy::makeSyncCall('modules', 'GetModules', $req, $resp);
+    return $resp->getModuleList();
+  }
+
+  /**
+   * Get an array of all versions associated with a module.
+   *
+   * @param string $module The name of the module to retrieve the versions for.
+   * If null then the versions for the current module will be retrieved.
+   *
+   * @return string[] An array of strings containing the names of versions
+   * associated with the module. The current version will also be included in
+   * this list.
+   *
+   * @throws \InvalidArgumentException If $module is not a string.
+   * @throws ModulesException If the given $module isn't valid.
+   * @throws TransientModulesException if there is an issue fetching the
+   * information.
+   */
+  public static function getVersions($module=null) {
+    $req = new GetVersionsRequest();
+    $resp = new GetVersionsResponse();
+
+    if ($module !== null) {
+      if (!is_string($module)) {
+        throw new \InvalidArgumentException(
+            '$module must be a string. Actual type: ' . gettype($module));
+      }
+      $req->setModule($module);
+    }
+
+    try {
+      ApiProxy::makeSyncCall('modules', 'GetVersions', $req, $resp);
+    } catch (ApplicationError $e) {
+      throw errorCodeToException($e->getApplicationError());
+    }
+    return $resp->getVersionList();
+  }
+
+  /**
+   * Get the default version of a module.
+   *
+   * @param string $module The name of the module to retrieve the default
+   * versions for. If null then the default versions for the current module
+   * will be retrieved.
+   *
+   * @return string The default version of the module.
+   *
+   * @throws \InvalidArgumentException If $module is not a string.
+   * @throws ModulesException If the given $module is invalid or if no default
+   * version could be found.
+   */
+  public static function getDefaultVersion($module=null) {
+    $req = new GetDefaultVersionRequest();
+    $resp = new GetDefaultVersionResponse();
+
+    if ($module !== null) {
+      if (!is_string($module)) {
+        throw new \InvalidArgumentException(
+            '$module must be a string. Actual type: ' . gettype($module));
+      }
+      $req->setModule($module);
+    }
+
+    try {
+      ApiProxy::makeSyncCall('modules', 'GetDefaultVersion', $req, $resp);
+    } catch (ApplicationError $e) {
+      throw errorCodeToException($e->getApplicationError());
+    }
+    return $resp->getVersion();
+  }
+
+  /**
+   * Get the number of instances set for a version of a module.
+   *
+   * This function does not work on automatically-scaled modules.
+   *
+   * @param string $module The name of the module to retrieve the count for. If
+   * null then the count for the current module will be retrieved.
+   *
+   * @param string $version The version of the module to retrieve the count for.
+   * If null then the count for the version of the current instance will be
+   * retrieved.
+   *
+   * @return integer The number of instances set for the current module
+   * version.
+   *
+   * @throws \InvalidArgumentException If $module or $version is not a string.
+   * @throws ModulesException if the given combination of $module and $version
+   * is invalid.
+   */
+  public static function getNumInstances($module=null, $version=null) {
+    $req = new GetNumInstancesRequest();
+    $resp = new GetNumInstancesResponse();
+
+    if ($module !== null) {
+      if (!is_string($module)) {
+        throw new \InvalidArgumentException(
+            '$module must be a string. Actual type: ' . gettype($module));
+      }
+      $req->setModule($module);
+    }
+
+    if ($version !== null) {
+      if (!is_string($version)) {
+        throw new \InvalidArgumentException(
+            '$version must be a string. Actual type: ' . gettype($version));
+      }
+      $req->setVersion($version);
+    }
+
+    try {
+      ApiProxy::makeSyncCall('modules', 'GetNumInstances', $req, $resp);
+    } catch (ApplicationError $e) {
+      throw self::errorCodeToException($e->getApplicationError());
+    }
+    return (int) $resp->getInstances();
+  }
+
+  /**
+   * Set the number of instances for a version of a module.
+   *
+   * This function does not work on automatically-scaled modules.
+   *
+   * @param string $module The name of the module to set the instance count for.
+   * If null then the instance count for the current module will be set.
+   *
+   * @param string $version The version of the module to set the instance count
+   * for. If null then the count for the version of the current instance will
+   * be set.
+   *
+   * @throws \InvalidArgumentException If $instances is not an integer or if
+   * $module or $version is not a string.
+   * @throws ModulesException if the given combination of $module and $version
+   * is invalid.
+   * @throws TransientModulesException if there is an issue setting the
+   * instance count.
+  */
+  public static function setNumInstances($instances,
+                                         $module=null,
+                                         $version=null) {
+    $req = new SetNumInstancesRequest();
+    $resp = new SetNumInstancesResponse();
+
+    if (!is_int($instances)) {
+      throw new \InvalidArgumentException(
+          '$instances must be an integer. Actual type: ' . gettype($instances));
+    }
+    $req->setInstances($instances);
+
+    if ($module !== null) {
+      if (!is_string($module)) {
+        throw new \InvalidArgumentException(
+            '$module must be a string. Actual type: ' . gettype($module));
+      }
+      $req->setModule($module);
+    }
+
+    if ($version !== null) {
+      if (!is_string($version)) {
+        throw new \InvalidArgumentException(
+            '$version must be a string. Actual type: ' . gettype($version));
+      }
+      $req->setVersion($version);
+    }
+
+    try {
+      ApiProxy::makeSyncCall('modules', 'SetNumInstances', $req, $resp);
+    } catch (ApplicationError $e) {
+      throw self::errorCodeToException($e->getApplicationError());
+    }
+  }
+
+  /**
+   * Starts all instances of the given version of a module.
+   * *
+   * @param string $module The name of the module to start.
+   *
+   * @param string $version The version of the module to start.
+   *
+   * @throws \InvalidArgumentException If $module or $version is not a string.
+   * @throws ModulesException if the given combination of $module and $version
+   * is invalid.
+   * @throws InvalidModuleStateException if the given $module is already
+   * started or cannot be started.
+   * @throws TransientModulesException if there is an issue starting the module
+   * version.
+  */
+  public static function startModule($module, $version) {
+    $req = new StartModuleRequest();
+    $resp = new StartModuleResponse();
+
+    if (!is_string($module)) {
+      throw new \InvalidArgumentException(
+          '$module must be a string. Actual type: ' . gettype($module));
+    }
+    $req->setModule($module);
+
+    if (!is_string($version)) {
+      throw new \InvalidArgumentException(
+          '$version must be a string. Actual type: ' . gettype($version));
+    }
+    $req->setVersion($version);
+
+    try {
+      ApiProxy::makeSyncCall('modules', 'StartModule', $req, $resp);
+    } catch (ApplicationError $e) {
+      throw self::errorCodeToException($e->getApplicationError());
+    }
+  }
+
+  /**
+   * Stops all instances of the given version of a module.
+   *
+   * @param string $module The name of the module to stop. If null then the
+   * current module will be stopped.
+   *
+   * @param string $version The version of the module to stop. If null then the
+   * current version will be stopped.
+   *
+   * @throws \InvalidArgumentException If $module or $version is not a string.
+   * @throws ModulesException if the given combination of $module and $version
+   * is invalid.
+   * @throws InvalidModuleStateException if the given $module is already
+   * stopped or cannot be stopped.
+   * @throws TransientModulesException if there is an issue stopping the module
+   * version.
+   */
+  public static function stopModule($module=null, $version=null) {
+    $req = new StopModuleRequest();
+    $resp = new StopModuleResponse();
+
+    if ($module !== null) {
+      if (!is_string($module)) {
+        throw new \InvalidArgumentException(
+            '$module must be a string. Actual type: ' . gettype($module));
+      }
+      $req->setModule($module);
+    }
+
+    if ($version !== null) {
+      if (!is_string($version)) {
+        throw new \InvalidArgumentException(
+            '$version must be a string. Actual type: ' . gettype($version));
+      }
+      $req->setVersion($version);
+    }
+
+    try {
+      ApiProxy::makeSyncCall('modules', 'StopModule', $req, $resp);
+    } catch (ApplicationError $e) {
+      throw self::errorCodeToException($e->getApplicationError());
+    }
+  }
+
+  /**
+   * Returns the hostname to use when contacting a module.
+   *
+   * @param string $module The name of the module whose hostname should be
+   * returned. If null then the hostname of the current module will be returned.
+   *
+   * @param string $version The version of the module whose hostname should be
+   * returned. If null then the hostname for the version of the current
+   * instance will be returned.
+   *
+   * @param string $instance The instance whose hostname should be returned. If
+   * null then the load balanced hostname for the module will be returned. If
+   * the module is not a fixed module then the instance parameter is ignored.
+   *
+   * @throws \InvalidArgumentException If $module or $version is not a string
+   * or if $instance is not a string or integer.
+   * @throws ModulesException if the given combination of $module and $instance
+   * is invalid.
+   */
+  public static function getHostname($module=null,
+                                     $version=null,
+                                     $instance=null) {
+    $req = new GetHostnameRequest();
+    $resp = new GetHostnameResponse();
+
+    if ($module !== null) {
+      if (!is_string($module)) {
+        throw new \InvalidArgumentException(
+            '$module must be a string. Actual type: ' . gettype($module));
+      }
+      $req->setModule($module);
+    }
+
+    if ($version !== null) {
+      if (!is_string($version)) {
+        throw new \InvalidArgumentException(
+            '$version must be a string. Actual type: ' . gettype($version));
+      }
+      $req->setVersion($version);
+    }
+
+    if ($instance !== null) {
+      if (!is_int($instance) && !is_string($instance)) {
+        throw new \InvalidArgumentException(
+            '$instance must be an integer or string. Actual type: ' .
+            gettype($instance));
+      }
+      $req->setInstance((string) $instance);
+    }
+
+    try {
+      ApiProxy::makeSyncCall('modules', 'GetHostname', $req, $resp);
+    } catch (ApplicationError $e) {
+      throw self::errorCodeToException($e->getApplicationError());
+    }
+
+    return $resp->getHostname();
+  }
+}
diff --git a/php/sdk/google/appengine/api/modules/ModulesServiceTest.php b/php/sdk/google/appengine/api/modules/ModulesServiceTest.php
new file mode 100644
index 0000000..053314c
--- /dev/null
+++ b/php/sdk/google/appengine/api/modules/ModulesServiceTest.php
@@ -0,0 +1,406 @@
+<?php
+/**
+ * Copyright 2007 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Unit tests for the Modules API.
+ *
+ */
+
+namespace google\appengine\api\modules;
+
+require_once 'google/appengine/api/modules/modules_service_pb.php';
+require_once 'google/appengine/api/modules/ModulesService.php';
+require_once 'google/appengine/runtime/ApplicationError.php';
+require_once 'google/appengine/testing/ApiProxyTestBase.php';
+
+use \google\appengine\runtime\ApplicationError;
+use \google\appengine\testing\ApiProxyTestBase;
+use \google\appengine\GetDefaultVersionRequest;
+use \google\appengine\GetDefaultVersionResponse;
+use \google\appengine\GetHostnameRequest;
+use \google\appengine\GetHostnameResponse;
+use \google\appengine\GetModulesRequest;
+use \google\appengine\GetModulesResponse;
+use \google\appengine\GetNumInstancesRequest;
+use \google\appengine\GetNumInstancesResponse;
+use \google\appengine\GetVersionsRequest;
+use \google\appengine\GetVersionsResponse;
+use \google\appengine\ModulesServiceError\ErrorCode;
+use \google\appengine\SetNumInstancesRequest;
+use \google\appengine\SetNumInstancesResponse;
+use \google\appengine\StartModuleRequest;
+use \google\appengine\StartModuleResponse;
+use \google\appengine\StopModuleRequest;
+use \google\appengine\StopModuleResponse;
+
+
+class ModulesTest extends ApiProxyTestBase {
+
+  public function setUp() {
+    parent::setUp();
+    $this->_SERVER = $_SERVER;
+  }
+
+  public function tearDown() {
+    $_SERVER = $this->_SERVER;
+    parent::tearDown();
+  }
+
+  public function testGetCurrentModuleNameWithDefaultModule() {
+    $_SERVER['CURRENT_MODULE_ID'] = 'default';
+    $_SERVER['CURRENT_VERSION_ID'] = 'v1.123';
+    $this->assertEquals('default', ModulesService::getCurrentModuleName());
+  }
+
+  public function testGetCurrentModuleNameWithNonDefaultModule() {
+    $_SERVER['CURRENT_MODULE_ID'] = 'module1';
+    $_SERVER['CURRENT_VERSION_ID'] = 'v1.123';
+    $this->assertEquals('module1', ModulesService::getCurrentModuleName());
+  }
+
+  public function testGetCurrentVersionName() {
+    $_SERVER['CURRENT_VERSION_ID'] = 'v1.123';
+    $this->assertEquals('v1', ModulesService::getCurrentVersionName());
+  }
+
+  public function testGetCurrentInstanceIdNoneSet() {
+    $this->assertEquals(null, ModulesService::getCurrentInstanceId());
+  }
+
+  public function testGetCurrentInstanceId() {
+    $_SERVER['INSTANCE_ID'] = '123';
+    $this->assertEquals('123', ModulesService::getCurrentInstanceId());
+  }
+
+  public function testGetModules() {
+    $req = new GetModulesRequest();
+    $resp = new GetModulesResponse();
+
+    $resp->addModule('module1');
+    $resp->addModule('module2');
+
+    $this->apiProxyMock->expectCall('modules', 'GetModules', $req, $resp);
+
+    $this->assertEquals(['module1', 'module2'], ModulesService::getModules());
+    $this->apiProxyMock->verify();
+  }
+
+  public function testGetVersions() {
+    $req = new GetVersionsRequest();
+    $resp = new GetVersionsResponse();
+
+    $resp->addVersion('v1');
+    $resp->addVersion('v2');
+
+    $this->apiProxyMock->expectCall('modules', 'GetVersions', $req, $resp);
+
+    $this->assertEquals(['v1', 'v2'], ModulesService::getVersions());
+    $this->apiProxyMock->verify();
+  }
+
+  public function testGetVersionsWithModule() {
+    $req = new GetVersionsRequest();
+    $resp = new GetVersionsResponse();
+
+    $req->setModule('module1');
+    $resp->addVersion('v1');
+    $resp->addVersion('v2');
+
+    $this->apiProxyMock->expectCall('modules', 'GetVersions', $req, $resp);
+
+    $this->assertEquals(['v1', 'v2'], ModulesService::getVersions('module1'));
+    $this->apiProxyMock->verify();
+  }
+
+  public function testGetVersionsWithIntegerModule() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$module must be a string. Actual type: integer');
+    ModulesService::getVersions(5);
+  }
+
+  public function testGetNumInstances() {
+    $req = new GetNumInstancesRequest();
+    $resp = new GetNumInstancesResponse();
+
+    $resp->setInstances(3);
+
+    $this->apiProxyMock->expectCall('modules', 'GetNumInstances', $req, $resp);
+
+    $this->assertEquals(3, ModulesService::getNumInstances());
+    $this->apiProxyMock->verify();
+  }
+
+  public function testGetNumInstancesWithModuleAndVersion() {
+    $req = new GetNumInstancesRequest();
+    $resp = new GetNumInstancesResponse();
+
+    $req->setModule('module1');
+    $req->setVersion('v1');
+    $resp->setInstances(3);
+
+    $this->apiProxyMock->expectCall('modules', 'GetNumInstances', $req, $resp);
+
+    $this->assertEquals(3, ModulesService::getNumInstances('module1', 'v1'));
+    $this->apiProxyMock->verify();
+  }
+
+  public function testGetNumInstancesWithIntegerModule() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$module must be a string. Actual type: integer');
+    ModulesService::getNumInstances(5);
+  }
+
+  public function testGetNumInstancesWithIntegerVersion() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$version must be a string. Actual type: integer');
+    ModulesService::getNumInstances('module1', 5);
+  }
+
+  public function testGetNumInstancesInvalidModule() {
+    $req = new GetNumInstancesRequest();
+    $resp = new ApplicationError(ErrorCode::INVALID_MODULE, 'invalid module');
+
+    $this->setExpectedException(
+        '\google\appengine\api\modules\ModulesException');
+    $this->apiProxyMock->expectCall('modules', 'GetNumInstances', $req, $resp);
+
+    $this->assertEquals(3, ModulesService::getNumInstances());
+    $this->apiProxyMock->verify();
+  }
+
+  public function testSetNumInstances() {
+    $req = new SetNumInstancesRequest();
+    $resp = new SetNumInstancesResponse();
+
+    $req->setInstances(3);
+
+    $this->apiProxyMock->expectCall('modules', 'SetNumInstances', $req, $resp);
+
+    ModulesService::setNumInstances(3);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testSetNumInstancesWithModuleAndVersion() {
+    $req = new SetNumInstancesRequest();
+    $resp = new SetNumInstancesResponse();
+
+    $req->setInstances(3);
+
+    $this->apiProxyMock->expectCall('modules', 'SetNumInstances', $req, $resp);
+
+    ModulesService::setNumInstances(3);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testSetNumInstancesWithStringInstances() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$instances must be an integer. Actual type: string');
+    ModulesService::setNumInstances('hello');
+  }
+
+  public function testSetNumInstancesWithIntegerModule() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$module must be a string. Actual type: integer');
+    ModulesService::setNumInstances(5, 10);
+  }
+
+  public function testSetNumInstancesWithIntegerVersion() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$version must be a string. Actual type: integer');
+    ModulesService::setNumInstances(5, 'module1', 5);
+  }
+
+  public function testSetNumInstancesInvalidVersion() {
+    $req = new SetNumInstancesRequest();
+    $resp = new ApplicationError(ErrorCode::INVALID_VERSION, 'invalid version');
+
+    $req->setInstances(3);
+
+    $this->setExpectedException(
+        '\google\appengine\api\modules\ModulesException');
+    $this->apiProxyMock->expectCall('modules', 'SetNumInstances', $req, $resp);
+
+    ModulesService::setNumInstances(3);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testStartModule() {
+    $req = new StartModuleRequest();
+    $resp = new StartModuleResponse();
+
+    $req->setModule('module1');
+    $req->setVersion('v1');
+
+    $this->apiProxyMock->expectCall('modules', 'StartModule', $req, $resp);
+
+    ModulesService::startModule('module1', 'v1');
+    $this->apiProxyMock->verify();
+  }
+
+  public function testStartModuleWithIntegerModule() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$module must be a string. Actual type: integer');
+    ModulesService::startModule(5, 'v1');
+  }
+
+  public function testStartModuleWithIntegerVersion() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$version must be a string. Actual type: integer');
+    ModulesService::startModule('module1', 5);
+  }
+
+  public function testStartModuleWithTransientError() {
+    $req = new StartModuleRequest();
+    $resp = new ApplicationError(ErrorCode::TRANSIENT_ERROR,
+                                 'invalid version');
+
+    $req->setModule('module1');
+    $req->setVersion('v1');
+
+    $this->setExpectedException(
+        '\google\appengine\api\modules\TransientModulesException');
+    $this->apiProxyMock->expectCall('modules', 'StartModule', $req, $resp);
+
+    ModulesService::startModule('module1', 'v1');
+    $this->apiProxyMock->verify();
+  }
+
+  public function testStopModule() {
+    $req = new StopModuleRequest();
+    $resp = new StopModuleResponse();
+
+    $this->apiProxyMock->expectCall('modules', 'StopModule', $req, $resp);
+
+    ModulesService::stopModule();
+    $this->apiProxyMock->verify();
+  }
+
+  public function testStopModuleWithModuleAndVersion() {
+    $req = new StopModuleRequest();
+    $resp = new StopModuleResponse();
+
+    $req->setModule('module1');
+    $req->setVersion('v1');
+
+    $this->apiProxyMock->expectCall('modules', 'StopModule', $req, $resp);
+
+    ModulesService::stopModule('module1', 'v1');
+    $this->apiProxyMock->verify();
+  }
+
+  public function testStopModuleWithIntegerModule() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$module must be a string. Actual type: integer');
+    ModulesService::stopModule(5, 'v1');
+  }
+
+  public function testStopModuleWithIntegerVersion() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$version must be a string. Actual type: integer');
+    ModulesService::stopModule('module1', 5);
+  }
+
+  public function testStopModuleWithTransientError() {
+    $req = new StopModuleRequest();
+    $resp = new ApplicationError(ErrorCode::TRANSIENT_ERROR,
+                                 'invalid version');
+
+    $req->setModule('module1');
+    $req->setVersion('v1');
+
+    $this->setExpectedException(
+        '\google\appengine\api\modules\TransientModulesException');
+    $this->apiProxyMock->expectCall('modules', 'StopModule', $req, $resp);
+
+    ModulesService::stopModule('module1', 'v1');
+    $this->apiProxyMock->verify();
+  }
+
+  public function testGetHostname() {
+    $req = new GetHostnameRequest();
+    $resp = new GetHostnameResponse();
+
+    $resp->setHostname('hostname');
+
+    $this->apiProxyMock->expectCall('modules', 'GetHostname', $req, $resp);
+
+    $this->assertEquals('hostname', ModulesService::getHostname());
+    $this->apiProxyMock->verify();
+  }
+
+  public function testGetHostnameWithModuleVersionAndIntegerInstance() {
+    $req = new GetHostnameRequest();
+    $resp = new GetHostnameResponse();
+
+    $req->setModule('module1');
+    $req->setVersion('v1');
+    $req->setInstance('73');
+    $resp->setHostname('hostname');
+
+    $this->apiProxyMock->expectCall('modules', 'GetHostname', $req, $resp);
+
+    $this->assertEquals('hostname',
+                        ModulesService::getHostname('module1', 'v1', 73));
+    $this->apiProxyMock->verify();
+  }
+
+  public function testGetHostnameWithModuleVersionAndStringInstance() {
+    $req = new GetHostnameRequest();
+    $resp = new GetHostnameResponse();
+
+    $req->setModule('module1');
+    $req->setVersion('v1');
+    $req->setInstance('73');
+    $resp->setHostname('hostname');
+
+    $this->apiProxyMock->expectCall('modules', 'GetHostname', $req, $resp);
+
+    $this->assertEquals('hostname',
+                        ModulesService::getHostname('module1', 'v1', '73'));
+    $this->apiProxyMock->verify();
+  }
+
+  public function testGetHostnameWithIntegerModule() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$module must be a string. Actual type: integer');
+    ModulesService::getHostname(5);
+  }
+
+  public function testGetHostnameWithIntegerVersion() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$version must be a string. Actual type: integer');
+    ModulesService::getHostname('module1', 5);
+  }
+
+  public function testGetHostnameWithArrayInstance() {
+    $this->setExpectedException('\InvalidArgumentException',
+      '$instance must be an integer or string. Actual type: array');
+    ModulesService::getHostname('module1', 'v1', []);
+  }
+
+  public function testGetHostnameWithInvalidInstancesError() {
+    $req = new GetHostnameRequest();
+    $resp = new ApplicationError(ErrorCode::INVALID_INSTANCES,
+                                 'invalid instances');
+
+    $this->setExpectedException(
+        '\google\appengine\api\modules\ModulesException');
+    $this->apiProxyMock->expectCall('modules', 'GetHostname', $req, $resp);
+
+    $this->assertEquals('hostname', ModulesService::getHostname());
+    $this->apiProxyMock->verify();
+  }
+}
diff --git a/php/sdk/google/appengine/api/modules/ModulesServiceProtoTest.php b/php/sdk/google/appengine/api/modules/TransientModulesException.php
similarity index 65%
rename from php/sdk/google/appengine/api/modules/ModulesServiceProtoTest.php
rename to php/sdk/google/appengine/api/modules/TransientModulesException.php
index fd4b28e..222feff 100644
--- a/php/sdk/google/appengine/api/modules/ModulesServiceProtoTest.php
+++ b/php/sdk/google/appengine/api/modules/TransientModulesException.php
@@ -14,15 +14,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/**
+ */
+
 namespace google\appengine\api\modules;
 
-require_once 'google/appengine/api/modules/modules_service_pb.php';
+require_once "google/appengine/api/modules/ModulesException.php";
 
-use \google\appengine\GetModulesRequest;
-
-class ModulesServiceProtoTest extends\PHPUnit_Framework_TestCase {
-  public function testGetModulesRequestInstantiation() {
-    $req = new GetModulesRequest();
-    $this->assertEquals("", $req->serializePartialToString());
-  }
+/**
+ * Thrown when a temporary error occurs. The operation can be retried.
+ */
+class TransientModulesException extends ModulesException {
 }
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php
index 1353fb7..7fb91db 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php
@@ -48,9 +48,12 @@
   // we can cache the reads.
   const DEFAULT_READ_SIZE = 524288;
 
-  // The default amount of time that reads will be held in the cache
+  // The default amount of time that reads will be held in the cache.
   const DEFAULT_READ_CACHE_EXPIRY_SECONDS = 3600;  // one hour
 
+  // The default time the writable state of a bucket will be cached for.
+  const DEFAULT_WRITABLE_CACHE_EXPIRY_SECONDS = 600;  // ten minutes
+
   // Token scopers for accessing objects in Google Cloud Storage
   const READ_SCOPE = "https://www.googleapis.com/auth/devstorage.read_only";
   const WRITE_SCOPE = "https://www.googleapis.com/auth/devstorage.read_write";
@@ -89,6 +92,9 @@
   // like a regular file system folder.
   const FOLDER_SUFFIX = '_$folder$';
 
+  // Temporary file name we create when checking if a bucket is writable.
+  const WRITABLE_TEMP_FILENAME = "/_ah_is_writable_temp_file";
+
   // Bit fields for the stat mode field
   const S_IFREG = 0100000;
   const S_IFDIR = 0040000;
@@ -123,6 +129,14 @@
    */
   const MEMCACHE_KEY_FORMAT = "_ah_gs_read_cache_%s_%s";
 
+  /**
+   * Memcache key format for caching the results of checking if a bucket is
+   * writable. The only way to check if an app can write to a bucket is by
+   * actually writing a file. As the ACL on a bucket is unlikely to change
+   * then we can cache the result.
+   */
+  const WRITABLE_MEMCACHE_KEY_FORMAT = "_ah_gs_write_bucket_cache_%s";
+
   // HTTP status codes that should be retried if they are returned by a request
   // to GCS. Retry should occur with a random exponential back-off.
   protected static $retry_error_codes = [HttpResponse::REQUEST_TIMEOUT,
@@ -139,6 +153,8 @@
                                         "bucket-owner-read",
                                         "bucket-owner-full-control"];
 
+ protected static $upload_start_header = ["x-goog-resumable" => "start"];
+
   // Map HTTP request types to URLFetch method enum.
   private static $request_map = [
       "GET" => RequestMethod::GET,
@@ -152,7 +168,9 @@
   private static $default_gs_context_options = [
       "enable_cache" => true,
       "enable_optimistic_cache" => false,
-      "cache_expiry_seconds" => self::DEFAULT_READ_CACHE_EXPIRY_SECONDS,
+      "read_cache_expiry_seconds" => self::DEFAULT_READ_CACHE_EXPIRY_SECONDS,
+      "writable_cache_expiry_seconds" =>
+          self::DEFAULT_WRITABLE_CACHE_EXPIRY_SECONDS,
   ];
 
   protected $bucket_name;  // Name of the bucket for this object.
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
index 2861447..0b055b6 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
@@ -234,7 +234,7 @@
     }
     if (in_array($status_code, self::$valid_status_codes)) {
       $this->memcache_client->set($cache_key, $result, 0,
-          $this->context_options['cache_expiry_seconds']);
+          $this->context_options['read_cache_expiry_seconds']);
     }
     return $result;
   }
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
index e44f0cb..cb7aecf 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
@@ -230,7 +230,7 @@
     $options = [ 'gs' => [
             'enable_cache' => true,
             'enable_optimistic_cache' => false,
-            'cache_expiry_seconds' => $cache_expiry_seconds,
+            'read_cache_expiry_seconds' => $cache_expiry_seconds,
         ]
     ];
     $ctx = stream_context_create($options);
@@ -436,6 +436,9 @@
                              null,
                              $response);
 
+    // Return a cached is-writable result of false from memcache.
+    $this->expectIsWritableMemcacheLookup(true, false);
+
     $this->assertTrue(is_dir("gs://bucket"));
     $this->apiProxyMock->verify();
   }
@@ -485,9 +488,17 @@
                              null,
                              $response);
 
+    // Don't find the key in the cache, to force a write attempt to the bucket.
+    $temp_url = $this->makeCloudStorageObjectUrl("bucket",
+        CloudStorageClient::WRITABLE_TEMP_FILENAME);
+    $this->expectIsWritableMemcacheLookup(false, false);
+    $this->expectFileWriteStartRequest(null, null, 'foo', $temp_url, null);
+    $this->expectIsWritableMemcacheSet(true);
+
+
     $result = stat("gs://bucket/object.png");
     $this->assertEquals(37337, $result['size']);
-    $this->assertEquals(0100444, $result['mode']);
+    $this->assertEquals(0100666, $result['mode']);
     $this->assertEquals(strtotime($last_modified), $result['mtime']);
     $this->apiProxyMock->verify();
   }
@@ -517,11 +528,48 @@
                              $request_headers,
                              null,
                              $response);
+    // Return a cached is-writable result of false from memcache.
+    $this->expectIsWritableMemcacheLookup(true, false);
 
     $this->assertTrue(is_dir('gs://bucket/a/b/'));
     $this->apiProxyMock->verify();
   }
 
+  public function testStatObjectWithCommonPrefixSuccess() {
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $request_headers = $this->getStandardRequestHeaders();
+    $last_modified = 'Mon, 01 Jul 2013 10:02:46 GMT';
+    $common_prefix_results = ['a/b/c/',
+        'a/b/d/',
+    ];
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+        'body' => $this->makeGetBucketXmlResponse('a/b',
+                                                  [],
+                                                  null,
+                                                  $common_prefix_results),
+    ];
+    $expected_url = $this->makeCloudStorageObjectUrl('bucket', null);
+    $expected_query = http_build_query([
+        'delimiter' => CloudStorageClient::DELIMITER,
+        'max-keys' => CloudStorageUrlStatClient::MAX_KEYS,
+        'prefix' => 'a/b',
+    ]);
+
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+    // Return a cached is-writable result of false from memcache.
+    $this->expectIsWritableMemcacheLookup(true, false);
+
+    $this->assertTrue(is_dir('gs://bucket/a/b'));
+    $this->apiProxyMock->verify();
+  }
+
   public function testStatObjectFailed() {
     $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
     $request_headers = $this->getStandardRequestHeaders();
@@ -1366,9 +1414,13 @@
     $request_headers = [
         "x-goog-resumable" => "start",
         "Authorization" => "OAuth foo token",
-        "Content-Type" => $content_type,
-        "x-goog-acl" => $acl,
     ];
+    if ($content_type != null) {
+      $request_headers['Content-Type'] = $content_type;
+    }
+    if ($acl != null) {
+      $request_headers['x-goog-acl'] = $acl;
+    }
     if (isset($metadata)) {
       foreach ($metadata as $key => $value) {
         $request_headers["x-goog-meta-" . $key] = $value;
@@ -1466,6 +1518,30 @@
                                     $resp);
   }
 
+  private function expectIsWritableMemcacheLookup($key_found, $result) {
+    if ($key_found) {
+      $lookup_result = ['is_writable' => $result];
+    } else {
+      $lookup_result = false;
+    }
+
+    $this->mock_memcache->expects($this->at($this->mock_memcache_call_index++))
+                        ->method('get')
+                        ->with($this->stringStartsWith(
+                            '_ah_gs_write_bucket_cache_'))
+                        ->will($this->returnValue($lookup_result));
+  }
+
+  private function expectIsWritableMemcacheSet($value) {
+    $this->mock_memcache->expects($this->at($this->mock_memcache_call_index++))
+        ->method('set')
+        ->with($this->stringStartsWith('_ah_gs_write_bucket_cache_'),
+               ['is_writable' => $value],
+               null,
+               CloudStorageClient::DEFAULT_WRITABLE_CACHE_EXPIRY_SECONDS)
+        ->will($this->returnValue(false));
+  }
+
   private function makeCloudStorageObjectUrl($bucket = "bucket",
                                              $object = "/object.png") {
     if (isset($object)){
@@ -1489,7 +1565,8 @@
 
   private function makeGetBucketXmlResponse($prefix,
                                             $contents_array,
-                                            $next_marker = null) {
+                                            $next_marker = null,
+                                            $common_prefix_array = null) {
     $result = "<?xml version='1.0' encoding='UTF-8'?>
         <ListBucketResult xmlns='http://doc.s3.amazonaws.com/2006-03-01'>
         <Name>sjl-test</Name>
@@ -1512,6 +1589,13 @@
       }
       $result .= '</Contents>';
     }
+    if (isset($common_prefix_array)) {
+      foreach($common_prefix_array as $common_prefix) {
+        $result .= '<CommonPrefixes>';
+        $result .= '<Prefix>' . $common_prefix . '</Prefix>';
+        $result .= '</CommonPrefixes>';
+      }
+    }
     $result .= "</ListBucketResult>";
     return $result;
   }
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php
index dae2769..101acdc 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php
@@ -110,6 +110,10 @@
     // PHP internal APIs check the access mode, we'll set them all to readable.
     $mode |= parent::S_IRUSR | parent::S_IRGRP | parent::S_IROTH;
 
+    if ($this->isBucketWritable($this->bucket_name)) {
+      $mode |= parent::S_IWUSR | parent::S_IWGRP | parent::S_IWOTH;
+    }
+
     $stat_args["mode"] = $mode;
     if (isset($mtime)) {
       $unix_time = strtotime($mtime);
@@ -189,7 +193,55 @@
           'mtime' => (string) $content->LastModified,
       ];
     }
-
+    // Subdirectories will be returned in the CommonPrefixes section. Refer to
+    // https://developers.google.com/storage/docs/reference-methods#getbucket
+    foreach($xml->CommonPrefixes as $common_prefix) {
+      $results[] = [
+          'name' => (string) $common_prefix->Prefix,
+      ];
+    }
     return $results;
   }
+
+  /**
+   * Test if a given bucket is writable. We will cache results in memcache as
+   * this is an expensive operation. This might lead to incorrect results being
+   * returned for this call for a short period while the result remains in the
+   * cache.
+   */
+  private function isBucketWritable($bucket) {
+    $cache_key_name = sprintf(parent::WRITABLE_MEMCACHE_KEY_FORMAT, $bucket);
+    $memcache = new \Memcache();
+    $result = $memcache->get($cache_key_name);
+
+    if ($result) {
+      return $result['is_writable'];
+    }
+
+    // We determine if the bucket is writable by trying to start a resumable
+    // upload. GCS will cleanup the abandoned upload after 7 days, and it will
+    // not be charged to the bucket owner.
+    $token_header = $this->getOAuthTokenHeader(parent::WRITE_SCOPE);
+    if ($token_header === false) {
+      return false;
+    }
+    $headers = array_merge(parent::$upload_start_header, $token_header);
+    $url = parent::createObjectUrl($bucket, parent::WRITABLE_TEMP_FILENAME);
+    $http_response = $this->makeHttpRequest($url,
+                                            "POST",
+                                            $headers);
+
+    if ($http_response === false) {
+      return false;
+    }
+
+    $status_code = $http_response['status_code'];
+    $is_writable = $status_code == HttpResponse::CREATED;
+
+    $memcache->set($cache_key_name,
+                   ['is_writable' => $is_writable],
+                   null,
+                   $this->context_options['writable_cache_expiry_seconds']);
+    return $is_writable;
+  }
 }
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php
index e070b74..6e8f2af 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php
@@ -38,8 +38,6 @@
   // http://tools.ietf.org/html/rfc2616#section-4.2
   const METADATA_VALUE_REGEX = "/^[[:print:]]*$/";
 
-  private static $upload_start_header = ["x-goog-resumable" => "start"];
-
   // The array of bytes to be written to GS
   private $byte_buffer;
 
@@ -63,7 +61,7 @@
    * @return true if the streamable upload started, false otherwise.
    */
   public function initialize() {
-    $headers = self::$upload_start_header;
+    $headers = parent::$upload_start_header;
 
     $token_header = $this->getOAuthTokenHeader(parent::WRITE_SCOPE);
     if ($token_header === false) {
diff --git a/remote_api_shell.py b/remote_api_shell.py
index af18e1c..661f26e 100644
--- a/remote_api_shell.py
+++ b/remote_api_shell.py
@@ -56,15 +56,28 @@
   Raises:
     ValueError: If no proper path could be determined.
   """
-  py_file = __file__.replace('.pyc', '.py')
-  dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
-               os.path.abspath(os.path.dirname(py_file))]
-  for dir_path in dir_paths:
-    sibling_path = os.path.join(dir_path, sibling)
-    if os.path.exists(sibling_path):
-      return dir_path
-  raise ValueError('Could not determine directory that contains both, this '
-                   'file and %s.' % sibling)
+  if 'GAE_SDK_ROOT' in os.environ:
+    gae_sdk_root = os.path.abspath(os.environ['GAE_SDK_ROOT'])
+
+
+
+    os.environ['GAE_SDK_ROOT'] = gae_sdk_root
+    for dir_path in [gae_sdk_root,
+                     os.path.join(gae_sdk_root, 'google_appengine')]:
+      if os.path.exists(os.path.join(dir_path, sibling)):
+        return dir_path
+    raise ValueError('GAE_SDK_ROOT %r does not refer to a valid SDK '
+                     'directory' % gae_sdk_root)
+  else:
+    py_file = __file__.replace('.pyc', '.py')
+    dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
+                 os.path.abspath(os.path.dirname(py_file))]
+    for dir_path in dir_paths:
+      sibling_path = os.path.join(dir_path, sibling)
+      if os.path.exists(sibling_path):
+        return dir_path
+    raise ValueError('Could not determine SDK root; please set GAE_SDK_ROOT '
+                     'environment variable.')
 
 
 
diff --git a/uritemplate/__init__.py b/uritemplate/__init__.py
deleted file mode 100644
index 046cf61..0000000
--- a/uritemplate/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/python2.4
-#
-# Copyright 2010 Google Inc. All Rights Reserved.
-
-"""Module definition for uritemplate.
-
-   This differs from the upstream version (which is an empty file),
-   in order to expose the 'expand' method from the module
-   the same way that it is exposed by the upstream version.
-"""
-
-__author__ = 'ojarjur@google.com (Omar Jarjur)'
-
-from uritemplate import expand
diff --git a/uritemplate/uritemplate.py b/uritemplate/uritemplate.py
deleted file mode 100644
index c681049..0000000
--- a/uritemplate/uritemplate.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# Early, and incomplete implementation of -04.
-#
-import re
-import urllib
-
-RESERVED = ":/?#[]@!$&'()*+,;="
-OPERATOR = "+./;?|!@"
-EXPLODE = "*+"
-MODIFIER = ":^"
-TEMPLATE = re.compile("{([^\}]+)}")
-
-def _tostring(varname, value, explode, operator, safe=""):
-  if type(value) == type([]):
-    if explode == "+":
-      return ",".join([varname + "." + urllib.quote(x, safe) for x in value])
-    else:
-      return ",".join([urllib.quote(x, safe) for x in value])
-  if type(value) == type({}):
-    keys = value.keys()
-    keys.sort()
-    if explode == "+":
-      return ",".join([varname + "." + urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
-    else:
-      return ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
-  else:
-    return urllib.quote(value, safe)
-
-
-def _tostring_path(varname, value, explode, operator, safe=""):
-  joiner = operator
-  if type(value) == type([]):
-    if explode == "+":
-      return joiner.join([varname + "." + urllib.quote(x, safe) for x in value])
-    elif explode == "*":
-      return joiner.join([urllib.quote(x, safe) for x in value])
-    else:
-      return ",".join([urllib.quote(x, safe) for x in value])
-  elif type(value) == type({}):
-    keys = value.keys()
-    keys.sort()
-    if explode == "+":
-      return joiner.join([varname + "." + urllib.quote(key, safe) + joiner + urllib.quote(value[key], safe) for key in keys])
-    elif explode == "*":
-      return joiner.join([urllib.quote(key, safe) + joiner + urllib.quote(value[key], safe) for key in keys])
-    else:
-      return ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
-  else:
-    if value:
-      return urllib.quote(value, safe)
-    else:
-      return ""
-
-
-def _tostring_semi(varname, value, explode, operator, safe=""):
-  joiner = operator
-  if operator == "?":
-    joiner = "&"
-  if type(value) == type([]):
-    if explode == "+":
-      return joiner.join([varname + "=" + urllib.quote(x, safe) for x in value])
-    elif explode == "*":
-      return joiner.join([urllib.quote(x, safe) for x in value])
-    else:
-      return ",".join([urllib.quote(x, safe) for x in value])
-  elif type(value) == type({}):
-    keys = value.keys()
-    keys.sort()
-    if explode == "+":
-      return joiner.join([varname + "." + urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
-    elif explode == "*":
-      return joiner.join([urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
-    else:
-      return ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
-  else:
-    if value:
-      return varname + "=" + urllib.quote(value, safe)
-    else:
-      return varname 
-
-
-def _tostring_query(varname, value, explode, operator, safe=""):
-  joiner = operator
-  if operator == "?":
-    joiner = "&"
-  if type(value) == type([]):
-    if 0 == len(value):
-      return ""
-    if explode == "+":
-      return joiner.join([varname + "=" + urllib.quote(x, safe) for x in value])
-    elif explode == "*":
-      return joiner.join([urllib.quote(x, safe) for x in value])
-    else:
-      return varname + "=" + ",".join([urllib.quote(x, safe) for x in value])
-  elif type(value) == type({}):
-    if 0 == len(value):
-      return ""
-    keys = value.keys()
-    keys.sort()
-    if explode == "+":
-      return joiner.join([varname + "." + urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
-    elif explode == "*":
-      return joiner.join([urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
-    else:
-      return varname + "=" + ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
-  else:
-    if value:
-      return varname + "=" + urllib.quote(value, safe)
-    else:
-      return varname 
-
-TOSTRING = {
-    "" : _tostring,
-    "+": _tostring,
-    ";": _tostring_semi,
-    "?": _tostring_query,
-    "/": _tostring_path,
-    ".": _tostring_path,
-    }
-
-
-def expand(template, vars):
-  def _sub(match):
-    expression = match.group(1)
-    operator = ""
-    if expression[0] in OPERATOR:
-      operator = expression[0] 
-      varlist = expression[1:]
-    else:
-      varlist = expression
-
-    safe = ""
-    explode = ""
-    if operator == '+':
-      safe = RESERVED
-    varspecs = varlist.split(",")
-    varnames = []
-    defaults = {}
-    for varspec in varspecs:
-      default = None
-      if "=" in varspec:
-        varname, default = tuple(varspec.split("=", 1))
-      else:
-        varname = varspec
-      if varname[-1] in EXPLODE:
-        explode = varname[-1]
-        varname = varname[:-1]
-      if default:
-        defaults[varname] = default
-      varnames.append((varname, explode))
-
-    retval = []
-    joiner = operator
-    prefix = operator
-    if operator == "+":
-      prefix = ""
-      joiner = ","
-    if operator == "?":
-      joiner = "&"
-    if operator == "":
-      joiner = ","
-    for varname, explode in varnames:
-      if varname in vars:
-        value = vars[varname]
-        #if not value and (type(value) == type({}) or type(value) == type([])) and varname in defaults:
-        if not value and value != "" and varname in defaults:
-          value = defaults[varname]
-      elif varname in defaults:
-        value = defaults[varname]
-      else:
-        continue
-      retval.append(TOSTRING[operator](varname, value, explode, operator, safe=safe))
-    if "".join(retval):
-      return prefix + joiner.join(retval)
-    else:
-      return ""
-
-  return TEMPLATE.sub(_sub, template)